repo_name    stringlengths (4–116)
path         stringlengths (4–379)
size         stringlengths (1–7)
content      stringlengths (3–1.05M)
license      stringclasses (15 values)
markphip/testing
jira-dvcs-connector-plugin/src/main/java/com/atlassian/jira/plugins/dvcs/dao/impl/SyncAuditLogDaoImpl.java
12884
package com.atlassian.jira.plugins.dvcs.dao.impl; import com.atlassian.activeobjects.external.ActiveObjects; import com.atlassian.event.api.EventPublisher; import com.atlassian.jira.plugins.dvcs.activeobjects.v3.SyncAuditLogMapping; import com.atlassian.jira.plugins.dvcs.analytics.event.DvcsSyncEndAnalyticsEvent; import com.atlassian.jira.plugins.dvcs.dao.SyncAuditLogDao; import com.atlassian.jira.plugins.dvcs.util.ActiveObjectsUtils; import com.atlassian.plugin.spring.scanner.annotation.imports.ComponentImport; import com.atlassian.sal.api.transaction.TransactionCallback; import net.java.ao.Query; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.concurrent.Callable; import static com.google.common.base.Preconditions.checkNotNull; @Component public class SyncAuditLogDaoImpl implements SyncAuditLogDao { private static final int BIG_DATA_PAGESIZE = 200; private static final int ROTATION_PERIOD = 1000 * 60 * 60 * 24 * 7; private final ActiveObjects ao; private static final Logger log = LoggerFactory.getLogger(SyncAuditLogDaoImpl.class); private EventPublisher eventPublisher; @Autowired public SyncAuditLogDaoImpl(@ComponentImport ActiveObjects ao, @ComponentImport EventPublisher publisher) { super(); this.ao = checkNotNull(ao); this.eventPublisher = checkNotNull(publisher); } @Override public SyncAuditLogMapping newSyncAuditLog(final int repoId, final String syncType, final Date startDate) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { // rotate(repoId); // Map<String, Object> data = new HashMap<String, Object>(); data.put(SyncAuditLogMapping.REPO_ID, repoId); data.put(SyncAuditLogMapping.SYNC_TYPE, syncType); data.put(SyncAuditLogMapping.START_DATE, startDate); data.put(SyncAuditLogMapping.SYNC_STATUS, SyncAuditLogMapping.SYNC_STATUS_RUNNING); data.put(SyncAuditLogMapping.TOTAL_ERRORS , 0); return ao.create(SyncAuditLogMapping.class, data); } private void rotate(int repoId) { ActiveObjectsUtils.delete(ao, SyncAuditLogMapping.class, Query.select().from(SyncAuditLogMapping.class).where(SyncAuditLogMapping.REPO_ID + " = ? AND " + SyncAuditLogMapping.START_DATE + " < ?" , repoId, new Date(System.currentTimeMillis() - ROTATION_PERIOD)) ); } }); } @Override public SyncAuditLogMapping finish(final int syncId, final Date firstRequestDate, final int numRequests, final int flightTimeMs, final Date finishDate) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { SyncAuditLogMapping mapping = find(syncId); if (mapping != null) { mapping.setFirstRequestDate(firstRequestDate); mapping.setEndDate(finishDate); mapping.setNumRequests(numRequests); mapping.setFlightTimeMs(flightTimeMs); if (StringUtils.isNotBlank(mapping.getExcTrace())) { mapping.setSyncStatus(SyncAuditLogMapping.SYNC_STATUS_FAILED); } else { mapping.setSyncStatus(SyncAuditLogMapping.SYNC_STATUS_SUCCESS); } mapping.save(); fireAnalyticsEvent(mapping); } return mapping; } }); } private void fireAnalyticsEvent(SyncAuditLogMapping sync) { String syncTypeString = sync.getSyncType() == null ? 
"" : sync.getSyncType(); boolean soft = syncTypeString.contains(SyncAuditLogMapping.SYNC_TYPE_SOFT); boolean commits = syncTypeString.contains(SyncAuditLogMapping.SYNC_TYPE_CHANGESETS); boolean pullRequests = syncTypeString.contains(SyncAuditLogMapping.SYNC_TYPE_PULLREQUESTS); boolean webhook = syncTypeString.contains(SyncAuditLogMapping.SYNC_TYPE_WEBHOOKS); eventPublisher.publish(new DvcsSyncEndAnalyticsEvent(soft, commits, pullRequests, webhook, sync.getEndDate(), sync.getEndDate().getTime() - sync.getStartDate().getTime())); } @Override public SyncAuditLogMapping pause(final int syncId) { return status(syncId); } protected SyncAuditLogMapping status(final int syncId) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { SyncAuditLogMapping mapping = find(syncId); if (mapping != null) { mapping.setSyncStatus(SyncAuditLogMapping.SYNC_STATUS_SLEEPING); mapping.save(); } return mapping; } }); } @Override public SyncAuditLogMapping resume(final int syncId) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { SyncAuditLogMapping mapping = find(syncId); if (mapping != null) { if (SyncAuditLogMapping.SYNC_STATUS_SLEEPING.equals(mapping.getSyncStatus())) { mapping.setSyncStatus(SyncAuditLogMapping.SYNC_STATUS_RUNNING); mapping.save(); } } return mapping; } }); } @Override public int removeAllForRepo(final int repoId) { Integer ret = doTxQuietly(new Callable<Integer>(){ @Override public Integer call() throws Exception { return ActiveObjectsUtils.delete(ao, SyncAuditLogMapping.class, repoQuery(repoId).q()); } }); return ret == null ? -1 : ret; } @Override public SyncAuditLogMapping setException(final int syncId, final Throwable t, final boolean overwriteOld) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { SyncAuditLogMapping found = find(syncId); boolean noExceptionYet = StringUtils.isBlank(found.getExcTrace()); if (t != null && (overwriteOld || noExceptionYet)) { found.setExcTrace(ExceptionUtils.getStackTrace(t)); } found.setTotalErrors(found.getTotalErrors() + 1); found.save(); return found; } }); } @Override public SyncAuditLogMapping[] getAllForRepo(final int repoId, final Integer page) { return doTxQuietly(new Callable<SyncAuditLogMapping []>(){ @Override public SyncAuditLogMapping [] call() throws Exception { return ao.find(SyncAuditLogMapping.class, repoQuery(repoId).page(page).order(SyncAuditLogMapping.START_DATE + " DESC")); } }); } @Override public SyncAuditLogMapping[] getAll(final Integer page) { return doTxQuietly(new Callable<SyncAuditLogMapping []>(){ @Override public SyncAuditLogMapping [] call() throws Exception { return ao.find(SyncAuditLogMapping.class, pageQuery(Query.select().order(SyncAuditLogMapping.START_DATE + " DESC"), page)); } }); } @Override public SyncAuditLogMapping getLastForRepo(final int repoId) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { SyncAuditLogMapping[] found = ao.find(SyncAuditLogMapping.class, repoQuery(repoId).q().limit(1).order(SyncAuditLogMapping.START_DATE + " DESC")); return found.length == 1 ? 
found[0] : null; } }); } @Override public SyncAuditLogMapping getLastSuccessForRepo(final int repoId) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { Query query = statusQueryLimitOne(repoId, SyncAuditLogMapping.SYNC_STATUS_SUCCESS); SyncAuditLogMapping[] found = ao.find(SyncAuditLogMapping.class, query); return found.length == 1 ? found[0] : null; } }); } @Override public SyncAuditLogMapping getLastFailedForRepo(final int repoId) { return doTxQuietly(new Callable<SyncAuditLogMapping>(){ @Override public SyncAuditLogMapping call() throws Exception { Query query = statusQueryLimitOne(repoId, SyncAuditLogMapping.SYNC_STATUS_FAILED); SyncAuditLogMapping[] found = ao.find(SyncAuditLogMapping.class, query); return found.length == 1 ? found[0] : null; } }); } @Override public boolean hasException(final int syncId) { Boolean ret = doTxQuietly(new Callable<Boolean>(){ @Override public Boolean call() throws Exception { SyncAuditLogMapping found = find(syncId); return found != null && StringUtils.isNotBlank(found.getExcTrace()); } }); return ret == null ? false : ret; } private SyncAuditLogMapping find(int syncId) { return ao.get(SyncAuditLogMapping.class, syncId); } private PageableQuery repoQuery(final int repoId) { return new PageableQuery(repoId); } private Query statusQueryLimitOne(int repoId, String status) { return Query.select() .from(SyncAuditLogMapping.class) .where(SyncAuditLogMapping.REPO_ID + " = ? AND " + SyncAuditLogMapping.SYNC_STATUS + " = ?", repoId, status) .limit(1) .order(SyncAuditLogMapping.START_DATE + " DESC"); } private <RET> RET doTxQuietly(final Callable<RET> callable) { return ao.executeInTransaction(new TransactionCallback<RET>() { @Override public RET doInTransaction() { try { return callable.call(); } catch (Throwable e) { log.warn("Problem during sync audit log. " + e.getMessage()); if (log.isDebugEnabled()) { log.debug("Sync audit log.", e); } return null; } } }); } class PageableQuery { private Query q; private PageableQuery(int repoId) { super(); this.q = Query.select().from(SyncAuditLogMapping.class).where(SyncAuditLogMapping.REPO_ID + " = ?", repoId); } PageableQuery offset(int offset) { q.setOffset(offset); return this; } PageableQuery limit (int limit) { q.setLimit(limit); return this; } Query page(Integer page) { pageQuery(q, page); return q; } Query q() { return q; } } private static Query pageQuery(Query q, Integer page) { q.setLimit(BIG_DATA_PAGESIZE); if (page == null) { q.setOffset(0); } else { q.setOffset(BIG_DATA_PAGESIZE * page); } return q; } }
bsd-2-clause
AlexRakita/gst_streamer
py_nodes/gst_viewer_node.py
2738
#!/usr/bin/env python
from __future__ import print_function

import rospy
import std_msgs.msg

import gst_engines

DEFAULT_PIPELINE_STRING = gst_engines.DEFAULT_VIEWER_PIPELINE
DEFAULT_PORT = gst_engines.DEFAULT_PORT
DEFAULT_AUTO_RESTART = True


class GstViewerNode(object):
    """A ROS node class handling the reception of multimedia data and viewing it."""

    def __init__(self):
        """GStreamer multimedia viewer ROS node"""
        rospy.init_node('gst_viewer')
        self._is_playing_publisher = None
        self._image_publisher = None
        port = str(rospy.get_param('~port', DEFAULT_PORT))
        source_override = rospy.get_param('~source_override', None)
        sink_override = rospy.get_param('~sink_override', None)
        pipeline_string = rospy.get_param('~pipeline_string', DEFAULT_PIPELINE_STRING)
        auto_restart = rospy.get_param('~auto_restart', DEFAULT_AUTO_RESTART)
        gst_engines.GstViewer._notify = self._ros_log
        self._engine = gst_engines.GstViewer(
            pipeline_string, port, source_override, sink_override)
        self._is_auto_restart = auto_restart
        self._init_publishers()
        self.start_engine()
        self.spin_loop()

    def start_engine(self):
        """Start the multimedia streaming"""
        rospy.loginfo('Starting engine.')
        self._engine.start()

    def stop_engine(self):
        """Stop multimedia streaming"""
        rospy.loginfo('Stopping engine.')
        self._engine.stop()

    def spin_loop(self):
        """The main spin loop"""
        rospy.loginfo('Starting main loop')
        rate = rospy.Rate(10)
        while not rospy.is_shutdown():
            self._is_playing_publisher.publish(self._engine.is_playing)
            if not self._engine.is_playing and self._is_auto_restart:
                rospy.logwarn("Auto-Restarting engine.")
                self.start_engine()
            rate.sleep()
        if self._engine.is_playing:
            self.stop_engine()

    def _init_publishers(self):
        """Initialize the publishers"""
        self._is_playing_publisher = rospy.Publisher(
            'gst_viewer_is_playing', std_msgs.msg.Bool, queue_size=1)

    def _ros_log(self, severity, msg):
        """Log event messages"""
        if severity == 'fatal':
            rospy.logfatal(msg)
        elif severity == 'err':
            rospy.logerr(msg)
        elif severity == 'warn':
            rospy.logwarn(msg)
        elif severity == 'info':
            rospy.loginfo(msg)
        elif severity == 'debug':
            rospy.logdebug(msg)
        else:
            raise NotImplementedError('Unsupported severity')


if __name__ == '__main__':
    gst_viewer_node = GstViewerNode()
bsd-2-clause
GauthamGoli/django-organizations
organizations/views.py
8405
# -*- coding: utf-8 -*- # Copyright (c) 2012-2015, Ben Lopatin and contributors # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. Redistributions in binary # form must reproduce the above copyright notice, this list of conditions and the # following disclaimer in the documentation and/or other materials provided with # the distribution # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from django.contrib.sites.models import get_current_site from django.core.urlresolvers import reverse from django.http import HttpResponseBadRequest from django.shortcuts import render, redirect from django.utils.translation import ugettext as _ from django.views.generic import (ListView, DetailView, UpdateView, CreateView, DeleteView, FormView) from .backends import invitation_backend, registration_backend from .forms import (OrganizationForm, OrganizationUserForm, OrganizationUserAddForm, OrganizationAddForm, SignUpForm) from .mixins import (OrganizationMixin, OrganizationUserMixin, MembershipRequiredMixin, AdminRequiredMixin, OwnerRequiredMixin) from .models import Organization from .utils import create_organization class BaseOrganizationList(ListView): # TODO change this to query on the specified model queryset = Organization.active.all() context_object_name = "organizations" def get_queryset(self): return super(BaseOrganizationList, self).get_queryset().filter(users=self.request.user) class BaseOrganizationDetail(OrganizationMixin, DetailView): def get_context_data(self, **kwargs): context = super(BaseOrganizationDetail, self).get_context_data(**kwargs) context['organization_users'] = self.organization.organization_users.all() context['organization'] = self.organization return context class BaseOrganizationCreate(CreateView): model = Organization form_class = OrganizationAddForm template_name = 'organizations/organization_form.html' def get_success_url(self): return reverse("organization_list") def get_form_kwargs(self): kwargs = super(BaseOrganizationCreate, self).get_form_kwargs() kwargs.update({'request': self.request}) return kwargs class BaseOrganizationUpdate(OrganizationMixin, UpdateView): form_class = OrganizationForm def get_form_kwargs(self): kwargs = super(BaseOrganizationUpdate, self).get_form_kwargs() kwargs.update({'request': self.request}) return kwargs class BaseOrganizationDelete(OrganizationMixin, DeleteView): def get_success_url(self): return reverse("organization_list") class BaseOrganizationUserList(OrganizationMixin, ListView): def get(self, request, *args, **kwargs): self.organization = self.get_organization() 
self.object_list = self.organization.organization_users.all() context = self.get_context_data(object_list=self.object_list, organization_users=self.object_list, organization=self.organization) return self.render_to_response(context) class BaseOrganizationUserDetail(OrganizationUserMixin, DetailView): pass class BaseOrganizationUserCreate(OrganizationMixin, CreateView): form_class = OrganizationUserAddForm template_name = 'organizations/organizationuser_form.html' def get_success_url(self): return reverse('organization_user_list', kwargs={'organization_pk': self.object.organization.pk}) def get_form_kwargs(self): kwargs = super(BaseOrganizationUserCreate, self).get_form_kwargs() kwargs.update({'organization': self.organization, 'request': self.request}) return kwargs def get(self, request, *args, **kwargs): self.organization = self.get_object() return super(BaseOrganizationUserCreate, self).get(request, *args, **kwargs) def post(self, request, *args, **kwargs): self.organization = self.get_object() return super(BaseOrganizationUserCreate, self).post(request, *args, **kwargs) class BaseOrganizationUserRemind(OrganizationUserMixin, DetailView): template_name = 'organizations/organizationuser_remind.html' # TODO move to invitations backend? def get_object(self, **kwargs): self.organization_user = super(BaseOrganizationUserRemind, self).get_object() if self.organization_user.user.is_active: raise HttpResponseBadRequest(_("User is already active")) return self.organization_user def post(self, request, *args, **kwargs): self.object = self.get_object() invitation_backend().send_reminder(self.object.user, **{'domain': get_current_site(self.request), 'organization': self.organization, 'sender': request.user}) return redirect(self.object) class BaseOrganizationUserUpdate(OrganizationUserMixin, UpdateView): form_class = OrganizationUserForm class BaseOrganizationUserDelete(OrganizationUserMixin, DeleteView): def get_success_url(self): return reverse('organization_user_list', kwargs={'organization_pk': self.object.organization.pk}) class OrganizationSignup(FormView): """ View that allows unregistered users to create an organization account. It simply processes the form and then calls the specified registration backend. """ form_class = SignUpForm template_name = "organizations/signup_form.html" # TODO get success from backend, because some backends may do something # else, like require verification backend = registration_backend() def dispatch(self, request, *args, **kwargs): if request.user.is_authenticated(): return redirect('organization_add') return super(OrganizationSignup, self).dispatch(request, *args, **kwargs) def get_success_url(self): if hasattr(self, 'success_url'): return self.success_url return reverse('organization_signup_success') def form_valid(self, form): """ """ user = self.backend.register_by_email(form.cleaned_data['email']) create_organization(user=user, name=form.cleaned_data['name'], slug=form.cleaned_data['slug'], is_active=False) return redirect(self.get_success_url()) def signup_success(self, request): return render(request, "organizations/signup_success.html", {}) class OrganizationList(BaseOrganizationList): pass class OrganizationCreate(BaseOrganizationCreate): """ Allows any user to create a new organization. 
""" pass class OrganizationDetail(MembershipRequiredMixin, BaseOrganizationDetail): pass class OrganizationUpdate(AdminRequiredMixin, BaseOrganizationUpdate): pass class OrganizationDelete(OwnerRequiredMixin, BaseOrganizationDelete): pass class OrganizationUserList(MembershipRequiredMixin, BaseOrganizationUserList): pass class OrganizationUserDetail(AdminRequiredMixin, BaseOrganizationUserDetail): pass class OrganizationUserUpdate(AdminRequiredMixin, BaseOrganizationUserUpdate): pass class OrganizationUserCreate(AdminRequiredMixin, BaseOrganizationUserCreate): pass class OrganizationUserRemind(AdminRequiredMixin, BaseOrganizationUserRemind): pass class OrganizationUserDelete(AdminRequiredMixin, BaseOrganizationUserDelete): pass
bsd-2-clause
liyanage/python-xcodeproject
xcodeproject/__init__.py
27
from .xcodeproject import *
bsd-2-clause
zielmicha/dotlang
dot/lib/core.py
5485
from __future__ import division
from functools import partial
import operator


class Environ(object):
    def __init__(self, parents=[]):
        self.data = {}
        self.parents = parents

    def __getitem__(self, key):
        if key in self.data:
            return self.data[key]
        for env in self.parents:
            try:
                return env[key]
            except KeyError:
                pass
        raise KeyError(key)

    def __setitem__(self, key, val):
        if key in self.data:
            self.data[key] = val
            return
        for env in self.parents:
            if type(env) is Environ and env.can_set(key):
                env[key] = val
                break
        else:
            self.data[key] = val

    def can_set(self, key):
        return key in self.data or any(
            key in parent for parent in self.parents
        )


class Ref(object):
    def __init__(self, env, name):
        self.env = env
        self.name = name

    def __call__(self):
        return self.env[self.name]

    def set(self, val):
        self.env[self.name] = val

    def __repr__(self):
        return '<dotlib.Ref %r env=%r>' % (self.name, self.env)


class BuiltinRef(object):
    def __init__(self, fget, fset):
        self.fget = fget
        self.fset = fset

    def __call__(self):
        return self.fget()

    def set(self, val):
        self.fset(val)


builtins = {}


def func_print(*args):
    print ' '.join(map(str, args))

builtins['func-print'] = func_print


def reducer(a):
    return lambda *args: reduce(a, args)

builtins['func-add'] = reducer(lambda a, b: a + b)
builtins['func-mul'] = reducer(lambda a, b: a * b)
builtins['func-div'] = lambda a, b: a / b
builtins['func-intdiv'] = lambda a, b: a // b
builtins['func-sub'] = lambda a, b: a - b
builtins['func-neq'] = lambda a, b: a != b
builtins['func-eq'] = lambda a, b: a == b
builtins['func-leq'] = lambda a, b: a <= b
# TODO: one name
builtins['func-lt'] = builtins['func-le'] = lambda a, b: a < b
builtins['func-geq'] = lambda a, b: a >= b
# TODO: one name
builtins['func-gt'] = builtins['func-ge'] = lambda a, b: a > b
builtins['func-not'] = lambda a: not a
builtins['func-int'] = lambda x: int(x)
builtins['func-sum'] = sum
builtins['func-len'] = len
builtins['func-abs'] = abs
builtins['func-at'] = lambda x, val: x[val]
builtins['func-or'] = lambda a, b: a or b
builtins['func-and'] = lambda a, b: a and b


class AtRef(object):
    def __init__(self, x, key):
        self.x = x
        self.key = key

    def __call__(self):
        return self.x[self.key]

    def set(self, val):
        self.x[self.key] = val

builtins['func-atref'] = AtRef
builtins['func-call'] = lambda *args: args[-1](*args[:-1])
builtins['func-set'] = lambda val, ref: ref.set(val)
builtins['func-list'] = lambda *args: list(args)


def func_multi(arg, *funclist):
    # TODO: use streams
    # (streams are not yet implemented and I just come up with name)
    return map(lambda func: func(arg), funclist)

builtins['func-multi'] = func_multi
builtins['func-map'] = lambda *args: map(partial(args[-1]), *args[:-1])


def func_assert(a):
    assert a

builtins['func-assert'] = func_assert


def func_def(name, body):
    if isinstance(name, Ref):
        body.name = name.name
    name.set(body)

builtins['func-def'] = func_def


class UserFunction(object):
    def __init__(self, env, code):
        self.env = env
        self.code = code
        self.name = '<lambda>'

    def __repr__(self):
        return '<dotlib.UserFunction %r at %x>' % (self.name, id(self))

    def __call__(self, *args):
        import dot.interpreter
        frame = dot.interpreter.RootFrame(self.env, self.code)
        frame.frame.stack += args
        return frame.run()


def func_not_found(name):
    if name.startswith('@'):
        return operator.attrgetter(name[1:])
    else:
        def helper(self=Ellipsis, *args):
            if self == Ellipsis:
                raise AttributeError('no function named %r' % name)
            try:
                target = getattr(self, name)
            except AttributeError:
                raise AttributeError('no function named %r' % name)
            return target(*args)
        return helper

builtins['func-func-not-found'] = func_not_found


def dollar_swap(frame, *args):
    frame.stack += reversed(args)
    return frame
dollar_swap.call_with_frame = True


def dollar_list(frame, items):
    frame.stack += list(items)
    return frame
dollar_list.call_with_frame = True

builtins['func-$swap'] = dollar_swap
builtins['func-$list'] = dollar_list

for key, func in builtins.items():
    if isinstance(func, type(lambda: 0)):
        func.func_name = key


def func_next(obj):
    return obj[0], obj[1:]  # TODO

builtins['func-next'] = func_next


def func_first(obj):
    return obj[0]  # TODO

builtins['func-first'] = func_first


def func_slice(start, end, seq=None):
    if seq is None:
        seq = end
        end = len(seq)
    return seq[start: end]

builtins['func-slice'] = func_slice


def func_listref(*args):
    def fset(setargs):
        assert len(args) == len(setargs)
        for arg, setarg in zip(args, setargs):
            arg.set(setarg)
    return BuiltinRef(fget=lambda: args, fset=fset)

builtins['func-listref'] = func_listref

builtins['func-push'] = lambda item, obj: obj.append(item)


def func_log_by(val, *args):
    builtins['func-print'](val, *args)
    return val

builtins['func-log-by'] = func_log_by
bsd-2-clause
alebcay/homebrew-cask
Casks/cryptomator.rb
784
cask "cryptomator" do version "1.6.2" sha256 "5eb93817950df592730ad492c47ddb913d3868af4545a235e0bbebd7d528c257" url "https://github.com/cryptomator/cryptomator/releases/download/#{version}/Cryptomator-#{version}.dmg", verified: "github.com/cryptomator/cryptomator/" name "Cryptomator" desc "Multi-platform client-side cloud file encryption tool" homepage "https://cryptomator.org/" livecheck do url "https://cryptomator.org/downloads/mac/thanks/" strategy :page_match regex(%r{href=.*?/Cryptomator-(\d+(?:\.\d+)+)\.dmg}i) end depends_on macos: ">= :high_sierra" app "Cryptomator.app" zap trash: [ "~/Library/Application Support/Cryptomator", "~/Library/Logs/Cryptomator", "~/Library/Preferences/org.cryptomator.plist", ] end
bsd-2-clause
universalcore/unicore.comments.client
unicore/comments/client/tests/fixtures.py
1553
import json
from datetime import datetime

import pytz
from uuid import uuid4


'''
Comment fixtures
'''

comment_data = {
    'uuid': 'd269f09c4672400da4250342d9d7e1e4',
    'user_uuid': '2923280ee1904478bfcf7a46f26f443b',
    'content_uuid': 'f587b74816bb425ab043f1cf30de7abe',
    'app_uuid': 'bbc0035128b34ed48bdacab1799087c5',
    'comment': 'this is a comment',
    'user_name': 'foo',
    'submit_datetime': datetime.now(pytz.utc).isoformat(),
    'content_type': 'page',
    'content_title': 'I Am A Page',
    'content_url': 'http://example.com/page/',
    'locale': 'eng_ZA',
    'flag_count': '0',
    'is_removed': 'False',
    'moderation_state': 'visible',
    'ip_address': '192.168.1.1'
}
comment_json = json.dumps(comment_data)

comment_stream_data = {
    'start': 20,
    'end': 30,
    'total': 100,
    'count': 10,
    'objects': map(
        lambda i: dict(comment_data.items() + [('uuid', uuid4().hex)]),
        range(10)),
    'metadata': {'state': 'open'}
}
comment_stream_json = json.dumps(comment_stream_data)


'''
Flag fixtures
'''

flag_data = {
    'app_uuid': 'bbc0035128b34ed48bdacab1799087c5',
    'comment_uuid': 'd269f09c4672400da4250342d9d7e1e4',
    'user_uuid': 'f0ee8eac105b485287d7633673dc93ef',
    'submit_datetime': datetime.now(pytz.utc).isoformat(),
}
flag_json = json.dumps(flag_data)


'''
Error fixtures
'''

generic_error_data = {
    'status': 'error',
    'error_code': 'ERROR_CODE',
    'error_dict': {},
    'error_message': 'ERROR_MESSAGE',
}
generic_error_json = json.dumps(generic_error_data)
bsd-2-clause
phpmentors-jp/codeiq-greeter-php
scripts/app.php
974
<?php
require_once __DIR__.'/../vendor/autoload.php';

use CodeIQ\Greeter\Clock;
use CodeIQ\Greeter\Globe;
use CodeIQ\Greeter\Greeter;
use CodeIQ\Greeter\TimeRangeFactory;

$clock = new Clock();
$globe = new Globe();
$greeter = new Greeter($clock, $globe);

$timeRange = new TimeRangeFactory();
$greeter->addTimeRange($timeRange->create(
    'morning', '05:00:00', '12:00:00'
));
$greeter->addTimeRange($timeRange->create(
    'afternoon', '12:00:00', '18:00:00'
));
$greeter->addTimeRange($timeRange->create(
    'night', '18:00:00', '05:00:00'
));

$greeter->addGreeting('ja', 'morning', 'おはようございます');
$greeter->addGreeting('ja', 'afternoon', 'こんにちは');
$greeter->addGreeting('ja', 'night', 'こんばんは');
$greeter->addGreeting('en', 'morning', 'Good morning');
$greeter->addGreeting('en', 'afternoon', 'Good afternoon');
$greeter->addGreeting('en', 'night', 'Good evening');

echo $greeter->greet();
bsd-2-clause
wied03/ansible-ruby
lib/ansible/ruby/modules/generated/net_tools/ipinfoio_facts.rb
593
# frozen_string_literal: true
# See LICENSE.txt at root of repository
# GENERATED FILE - DO NOT EDIT!!

require 'ansible/ruby/modules/base'

module Ansible
  module Ruby
    module Modules
      # Gather IP geolocation facts of a host's IP address using ipinfo.io API
      class Ipinfoio_facts < Base
        # @return [Integer, nil] HTTP connection timeout in seconds
        attribute :timeout
        validates :timeout, type: Integer

        # @return [String, nil] Set http user agent
        attribute :http_agent
        validates :http_agent, type: String
      end
    end
  end
end
bsd-2-clause
Maplecroft/django-cookie-law
cookielaw/templatetags/cookielaw_tags.py
906
import warnings

from classytags.helpers import InclusionTag
from django import template
from django.template.loader import render_to_string

register = template.Library()


class CookielawBanner(InclusionTag):
    """
    Displays cookie law banner only if user has not dismissed it yet.
    """
    template = 'cookielaw/banner.html'

    def render_tag(self, context, **kwargs):
        template_filename = self.get_template(context, **kwargs)
        if 'request' not in context:
            warnings.warn('No request object in context. '
                          'Are you sure you have django.core.context_processors.request enabled?')
        if context['request'].COOKIES.get('cookielaw_accepted', False):
            return ''
        data = self.get_context(context, **kwargs)
        return render_to_string(template_filename, data, context_instance=context)

register.tag(CookielawBanner)
bsd-2-clause
mysteryjeans/Feedbook
Feedbook/Services/Security/OAuthSignatureBase.cs
6616
using System; using System.Collections.Generic; using System.Linq; using System.Text; using Feedbook.Helper; using System.Collections.ObjectModel; using CoreSystem.RefTypeExtension; using CoreSystem.Util; using CoreSystem.ValueTypeExtension; using System.Security.Cryptography; using System.Collections.Specialized; using System.Net; namespace Feedbook.Services.Security { internal class OAuthSignatureBase { public HttpMethod Method { get; private set; } public Uri Url { get; private set; } public string Nonce { get; private set; } public string TimeStamp { get; private set; } public string OAuthVersion { get; private set; } public SignatureMethod SignatureMethod { get; private set; } public ReadOnlyCollection<QueryParameter> QueryParameters { get; private set; } public OAuthSignatureBase(HttpMethod method, Uri url, IEnumerable<QueryParameter> queryParameters) { this.Method = method; this.Url = url; this.Nonce = OAuthHelper.GenerateNonce(); this.TimeStamp = OAuthHelper.GenerateTimeStamp(); this.OAuthVersion = OAuthHelper.OAUTHVERSION_1_0; List<QueryParameter> parameters = new List<QueryParameter>(); parameters.AddRange(GetQueryParameters(url)); if (queryParameters != null) parameters.AddRange(queryParameters); this.QueryParameters = parameters.ToReadOnly(); } public OAuthSignatureBase(HttpMethod method, Uri url) : this(method, url, (IEnumerable<QueryParameter>)null) { } public OAuthSignatureBase(HttpMethod method, Uri url, string consumerKey) : this(method, url, new QueryParameter[] { new QueryParameter(OAuthHelper.OAUTH_CONSUMER_KEY, consumerKey) }) { Guard.CheckNullOrEmpty(consumerKey, "OAuthSignatureBase(consumerKey)"); } public OAuthSignatureBase(HttpMethod method, Uri url, string consumerKey, string token) : this(method, url, new QueryParameter[] { new QueryParameter(OAuthHelper.OAUTH_CONSUMER_KEY, consumerKey) ,new QueryParameter(OAuthHelper.OAUTH_TOKEN, token)}) { Guard.CheckNullOrEmpty(consumerKey, "OAuthSignatureBase(consumerKey)"); Guard.CheckNullOrEmpty(consumerKey, "OAuthSignatureBase(token)"); } private IEnumerable<QueryParameter> GetQueryParameters(Uri url) { if (!string.IsNullOrEmpty(url.Query)) { //Decode the parameters and re-encode using the oAuth UrlEncode method. 
var parameters = OAuthHelper.ParseQueryString(url.Query); foreach (string key in parameters.Keys) yield return new QueryParameter(key, parameters[key]); } } public string GenerateSignature(string consumerSecret, string tokenSecret, out string normalizedUrl, out string normalizedParameters) { Guard.CheckNullOrEmpty(consumerSecret, "GenerateSignature(ConsumerSecret)"); string signatureBase = this.GetSignatureBase(out normalizedUrl, out normalizedParameters); switch (this.SignatureMethod) { case SignatureMethod.HMAC_SHA1: var hmacsha1 = new HMACSHA1(); hmacsha1.Key = Encoding.UTF8.GetBytes(string.Format("{0}&{1}", OAuthHelper.UrlEncode(consumerSecret), OAuthHelper.UrlEncode(tokenSecret))); var hashBytes = hmacsha1.ComputeHash(Encoding.UTF8.GetBytes(signatureBase)); return Convert.ToBase64String(hashBytes); default: throw new NotSupportedException(string.Format("Signature Method not supported: '{0}'", this.SignatureMethod)); } } public override string ToString() { string normalizedUrl; string normalizedParameters; return this.GetSignatureBase(out normalizedUrl, out normalizedParameters); } private string GetSignatureBase(out string normalizedUrl, out string normalizedParameters) { List<QueryParameter> queryParameters = new List<QueryParameter>(); queryParameters.AddRange(this.QueryParameters); queryParameters.Add(new QueryParameter(OAuthHelper.OAUTH_NONCE, this.Nonce)); queryParameters.Add(new QueryParameter(OAuthHelper.OAUTH_TIMESTAMP, this.TimeStamp)); queryParameters.Add(new QueryParameter(OAuthHelper.OAUTH_VERSION, this.OAuthVersion)); queryParameters.Add(new QueryParameter(OAuthHelper.OAUTH_SIGNATURE_METHOD, this.SignatureMethod.ToDescription())); queryParameters.Sort(); normalizedUrl = string.Format("{0}://{1}", this.Url.Scheme, this.Url.Host); if (!((this.Url.Scheme == "http" && this.Url.Port == 80) || (this.Url.Scheme == "https" && this.Url.Port == 443))) normalizedUrl += ":" + this.Url.Port; normalizedUrl += this.Url.AbsolutePath; normalizedParameters = NormalizeRequestParameters(queryParameters); StringBuilder signatureBase = new StringBuilder(); signatureBase.AppendFormat("{0}&", this.Method.ToString()); signatureBase.AppendFormat("{0}&", OAuthHelper.UrlEncode(normalizedUrl)); signatureBase.AppendFormat("{0}", OAuthHelper.UrlEncode(normalizedParameters)); return signatureBase.ToString(); } private static string NormalizeRequestParameters(IList<QueryParameter> parameters) { QueryParameter parameter = null; StringBuilder parameterStrBuilder = new StringBuilder(); for (int i = 0; i < parameters.Count; i++) { parameter = parameters[i]; if (!string.IsNullOrEmpty(parameter.Name)) parameterStrBuilder.AppendFormat("{0}={1}", OAuthHelper.UrlEncode(parameter.Name), OAuthHelper.UrlEncode(parameter.Value)); else parameterStrBuilder.AppendFormat("{0}", OAuthHelper.UrlEncode(parameter.Value)); if (i < parameters.Count - 1) parameterStrBuilder.Append("&"); } return parameterStrBuilder.ToString(); } } }
bsd-2-clause
ffalchi/it.cnr.isti.vir
src/it/cnr/isti/vir/similarity/SimilarityOptionException.java
1757
/*******************************************************************************
 * Copyright (c) 2013, Fabrizio Falchi (NeMIS Lab., ISTI-CNR, Italy)
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 ******************************************************************************/
package it.cnr.isti.vir.similarity;

public class SimilarityOptionException extends Exception {

    public SimilarityOptionException() {
        super();
    }

    public SimilarityOptionException(String str) {
        super(str);
    }
}
bsd-2-clause
applesrc/WebCore
inspector/InspectorOverlay.cpp
39415
/* * Copyright (C) 2011 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of Apple Inc. ("Apple") nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #include "InspectorOverlay.h" #include "DocumentLoader.h" #include "Element.h" #include "EmptyClients.h" #include "FrameView.h" #include "GraphicsContext.h" #include "InspectorClient.h" #include "InspectorOverlayPage.h" #include "MainFrame.h" #include "Node.h" #include "Page.h" #include "PageConfiguration.h" #include "PolygonShape.h" #include "PseudoElement.h" #include "RectangleShape.h" #include "RenderBoxModelObject.h" #include "RenderElement.h" #include "RenderFlowThread.h" #include "RenderInline.h" #include "RenderNamedFlowFragment.h" #include "RenderNamedFlowThread.h" #include "RenderRegion.h" #include "RenderView.h" #include "ScriptController.h" #include "ScriptSourceCode.h" #include "Settings.h" #include "StyledElement.h" #include <bindings/ScriptValue.h> #include <inspector/InspectorProtocolObjects.h> #include <inspector/InspectorValues.h> #include <wtf/text/StringBuilder.h> using namespace Inspector; namespace WebCore { static void contentsQuadToCoordinateSystem(const FrameView* mainView, const FrameView* view, FloatQuad& quad, InspectorOverlay::CoordinateSystem coordinateSystem) { quad.setP1(view->contentsToRootView(roundedIntPoint(quad.p1()))); quad.setP2(view->contentsToRootView(roundedIntPoint(quad.p2()))); quad.setP3(view->contentsToRootView(roundedIntPoint(quad.p3()))); quad.setP4(view->contentsToRootView(roundedIntPoint(quad.p4()))); if (coordinateSystem == InspectorOverlay::CoordinateSystem::View) quad += toIntSize(mainView->scrollPosition()); } static void contentsQuadToPage(const FrameView* mainView, const FrameView* view, FloatQuad& quad) { contentsQuadToCoordinateSystem(mainView, view, quad, InspectorOverlay::CoordinateSystem::View); } static void buildRendererHighlight(RenderObject* renderer, RenderRegion* region, const HighlightConfig& highlightConfig, Highlight& highlight, InspectorOverlay::CoordinateSystem coordinateSystem) { Frame* containingFrame = renderer->document().frame(); if (!containingFrame) return; highlight.setDataFromConfig(highlightConfig); FrameView* 
containingView = containingFrame->view(); FrameView* mainView = containingFrame->page()->mainFrame().view(); // RenderSVGRoot should be highlighted through the isBox() code path, all other SVG elements should just dump their absoluteQuads(). bool isSVGRenderer = renderer->node() && renderer->node()->isSVGElement() && !renderer->isSVGRoot(); if (isSVGRenderer) { highlight.type = HighlightType::Rects; renderer->absoluteQuads(highlight.quads); for (auto& quad : highlight.quads) contentsQuadToCoordinateSystem(mainView, containingView, quad, coordinateSystem); } else if (is<RenderBox>(*renderer) || is<RenderInline>(*renderer)) { LayoutRect contentBox; LayoutRect paddingBox; LayoutRect borderBox; LayoutRect marginBox; if (is<RenderBox>(*renderer)) { auto& renderBox = downcast<RenderBox>(*renderer); LayoutBoxExtent margins(renderBox.marginTop(), renderBox.marginRight(), renderBox.marginBottom(), renderBox.marginLeft()); if (!renderBox.isOutOfFlowPositioned() && region) { RenderBox::LogicalExtentComputedValues computedValues; renderBox.computeLogicalWidthInRegion(computedValues, region); margins.start(renderBox.style().writingMode()) = computedValues.m_margins.m_start; margins.end(renderBox.style().writingMode()) = computedValues.m_margins.m_end; } paddingBox = renderBox.clientBoxRectInRegion(region); contentBox = LayoutRect(paddingBox.x() + renderBox.paddingLeft(), paddingBox.y() + renderBox.paddingTop(), paddingBox.width() - renderBox.paddingLeft() - renderBox.paddingRight(), paddingBox.height() - renderBox.paddingTop() - renderBox.paddingBottom()); borderBox = LayoutRect(paddingBox.x() - renderBox.borderLeft(), paddingBox.y() - renderBox.borderTop(), paddingBox.width() + renderBox.borderLeft() + renderBox.borderRight(), paddingBox.height() + renderBox.borderTop() + renderBox.borderBottom()); marginBox = LayoutRect(borderBox.x() - margins.left(), borderBox.y() - margins.top(), borderBox.width() + margins.left() + margins.right(), borderBox.height() + margins.top() + margins.bottom()); } else { auto& renderInline = downcast<RenderInline>(*renderer); // RenderInline's bounding box includes paddings and borders, excludes margins. borderBox = renderInline.linesBoundingBox(); paddingBox = LayoutRect(borderBox.x() + renderInline.borderLeft(), borderBox.y() + renderInline.borderTop(), borderBox.width() - renderInline.borderLeft() - renderInline.borderRight(), borderBox.height() - renderInline.borderTop() - renderInline.borderBottom()); contentBox = LayoutRect(paddingBox.x() + renderInline.paddingLeft(), paddingBox.y() + renderInline.paddingTop(), paddingBox.width() - renderInline.paddingLeft() - renderInline.paddingRight(), paddingBox.height() - renderInline.paddingTop() - renderInline.paddingBottom()); // Ignore marginTop and marginBottom for inlines. marginBox = LayoutRect(borderBox.x() - renderInline.marginLeft(), borderBox.y(), borderBox.width() + renderInline.horizontalMarginExtent(), borderBox.height()); } FloatQuad absContentQuad; FloatQuad absPaddingQuad; FloatQuad absBorderQuad; FloatQuad absMarginQuad; if (region) { RenderFlowThread* flowThread = region->flowThread(); // Figure out the quads in the space of the RenderFlowThread. 
absContentQuad = renderer->localToContainerQuad(FloatRect(contentBox), flowThread); absPaddingQuad = renderer->localToContainerQuad(FloatRect(paddingBox), flowThread); absBorderQuad = renderer->localToContainerQuad(FloatRect(borderBox), flowThread); absMarginQuad = renderer->localToContainerQuad(FloatRect(marginBox), flowThread); // Move the quad relative to the space of the current region. LayoutRect flippedRegionRect(region->flowThreadPortionRect()); flowThread->flipForWritingMode(flippedRegionRect); FloatSize delta = region->contentBoxRect().location() - flippedRegionRect.location(); absContentQuad.move(delta); absPaddingQuad.move(delta); absBorderQuad.move(delta); absMarginQuad.move(delta); // Resolve the absolute quads starting from the current region. absContentQuad = region->localToAbsoluteQuad(absContentQuad); absPaddingQuad = region->localToAbsoluteQuad(absPaddingQuad); absBorderQuad = region->localToAbsoluteQuad(absBorderQuad); absMarginQuad = region->localToAbsoluteQuad(absMarginQuad); } else { absContentQuad = renderer->localToAbsoluteQuad(FloatRect(contentBox)); absPaddingQuad = renderer->localToAbsoluteQuad(FloatRect(paddingBox)); absBorderQuad = renderer->localToAbsoluteQuad(FloatRect(borderBox)); absMarginQuad = renderer->localToAbsoluteQuad(FloatRect(marginBox)); } contentsQuadToCoordinateSystem(mainView, containingView, absContentQuad, coordinateSystem); contentsQuadToCoordinateSystem(mainView, containingView, absPaddingQuad, coordinateSystem); contentsQuadToCoordinateSystem(mainView, containingView, absBorderQuad, coordinateSystem); contentsQuadToCoordinateSystem(mainView, containingView, absMarginQuad, coordinateSystem); highlight.type = HighlightType::Node; highlight.quads.append(absMarginQuad); highlight.quads.append(absBorderQuad); highlight.quads.append(absPaddingQuad); highlight.quads.append(absContentQuad); } } static void buildNodeHighlight(Node& node, RenderRegion* region, const HighlightConfig& highlightConfig, Highlight& highlight, InspectorOverlay::CoordinateSystem coordinateSystem) { RenderObject* renderer = node.renderer(); if (!renderer) return; buildRendererHighlight(renderer, region, highlightConfig, highlight, coordinateSystem); } static void buildQuadHighlight(const FloatQuad& quad, const HighlightConfig& highlightConfig, Highlight& highlight) { highlight.setDataFromConfig(highlightConfig); highlight.type = HighlightType::Rects; highlight.quads.append(quad); } InspectorOverlay::InspectorOverlay(Page& page, InspectorClient* client) : m_page(page) , m_client(client) , m_paintRectUpdateTimer(*this, &InspectorOverlay::updatePaintRectsTimerFired) { } InspectorOverlay::~InspectorOverlay() { } void InspectorOverlay::paint(GraphicsContext& context) { if (!shouldShowOverlay()) return; GraphicsContextStateSaver stateSaver(context); FrameView* view = overlayPage()->mainFrame().view(); view->updateLayoutAndStyleIfNeededRecursive(); view->paint(context, IntRect(0, 0, view->width(), view->height())); } void InspectorOverlay::getHighlight(Highlight& highlight, InspectorOverlay::CoordinateSystem coordinateSystem) const { if (!m_highlightNode && !m_highlightQuad && !m_highlightNodeList) return; highlight.type = HighlightType::Rects; if (m_highlightNode) buildNodeHighlight(*m_highlightNode, nullptr, m_nodeHighlightConfig, highlight, coordinateSystem); else if (m_highlightNodeList) { highlight.setDataFromConfig(m_nodeHighlightConfig); for (unsigned i = 0; i < m_highlightNodeList->length(); ++i) { Highlight nodeHighlight; 
buildNodeHighlight(*(m_highlightNodeList->item(i)), nullptr, m_nodeHighlightConfig, nodeHighlight, coordinateSystem); if (nodeHighlight.type == HighlightType::Node) highlight.quads.appendVector(nodeHighlight.quads); } highlight.type = HighlightType::NodeList; } else buildQuadHighlight(*m_highlightQuad, m_quadHighlightConfig, highlight); } void InspectorOverlay::setPausedInDebuggerMessage(const String* message) { m_pausedInDebuggerMessage = message ? *message : String(); update(); } void InspectorOverlay::hideHighlight() { m_highlightNode = nullptr; m_highlightNodeList = nullptr; m_highlightQuad = nullptr; update(); } void InspectorOverlay::highlightNodeList(RefPtr<NodeList>&& nodes, const HighlightConfig& highlightConfig) { m_nodeHighlightConfig = highlightConfig; m_highlightNodeList = WTFMove(nodes); m_highlightNode = nullptr; update(); } void InspectorOverlay::highlightNode(Node* node, const HighlightConfig& highlightConfig) { m_nodeHighlightConfig = highlightConfig; m_highlightNode = node; m_highlightNodeList = nullptr; update(); } void InspectorOverlay::highlightQuad(std::unique_ptr<FloatQuad> quad, const HighlightConfig& highlightConfig) { if (highlightConfig.usePageCoordinates) *quad -= toIntSize(m_page.mainFrame().view()->scrollPosition()); m_quadHighlightConfig = highlightConfig; m_highlightQuad = WTFMove(quad); update(); } Node* InspectorOverlay::highlightedNode() const { return m_highlightNode.get(); } void InspectorOverlay::didSetSearchingForNode(bool enabled) { m_client->didSetSearchingForNode(enabled); } void InspectorOverlay::setIndicating(bool indicating) { m_indicating = indicating; if (m_indicating) evaluateInOverlay(ASCIILiteral("showPageIndication")); else evaluateInOverlay(ASCIILiteral("hidePageIndication")); update(); } bool InspectorOverlay::shouldShowOverlay() const { return m_highlightNode || m_highlightNodeList || m_highlightQuad || m_indicating || m_showingPaintRects || !m_pausedInDebuggerMessage.isNull(); } void InspectorOverlay::update() { if (!shouldShowOverlay()) { m_client->hideHighlight(); return; } FrameView* view = m_page.mainFrame().view(); if (!view) return; FrameView* overlayView = overlayPage()->mainFrame().view(); IntSize viewportSize = view->unscaledVisibleContentSizeIncludingObscuredArea(); IntSize frameViewFullSize = view->unscaledVisibleContentSizeIncludingObscuredArea(ScrollableArea::IncludeScrollbars); overlayView->resize(frameViewFullSize); // Clear canvas and paint things. // FIXME: Remove extra parameter? reset(viewportSize, IntSize()); // Include scrollbars to avoid masking them by the gutter. drawGutter(); drawNodeHighlight(); drawQuadHighlight(); drawPausedInDebuggerMessage(); drawPaintRects(); // Position DOM elements. 
overlayPage()->mainFrame().document()->recalcStyle(Style::Force); if (overlayView->needsLayout()) overlayView->layout(); forcePaint(); } static Ref<Inspector::Protocol::OverlayTypes::Point> buildObjectForPoint(const FloatPoint& point) { return Inspector::Protocol::OverlayTypes::Point::create() .setX(point.x()) .setY(point.y()) .release(); } static Ref<Inspector::Protocol::OverlayTypes::Rect> buildObjectForRect(const FloatRect& rect) { return Inspector::Protocol::OverlayTypes::Rect::create() .setX(rect.x()) .setY(rect.y()) .setWidth(rect.width()) .setHeight(rect.height()) .release(); } static Ref<Inspector::Protocol::OverlayTypes::Quad> buildArrayForQuad(const FloatQuad& quad) { auto array = Inspector::Protocol::OverlayTypes::Quad::create(); array->addItem(buildObjectForPoint(quad.p1())); array->addItem(buildObjectForPoint(quad.p2())); array->addItem(buildObjectForPoint(quad.p3())); array->addItem(buildObjectForPoint(quad.p4())); return array; } static Ref<Inspector::Protocol::OverlayTypes::FragmentHighlightData> buildObjectForHighlight(const Highlight& highlight) { auto arrayOfQuads = Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::Quad>::create(); for (auto& quad : highlight.quads) arrayOfQuads->addItem(buildArrayForQuad(quad)); return Inspector::Protocol::OverlayTypes::FragmentHighlightData::create() .setQuads(WTFMove(arrayOfQuads)) .setContentColor(highlight.contentColor.serialized()) .setContentOutlineColor(highlight.contentOutlineColor.serialized()) .setPaddingColor(highlight.paddingColor.serialized()) .setBorderColor(highlight.borderColor.serialized()) .setMarginColor(highlight.marginColor.serialized()) .release(); } static RefPtr<Inspector::Protocol::OverlayTypes::Region> buildObjectForRegion(FrameView* mainView, RenderRegion* region) { FrameView* containingView = region->frame().view(); if (!containingView) return nullptr; RenderBlockFlow& regionContainer = downcast<RenderBlockFlow>(*region->parent()); LayoutRect borderBox = regionContainer.borderBoxRect(); borderBox.setWidth(borderBox.width() + regionContainer.verticalScrollbarWidth()); borderBox.setHeight(borderBox.height() + regionContainer.horizontalScrollbarHeight()); // Create incoming and outgoing boxes that we use to chain the regions toghether. const LayoutSize linkBoxSize(10, 10); const LayoutSize linkBoxMidpoint(linkBoxSize.width() / 2, linkBoxSize.height() / 2); LayoutRect incomingRectBox = LayoutRect(borderBox.location() - linkBoxMidpoint, linkBoxSize); LayoutRect outgoingRectBox = LayoutRect(borderBox.location() - linkBoxMidpoint + borderBox.size(), linkBoxSize); // Move the link boxes slightly inside the region border box. 
LayoutUnit maxUsableHeight = std::max(LayoutUnit(), borderBox.height() - linkBoxMidpoint.height()); LayoutUnit linkBoxVerticalOffset = std::min(LayoutUnit::fromPixel(15), maxUsableHeight); incomingRectBox.move(0, linkBoxVerticalOffset); outgoingRectBox.move(0, -linkBoxVerticalOffset); FloatQuad borderRectQuad = regionContainer.localToAbsoluteQuad(FloatRect(borderBox)); FloatQuad incomingRectQuad = regionContainer.localToAbsoluteQuad(FloatRect(incomingRectBox)); FloatQuad outgoingRectQuad = regionContainer.localToAbsoluteQuad(FloatRect(outgoingRectBox)); contentsQuadToPage(mainView, containingView, borderRectQuad); contentsQuadToPage(mainView, containingView, incomingRectQuad); contentsQuadToPage(mainView, containingView, outgoingRectQuad); return Inspector::Protocol::OverlayTypes::Region::create() .setBorderQuad(buildArrayForQuad(borderRectQuad)) .setIncomingQuad(buildArrayForQuad(incomingRectQuad)) .setOutgoingQuad(buildArrayForQuad(outgoingRectQuad)) .release(); } static Ref<Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::Region>> buildObjectForFlowRegions(RenderRegion* region, RenderFlowThread* flowThread) { FrameView* mainFrameView = region->document().page()->mainFrame().view(); auto arrayOfRegions = Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::Region>::create(); const RenderRegionList& regionList = flowThread->renderRegionList(); for (auto& iterRegion : regionList) { if (!iterRegion->isValid()) continue; RefPtr<Inspector::Protocol::OverlayTypes::Region> regionObject = buildObjectForRegion(mainFrameView, iterRegion); if (!regionObject) continue; if (region == iterRegion) { // Let the script know that this is the currently highlighted node. regionObject->setIsHighlighted(true); } arrayOfRegions->addItem(WTFMove(regionObject)); } return arrayOfRegions; } static Ref<Inspector::Protocol::OverlayTypes::Size> buildObjectForSize(const IntSize& size) { return Inspector::Protocol::OverlayTypes::Size::create() .setWidth(size.width()) .setHeight(size.height()) .release(); } static RefPtr<Inspector::Protocol::OverlayTypes::Quad> buildQuadObjectForCSSRegionContentClip(RenderRegion* region) { Frame* containingFrame = region->document().frame(); if (!containingFrame) return nullptr; FrameView* containingView = containingFrame->view(); FrameView* mainView = containingFrame->page()->mainFrame().view(); RenderFlowThread* flowThread = region->flowThread(); // Get the clip box of the current region and covert it into an absolute quad. LayoutRect flippedRegionRect(region->flowThreadPortionOverflowRect()); flowThread->flipForWritingMode(flippedRegionRect); // Apply any border or padding of the region. 
flippedRegionRect.setLocation(region->contentBoxRect().location()); FloatQuad clipQuad = region->localToAbsoluteQuad(FloatRect(flippedRegionRect)); contentsQuadToPage(mainView, containingView, clipQuad); return buildArrayForQuad(clipQuad); } void InspectorOverlay::setShowingPaintRects(bool showingPaintRects) { if (m_showingPaintRects == showingPaintRects) return; m_showingPaintRects = showingPaintRects; if (!m_showingPaintRects) { m_paintRects.clear(); m_paintRectUpdateTimer.stop(); drawPaintRects(); forcePaint(); } } void InspectorOverlay::showPaintRect(const FloatRect& rect) { if (!m_showingPaintRects) return; IntRect rootRect = m_page.mainFrame().view()->contentsToRootView(enclosingIntRect(rect)); const auto removeDelay = 250ms; std::chrono::steady_clock::time_point removeTime = std::chrono::steady_clock::now() + removeDelay; m_paintRects.append(TimeRectPair(removeTime, rootRect)); if (!m_paintRectUpdateTimer.isActive()) { const double paintRectsUpdateIntervalSeconds = 0.032; m_paintRectUpdateTimer.startRepeating(paintRectsUpdateIntervalSeconds); } drawPaintRects(); forcePaint(); } void InspectorOverlay::updatePaintRectsTimerFired() { std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); bool rectsChanged = false; while (!m_paintRects.isEmpty() && m_paintRects.first().first < now) { m_paintRects.removeFirst(); rectsChanged = true; } if (m_paintRects.isEmpty()) m_paintRectUpdateTimer.stop(); if (rectsChanged) { drawPaintRects(); forcePaint(); } } void InspectorOverlay::drawPaintRects() { auto arrayOfRects = Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::Rect>::create(); for (const auto& pair : m_paintRects) arrayOfRects->addItem(buildObjectForRect(pair.second)); evaluateInOverlay(ASCIILiteral("updatePaintRects"), WTFMove(arrayOfRects)); } void InspectorOverlay::drawGutter() { evaluateInOverlay(ASCIILiteral("drawGutter")); } static RefPtr<Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::FragmentHighlightData>> buildArrayForRendererFragments(RenderObject* renderer, const HighlightConfig& config) { auto arrayOfFragments = Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::FragmentHighlightData>::create(); RenderFlowThread* containingFlowThread = renderer->flowThreadContainingBlock(); if (!containingFlowThread) { Highlight highlight; buildRendererHighlight(renderer, nullptr, config, highlight, InspectorOverlay::CoordinateSystem::View); arrayOfFragments->addItem(buildObjectForHighlight(highlight)); } else { RenderRegion* startRegion = nullptr; RenderRegion* endRegion = nullptr; if (!containingFlowThread->getRegionRangeForBox(&renderer->enclosingBox(), startRegion, endRegion)) { // The flow has no visible regions. The renderer is not visible on screen. return nullptr; } const RenderRegionList& regionList = containingFlowThread->renderRegionList(); for (RenderRegionList::const_iterator iter = regionList.find(startRegion); iter != regionList.end(); ++iter) { RenderRegion* region = *iter; if (region->isValid()) { // Compute the highlight of the fragment inside the current region. Highlight highlight; buildRendererHighlight(renderer, region, config, highlight, InspectorOverlay::CoordinateSystem::View); Ref<Inspector::Protocol::OverlayTypes::FragmentHighlightData> fragmentHighlight = buildObjectForHighlight(highlight); // Compute the clipping area of the region. 
fragmentHighlight->setRegionClippingArea(buildQuadObjectForCSSRegionContentClip(region)); arrayOfFragments->addItem(WTFMove(fragmentHighlight)); } if (region == endRegion) break; } } return WTFMove(arrayOfFragments); } #if ENABLE(CSS_SHAPES) static FloatPoint localPointToRoot(RenderObject* renderer, const FrameView* mainView, const FrameView* view, const FloatPoint& point) { FloatPoint result = renderer->localToAbsolute(point); result = view->contentsToRootView(roundedIntPoint(result)); result += toIntSize(mainView->scrollPosition()); return result; } struct PathApplyInfo { FrameView* rootView; FrameView* view; Inspector::Protocol::OverlayTypes::DisplayPath* pathArray; RenderObject* renderer; const ShapeOutsideInfo* shapeOutsideInfo; }; static void appendPathCommandAndPoints(PathApplyInfo& info, const String& command, const FloatPoint points[], unsigned length) { FloatPoint point; info.pathArray->addItem(command); for (unsigned i = 0; i < length; i++) { point = info.shapeOutsideInfo->shapeToRendererPoint(points[i]); point = localPointToRoot(info.renderer, info.rootView, info.view, point); info.pathArray->addItem(point.x()); info.pathArray->addItem(point.y()); } } static void appendPathSegment(PathApplyInfo& pathApplyInfo, const PathElement& pathElement) { FloatPoint point; switch (pathElement.type) { // The points member will contain 1 value. case PathElementMoveToPoint: appendPathCommandAndPoints(pathApplyInfo, ASCIILiteral("M"), pathElement.points, 1); break; // The points member will contain 1 value. case PathElementAddLineToPoint: appendPathCommandAndPoints(pathApplyInfo, ASCIILiteral("L"), pathElement.points, 1); break; // The points member will contain 3 values. case PathElementAddCurveToPoint: appendPathCommandAndPoints(pathApplyInfo, ASCIILiteral("C"), pathElement.points, 3); break; // The points member will contain 2 values. case PathElementAddQuadCurveToPoint: appendPathCommandAndPoints(pathApplyInfo, ASCIILiteral("Q"), pathElement.points, 2); break; // The points member will contain no values. 
case PathElementCloseSubpath: appendPathCommandAndPoints(pathApplyInfo, ASCIILiteral("Z"), nullptr, 0); break; } } static RefPtr<Inspector::Protocol::OverlayTypes::ShapeOutsideData> buildObjectForShapeOutside(Frame* containingFrame, RenderBox* renderer) { const ShapeOutsideInfo* shapeOutsideInfo = renderer->shapeOutsideInfo(); if (!shapeOutsideInfo) return nullptr; LayoutRect shapeBounds = shapeOutsideInfo->computedShapePhysicalBoundingBox(); FloatQuad shapeQuad = renderer->localToAbsoluteQuad(FloatRect(shapeBounds)); contentsQuadToPage(containingFrame->page()->mainFrame().view(), containingFrame->view(), shapeQuad); auto shapeObject = Inspector::Protocol::OverlayTypes::ShapeOutsideData::create() .setBounds(buildArrayForQuad(shapeQuad)) .release(); Shape::DisplayPaths paths; shapeOutsideInfo->computedShape().buildDisplayPaths(paths); if (paths.shape.length()) { auto shapePath = Inspector::Protocol::OverlayTypes::DisplayPath::create(); PathApplyInfo info; info.rootView = containingFrame->page()->mainFrame().view(); info.view = containingFrame->view(); info.pathArray = &shapePath.get(); info.renderer = renderer; info.shapeOutsideInfo = shapeOutsideInfo; paths.shape.apply([&info](const PathElement& pathElement) { appendPathSegment(info, pathElement); }); shapeObject->setShape(shapePath.copyRef()); if (paths.marginShape.length()) { auto marginShapePath = Inspector::Protocol::OverlayTypes::DisplayPath::create(); info.pathArray = &marginShapePath.get(); paths.marginShape.apply([&info](const PathElement& pathElement) { appendPathSegment(info, pathElement); }); shapeObject->setMarginShape(marginShapePath.copyRef()); } } return WTFMove(shapeObject); } #endif static RefPtr<Inspector::Protocol::OverlayTypes::ElementData> buildObjectForElementData(Node* node, HighlightType type) { if (!is<Element>(node) || !node->document().frame()) return nullptr; Element* effectiveElement = downcast<Element>(node); if (node->isPseudoElement()) { Element* hostElement = downcast<PseudoElement>(*node).hostElement(); if (!hostElement) return nullptr; effectiveElement = hostElement; } Element& element = *effectiveElement; bool isXHTML = element.document().isXHTMLDocument(); auto elementData = Inspector::Protocol::OverlayTypes::ElementData::create() .setTagName(isXHTML ? element.nodeName() : element.nodeName().convertToASCIILowercase()) .setIdValue(element.getIdAttribute()) .release(); StringBuilder classNames; if (element.hasClass() && is<StyledElement>(element)) { HashSet<AtomicString> usedClassNames; const SpaceSplitString& classNamesString = downcast<StyledElement>(element).classNames(); size_t classNameCount = classNamesString.size(); for (size_t i = 0; i < classNameCount; ++i) { const AtomicString& className = classNamesString[i]; if (usedClassNames.contains(className)) continue; usedClassNames.add(className); classNames.append('.'); classNames.append(className); } } if (node->isPseudoElement()) { if (node->pseudoId() == BEFORE) classNames.appendLiteral("::before"); else if (node->pseudoId() == AFTER) classNames.appendLiteral("::after"); } if (!classNames.isEmpty()) elementData->setClassName(classNames.toString()); RenderElement* renderer = element.renderer(); if (!renderer) return nullptr; Frame* containingFrame = node->document().frame(); FrameView* containingView = containingFrame->view(); IntRect boundingBox = snappedIntRect(containingView->contentsToRootView(renderer->absoluteBoundingBoxRect())); RenderBoxModelObject* modelObject = is<RenderBoxModelObject>(*renderer) ? 
downcast<RenderBoxModelObject>(renderer) : nullptr; auto sizeObject = Inspector::Protocol::OverlayTypes::Size::create() .setWidth(modelObject ? adjustForAbsoluteZoom(roundToInt(modelObject->offsetWidth()), *modelObject) : boundingBox.width()) .setHeight(modelObject ? adjustForAbsoluteZoom(roundToInt(modelObject->offsetHeight()), *modelObject) : boundingBox.height()) .release(); elementData->setSize(WTFMove(sizeObject)); if (type != HighlightType::NodeList && renderer->isRenderNamedFlowFragmentContainer()) { RenderNamedFlowFragment& region = *downcast<RenderBlockFlow>(*renderer).renderNamedFlowFragment(); if (region.isValid()) { RenderFlowThread* flowThread = region.flowThread(); auto regionFlowData = Inspector::Protocol::OverlayTypes::RegionFlowData::create() .setName(downcast<RenderNamedFlowThread>(*flowThread).flowThreadName()) .setRegions(buildObjectForFlowRegions(&region, flowThread)) .release(); elementData->setRegionFlowData(WTFMove(regionFlowData)); } } RenderFlowThread* containingFlowThread = renderer->flowThreadContainingBlock(); if (is<RenderNamedFlowThread>(containingFlowThread)) { auto contentFlowData = Inspector::Protocol::OverlayTypes::ContentFlowData::create() .setName(downcast<RenderNamedFlowThread>(*containingFlowThread).flowThreadName()) .release(); elementData->setContentFlowData(WTFMove(contentFlowData)); } #if ENABLE(CSS_SHAPES) if (is<RenderBox>(*renderer)) { auto& renderBox = downcast<RenderBox>(*renderer); if (RefPtr<Inspector::Protocol::OverlayTypes::ShapeOutsideData> shapeObject = buildObjectForShapeOutside(containingFrame, &renderBox)) elementData->setShapeOutsideData(WTFMove(shapeObject)); } #endif // Need to enable AX to get the computed role. if (!WebCore::AXObjectCache::accessibilityEnabled()) WebCore::AXObjectCache::enableAccessibility(); if (AXObjectCache* axObjectCache = node->document().axObjectCache()) { if (AccessibilityObject* axObject = axObjectCache->getOrCreate(node)) elementData->setRole(axObject->computedRoleString()); } return WTFMove(elementData); } RefPtr<Inspector::Protocol::OverlayTypes::NodeHighlightData> InspectorOverlay::buildHighlightObjectForNode(Node* node, HighlightType type) const { if (!node) return nullptr; RenderObject* renderer = node->renderer(); if (!renderer) return nullptr; RefPtr<Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::FragmentHighlightData>> arrayOfFragmentHighlights = buildArrayForRendererFragments(renderer, m_nodeHighlightConfig); if (!arrayOfFragmentHighlights) return nullptr; // The main view's scroll offset is shared across all quads. FrameView* mainView = m_page.mainFrame().view(); auto nodeHighlightObject = Inspector::Protocol::OverlayTypes::NodeHighlightData::create() .setScrollOffset(buildObjectForPoint(!mainView->delegatesScrolling() ? 
mainView->visibleContentRect().location() : FloatPoint())) .setFragments(WTFMove(arrayOfFragmentHighlights)) .release(); if (m_nodeHighlightConfig.showInfo) { if (RefPtr<Inspector::Protocol::OverlayTypes::ElementData> elementData = buildObjectForElementData(node, type)) nodeHighlightObject->setElementData(WTFMove(elementData)); } return WTFMove(nodeHighlightObject); } Ref<Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::NodeHighlightData>> InspectorOverlay::buildObjectForHighlightedNodes() const { auto highlights = Inspector::Protocol::Array<Inspector::Protocol::OverlayTypes::NodeHighlightData>::create(); if (m_highlightNode) { if (RefPtr<Inspector::Protocol::OverlayTypes::NodeHighlightData> nodeHighlightData = buildHighlightObjectForNode(m_highlightNode.get(), HighlightType::Node)) highlights->addItem(WTFMove(nodeHighlightData)); } else if (m_highlightNodeList) { for (unsigned i = 0; i < m_highlightNodeList->length(); ++i) { if (RefPtr<Inspector::Protocol::OverlayTypes::NodeHighlightData> nodeHighlightData = buildHighlightObjectForNode(m_highlightNodeList->item(i), HighlightType::NodeList)) highlights->addItem(WTFMove(nodeHighlightData)); } } return highlights; } void InspectorOverlay::drawNodeHighlight() { if (m_highlightNode || m_highlightNodeList) evaluateInOverlay("drawNodeHighlight", buildObjectForHighlightedNodes()); } void InspectorOverlay::drawQuadHighlight() { if (!m_highlightQuad) return; Highlight highlight; buildQuadHighlight(*m_highlightQuad, m_quadHighlightConfig, highlight); evaluateInOverlay("drawQuadHighlight", buildObjectForHighlight(highlight)); } void InspectorOverlay::drawPausedInDebuggerMessage() { if (!m_pausedInDebuggerMessage.isNull()) evaluateInOverlay("drawPausedInDebuggerMessage", m_pausedInDebuggerMessage); } Page* InspectorOverlay::overlayPage() { if (m_overlayPage) return m_overlayPage.get(); PageConfiguration pageConfiguration(makeUniqueRef<EmptyEditorClient>(), SocketProvider::create()); fillWithEmptyClients(pageConfiguration); m_overlayPage = std::make_unique<Page>(WTFMove(pageConfiguration)); m_overlayPage->setDeviceScaleFactor(m_page.deviceScaleFactor()); Settings& settings = m_page.settings(); Settings& overlaySettings = m_overlayPage->settings(); overlaySettings.setStandardFontFamily(settings.standardFontFamily()); overlaySettings.setSerifFontFamily(settings.serifFontFamily()); overlaySettings.setSansSerifFontFamily(settings.sansSerifFontFamily()); overlaySettings.setCursiveFontFamily(settings.cursiveFontFamily()); overlaySettings.setFantasyFontFamily(settings.fantasyFontFamily()); overlaySettings.setPictographFontFamily(settings.pictographFontFamily()); overlaySettings.setMinimumFontSize(settings.minimumFontSize()); overlaySettings.setMinimumLogicalFontSize(settings.minimumLogicalFontSize()); overlaySettings.setMediaEnabled(false); overlaySettings.setScriptEnabled(true); overlaySettings.setPluginsEnabled(false); Frame& frame = m_overlayPage->mainFrame(); frame.setView(FrameView::create(frame)); frame.init(); FrameLoader& loader = frame.loader(); frame.view()->setCanHaveScrollbars(false); frame.view()->setTransparent(true); ASSERT(loader.activeDocumentLoader()); loader.activeDocumentLoader()->writer().setMIMEType("text/html"); loader.activeDocumentLoader()->writer().begin(); loader.activeDocumentLoader()->writer().addData(reinterpret_cast<const char*>(InspectorOverlayPage_html), sizeof(InspectorOverlayPage_html)); loader.activeDocumentLoader()->writer().end(); #if OS(WINDOWS) evaluateInOverlay("setPlatform", "windows"); #elif OS(MAC_OS_X) 
evaluateInOverlay("setPlatform", "mac"); #elif OS(UNIX) evaluateInOverlay("setPlatform", "linux"); #endif return m_overlayPage.get(); } void InspectorOverlay::forcePaint() { // This overlay page is very weird and doesn't automatically paint. We have to force paints manually. m_client->highlight(); } void InspectorOverlay::reset(const IntSize& viewportSize, const IntSize& frameViewFullSize) { auto configObject = Inspector::Protocol::OverlayTypes::OverlayConfiguration::create() .setDeviceScaleFactor(m_page.deviceScaleFactor()) .setViewportSize(buildObjectForSize(viewportSize)) .setFrameViewFullSize(buildObjectForSize(frameViewFullSize)) .release(); evaluateInOverlay("reset", WTFMove(configObject)); } void InspectorOverlay::evaluateInOverlay(const String& method) { Ref<InspectorArray> command = InspectorArray::create(); command->pushString(method); overlayPage()->mainFrame().script().evaluate(ScriptSourceCode(makeString("dispatch(", command->toJSONString(), ')'))); } void InspectorOverlay::evaluateInOverlay(const String& method, const String& argument) { Ref<InspectorArray> command = InspectorArray::create(); command->pushString(method); command->pushString(argument); overlayPage()->mainFrame().script().evaluate(ScriptSourceCode(makeString("dispatch(", command->toJSONString(), ')'))); } void InspectorOverlay::evaluateInOverlay(const String& method, RefPtr<InspectorValue>&& argument) { Ref<InspectorArray> command = InspectorArray::create(); command->pushString(method); command->pushValue(WTFMove(argument)); overlayPage()->mainFrame().script().evaluate(ScriptSourceCode(makeString("dispatch(", command->toJSONString(), ')'))); } void InspectorOverlay::freePage() { m_overlayPage = nullptr; } } // namespace WebCore
bsd-2-clause
michaelvlaar/etcd-endpointer
examples/echoservice/client/client.go
1200
package main import ( "flag" "github.com/michaelvlaar/etcd-endpointer/examples/echoservice/api/protos" "golang.org/x/net/context" "google.golang.org/grpc" "gopkg.in/inconshreveable/log15.v2" "time" ) var ( endpoint = flag.String("endpoint", ":9090", "GeneralsView API endpoint. Usage <host>:<port>.") userID = flag.Uint64("userID", 0, "The userID to simulate for loadbalancing") message = flag.String("message", "Hello etcd-endpointer", "Type any string") log log15.Logger ) func init() { log = log15.New() } func main() { flag.Parse() conn, err := grpc.Dial(*endpoint) if err != nil { log.Crit("could not connect to gRPC endpoint", "error", err) } defer func() { if err := conn.Close(); err != nil { log.Error("Error closing connection", "error", err) } }() client := echo.NewEchoserviceClient(conn) request := &echo.EchoRequest{ UserID: *userID, Message: *message, } for { response, err := client.EchoMessage(context.Background(), request) if err != nil { log.Error("error sending EchoMessage", "error", err) } log.Info("Received EchoMessage", "request", request, "response", response) time.Sleep(time.Duration(1) * time.Second) } }
bsd-2-clause
leviroth/praw
praw/models/reddit/message.py
3806
"""Provide the Message class.""" from ...const import API_PATH from .base import RedditBase from .mixins import FullnameMixin, InboxableMixin, ReplyableMixin from .redditor import Redditor from .subreddit import Subreddit class Message(InboxableMixin, ReplyableMixin, FullnameMixin, RedditBase): """A class for private messages. **Typical Attributes** This table describes attributes that typically belong to objects of this class. Since attributes are dynamically provided (see :ref:`determine-available-attributes-of-an-object`), there is not a guarantee that these attributes will always be present, nor is this list comprehensive in any way. ======================= =================================================== Attribute Description ======================= =================================================== ``author`` Provides an instance of :class:`.Redditor`. ``body`` The body of the message. ``created_utc`` Time the message was created, represented in `Unix Time`_. ``dest`` Provides an instance of :class:`.Redditor`. The recipient of the message. ``id`` The ID of the message. ``name`` The full ID of the message, prefixed with 't4'. ``subject`` The subject of the message. ``subreddit`` If the message was sent from a subreddit, provides an instance of :class:`.Subreddit`. ``was_comment`` Whether or not the message was a comment reply. ======================= =================================================== .. _Unix Time: https://en.wikipedia.org/wiki/Unix_time """ STR_FIELD = "id" @classmethod def parse(cls, data, reddit): """Return an instance of Message or SubredditMessage from ``data``. :param data: The structured data. :param reddit: An instance of :class:`.Reddit`. """ if data["author"]: data["author"] = Redditor(reddit, data["author"]) if data["dest"].startswith("#"): data["dest"] = Subreddit(reddit, data["dest"][1:]) else: data["dest"] = Redditor(reddit, data["dest"]) if data["replies"]: replies = data["replies"] data["replies"] = reddit._objector.objectify( replies["data"]["children"] ) else: data["replies"] = [] if data["subreddit"]: data["subreddit"] = Subreddit(reddit, data["subreddit"]) return SubredditMessage(reddit, _data=data) return cls(reddit, _data=data) @property def _kind(self): """Return the class's kind.""" return self._reddit.config.kinds["message"] def __init__(self, reddit, _data): """Construct an instance of the Message object.""" super(Message, self).__init__(reddit, _data=_data) self._fetched = True def delete(self): """Delete the message. .. note:: Reddit does not return an indication of whether or not the message was successfully deleted. """ self._reddit.post( API_PATH["delete_message"], data={"id": self.fullname} ) class SubredditMessage(Message): """A class for messages to a subreddit.""" def mute(self, _unmute=False): """Mute the sender of this SubredditMessage.""" self._reddit.post(API_PATH["mute_sender"], data={"id": self.fullname}) def unmute(self): """Unmute the sender of this SubredditMessage.""" self._reddit.post( API_PATH["unmute_sender"], data={"id": self.fullname} )
bsd-2-clause
sightmachine/simplecv2-facerecognizer
simplecv_facerecognizer/haar_cascade.py
1742
import os from simplecv.base import logger import cv2 from simplecv_facerecognizer import DATA_DIR class HaarCascade(object): """ This class wraps HaarCascade files for the find_haar_features file. To use the class provide it with the path to a Haar cascade XML file and optionally a name. """ _cache = {} def __init__(self, fname=None, name=None): self._cascade = None if name is None: self._name = fname else: self._name = name if fname is not None: if os.path.exists(fname): self._fhandle = os.path.abspath(fname) else: self._fhandle = os.path.join(DATA_DIR, 'HaarCascades', fname) if not os.path.exists(self._fhandle): logger.warning("Could not find Haar Cascade file " + fname) logger.warning("Try running the function " "img.list_haar_features() to see what is " "available") return if self._fhandle in HaarCascade._cache: self._cascade = HaarCascade._cache[self._fhandle] return else: self._cascade = cv2.CascadeClassifier(self._fhandle) HaarCascade._cache[self._fhandle] = self._cascade else: logger.warning("No file path mentioned.") def load(self, fname=None, name=None): self.__init__(fname, name) def get_cascade(self): return self._cascade def get_name(self): return self._name def set_name(self, name): self._name = name def get_fhandle(self): return self._fhandle
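# ---------------------------------------------------------------------------
# Illustrative usage sketch added by the editor; it is NOT part of the
# original module. It runs the wrapped cv2.CascadeClassifier directly.
# "face.xml" is assumed to be shipped under DATA_DIR/HaarCascades, and
# "input.png" is a placeholder image path.
if __name__ == "__main__":  # pragma: no cover - example only
    cascade = HaarCascade("face.xml", name="face")
    classifier = cascade.get_cascade()
    if classifier is not None:
        gray = cv2.imread("input.png", cv2.IMREAD_GRAYSCALE)
        if gray is not None:
            detections = classifier.detectMultiScale(
                gray, scaleFactor=1.1, minNeighbors=3)
            print("%s: %d detection(s)" % (cascade.get_name(), len(detections)))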
bsd-2-clause
bureau14/qdb-benchmark
thirdparty/boost/boost/iostreams/detail/bool_trait_def.hpp
1956
// (C) Copyright 2008 CodeRage, LLC (turkanis at coderage dot com) // (C) Copyright 2003-2007 Jonathan Turkanis // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.) // See http://www.boost.org/libs/iostreams for documentation. #ifndef BOOST_IOSTREAMS_DETAIL_BOOL_TRAIT_DEF_HPP_INCLUDED #define BOOST_IOSTREAMS_DETAIL_BOOL_TRAIT_DEF_HPP_INCLUDED #include <boost/config.hpp> // BOOST_STATIC_CONSTANT. #include <boost/iostreams/detail/template_params.hpp> #include <boost/mpl/aux_/lambda_support.hpp> #include <boost/mpl/bool.hpp> #include <boost/preprocessor/cat.hpp> #include <boost/preprocessor/repetition/enum_params.hpp> #include <boost/type_traits/detail/yes_no_type.hpp> // // Macro name: BOOST_IOSTREAMS_BOOL_TRAIT_DEF // Description: Used to generate the traits classes is_istream, is_ostream, // etc. // #if BOOST_WORKAROUND(__BORLANDC__, BOOST_TESTED_AT(0x582)) # define BOOST_IOSTREAMS_TRAIT_NAMESPACE(trait) #else # define BOOST_IOSTREAMS_TRAIT_NAMESPACE(trait) BOOST_PP_CAT(trait, _impl_):: #endif #define BOOST_IOSTREAMS_BOOL_TRAIT_DEF(trait, type, arity) \ namespace BOOST_PP_CAT(trait, _impl_) { \ BOOST_IOSTREAMS_TEMPLATE_PARAMS(arity, T) \ type_traits::yes_type helper \ (const volatile type BOOST_IOSTREAMS_TEMPLATE_ARGS(arity, T)*); \ type_traits::no_type helper(...); \ template<typename T> \ struct impl { \ BOOST_STATIC_CONSTANT(bool, value = \ (sizeof(BOOST_IOSTREAMS_TRAIT_NAMESPACE(trait) \ helper(static_cast<T*>(0))) == \ sizeof(type_traits::yes_type))); \ }; \ } \ template<typename T> \ struct trait \ : mpl::bool_<BOOST_PP_CAT(trait, _impl_)::impl<T>::value> \ { BOOST_MPL_AUX_LAMBDA_SUPPORT(1, trait, (T)) }; \ /**/ #endif // #ifndef BOOST_IOSTREAMS_DETAIL_BOOL_TRAIT_DEF_HPP_INCLUDED
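// ---------------------------------------------------------------------------
// Illustrative usage sketch added by the editor; it is NOT part of the Boost
// header. It mirrors the way the library itself instantiates the macro: the
// last argument is the number of template parameters of the inspected type.
// Treat the instantiation below as an example under that assumption, not as
// a quotation from boost/iostreams/traits.hpp.
//
//   #include <iosfwd> // declares std::basic_istream
//   #include <boost/iostreams/detail/bool_trait_def.hpp>
//
//   namespace boost { namespace iostreams {
//
//   // Generates a trait template is_istream<T> whose ::value is true for
//   // std::basic_istream (two template parameters) and types derived from it.
//   BOOST_IOSTREAMS_BOOL_TRAIT_DEF(is_istream, std::basic_istream, 2)
//
//   } } // namespace boost::iostreams
//
//   static_assert(boost::iostreams::is_istream<std::istream>::value, "");
//   static_assert(!boost::iostreams::is_istream<int>::value, "");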
bsd-2-clause
laeotropic/HTTP-Proxy
deps/asio-1.10.1/include/asio/detail/std_event.hpp
2691
// // detail/std_event.hpp // ~~~~~~~~~~~~~~~~~~~~ // // Copyright (c) 2003-2013 Christopher M. Kohlhoff (chris at kohlhoff dot com) // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // #ifndef ASIO_DETAIL_STD_EVENT_HPP #define ASIO_DETAIL_STD_EVENT_HPP #if defined(_MSC_VER) && (_MSC_VER >= 1200) # pragma once #endif // defined(_MSC_VER) && (_MSC_VER >= 1200) #include "asio/detail/config.hpp" #if defined(ASIO_HAS_STD_MUTEX_AND_CONDVAR) #include <chrono> #include <condition_variable> #include "asio/detail/assert.hpp" #include "asio/detail/noncopyable.hpp" #include "asio/detail/push_options.hpp" namespace asio { namespace detail { class std_event : private noncopyable { public: // Constructor. std_event() : signalled_(false) { } // Destructor. ~std_event() { } // Signal the event. template <typename Lock> void signal(Lock& lock) { ASIO_ASSERT(lock.locked()); (void)lock; signalled_ = true; cond_.notify_one(); } // Signal the event and unlock the mutex. template <typename Lock> void signal_and_unlock(Lock& lock) { ASIO_ASSERT(lock.locked()); signalled_ = true; lock.unlock(); cond_.notify_one(); } // Reset the event. template <typename Lock> void clear(Lock& lock) { ASIO_ASSERT(lock.locked()); (void)lock; signalled_ = false; } // Wait for the event to become signalled. template <typename Lock> void wait(Lock& lock) { ASIO_ASSERT(lock.locked()); unique_lock_adapter u_lock(lock); while (!signalled_) cond_.wait(u_lock.unique_lock_); } // Timed wait for the event to become signalled. template <typename Lock> bool wait_for_usec(Lock& lock, long usec) { ASIO_ASSERT(lock.locked()); unique_lock_adapter u_lock(lock); if (!signalled_) cond_.wait_for(u_lock.unique_lock_, std::chrono::microseconds(usec)); return signalled_; } private: // Helper class to temporarily adapt a scoped_lock into a unique_lock so that // it can be passed to std::condition_variable::wait(). struct unique_lock_adapter { template <typename Lock> explicit unique_lock_adapter(Lock& lock) : unique_lock_(lock.mutex().mutex_, std::adopt_lock) { } ~unique_lock_adapter() { unique_lock_.release(); } std::unique_lock<std::mutex> unique_lock_; }; std::condition_variable cond_; bool signalled_; }; } // namespace detail } // namespace asio #include "asio/detail/pop_options.hpp" #endif // defined(ASIO_HAS_STD_MUTEX_AND_CONDVAR) #endif // ASIO_DETAIL_STD_EVENT_HPP
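// ---------------------------------------------------------------------------
// Illustrative usage sketch added by the editor; it is NOT part of the Asio
// header. std_event is an internal detail type that Asio pairs with its own
// mutex wrapper; the sketch assumes asio::detail::mutex and its nested
// scoped_lock typedef are available under this configuration.
//
//   asio::detail::mutex m;
//   asio::detail::std_event ev;
//   bool ready = false;
//
//   // Waiting side: hold the lock and block until signalled.
//   {
//     asio::detail::mutex::scoped_lock lock(m);
//     while (!ready)
//       ev.wait(lock); // releases and re-acquires the lock internally
//   }
//
//   // Signalling side: publish the state, then wake the waiter.
//   {
//     asio::detail::mutex::scoped_lock lock(m);
//     ready = true;
//     ev.signal(lock); // requires the lock to be held
//   }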
bsd-2-clause
bobthekingofegypt/bobball-playn
core/src/main/java/org/bobstuff/bobball/core/Ball.java
4754
/* Copyright (c) 2012 Richard Martin. All rights reserved. Licensed under the terms of the BSD License, see LICENSE.txt */ package org.bobstuff.bobball.core; import org.bobstuff.bobball.android.Rect; import pythagoras.f.Point; public class Ball { private static final int BALL_UNDEFINED = 0; private static final int BALL_LEFT = 1; private static final int BALL_RIGHT = 2; private static final int BALL_UP = 3; private static final int BALL_DOWN = 4; private Rect frame = new Rect(); private double speed; private int size; private int horizontalVelocity; private int verticalVelocity; private Point pointOne = new Point(); private Point pointTwo = new Point(); public Ball(final int x, final int y, final int horizontalVelocity, final int verticalVelocity, final double speed, final int size) { this.size = size; this.frame.set(x, y, x+this.size, y+this.size); this.horizontalVelocity = horizontalVelocity; this.verticalVelocity = verticalVelocity; this.speed = speed; } public Ball(final Ball ball) { this.size = ball.getSize(); this.frame.set(ball.getX1(), ball.getY1(), ball.getX2(), ball.getY2()); this.horizontalVelocity = ball.getHorizontalVelocity(); this.verticalVelocity = ball.getVerticalVelocity(); this.speed = ball.getSpeed(); } public int getX1() { return frame.left; } public int getY1() { return frame.top; } public int getX2() { return frame.right; } public int getY2() { return frame.bottom; } public int getSize() { return size; } public double getSpeed() { return speed; } public int getHorizontalVelocity() { return horizontalVelocity; } public int getVerticalVelocity() { return verticalVelocity; } public Rect getFrame() { return frame; } public void collision(Ball other) { int radius = size/2; pointOne.set(frame.left + radius, frame.top + radius); pointTwo.set(other.frame.left + radius, other.frame.top + radius); double xDistance=pointTwo.x-pointOne.x; double yDistance=pointTwo.y-pointOne.y; if((horizontalVelocity>0 && xDistance>0) || (horizontalVelocity<0 && xDistance<0)) { horizontalVelocity=-horizontalVelocity; } if((verticalVelocity>0 && yDistance>0) || (verticalVelocity<0 && yDistance<0)) { verticalVelocity=-verticalVelocity; } double distanceSquared = 0; do { int x = (int)(frame.left + (horizontalVelocity * speed)); int y = (int)(frame.top + (verticalVelocity * speed)); frame.set(x, y, x+size, y+size); pointOne.set(frame.left + radius, frame.top + radius); distanceSquared = ((pointOne.x - pointTwo.x) * (pointOne.x - pointTwo.x)) + ((pointOne.y - pointTwo.y) * (pointOne.y - pointTwo.y)); } while (distanceSquared < (size*size)); } public void collision(final Rect other) { int x1 = getX1(); int y1 = getY1(); int x2 = getX2(); int y2 = getY2(); int otherX1 = other.left; int otherY1 = other.top; int otherX2 = other.right; int otherY2 = other.bottom; int minDistance = size; int direction = BALL_UNDEFINED; int distance = x2 - otherX1; if (distance < minDistance && distance >= 0){ minDistance = distance; direction = BALL_RIGHT; } distance = y2 - otherY1; if (distance < minDistance && distance >= 0 ){ minDistance = distance; direction = BALL_UP; } distance = otherX2 - x1; if (distance < minDistance && distance >= 0 ){ minDistance = distance; direction = BALL_LEFT; } distance = otherY2 - y1; if (distance < minDistance && distance >= 0 ){ minDistance = distance; direction = BALL_DOWN; } switch(direction){ case BALL_LEFT : case BALL_RIGHT : horizontalVelocity = -horizontalVelocity; break; case BALL_DOWN : case BALL_UP : verticalVelocity = -verticalVelocity; break; default : { break; } } while 
(Rect.intersects(frame, other)){ int x = (int)(frame.left + (horizontalVelocity * speed)); int y = (int)(frame.top + (verticalVelocity * speed)); frame.set(x, y, x+size, y+size); } } public boolean collide(Ball other) { int radius = size/2; pointOne.set(frame.left + radius, frame.top + radius); pointTwo.set(other.frame.left + radius, other.frame.top + radius); double distance = ((pointOne.x - pointTwo.x) * (pointOne.x - pointTwo.x)) + ((pointOne.y - pointTwo.y) * (pointOne.y - pointTwo.y)); return distance < (size*size); } public boolean collide(Rect otherRect) { return Rect.intersects(frame, otherRect); } public void move(){ int x = (int)(frame.left + (horizontalVelocity * speed)); int y = (int)(frame.top + (verticalVelocity * speed)); frame.set(x, y, x+size, y+size); } }
bsd-2-clause
jrgoldfinemiddleton/cs61b
pj2/list/DList.java
4736
/* DList.java */ package list; import java.util.Iterator; /** * A mutable doubly-linked list ADT. Its implementation is circularly-linked * and employs a sentinel node at the head of the list. */ public class DList extends List { /* Instance Fields * (inherited) size is the number of items in the list. * head references the sentinel node. * Note that the sentinel node does not store an item, and is not included * in the count stored by the "size" field. */ protected DListNode head; /* DList invariants: * 1) head != null. * 2) For every DListNode x in a DList, x.next != null. * 3) For every DListNode x in a DList, x.prev != null. * 4) For every DListNode x in a DList, if x.next == y, then y.prev == x. * 5) For every DListNode x in a DList, if x.prev == y, then y.next == x. * 6) For every DList l, l.head.myList = null. (Note that l.head is the * sentinel.) * 7) For every DListNode x in a DList l EXCEPT l.head (the sentinel), * x.myList = l. * 8) size is the number of DListNodes, NOT COUNTING the sentinel, * that can be accessed from the sentinel (head) by a sequence of * "next" references. */ /** * DList() constructs for an empty DList. */ public DList() { head = newNode(null, null, null, null); head.next = head; head.prev = head; size = 0; } /** * back() returns the node at the back of this DList. If the DList is * empty, return an "invalid" node--a node with the property that any * attempt to use it will cause an exception. (The sentinel is "invalid".) * * @return a ListNode at the back of this DList. * * Performance: runs in O(1) time. */ @Override public ListNode back() { return head.prev; } /** * front() returns the node at the front of this DList. If the DList is * empty, return an "invalid" node--a node with the property that any * attempt to use it will cause an exception. (The sentinel is "invalid".) * * @return a ListNode at the front of this DList. * * Performance: runs in O(1) time. */ @Override public ListNode front() { return head.next; } /** * insertBack() inserts an item at the back of this DList. * * @param item is the item to be inserted. * * Performance: runs in O(1) time. */ @Override public void insertBack(Object item) { DListNode newNode = newNode(item, this, head.prev, head); head.prev = newNode; newNode.prev.next = newNode; size++; } /** * insertFront() inserts an item at the front of this DList. * * @param item is the item to be inserted. * * Performance: runs in O(1) time. */ @Override public void insertFront(Object item) { DListNode newNode = newNode(item, this, head, head.next); head.next = newNode; newNode.next.prev = newNode; size++; } /** * iterator() returns a new Iterator for this list. */ @SuppressWarnings("rawtypes") @Override public Iterator iterator() { List toIterate = this; return new Iterator(){ List list = toIterate; ListNode cur = list.front(); @Override public boolean hasNext() { return cur.isValidNode(); } @Override public Object next() { Object item = cur.item; try { cur = cur.next(); } catch (InvalidNodeException ine) { ine.printStackTrace(); System.exit(0); } return item; } }; } /** * newNode() calls the DListNode constructor. Use this method to allocate * new DListNodes rather than calling the DListNode constructor directly. * That way, only this method need be overridden if a subclass of DList * wants to use a different kind of node. * * @param item the item to store in the node. * @param list the list that owns this node. (null for sentinels.) * @param prev the node previous to this node. * @param next the node following this node. 
*/ protected DListNode newNode(Object item, DList list, DListNode prev, DListNode next) { return new DListNode(item, list, prev, next); } /** * toString() returns a String representation of this DList. * * @return a String representation of this DList. * * Performance: runs in O(n) time, where n is the length of the list. */ @Override public String toString() { StringBuilder s = new StringBuilder("[ "); DListNode current = head.next; while (current != head) { s.append(current.item + " "); current = current.next; } s.append("]"); return s.toString(); } }
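/* ---------------------------------------------------------------------------
 * Illustrative usage sketch added by the editor; it is NOT part of the
 * original assignment code. It assumes the accompanying list package exposes
 * ListNode.item() and InvalidNodeException, as used elsewhere in this
 * project.
 *
 *   DList list = new DList();
 *   list.insertBack("alpha");
 *   list.insertBack("beta");
 *   list.insertFront("omega");
 *   System.out.println(list); // prints "[ omega alpha beta ]"
 *
 *   try {
 *     ListNode front = list.front(); // valid while the list is non-empty
 *     System.out.println(front.item()); // "omega"
 *   } catch (InvalidNodeException ine) {
 *     ine.printStackTrace();
 *   }
 * ------------------------------------------------------------------------- */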
bsd-2-clause
Linuxbrew/homebrew-core
Formula/triton.rb
1283
require "language/node" class Triton < Formula desc "Joyent Triton CLI" homepage "https://www.npmjs.com/package/triton" url "https://registry.npmjs.org/triton/-/triton-7.15.0.tgz" sha256 "96d2f68caf6bb68187da619ccfa305f8d85126a2e5199b2830255a6b1fc9a67c" license "MPL-2.0" bottle do sha256 cellar: :any_skip_relocation, arm64_big_sur: "df4487f4d2822294ed4bde9b4f8af1a67bf23e467dceaba168ecff50c4865b8d" sha256 cellar: :any_skip_relocation, big_sur: "58babe7098da230bf1daa9b7e7838b683ee97b7a82d6a68e32ac1dc9699f8f84" sha256 cellar: :any_skip_relocation, catalina: "e3c6ee7a64059050fed8b4577af8711be23d63c0e765371347842a4233b36d3b" sha256 cellar: :any_skip_relocation, mojave: "de43242aef253303a8303740989570dc99a2ec2a9015668064e14ce4ec4a4d79" sha256 cellar: :any_skip_relocation, x86_64_linux: "e083fe946e07fbbcf263f03112333bc2112ea40a39fe26fe1df0a1954f3fc978" end depends_on "node" def install system "npm", "install", *Language::Node.std_npm_install_args(libexec) bin.install_symlink Dir["#{libexec}/bin/*"] (bash_completion/"triton").write `#{bin}/triton completion` end test do output = shell_output("#{bin}/triton profile ls") assert_match(/\ANAME CURR ACCOUNT USER URL$/, output) end end
bsd-2-clause
Linuxbrew/homebrew-core
Formula/bowtie2.rb
1369
class Bowtie2 < Formula desc "Fast and sensitive gapped read aligner" homepage "https://bowtie-bio.sourceforge.io/bowtie2/" url "https://github.com/BenLangmead/bowtie2/archive/v2.4.4.tar.gz" sha256 "ef8272fc1b3e18a30f16cb4b6a4344bf50e1f82fbd3af93dc8194b58e5856f64" license "GPL-3.0-or-later" revision 1 bottle do sha256 cellar: :any_skip_relocation, arm64_big_sur: "a32813ba9105f8e70c93ea9b66b290da4107a91fa09e7a0e8ceb9d1413050eab" sha256 cellar: :any_skip_relocation, big_sur: "96c8bdffc7e247135089bf5ebc6eb6b4ee1d7bdb82d25a56be5c55680c0a50e9" sha256 cellar: :any_skip_relocation, catalina: "39a5b463bedd3beeb0f17e95da9a485bc0c95187663e284ca0b45b1a0e09b846" sha256 cellar: :any_skip_relocation, mojave: "0b36d2735b4eff060d2ecf3d4a2c3fe71a88cedec08514a4bf6ec23210faf696" sha256 cellar: :any_skip_relocation, x86_64_linux: "505094388be1f46326a0b099a46b818f95c6fab9bc5e7380fcc532c9aa644c3b" end depends_on "simde" depends_on "tbb" def install system "make", "install", "PREFIX=#{prefix}" pkgshare.install "example", "scripts" end test do system "#{bin}/bowtie2-build", "#{pkgshare}/example/reference/lambda_virus.fa", "lambda_virus" assert_predicate testpath/"lambda_virus.1.bt2", :exist?, "Failed to create viral alignment lambda_virus.1.bt2" end end
bsd-2-clause
JCount/homebrew-core
Formula/http_load.rb
1805
class HttpLoad < Formula desc "Test throughput of a web server by running parallel fetches" homepage "https://www.acme.com/software/http_load/" url "https://www.acme.com/software/http_load/http_load-09Mar2016.tar.gz" version "20160309" sha256 "5a7b00688680e3fca8726dc836fd3f94f403fde831c71d73d9a1537f215b4587" revision 2 livecheck do url :homepage regex(/href=.*?http_load[._-]v?(\d+[a-z]+\d+)\.t/i) strategy :page_match do |page, regex| # Convert date-based version from 09Mar2016 format to 20160309 page.scan(regex).map do |match| date_str = match&.first date_str ? Date.parse(date_str)&.strftime("%Y%m%d") : nil end end end bottle do sha256 cellar: :any, arm64_big_sur: "d5fc5ba0ce6baf991e45fcb70f6e2fd3153e1f902d1d510cf015b3ff8cc4d0c3" sha256 cellar: :any, big_sur: "67456aed34ccc1d9873b946ed2adb7c86ecd52ad90a495f9527afd0a883710d0" sha256 cellar: :any, catalina: "36fada1e1b8cbe35a9eb1fb2374c175a003d750f0560565c6bfaf6b90a17f748" sha256 cellar: :any, mojave: "d0d672723564b758fc3ef0721239e108ec063a395e183db033071200d5d9ee48" sha256 cellar: :any, high_sierra: "22e21275c49121c174024104f9b99c5f55d37e032ff7cae42bba89746c26bd88" sha256 cellar: :any, sierra: "a949ed2040faf49c7cdb6bf0110dfbbff465641c811e78a035998a4160170a05" end depends_on "openssl@1.1" def install bin.mkpath man1.mkpath args = %W[ BINDIR=#{bin} LIBDIR=#{lib} MANDIR=#{man1} CC=#{ENV.cc} SSL_TREE=#{Formula["openssl@1.1"].opt_prefix} ] inreplace "Makefile", "#SSL_", "SSL_" system "make", "install", *args end test do (testpath/"urls").write "https://brew.sh/" system "#{bin}/http_load", "-rate", "1", "-fetches", "1", "urls" end end
bsd-2-clause
renemilk/DUNE-Stuff
dune/stuff/test/grid_walker.cc
3303
// This file is part of the dune-stuff project: // https://github.com/wwu-numerik/dune-stuff // Copyright holders: Rene Milk, Felix Schindler // License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause) #include "main.hxx" #if HAVE_DUNE_GRID # include <dune/stuff/grid/walker.hh> # include <dune/stuff/grid/provider/cube.hh> # include <dune/stuff/common/parallel/partitioner.hh> # include <dune/stuff/common/logstreams.hh> # if DUNE_VERSION_NEWER(DUNE_COMMON,3,9) && HAVE_TBB // EXADUNE # include <dune/grid/utility/partitioning/seedlist.hh> # endif using namespace Dune::Stuff; using namespace Dune::Stuff::Common; using namespace Dune::Stuff::Grid; using namespace std; typedef testing::Types< Int<1>, Int<2>, Int<3> > GridDims; template < class T > struct GridWalkerTest : public ::testing::Test { static const size_t griddim = T::value; static const size_t level = 4; typedef Dune::YaspGrid<griddim> GridType; typedef typename GridType::LeafGridView GridViewType; typedef typename DSG::Entity< GridViewType >::Type EntityType; typedef typename DSG::Intersection< GridViewType >::Type IntersectionType; const DSG::Providers::Cube<GridType> grid_prv; GridWalkerTest() :grid_prv(0.f,1.f,level) {} void check_count() { const auto gv = grid_prv.grid().leafGridView(); Walker<GridViewType> walker(gv); const auto correct_size = gv.size(0); atomic<size_t> count(0); auto counter = [&](const EntityType&){count++;}; auto test1 = [&]{ walker.add(counter); walker.walk(false); }; auto test2 = [&]{ walker.add(counter); walker.walk(true); }; list<function<void()>> tests({ test1, test2 }); # if DUNE_VERSION_NEWER(DUNE_COMMON,3,9) // EXADUNE auto test3 = [&]{ IndexSetPartitioner<GridViewType> partitioner(gv.grid().leafIndexSet()); Dune::SeedListPartitioning<GridType, 0> partitioning(gv, partitioner); walker.add(counter); walker.walk(partitioning); }; tests.push_back(test3); # endif // DUNE_VERSION_NEWER(DUNE_COMMON,3,9) // EXADUNE for (const auto& test : tests) { count = 0; test(); EXPECT_EQ(count, correct_size); } } void check_apply_on() { const auto gv = grid_prv.grid().leafGridView(); Walker<GridViewType> walker(gv); size_t filter_count = 0, all_count = 0; auto boundaries = [=](const GridViewType&, const IntersectionType& inter){return inter.boundary();}; auto filter_counter = [&](const IntersectionType&, const EntityType&, const EntityType&){filter_count++;}; auto all_counter = [&](const IntersectionType&, const EntityType&, const EntityType&){all_count++;}; auto on_filter_boundaries = new DSG::ApplyOn::FilteredIntersections<GridViewType>(boundaries); auto on_all_boundaries = new DSG::ApplyOn::BoundaryIntersections<GridViewType>(); walker.add(filter_counter, on_filter_boundaries); walker.add(all_counter, on_all_boundaries); walker.walk(); EXPECT_EQ(filter_count, all_count); } }; TYPED_TEST_CASE(GridWalkerTest, GridDims); TYPED_TEST(GridWalkerTest, Misc) { this->check_count(); this->check_apply_on(); } # else // HAVE_DUNE_GRID TEST(DISABLED_GridWalkerTest, Misc) {}; # endif // HAVE_DUNE_GRID
bsd-2-clause
labibramadhan/cbt-crossword-web
app/index.js
1820
require('./app'); require('./app-acl'); require('./app-chart'); require('./app-config'); require('./app-constant'); require('./app-formly'); require('./app-locale'); require('./app-routes'); require('./app-run'); require('./app-translate'); require('./components/controllers/login'); require('./components/controllers/dashboard'); require('./components/controllers/user/model'); require('./components/controllers/user/create'); require('./components/controllers/user/update'); require('./components/controllers/user/view'); require('./components/controllers/user/list'); require('./components/controllers/package/model'); require('./components/controllers/package/create'); require('./components/controllers/package/update'); require('./components/controllers/package/view'); require('./components/controllers/package/list'); require('./components/controllers/question/model'); require('./components/controllers/question/create'); require('./components/controllers/question/update'); require('./components/controllers/question/view'); require('./components/controllers/question/list'); require('./components/controllers/package/schedule/model'); require('./components/controllers/package/schedule/list'); require('./components/controllers/grade/list'); require('./components/controllers/grade/answer/list'); require('./components/controllers/grade/answer/view'); require('./components/controllers/test/start'); require('./components/controllers/test/finish'); require('./components/services/rest'); require('./components/services/menu'); require('./components/services/utils'); require('./components/services/authentication'); require('./components/services/excel-extractor'); require('./components/services/excel-importer'); require('./components/filters/range'); require('./components/directives/compile');
bsd-2-clause
sebastienros/jint
Jint.Tests.Test262/test/annexB/language/function-code/block-decl-func-skip-early-err.js
1006
// This file was procedurally generated from the following sources: // - src/annex-b-fns/func-skip-early-err.case // - src/annex-b-fns/func/block.template /*--- description: Extension not observed when creation of variable binding would produce an early error (Block statement in function scope containing a function declaration) esid: sec-web-compat-functiondeclarationinstantiation es6id: B.3.3.1 flags: [generated, noStrict] info: | B.3.3.1 Changes to FunctionDeclarationInstantiation [...] ii. If replacing the FunctionDeclaration f with a VariableStatement that has F as a BindingIdentifier would not produce any Early Errors for func and F is not an element of BoundNames of argumentsList, then [...] ---*/ var init, after; (function() { let f = 123; init = f; { function f() { } } after = f; }()); assert.sameValue(init, 123, 'binding is not initialized to `undefined`'); assert.sameValue(after, 123, 'value is not updated following evaluation');
bsd-2-clause
bliksemlabs/bliksemintegration-realtime
src/main/java/nl/ovapi/bison/JourneyProcessor.java
44665
package nl.ovapi.bison; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import javax.xml.datatype.DatatypeConfigurationException; import javax.xml.datatype.DatatypeFactory; import lombok.Getter; import lombok.NonNull; import lombok.Setter; import lombok.Synchronized; import nl.ovapi.bison.VehicleDatabase.VehicleType; import nl.ovapi.bison.model.DataOwnerCode; import nl.ovapi.bison.model.DatedPasstime; import nl.ovapi.bison.model.JourneyStopType; import nl.ovapi.bison.model.KV17cvlinfo; import nl.ovapi.bison.model.KV17cvlinfo.Mutation; import nl.ovapi.bison.model.KV6posinfo; import nl.ovapi.bison.model.KV6posinfo.Type; import nl.ovapi.bison.model.TripStopStatus; import nl.ovapi.bison.model.WheelChairAccessible; import nl.ovapi.exceptions.StopNotFoundException; import nl.ovapi.exceptions.TooEarlyException; import nl.ovapi.exceptions.TooOldException; import nl.ovapi.exceptions.UnknownKV6PosinfoType; import nl.ovapi.rid.gtfsrt.Utils; import nl.ovapi.rid.gtfsrt.services.GeometryService; import nl.ovapi.rid.gtfsrt.services.RIDservice; import nl.ovapi.rid.model.Journey; import nl.ovapi.rid.model.JourneyPattern.JourneyPatternPoint; import nl.ovapi.rid.model.StopPoint; import nl.ovapi.rid.model.TimeDemandGroup.TimeDemandGroupPoint; import nl.tt_solutions.schemas.ns.rti._1.ServiceInfoKind; import nl.tt_solutions.schemas.ns.rti._1.ServiceInfoServiceType; import nl.tt_solutions.schemas.ns.rti._1.ServiceInfoServiceType.StopList; import nl.tt_solutions.schemas.ns.rti._1.ServiceInfoStopKind; import nl.tt_solutions.schemas.ns.rti._1.ServiceInfoStopType; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Maps; import com.google.transit.realtime.GtfsRealtime.FeedEntity; import com.google.transit.realtime.GtfsRealtime.Position; import com.google.transit.realtime.GtfsRealtime.Position.Builder; import com.google.transit.realtime.GtfsRealtime.TripDescriptor; import com.google.transit.realtime.GtfsRealtime.TripDescriptor.ScheduleRelationship; import com.google.transit.realtime.GtfsRealtime.TripUpdate; import com.google.transit.realtime.GtfsRealtime.TripUpdate.StopTimeEvent; import com.google.transit.realtime.GtfsRealtime.TripUpdate.StopTimeUpdate; import com.google.transit.realtime.GtfsRealtime.VehiclePosition; import com.google.transit.realtime.GtfsRealtime.VehiclePosition.VehicleStopStatus; import com.google.transit.realtime.GtfsRealtimeOVapi; import com.google.transit.realtime.GtfsRealtimeOVapi.OVapiVehiclePosition; public class JourneyProcessor { @Getter /** * Last received KV6posinfo for this Journey */ private KV6posinfo posinfo; private Map<Integer, ArrayList<Mutation>> mutations; @Getter @Setter /** * Map with last received posinfo for reinforcementnumber. 
*/ private Map<Integer, KV6posinfo> reinforcements; @Getter private List<DatedPasstime> datedPasstimes; private static final Logger _log = LoggerFactory.getLogger(JourneyProcessor.class); private Journey _journey; public JourneyProcessor(@NonNull Journey journey){ _journey = journey; mutations = Maps.newHashMap(); reinforcements = Maps.newHashMap(); datedPasstimes = new ArrayList<DatedPasstime>(journey.getJourneypattern().getPoints().size()); materializeDatedPasstimes(journey); } //Speeds used to calculate theoretical fastest speeds private static final int DEFAULT_SPEED = (int) (75 / 3.6); // meters per seconds private static final int LONGHAUL_SPEED = (int) (90 / 3.6); // meters per seconds private final static int LONGHAUL_DISTANCE = 10000; //meters private static final int SHORTHAUL_SPEED = (int) (45 / 3.6); // meters per seconds private final static int SHORTHAUL_DISTANCE = 1000; //meters // Minimum allowed punctuality, filter out very negative punctualities. //We don't expect vehicles to drive minutes ahead of schedule. private static final int MIN_PUNCTUALITY = -360; // Minimum allowed punctuality when departing from timingpoint, in seconds private static final int MIN_PROGNOSIS_FROM_TIMINGPOINT = -30; //Punctuality floor, threshold punctuality to regard punctuality as too insignificant to propagate. private static final int PUNCTUALITY_FLOOR = 15; // seconds // Time it takes to unload a bus at a major stop eg a trainstation. private static final int MIN_STOPWAITTIME = 300; //Seconds private final static int POSINFO_MAX_AGE = 120; private final Object writeLock = new Object(); /** * @return POSIX time when journey end in seconds since January 1st 1970 00:00:00 UTC */ public long getEndEpoch(){ return _journey.getEndEpoch(); } private TripUpdate.Builder filter(TripUpdate.Builder tripUpdate){ if (tripUpdate.getStopTimeUpdateCount() == 0) return tripUpdate; tripUpdate.getStopTimeUpdateOrBuilderList(); long lastTime = Long.MAX_VALUE; for (int i = tripUpdate.getStopTimeUpdateCount()-1; i >= 0; i--){ //Filter negative dwells and stoptimes StopTimeUpdate.Builder update = tripUpdate.getStopTimeUpdateBuilder(i); if (update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.NO_DATA || update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.SKIPPED || update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate)){ continue; } if (i == 0 && (!update.hasDeparture() || !update.hasArrival())){ //System.out.println(tripUpdate.build()); //System.out.println(update.build()); _log.error("Departure or arrival is missing"); } if (update.getDeparture().getTime() > lastTime){ int offset = (int) (lastTime - update.getDeparture().getTime()); update.getDepartureBuilder().setTime(update.getDeparture().getTime()+offset); update.getDepartureBuilder().setDelay((int)(update.getDepartureBuilder().getTime()-_journey.getDepartureTime(update.getStopSequence()))); } lastTime = update.getDeparture().getTime(); if (update.getArrival().getTime() > lastTime){ int offset = (int) (lastTime - update.getArrival().getTime()); update.getArrivalBuilder().setTime(update.getArrival().getTime()+offset); update.getArrivalBuilder().setDelay((int)(update.getArrivalBuilder().getTime()-_journey.getArrivalTime(update.getStopSequence()))); } lastTime = update.getArrival().getTime(); } ArrayList<StopTimeUpdate.Builder> updates = new ArrayList<StopTimeUpdate.Builder>(); int lastDelay = Integer.MIN_VALUE; StopTimeUpdate.ScheduleRelationship lastSchedule = StopTimeUpdate.ScheduleRelationship.SCHEDULED; boolean 
hadStopTimeExtension = false; List<StopTimeUpdate.Builder> unfilteredUpdates = tripUpdate.getStopTimeUpdateBuilderList(); for (int i = 0; i < unfilteredUpdates.size(); i++){ StopTimeUpdate.Builder update = unfilteredUpdates.get(i); if (update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.NO_DATA || update.getScheduleRelationship() == StopTimeUpdate.ScheduleRelationship.SKIPPED || update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate)){ if (update.hasArrival()){ update.clearArrival(); } if (update.hasDeparture()){ update.clearDeparture(); } updates.add(update); //No data lastDelay = Integer.MIN_VALUE; lastSchedule = update.hasScheduleRelationship() ? StopTimeUpdate.ScheduleRelationship.SCHEDULED : update.getScheduleRelationship(); hadStopTimeExtension = update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate); continue; } boolean override = lastSchedule != update.getScheduleRelationship() || hadStopTimeExtension != update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate); if (update.hasArrival()){ if ((update.getArrival().getDelay() == lastDelay && !override ) || (i == 0 && update.getDeparture().getDelay() == update.getArrival().getDelay())){ update.clearArrival(); }else{ lastDelay = update.getArrival().getDelay(); } } if (update.hasDeparture()){ if (update.getDeparture().getDelay() == lastDelay && (i != 0) && !override){ //update.clearDeparture(); }else{ lastDelay = update.getDeparture().getDelay(); } } if (update.hasArrival() || (update.hasDeparture() && i == 0)){ updates.add(update); } lastSchedule = update.hasScheduleRelationship() ? StopTimeUpdate.ScheduleRelationship.SCHEDULED : update.getScheduleRelationship(); hadStopTimeExtension = update.hasExtension(GtfsRealtimeOVapi.ovapiStopTimeUpdate); } tripUpdate.clearStopTimeUpdate(); for (StopTimeUpdate.Builder update: updates){ tripUpdate.addStopTimeUpdate(update); } return tripUpdate; } /** * Materialize journey into DatedPasstime's * @param journey */ private void materializeDatedPasstimes(Journey journey){ long time = System.currentTimeMillis(); int distanceAtStartTrip = 0; for (int i = 0; i < journey.getTimedemandgroup().getPoints().size();i++){ TimeDemandGroupPoint tpt = journey.getTimedemandgroup().getPoints().get(i); JourneyPatternPoint jpt = journey.getJourneypattern().getPoint(tpt.getPointorder()); DatedPasstime dp = new DatedPasstime(); if (i == 0){ distanceAtStartTrip = jpt.getDistancefromstartroute(); dp.setDistanceDriven(0); }else{ dp.setDistanceDriven(jpt.getDistancefromstartroute()-distanceAtStartTrip); } String[] privateRef = journey.getPrivateCode().split(":"); dp.setDataOwnerCode(DataOwnerCode.valueOf(privateRef[0])); dp.setLinePlanningNumber(privateRef[1]); dp.setJourneyNumber(Integer.parseInt(privateRef[2])); if (i == 0){ dp.setJourneyStopType(JourneyStopType.FIRST); }else if (i == journey.getTimedemandgroup().getPoints().size()-1){ dp.setJourneyStopType(JourneyStopType.LAST); }else if (!jpt.isScheduled()){ dp.setJourneyStopType(JourneyStopType.INFOPOINT); }else{ dp.setJourneyStopType(JourneyStopType.INTERMEDIATE); } dp.setFortifyOrderNumber((short)0); dp.setTimingPointCode(jpt.getPointref()+""); dp.setUserStopCode(jpt.getOperatorpointref()); dp.setTargetArrivalTime(journey.getDeparturetime()+tpt.getTotaldrivetime()); dp.setTargetDepartureTime(journey.getDeparturetime()+tpt.getTotaldrivetime()+tpt.getStopwaittime()); dp.setExpectedArrivalTime(dp.getTargetArrivalTime()); dp.setExpectedDepartureTime(dp.getTargetDepartureTime()); 
dp.setLocalServiceLevelCode((int)journey.getAvailabilityConditionRef().intValue()); //TODO HACK very large overflow risk here dp.setDestinationCode(jpt.getDestinationCode()); dp.setUserStopOrderNumber(tpt.getPointorder()); if (journey.getWheelchairaccessible() == null){ dp.setWheelChairAccessible(WheelChairAccessible.UNKNOWN); }else if (journey.getWheelchairaccessible()){ dp.setWheelChairAccessible(WheelChairAccessible.ACCESSIBLE); }else{ dp.setWheelChairAccessible(WheelChairAccessible.NOTACCESSIBLE); } dp.setOperationDate(journey.getOperatingDay().toString()); dp.setTimingPointDataOwnerCode(DataOwnerCode.ALGEMEEN); dp.setTripStopStatus(TripStopStatus.PLANNED); dp.setLineDirection(journey.getJourneypattern().getDirectiontype()); dp.setTimingStop(jpt.isWaitpoint()); dp.setJourneyPatternCode(Integer.valueOf(journey.getJourneypattern().getJourneyPatternRef())); //TODO very large overflow risk here dp.setSideCode(jpt.getPlatformCode()); dp.setForAlighting(jpt.isForAlighting()); dp.setForBoarding(jpt.isForBoarding()); dp.setLastUpdateTimeStamp(time); datedPasstimes.add(dp); } } /** * Clear * KV6 posinfo object. */ @Synchronized("writeLock") public void clearKV6(){ posinfo = null; } /** * Set tripStatus for all datedPasstimes in journey * @param tripStatus */ private void setTripStatusForJourney(TripStopStatus tripStatus){ for (DatedPasstime dp : datedPasstimes){ dp.setTripStopStatus(tripStatus); } } /** * Set reason fields to all datedPasstimes * @param reasonType * @param subReasonType * @param reasonContent */ private void setReasonForJourney(String reasonType, String subReasonType, String reasonContent){ for (DatedPasstime dp : datedPasstimes){ dp.setReasonType(reasonType); dp.setSubReasonType(subReasonType); dp.setReasonContent(reasonContent); } } /** * Set advice fields to all datedPasstimes * @param adviceType * @param subAdviceType * @param adviceContent */ private void setAdviceForJourney(String adviceType, String subAdviceType, String adviceContent){ for (DatedPasstime dp : datedPasstimes){ dp.setAdviceType(adviceType); dp.setSubAdviceType(subAdviceType); dp.setAdviceContent(adviceContent); } } /** * Process KV17MutateJourney mutation * @param timestamp unix timestamp: milliseconds sinds 1970 * @param m KV17Mutation message */ private void parseMutateJourney(Long timestamp, Mutation m) { switch (m.getMutationtype()) { case CANCEL: setTripStatusForJourney(TripStopStatus.CANCEL); setReasonForJourney(m.getReasontype(),m.getSubreasontype(),m.getReasoncontent()); setAdviceForJourney(m.getAdvicetype(),m.getAdvicetype(),m.getAdvicetype()); break; case RECOVER: clearKV17mutations(); //Set UNKNOWN if past departure time //Set PLANNED if before setTripStatusForJourney(System.currentTimeMillis() > _journey.getDepartureEpoch() ? TripStopStatus.UNKNOWN : TripStopStatus.PLANNED); break; default: break; } } /** * Remove all modifications made by KV17. 
*/ private void clearKV17mutations(){ for (DatedPasstime dp : datedPasstimes){ if (dp.getTripStopStatus() == TripStopStatus.CANCEL){ dp.setTripStopStatus(TripStopStatus.PLANNED); } dp.setAdviceContent(null); dp.setAdviceType(null); dp.setSubAdviceType(null); dp.setReasonContent(null); dp.setReasonType(null); dp.setSubReasonType(null); dp.setMessageContent(null); dp.setMessageType(null); dp.setLag(0); } } private void parseMutateJourneyStop(Long timestamp, Mutation m) throws StopNotFoundException { int passageSequence = 0; //Counter for how many times we came across the userstopcode in posinfo for (DatedPasstime dp : datedPasstimes){ boolean userStopMatches = dp.getUserStopCode().equals(m.getUserstopcode()); if (userStopMatches && passageSequence == m.getPassagesequencenumber()){ switch (m.getMutationtype()) { case MUTATIONMESSAGE: dp.setAdviceType(m.getAdvicetype()); dp.setSubAdviceType(m.getSubadvicetype()); dp.setAdviceContent(m.getAdvicecontent()); dp.setReasonType(m.getReasontype()); dp.setSubReasonType(m.getSubreasontype()); dp.setReasonContent(m.getReasoncontent()); break; case CHANGEDESTINATION://Not supported by Koppelvlak78 break; case CHANGEPASSTIMES: //TODO break; case LAG: dp.setLag(m.getLagtime()); break; case RECOVER: m.setLagtime(0); setTripStatusForJourney(System.currentTimeMillis() > _journey.getDepartureEpoch() ? TripStopStatus.UNKNOWN : TripStopStatus.PLANNED); break; case CANCEL: case SHORTEN: dp.setTripStopStatus(TripStopStatus.CANCEL); break; default: _log.info("Unknown mutationtype {}",m); break; } }else if (userStopMatches){ passageSequence++; } } } public FeedEntity vehiclePosition(String id,JourneyProcessor journey,KV6posinfo posinfo,RIDservice ridService,GeometryService geomService){ switch(posinfo.getMessagetype()){ case DELAY: case END://These messagetype do not contain vehicle-position information return null; default: break; } FeedEntity.Builder feedEntity = FeedEntity.newBuilder(); feedEntity.setId(id); VehiclePosition.Builder vehiclePosition = VehiclePosition.newBuilder(); vehiclePosition.setTimestamp(posinfo.getTimestamp()); int delay = posinfo.getPunctuality() == null ? 
0 : posinfo.getPunctuality(); int passageSequence = 0; //Counter for how many times we came across the userstopcode in posinfo for (int i = 0; i < datedPasstimes.size();i++){ DatedPasstime dp = datedPasstimes.get(i); boolean userStopMatches = dp.getUserStopCode().equals(posinfo.getUserstopcode()); if (userStopMatches && passageSequence == posinfo.getPassagesequencenumber()){ //Find datedpasstime of next scheduled stoppoint DatedPasstime dpNext = null; SCAN_NEXT : for (int j = i+1; j < datedPasstimes.size();j++){ if (datedPasstimes.get(j).getJourneyStopType() != JourneyStopType.INFOPOINT){ dpNext = datedPasstimes.get(i); // First non Dummy stop break SCAN_NEXT; } } switch (posinfo.getMessagetype()){ case ARRIVAL: case ONSTOP: case INIT: vehiclePosition.setCurrentStatus(VehicleStopStatus.STOPPED_AT); vehiclePosition.setCurrentStopSequence(dp.getUserStopOrderNumber()); StopPoint sp = ridService.getStopPoint(Long.valueOf(dp.getTimingPointCode())); if (sp != null){ Builder position = Position.newBuilder(); position.setLatitude(sp.getLatitude()); position.setLongitude(sp.getLongitude()); vehiclePosition.setPosition(position); } break; case DEPARTURE: //Set location of stop sp = ridService.getStopPoint(Long.valueOf(dp.getTimingPointCode())); if (sp != null){ Builder position = Position.newBuilder(); position.setLatitude(sp.getLatitude()); position.setLongitude(sp.getLongitude()); vehiclePosition.setPosition(position); } case OFFROUTE: case ONROUTE: if (dpNext == null){ return null; } vehiclePosition.setCurrentStatus(VehicleStopStatus.IN_TRANSIT_TO); vehiclePosition.setStopId(dpNext.getTimingPointCode()); vehiclePosition.setCurrentStopSequence(dpNext.getUserStopOrderNumber()); break; default: return null; } if (posinfo.getRd_x() != null){ Position position = geomService.toWGS84(posinfo.getRd_x(), posinfo.getRd_y()); if (position != null) vehiclePosition.setPosition(position); } TripDescriptor.Builder tripDescription = _journey.tripDescriptor(); if (posinfo.getReinforcementnumber() > 0){ tripDescription.setScheduleRelationship(ScheduleRelationship.ADDED); } vehiclePosition.setTrip(tripDescription); //Set punctuality in OVapi extension if (posinfo.getPunctuality() != null){ OVapiVehiclePosition.Builder ovapiVehiclePosition = OVapiVehiclePosition.newBuilder(); if (vehiclePosition.hasCurrentStopSequence() && vehiclePosition.getCurrentStopSequence() <= 1 && delay < 0){ delay = 0; } ovapiVehiclePosition.setDelay(delay); vehiclePosition.setExtension(GtfsRealtimeOVapi.ovapiVehiclePosition, ovapiVehiclePosition.build()); } feedEntity.setVehicle(vehiclePosition); return feedEntity.build(); }else if (userStopMatches){ passageSequence++; } } return null; } @Synchronized("writeLock") public Update update(ArrayList<KV17cvlinfo> cvlinfos) throws StopNotFoundException, UnknownKV6PosinfoType, TooEarlyException, TooOldException, ParseException { long timestamp = 0; if (cvlinfos.size() == 0){ return null; } mutations.clear(); //KV17 mutations are not differential, remove possible previous modifications. 
clearKV17mutations(); for (KV17cvlinfo cvlinfo : cvlinfos) { SCAN_MUTATIONS: for (Mutation mut : cvlinfo.getMutations()) { try { timestamp = Math.max(timestamp, cvlinfo.getTimestamp()); switch (mut.getMessagetype()) { case KV17MUTATEJOURNEY: parseMutateJourney(cvlinfo.getTimestamp(), mut); continue SCAN_MUTATIONS; case KV17MUTATEJOURNEYSTOP: parseMutateJourneyStop(cvlinfo.getTimestamp(), mut); continue SCAN_MUTATIONS; } } catch (Exception e) { _log.error("Error applying KV17",e); } } } int posinfoAge = (posinfo == null) ? Integer.MAX_VALUE : (int)(Utils.currentTimeSecs()-posinfo.getTimestamp()); if (posinfo != null && posinfoAge < POSINFO_MAX_AGE){ setPunctuality(posinfo); return update(posinfo,true,true); }else{ KV6posinfo posinfo = new KV6posinfo(); posinfo.setMessagetype(Type.DELAY); //Fake KV6posinfo to get things moving posinfo.setPunctuality(0); posinfo.setTimestamp(Utils.currentTimeSecs()); SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd"); posinfo.setOperatingday(format.format(new Date())); setPunctuality(posinfo); return update(posinfo,true,true); } } /** * Set recorded arrival/departure time using timestamp of departure /arrival trigger * * Reset all recorded times to null when (re)INIT message is received. * Also set meta-data in a INIT KV6 message to the passtimes * * @param posinfo KV6posinfo object */ private void setRecordedTimes(KV6posinfo posinfo){ switch (posinfo.getMessagetype()){ case ARRIVAL: case DEPARTURE: break; case INIT: //Clear all recorded times default: return; } int departureTime = datedPasstimes.get(0).getTargetArrivalTime(); try{ for (DatedPasstime dp : datedPasstimes){ if (dp.getUserStopCode().equals(posinfo.getUserstopcode())){ long time = _journey.getDepartureEpoch(); if (posinfo.getMessagetype() == Type.ARRIVAL){ time += dp.getTargetArrivalTime()-departureTime; int delay = (int) (posinfo.getTimestamp()-time); dp.setRecordedArrivalTime(dp.getTargetArrivalTime()+delay); }else if (posinfo.getMessagetype() == Type.DEPARTURE){ /*if the current stop is a timingpoint, filter out significant negative punctualities This is done to filter false departure signals, where a vehicle falsely claims to have departed. 
*/ time += dp.getTargetDepartureTime()-departureTime; int delay = (int) (posinfo.getTimestamp()-time); if (dp.isTimingStop() || dp.getJourneyStopType() == JourneyStopType.FIRST){ if (delay < MIN_PROGNOSIS_FROM_TIMINGPOINT){ break;//Ignore trigger } }else if (delay < MIN_PUNCTUALITY){ break;//Ignore trigger }else{ dp.setRecordedDepartureTime(dp.getTargetDepartureTime()+delay); } } break; } } }catch (Exception e){ //Exceptions are relatively impossible e.printStackTrace(); } } /** * Initialize trip with values from KV6posinfo INIT message * Set wheelchairaccessibility,NumberOfCoaches from INIT message * @param posinfo */ private void initTrip(KV6posinfo posinfo){ if (posinfo.getMessagetype() != Type.INIT){ return; } VehicleType type = VehicleDatabase.vehicleType(posinfo); WheelChairAccessible accessible = null; if (type != null){ if (type.isWheelchairAccessible()){ accessible = WheelChairAccessible.ACCESSIBLE; }else{ accessible = WheelChairAccessible.NOTACCESSIBLE; } } for (DatedPasstime pt : datedPasstimes){ if (pt.getRecordedArrivalTime() >= 0 || pt.getRecordedDepartureTime() >= 0){ pt.setRecordedArrivalTime(null); pt.setRecordedDepartureTime(null); } pt.setLastUpdateTimeStamp(posinfo.getTimestamp()); if (accessible != null) pt.setWheelChairAccessible(accessible); pt.setNumberOfCoaches(posinfo.getNumberofcoaches()); } } private boolean isJourneyCanceled(){ for (DatedPasstime dp : datedPasstimes){ if (dp.getJourneyStopType() == JourneyStopType.INFOPOINT){ continue; } switch (dp.getTripStopStatus()){ case PASSED: case UNKNOWN: case ARRIVED: case DRIVING: case OFFROUTE: case PLANNED: return false; case CANCEL: continue; default: break; } } return datedPasstimes.get(datedPasstimes.size()-1).getTripStopStatus() == TripStopStatus.CANCEL; } /** * Set TripStopStatus for each DatedPasstime. 
* @param posinfo KV6posinfo object */ private void setTripStatus(KV6posinfo posinfo){ //beforeCurrent: we're scanning prior to the current stop in Posinfo //Delay messages are always "before the current stop" boolean beforeCurrent = posinfo.getMessagetype() != Type.DELAY; int passageSequence = 0; //Counter for how many times we came across the userstopcode in posinfo for (DatedPasstime dp : datedPasstimes){ boolean userStopMatches = dp.getUserStopCode().equals(posinfo.getUserstopcode()); if (userStopMatches && passageSequence == posinfo.getPassagesequencenumber()){ switch (posinfo.getMessagetype()){ case DEPARTURE: //Vehicle departed from this stop case ONROUTE: //Vehicle is driving away from this stop case OFFROUTE: //Vehicle is driving away from this stop, but off planned route if (dp.getTripStopStatus() != TripStopStatus.CANCEL) dp.setTripStopStatus(TripStopStatus.PASSED); break; case ARRIVAL: //Vehicle arrived at this stop case ONSTOP: //Vehicle is halted at this stop if (dp.getTripStopStatus() != TripStopStatus.CANCEL) dp.setTripStopStatus(TripStopStatus.ARRIVED); break; default: //Shouldn't happen as all other messageType's do not contain userstopcode break; } beforeCurrent = false; }else if (beforeCurrent){ if (userStopMatches){ passageSequence++; //We're on a stop with the same userstopcode, but the KV6posinfo is referring to the same stop on a later passage } if (dp.getTripStopStatus() != TripStopStatus.CANCEL) dp.setTripStopStatus(TripStopStatus.PASSED); }else{//Now we're scanning stops not yet visited if (dp.getTripStopStatus() != TripStopStatus.CANCEL) dp.setTripStopStatus(TripStopStatus.DRIVING); } } } @Synchronized("writeLock") public Update setAsUnknown(){ Update update = new Update(); for (DatedPasstime dp : datedPasstimes){ switch (dp.getTripStopStatus()){ case ARRIVED: case CANCEL: case PASSED: case UNKNOWN: continue; case OFFROUTE: case DRIVING: case PLANNED: dp.setTripStopStatus(TripStopStatus.UNKNOWN); if (update.changedPasstimes == null) update.changedPasstimes = new ArrayList<DatedPasstime>(); update.changedPasstimes.add(dp); if (update.gtfsRealtimeTrip == null) update.gtfsRealtimeTrip = filter(tripUpdateFromKV8()); } } return update; } /** * Set estimated times using the punctuality in posinfo for stop and make prognoses for subsequent stops * @param posinfo KV6posinfo object */ public void setPunctuality(KV6posinfo posinfo){ switch (posinfo.getMessagetype()){ case INIT: //No punctuality information case END://No punctuality information case OFFROUTE: //No punctuality information case ARRIVAL://No punctuality information default: return; case DELAY: if (posinfo.getPunctuality() > 3600){ return; //Filter DELAY messages with +1 hour delay. } case DEPARTURE: case ONROUTE: case ONSTOP: break; } // We first need to scan all the stops in the journey, to set prognoses // Unless it's a KV6 delay message Integer prognosis = posinfo.getMessagetype() == Type.DELAY ? posinfo.getPunctuality() : null; //Time since start journey, at the pointsince start journey, at which the punctuality starts for with the punctuality is estimated in KV6 //Used to decay KV6 punctuality using simple time decay. int timeAtCurrentKV6Stop = 0; int passageSequence = 0; //Counter for how many times we came across the userstopcode in posinfo for (int i = 0; i < datedPasstimes.size();i++){ DatedPasstime dp = datedPasstimes.get(i); //next datedPasstime, null on last stop. DatedPasstime dpNext = (i != datedPasstimes.size()-1) ? 
datedPasstimes.get(i+1) : null; boolean userStopMatches = dp.getUserStopCode().equals(posinfo.getUserstopcode()); if (userStopMatches && passageSequence == posinfo.getPassagesequencenumber()){ prognosis = posinfo.getPunctuality(); //Set initial prognosis for following stops //Punctuality in departure message indicates difference between target and expected departure at stop if (posinfo.getMessagetype() == Type.DEPARTURE && prognosis >= MIN_PUNCTUALITY && (!dp.isTimingStop() || prognosis > MIN_PROGNOSIS_FROM_TIMINGPOINT)){ dp.setExpectedDepartureTime(dp.getTargetDepartureTime()+prognosis); } //set time to possibly use in simple timedecay timeAtCurrentKV6Stop = dp.getTargetArrivalTime(); /*if the current stop is a timingpoint, filter out significant negative punctualities This is done to filter false departure signals, where a vehicle falsely claims to have departed. */ if (dp.isTimingStop() || dp.getJourneyStopType() == JourneyStopType.FIRST){ if (prognosis < MIN_PROGNOSIS_FROM_TIMINGPOINT){ prognosis = 0; } }else if (prognosis < MIN_PUNCTUALITY){ //Set prognosis to 0 if vehicle has a too large punctuality. prognosis = 0; } }else if (prognosis == null && userStopMatches){ passageSequence++; //We're on a stop with the same userstopcode, but the KV6posinfo is referring to the same stop on a later passage }else if (prognosis != null){//Now we're scanning stops not yet visited and have a prognosis if (Math.abs(prognosis) < PUNCTUALITY_FLOOR){ prognosis = 0; //Punctuality is thus low, it's no longer significant enough to propagate } dp.setExpectedArrivalTime(dp.getTargetArrivalTime()+prognosis); int stopWaitTime = dp.getTargetDepartureTime()-dp.getTargetArrivalTime(); if (dp.isTimingStop() && prognosis < 0){ prognosis = 0; //This is a timingstop, vehicles are not expected to depart early. }else if (stopWaitTime > MIN_STOPWAITTIME){ //Dwell-time cq stopwaittime is larger than the minimum set, use comfort zone to reduce delay int dwellComfort = stopWaitTime-MIN_STOPWAITTIME; prognosis -= dwellComfort; } if (dp.getLag() != null && dp.getLag() >= 0){ //Lag mutation via KV17, delay is at minimum the lag time prognosis = Math.min(prognosis, dp.getLag()); } dp.setExpectedDepartureTime(dp.getTargetDepartureTime()+prognosis); if (Math.abs(prognosis) > PUNCTUALITY_FLOOR && dpNext != null){ if (prognosis < 0){ //Negative punctuality int driveTime = dpNext.getTargetArrivalTime()-dp.getTargetDepartureTime(); int theoreticalMinDriveTime = theoreticalMinDriveTime(dpNext.getDistanceDriven()-dp.getDistanceDriven()); if (driveTime < theoreticalMinDriveTime){ // Use too fast legs to reduce negative punctuality. 
prognosis = decayByDistance(prognosis, dpNext.getTargetArrivalTime()-dp.getTargetDepartureTime(), dpNext.getDistanceDriven()-dp.getDistanceDriven()); }else{ // Use simple time decaying to decay the delay prognosis = decayByTime(prognosis,dpNext.getTargetArrivalTime()-timeAtCurrentKV6Stop); } }else if (prognosis > 0){ //Positive punctuality, decay using delta between theoretical and planned drivetime int distance = dpNext.getDistanceDriven()-dp.getDistanceDriven(); prognosis = decayByDistance(prognosis, dpNext.getTargetArrivalTime()-dp.getTargetDepartureTime(),distance); } } } } } /** * @param driveDistance distance in meters between current_stop and next stop * @return theoretical min drivetime in seconds */ private int theoreticalMinDriveTime(int driveDistance){ int speed = DEFAULT_SPEED; if (driveDistance < SHORTHAUL_DISTANCE){ speed = SHORTHAUL_SPEED; }else if (driveDistance > LONGHAUL_DISTANCE){ speed = LONGHAUL_SPEED; } return driveDistance / speed; } /** * Decay delays using Ttheoretical_fastest - Tplanned difference(comfortzone) * @param delay deviation in seconds from targettime * @param plannedDriveTime drivetime in seconds between current_stop and next stop * @param driveDistance distance in meters between current_stop and next stop * @return */ private int decayByDistance(int delay,int plannedDriveTime,int driveDistance){ if (driveDistance == 0){ _log.trace("Drive distance 0"); return delay; } int theoreticalMinDriveTime = theoreticalMinDriveTime(driveDistance); int newDelay = delay-(plannedDriveTime-theoreticalMinDriveTime); //If the delay is smaller than the comfortzone scheduled, return 0 if (delay >= 0 && newDelay < 0){ return 0; }else if (delay <= 0 && newDelay > 0){ return 0; } return newDelay; } /** * Decay delay using simple timedecay * @param delay deviation in seconds from targettime * @param elapsedTime time between current stop and next stop * @return decayed delay at next stop */ private int decayByTime(int delay,int elapsedTime){ if (delay == 0) return 0; double vlamba = 1.0 / 500.0; double decay = Math.exp(-vlamba * elapsedTime); int decayeddelay = (int) (decay * delay); return decayeddelay; } /** * @return * @throws ParseException */ public TripUpdate.Builder tripUpdateFromKV8(){ TripUpdate.Builder trip = TripUpdate.newBuilder(); TripDescriptor.Builder tripDesc = _journey.tripDescriptor(); if (isJourneyCanceled()) tripDesc.setScheduleRelationship(ScheduleRelationship.CANCELED); trip.setTrip(tripDesc); long departureTime = _journey.getDepartureEpoch()-datedPasstimes.get(0).getTargetDepartureTime(); for (DatedPasstime dp : datedPasstimes){ if (dp.getJourneyStopType() == JourneyStopType.INFOPOINT){ continue; } StopTimeUpdate.Builder stop = StopTimeUpdate.newBuilder(); stop.setStopSequence(dp.getUserStopOrderNumber()); stop.setStopId(dp.getTimingPointCode()); switch (dp.getTripStopStatus()){ case CANCEL: stop.setScheduleRelationship(StopTimeUpdate.ScheduleRelationship.SKIPPED); break; case DRIVING: case ARRIVED: stop.setScheduleRelationship(StopTimeUpdate.ScheduleRelationship.SCHEDULED); break; case OFFROUTE: case UNKNOWN: stop.setScheduleRelationship(StopTimeUpdate.ScheduleRelationship.NO_DATA); break; case PASSED: case PLANNED: break; } StopTimeEvent.Builder arrival = StopTimeEvent.newBuilder(); if (dp.getRecordedArrivalTime() >= 0){ arrival.setTime(departureTime+dp.getRecordedArrivalTime()); arrival.setDelay((dp.getRecordedArrivalTime()-dp.getTargetArrivalTime())); }else if (dp.getRecordedDepartureTime() > 0 && dp.getTargetArrivalTime() == 
dp.getTargetDepartureTime()){ arrival.setDelay((dp.getRecordedDepartureTime()-dp.getTargetDepartureTime())); arrival.setTime(departureTime+dp.getRecordedDepartureTime()); }else{ arrival.setTime(departureTime+dp.getExpectedArrivalTime()); arrival.setDelay((dp.getExpectedArrivalTime()-dp.getTargetArrivalTime())); } stop.setArrival(arrival); StopTimeEvent.Builder departure = StopTimeEvent.newBuilder(); if (dp.getRecordedDepartureTime() >= 0){ departure.setDelay((dp.getRecordedDepartureTime()-dp.getTargetDepartureTime())); departure.setTime(departureTime+dp.getRecordedDepartureTime()); }else if (dp.getRecordedArrivalTime() >= 0 && dp.getTargetArrivalTime() == dp.getTargetDepartureTime() && dp.getRecordedArrivalTime() >= dp.getTargetDepartureTime()){ departure.setDelay((dp.getRecordedArrivalTime()-dp.getTargetDepartureTime())); departure.setTime(departureTime+dp.getRecordedArrivalTime()); }else{ departure.setDelay((dp.getExpectedDepartureTime()-dp.getTargetDepartureTime())); departure.setTime(departureTime+dp.getExpectedDepartureTime()); } stop.setDeparture(departure); trip.addStopTimeUpdate(stop); } return trip; } public static class Update{ @Getter private TripUpdate.Builder gtfsRealtimeTrip; @Getter private List<DatedPasstime> changedPasstimes; @Getter private ServiceInfoServiceType serviceInfo; } private ServiceInfoServiceType serviceInfoFromKV8(){ try { ServiceInfoServiceType serviceInfo = new ServiceInfoServiceType(); serviceInfo.setCompanyCode(_journey.getAgencyId()); serviceInfo.setTransportModeCode(_journey.getRouteId()+""); serviceInfo.setServiceCode(_journey.getPrivateCode()); serviceInfo.setStopList(new StopList()); serviceInfo.setServiceType(ServiceInfoKind.NORMAL_SERVICE); long dayEpoch = _journey.getDepartureEpoch()-datedPasstimes.get(0).getTargetArrivalTime(); for (DatedPasstime dp : datedPasstimes){ if (dp.getJourneyStopType() == JourneyStopType.INFOPOINT){ continue; // Skip dummies } ServiceInfoStopType stop = new ServiceInfoStopType(); stop.setStopCode(String.format("%s:%s",dp.getDataOwnerCode().name(),dp.getUserStopCode())); stop.setStopServiceCode(_journey.getPrivateCode()); if (dp.getTripStopStatus() == TripStopStatus.CANCEL){ serviceInfo.setServiceType(ServiceInfoKind.CANCELLED_SERVICE); stop.setStopType(ServiceInfoStopKind.CANCELLED_STOP); } if (dp.isForAlighting() && dp.getJourneyStopType() != JourneyStopType.FIRST){ stop.setArrivalPlatform(dp.getSideCode()); DateTime arrivalDt = _journey.getArrivalDateTime(dp.getUserStopOrderNumber()); if (arrivalDt != null) { if (arrivalDt.getSecondOfMinute() >= 30) { arrivalDt.plusMinutes(1); } arrivalDt = arrivalDt.withSecondOfMinute(0); int delay = 0; // in Seconds if (dp.getRecordedArrivalTime() > 0 && (dp.getRecordedDepartureTime() > 0 || dp.getRecordedDepartureTime() <= dp.getRecordedArrivalTime())) { //No recorded arrivaltime and either no or >= recorded departuretime delay = dp.getRecordedArrivalTime() - dp.getTargetArrivalTime(); } else if (dp.getRecordedArrivalTime() > 0 && dp.getTargetArrivalTime() == dp.getTargetDepartureTime()) { //No recorded arrivaltime fall back to recorded departure time if possible delay = dp.getRecordedDepartureTime() - dp.getTargetArrivalTime(); } else { delay = dp.getExpectedArrivalTime() - dp.getTargetArrivalTime(); } int delayMin = roundSecondsToMinute(delay); if (delayMin != 0) stop.setArrivalTimeDelay(DatatypeFactory.newInstance().newDuration(delayMin * 60 * 1000)); stop.setArrival(arrivalDt); } } if (dp.isForBoarding() && dp.getJourneyStopType() != JourneyStopType.LAST){ 
stop.setDeparturePlatform(dp.getSideCode()); DateTime departureDt = _journey.getDepartureDateTime(dp.getUserStopOrderNumber()); if (departureDt != null) { if (departureDt.getSecondOfMinute() >= 30) { departureDt.plusMinutes(1); } departureDt = departureDt.withSecondOfMinute(0); int delay = 0; // in Seconds if (dp.getRecordedDepartureTime() > 0) { delay = dp.getRecordedDepartureTime() - dp.getTargetArrivalTime(); } else if (dp.getRecordedArrivalTime() > 0 && dp.getTargetArrivalTime() == dp.getTargetDepartureTime()) { delay = dp.getRecordedArrivalTime() - dp.getTargetArrivalTime(); } else { delay = dp.getExpectedDepartureTime() - dp.getTargetArrivalTime(); } int delayMin = roundSecondsToMinute(delay); if (delayMin != 0) stop.setDepartureTimeDelay(DatatypeFactory.newInstance().newDuration(delayMin * 60 * 1000)); stop.setDeparture(departureDt); } } serviceInfo.getStopList().getStop().add(stop); } return serviceInfo; } catch (DatatypeConfigurationException e) { return null; } } /** * @param seconds * @return rounded minute */ private static int roundSecondsToMinute(int seconds){ int minutes = seconds / 60; if (seconds%60 >= 30) minutes++; return minutes; } /** * Process KV6posinfo object * @param posinfo KV6posinfo object * @return Update object, with GTFSrealtime and KV8 objects * @throws StopNotFoundException UserstopCode of Posinfo not in journey * @throws UnknownKV6PosinfoType Unknown MessageType in KV6 * @throws TooEarlyException KV6posinfo arrives too sucipiosuly eraly * @throws TooOldException KV6posinfo is too old * @throws ParseException */ public Update update(KV6posinfo posinfo) throws StopNotFoundException, UnknownKV6PosinfoType, TooEarlyException, TooOldException, ParseException{ return update(posinfo,false,false); } /** * Process KV6posinfo object * @param posinfo KV6posinfo object * @param ignoreState ignore previous state, always create GTFSrealtime update * @return Update object, with GTFSrealtime and KV8 objects * @throws StopNotFoundException UserstopCode of Posinfo not in journey * @throws UnknownKV6PosinfoType Unknown MessageType in KV6 * @throws TooEarlyException KV6posinfo arrives too sucipiosuly eraly * @throws TooOldException KV6posinfo is too old * @throws ParseException */ public Update update(KV6posinfo posinfo,boolean ignoreState) throws StopNotFoundException, UnknownKV6PosinfoType, TooEarlyException, TooOldException, ParseException{ return update(posinfo,ignoreState,false); } /** * Process KV6posinfo object * @param posinfo KV6posinfo object * @param ignoreState ignore previous state, always create GTFSrealtime update. 
* @param ignoreTooEarly ignore too early/late KV6 messages, used for KV17 pseudo KV6 * @return Update object, with GTFSrealtime and KV8 objects * @throws StopNotFoundException UserstopCode of Posinfo not in journey * @throws UnknownKV6PosinfoType Unknown MessageType in KV6 * @throws TooEarlyException KV6posinfo arrives too sucipiosuly eraly * @throws TooOldException KV6posinfo is too old * @throws ParseException */ @Synchronized("writeLock") public Update update(KV6posinfo posinfo,boolean ignoreState,boolean ignoreTooEarly) throws StopNotFoundException,UnknownKV6PosinfoType, TooEarlyException, TooOldException, ParseException { int[] arriveDelays = new int[datedPasstimes.size()]; int[] departureDelays = new int[datedPasstimes.size()]; long lastUpdate = 0; for (int i = 0; i < datedPasstimes.size(); i++){ DatedPasstime dp = datedPasstimes.get(i); arriveDelays[i] = dp.getExpectedArrivalTime()-dp.getTargetArrivalTime(); departureDelays[i] = dp.getExpectedDepartureTime()-dp.getTargetDepartureTime(); lastUpdate = Math.max(dp.getLastUpdateTimeStamp(), lastUpdate); } long currentTime = Utils.currentTimeSecs(); if (posinfo.getTimestamp()<currentTime-POSINFO_MAX_AGE && !ignoreTooEarly){ throw new TooOldException(posinfo.toString()); } long departureTime = _journey.getDepartureEpoch(); if (!posinfo.getOperatingday().equals(datedPasstimes.get(0).getOperationDate())){ throw new IllegalArgumentException("Wrong date"); } if (currentTime < departureTime){ int timeDeltaSeconds = (int)(departureTime-Utils.currentTimeSecs()); if (timeDeltaSeconds>=3600 && !ignoreTooEarly &&(posinfo.getMessagetype() != Type.INIT || posinfo.getMessagetype() != Type.DELAY)){ throw new TooEarlyException(posinfo.toString()); } } if (posinfo.getUserstopcode() != null && !_journey.getJourneypattern().contains(posinfo.getUserstopcode())) { throw new StopNotFoundException(posinfo.toString()); } setRecordedTimes(posinfo); if (this.posinfo == null || posinfo.getTimestamp() >= this.posinfo.getTimestamp()){ //This condition makes sure we're not overriding good information with out-of-sequence/old position info's if (posinfo.getMessagetype() == Type.INIT) initTrip(posinfo); setTripStatus(posinfo); setPunctuality(posinfo); if (posinfo.getDataownercode() != null){ this.posinfo = posinfo; } } Update update = new Update(); update.changedPasstimes = new ArrayList<>(); if (ignoreState){ update.changedPasstimes.addAll(datedPasstimes); update.gtfsRealtimeTrip = filter(tripUpdateFromKV8()); update.serviceInfo = serviceInfoFromKV8(); }else { for (int i = 0; i < datedPasstimes.size(); i++){ DatedPasstime dp = datedPasstimes.get(i); if (dp.getLastUpdateTimeStamp() > lastUpdate){ update.changedPasstimes.add(dp); } if (dp.getJourneyStopType() == JourneyStopType.INFOPOINT){ continue; //Dummy's don't warrant a new tripupdate } if (update.gtfsRealtimeTrip == null){ if (arriveDelays[i] != dp.getExpectedArrivalTime()-dp.getTargetArrivalTime()){ update.gtfsRealtimeTrip = filter(tripUpdateFromKV8());; }else if (departureDelays[i] != dp.getExpectedDepartureTime()-dp.getTargetDepartureTime()){ update.gtfsRealtimeTrip = filter(tripUpdateFromKV8());; } } if (update.serviceInfo == null){ if (dp.isForAlighting() && roundSecondsToMinute(arriveDelays[i]) != roundSecondsToMinute(dp.getExpectedArrivalTime()-dp.getTargetArrivalTime())){ update.serviceInfo = serviceInfoFromKV8(); }else if (dp.isForBoarding() && roundSecondsToMinute(departureDelays[i]) != roundSecondsToMinute((dp.getExpectedDepartureTime()-dp.getTargetDepartureTime()))){ update.serviceInfo = 
serviceInfoFromKV8(); } } } } return update; } }
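The prognosis propagation in setPunctuality() above rests on two decay rules: decayByDistance() shrinks the delay by the slack between the planned drive time and a theoretical minimum (used for late vehicles, and for early vehicles on unusually fast legs), while decayByTime() applies a simple exponential decay (used for early vehicles on ordinary legs). The self-contained sketch below reproduces just that arithmetic so the two rules can be inspected in isolation. The speed and distance constants are placeholder assumptions standing in for the class's SHORTHAUL/LONGHAUL/DEFAULT constants, whose actual values are not shown here; only the 1/500 time constant comes directly from decayByTime().

// Standalone sketch of the punctuality-decay math used in setPunctuality().
// Speed/distance constants below are illustrative guesses, not values from the file.
final class DelayDecaySketch {
    static final int SHORTHAUL_DISTANCE = 1_000;   // assumption (metres)
    static final int LONGHAUL_DISTANCE = 10_000;   // assumption (metres)
    static final int SHORTHAUL_SPEED = 8;          // assumption (m/s)
    static final int DEFAULT_SPEED = 12;           // assumption (m/s)
    static final int LONGHAUL_SPEED = 20;          // assumption (m/s)

    static int theoreticalMinDriveTime(int driveDistanceMetres) {
        int speed = DEFAULT_SPEED;
        if (driveDistanceMetres < SHORTHAUL_DISTANCE) {
            speed = SHORTHAUL_SPEED;
        } else if (driveDistanceMetres > LONGHAUL_DISTANCE) {
            speed = LONGHAUL_SPEED;
        }
        return driveDistanceMetres / speed;
    }

    // Shrink the delay by the slack between planned and fastest plausible
    // drive time; never let the sign flip past zero.
    static int decayByDistance(int delaySeconds, int plannedDriveTime, int driveDistanceMetres) {
        if (driveDistanceMetres == 0) {
            return delaySeconds;
        }
        int newDelay = delaySeconds - (plannedDriveTime - theoreticalMinDriveTime(driveDistanceMetres));
        if ((delaySeconds >= 0 && newDelay < 0) || (delaySeconds <= 0 && newDelay > 0)) {
            return 0;
        }
        return newDelay;
    }

    // Exponential decay with a 500 s time constant, as in decayByTime() above.
    static int decayByTime(int delaySeconds, int elapsedSeconds) {
        if (delaySeconds == 0) {
            return 0;
        }
        return (int) (Math.exp(-elapsedSeconds / 500.0) * delaySeconds);
    }

    public static void main(String[] args) {
        // A vehicle running 120 s early: time decay over a 4-minute leg
        // brings the (negative) punctuality to roughly -74 s.
        System.out.println(decayByTime(-120, 240));
        // A 90 s delay over a 2 km leg planned at 300 s: the schedule slack
        // (300 - 166 = 134 s) more than absorbs it, so the prognosis becomes 0.
        System.out.println(decayByDistance(90, 300, 2_000));
    }
}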
bsd-2-clause
MadMikeyB/HotOrNot
lib/Cake/Test/Case/Model/Datasource/DboSourceTest.php
26649
<?php /** * DboSourceTest file * * PHP 5 * * CakePHP(tm) Tests <http://book.cakephp.org/view/1196/Testing> * Copyright 2005-2011, Cake Software Foundation, Inc. (http://cakefoundation.org) * * Licensed under The Open Group Test Suite License * Redistributions of files must retain the above copyright notice. * * @copyright Copyright 2005-2011, Cake Software Foundation, Inc. (http://cakefoundation.org) * @link http://book.cakephp.org/view/1196/Testing CakePHP(tm) Tests * @package Cake.Test.Case.Model.Datasource * @since CakePHP(tm) v 1.2.0.4206 * @license MIT License (http://www.opensource.org/licenses/mit-license.php) */ App::uses('Model', 'Model'); App::uses('AppModel', 'Model'); App::uses('DataSource', 'Model/Datasource'); App::uses('DboSource', 'Model/Datasource'); require_once dirname(dirname(__FILE__)) . DS . 'models.php'; class MockDataSource extends DataSource { } class DboTestSource extends DboSource { public function connect($config = array()) { $this->connected = true; } public function mergeAssociation(&$data, &$merge, $association, $type, $selfJoin = false) { return parent::_mergeAssociation(&$data, &$merge, $association, $type, $selfJoin); } public function setConfig($config) { $this->config = $config; } } /** * DboSourceTest class * * @package Cake.Test.Case.Model.Datasource */ class DboSourceTest extends CakeTestCase { /** * debug property * * @var mixed null */ public $debug = null; /** * autoFixtures property * * @var bool false */ public $autoFixtures = false; /** * fixtures property * * @var array */ public $fixtures = array( 'core.apple', 'core.article', 'core.articles_tag', 'core.attachment', 'core.comment', 'core.sample', 'core.tag', 'core.user', 'core.post', 'core.author', 'core.data_test' ); /** * setUp method * * @return void */ public function setUp() { parent::setUp(); $this->__config = $this->db->config; $this->testDb = new DboTestSource(); $this->testDb->cacheSources = false; $this->testDb->startQuote = '`'; $this->testDb->endQuote = '`'; $this->Model = new TestModel(); } /** * endTest method * * @return void */ public function tearDown() { parent::tearDown(); unset($this->Model); } /** * test that booleans and null make logical condition strings. 
* * @return void */ public function testBooleanNullConditionsParsing() { $result = $this->testDb->conditions(true); $this->assertEqual($result, ' WHERE 1 = 1', 'true conditions failed %s'); $result = $this->testDb->conditions(false); $this->assertEqual($result, ' WHERE 0 = 1', 'false conditions failed %s'); $result = $this->testDb->conditions(null); $this->assertEqual($result, ' WHERE 1 = 1', 'null conditions failed %s'); $result = $this->testDb->conditions(array()); $this->assertEqual($result, ' WHERE 1 = 1', 'array() conditions failed %s'); $result = $this->testDb->conditions(''); $this->assertEqual($result, ' WHERE 1 = 1', '"" conditions failed %s'); $result = $this->testDb->conditions(' ', '" " conditions failed %s'); $this->assertEqual($result, ' WHERE 1 = 1'); } /** * test that order() will accept objects made from DboSource::expression * * @return void */ public function testOrderWithExpression() { $expression = $this->testDb->expression("CASE Sample.id WHEN 1 THEN 'Id One' ELSE 'Other Id' END AS case_col"); $result = $this->testDb->order($expression); $expected = " ORDER BY CASE Sample.id WHEN 1 THEN 'Id One' ELSE 'Other Id' END AS case_col"; $this->assertEqual($expected, $result); } /** * testMergeAssociations method * * @return void */ public function testMergeAssociations() { $data = array('Article2' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' )); $merge = array('Topic' => array(array( 'id' => '1', 'topic' => 'Topic', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ))); $expected = array( 'Article2' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ), 'Topic' => array( 'id' => '1', 'topic' => 'Topic', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ); $this->testDb->mergeAssociation($data, $merge, 'Topic', 'hasOne'); $this->assertEqual($data, $expected); $data = array('Article2' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' )); $merge = array('User2' => array(array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ))); $expected = array( 'Article2' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ), 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ); $this->testDb->mergeAssociation($data, $merge, 'User2', 'belongsTo'); $this->assertEqual($data, $expected); $data = array( 'Article2' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ) ); $merge = array(array('Comment' => false)); $expected = array( 'Article2' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' 
), 'Comment' => array() ); $this->testDb->mergeAssociation($data, $merge, 'Comment', 'hasMany'); $this->assertEqual($data, $expected); $data = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ) ); $merge = array( array( 'Comment' => array( 'id' => '1', 'comment' => 'Comment 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ), array( 'Comment' => array( 'id' => '2', 'comment' => 'Comment 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ) ); $expected = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ), 'Comment' => array( array( 'id' => '1', 'comment' => 'Comment 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), array( 'id' => '2', 'comment' => 'Comment 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ) ); $this->testDb->mergeAssociation($data, $merge, 'Comment', 'hasMany'); $this->assertEqual($data, $expected); $data = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ) ); $merge = array( array( 'Comment' => array( 'id' => '1', 'comment' => 'Comment 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ), array( 'Comment' => array( 'id' => '2', 'comment' => 'Comment 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ) ); $expected = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ), 'Comment' => array( array( 'id' => '1', 'comment' => 'Comment 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31', 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ), array( 'id' => '2', 'comment' => 'Comment 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31', 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ) ) ); $this->testDb->mergeAssociation($data, $merge, 'Comment', 'hasMany'); $this->assertEqual($data, $expected); $data = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ) ); $merge = array( array( 'Comment' => array( 'id' => '1', 'comment' => 'Comment 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => 
'5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'Tag' => array( array('id' => 1, 'tag' => 'Tag 1'), array('id' => 2, 'tag' => 'Tag 2') ) ), array( 'Comment' => array( 'id' => '2', 'comment' => 'Comment 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'Tag' => array() ) ); $expected = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ), 'Comment' => array( array( 'id' => '1', 'comment' => 'Comment 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31', 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'Tag' => array( array('id' => 1, 'tag' => 'Tag 1'), array('id' => 2, 'tag' => 'Tag 2') ) ), array( 'id' => '2', 'comment' => 'Comment 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31', 'User2' => array( 'id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), 'Tag' => array() ) ) ); $this->testDb->mergeAssociation($data, $merge, 'Comment', 'hasMany'); $this->assertEqual($data, $expected); $data = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ) ); $merge = array( array( 'Tag' => array( 'id' => '1', 'tag' => 'Tag 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ), array( 'Tag' => array( 'id' => '2', 'tag' => 'Tag 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ), array( 'Tag' => array( 'id' => '3', 'tag' => 'Tag 3', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ) ); $expected = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ), 'Tag' => array( array( 'id' => '1', 'tag' => 'Tag 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), array( 'id' => '2', 'tag' => 'Tag 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ), array( 'id' => '3', 'tag' => 'Tag 3', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ) ); $this->testDb->mergeAssociation($data, $merge, 'Tag', 'hasAndBelongsToMany'); $this->assertEqual($data, $expected); $data = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ) ); $merge = array( array( 'Tag' => array( 'id' => '1', 'tag' => 'Tag 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ), array( 'Tag' => array( 'id' => '2', 'tag' => 'Tag 2', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ), array( 'Tag' => array( 'id' => '3', 'tag' => 'Tag 3', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31' ) ) ); 
$expected = array( 'Article' => array( 'id' => '1', 'user_id' => '1', 'title' => 'First Article', 'body' => 'First Article Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31' ), 'Tag' => array('id' => '1', 'tag' => 'Tag 1', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31') ); $this->testDb->mergeAssociation($data, $merge, 'Tag', 'hasOne'); $this->assertEqual($data, $expected); } /** * testMagicMethodQuerying method * * @return void */ public function testMagicMethodQuerying() { $result = $this->db->query('findByFieldName', array('value'), $this->Model); $expected = array('first', array( 'conditions' => array('TestModel.field_name' => 'value'), 'fields' => null, 'order' => null, 'recursive' => null )); $this->assertEqual($expected, $result); $result = $this->db->query('findByFindBy', array('value'), $this->Model); $expected = array('first', array( 'conditions' => array('TestModel.find_by' => 'value'), 'fields' => null, 'order' => null, 'recursive' => null )); $this->assertEqual($expected, $result); $result = $this->db->query('findAllByFieldName', array('value'), $this->Model); $expected = array('all', array( 'conditions' => array('TestModel.field_name' => 'value'), 'fields' => null, 'order' => null, 'limit' => null, 'page' => null, 'recursive' => null )); $this->assertEqual($expected, $result); $result = $this->db->query('findAllById', array('a'), $this->Model); $expected = array('all', array( 'conditions' => array('TestModel.id' => 'a'), 'fields' => null, 'order' => null, 'limit' => null, 'page' => null, 'recursive' => null )); $this->assertEqual($expected, $result); $result = $this->db->query('findByFieldName', array(array('value1', 'value2', 'value3')), $this->Model); $expected = array('first', array( 'conditions' => array('TestModel.field_name' => array('value1', 'value2', 'value3')), 'fields' => null, 'order' => null, 'recursive' => null )); $this->assertEqual($expected, $result); $result = $this->db->query('findByFieldName', array(null), $this->Model); $expected = array('first', array( 'conditions' => array('TestModel.field_name' => null), 'fields' => null, 'order' => null, 'recursive' => null )); $this->assertEqual($expected, $result); $result = $this->db->query('findByFieldName', array('= a'), $this->Model); $expected = array('first', array( 'conditions' => array('TestModel.field_name' => '= a'), 'fields' => null, 'order' => null, 'recursive' => null )); $this->assertEqual($expected, $result); $result = $this->db->query('findByFieldName', array(), $this->Model); $expected = false; $this->assertEqual($expected, $result); } /** * * @expectedException PDOException * @return void */ public function testDirectCallThrowsException() { $result = $this->db->query('directCall', array(), $this->Model); } /** * testValue method * * @return void */ public function testValue() { $result = $this->db->value('{$__cakeForeignKey__$}'); $this->assertEqual($result, '{$__cakeForeignKey__$}'); $result = $this->db->value(array('first', 2, 'third')); $expected = array('\'first\'', 2, '\'third\''); $this->assertEqual($expected, $result); } /** * testReconnect method * * @return void */ public function testReconnect() { $this->testDb->reconnect(array('prefix' => 'foo')); $this->assertTrue($this->testDb->connected); $this->assertEqual($this->testDb->config['prefix'], 'foo'); } /** * testName method * * @return void */ public function testName() { $result = $this->testDb->name('name'); $expected = '`name`'; $this->assertEqual($expected, 
$result); $result = $this->testDb->name(array('name', 'Model.*')); $expected = array('`name`', '`Model`.*'); $this->assertEqual($expected, $result); $result = $this->testDb->name('MTD()'); $expected = 'MTD()'; $this->assertEqual($expected, $result); $result = $this->testDb->name('(sm)'); $expected = '(sm)'; $this->assertEqual($expected, $result); $result = $this->testDb->name('name AS x'); $expected = '`name` AS `x`'; $this->assertEqual($expected, $result); $result = $this->testDb->name('Model.name AS x'); $expected = '`Model`.`name` AS `x`'; $this->assertEqual($expected, $result); $result = $this->testDb->name('Function(Something.foo)'); $expected = 'Function(`Something`.`foo`)'; $this->assertEqual($expected, $result); $result = $this->testDb->name('Function(SubFunction(Something.foo))'); $expected = 'Function(SubFunction(`Something`.`foo`))'; $this->assertEqual($expected, $result); $result = $this->testDb->name('Function(Something.foo) AS x'); $expected = 'Function(`Something`.`foo`) AS `x`'; $this->assertEqual($expected, $result); $result = $this->testDb->name('name-with-minus'); $expected = '`name-with-minus`'; $this->assertEqual($expected, $result); $result = $this->testDb->name(array('my-name', 'Foo-Model.*')); $expected = array('`my-name`', '`Foo-Model`.*'); $this->assertEqual($expected, $result); $result = $this->testDb->name(array('Team.P%', 'Team.G/G')); $expected = array('`Team`.`P%`', '`Team`.`G/G`'); $this->assertEqual($expected, $result); $result = $this->testDb->name('Model.name as y'); $expected = '`Model`.`name` AS `y`'; $this->assertEqual($expected, $result); } /** * test that cacheMethod works as exepected * * @return void */ public function testCacheMethod() { $this->testDb->cacheMethods = true; $result = $this->testDb->cacheMethod('name', 'some-key', 'stuff'); $this->assertEqual($result, 'stuff'); $result = $this->testDb->cacheMethod('name', 'some-key'); $this->assertEqual($result, 'stuff'); $result = $this->testDb->cacheMethod('conditions', 'some-key'); $this->assertNull($result); $result = $this->testDb->cacheMethod('name', 'other-key'); $this->assertNull($result); $this->testDb->cacheMethods = false; $result = $this->testDb->cacheMethod('name', 'some-key', 'stuff'); $this->assertEqual($result, 'stuff'); $result = $this->testDb->cacheMethod('name', 'some-key'); $this->assertNull($result); } /** * testLog method * * @outputBuffering enabled * @return void */ public function testLog() { $this->testDb->logQuery('Query 1'); $this->testDb->logQuery('Query 2'); $log = $this->testDb->getLog(false, false); $result = Set::extract($log['log'], '/query'); $expected = array('Query 1', 'Query 2'); $this->assertEqual($expected, $result); $oldDebug = Configure::read('debug'); Configure::write('debug', 2); ob_start(); $this->testDb->showLog(); $contents = ob_get_clean(); $this->assertPattern('/Query 1/s', $contents); $this->assertPattern('/Query 2/s', $contents); ob_start(); $this->testDb->showLog(true); $contents = ob_get_clean(); $this->assertPattern('/Query 1/s', $contents); $this->assertPattern('/Query 2/s', $contents); Configure::write('debug', $oldDebug); } /** * test getting the query log as an array. 
* * @return void */ public function testGetLog() { $this->testDb->logQuery('Query 1'); $this->testDb->logQuery('Query 2'); $log = $this->testDb->getLog(); $expected = array('query' => 'Query 1', 'affected' => '', 'numRows' => '', 'took' => ''); $this->assertEqual($log['log'][0], $expected); $expected = array('query' => 'Query 2', 'affected' => '', 'numRows' => '', 'took' => ''); $this->assertEqual($log['log'][1], $expected); $expected = array('query' => 'Error 1', 'affected' => '', 'numRows' => '', 'took' => ''); } /** * test that query() returns boolean values from operations like CREATE TABLE * * @return void */ public function testFetchAllBooleanReturns() { $name = $this->db->fullTableName('test_query'); $query = "CREATE TABLE {$name} (name varchar(10));"; $result = $this->db->query($query); $this->assertTrue($result, 'Query did not return a boolean'); $query = "DROP TABLE {$name};"; $result = $this->db->query($query); $this->assertTrue($result, 'Query did not return a boolean'); } /** * test order to generate query order clause for virtual fields * * @return void */ public function testVirtualFieldsInOrder() { $Article = ClassRegistry::init('Article'); $Article->virtualFields = array( 'this_moment' => 'NOW()', 'two' => '1 + 1', ); $order = array('two', 'this_moment'); $result = $this->db->order($order, 'ASC', $Article); $expected = ' ORDER BY (1 + 1) ASC, (NOW()) ASC'; $this->assertEqual($expected, $result); $order = array('Article.two', 'Article.this_moment'); $result = $this->db->order($order, 'ASC', $Article); $expected = ' ORDER BY (1 + 1) ASC, (NOW()) ASC'; $this->assertEqual($expected, $result); } /** * test the permutations of fullTableName() * * @return void */ public function testFullTablePermutations() { $Article = ClassRegistry::init('Article'); $result = $this->testDb->fullTableName($Article, false); $this->assertEqual($result, 'articles'); $Article->tablePrefix = 'tbl_'; $result = $this->testDb->fullTableName($Article, false); $this->assertEqual($result, 'tbl_articles'); $Article->useTable = $Article->table = 'with spaces'; $Article->tablePrefix = ''; $result = $this->testDb->fullTableName($Article); $this->assertEqual($result, '`with spaces`'); } /** * test that read() only calls queryAssociation on db objects when the method is defined. 
* * @return void */ public function testReadOnlyCallingQueryAssociationWhenDefined() { $this->loadFixtures('Article', 'User', 'ArticlesTag', 'Tag'); ConnectionManager::create('test_no_queryAssociation', array( 'datasource' => 'MockDataSource' )); $Article = ClassRegistry::init('Article'); $Article->Comment->useDbConfig = 'test_no_queryAssociation'; $result = $Article->find('all'); $this->assertTrue(is_array($result)); } /** * test that fields() is using methodCache() * * @return void */ public function testFieldsUsingMethodCache() { $this->testDb->cacheMethods = false; $this->assertTrue(empty($this->testDb->methodCache['fields']), 'Cache not empty'); $Article = ClassRegistry::init('Article'); $this->testDb->fields($Article, null, array('title', 'body', 'published')); $this->assertTrue(empty($this->testDb->methodCache['fields']), 'Cache not empty'); } /** * testStatements method * * @return void */ public function testStatements() { $this->skipIf(!$this->testDb instanceof DboMysql); $this->loadFixtures('Article', 'User', 'Comment', 'Tag', 'Attachment', 'ArticlesTag'); $Article = new Article(); $result = $this->testDb->update($Article, array('field1'), array('value1')); $this->assertFalse($result); $result = $this->testDb->getLastQuery(); $this->assertPattern('/^\s*UPDATE\s+' . $this->testDb->fullTableName('articles') . '\s+SET\s+`field1`\s*=\s*\'value1\'\s+WHERE\s+1 = 1\s*$/', $result); $result = $this->testDb->update($Article, array('field1'), array('2'), '2=2'); $this->assertFalse($result); $result = $this->testDb->getLastQuery(); $this->assertPattern('/^\s*UPDATE\s+' . $this->testDb->fullTableName('articles') . ' AS `Article`\s+LEFT JOIN\s+' . $this->testDb->fullTableName('users') . ' AS `User` ON \(`Article`.`user_id` = `User`.`id`\)\s+SET\s+`Article`\.`field1`\s*=\s*2\s+WHERE\s+2\s*=\s*2\s*$/', $result); $result = $this->testDb->delete($Article); $this->assertTrue($result); $result = $this->testDb->getLastQuery(); $this->assertPattern('/^\s*DELETE\s+FROM\s+' . $this->testDb->fullTableName('articles') . '\s+WHERE\s+1 = 1\s*$/', $result); $result = $this->testDb->delete($Article, true); $this->assertTrue($result); $result = $this->testDb->getLastQuery(); $this->assertPattern('/^\s*DELETE\s+`Article`\s+FROM\s+' . $this->testDb->fullTableName('articles') . '\s+AS `Article`\s+LEFT JOIN\s+' . $this->testDb->fullTableName('users') . ' AS `User` ON \(`Article`.`user_id` = `User`.`id`\)\s+WHERE\s+1\s*=\s*1\s*$/', $result); $result = $this->testDb->delete($Article, '2=2'); $this->assertTrue($result); $result = $this->testDb->getLastQuery(); $this->assertPattern('/^\s*DELETE\s+`Article`\s+FROM\s+' . $this->testDb->fullTableName('articles') . '\s+AS `Article`\s+LEFT JOIN\s+' . $this->testDb->fullTableName('users') . ' AS `User` ON \(`Article`.`user_id` = `User`.`id`\)\s+WHERE\s+2\s*=\s*2\s*$/', $result); $result = $this->testDb->hasAny($Article, '1=2'); $this->assertFalse($result); } /** * Test that group works without a model * * @return void */ function testGroupNoModel() { $result = $this->db->group('created'); $this->assertEqual(' GROUP BY created', $result); } }
bsd-2-clause
wjw465150/jodd
jodd-json/src/test/java/jodd/json/model/MyFolder3.java
1568
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

package jodd.json.model;

import jodd.json.meta.JSON;

public class MyFolder3 {

    public String getFolder() {
        return folder;
    }

    @JSON(name = "foo.folder")
    private String folder;
}
bsd-2-clause
petr-panteleyev/money-manager
src/main/java/org/panteleyev/money/app/database/TCPEditor.java
5163
/* Copyright (c) Petr Panteleyev. All rights reserved. Licensed under the BSD license. See LICENSE file in the project root for full license information. */ package org.panteleyev.money.app.database; import javafx.event.ActionEvent; import javafx.geometry.Insets; import javafx.scene.control.PasswordField; import javafx.scene.control.TextField; import javafx.scene.image.ImageView; import javafx.scene.layout.Priority; import javafx.scene.layout.VBox; import org.controlsfx.validation.ValidationSupport; import org.panteleyev.money.app.Images; import java.util.List; import java.util.function.Consumer; import static javafx.event.ActionEvent.ACTION; import static org.panteleyev.fx.ButtonFactory.button; import static org.panteleyev.fx.FxUtils.COLON; import static org.panteleyev.fx.FxUtils.fxString; import static org.panteleyev.fx.LabelFactory.label; import static org.panteleyev.fx.grid.GridBuilder.columnConstraints; import static org.panteleyev.fx.grid.GridBuilder.gridCell; import static org.panteleyev.fx.grid.GridBuilder.gridPane; import static org.panteleyev.fx.grid.GridRowBuilder.gridRow; import static org.panteleyev.money.app.MainWindowController.UI; import static org.panteleyev.money.app.Styles.GRID_PANE; import static org.panteleyev.money.bundles.Internationalization.I18N_WORD_LOGIN; import static org.panteleyev.money.bundles.Internationalization.I18N_WORD_PASSWORD; import static org.panteleyev.money.bundles.Internationalization.I18N_WORD_PORT; import static org.panteleyev.money.bundles.Internationalization.I18N_WORD_RESET; import static org.panteleyev.money.bundles.Internationalization.I18N_WORD_SCHEMA; import static org.panteleyev.money.bundles.Internationalization.I18N_WORD_SERVER; final class TCPEditor extends VBox { private final TextField schemaEdit = initSchemaEdit(); private final TextField dataBaseHostEdit = new TextField(); private final TextField dataBasePortEdit = new TextField(); private final TextField dataBaseUserEdit = new TextField(); private final PasswordField dataBasePasswordEdit = new PasswordField(); TCPEditor(ValidationSupport validation, Consumer<ActionEvent> resetSchemaHandler) { var resetSchemaButton = button(fxString(UI, I18N_WORD_RESET)); resetSchemaButton.setGraphic(new ImageView(Images.WARNING)); resetSchemaButton.disableProperty().bind(validation.invalidProperty()); resetSchemaButton.addEventFilter(ACTION, resetSchemaHandler::accept); getChildren().addAll(gridPane( List.of( gridRow(label(fxString(UI, I18N_WORD_SERVER, COLON)), dataBaseHostEdit, label(fxString(UI, I18N_WORD_PORT, COLON)), dataBasePortEdit), gridRow(label(fxString(UI, I18N_WORD_LOGIN, COLON)), gridCell(dataBaseUserEdit, 3, 1)), gridRow(label(fxString(UI, I18N_WORD_PASSWORD, COLON)), gridCell(dataBasePasswordEdit, 3, 1)), gridRow(label(fxString(UI, I18N_WORD_SCHEMA, COLON)), gridCell(schemaEdit, 2, 1), resetSchemaButton) ), b -> b.withStyle(GRID_PANE) .withConstraints(columnConstraints(Priority.NEVER), columnConstraints(Priority.ALWAYS)) )); VBox.setMargin(getChildren().get(0), new Insets(10.0, 10.0, 10.0, 10.0)); } TextField getSchemaEdit() { return schemaEdit; } TextField getDataBaseHostEdit() { return dataBaseHostEdit; } TextField getDataBasePortEdit() { return dataBasePortEdit; } TextField getDataBaseUserEdit() { return dataBaseUserEdit; } String getSchema() { return schemaEdit.getText(); } void setSchema(String schema) { schemaEdit.setText(schema); } String getDataBaseHost() { return dataBaseHostEdit.getText(); } void setDataBaseHost(String host) { dataBaseHostEdit.setText(host); } int 
getDataBasePort() { return Integer.parseInt(dataBasePortEdit.getText()); } void setDataBasePort(int port) { dataBasePortEdit.setText(Integer.toString(port)); } String getDataBaseUser() { return dataBaseUserEdit.getText(); } void setDataBaseUser(String user) { dataBaseUserEdit.setText(user); } String getDataBasePassword() { return dataBasePasswordEdit.getText(); } void setDataBasePassword(String password) { dataBasePasswordEdit.setText(password); } private TextField initSchemaEdit() { var schemaEdit = new TextField(); schemaEdit.setMaxWidth(Double.MAX_VALUE); return schemaEdit; } void setProfile(ConnectionProfile profile) { if (profile != null) { setDataBaseHost(profile.dataBaseHost()); setDataBasePort(profile.dataBasePort()); setDataBaseUser(profile.dataBaseUser()); setDataBasePassword(profile.dataBasePassword()); setSchema(profile.schema()); } else { setDataBaseHost(""); setDataBasePort(3306); setDataBaseUser(""); setDataBasePassword(""); setSchema(""); } } }
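A minimal wiring sketch for the editor above. Only the constructor shape, setProfile(), and the port-3306 default come from the class itself; the surrounding ConnectionPane, how the ConnectionProfile is obtained, and what the reset handler should do are assumptions for illustration.

import javafx.event.ActionEvent;
import org.controlsfx.validation.ValidationSupport;

// Hypothetical host for TCPEditor, assumed to live in the same package.
final class ConnectionPane {
    private final ValidationSupport validation = new ValidationSupport();
    private final TCPEditor editor = new TCPEditor(validation, this::onResetSchema);

    void edit(ConnectionProfile profile) {
        // Passing null falls back to the defaults (empty fields, port 3306).
        editor.setProfile(profile);
    }

    private void onResetSchema(ActionEvent event) {
        // Placeholder: a real handler would drop and recreate the schema
        // currently named in editor.getSchema().
    }
}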
bsd-2-clause
mwilliamson/java-mammoth
src/main/java/org/zwobble/mammoth/internal/archives/Archives.java
357
package org.zwobble.mammoth.internal.archives; import java.io.IOException; import java.io.InputStream; public class Archives { public static InputStream getInputStream(Archive file, String name) throws IOException { return file.tryGetInputStream(name) .orElseThrow(() -> new IOException("Missing entry in file: " + name)); } }
bsd-2-clause
neverfox/homebrew-cask
Casks/ibettercharge.rb
712
cask "ibettercharge" do version "1.0.12,1568119585" sha256 "33de59c5a1157b23f9313348ef60e213200d007e2b16c4f53859ddcb4c66d696" url "https://dl.devmate.com/com.softorino.iBetterCharge/#{version.before_comma}/#{version.after_comma}/iBetterCharge-#{version.before_comma}.zip", verified: "devmate.com/com.softorino.iBetterCharge/" name "iBetterCharge" desc "Battery level monitoring software" homepage "https://softorino.com/ibettercharge/" livecheck do url "https://updates.devmate.com/com.softorino.iBetterCharge.xml" strategy :sparkle do |item| "#{item.short_version},#{item.url[%r{/(\d+)/iBetterCharge-(?:\d+(?:\.\d+)*)\.zip}, 1]}" end end app "iBetterCharge.app" end
bsd-2-clause
pyrus/Pyrus
vendor/php/Pyrus/Developer/CoverageAnalyzer/Web/View.php
16900
<?php namespace Pyrus\Developer\CoverageAnalyzer\Web; use Pyrus\Developer\CoverageAnalyzer\SourceFile; /** * Takes a source file and outputs HTML source highlighting showing the * number of hits on each line, highlights un-executed lines in red */ class View { protected $savePath; protected $testPath; protected $sourcePath; protected $source; protected $controller; function getDatabase() { $output = new \XMLWriter; if (!$output->openUri('php://output')) { throw new Exception('Cannot open output - this should never happen'); } $output->startElement('html'); $output->startElement('head'); $output->writeElement('title', 'Enter a path to the database'); $output->endElement(); $output->startElement('body'); $output->writeElement('h2', 'Please enter the path to a coverage database'); $output->startElement('form'); $output->writeAttribute('name', 'getdatabase'); $output->writeAttribute('method', 'GET'); $output->writeAttribute('action', $this->controller->getTOCLink()); $output->startElement('input'); $output->writeAttribute('size', '90'); $output->writeAttribute('type', 'text'); $output->writeAttribute('name', 'setdatabase'); $output->endElement(); $output->startElement('input'); $output->writeAttribute('type', 'submit'); $output->endElement(); $output->endElement(); $output->endElement(); $output->endElement(); $output->endDocument(); } function setController($controller) { $this->controller = $controller; } function logoutLink(\XMLWriter $output) { $output->startElement('h5'); $output->startElement('a'); $output->writeAttribute('href', $this->controller->getLogoutLink()); $output->text('Current database: ' . $_SESSION['fullpath'] . '. Click to start over'); $output->endElement(); $output->endElement(); } function TOC($sqlite) { $coverage = $sqlite->retrieveProjectCoverage(); $this->renderSummary($sqlite, $sqlite->retrievePaths(), false, $coverage[1], $coverage[0], $coverage[2]); } function testTOC($sqlite, $test = null) { if ($test) { return $this->renderTestCoverage($sqlite, $test); } $this->renderTestSummary($sqlite); } function fileLineTOC($sqlite, $file, $line) { $source = new SourceFile($file, $sqlite, $sqlite->testpath, $sqlite->codepath); return $this->renderLineSummary($file, $line, $sqlite->testpath, $source->getLineLinks($line)); } function fileCoverage($sqlite, $file, $test = null) { if ($test) { $source = new SourceFile\PerTest($file, $sqlite, $sqlite->testpath, $sqlite->codepath, $test); } else { $source = new SourceFile($file, $sqlite, $sqlite->testpath, $sqlite->codepath); } return $this->render($source, $test); } function mangleFile($path, $istest = false) { return $this->controller->getFileLink($path, $istest); } function mangleTestFile($path) { return $this->controller->getTOClink($path); } function getLineLink($name, $line) { return $this->controller->getFileLink($name, null, $line); } function renderLineSummary($name, $line, $testpath, $tests) { $output = new \XMLWriter; if (!$output->openUri('php://output')) { throw new Exception('Cannot render ' . $name . ' line ' . $line . ', opening XML failed'); } $output->setIndentString(' '); $output->setIndent(true); $output->startElement('html'); $output->startElement('head'); $output->writeElement('title', 'Tests covering line ' . $line . ' of ' . 
$name); $output->startElement('link'); $output->writeAttribute('href', 'cover.css'); $output->writeAttribute('rel', 'stylesheet'); $output->writeAttribute('type', 'text/css'); $output->endElement(); $output->endElement(); $output->startElement('body'); $this->logoutLink($output); $output->writeElement('h2', 'Tests covering line ' . $line . ' of ' . $name); $output->startElement('p'); $output->startElement('a'); $output->writeAttribute('href', $this->controller->getTOCLink()); $output->text('Aggregate Code Coverage for all tests'); $output->endElement(); $output->endElement(); $output->startElement('p'); $output->startElement('a'); $output->writeAttribute('href', $this->mangleFile($name)); $output->text('File ' . $name . ' code coverage'); $output->endElement(); $output->endElement(); $output->startElement('ul'); foreach ($tests as $testfile) { $output->startElement('li'); $output->startElement('a'); $output->writeAttribute('href', $this->mangleTestFile($testfile)); $output->text(str_replace($testpath . '/', '', $testfile)); $output->endElement(); $output->endElement(); } $output->endElement(); $output->endElement(); $output->endDocument(); } /** * @param Pyrus\Developer\CodeCoverage\SourceFile $source * @param string $istest path to test file this is covering, or false for aggregate */ function render(SourceFile $source, $istest = false) { $output = new \XMLWriter; if (!$output->openUri('php://output')) { throw new Exception('Cannot render ' . $source->shortName() . ', opening XML failed'); } $output->setIndent(false); $output->startElement('html'); $output->text("\n "); $output->startElement('head'); $output->text("\n "); if ($istest) { $output->writeElement('title', 'Code Coverage for ' . $source->shortName() . ' in ' . str_replace($source->testpath() . DIRECTORY_SEPARATOR, '', $istest)); } else { $output->writeElement('title', 'Code Coverage for ' . $source->shortName()); } $output->text("\n "); $output->startElement('link'); $output->writeAttribute('href', 'cover.css'); $output->writeAttribute('rel', 'stylesheet'); $output->writeAttribute('type', 'text/css'); $output->endElement(); $output->text("\n "); $output->endElement(); $output->text("\n "); $output->startElement('body'); $output->text("\n "); $this->logoutLink($output); if ($istest) { $output->writeElement('h2', 'Code Coverage for ' . $source->shortName() . ' in ' . str_replace($source->testpath() . DIRECTORY_SEPARATOR, '', $istest)); } else { $output->writeElement('h2', 'Code Coverage for ' . $source->shortName()); } $output->text("\n "); $output->writeElement('h3', 'Coverage: ' . $source->coveragePercentage() . '% (Covered lines / Executable lines)'); $info = $source->coverageInfo(); $sourceCode = $source->source(); $total = count($sourceCode); $output->writeRaw('<p><strong>' . $total . '</strong> total lines, of which <strong>' . $info[1] . '</strong> are executable, <strong>' . $info[2] .'</strong> are dead and <strong>' . ($total - $info[2] - $info[1]) . '</strong> are non-executable lines</p>'); $output->writeRaw('<p>Of those <strong>' . $info[1] . '</strong> executable lines there are <strong>' . $info[0] . '</strong> lines covered with tests and <strong>' . ($info[1] - $info[0]) . 
'</strong> lack coverage</p>'); $output->text("\n "); $output->startElement('p'); $output->startElement('a'); $output->writeAttribute('href', $this->controller->getTOCLink()); $output->text('Aggregate Code Coverage for all tests'); $output->endElement(); $output->endElement(); $output->startElement('pre'); foreach ($sourceCode as $num => $line) { $coverage = $source->coverage($num); $output->startElement('span'); $output->writeAttribute('class', 'ln'); $output->text(str_pad($num, 8, ' ', STR_PAD_LEFT)); $output->endElement(); if ($coverage === false) { $output->text(str_pad(': ', 13, ' ', STR_PAD_LEFT) . $line); continue; } $output->startElement('span'); $cov = is_array($coverage) ? $coverage['coverage'] : $coverage; if ($cov === -2) { $output->writeAttribute('class', 'dead'); $output->text(' '); } elseif ($cov < 1) { $output->writeAttribute('class', 'nc'); $output->text(' '); } else { $output->writeAttribute('class', 'cv'); if (!$istest) { $output->startElement('a'); $output->writeAttribute('href', $this->getLineLink($source->name(), $num)); } $text = is_string($coverage) ? $coverage : $coverage['link']; $output->text(str_pad($text, 10, ' ', STR_PAD_LEFT) . ' '); if (!$istest) { $output->endElement(); } } $output->text(': ' . $line); $output->endElement(); } $output->endElement(); $output->text("\n "); $output->endElement(); $output->text("\n "); $output->endElement(); $output->endDocument(); } function renderSummary(Aggregator $agg, array $results, $istest = false, $total = 1, $covered = 1, $dead = 1) { $output = new \XMLWriter; if (!$output->openUri('php://output')) { throw new Exception('Cannot render test summary, opening XML failed'); } $output->setIndentString(' '); $output->setIndent(true); $output->startElement('html'); $output->startElement('head'); if ($istest) { $output->writeElement('title', 'Code Coverage Summary [' . $istest . ']'); } else { $output->writeElement('title', 'Code Coverage Summary'); } $output->startElement('link'); $output->writeAttribute('href', 'cover.css'); $output->writeAttribute('rel', 'stylesheet'); $output->writeAttribute('type', 'text/css'); $output->endElement(); $output->endElement(); $output->startElement('body'); if ($istest) { $output->writeElement('h2', 'Code Coverage Files for test ' . $istest); } else { $output->writeElement('h2', 'Code Coverage Files'); $output->writeElement('h3', 'Total lines: ' . $total . ', covered lines: ' . $covered . ', dead lines: ' . $dead); $percent = 0; if ($total > 0) { $percent = round(($covered / $total) * 100, 1); } $output->startElement('p'); if ($percent < 50) { $output->writeAttribute('class', 'bad'); } elseif ($percent < 75) { $output->writeAttribute('class', 'ok'); } else { $output->writeAttribute('class', 'good'); } $output->text($percent . '% code coverage'); $output->endElement(); } $this->logoutLink($output); $output->startElement('p'); $output->startElement('a'); $output->writeAttribute('href', $this->controller->getTOCLink(true)); $output->text('Code Coverage per PHPT test'); $output->endElement(); $output->endElement(); $output->startElement('ul'); foreach ($results as $i => $name) { $output->flush(); $source = new SourceFile($name, $agg, $agg->testpath, $agg->codepath, null, false); $output->startElement('li'); $percent = $source->coveragePercentage(); $output->startElement('div'); if ($percent < 50) { $output->writeAttribute('class', 'bad'); } elseif ($percent < 75) { $output->writeAttribute('class', 'ok'); } else { $output->writeAttribute('class', 'good'); } $output->text(' Coverage: ' . 
str_pad($percent . '%', 4, ' ', STR_PAD_LEFT)); $output->endElement(); $output->startElement('a'); $output->writeAttribute('href', $this->mangleFile($name, $istest)); $output->text($source->shortName()); $output->endElement(); $output->endElement(); } $output->endElement(); $output->endElement(); $output->endDocument(); } function renderTestSummary(Aggregator $agg) { $output = new \XMLWriter; if (!$output->openUri('php://output')) { throw new Exception('Cannot render tests summary, opening XML failed'); } $output->setIndentString(' '); $output->setIndent(true); $output->startElement('html'); $output->startElement('head'); $output->writeElement('title', 'Test Summary'); $output->startElement('link'); $output->writeAttribute('href', 'cover.css'); $output->writeAttribute('rel', 'stylesheet'); $output->writeAttribute('type', 'text/css'); $output->endElement(); $output->endElement(); $output->startElement('body'); $this->logoutLink($output); $output->writeElement('h2', 'Tests Executed, click for code coverage summary'); $output->startElement('p'); $output->startElement('a'); $output->writeAttribute('href', $this->controller->getTOClink()); $output->text('Aggregate Code Coverage for all tests'); $output->endElement(); $output->endElement(); $output->startElement('ul'); foreach ($agg->retrieveTestPaths() as $test) { $output->startElement('li'); $output->startElement('a'); $output->writeAttribute('href', $this->mangleTestFile($test)); $output->text(str_replace($agg->testpath . '/', '', $test)); $output->endElement(); $output->endElement(); } $output->endElement(); $output->endElement(); $output->endDocument(); } function renderTestCoverage(Aggregator $agg, $test) { $reltest = str_replace($agg->testpath . '/', '', $test); $output = new \XMLWriter; if (!$output->openUri('php://output')) { throw new Exception('Cannot render test ' . $reltest . ' coverage, opening XML failed'); } $output->setIndentString(' '); $output->setIndent(true); $output->startElement('html'); $output->startElement('head'); $output->writeElement('title', 'Code Coverage Summary for test ' . $reltest); $output->startElement('link'); $output->writeAttribute('href', 'cover.css'); $output->writeAttribute('rel', 'stylesheet'); $output->writeAttribute('type', 'text/css'); $output->endElement(); $output->endElement(); $output->startElement('body'); $this->logoutLink($output); $output->writeElement('h2', 'Code Coverage Files for test ' . $reltest); $output->startElement('ul'); $paths = $agg->retrievePathsForTest($test); foreach ($paths as $name) { $source = new SourceFile\PerTest($name, $agg, $agg->testpath, $agg->codepath, $test); $output->startElement('li'); $percent = $source->coveragePercentage(); $output->startElement('div'); if ($percent < 50) { $output->writeAttribute('class', 'bad'); } elseif ($percent < 75) { $output->writeAttribute('class', 'ok'); } else { $output->writeAttribute('class', 'good'); } $output->text(' Coverage: ' . str_pad($source->coveragePercentage() . '%', 4, ' ', STR_PAD_LEFT)); $output->endElement(); $output->startElement('a'); $output->writeAttribute('href', $this->mangleFile($name, $test)); $output->text($source->shortName()); $output->endElement(); $output->endElement(); } $output->endElement(); $output->endElement(); $output->endDocument(); } }
bsd-2-clause
ravikiranj/rkjanardhana-dot-com
stockPrice.py
881
#!/usr/bin/env python import requests import cgi import json ## Extract symbol if it exists arguments = cgi.FieldStorage() symbol = None if arguments != None: symbol = arguments.getvalue("symbol") if symbol == None: symbol = "TRIP" url = "http://dev.markitondemand.com/Api/v2/Quote/json?symbol=" + symbol r = requests.get(url) leanRespJsonString = "{\"error\": \"There was an error fetching the stock price for %s\"}" % (symbol) if r.status_code == 200: resp = json.loads(r.text) leanResp = {'Symbol': resp['Symbol'], 'LastPrice': resp['LastPrice'], 'Change' : resp['Change'], 'ChangePercent' : resp['ChangePercent'] } leanRespJsonString = json.dumps(leanResp, ensure_ascii=False) ## Print response print "Content-Type: application/json" print print leanRespJsonString
bsd-2-clause
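The CGI script above trims the Markit quote down to four fields (Symbol, LastPrice, Change, ChangePercent) or a single error message. A minimal client-side sketch, assuming only that the script is deployed at some URL (the localhost path below is a placeholder) and that the requests package is installed:

import requests

# Placeholder URL -- substitute wherever stockPrice.py is actually served.
resp = requests.get("http://localhost/cgi-bin/stockPrice.py", params={"symbol": "TRIP"})
quote = resp.json()
if "error" in quote:
    print(quote["error"])
else:
    # Keys mirror the trimmed response built by the script above.
    print("{Symbol}: {LastPrice} ({Change}, {ChangePercent}%)".format(**quote))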
ColumbiaCMB/kid_readout
kid_readout/measurement/core.py
40786
""" This module is the core of the measurement subpackage. See __init__.py for documentation. """ import copy_reg import re import logging import inspect import keyword import importlib from numbers import Number from collections import OrderedDict import numpy as np import pandas as pd from kid_readout.measurement import classes CLASS_NAME = '_class' # This is the string used by IO objects to save class names. VERSION = '_version' # This is the string used by IO objects to save class versions. METADATA = '_metadata' # This is the string used by IO objects to save metadata dictionaries. # TODO: decide which names really need to be reserved, and clean this up after add_legacy_origin is refactored. # These names cannot be used for attributes because they are used as part of the public DataFrame interface. IO_CLASS_NAME = 'io_class' # This is the fully-qualified name of the IO class used to read a measurement from disk. ROOT_PATH = 'root_path' # This is the root file or directory from which a measurement was read from disk. NODE_PATH = 'node_path' # This is the node path from the root node to the measurement node. NUMBER = 'number' # This is the index of a single measurement slice from an array-like measurement. # IO_MODULE = 'io_module' # This is the full-qualified name of the module used to read and write legacy data. RESERVED = [IO_CLASS_NAME, ROOT_PATH, NODE_PATH, NUMBER] # io_node_path is included here # These strings have a corresponding private attribute. PRIVATE = ['io_node_path'] # This character separates nodes in a node path. NODE_PATH_SEPARATOR = '/' logger = logging.getLogger(__name__) class Node(object): """ This is an abstract class that represents a node in the tree data structure used to save data. """ _version = 0 def __init__(self): """ Set internal state variables. :return: a new Node instance. """ self._parent = None self._io = None self._io_node_path = None @classmethod def class_name(cls): """ Return the name of the class that should be used to load this object from disk. Usually, this will simply be the name of the class, but this can be overridden if necessary. The return value from this method will be stored on disk and used to recreate the object as an instance of the class with this name. The purpose of this method is to allow classes to recommend some other class that should be used to load their data. For example, the IOList class cannot be instantiated using the data that it writes to disk, so its class_name() returns MeasurementList, which is a class that can load the data. Returns ------- str The class name. """ return cls.__name__ @property def io_node_path(self): """ Return a string representing the node path last used to write or read this node. When a node is written to or read from disk it is tagged with the path used by the IO object to write or read it, and that path will be the return value until it is updated by another save or load. Returns ------- str The node path of this Node on disk. """ return self._io_node_path @property def current_node_path(self): """ Return a string representing the node path of this node according to the current structure of its tree. This path is always relative to the top-level node, which is always a Node and never an IO object. For example, if this node is the Stream stored at index 3 in the list of Streams in the Sweep of a SweepStream, this would return '/sweep/streams/3' regardless of whether some or all of these measurements have been written to disk. Thus, it will always differ from `io_node_path`. 
Because this function has to traverse the contents of each parent, it could be slow for large structures. Returns ------- str The node path of this Node. """ if self._parent is None: return NODE_PATH_SEPARATOR else: return join(self._parent.current_node_path, self._parent._locate(self)) def add_origin(self, dataframe): """ Add to the given dataframe enough information to load the data from which it was created. This method adds columns named for the IO_CLASS_NAME, ROOT_PATH, and NODE_PATH variables in this module; the columns contain the IO class, the path to the root file or directory, and the node path to this node. The from_series() function in this module can use this information to load the original data. If this node was not loaded from disk then it has no origin information and the values of the above will all be None. If this is the case, this method will attempt to add origin information for each child node, using the attribute name as a prefix. For example, if a measurement has an attribute `child`, then this function will create columns `io_class_name`: None etc., because the top-level node has no origin information, and will also create columns `child.io_class_name`: NCFile and so on. The from_series() function will be able to load the child measurements once the prefix is stripped. Parameters ---------- dataframe : pandas.DataFrame The dataframe to which this method will add origin information. """ try: dataframe[IO_CLASS_NAME] = self._io.__class__.__name__ dataframe[ROOT_PATH] = self._io.root_path dataframe[NODE_PATH] = self._io_node_path except AttributeError: # This node has not been read from or written to disk, so try its children. dataframe[IO_CLASS_NAME] = None dataframe[ROOT_PATH] = None dataframe[NODE_PATH] = None for key, value in self.__dict__.items(): if not key.startswith('_') and isinstance(value, Node): try: dataframe['.'.join((key, IO_CLASS_NAME))] = value._io.__class__.__name__ dataframe['.'.join((key, ROOT_PATH))] = value._io.root_path dataframe['.'.join((key, NODE_PATH))] = value._io_node_path except AttributeError: dataframe['.'.join((key, IO_CLASS_NAME))] = None dataframe['.'.join((key, ROOT_PATH))] = None dataframe['.'.join((key, NODE_PATH))] = None def _locate(self, node): """ Subclasses should implement this method to enable nodes to discover their location in the node tree: self._parent._locate(self) returns the node path relative to the parent node. Parameters ---------- node : Node The node to locate. Returns ------- str The node name of the given node; depending on how it is stored in the current node, this could be an attribute name, a dictionary key, or string representation of an integer sequence index. Raises ------ AttributeError If the given node is not contained in this node. """ pass def __setattr__(self, key, value): """ This differs from object.__setattr__() only in that it sets the _parent attribute of public Node instances. The goal is to ensure that child nodes always have a link to their parent. If a node is set as an attribute of multiple nodes, its _parent will be the node to which it was most recently added. Parameters ---------- key : str The attribute name to set. value : object The attribute value to set. Returns ------- None """ if not key.startswith('_') and isinstance(value, Node): value._parent = self super(Node, self).__setattr__(key, value) class Measurement(Node): """ This class represents a measurement. A measurement specifies a data format on disk, and can contain analysis code for that data format. 
To create a new measurement, write a subclass that obeys the restrictions described in the module docstring and below. Array dimensions. Each measurement has a dimensions class attribute that contains metadata for its array dimensions. This is necessary for the netCDF4 IO class to handle the array dimensions correctly, and it also allows the classes to check the dimensions of their arrays on instantiation through the _validate_dimensions() method. The format of the an entry in the dimensions OrderedDict is 'array_name': dimension_tuple, where dimension tuple is a tuple of strings that are the names of the dimensions. To pass validation, each dimension name must correspond to an attribute or property of the class that is a 1-D array with size equal to the corresponding element of array.shape. Thus, arrays that have a given dimension must all have the same length along that dimension. For example, if there is an entry 's21_raw': ('time', 'frequency) and another entry 'frequency': ('frequency',) then s21_raw.shape[1] == frequency.size must be True. If the array corresponding to some dimension is not intended to be saved, it can be implemented as a property. For example, in the case above, the 'time' dimension could be implemented as a property. The instance would still pass validation as long as s21_raw.shape[0] == time.size were True. Content restrictions. Measurements store state information in a dictionary. (They actually use a subclass called StateDict, which has extra access features and validation.) Supporting multiple """ _version = 0 dimensions = OrderedDict() def __init__(self, state=None, description='', validate=True): """ Return a new Measurement instance. Setting validate=False allows improperly-formed measurements to be created in special cases. Note that such a measurement may not be saved correctly -- or at all -- by the IO classes. Parameters ---------- state : dict A dictionary of state information that should be valid throughout the measurement period. description : str A verbose description of the measurement. validate : bool If true, verify that the array shapes are correctly described by the dimensions OrderedDict. Raises ------ ValueError If the array shapes do not pass validation. """ super(Measurement, self).__init__() if state is None: state = {} self.state = StateDict(state) self.description = description if validate: self._validate_dimensions() def as_class(self, class_): public = dict([(k, v) for k, v in self.__dict__.items() if not k.startswith('_')]) return class_(**public) def to_dataframe(self): """ This method should return a pandas DataFrame containing state information and analysis products, such as fit parameters, but not large objects such as time-ordered data. Returns ------- pandas.DataFrame A DataFrame containing data from this measurement. """ return pd.DataFrame({'description': self.description}, index=[0]) # TODO: decide how to implement this, if at all. 
""" def add_legacy_origin(self, dataframe): dataframe['io_module'] = 'kid_readout.measurement.legacy' dataframe['root_path'] = self._root_path """ def _validate_dimensions(self): for name, dimension_tuple in self.dimensions.items(): if not getattr(self, name).shape == tuple(getattr(self, dimension).size for dimension in dimension_tuple): raise ValueError("Shape of {} does not match size of {}.".format(name, dimension_tuple)) def _locate(self, node): for key, value in self.__dict__.items(): if node is value: return key raise AttributeError("Node {} is not contained in {}.".format(repr(Node), repr(self))) def __eq__(self, other): """ Recursively compare two measurements. At each level, the function tests that both instances have the same public attributes (meaning those that do not start with an underscore), that all these attributes are equal, and that the classes of the measurements are equal. Because the data we store mixes booleans and numbers, boolean values stored as attributes are compared using identity, not equality. Note that this is not done within containers. The function does not compare private attributes, and does not even check whether the instances have the same private attributes. Parameters ---------- other : Measurement The measurement to compare to self. Returns ------- bool True if self compares equal with other, and False if not. """ try: keys_s = ['__class__'] + [k for k in self.__dict__ if not k.startswith('_')] keys_o = ['__class__'] + [k for k in other.__dict__ if not k.startswith('_')] assert set(keys_s) == set(keys_o) for key in keys_s: value_s = getattr(self, key) value_o = getattr(other, key) if issubclass(value_s.__class__, Measurement): assert value_s.__eq__(value_o) elif issubclass(value_s.__class__, MeasurementList): assert len(value_s) == len(value_o) for meas_s, meas_o in zip(value_s, value_o): assert meas_s.__eq__(meas_o) # This allows arrays to contain NaN and be equal. elif isinstance(value_s, np.ndarray) or isinstance(value_o, np.ndarray): assert np.all(np.isnan(value_s) == np.isnan(value_o)) assert np.all(value_s[~np.isnan(value_s)] == value_o[~np.isnan(value_o)]) else: # This will fail for NaN or sequences that contain any NaN values. if isinstance(value_s, bool) or isinstance(value_o, bool): assert value_s is value_o else: assert value_s == value_o except AssertionError: return False return True def __ne__(self, other): """The Python object model recommends implementing this if __eq__() is implemented.""" return not self.__eq__(other) # Recommended by the Python object model docs. __hash__ = None class MeasurementList(list, Node): """ This class implements all the list methods. It should contain only Node instances, such as Measurement subclasses. Measurements containing lists of Measurements must use instances of this class so that loading and saving are handled correctly. 
""" _version = 0 def __init__(self, iterable=()): super(MeasurementList, self).__init__(iterable) Node.__init__(self) for item in self: item._parent = self def _locate(self, node): for index, value in enumerate(self): if node is value: return str(index) raise AttributeError("Node {} is not contained in {}.".format(repr(Node), repr(self))) def append(self, item): item._parent = self super(MeasurementList, self).append(item) def extend(self, iterable): for item in iterable: item._parent = self super(MeasurementList, self).extend(iterable) def insert(self, index, item): item._parent = self super(MeasurementList, self).insert(index, item) def __setitem__(self, key, value): value._parent = self super(MeasurementList, self).__setitem__(key, value) def __repr__(self): return '{}({})'.format(self.__class__.__name__, super(MeasurementList, self).__repr__()) class IOList(MeasurementList): """ This class acts like a MeasurementList that writes Measurements to disk as they are added to the list. It can only be created empty, and implements only the append() and extend() methods. To use this class, pass it as an argument when instantiating a class that normally contains a MeasurementList, save that class to disk, then use the append() or extend() methods to save measurements directly to disk instead of storing them in memory. The IO class must remain open until writing is finished. """ @classmethod def class_name(cls): return cls.__base__.__name__ def __init__(self): super(IOList, self).__init__() self._len = 0 def append(self, item): self._io.write(item, join(self._io_node_path, str(len(self)))) self._len += 1 def extend(self, iterable): for item in iterable: self.append(item) def insert(self, index, item): raise NotImplementedError() def remove(self, value): raise NotImplementedError() def pop(self, index=None): raise NotImplementedError() def index(self, value, start=None, stop=None): raise NotImplementedError() def count(self, value): raise NotImplementedError() def sort(self, cmp=None, key=None, reverse=False): raise NotImplementedError() def reverse(self): raise NotImplementedError() def __iter__(self): """ Instances of this class appear like empty lists, so iteration stops immediately. :return: an empty iterator. """ return iter(()) def __len__(self): return self._len def __getitem__(self, item): raise NotImplementedError() def __setitem__(self, key, value): raise NotImplementedError() def __repr__(self): return '{}({}, {})'.format(self.__class__.__name__, self._io, self._io_node_path) class MeasurementError(Exception): """ Raised for module-specific errors. """ pass # ToDo: when a dict is added after construction, it is not converted to a StateDict so there is no type checking class StateDict(dict): """ This class adds attribute access and some content restrictions to the dict class. To support attribute access, keys must be strings that are valid Python attributes The value restrictions are the union of the restrictions imposed by the libraries used to write non-array data to disk. (Currently, these are netCDF4 for the nc.NCFile class and JSON for the npy.NumpyDirectory class.) -basic types, such as numbers, booleans, strings, booleans, and None; -sequences, i.e. lists, tuples, and arrays, that contain exactly one of the above basic types except None; sequences cannot contain other sequences -dictionaries that obey the key and value restrictions. Classes that do not obey these restrictions may fail to save or may have missing or corrupted data when read from disk. 
Two values that are particularly problematic are None and NaN. None requires a special case because netCDF4 cannot write it, so it is allowed as an attribute value or dictionary value but not in sequences. NaN is stored and retrieved correctly but is not recommended as an indicator of missing state because it causes comparisons to fail: float('NaN') == float('NaN') is always False. See Measurement.__eq__() for more. Origin of restrictions: -JSON has only a single sequence type, so all iterable non-dictionary objects are converted to lists on input; -netCDF cannot write None, so it cannot be an element of a sequence; """ __setattr__ = dict.__setitem__ __getattr__ = dict.__getitem__ __delattr__ = dict.__delitem__ __copy__ = lambda self: StateDict(self) __getstate__ = lambda: None __slots__ = () ALLOWED_VALUE_TYPES = (Number, str, unicode) #(Number, np.number, str, unicode) _invalid_key_type = "Dictionary keys must be strings." _invalid_variable_name = "Invalid variable name: {0}" _invalid_value = "Key {0} maps to invalid value: {1!s} ({1!r})" _invalid_sequence_value = "Key {0} maps to a sequence containing invalid value: {1!s} ({1!r})" def __init__(self, *args, **kwargs): super(StateDict, self).__init__(*args, **kwargs) for key, value in self.items(): if not isinstance(key, (str, unicode)): raise MeasurementError(self._invalid_key_type) elif re.match(r'[a-zA-Z_][a-zA-Z0-9_]*$', key) is None or keyword.iskeyword(key) or key in __builtins__: raise MeasurementError(self._invalid_variable_name.format(key)) if isinstance(value, dict): self[key] = StateDict(value) else: # Given that we are testing for sequence-ness using iteration, we have to try value validation first # because strings are iterable. try: self[key] = self._validate_value(key, value) except ValueError as e: try: self[key] = self._validate_list(key, list(value)) except TypeError: # Not iterable, and str would have passed value validation raise e def _validate_value(self, key, value): if value is None or isinstance(value, self.ALLOWED_VALUE_TYPES): return value else: raise ValueError(self._invalid_value.format(key, value)) def _validate_list(self, key, list_): for index, element in enumerate(list_): if not isinstance(element, self.ALLOWED_VALUE_TYPES): try: list_[index] = self._validate_list(key, list(element)) except TypeError: # Not iterable, and str would have passed value validation raise ValueError(self._invalid_sequence_value.format(key, element)) return list_ def __repr__(self): return '{}({})'.format(self.__class__.__name__, super(StateDict, self).__repr__()) def flatten(self, prefix='', wrap_lists=False): results = StateDict() for k, v in self.items(): this_label = k if prefix: this_label = prefix + '_' + this_label if isinstance(v, StateDict): results.update(v.flatten(prefix=this_label, wrap_lists=wrap_lists)) elif wrap_lists and isinstance(v, list): results[this_label] = [v] else: results[this_label] = v return results def pickle_state(s): return StateDict, (dict(s),) copy_reg.pickle(StateDict, pickle_state) class IO(object): """ This is an abstract class that specifies the IO interface. Implementations should implement the abstract methods and should be able to store large numpy arrays efficiently, as well as the following objects: -basic types, such as numbers, booleans, strings, and None; -sequences, i.e. lists, tuples, and arrays, that contain exactly one of the above basic types; sequences cannot contain None or other sequences, and all input sequences are returned as lists on read. 
-dictionaries whose keys are strings and whose values are dictionaries, sequences, or basic types; the contained dictionaries and sequences have the same requirements as above. Classes that do not obey these restrictions may fail to save or may have missing or corrupted data when read from disk. See StateDict for details. """ # Subclasses can define a conventional extension for files or directories they create. EXTENSION = '' def __init__(self, root_path, metadata=None): """ Return a new IO object that will read to or write from the given root directory or file. If the root does not exist, it should be created. Implementations should never clobber an existing file and should make it difficult to overwrite data, though appending to or modifying existing data may be useful. Parameters ---------- root_path : str The path to the root directory or file. metadata : dict If the root does not exist, write this dict to the root node. """ self.root_path = root_path if self._root_path_exists(self.root_path): if metadata is not None: raise ValueError("Cannot set metadata for an existing root: {}".format(root_path)) self._root = self._open_existing(self.root_path) try: metadata = self.read_other('/', METADATA) if metadata is not None: self.metadata = StateDict(metadata) else: self.metadata = None except ValueError: self.metadata = None else: self._root = self._create_new(self.root_path) self.write_other('/', METADATA, metadata) self.metadata = metadata # These private methods must be implemented by subclasses. def _root_path_exists(self, root_path): return False def _open_existing(self, root_path): return None def _create_new(self, root_path): return None # These public methods should work. @property def closed(self): """ Returns ------- bool True if all files on disk are closed. """ return True def default_name(self, node): """ Return a name for the given Node subclass or instance that is guaranteed to be unique at the root level. Parameters ---------- node : Node An instance or subclass. Returns ------- str A string consisting of the class name and a number that is one plus the number of nodes already stored at the root level, guaranteeing uniqueness. """ return node.class_name() + str(len(self.node_names())) def write(self, node, node_path=None): """ Write the node to disk at the given node path. If no node path is specified, write at the root level using the name given by self.default_name(). If a node path is specified, all but the final node must already exist. Parameters ---------- node : Node The instance to write to disk. node_path : str The node path to the node that will contain this object. """ if node_path is None: node_path = self.default_name(node) elif node_path == NODE_PATH_SEPARATOR: raise MeasurementError("Nothing may be written to the IO root.") validate_node_path(node_path) if node_path.startswith(NODE_PATH_SEPARATOR): absolute_node_path = node_path else: absolute_node_path = NODE_PATH_SEPARATOR + node_path self._write_node(node, absolute_node_path) logger.info("Wrote {} to node path {}".format(node.__class__.__name__, absolute_node_path)) def read(self, node_path, translate=None, force=False): """ Read a measurement from disk and return it. The `force` keyword is intended for inspecting measurements for which the data on disk does not match the class structure; see _instantiate(). Parameters ---------- node_path : str The path to the node to be loaded, in the form 'node0/node1/node2' or '/node0/node1/node2'. 
translate : dict A dictionary with entries 'original_class': 'new_class'; class names must be fully-qualified. force : bool If True, attempt to create the classes specified on disk even if the variables do not match. Returns ------- Measurement The data stored to the given node, including all other measurements it contains. """ validate_node_path(node_path) if node_path == NODE_PATH_SEPARATOR: raise MeasurementError("Nothing may be read from the IO root.") if not node_path.startswith(NODE_PATH_SEPARATOR): absolute_node_path = NODE_PATH_SEPARATOR + node_path else: absolute_node_path = node_path if translate is None: translate = {} return self._read_node(node_path=absolute_node_path, translate=translate, force=force) # The remaining public methods should be implemented by subclasses. # TODO: update comments, especially with exceptions raised and handling of private variables. def close(self): """ Close open files. """ pass def create_node(self, node_path): """ Create a node at the end of the given path; all but the final node in the path must already exist. """ pass def write_other(self, node_path, key, value): """ Write value to node_path with name key; value should not be a numpy array. """ pass def write_array(self, node_path, key, value, dimensions): """ Write value, a numpy array, to node_path with name key. """ pass def read_array(self, node_path, key): """ Read array key from node_path. """ pass def read_other(self, node_path, key): """ Read non-array object with name key from node_path. """ pass def node_names(self, node_path='/'): """ Return the names of all nodes contained in the node at node_path. """ pass def array_names(self, node_path): """ Return the names of all arrays contained in the node at node_path. """ pass def other_names(self, node_path): """ Return the names of all other variables contained in the node at node_path. """ pass # Private methods def __getattr__(self, item): if item in self.node_names(): return self.read(item) else: raise AttributeError("Node name not found: {}".format(item)) def __dir__(self): attrs = dir(self.__class__) + self.__dict__.keys() + self.node_names() return list(set([attr for attr in attrs if not attr.startswith('_')])) def __repr__(self): return '{}({})'.format(self.__class__.__name__, repr(self.root_path)) def _write_node(self, node, node_path): """ Write the data in node to a new node at the given node path. Parameters ---------- node : Node This will usually be a subclass of Measurement or MeasurementList. node_path : str The path of the new node into which the instance will be written. """ self.create_node(node_path) self.write_other(node_path, CLASS_NAME, node.class_name()) if hasattr(node, '_version'): self.write_other(node_path, VERSION, getattr(node, '_version')) else: self.write_other(node_path, VERSION, None) for key, value in node.__dict__.items(): if not key.startswith('_'): if isinstance(value, Node): self._write_node(value, join(node_path, key)) elif hasattr(node, 'dimensions') and key in node.dimensions: pass # Skip array writing on the first pass so that the dimensions can be created in order. else: self.write_other(node_path, key, value) if isinstance(node, MeasurementList): for index, child in enumerate(node): self._write_node(child, join(node_path, str(index))) # Saving arrays in order allows the netCDF group to create the dimensions. 
if hasattr(node, 'dimensions'): for array_name, dimensions in node.dimensions.items(): self.write_array(node_path, array_name, getattr(node, array_name), dimensions) # Update the node with information about how it was saved. node._io = self node._io_node_path = node_path def _read_node(self, node_path, translate, force): saved_class_name = self.read_other(node_path, CLASS_NAME) try: version = self.read_other(node_path, VERSION) except ValueError: version = None full_class_name = translate.get(saved_class_name, classes.full_name(saved_class_name, version)) class_ = get_class(full_class_name) measurement_names = self.node_names(node_path) if issubclass(class_, MeasurementList): # Use the name of each measurement, which is an int, to restore the order in the sequence. contents = [self._read_node(join(node_path, measurement_name), translate, force) for measurement_name in sorted(measurement_names, key=int)] node = class_(contents) else: variables = {} for measurement_name in measurement_names: variables[measurement_name] = self._read_node(join(node_path, measurement_name), translate, force) array_names = self.array_names(node_path) for array_name in array_names: variables[array_name] = self.read_array(node_path, array_name) for other_name in self.other_names(node_path): variables[other_name] = self.read_other(node_path, other_name) node = _instantiate(class_, variables, force) # Update the node with information about how it was loaded. node._io = self node._io_node_path = node_path return node # Class-related functions def get_class(full_class_name): module_name, class_name = full_class_name.rsplit('.', 1) module = importlib.import_module(module_name) return getattr(module, class_name) # ToDo: look at effect of None in number field, and handle it here def from_series(series): io_class = get_class(classes.full_name(class_name=series[IO_CLASS_NAME], version=None)) io = io_class(series[ROOT_PATH]) node = io.read(series[NODE_PATH]) if NUMBER in series and pd.notnull(series[NUMBER]): return node[series[NUMBER]] else: return node def _instantiate(class_, variables, force): """ Return a new instance of `class_` using the `variables` dict. If `force == True`, variables required by __init__() that are not present will be passed as None, and variables on disk that are not in the instantiation signature will be monkey-patched in after instantiation. Additionally, for measurements that have the `validate` argument (as all measurements that directly contain arrays should), it will be set to False to reduce the chance of errors. While this should work in many cases, it is not guaranteed to load arbitrary data successfully. If a forced instantiation still fails, many different errors could be raised. Parameters ---------- class_ : type The class to instantiate. variables : dict A mapping from argument name to argument value. force : bool If True, attempt to force the measurement to instantiate; see IO.read(). Raises ------ MeasurementError If `force == False` and the keys of `variables` do not match the __init__() signature. Returns ------- class_ A new instance. """ # The Measurement framework does not support varargs or keywords, so these should be None. args, varargs, keywords, defaults = inspect.getargspec(class_.__init__) # Populate this list from last to first. arg_values = [] for arg, default in zip(reversed(args), reversed(defaults)): if arg == 'validate' and force: arg_values.append(False) # Skip validation to increase the chances of success. 
else: arg_values.append(variables.get(arg, default)) for arg in reversed(args[1:-len(defaults)]): # Skip the first arg, which is 'self'. try: arg_values.append(variables[arg]) except KeyError: if force: arg_values.append(None) else: raise MeasurementError("A required argument is not present on disk: {}".format(arg)) instance = class_(*reversed(arg_values)) extras = set(variables.keys()) - set(args) if force: for key in extras: setattr(instance, key, variables[key]) # Monkey-patch. else: if extras: raise MeasurementError("Extra values are present on disk: {}".format(extras)) return instance # Node-related functions def join(node_path, *node_paths): """ Join the given node paths into a single path. The code is copied from os.path.join(). Note that the last path in *node_paths that is absolute will replace node_path as the base, and subsequent paths will be appended to it. This function does not test validity of either the inputs or output. Parameters ---------- node_path : str The base of the node path. node_paths : iterable of str Additional node paths to join to the base. Returns ------- str The joined node path. """ path = node_path for p in node_paths: if p.startswith(NODE_PATH_SEPARATOR): path = p elif path == '' or path.endswith(NODE_PATH_SEPARATOR): path += p else: path += NODE_PATH_SEPARATOR + p return path def split(node_path): """ Split the given node path into a (head, tail) tuple, either element of which may be empty. The code is copied from os.path.split(). Examples: split('one/two/three') -> ('one/two', 'three') split('/one') -> ('/', 'one') split('/') -> ('/', '') This function does not test validity of either the inputs or outputs. Parameters ---------- node_path : str The node path to split. Returns ------- (str, str) head, tail are respectively the path except for the last node and the last node. """ last_separator_index = node_path.rfind(NODE_PATH_SEPARATOR) + 1 head, tail = node_path[:last_separator_index], node_path[last_separator_index:] if head and head != NODE_PATH_SEPARATOR * len(head): head = head.rstrip(NODE_PATH_SEPARATOR) return head, tail def explode(node_path): """ Return a list of the node names in the given node path with the node separator removed. Empty names are dropped. Examples: explode('one/two/three') -> ['one', 'two', 'three'] explode('/one') -> ['one'] explode('//') -> [] Parameters ---------- node_path : str The node path to explode into individual nodes. Returns ------- list A list of strings that are the individual nodes in the path. """ if node_path.startswith(NODE_PATH_SEPARATOR): # Avoid an empty string at the start for valid absolute paths. node_path = node_path[1:] if not node_path: return [] else: return node_path.split(NODE_PATH_SEPARATOR) def validate_node_path(node_path): """ Raise an exception if the given node path is not correctly formed. A valid node path is a string sequence of zero or more valid node names separated by slashes; an otherwise valid node path may or may not also begin with a slash. A valid node name is either a valid Python variable or a nonnegative integer. Note that, unlike a unix path, a valid node path may not end with the node path separator unless it is the root. Parameters ---------- node_path : str The node path to validate. Returns ------- None Raises ------ MeasurementError if the given node path is not valid. 
""" if not node_path: raise MeasurementError("Empty node path.") if node_path != NODE_PATH_SEPARATOR: for node in explode(node_path): if not node: raise MeasurementError("Empty node in {}".format(node_path)) elif re.match(r'[0-9]*$|[_a-zA-Z][_a-zA-Z0-9]*$', node) is None: raise MeasurementError("Invalid node {} in {}".format(node, node_path))
bsd-2-clause
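The node-path helpers near the end of core.py (join, split, explode, validate_node_path) document their behaviour with inline examples. A small sketch exercising them, assuming a Python 2 environment in which kid_readout is importable (the module uses unicode and copy_reg, so it is Python 2 code):

from kid_readout.measurement.core import join, split, explode, validate_node_path

# These follow the docstring examples in the module above.
assert join('one', 'two', 'three') == 'one/two/three'
assert split('one/two/three') == ('one/two', 'three')
assert split('/one') == ('/', 'one')
assert explode('/one') == ['one']
validate_node_path('sweep/streams/3')  # node names must be identifiers or non-negative integers
# validate_node_path('sweep/') would raise MeasurementError because of the empty trailing node.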
dscorbett/pygments
doc/examples/example.py
317
from typing import Iterator # This is an example class Math: @staticmethod def fib(n: int) -> Iterator[int]: """ Fibonacci series up to n """ a, b = 0, 1 while a < n: yield a a, b = b, a + b result = sum(Math.fib(42)) print("The answer is {}".format(result))
bsd-2-clause
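For reference, Math.fib(42) in the snippet above yields the Fibonacci terms below 42, so the printed answer can be checked by hand:

# Terms yielded by Math.fib(42): 0, 1, 1, 2, 3, 5, 8, 13, 21, 34
print(sum([0, 1, 1, 2, 3, 5, 8, 13, 21, 34]))  # 88, so the script prints "The answer is 88"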
gameduell/input
backends/input_android/java/org/haxe/duell/input/keyboard/TextField.java
6633
/* * Copyright (c) 2003-2015, GameDuell GmbH * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.haxe.duell.input.keyboard; import android.text.Editable; import android.text.TextWatcher; import org.haxe.duell.DuellActivity; import org.haxe.duell.hxjni.HaxeObject; import org.haxe.duell.input.DuellInputActivityExtension; import org.haxe.duell.input.event.CentralHaxeDispatcher; import java.util.BitSet; import java.util.regex.Pattern; import android.util.Log; public class TextField implements KeyboardViewDelegate, TextWatcher { private final CentralHaxeDispatcher dispatcher; private KeyboardView keyboardView; private String text; private BitSet validCharacters; private boolean eatEvent; public static TextField init(HaxeObject hxObject) { return new TextField(hxObject); } private TextField(HaxeObject hxObject) { dispatcher = new CentralHaxeDispatcher(hxObject); text = ""; validCharacters = new BitSet(256); final Thread currentThread = Thread.currentThread(); DuellActivity.getInstance().runOnUiThread(new Runnable() { @Override public void run() { keyboardView = new KeyboardView(DuellActivity.getInstance()); keyboardView.setDelegate(TextField.this); DuellInputActivityExtension.extension.get().setManagedKeyboardView(keyboardView); keyboardView.addTextChangedListener(TextField.this); currentThread.interrupt(); } }); long timestamp = System.currentTimeMillis(); try { Thread.sleep(1000); } catch (InterruptedException e) { Log.d("duell", "TextField ready after " + (System.currentTimeMillis() - timestamp) + " ms"); } } public void setAllowedCharCodes(boolean[] charCodes) { if (charCodes.length > validCharacters.length()) { // expand the size if needed validCharacters = new BitSet(charCodes.length); } // reset all flags to false validCharacters.clear(); for (int i = 0; i < charCodes.length; i++) { validCharacters.set(i, charCodes[i]); } } public boolean show() { return keyboardView.show(); } public boolean hide() { return keyboardView.hide(); } @Override public void willShow() { dispatcher.dispatchEvent(CentralHaxeDispatcher.INPUT_STARTED_EVENT, null); } @Override public void willHide() { dispatcher.dispatchEvent(CentralHaxeDispatcher.INPUT_ENDED_EVENT, null); final String string = keyboardView.getText().toString(); // manually call this to reset the string from the 
edittext, ensure nothing was broken by the keyboard dismissing onTextChanged(string, 0, 0, string.length()); } @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { // event was flagged as eaten, don't execute logic if (eatEvent) { return; } String string = text; if (string.length() > s.length()) { // keyboardViewDidDeleteBackward string = s.toString(); } else if (string.length() <= s.length()) { // keyboardViewDidInsertText string = s.toString(); String processedText = string; for (int i = 0; i < string.length(); i++) { if (!validCharacters.get(string.charAt(i))) { // force event to be eaten, as this method will execute again eatEvent = true; processedText = processedText.replaceAll(Pattern.quote("" + string.charAt(i)), ""); } if (eatEvent) { // set the text back and update the keyboard view string = processedText; final CharSequence text = s; DuellActivity.getInstance().runOnUiThread(new Runnable() { @Override public void run() { keyboardView.setText(text); // point the cursor to the end, its position gets messed up after replacing keyboardView.setSelection(Math.max(0, text.length() - 1)); } }); } } } text = string; dispatcher.dispatchEvent(CentralHaxeDispatcher.TEXT_CHANGED_EVENT, text); } @Override public void afterTextChanged(Editable s) { if (eatEvent) { eatEvent = false; } } public void setText(final String s) { text = s; DuellActivity.getInstance().runOnUiThread(new Runnable() { @Override public void run() { eatEvent = true; keyboardView.setText(s); keyboardView.setSelection(s.length()); } }); } }
bsd-2-clause
nicolasdespres/hunittest
hunittest/runner.py
5732
# -*- encoding: utf-8 -*- """Routines to execute test suites. """ import unittest import multiprocessing as mp import time import sys import traceback from collections import namedtuple from hunittest.unittestresultlib import HTestResultClient from hunittest.unittestresultlib import TestResultMsg from hunittest.coveragelib import CoverageInstrument class _ErrMsg(namedtuple("ErrMsg", ("type", "value", "msg"))): """Message representing an uncaught exception raised in the worker process. """ def print_exception(self, prefix): for item in self.msg: for line in item.splitlines(): print(prefix, line) _testLoader = unittest.loader.defaultTestLoader def _worker_run_aux(test_name, result): test = _testLoader.loadTestsFromNames([test_name]) test(result) def _worker_run(conn, worker_id, worker_kwargs, cov_args): """Executed in the worker process. """ if cov_args is None: cov_args = {} cov = CoverageInstrument(**cov_args) with conn, cov: result = HTestResultClient(worker_id, conn, **worker_kwargs) done = False while not done: try: msg = conn.recv() except EOFError: raise RuntimeError("parent process of worker {} probably " "died unexpectedly.".format(worker_id)) else: if msg is None: done = True elif isinstance(msg, str): try: _worker_run_aux(msg, result) except (Exception, KeyboardInterrupt, SystemExit) as e: msg_obj = _ErrMsg( type(e), e, traceback.format_exception(*sys.exc_info())) conn.send((worker_id, msg_obj)) done = True else: raise RuntimeError("worker {} received unexpected message: " "{!r}".format(worker_id, msg)) def run_concurrent_tests(test_names, result, njobs=1, cov_args=None, worker_kwargs=None): """Run multiple tests concurrently using multiple process. This function is executed in the master process. It distribute tests to each worker. The scheduling is trivial: when a worker finished the next not-yet-run test spec is sent to it. Workers later send a TestResultMsg back to the master process. If an error occurred the worker is stopped and never re-spawned. A bidirectional connection pipe connects the master process to each of its worker. """ def start_test(conn, test_name): conn.send(test_name) result.startTest(test_name) def stop_worker(conn): """Tell the worker connected to the given *conn* pipe to stop.""" conn.send(None) ntest = len(test_names) if ntest == 0: return nproc = min(ntest, njobs) ### Create workers conns = [] workers = [] for i in range(nproc): my_conn, worker_conn = mp.Pipe() proc = mp.Process(target=_worker_run, args=(worker_conn, i, worker_kwargs, cov_args)) conns.append(my_conn) workers.append(proc) proc.start() # We close the writable end of the pipe now to be sure that # p is the only process which owns a handle for it. This # ensures that when p closes its handle for the writable end, # wait() will promptly report the readable end as being ready. worker_conn.close() ### Distribute work try: # Bootstrap the pool of workers by sending one test to each of them. t = 0 while t < nproc: start_test(conns[t], test_names[t]) t += 1 while conns: for conn in mp.connection.wait(conns): try: msg = conn.recv() except EOFError: # The other end of the connection has been closed. Remove # it so that we exit the loop when the connections list is # empty. conns.remove(conn) else: # Print results and send new test. 
worker_id, obj = msg if isinstance(obj, _ErrMsg): obj.print_exception("[worker{}]".format(worker_id)) elif isinstance(obj, TestResultMsg): result.process_result(obj) # Start next test if there is still some if t < ntest and not result.shouldStop: start_test(conn, test_names[t]) t += 1 else: stop_worker(conn) else: raise RuntimeError("main process cannot handle " "message {!r} " "from worker {}" .format(obj, worker_id)) finally: for conn in conns: stop_worker(conn) conn.close() ### Wait for workers to finish. # FIXME(Nicolas Despres): Add a timeout on join() for p in workers: p.join() def run_monoproc_tests(test_names, result, cov): with cov: for test_name in test_names: # If a test has failed and -f/--failfast is set we must exit now. if result.shouldStop: break test = _testLoader.loadTestsFromNames([test_name]) test(result)
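run_concurrent_tests above keeps exactly one outstanding test per worker: it bootstraps each pipe with one test name, waits on all connections, and refills a worker as soon as its result comes back, sending None when nothing is left. A minimal sketch of that scheduling loop with multiprocessing.Pipe and connection.wait; the squaring worker and the integer "work items" are stand-ins for hunittest's real worker and test names, not the project's actual code.

# Minimal master/worker scheduling sketch: one in-flight item per worker,
# refill on each result, send None as the stop sentinel (like stop_worker()).
import multiprocessing as mp
from multiprocessing import connection

def worker(conn):
    # Stand-in worker: squares each item it receives; None means "stop".
    with conn:
        while True:
            item = conn.recv()
            if item is None:
                break
            conn.send(item * item)

def run_all(items, njobs=2):
    items = list(items)
    if not items:
        return []
    active, procs = [], []
    for _ in range(min(njobs, len(items))):
        master_end, worker_end = mp.Pipe()
        p = mp.Process(target=worker, args=(worker_end,))
        p.start()
        worker_end.close()              # master keeps only its own end of the pipe
        active.append(master_end)
        procs.append(p)
    it = iter(items)
    for c in active:                    # bootstrap: one item per worker
        c.send(next(it))
    results = []
    while active:
        for c in connection.wait(active):
            results.append(c.recv())
            try:
                c.send(next(it))        # refill this worker with the next item
            except StopIteration:
                c.send(None)            # nothing left: tell this worker to stop
                active.remove(c)
    for p in procs:
        p.join()
    return results

if __name__ == "__main__":
    print(sorted(run_all(range(10))))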
bsd-2-clause
DomT4/homebrew-core
Formula/saxon.rb
1045
class Saxon < Formula desc "XSLT and XQuery processor" homepage "https://saxon.sourceforge.io" url "https://downloads.sourceforge.net/project/saxon/Saxon-HE/9.8/SaxonHE9-8-0-14J.zip" version "9.8.0.14" sha256 "139644e35aed79f16218848cd5e6c00f70c54378b3821e6d6ac723d8b84a4287" bottle :unneeded def install libexec.install Dir["*.jar", "doc", "notices"] bin.write_jar_script libexec/"saxon9he.jar", "saxon" end test do (testpath/"test.xml").write <<~EOS <test>It works!</test> EOS (testpath/"test.xsl").write <<~EOS <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="2.0"> <xsl:template match="/"> <html> <body> <p><xsl:value-of select="test"/></p> </body> </html> </xsl:template> </xsl:stylesheet> EOS assert_equal <<~EOS.chop, shell_output("#{bin}/saxon test.xml test.xsl") <html> <body> <p>It works!</p> </body> </html> EOS end end
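The formula's test block drives the installed `saxon` wrapper script on a tiny XML/XSL pair and checks the transformed output. A small Python sketch of the same smoke test via subprocess; it assumes a `saxon` executable like the one the formula installs is on PATH, which is an assumption of the example rather than something the formula guarantees for arbitrary environments.

# Smoke-test sketch: run an XSLT transform through a `saxon` wrapper script
# (assumed to be on PATH) and check the output, mirroring the formula's test block.
import os
import subprocess
import tempfile

xml = "<test>It works!</test>\n"
xsl = """<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="2.0">
  <xsl:template match="/">
    <html><body><p><xsl:value-of select="test"/></p></body></html>
  </xsl:template>
</xsl:stylesheet>
"""

with tempfile.TemporaryDirectory() as d:
    xml_path = os.path.join(d, "test.xml")
    xsl_path = os.path.join(d, "test.xsl")
    with open(xml_path, "w") as f:
        f.write(xml)
    with open(xsl_path, "w") as f:
        f.write(xsl)
    out = subprocess.run(["saxon", xml_path, xsl_path],
                         capture_output=True, text=True, check=True)
    assert "It works!" in out.stdout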
bsd-2-clause
uwgraphics/Ubiqu-Ity
Ity/Taggers/DocuscopeTagger/DocuscopeTagger.py
16245
# coding=utf-8 __author__ = 'kohlmannj' from copy import copy, deepcopy import os import Ity import DocuscopeDictionary from DocuscopeCSVDictionary import DocuscopeCSVDictionary from Ity.Tokenizers import Tokenizer from Ity.Taggers import Tagger import time class DocuscopeTagger(Tagger): """ DocuscopeTagger uses an implementation of the Docuscope rule-matching algorithm to apply rules ("lats") from the Docucsope dictionary (by Kaufer and Ishizaki of Carnegie Mellon University). The dictionary maps rule names to one or more "phrases", which themselves are one or more words ("we") or "word classes" ("!ROYALWE"). These rules may also include punctuation characters. The algorithm prioritizes the longest rules, so it applies the rule for which there appears the longest contiguous subset of matching words, given a starting token from a text. If the Docuscope dictionary does not contain an applicable long rule, it provides additional "short" rules that apply for single words (or punctuation characters, in theory). This Tagger excludes whitespace and newline characters, but does so in a way that such tokens are simply passed. There is the potential for erroneous long rule applications in cases where a long rule may be matched across a newline token, for example. Most of the time, the structure of the Docuscope dictionary's rules and the structure of the document itself should prevent this from happening often. (That is, a long rule matching "who goes there" could not be applied to "who goes.\n\nThere" because the period ending the sentence prevents the rule from being applied.) The long rule application algorithm is based on the original one written by Michael Gleicher in his DocuscopeJr module. DocuscopeTagger may be instantiated with an alternative `dictionary_path`, which refers to either a folder containing Docuscope-style plain text files with rule and word class specifications, or a CSV file specifying rule and word class specifications. If `None` is provided, DocuscopeTagger defaults to the "stock" Docuscope dictionary, which is not publicly available at this time. """ def __init__( self, debug=False, label="", excluded_token_types=( Tokenizer.TYPES["WHITESPACE"], Tokenizer.TYPES["NEWLINE"] ), untagged_rule_name=None, unrecognized_rule_name=None, excluded_rule_name=None, return_untagged_tags=False, return_unrecognized_tags=False, return_excluded_tags=False, return_included_tags=False, allow_overlapping_tags=False, dictionary_path=None, blacklist=[], return_tag_maps=False, ): super(DocuscopeTagger, self).__init__( debug=debug, label=label, excluded_token_types=excluded_token_types, untagged_rule_name=untagged_rule_name, unrecognized_rule_name=unrecognized_rule_name, excluded_rule_name=excluded_rule_name, return_untagged_tags=return_untagged_tags, return_unrecognized_tags=return_unrecognized_tags, return_excluded_tags=return_excluded_tags, return_included_tags=return_included_tags, blacklist=blacklist, return_tag_maps=return_tag_maps ) # Set blacklist self.blacklist = blacklist self.return_tag_maps = return_tag_maps # This is a weird setting self.allow_overlapping_tags = allow_overlapping_tags # Allow DocuscopeTagger to be initialized with a different path to the Docuscope dictionary. if dictionary_path is not None and os.path.exists(dictionary_path): self.dictionary_path = dictionary_path # Swizzle the dictionary filename into this instance's label. self._label += "." + os.path.basename(dictionary_path) if self.return_excluded_tags: self._label += "." 
+ "return_excluded_tags" if self.allow_overlapping_tags: self._label += "." + "allow_overlapping_tags" elif dictionary_path is not None and os.path.exists(os.path.join(Ity.dictionaries_root, 'Docuscope', dictionary_path)): self.dictionary_path = os.path.join(Ity.dictionaries_root, 'Docuscope', dictionary_path) self._label += '.' + dictionary_path # If the given dictionary path is invalid, use the following default value. else: # lf.write("swapped to default at 102"+ '\n') self.dictionary_path = os.path.join(Ity.dictionaries_root, "Docuscope/default") # Swizzle ".default" into this instance's label. self._label += ".default" # Is this dictionary a folder? if os.path.isdir(self.dictionary_path): # Cool, use DocuscopeDictionary.getDict to load that dictionary. self._ds_dict = DocuscopeDictionary.getDict(self.dictionary_path) # Is the dictionary a file with the extension ".csv"? elif os.path.isfile(self.dictionary_path) and os.path.splitext(self.dictionary_path)[1] == ".csv": # Load the Dictionary with a TopicModelDictionary. self._ds_dict = DocuscopeCSVDictionary(rules_filename=self.dictionary_path) self._ds_dict._load_rules() # lf.close() def _get_ds_words_for_token(self, token, case_sensitive=False): # Get all the str representations of this token. token_strs = token[Tokenizer.INDEXES["STRS"]] # Try to find a matching Docuscope token while we still have # token_strs to try with. ds_words = [] for token_str in token_strs: if not case_sensitive: token_str = token_str.lower() # UnicodeWarning previously happened here when this was a try / KeyError block if token_str in self._ds_dict.words: ds_words = self._ds_dict.words[token_str] return ds_words def _get_ds_words_for_token_index(self, token_index, case_sensitive=False): try: token = self.tokens[token_index] if token[0][0] in self.blacklist: return [] return self._get_ds_words_for_token(token, case_sensitive) except IndexError: return [] def _get_long_rule_tag(self): rule = copy(Tagger.empty_rule) tag = deepcopy(Tagger.empty_tag) # Is this token's type one that is excluded? if self.tokens[self.token_index][Tokenizer.INDEXES["TYPE"]] in self.excluded_token_types: # Early return, then. return None, None # Is there a next token? next_token_index = self._get_nth_next_included_token_index() if next_token_index is None: # Nope, no next token, so we can't look for long rules. return None, None # Oh good, there's a next token. Go find the longest rule, then. # This algorithm below is based on Mike Gleicher's DocuscopeJr tagger. best_ds_rule = None best_ds_lat = None best_ds_rule_len = 0 for token_ds_word in self._get_ds_words_for_token_index(self.token_index): try: rule_dict = self._ds_dict.rules[token_ds_word] for next_token_ds_word in self._get_ds_words_for_token_index(next_token_index): try: # for the rd[nw] for ds_lat, ds_rule in rule_dict[next_token_ds_word]: # check to see if the rule applies ds_rule_len = len(ds_rule) if ds_rule_len > best_ds_rule_len and self._long_rule_applies_at_token_index(ds_rule): # keep the "best" rule best_ds_rule = ds_rule best_ds_lat = ds_lat best_ds_rule_len = ds_rule_len except KeyError: pass except KeyError: pass if best_ds_rule is not None and best_ds_rule_len > 0: # Update the rule structure. rule["name"] = best_ds_lat rule["full_name"] = best_ds_lat # Update the tag structure. 
last_token_index = self._get_nth_next_included_token_index(n=best_ds_rule_len - 1) tag.update( rules=[ (rule["name"], best_ds_rule) ], index_start=self.token_index, index_end=last_token_index, pos_start=self.tokens[self.token_index][Tokenizer.INDEXES["POS"]], pos_end=self.tokens[last_token_index][Tokenizer.INDEXES["POS"]], len=tag["index_end"] - tag["index_start"] + 1, token_end_len=self.tokens[last_token_index][Tokenizer.INDEXES["LENGTH"]], num_included_tokens=best_ds_rule_len ) # Okay, do we have a valid tag and tag to return? (That's the best rule). if self._is_valid_rule(rule) and self._is_valid_tag(tag): # Return the best rule's rule and tag. return rule, tag else: # No long rule applies. return None, None def _long_rule_applies_at_token_index(self, rule): try: # Get the next token index so that the first reassignment to # next_token_index in the loop references the 3rd token in the rule. next_token_index = self._get_nth_next_included_token_index() for i in range(2, len(rule)): next_token_index = self._get_nth_next_included_token_index(starting_token_index=next_token_index) if next_token_index is None or not (rule[i] in self._get_ds_words_for_token_index(next_token_index)): return False # Made it out of the loop? Then the rule applies! return next_token_index except IndexError: return False def _get_short_rule_tag(self): rule = copy(Tagger.empty_rule) # Some data for the current token. token = self.tokens[self.token_index] token_ds_words = self._get_ds_words_for_token(token) # Update some information in tag right away for this one-token tag. tag = deepcopy(Tagger.empty_tag) tag.update( index_start=self.token_index, index_end=self.token_index, pos_start=token[Tokenizer.INDEXES["POS"]], pos_end=token[Tokenizer.INDEXES["POS"]], len=1, num_included_tokens=1, token_end_len=token[Tokenizer.INDEXES["LENGTH"]] ) # For words and punctuation... matching_ds_word = None if token[0][0] in self.blacklist: rule["name"] = "!BLACKLISTED" elif token[Tokenizer.INDEXES["TYPE"]] not in self.excluded_token_types: # Try to find a short rule for one of this token's ds_words. for ds_word in token_ds_words: try: # Note: we'll set rule["full_name"] later. rule["name"] = self._ds_dict.shortRules[ds_word] matching_ds_word = ds_word break except KeyError: continue # Handle untagged tokens (words and punctuation that # exist in the Docuscope dictionary's words dict but do not have # an applicable rule). if rule["name"] is None: for ds_word in token_ds_words: if ds_word in self._ds_dict.words: rule["name"] = self.untagged_rule_name break # Still don't have a rule? # Handle !UNRECOGNIZED tokens---tokens that do not exist in the dictionary. if rule["name"] is None: rule["name"] = self.unrecognized_rule_name # For excluded token types...uh, they're excluded. else: rule["name"] = self.excluded_rule_name # For all cases, we should have a rule "name" by now. # Update the rule's full_name value and append a rule tuple to the # tag's "rules" list. if "name" in rule and type(rule["name"]) is str: rule["full_name"] = rule["name"] rule_tuple = (rule["full_name"], matching_ds_word) tag["rules"].append(rule_tuple) # self._get_tag() will validate the returned rule and tag. return rule, tag def _get_tag(self): # Try finding a long rule. rule, tag = self._get_long_rule_tag() # If the long rule and tag are invalid (i.e. we got None and None), try finding a short rule. if not self._is_valid_rule(rule) and not self._is_valid_tag(tag): # Try finding a short rule (which could be the "untagged", # "no rule", or "excluded" rules). 
This method *should* never # return None, None (but technically it can). rule, tag = self._get_short_rule_tag() # We should absolutely have a valid rule and tag at this point. if not self._is_valid_rule(rule) or not self._is_valid_tag(tag): raise ValueError("Unexpected None, None return value/s from\ self._get_short_rule_tag(). Can't tag token '%s' at index %u." % ( self.tokens[self.token_index], self.token_index )) # Add the rule to self.rules (if we're supposed to) and add the tag to # self.tags. if self._should_return_rule(rule): # Is this the first time we've seen this rule? if rule["full_name"] not in self.rules: rule["num_tags"] = 1 rule["num_included_tokens"] = tag["num_included_tokens"] self.rules[rule["name"]] = rule # We've seen this rule already, but update its num_tags count. else: self.rules[rule["name"]]["num_tags"] += 1 self.rules[rule["name"]]["num_included_tokens"] += tag["num_included_tokens"] # Append the tag to self.tags. if self.return_tag_maps: self.tags.append(tag) # Debug: print the tokens that have been tagged. if self.debug: tag_token_strs = [] for token in self.tokens[tag["index_start"]:(tag["index_end"] + 1)]: tag_token_strs.append(token[Tokenizer.INDEXES["STRS"]][-1]) print ">>> BEST RULE: %s for \"%s\"" % ( rule["name"], str(tag_token_strs) ) # Compute the new token index. # If "overlapping tags" are allowed, start at the token following # the **first** token in the tag we just finished making. if self.allow_overlapping_tags: self.token_index = tag["index_start"] + 1 # Otherwise, start at the token following the **last** token in the # tag we just finished making. else: self.token_index = tag["index_end"] + 1 def tag(self, tokens): # Several helper methods need access to the tokens. self.tokens = tokens self.token_index = 0 # Loop through the tokens and tag them. while self.token_index < len(self.tokens) and self.token_index is not None: if self.debug: print "\nPassing self.tokens[%u] = %s" % (self.token_index, str(self.tokens[self.token_index])) self._get_tag() # All done, so let's do some cleanup. rules = self.rules tags = self.tags # Clear this instance's tokens, rules, and tags. # (This is an attempt to free up memory a bit earlier.) self.tokens = [] self.rules = {} self.tags = [] # Return the goods. if self.return_tag_maps: return rules, tags else: return rules
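The class docstring above describes the core matching strategy: starting from the current token, prefer the longest contiguous multi-word ("long") rule that applies, and only fall back to a single-token ("short") rule, or an untagged/unrecognized marker, when no long rule fits. A toy Python sketch of that longest-rule-first loop; the LONG_RULES and SHORT_RULES tables below are invented for illustration and are not the Docuscope dictionary format.

# Toy sketch of longest-rule-first matching as described in the docstring above:
# try every multi-word rule starting at the current token, keep the longest match,
# and fall back to a single-word "short rule" (or a marker) otherwise.
LONG_RULES = {
    ("who", "goes", "there"): "Challenge",
    ("who", "goes"): "Movement",
}
SHORT_RULES = {"who": "Pronoun", "there": "Place"}

def tag(tokens):
    tags, i = [], 0
    while i < len(tokens):
        best_rule, best_len = None, 0
        for words, name in LONG_RULES.items():
            n = len(words)
            if n > best_len and tuple(tokens[i:i + n]) == words:
                best_rule, best_len = name, n
        if best_rule is not None:
            tags.append((best_rule, tokens[i:i + best_len]))
            i += best_len                      # skip past the whole long match
        else:
            name = SHORT_RULES.get(tokens[i], "!UNRECOGNIZED")
            tags.append((name, [tokens[i]]))
            i += 1
    return tags

print(tag("who goes there now".split()))
# [('Challenge', ['who', 'goes', 'there']), ('!UNRECOGNIZED', ['now'])]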
bsd-2-clause
micschk/silverstripe-liveseo
code/SeoInformationProvider.php
554
<?php /** * Optionally provide extra information for the SEO plugin to use to calculate a score from JS */ interface SeoInformationProvider { /** * Provide a list of images. Currently only the number of images is used * @return DataList Images, either objects, or URLs */ public function getImagesForSeo(); /** * Provide a list of links, e.g. from a related links relation. * Note: currently only the number of items is used * @return DataList List of links */ public function getLinksForSeo(); }
bsd-2-clause
samrushing/caesure
caesure/verifyd.py
1969
# -*- Mode: Python; indent-tabs-mode: nil -*- import argparse import os import coro import struct from caesure.bitcoin import TX from coro.asn1.python import encode, decode W = coro.write_stderr def serve (G): path = os.path.join (G.args.base, G.args.file) s = coro.sock (coro.AF.UNIX, coro.SOCK.STREAM) try: os.unlink (path) except OSError: pass s.bind (path) s.listen (100) while 1: conn, addr = s.accept() if coro.fork() == 0: coro.spawn (go, G, conn) s.close() return else: conn.close() coro.set_exit() def go (G, s): try: while 1: # what are the per-txn size limits? pktlen = s.recv_exact (4) if not pktlen: break else: pktlen, = struct.unpack ('>I', pktlen) packet = s.recv_exact (pktlen) data, size = decode (packet) assert size == pktlen [index, block_timestamp, raw_tx, lock_scripts] = data tx = TX() tx.unpack (raw_tx) result = True for i in range (len (tx.inputs)): lock_script = lock_scripts[i] try: tx.verify (i, lock_script, block_timestamp) except SystemError: result = False pkt = encode ((result, index)) s.writev ([struct.pack ('>I', len(pkt)), pkt]) except EOFError: pass coro.set_exit() class GlobalState: pass G = GlobalState() p = argparse.ArgumentParser() p.add_argument ('-b', '--base', help='data directory', default='/usr/local/caesure', metavar='PATH') p.add_argument ('-f', '--file', help='server socket filename', default='verifyd.sock', metavar='PATH') args = G.args = p.parse_args() coro.spawn (serve, G) coro.event_loop()
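The verify daemon above frames every request and reply as a 4-byte big-endian length followed by an encoded payload (recv_exact to read the header, then the body). A minimal sketch of that framing over an ordinary socket pair; JSON stands in here for coro's ASN.1 encode/decode, purely for illustration.

# Length-prefixed framing sketch: 4-byte big-endian length, then the payload,
# as used by verifyd's request/response loop. JSON stands in for the ASN.1 codec.
import json
import socket
import struct

def send_msg(sock, obj):
    payload = json.dumps(obj).encode()
    sock.sendall(struct.pack(">I", len(payload)) + payload)

def recv_exact(sock, n):
    buf = b""
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise EOFError("peer closed the connection")
        buf += chunk
    return buf

def recv_msg(sock):
    (length,) = struct.unpack(">I", recv_exact(sock, 4))
    return json.loads(recv_exact(sock, length))

# Round-trip over a local socket pair.
a, b = socket.socketpair()
send_msg(a, {"index": 7, "result": True})
print(recv_msg(b))   # {'index': 7, 'result': True}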
bsd-2-clause
sandor-balazs/nosql-java
mongodb/src/main/java/com/github/sandor_balazs/nosql_java/web/rest/dto/EmploymentDTO.java
1617
package com.github.sandor_balazs.nosql_java.web.rest.dto; import java.time.LocalDate; import java.io.Serializable; import java.util.Objects; /** * A DTO for the Employment entity. */ public class EmploymentDTO implements Serializable { private String id; private Float fte; private LocalDate startDate; private LocalDate endDate; public String getId() { return id; } public void setId(String id) { this.id = id; } public Float getFte() { return fte; } public void setFte(Float fte) { this.fte = fte; } public LocalDate getStartDate() { return startDate; } public void setStartDate(LocalDate startDate) { this.startDate = startDate; } public LocalDate getEndDate() { return endDate; } public void setEndDate(LocalDate endDate) { this.endDate = endDate; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } EmploymentDTO employmentDTO = (EmploymentDTO) o; if ( ! Objects.equals(id, employmentDTO.id)) return false; return true; } @Override public int hashCode() { return Objects.hashCode(id); } @Override public String toString() { return "EmploymentDTO{" + "id=" + id + ", fte='" + fte + "'" + ", startDate='" + startDate + "'" + ", endDate='" + endDate + "'" + '}'; } }
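EmploymentDTO above is a plain data carrier whose equals() and hashCode() are based solely on the id field. A minimal Python dataclass sketch with the same id-only identity semantics, offered only as an analogy; the field names follow the DTO but the class is otherwise unrelated to the project.

# Dataclass sketch mirroring EmploymentDTO: equality and hashing on `id` only.
from dataclasses import dataclass, field
from datetime import date
from typing import Optional

@dataclass
class EmploymentDTO:
    id: Optional[str] = None
    fte: Optional[float] = field(default=None, compare=False)
    start_date: Optional[date] = field(default=None, compare=False)
    end_date: Optional[date] = field(default=None, compare=False)

    def __hash__(self):
        return hash(self.id)

a = EmploymentDTO(id="42", fte=1.0)
b = EmploymentDTO(id="42", fte=0.5)
print(a == b)   # True: only `id` participates in equality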
bsd-2-clause
ThibaultReuille/graphiti
Scripts/console/graph.py
6448
import script from script import * class Topology(script.Script): def neighbors(self, args): new_neighbors = list() if 'nodes' in self.console.query: graph = std.load_nx_graph() for nid in self.console.query['nodes']: for neighbor in graph.neighbors(nid): if neighbor not in self.console.query['nodes']: new_neighbors.append(neighbor) for nn in new_neighbors: self.console.query['nodes'].append(nn) self.console.print_query() def connected_components(self, args): og.set_attribute("graphiti:space:edgemode", "string", "node_color") graph = std.load_nx_graph() cc = nx.connected_components(graph) for component in cc: r = random.random() g = random.random() b = random.random() color = str(r) + " " + str(g) + " " + str(b) for node in component: og.set_node_attribute(node, "graphiti:space:color", "vec3", color) def directions(self, args): for id in og.get_edge_ids(): og.set_edge_attribute(id, "og:space:icon", "string", "styles/triangles") def connections(self, args): og.set_attribute("graphiti:space:edgemode", "string", "node_color") # Building node degree table ... edges = og.get_edge_ids() degree_table = dict() for eid in edges: nid1 = og.get_edge_node1(eid) nid2 = og.get_edge_node2(eid) if nid1 not in degree_table: degree_table[nid1] = { "in" : 0, "out" : 0 } if nid2 not in degree_table: degree_table[nid2] = { "in" : 0, "out" : 0 } degree_table[nid1]["out"] += 1 degree_table[nid2]["in"] += 1 # Randomizing color map m = dict() m["isolated"] = [0.95, 0.98, 0.36, 1.0] m["leaf"] = [0.06, 0.94, 0.61, 1.0] m["source"] = [0.91, 0.18, 0.17, 1.0] m["sink"] = [0.03, 0.65, 0.94, 1.0] m["other"] = [0.77, 0.78, 0.75, 1.0] # Coloring for nid in og.get_node_ids(): if nid not in degree_table: t = "isolated" else: deg = degree_table[nid] if deg["in"] == 0 and deg["out"] == 1: t = "leaf" elif deg["in"] == 0 and deg["out"] > 1: t = "source" elif deg["in"] > 0 and deg["out"] == 0: t = "sink" else: t = "other" og.set_node_attribute(nid, "graphiti:space:color", "vec4", std.vec4_to_str(m[t])) def degrees_high(self): og.set_attribute("graphiti:space:edgemode", "string", "node_color") graph = std.load_nx_graph() max_degree = max(nx.degree(graph).values()) for n in graph.nodes(data = True): deg = nx.degree(graph, n[0]) tint = 0.3 + 0.9 * float(deg) / float(max_degree) color = og.get_node_attribute(n[0], "graphiti:space:color") color[0] = tint * color[0] color[1] = tint * color[1] color[2] = tint * color[2] color[3] = 1.0 c = str(color[0]) + " " + str(color[1]) + " " + str(color[2]) og.set_node_attribute(n[0], "graphiti:space:color", "vec3", c) def degrees_low(self): og.set_attribute("graphiti:space:edgemode", "string", "node_color") graph = std.load_nx_graph() max_degree = max(nx.degree(graph).values()) for n in graph.nodes(data = True): deg = nx.degree(graph, n[0]) tint = 0.3 + 0.9 * (1.0 - float(deg) / float(max_degree)) color = og.get_node_attribute(n[0], "graphiti:space:color") color[0] = tint * color[0] color[1] = tint * color[1] color[2] = tint * color[2] c = str(color[0]) + " " + str(color[1]) + " " + str(color[2]) og.set_node_attribute(n[0], "graphiti:space:color", "vec3", c) def degrees(self, args): if len(args) == 1: self.degrees_high() elif len(args) == 2 and args[1] == "high": self.degrees_high() elif len(args) == 2 and args[1] == "low": self.degrees_low() else: self.console.log("Error: {0}: Wrong arguments!".format(args[0])) def get_degree_map(self): degrees = dict() for eid in og.get_edge_ids(): bi = False e_type = og.get_edge_attribute(eid, "type") if e_type is not None and "<->" in e_type: bi = True nid1 = 
og.get_edge_node1(eid) nid2 = og.get_edge_node2(eid) if nid1 not in degrees: degrees[nid1] = { "in" : 0, "out" : 0 } if nid2 not in degrees: degrees[nid2] = { "in" : 0, "out" : 0 } if bi: degrees[nid1]["in"] += 1 degrees[nid1]["out"] += 1 degrees[nid2]["in"] += 1 degrees[nid2]["out"] += 1 else: degrees[nid1]["out"] += 1 degrees[nid2]["in"] += 1 return degrees def spn(self, args): degree_map = self.get_degree_map() source_map = dict() for eid in og.get_edge_ids(): src = og.get_edge_node1(eid) if src not in degree_map: continue if degree_map[src]["in"] == 0 and degree_map[src]["out"] >= 0: dst = og.get_edge_node2(eid) if src not in source_map: source_map[src] = [(dst, eid)] elif dst not in source_map[src]: source_map[src].append((dst, eid)) for nid in og.get_node_ids(): og.set_node_attribute(nid, "og:space:lod", "float", "0.0") for eid in og.get_edge_ids(): og.set_edge_attribute(eid, "og:space:lod", "float", "0.0") for source in source_map.keys(): og.set_node_attribute(source, "og:space:lod", "float", "1.0") for successor in source_map[source]: og.set_node_attribute(successor[0], "og:space:lod", "float", "1.0") og.set_edge_attribute(successor[1], "og:space:lod", "float", "1.0") def run(self, args): if len(args) == 2 and args[1] == "neighbors": self.neighbors(args) elif len(args) == 2 and args[1] == "cc": self.connected_components(args) elif len(args) == 2 and args[1] == "directions": self.directions(args) elif len(args) == 2 and args[1] == "connections": self.connections(args) elif len(args) >= 2 and args[1] == "degrees": self.degrees(args[1:]) elif len(args) == 2 and args[1] == "spn": self.spn(args) else: self.console.log("Error: {0}: Wrong arguments!".format(args[0]))
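connections() above builds an in/out degree table from the edge list and then buckets every node as isolated, leaf, source, sink, or other before coloring it. A small standalone sketch of that classification from a plain edge list, with no graphiti/og API involved; the sample graph is invented.

# Degree-based node classification sketch, mirroring connections() above:
# build in/out degree counts from an edge list, then bucket each node.
from collections import defaultdict

def classify(nodes, edges):
    deg = defaultdict(lambda: {"in": 0, "out": 0})
    for src, dst in edges:
        deg[src]["out"] += 1
        deg[dst]["in"] += 1
    labels = {}
    for n in nodes:
        if n not in deg:
            labels[n] = "isolated"
        elif deg[n]["in"] == 0 and deg[n]["out"] == 1:
            labels[n] = "leaf"
        elif deg[n]["in"] == 0 and deg[n]["out"] > 1:
            labels[n] = "source"
        elif deg[n]["in"] > 0 and deg[n]["out"] == 0:
            labels[n] = "sink"
        else:
            labels[n] = "other"
    return labels

print(classify([1, 2, 3, 4, 5], [(1, 2), (1, 3), (3, 4)]))
# {1: 'source', 2: 'sink', 3: 'other', 4: 'sink', 5: 'isolated'}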
bsd-2-clause
EIDSS/EIDSS-Legacy
EIDSS v6.1/eidss.model/AVR/Pivot/LayoutBaseComplexity.cs
615
using System; namespace eidss.model.Avr.Pivot { public class LayoutBaseComplexity { public virtual long CellCount { get { return 0; } } public virtual double Complexity { get { return 0; } } public long MemoryInMB { get { return GC.GetTotalMemory(false) / (1024 * 1024); } } public override string ToString() { return String.Format("CellCount={0}, Complexity={1}, Memory Usage={2}MB", CellCount, Complexity, MemoryInMB); } } }
bsd-2-clause
ajgallegog/gem5_arm
src/cpu/base.cc
23808
/* * Copyright (c) 2011-2012 ARM Limited * All rights reserved * * The license below extends only to copyright in the software and shall * not be construed as granting a license to any other intellectual * property including but not limited to intellectual property relating * to a hardware implementation of the functionality of the software * licensed hereunder. You may use the software subject to the license * terms below provided that you ensure that this notice is replicated * unmodified and in its entirety in all distributions of the software, * modified or unmodified, in source code or in binary form. * * Copyright (c) 2002-2005 The Regents of The University of Michigan * Copyright (c) 2011 Regents of the University of California * Copyright (c) 2013 Advanced Micro Devices, Inc. * Copyright (c) 2013 Mark D. Hill and David A. Wood * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer; * redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution; * neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * Authors: Steve Reinhardt * Nathan Binkert * Rick Strong */ #include <iostream> #include <sstream> #include <string> #include "arch/tlb.hh" #include "base/loader/symtab.hh" #include "base/cprintf.hh" #include "base/misc.hh" #include "base/output.hh" #include "base/trace.hh" #include "cpu/checker/cpu.hh" #include "cpu/base.hh" #include "cpu/cpuevent.hh" #include "cpu/profile.hh" #include "cpu/thread_context.hh" #include "debug/Mwait.hh" #include "debug/SyscallVerbose.hh" #include "mem/page_table.hh" #include "params/BaseCPU.hh" #include "sim/full_system.hh" #include "sim/process.hh" #include "sim/sim_events.hh" #include "sim/sim_exit.hh" #include "sim/system.hh" // Hack #include "sim/stat_control.hh" using namespace std; vector<BaseCPU *> BaseCPU::cpuList; // This variable reflects the max number of threads in any CPU. 
Be // careful to only use it once all the CPUs that you care about have // been initialized int maxThreadsPerCPU = 1; CPUProgressEvent::CPUProgressEvent(BaseCPU *_cpu, Tick ival) : Event(Event::Progress_Event_Pri), _interval(ival), lastNumInst(0), cpu(_cpu), _repeatEvent(true) { if (_interval) cpu->schedule(this, curTick() + _interval); } void CPUProgressEvent::process() { Counter temp = cpu->totalOps(); #ifndef NDEBUG double ipc = double(temp - lastNumInst) / (_interval / cpu->clockPeriod()); DPRINTFN("%s progress event, total committed:%i, progress insts committed: " "%lli, IPC: %0.8d\n", cpu->name(), temp, temp - lastNumInst, ipc); ipc = 0.0; #else cprintf("%lli: %s progress event, total committed:%i, progress insts " "committed: %lli\n", curTick(), cpu->name(), temp, temp - lastNumInst); #endif lastNumInst = temp; if (_repeatEvent) cpu->schedule(this, curTick() + _interval); } const char * CPUProgressEvent::description() const { return "CPU Progress"; } BaseCPU::BaseCPU(Params *p, bool is_checker) : MemObject(p), instCnt(0), _cpuId(p->cpu_id), _socketId(p->socket_id), _instMasterId(p->system->getMasterId(name() + ".inst")), _dataMasterId(p->system->getMasterId(name() + ".data")), _taskId(ContextSwitchTaskId::Unknown), _pid(Request::invldPid), _switchedOut(p->switched_out), _cacheLineSize(p->system->cacheLineSize()), interrupts(p->interrupts), profileEvent(NULL), numThreads(p->numThreads), system(p->system), addressMonitor() { // if Python did not provide a valid ID, do it here if (_cpuId == -1 ) { _cpuId = cpuList.size(); } // add self to global list of CPUs cpuList.push_back(this); DPRINTF(SyscallVerbose, "Constructing CPU with id %d, socket id %d\n", _cpuId, _socketId); if (numThreads > maxThreadsPerCPU) maxThreadsPerCPU = numThreads; // allocate per-thread instruction-based event queues comInstEventQueue = new EventQueue *[numThreads]; for (ThreadID tid = 0; tid < numThreads; ++tid) comInstEventQueue[tid] = new EventQueue("instruction-based event queue"); // // set up instruction-count-based termination events, if any // if (p->max_insts_any_thread != 0) { const char *cause = "a thread reached the max instruction count"; for (ThreadID tid = 0; tid < numThreads; ++tid) scheduleInstStop(tid, p->max_insts_any_thread, cause); } // Set up instruction-count-based termination events for SimPoints // Typically, there are more than one action points. // Simulation.py is responsible to take the necessary actions upon // exitting the simulation loop. 
if (!p->simpoint_start_insts.empty()) { const char *cause = "simpoint starting point found"; for (size_t i = 0; i < p->simpoint_start_insts.size(); ++i) scheduleInstStop(0, p->simpoint_start_insts[i], cause); } if (p->max_insts_all_threads != 0) { const char *cause = "all threads reached the max instruction count"; // allocate & initialize shared downcounter: each event will // decrement this when triggered; simulation will terminate // when counter reaches 0 int *counter = new int; *counter = numThreads; for (ThreadID tid = 0; tid < numThreads; ++tid) { Event *event = new CountedExitEvent(cause, *counter); comInstEventQueue[tid]->schedule(event, p->max_insts_all_threads); } } // allocate per-thread load-based event queues comLoadEventQueue = new EventQueue *[numThreads]; for (ThreadID tid = 0; tid < numThreads; ++tid) comLoadEventQueue[tid] = new EventQueue("load-based event queue"); // // set up instruction-count-based termination events, if any // if (p->max_loads_any_thread != 0) { const char *cause = "a thread reached the max load count"; for (ThreadID tid = 0; tid < numThreads; ++tid) scheduleLoadStop(tid, p->max_loads_any_thread, cause); } if (p->max_loads_all_threads != 0) { const char *cause = "all threads reached the max load count"; // allocate & initialize shared downcounter: each event will // decrement this when triggered; simulation will terminate // when counter reaches 0 int *counter = new int; *counter = numThreads; for (ThreadID tid = 0; tid < numThreads; ++tid) { Event *event = new CountedExitEvent(cause, *counter); comLoadEventQueue[tid]->schedule(event, p->max_loads_all_threads); } } functionTracingEnabled = false; if (p->function_trace) { const string fname = csprintf("ftrace.%s", name()); functionTraceStream = simout.find(fname); if (!functionTraceStream) functionTraceStream = simout.create(fname); currentFunctionStart = currentFunctionEnd = 0; functionEntryTick = p->function_trace_start; if (p->function_trace_start == 0) { functionTracingEnabled = true; } else { typedef EventWrapper<BaseCPU, &BaseCPU::enableFunctionTrace> wrap; Event *event = new wrap(this, true); schedule(event, p->function_trace_start); } } // The interrupts should always be present unless this CPU is // switched in later or in case it is a checker CPU if (!params()->switched_out && !is_checker) { if (interrupts) { interrupts->setCPU(this); } else { fatal("CPU %s has no interrupt controller.\n" "Ensure createInterruptController() is called.\n", name()); } } if (FullSystem) { if (params()->profile) profileEvent = new ProfileEvent(this, params()->profile); } tracer = params()->tracer; if (params()->isa.size() != numThreads) { fatal("Number of ISAs (%i) assigned to the CPU does not equal number " "of threads (%i).\n", params()->isa.size(), numThreads); } } void BaseCPU::enableFunctionTrace() { functionTracingEnabled = true; } BaseCPU::~BaseCPU() { delete profileEvent; delete[] comLoadEventQueue; delete[] comInstEventQueue; } void BaseCPU::armMonitor(Addr address) { addressMonitor.armed = true; addressMonitor.vAddr = address; addressMonitor.pAddr = 0x0; DPRINTF(Mwait,"Armed monitor (vAddr=0x%lx)\n", address); } bool BaseCPU::mwait(PacketPtr pkt) { if(addressMonitor.gotWakeup == false) { int block_size = cacheLineSize(); uint64_t mask = ~((uint64_t)(block_size - 1)); assert(pkt->req->hasPaddr()); addressMonitor.pAddr = pkt->getAddr() & mask; addressMonitor.waiting = true; DPRINTF(Mwait,"mwait called (vAddr=0x%lx, line's paddr=0x%lx)\n", addressMonitor.vAddr, addressMonitor.pAddr); return true; } else 
{ addressMonitor.gotWakeup = false; return false; } } void BaseCPU::mwaitAtomic(ThreadContext *tc, TheISA::TLB *dtb) { Request req; Addr addr = addressMonitor.vAddr; int block_size = cacheLineSize(); uint64_t mask = ~((uint64_t)(block_size - 1)); int size = block_size; //The address of the next line if it crosses a cache line boundary. Addr secondAddr = roundDown(addr + size - 1, block_size); if (secondAddr > addr) size = secondAddr - addr; req.setVirt(0, addr, size, 0x0, dataMasterId(), tc->instAddr()); // translate to physical address Fault fault = dtb->translateAtomic(&req, tc, BaseTLB::Read); assert(fault == NoFault); addressMonitor.pAddr = req.getPaddr() & mask; addressMonitor.waiting = true; DPRINTF(Mwait,"mwait called (vAddr=0x%lx, line's paddr=0x%lx)\n", addressMonitor.vAddr, addressMonitor.pAddr); } void BaseCPU::init() { if (!params()->switched_out) { registerThreadContexts(); verifyMemoryMode(); } } void BaseCPU::startup() { if (FullSystem) { if (!params()->switched_out && profileEvent) schedule(profileEvent, curTick()); } if (params()->progress_interval) { new CPUProgressEvent(this, params()->progress_interval); } } ProbePoints::PMUUPtr BaseCPU::pmuProbePoint(const char *name) { ProbePoints::PMUUPtr ptr; ptr.reset(new ProbePoints::PMU(getProbeManager(), name)); return ptr; } void BaseCPU::regProbePoints() { ppCycles = pmuProbePoint("Cycles"); ppRetiredInsts = pmuProbePoint("RetiredInsts"); ppRetiredLoads = pmuProbePoint("RetiredLoads"); ppRetiredStores = pmuProbePoint("RetiredStores"); ppRetiredBranches = pmuProbePoint("RetiredBranches"); } void BaseCPU::probeInstCommit(const StaticInstPtr &inst) { if (!inst->isMicroop() || inst->isLastMicroop()) ppRetiredInsts->notify(1); if (inst->isLoad()) ppRetiredLoads->notify(1); if (inst->isStore()) ppRetiredLoads->notify(1); if (inst->isControl()) ppRetiredBranches->notify(1); } void BaseCPU::regStats() { using namespace Stats; numCycles .name(name() + ".numCycles") .desc("number of cpu cycles simulated") ; numWorkItemsStarted .name(name() + ".numWorkItemsStarted") .desc("number of work items this cpu started") ; numWorkItemsCompleted .name(name() + ".numWorkItemsCompleted") .desc("number of work items this cpu completed") ; int size = threadContexts.size(); if (size > 1) { for (int i = 0; i < size; ++i) { stringstream namestr; ccprintf(namestr, "%s.ctx%d", name(), i); threadContexts[i]->regStats(namestr.str()); } } else if (size == 1) threadContexts[0]->regStats(name()); } BaseMasterPort & BaseCPU::getMasterPort(const string &if_name, PortID idx) { // Get the right port based on name. This applies to all the // subclasses of the base CPU and relies on their implementation // of getDataPort and getInstPort. In all cases there methods // return a MasterPort pointer. if (if_name == "dcache_port") return getDataPort(); else if (if_name == "icache_port") return getInstPort(); else return MemObject::getMasterPort(if_name, idx); } void BaseCPU::registerThreadContexts() { ThreadID size = threadContexts.size(); for (ThreadID tid = 0; tid < size; ++tid) { ThreadContext *tc = threadContexts[tid]; /** This is so that contextId and cpuId match where there is a * 1cpu:1context relationship. Otherwise, the order of registration * could affect the assignment and cpu 1 could have context id 3, for * example. 
We may even want to do something like this for SMT so that * cpu 0 has the lowest thread contexts and cpu N has the highest, but * I'll just do this for now */ if (numThreads == 1) tc->setContextId(system->registerThreadContext(tc, _cpuId)); else tc->setContextId(system->registerThreadContext(tc)); if (!FullSystem) tc->getProcessPtr()->assignThreadContext(tc->contextId()); } } int BaseCPU::findContext(ThreadContext *tc) { ThreadID size = threadContexts.size(); for (ThreadID tid = 0; tid < size; ++tid) { if (tc == threadContexts[tid]) return tid; } return 0; } void BaseCPU::switchOut() { assert(!_switchedOut); _switchedOut = true; if (profileEvent && profileEvent->scheduled()) deschedule(profileEvent); // Flush all TLBs in the CPU to avoid having stale translations if // it gets switched in later. flushTLBs(); } void BaseCPU::takeOverFrom(BaseCPU *oldCPU) { assert(threadContexts.size() == oldCPU->threadContexts.size()); assert(_cpuId == oldCPU->cpuId()); assert(_switchedOut); assert(oldCPU != this); _pid = oldCPU->getPid(); _taskId = oldCPU->taskId(); _switchedOut = false; ThreadID size = threadContexts.size(); for (ThreadID i = 0; i < size; ++i) { ThreadContext *newTC = threadContexts[i]; ThreadContext *oldTC = oldCPU->threadContexts[i]; newTC->takeOverFrom(oldTC); CpuEvent::replaceThreadContext(oldTC, newTC); assert(newTC->contextId() == oldTC->contextId()); assert(newTC->threadId() == oldTC->threadId()); system->replaceThreadContext(newTC, newTC->contextId()); /* This code no longer works since the zero register (e.g., * r31 on Alpha) doesn't necessarily contain zero at this * point. if (DTRACE(Context)) ThreadContext::compare(oldTC, newTC); */ BaseMasterPort *old_itb_port = oldTC->getITBPtr()->getMasterPort(); BaseMasterPort *old_dtb_port = oldTC->getDTBPtr()->getMasterPort(); BaseMasterPort *new_itb_port = newTC->getITBPtr()->getMasterPort(); BaseMasterPort *new_dtb_port = newTC->getDTBPtr()->getMasterPort(); // Move over any table walker ports if they exist if (new_itb_port) { assert(!new_itb_port->isConnected()); assert(old_itb_port); assert(old_itb_port->isConnected()); BaseSlavePort &slavePort = old_itb_port->getSlavePort(); old_itb_port->unbind(); new_itb_port->bind(slavePort); } if (new_dtb_port) { assert(!new_dtb_port->isConnected()); assert(old_dtb_port); assert(old_dtb_port->isConnected()); BaseSlavePort &slavePort = old_dtb_port->getSlavePort(); old_dtb_port->unbind(); new_dtb_port->bind(slavePort); } newTC->getITBPtr()->takeOverFrom(oldTC->getITBPtr()); newTC->getDTBPtr()->takeOverFrom(oldTC->getDTBPtr()); // Checker whether or not we have to transfer CheckerCPU // objects over in the switch CheckerCPU *oldChecker = oldTC->getCheckerCpuPtr(); CheckerCPU *newChecker = newTC->getCheckerCpuPtr(); if (oldChecker && newChecker) { BaseMasterPort *old_checker_itb_port = oldChecker->getITBPtr()->getMasterPort(); BaseMasterPort *old_checker_dtb_port = oldChecker->getDTBPtr()->getMasterPort(); BaseMasterPort *new_checker_itb_port = newChecker->getITBPtr()->getMasterPort(); BaseMasterPort *new_checker_dtb_port = newChecker->getDTBPtr()->getMasterPort(); newChecker->getITBPtr()->takeOverFrom(oldChecker->getITBPtr()); newChecker->getDTBPtr()->takeOverFrom(oldChecker->getDTBPtr()); // Move over any table walker ports if they exist for checker if (new_checker_itb_port) { assert(!new_checker_itb_port->isConnected()); assert(old_checker_itb_port); assert(old_checker_itb_port->isConnected()); BaseSlavePort &slavePort = old_checker_itb_port->getSlavePort(); old_checker_itb_port->unbind(); 
new_checker_itb_port->bind(slavePort); } if (new_checker_dtb_port) { assert(!new_checker_dtb_port->isConnected()); assert(old_checker_dtb_port); assert(old_checker_dtb_port->isConnected()); BaseSlavePort &slavePort = old_checker_dtb_port->getSlavePort(); old_checker_dtb_port->unbind(); new_checker_dtb_port->bind(slavePort); } } } interrupts = oldCPU->interrupts; interrupts->setCPU(this); oldCPU->interrupts = NULL; if (FullSystem) { for (ThreadID i = 0; i < size; ++i) threadContexts[i]->profileClear(); if (profileEvent) schedule(profileEvent, curTick()); } // All CPUs have an instruction and a data port, and the new CPU's // ports are dangling while the old CPU has its ports connected // already. Unbind the old CPU and then bind the ports of the one // we are switching to. assert(!getInstPort().isConnected()); assert(oldCPU->getInstPort().isConnected()); BaseSlavePort &inst_peer_port = oldCPU->getInstPort().getSlavePort(); oldCPU->getInstPort().unbind(); getInstPort().bind(inst_peer_port); assert(!getDataPort().isConnected()); assert(oldCPU->getDataPort().isConnected()); BaseSlavePort &data_peer_port = oldCPU->getDataPort().getSlavePort(); oldCPU->getDataPort().unbind(); getDataPort().bind(data_peer_port); } void BaseCPU::flushTLBs() { for (ThreadID i = 0; i < threadContexts.size(); ++i) { ThreadContext &tc(*threadContexts[i]); CheckerCPU *checker(tc.getCheckerCpuPtr()); tc.getITBPtr()->flushAll(); tc.getDTBPtr()->flushAll(); if (checker) { checker->getITBPtr()->flushAll(); checker->getDTBPtr()->flushAll(); } } } BaseCPU::ProfileEvent::ProfileEvent(BaseCPU *_cpu, Tick _interval) : cpu(_cpu), interval(_interval) { } void BaseCPU::ProfileEvent::process() { ThreadID size = cpu->threadContexts.size(); for (ThreadID i = 0; i < size; ++i) { ThreadContext *tc = cpu->threadContexts[i]; tc->profileSample(); } cpu->schedule(this, curTick() + interval); } void BaseCPU::serialize(std::ostream &os) { SERIALIZE_SCALAR(instCnt); if (!_switchedOut) { /* Unlike _pid, _taskId is not serialized, as they are dynamically * assigned unique ids that are only meaningful for the duration of * a specific run. We will need to serialize the entire taskMap in * system. */ SERIALIZE_SCALAR(_pid); interrupts->serialize(os); // Serialize the threads, this is done by the CPU implementation. for (ThreadID i = 0; i < numThreads; ++i) { nameOut(os, csprintf("%s.xc.%i", name(), i)); serializeThread(os, i); } } } void BaseCPU::unserialize(Checkpoint *cp, const std::string &section) { UNSERIALIZE_SCALAR(instCnt); if (!_switchedOut) { UNSERIALIZE_SCALAR(_pid); interrupts->unserialize(cp, section); // Unserialize the threads, this is done by the CPU implementation. 
for (ThreadID i = 0; i < numThreads; ++i) unserializeThread(cp, csprintf("%s.xc.%i", section, i), i); } } void BaseCPU::scheduleInstStop(ThreadID tid, Counter insts, const char *cause) { const Tick now(comInstEventQueue[tid]->getCurTick()); Event *event(new LocalSimLoopExitEvent(cause, 0)); comInstEventQueue[tid]->schedule(event, now + insts); } AddressMonitor::AddressMonitor() { armed = false; waiting = false; gotWakeup = false; } bool AddressMonitor::doMonitor(PacketPtr pkt) { assert(pkt->req->hasPaddr()); if(armed && waiting) { if(pAddr == pkt->getAddr()) { DPRINTF(Mwait,"pAddr=0x%lx invalidated: waking up core\n", pkt->getAddr()); waiting = false; return true; } } return false; } void BaseCPU::scheduleLoadStop(ThreadID tid, Counter loads, const char *cause) { const Tick now(comLoadEventQueue[tid]->getCurTick()); Event *event(new LocalSimLoopExitEvent(cause, 0)); comLoadEventQueue[tid]->schedule(event, now + loads); } void BaseCPU::traceFunctionsInternal(Addr pc) { if (!debugSymbolTable) return; // if pc enters different function, print new function symbol and // update saved range. Otherwise do nothing. if (pc < currentFunctionStart || pc >= currentFunctionEnd) { string sym_str; bool found = debugSymbolTable->findNearestSymbol(pc, sym_str, currentFunctionStart, currentFunctionEnd); if (!found) { // no symbol found: use addr as label sym_str = csprintf("0x%x", pc); currentFunctionStart = pc; currentFunctionEnd = pc + 1; } ccprintf(*functionTraceStream, " (%d)\n%d: %s", curTick() - functionEntryTick, curTick(), sym_str); functionEntryTick = curTick(); } }
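BaseCPU::mwait and mwaitAtomic above arm the address monitor on the cache line containing the watched address by masking off the low block-size bits, and AddressMonitor::doMonitor wakes the core when a snooped access hits the armed line. A loose Python sketch of that masking and wake-up logic, detached from gem5's Packet/Request types; for simplicity the sketch aligns both addresses to the line before comparing, which is a simplification of the original comparison.

# Sketch of the address-monitor idea in BaseCPU::mwait/doMonitor: align the
# watched address down to its cache line, then wake on an access to that line.
def line_base(addr, block_size=64):
    mask = ~(block_size - 1)
    return addr & mask

class AddressMonitor:
    def __init__(self, block_size=64):
        self.block_size = block_size
        self.armed = False
        self.waiting = False
        self.paddr = 0

    def mwait(self, paddr):
        self.paddr = line_base(paddr, self.block_size)
        self.armed = True
        self.waiting = True

    def do_monitor(self, access_paddr):
        # Wake only while armed and waiting, and only if the access lands
        # in the watched cache line (simplified: both sides line-aligned).
        if self.armed and self.waiting and \
                line_base(access_paddr, self.block_size) == self.paddr:
            self.waiting = False
            return True
        return False

mon = AddressMonitor()
mon.mwait(0x1234)                 # watches the 64-byte line 0x1200..0x123f
print(mon.do_monitor(0x1238))     # True  -> same cache line, core wakes
print(mon.do_monitor(0x1238))     # False -> already woken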
bsd-3-clause
IdentityModel/Thinktecture.IdentityModel.40
IdentityModel/Thinktecture.IdentityModel/Tokens/Http/AuthenticationConfiguration.cs
9913
/* * Copyright (c) Dominick Baier & Brock Allen. All rights reserved. * see license.txt */ using System; using System.Collections.Generic; using System.Linq; using Microsoft.IdentityModel.Claims; using Microsoft.IdentityModel.Tokens; namespace Thinktecture.IdentityModel.Tokens.Http { public class AuthenticationConfiguration { private bool _hasAuthorizationHeader; private bool _hasHeader; private bool _hasQueryString; private bool _hasCookie; private bool _hasClientCert; public List<AuthenticationOptionMapping> Mappings { get; set; } public string DefaultAuthenticationScheme { get; set; } public bool SendWwwAuthenticateResponseHeader { get; set; } public ClaimsAuthenticationManager ClaimsAuthenticationManager { get; set; } public bool InheritHostClientIdentity { get; set; } public bool EnableSessionToken { get; set; } public SessionTokenConfiguration SessionToken { get; set; } #region HasMapping Properties public bool HasAuthorizationHeaderMapping { get { return _hasAuthorizationHeader; } } public bool HasHeaderMapping { get { return _hasHeader; } } public bool HasQueryStringMapping { get { return _hasQueryString; } } public bool HasCookieMapping { get { return _hasCookie; } } public bool HasClientCertificateMapping { get { return _hasClientCert; } } #endregion public AuthenticationConfiguration() { Mappings = new List<AuthenticationOptionMapping>(); DefaultAuthenticationScheme = "unspecified"; SendWwwAuthenticateResponseHeader = true; InheritHostClientIdentity = false; EnableSessionToken = false; SessionToken = new SessionTokenConfiguration(); } public void AddAccessKey(SimpleSecurityTokenHandler handler, AuthenticationOptions options) { AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = options }); } public void AddAccessKey(SimpleSecurityTokenHandler.ValidateTokenDelegate validateTokenDelegate, AuthenticationOptions options) { AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { new SimpleSecurityTokenHandler(validateTokenDelegate) }, Options = options }); } public void AddSimpleWebToken(string issuer, string audience, string signingKey, AuthenticationOptions options) { var config = new SecurityTokenHandlerConfiguration(); var registry = new WebTokenIssuerNameRegistry(); registry.AddTrustedIssuer(issuer, issuer); config.IssuerNameRegistry = registry; var issuerResolver = new WebTokenIssuerTokenResolver(); issuerResolver.AddSigningKey(issuer, signingKey); config.IssuerTokenResolver = issuerResolver; config.AudienceRestriction.AllowedAudienceUris.Add(new Uri(audience)); var handler = new SimpleWebTokenHandler(); handler.Configuration = config; AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = options }); } public void AddJsonWebToken(string issuer, string audience, string signingKey, AuthenticationOptions options) { var config = new SecurityTokenHandlerConfiguration(); var registry = new WebTokenIssuerNameRegistry(); registry.AddTrustedIssuer(issuer, issuer); config.IssuerNameRegistry = registry; var issuerResolver = new WebTokenIssuerTokenResolver(); issuerResolver.AddSigningKey(issuer, signingKey); config.IssuerTokenResolver = issuerResolver; config.AudienceRestriction.AllowedAudienceUris.Add(new Uri(audience)); var handler = new JsonWebTokenHandler(); handler.Configuration = config; AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = options }); 
} public void AddBasicAuthentication(BasicAuthenticationSecurityTokenHandler.ValidateUserNameCredentialDelegate validationDelegate, bool retainPassword = false) { var handler = new BasicAuthenticationSecurityTokenHandler(validationDelegate); handler.RetainPassword = retainPassword; AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = AuthenticationOptions.ForAuthorizationHeader(scheme: "Basic") }); } public void AddClientCertificate(SecurityTokenHandler handler) { AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = AuthenticationOptions.ForClientCertificate() }); } public void AddClientCertificate(ClientCertificateMode mode, params string[] values) { var handler = new ClientCertificateHandler(mode, values); AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = AuthenticationOptions.ForClientCertificate() }); } public void AddSaml2(SecurityTokenHandlerConfiguration configuration, AuthenticationOptions options) { var handler = new HttpSaml2SecurityTokenHandler(); handler.Configuration = configuration; AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = options }); } public void AddSaml11(SecurityTokenHandlerConfiguration configuration, AuthenticationOptions options) { var handler = new HttpSamlSecurityTokenHandler(); handler.Configuration = configuration; AddMapping(new AuthenticationOptionMapping { TokenHandler = new SecurityTokenHandlerCollection { handler }, Options = options }); } private void AddMapping(AuthenticationOptionMapping mapping) { var hit = from m in Mappings where m.Options.RequestType == mapping.Options.RequestType && m.Options.Name == mapping.Options.Name && m.Options.Scheme == mapping.Options.Scheme select m; if (hit.FirstOrDefault() != null) { throw new InvalidOperationException("Duplicate authentication entry"); } Mappings.Add(mapping); switch (mapping.Options.RequestType) { case HttpRequestType.AuthorizationHeader: _hasAuthorizationHeader = true; break; case HttpRequestType.Header: _hasHeader = true; break; case HttpRequestType.QueryString: _hasQueryString = true; break; case HttpRequestType.Cookie: _hasCookie = true; break; case HttpRequestType.ClientCertificate: _hasClientCert = true; break; default: throw new InvalidOperationException("Invalid request type"); } } #region Mapping retrieval public bool TryGetAuthorizationHeaderMapping(string scheme, out SecurityTokenHandlerCollection handler) { handler = (from m in Mappings where m.Options.RequestType == HttpRequestType.AuthorizationHeader && m.Options.Name == "Authorization" && m.Options.Scheme == scheme select m.TokenHandler).SingleOrDefault(); return (handler != null); } public bool TryGetHeaderMapping(string headerName, out SecurityTokenHandlerCollection handler) { handler = (from m in Mappings where m.Options.RequestType == HttpRequestType.Header && m.Options.Name == headerName select m.TokenHandler).SingleOrDefault(); return (handler != null); } public bool TryGetQueryStringMapping(string paramName, out SecurityTokenHandlerCollection handler) { handler = (from m in Mappings where m.Options.RequestType == HttpRequestType.QueryString && m.Options.Name == paramName select m.TokenHandler).SingleOrDefault(); return (handler != null); } public bool TryGetClientCertificateMapping(out SecurityTokenHandlerCollection handler) { handler = (from m in Mappings where 
m.Options.RequestType == HttpRequestType.ClientCertificate select m.TokenHandler).SingleOrDefault(); return (handler != null); } #endregion } }
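AddMapping() above rejects a second mapping with the same request type, name, and scheme, and the TryGet* helpers look a token-handler collection back up by those keys. A compact Python sketch of the same registry idea; the request-type strings and the "handler" value are hypothetical stand-ins for the HttpRequestType enum and SecurityTokenHandlerCollection, not the library's API.

# Sketch of an authentication-mapping registry: one handler per
# (request_type, name, scheme) key, duplicates rejected, lookup by key.
class AuthConfig:
    def __init__(self):
        self._mappings = {}

    def add_mapping(self, request_type, name, scheme, handler):
        key = (request_type, name, scheme)
        if key in self._mappings:
            raise ValueError("Duplicate authentication entry")
        self._mappings[key] = handler

    def try_get(self, request_type, name, scheme=None):
        # Returns the handler or None, like the TryGet* methods above.
        return self._mappings.get((request_type, name, scheme))

cfg = AuthConfig()
cfg.add_mapping("authorization_header", "Authorization", "Basic", handler="basic-handler")
print(cfg.try_get("authorization_header", "Authorization", "Basic"))  # basic-handler
print(cfg.try_get("query_string", "access_token"))                    # None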
bsd-3-clause
huntergdavis/Quick-Grapher
js/plain/math.js
6148
//+ Jonas Raoni Soares Silva //@ http://jsfromhell.com/classes/math-processor [rev. #1] // extra math functions hunter added from SO examples and TA MathProcessor = function(){ var o = this; o.o = { "+": function(a, b){ return +a + b; }, "-": function(a, b){ return a - b; }, "%": function(a, b){ return a % b; }, "/": function(a, b){ return a / b; }, "*": function(a, b){ return a * b; }, "^": function(a, b){ return Math.pow(a, b); } }; o.s = { "^": 3, "*": 2, "/": 2, "%": 1, "+": 0, "-": 0 }; o.u = {"+": 1, "-": -1}, o.p = {"(": 1, ")": -1}; }; with({p: MathProcessor.prototype}){ p.methods = { // div: function(a, b){ return parseInt(a / b); }, // fra: function(a){ return a - parseInt(a); }, sin: function(n){ return Math.sin(n); }, cos: function(n){ return Math.cos(n); }, tan: function(n){ return Math.tan(n); }, asin: function(n){ return Math.asin(n); }, acos: function(n){ return Math.acos(n); }, atan: function(n){ return Math.atan(n); }, euler: function(n){ return (n * Math.E); }, natlogten: function(n){ return (n * Math.LN10); }, natlog: function(n){ return (n * Math.LN2); }, logtwoe: function(n){ return (n * Math.LOG2E); }, logtene: function(n){ return (n * Math.LOG10E); }, abs: function(n){ return (Math.abs(n)); }, ceil: function(n){ return (Math.ceil(n)); }, exp: function(n){ return (Math.exp(n)); }, floor: function(n){ return (Math.floor(n)); }, log: function(n){ return (Math.log(n)); }, random: function(n){ return (n * Math.random()); }, round: function(n){ return (Math.round(n)); }, sqrt: function(n){ return (Math.sqrt(n)); }, pi: function(n){ return (n * Math.PI); }, min: function(n1, n2){var result; var args = arguments;for (var i = 0; i <= args.length-1; i++){if(i == 0){result = args[0];};if(i > 0){result = Math.min(result,args[i]);}} return result; }, max: function(n1, n2){var result; var args = arguments;for (var i = 0; i <= args.length-1; i++){if(i == 0){result = args[0];};if(i > 0){result = Math.max(result,args[i]);}} return result; }, nextprime: function(n){var totest=Math.floor(n);if(totest >= 2){var smaller=1;while(smaller*smaller<=totest){totest++;smaller=2;while((totest%smaller>0)&&(smaller*smaller<=totest)){smaller++;}}return totest;}else{return 2;}}, fibonacci: function(n){var fibs = new Array();fibs[0] = 0; fibs[1] = 1; for(i=0; i<n; i++){fibs.push(fibs[0] + fibs[1]);fibs.shift();}return fibs[0];}, factorial: function(n){var result = 1;for (var i = 2; i <= n; i++) {result *= i;} return result;}, greater: function(n1, n2){ if (n1 > n2) { return 1; } return 0; }, less: function(n1, n2){ if (n1 < n2) { return 1; } return 0; }, sum: function(n1, n2, n3, n){ for(var r = 0, a, l = (a = arguments).length; l; r += a[--l]); return r; }, medium: function(n1, n2, n3, n){ for(var r = 0, a, l = (a = arguments).length; l; r += a[--l]); return (r / a.length); } }; p.parse = function(e){ for(var n, x, _ = this, o = [], s = [x = _.RPN(e.replace(/ /g, "").split(""))]; s.length;) for((n = s[s.length-1], --s.length); n[2]; o[o.length] = n, s[s.length] = n[3], n = n[2]); for(; (n = o.pop()) != undefined; n[0] = _.o[n[0]](isNaN(n[2][0]) ? _.f(n[2][0]) : n[2][0], isNaN(n[3][0]) ? _.f(n[3][0]) : n[3][0])); return +x[0]; }; p.RPN = function(e){ var x, r, _ = this, c = r = [, , , 0]; if(e[0] in _.u || !e.unshift("+")) for(; e[1] in _.u; e[0] = _.u[e.shift()] * _.u[e[0]] + 1 ? 
"+" : "-"); (c[3] = [_.u[e.shift()], c, , 0])[1][0] = "*", (r = [, , c, 0])[2][1] = r; (c[2] = _.v(e))[1] = c; (!e.length && (r = c)) || (e[0] in _.s && ((c = r)[0] = e.shift(), !e.length && _.error())); while(e.length){ if(e[0] in _.u){ for(; e[1] in _.u; e[0] = _.u[e.shift()] * _.u[e[0]] + 1 ? "+" : "-"); (c = c[3] = ["*", c, , 0])[2] = [-1, c, , 0]; } (c[3] = _.v(e))[1] = c; e[0] in _.s && (c = _.s[e[0]] > _.s[c[0]] ? ((c[3] = (x = c[3], c[2]))[1][2] = [e.shift(), c, x, 0])[2][1] = c[2] : r == c ? (r = [e.shift(), , c, 0])[2][1] = r : ((r[2] = (x = r[2], [e.shift(), r, ,0]))[2] = x)[1] = r[2]); } return r; }; p.v = function(e){ var i, j, l, _ = this; if("0123456789.".indexOf(e[0]) + 1){ for(i = -1, l = e.length; ++i < l && "0123456789.".indexOf(e[i]) + 1;); return [+e.splice(0,i).join(""), , , 0]; } else if(e[0] == "("){ for(i = 0, l = e.length, j = 1; ++i < l && (e[i] in _.p && (j += _.p[e[i]]), j);); return _.RPN(l = e.splice(0,i), l.shift(), !j && e.shift()); } else{ if(((j = e[0].toLowerCase()) >= "a" && j <= "z") || j == "_"){ for(i = 0; ((j = e[++i].toLowerCase()) >= "a" && j <= "z") || j == "_" || (j >= 0 && j <= 9);); if(j == "("){ for(var l = e.length, j = 1; ++i < l && (e[i] in _.p && (j += _.p[e[i]]), j);); return [e.splice(0,i+1).join(""), , , 0]; } } } _.error(); }; p.f = function(e){ var n, i = 0, _ = this; if(((e = e.split(""))[i] >= "a" && e[i] <= "z") || e[i] == "_"){ while((e[++i] >= "a" && e[i] <= "z") || e[i] == "_" || (e[i] >= 0 && e[i] <= 9)); if(e[i] == "("){ !_.methods[n = e.splice(0, i).join("")] && _.error("Função \"" + n + "\" não encontrada"), e.shift(); for(var a = [], i = -1, j = 1; e[++i] && (e[i] in _.p && (j += _.p[e[i]]), j);) j == 1 && e[i] == "," && (a.push(_.parse(e.splice(0, i).join(""))), e.shift(), i = -1); a.push(_.parse(e.splice(0,i).join(""))), !j && e.shift(); } return _.methods[n].apply(_, a); } }; p.error = function(s){ throw new Error("MathProcessor: " + (s || "Erro na expressão")); }; }
bsd-3-clause
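The MathProcessor source above drives its parser from two small tables: o.o maps each binary operator to its implementation and o.s gives its precedence (^ highest, then * and /, then %, then + and -). Purely to illustrate how such a precedence table is consumed, here is a self-contained precedence-climbing sketch in Python; it handles binary operators and parentheses only (no unary signs or named functions), and it is an independent example reusing the same precedence values, not the MathProcessor/RPN algorithm itself:

# Illustrative only: a tiny precedence-climbing evaluator that reuses the same
# binary operators and precedence levels as MathProcessor's o.o / o.s tables.
# Binary operators and parentheses only; no unary signs or named functions.
import operator

OPS = {"+": operator.add, "-": operator.sub, "%": operator.mod,
       "/": operator.truediv, "*": operator.mul, "^": operator.pow}
PREC = {"^": 3, "*": 2, "/": 2, "%": 1, "+": 0, "-": 0}

def tokenize(expr):
    tokens, i = [], 0
    expr = expr.replace(" ", "")
    while i < len(expr):
        if expr[i] in OPS or expr[i] in "()":
            tokens.append(expr[i])
            i += 1
        else:
            j = i
            while j < len(expr) and (expr[j].isdigit() or expr[j] == "."):
                j += 1
            tokens.append(float(expr[i:j]))
            i = j
    return tokens

def parse(tokens, pos=0, min_prec=0):
    if tokens[pos] == "(":                        # primary: parenthesised subexpression
        value, pos = parse(tokens, pos + 1, 0)
        pos += 1                                  # skip the closing ")"
    else:                                         # primary: a number
        value, pos = tokens[pos], pos + 1
    # consume operators whose precedence is high enough for this level
    while pos < len(tokens) and tokens[pos] in OPS and PREC[tokens[pos]] >= min_prec:
        op = tokens[pos]
        rhs, pos = parse(tokens, pos + 1, PREC[op] + 1)
        value = OPS[op](value, rhs)
    return value, pos

def evaluate(expr):
    return parse(tokenize(expr))[0]

assert evaluate("1+2*3") == 7
assert evaluate("(1+2)*3") == 9
assert evaluate("2^3 % 5") == 3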
arunchaganty/ctm-cvb
src/ctm-data.cpp
1779
/* * ctm-cvb * * Structures pertaining to manipulation of data */ #include <cstdio> #include <vector> #include <string> #include <cassert> #include <cstring> #include <iostream> #include <fstream> using namespace std; #include "ctm-data.h" #include "util.h" #define BUF_SIZE 50 #define MAX(x,y) ( ((x) >= (y)) ? (x) : (y) ) namespace ctm { Corpus Corpus::construct( string filename ) { Corpus corpus; assert( file_exists( filename ) ); // Open file FILE* file = fopen( filename.c_str(), "r" ); int maxTerm = 0; // For every line, in format: // M term1:count1 ... termM:countM while( !feof( file ) ) { Document doc; int M = 0; int length = 0; // Get M; stop on EOF or malformed input so a trailing blank line does not add an empty document if( fscanf( file, "%d", &M ) != 1 ) break; for( int i = 0; i < M; i++ ) { int vocab, count; // term_i, count_i fscanf( file, "%d:%d", &vocab, &count ); doc.terms.push_back( vocab ); doc.counts.push_back( count ); maxTerm = MAX( maxTerm, vocab ); length += count; } doc.length = length; fscanf( file, "\n" ); corpus.docs.push_back( doc ); } // Close the input file (it was previously leaked) fclose( file ); corpus.D = corpus.docs.size(); corpus.V = maxTerm + 1; return corpus; } void Corpus::write( string filename ) { // Open file fstream file ( filename.c_str(), fstream::out ); // For every document, a line for( vector<Document>::iterator doc = docs.begin(); doc != docs.end(); doc++ ) { file << doc->terms.size(); for( unsigned int i = 0; i < doc->terms.size(); i++ ) { // M term1:count1 ... termM:countM file << " " << doc->terms[ i ] << ":" << doc->counts[ i ]; } file << endl; } file.close(); } };
bsd-3-clause
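The Corpus::construct and Corpus::write routines above read and emit one document per line in the form "M term1:count1 ... termM:countM". As a rough sketch of the same round-trip, useful for preparing toy input files, here is a small Python equivalent; the helper names write_corpus and read_corpus are illustrative and not part of ctm-cvb:

# Illustrative Python round-trip for the ctm-cvb corpus text format:
# one document per line, "M term1:count1 ... termM:countM".
def write_corpus(path, docs):
    # docs: list of {term_id: count} dicts, one dict per document
    with open(path, "w") as f:
        for doc in docs:
            pairs = " ".join("%d:%d" % (t, c) for t, c in sorted(doc.items()))
            f.write("%d %s\n" % (len(doc), pairs))

def read_corpus(path):
    docs, vocab_size = [], 0
    with open(path) as f:
        for line in f:
            fields = line.split()
            if not fields:
                continue                    # skip blank lines instead of emitting empty documents
            m = int(fields[0])
            doc = {}
            for pair in fields[1:1 + m]:
                term, count = (int(x) for x in pair.split(":"))
                doc[term] = count
                vocab_size = max(vocab_size, term + 1)   # mirrors corpus.V = maxTerm + 1
            docs.append(doc)
    return docs, vocab_size

if __name__ == "__main__":
    toy = [{0: 3, 5: 1}, {2: 2}]
    write_corpus("toy_corpus.dat", toy)
    assert read_corpus("toy_corpus.dat") == (toy, 6)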
m-ober/byceps
tests/blueprints/metrics/test_metrics.py
1195
""" :Copyright: 2006-2020 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ import pytest from ...conftest import database_recreated # To be overridden by test parametrization @pytest.fixture def config_overrides(): return {} @pytest.fixture def client(config_overrides, make_admin_app, db): app = make_admin_app(**config_overrides) with app.app_context(): with database_recreated(db): yield app.test_client() @pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': True}]) def test_metrics(client): response = client.get('/metrics') assert response.status_code == 200 assert response.content_type == 'text/plain; version=0.0.4; charset=utf-8' assert response.mimetype == 'text/plain' assert response.get_data(as_text=True) == ( 'users_active_count 0\n' 'users_uninitialized_count 0\n' 'users_suspended_count 0\n' 'users_deleted_count 0\n' 'users_total_count 0\n' ) @pytest.mark.parametrize('config_overrides', [{'METRICS_ENABLED': False}]) def test_disabled_metrics(client): response = client.get('/metrics') assert response.status_code == 404
bsd-3-clause
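The test above pins the exact plain-text body served at /metrics: one "name value" line per counter. As a minimal sketch, independent of byceps and with names chosen purely for illustration, this is how such an exposition-style body can be rendered from a list of counters:

# Illustrative only (not byceps code): build the plain-text metrics body that
# the test above asserts, i.e. one "name value" line per counter.
def render_metrics(counters):
    return "".join("%s %d\n" % (name, value) for name, value in counters)

counters = [
    ("users_active_count", 0),
    ("users_uninitialized_count", 0),
    ("users_suspended_count", 0),
    ("users_deleted_count", 0),
    ("users_total_count", 0),
]

expected = (
    "users_active_count 0\n"
    "users_uninitialized_count 0\n"
    "users_suspended_count 0\n"
    "users_deleted_count 0\n"
    "users_total_count 0\n"
)
assert render_metrics(counters) == expected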
luisxue/TreesFramework
Application/library/elements.php
24877
<?php use Phalcon\Mvc\User\Component; /** * Elements * * Helps to build UI elements for the application */ class elements extends Component { protected $_logger; private function setLogger($account) { if($this->_logger==false) $this->_logger=new Loggers($account,"Admin"); return $this; } private function getLogger() { return $this->_logger; } private $_headerMenu = array( 'navbar-left' => array( 'index' => array( 'caption' => '首页', 'action' => 'home', 'class' => 'icon-home' ), 'sysuser' => array( 'caption' => '系统用户管理', 'action' => 'index', 'class' => 'icon-basket', 'child'=>array( 'index'=>array( 'caption' => '系统用户列表', 'action' => 'index', 'class' => 'icon-tag'), 'new'=>array( 'caption' => '创建用户', 'action' => 'new', 'class' => 'icon-pencil'), 'sysuserLog'=>array( 'caption' => '操作日志', 'action' => 'sysuserLog', 'class' => 'icon-basket') ) ), 'roles' => array( 'caption' => '角色管理', 'action' => 'index', 'class' => 'icon-rocket', 'child'=>array( 'index'=>array( 'caption' => '角色列表', 'action' => 'index', 'class' => 'icon-tag'), 'new'=>array( 'caption' => '创建角色', 'action' => 'new', 'class' => 'icon-pencil') ) ), 'ad' => array( 'caption' => '广告主管理', 'action' => 'index', 'class' => 'icon-star', 'child'=>array( 'index'=>array( 'caption' => '广告主列表', 'action' => 'index', 'class' => 'icon-tag'), 'new'=>array( 'caption' => '添加广告主', 'action' => 'new', 'class' => 'icon-pencil'), 'prepayrecord'=>array( 'caption' => '预付记录', 'action' => 'prepayrecord', 'class' => 'icon-docs') ) ), 'app' => array( 'caption' => '应用管理', 'action' => 'index', 'class' => 'icon-diamond', 'child'=>array( 'index'=>array( 'caption' => '应用列表', 'action' => 'index', 'class' => 'icon-tag'), 'new'=>array( 'caption' => '添加应用', 'action' => 'new', 'class' => 'icon-pencil') ) ), 'taskinfo' => array( 'caption' => '任务管理', 'action' => 'index', 'class' => 'icon-puzzle', 'child'=>array( 'index'=>array( 'caption' => '任务列表', 'action' => 'index', 'class' => 'icon-tag'), 'new'=>array( 'caption' => '添加任务', 'action' => 'new', 'class' => 'icon-pencil'), 'reportforms'=>array( 'caption' => '任务报表', 'action' => 'reportforms', 'class' => 'icon-docs'), 'taskpush'=>array( 'caption' => '任务推送', 'action' => 'taskpush', 'class' => 'icon-docs'), 'selectIDFA'=>array( 'caption' => '查询IDFA', 'action' => 'selectIDFA', 'class' => 'icon-docs') ) ), 'user' => array( 'caption' => '用户管理', 'action' => 'index', 'class' => 'icon-user', 'child'=>array( 'index'=>array( 'caption' => '用户列表', 'action' => 'index', 'class' => 'icon-tag'), 'usermoney'=>array( 'caption' => '提现审核', 'action' => 'usermoney', 'class' => 'icon-tag') ) ), 'interfaceinfo' => array( 'caption' => '对接管理', 'action' => 'index', 'class' => 'icon-equalizer', 'child'=>array( 'selectIDFA'=>array( 'caption' => '去重记录', 'action' => 'selectIDFA', 'class' => 'icon-tag'), 'clickinfo'=>array( 'caption' => '点击记录', 'action' => 'clickinfo', 'class' => 'icon-pencil'), 'callbackinfo'=>array( 'caption' => '回调记录', 'action' => 'callbackinfo', 'class' => 'icon-docs') ) ), 'distribute' => array( 'caption' => ' 分发管理', 'action' => 'index', 'class' => 'icon-folder', 'child'=>array( 'channel'=>array( 'caption' => '分发渠道列表', 'action' => 'index', 'class' => 'icon-tag'), 'task'=>array( 'caption' => '分发任务列表', 'action' => 'task', 'class' => 'icon-pencil'), 'record'=>array( 'caption' => '分发任务记录', 'action' => 'record', 'class' => 'icon-docs') ) ), 'popularize' => array( 'caption' => ' 推广管理', 'action' => 'channnel', 'class' => 'icon-wallet', 'child'=>array( 'channnel'=>array( 'caption' => '推广渠道', 'action' => 'channnel', 'class' => 'icon-tag'), 
'record'=>array( 'caption' => '推广记录', 'action' => 'record', 'class' => 'icon-pencil') ) ), 'thirdmark' => array( 'caption' => ' 公共号管理', 'action' => 'index', 'class' => 'icon-wallet', 'child'=>array( 'index'=>array( 'caption' => '公众号', 'action' => 'index', 'class' => 'icon-tag'), 'userinfo'=>array( 'caption' => '公众号用户', 'action' => 'userinfo', 'class' => 'icon-pencil'), 'trygame'=>array( 'caption' => '公众号用户试玩', 'action' => 'trygame', 'class' => 'icon-pencil') ) ), ) ); private $_tabs = array( 'Invoices' => array( 'controller' => 'invoices', 'action' => 'index', 'any' => false ), 'Companies' => array( 'controller' => 'companies', 'action' => 'index', 'any' => true ), 'Products' => array( 'controller' => 'products', 'action' => 'index', 'any' => true ), 'Product Types' => array( 'controller' => 'producttypes', 'action' => 'index', 'any' => true ), 'Your Profile' => array( 'controller' => 'invoices', 'action' => 'profile', 'any' => false ) ); /** * Builds header menu with left and right items * * @return string */ public function getMenu() { $auth = $this->session->get('auth_polyRich'); $this->persistent->currentuser= Sysuser::findFirst($auth['user_id']); if ($auth) { /* $this->_headerMenu['navbar-right']['login'] = array( 'caption' => 'Log Out', 'action' => 'end' ); */ $query = $this->modelsManager->createQuery("SELECT SysRight.rightinfo FROM SysRight , SysRoleRight where SysRight.ID=SysRoleRight.rightId and SysRoleRight.roleId = :roleid:"); $userrights = $query->execute(array( 'roleid' => $this->persistent->currentuser->getRoleid() )); $sysrights=SysRight::find(); $roles =array(); foreach($userrights as $row) { array_push($roles, $row->rightinfo); } foreach($sysrights as $menu1) { if(!in_array($menu1->getRightinfo(),$roles)) { if($menu1->getRightinfo()!="index") unset($this->_headerMenu['navbar-left'][$menu1->getRightinfo()]); } } } else { //unset($this->_headerMenu['navbar-left']['login']); } $controllerName = $this->view->getControllerName(); $action = $this->view->getActionName(); foreach ($this->_headerMenu as $position => $menu) { // echo '<li>'; // echo '<ul class="nav navbar-nav ', $position, '">'; foreach ($menu as $controller => $option) { if($controller!="index") { if ($controllerName == $controller) { echo '<li class="active open">'; echo '<a href="javascript:;"><i class="' . $option['class'] . '"></i> <span class="title"> ' . $option['caption'] . '</span> <span class="selected"></span><span class="arrow "></span></a>'; } else { echo '<li>'; echo '<a href="javascript:;"><i class="' . $option['class'] . '"></i> <span class="title"> ' . $option['caption'] . '</span><span class="arrow "></span></a>'; } echo '<ul class="sub-menu">'; foreach ($option['child'] as $child => $coption) { if($action==$child) { echo '<li class="active">'; } else { echo '<li>'; } echo $this->tag->linkTo($controller . '/' . $child, '<i class="' . $coption['class'] . '"></i>'. $coption['caption']); echo '</li>'; } echo '</ul>'; echo '</li>'; } else { if ($controllerName == $controller) { echo '<li class="active open">'; echo $this->tag->linkTo($controller . '/' . $option['action'], '<i class="' . $option['class'] . '"></i> <span class="title"> ' . $option['caption'] . '</span> <span class="selected"></span><span class="arrow "></span>'); } else { echo '<li>'; echo $this->tag->linkTo($controller . '/' . $option['action'], '<i class="' . $option['class'] . '"></i> <span class="title"> ' . $option['caption'] . 
'</span><span class="arrow "></span>'); } echo '</li>'; } } // echo '</ul>'; // echo '</li>'; } } public function CloseWinXinPage() { $controllerName = $this->view->getControllerName(); $action = $this->view->getActionName(); if($controllerName=="weixin"&&$action=="downdzg") { echo ' setTimeout(function () { wx.closeWindow(); },1800)'; } } private function LoadRanking() { /* $controllerName = $this->view->getControllerName(); $action = $this->view->getActionName(); if($controllerName=="weixin"&&$action=="ranking") { echo 'window.onload = function(){   $.post(\'/weixin/rankinfo\',{type:0},function(data){ eval("data="+data); if(data[\'success\']){ $(".ranking-task").html(data[\'data\'][\'content\']); }else{ alert(data[\'msg\']); } });   }); '; } */ } public function GetHomeJs() { $controllerName = $this->view->getControllerName(); $action = $this->view->getActionName(); if($controllerName=="weixin"&&$action=="index") { echo ' <script src="http://code.jquery.com/jquery-2.1.3.min.js"></script> <script src="http://code.jquery.com/mobile/1.3.2/jquery.mobile-1.3.2.min.js"></script> '; } } public function GetCheckedJs() { $action = $this->view->getActionName(); if(!($action=="usermoney")) { echo ' Metronic.init();'; } } public function GetJs() { $controllerName = $this->view->getControllerName(); $action = $this->view->getActionName(); if(($controllerName=="sysuser"&&$action=="sysuserLog")||($controllerName=="ad"&&$action=="index")|| ($controllerName=="app"&&$action=="index")||($controllerName=="user"&&$action=="usermoney")||($controllerName=="user"&&$action=="openapp")||($controllerName=="user"&&$action=="friendprice")||($controllerName=="user"&&$action=="otherprice")||($controllerName=="user"&&$action=="friendrecord")|| ($controllerName=="user"&&$action=="index")||($controllerName=="user"&&$action=="trygame")|| ($controllerName=="taskinfo"&&$action=="index")|| ($controllerName=="taskinfo"&&$action=="reportforms")||($controllerName=="taskinfo"&&$action=="finsh")||($controllerName=="taskinfo"&&$action=="doneing")) { echo ' TableAjax.init(); '; } else { echo ' Index.init(); Index.initDashboardDaterange(); Index.initJQVMAP(); Index.initCalendar(); Index.initCharts(); Index.initChat(); Index.initMiniCharts(); Index.initIntro(); Tasks.initDashboardWidget()'; } } //设置定时刷新 public function SetWeiXinIndex() { $controllerName = $this->view->getControllerName(); $action1 = $this->view->getActionName(); if($controllerName=="weixin"&&$action1=="index") { echo 'setTimeout(function () { DaZhangGuiStatu(); }, 200); setInterval(function () { DaZhangGuiStatu(); }, 5000); function DaZhangGuiStatu() { var configPathR=""; // var configPathR="/polyrichweb"; $.ajax({ type: "POST", url: configPathR + "/weixin/daZhangGuiStatus", data: { t: 3 }, beforeSend: function () { }, success: function (data) { eval("data=" + data); if (data[\'success\']) { $(".mask").attr("style","display:none"); $("#Newdazhanggui").attr("style","display:none"); $("#Actiondzg").attr("style","display:none"); $("#dazhanggui1").attr("class", "manager display"); $("#dazhanggui2").attr("class", "manager01"); } else { if(data[\'code\']==206) { $(".mask").attr("style","display:block"); $("#Actiondzg").attr("style","display:block"); $(".mask").css("height",$(document).height()); $(".mask").css("width",$(document).width()); $("#dazhanggui2").attr("class", "manager01 display"); $("#dazhanggui1").attr("class", "manager"); } else if(data[\'code\']==400) { $(".mask").attr("style","display:block"); $("#Newdazhanggui").attr("style","display:block"); 
$("#Newdazhanggui").css("height","100"); $(".mask").css("height",$(document).height()); $(".mask").css("width",$(document).width()); $("#dazhanggui2").attr("class", "manager01 display"); $("#dazhanggui1").attr("class", "manager"); } else { $(".mask").attr("style","display:none"); $("#Newdazhanggui").attr("style","display:none"); $("#Actiondzg").attr("style","display:none"); $("#dazhanggui2").attr("class", "manager01 display"); $("#dazhanggui1").attr("class", "manager"); } } } }); }'; } } public function SetRightMenu() { $controllerName = $this->view->getControllerName(); $action1 = $this->view->getActionName(); if(($controllerName=="weixin"&&$action1!="invitepage"&&$action1!="downdzg"&&$action1!="index"&&$action1!="qrRedirect"&&$action1!="qrCode")||$controllerName=="weigo") { echo'wx.hideAllNonBaseMenuItem({ success: function () { } });'; } else { $this->setLogger("weixin"); if ($this->session->has("auth_weixin")) { $auth = $this->session->get('auth_weixin'); $userinfo = Userinfo::findFirstByID($auth['wuserid']); if($userinfo) { $wusernickname=$userinfo->getNickname(); // $wuserheaderurl= $userinfo->getHeaderurl(); $wuserheaderurl="https://mmbiz.qlogo.cn/mmbiz/YnfDoNY49nQic3HoBniako8hcKc3QAM4ma2WJ4teqMwd6JXKuib48PGfPfm8gArBqIlSS0uWtdTbbYibeNsNMiaFMWA/0"; $wuserid=$userinfo->getId(); } $this->getLogger()->logInfo('邀请者地址:https://open.weixin.qq.com/connect/oauth2/authorize?appid=wxe3f13ae65402c4c8&redirect_uri=http://jyq.ihmedia.com.cn/weixin/invitepage?wuserid='.$wuserid.'&response_type=code&scope=snsapi_base&state=1#wechat_redirect'); echo ' wx.onMenuShareAppMessage({ title: \''. str_replace("??", "", $wusernickname).':关注聚有钱下载应用就赚钱\', desc: \' 聚有钱\', link: \'https://open.weixin.qq.com/connect/oauth2/authorize?appid=wx89f2077a35b48a40&redirect_uri=http://jyq.ihmedia.com.cn/weixin/invitepage?wuserid='.$wuserid.'&response_type=code&scope=snsapi_base&state=1#wechat_redirect\', imgUrl:\''.$wuserheaderurl.'\', trigger: function (res) { // 不要尝试在trigger中使用ajax异步请求修改本次分享的内容,因为客户端分享操作是一个同步操作,这时候使用ajax的回包会还没有返回 // alert(JSON.stringify(res)); }, success: function (res) { // alert(JSON.stringify(res)); }, cancel: function (res) { // alert(JSON.stringify(res)); }, fail: function (res) { //alert(JSON.stringify(res)); } }); wx.showAllNonBaseMenuItem(); wx.onMenuShareTimeline({ title: \''. str_replace("??", "", $wusernickname).':关注聚有钱下载应用就赚钱\', link: \'https://open.weixin.qq.com/connect/oauth2/authorize?appid=wx89f2077a35b48a40&redirect_uri=http://jyq.ihmedia.com.cn/weixin/invitepage?wuserid='.$wuserid.'&response_type=code&scope=snsapi_base&state=1#wechat_redirect\', imgUrl: \''.$wuserheaderurl.'\', success: function () { // 用户确认分享后执行的回调函数 }, cancel: function () { // 用户取消分享后执行的回调函数 } }); '; } } } /** * Returns menu tabs */ public function getTabs() { $controllerName = $this->view->getControllerName(); $actionName = $this->view->getActionName(); echo '<ul class="nav nav-tabs">'; foreach ($this->_tabs as $caption => $option) { if ($option['controller'] == $controllerName && ($option['action'] == $actionName || $option['any'])) { echo '<li class="active">'; } else { echo '<li>'; } echo $this->tag->linkTo($option['controller'] . '/' . 
$option['action'], $caption), '</li>'; } echo '</ul>'; } public function getLogoutHtml() { $auth = $this->session->get('auth_polyRich'); if ($auth) { $this->persistent->currentuser= Sysuser::findFirst($auth['user_id']); } echo $this->persistent->currentuser->getName(); } public function getCheckFiledForRigth($ID,$new) { $auth = $this->session->get('auth_polyRich'); //Query the active user $sysrights=SysRight::find(); /* $userrights=SysRoleRight::find(array( "(roleId Like :roleId:)", 'bind' => array('roleId' => '%' . . '%') )); */ $query = $this->modelsManager->createQuery("SELECT rightId FROM SysRoleRight WHERE roleId = :roleid:"); $userrights = $query->execute(array( 'roleid' => $ID )); $rights =array(); foreach($userrights as $row) { array_push($rights, $row->rightId); } foreach ($sysrights as $sysright) { if(in_array($sysright->getId(),$rights)&&$new) { echo '<input type="checkbox" name="hobby[]" value="' . $sysright->getId() . '" checked="checked" />' . $sysright->getDescription() . '<br>'; } else echo '<input type="checkbox" name="hobby[]" value="' . $sysright->getId() . '" />' . $sysright->getDescription() . '<br>'; /* echo '&nbsp;&nbsp;&nbsp&nbsp;&nbsp;&nbsp&nbsp;&nbsp;&nbsp<input type="checkbox" name="hobby[]" value="1"/>查询 &nbsp;&nbsp;&nbsp; <input type="checkbox" name="hobby[]" value="2"/>创建 &nbsp;&nbsp;&nbsp; <input type="checkbox" name="hobby[]" value="3"/>编辑 &nbsp;&nbsp;&nbsp; <input type="checkbox" name="hobby[]" value="3"/>删除 <br><br>'; */ } } }
bsd-3-clause
vaas-krish/openthread
src/core/thread/mle_router_ftd.hpp
29855
/* * Copyright (c) 2016, The OpenThread Authors. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** * @file * This file includes definitions for MLE functionality required by the Thread Router and Leader roles. */ #ifndef MLE_ROUTER_HPP_ #define MLE_ROUTER_HPP_ #include "utils/wrap_string.h" #include "coap/coap.hpp" #include "coap/coap_header.hpp" #include "common/timer.hpp" #include "common/trickle_timer.hpp" #include "mac/mac_frame.hpp" #include "meshcop/meshcop_tlvs.hpp" #include "net/icmp6.hpp" #include "net/udp6.hpp" #include "thread/mle.hpp" #include "thread/mle_tlvs.hpp" #include "thread/thread_tlvs.hpp" #include "thread/topology.hpp" namespace ot { namespace Mle { class AddressResolver; class MeshForwarder; class NetworkDataLeader; /** * @addtogroup core-mle-router * * @brief * This module includes definitions for MLE functionality required by the Thread Router and Leader roles. * * @{ */ /** * This class implements MLE functionality required by the Thread Router and Leader roles. * */ class MleRouter: public Mle { friend class Mle; public: /** * This constructor initializes the object. * * @param[in] aThreadNetif A reference to the Thread network interface. * */ explicit MleRouter(ThreadNetif &aThreadNetif); /** * This method indicates whether or not the Router Role is enabled. * * @retval true If the Router Role is enabled. * @retval false If the Router Role is not enabled. * */ bool IsRouterRoleEnabled(void) const; /** * This method sets whether or not the Router Role is enabled. * * If @p aEnable is false and the device is currently operating as a router, this call will cause the device to * detach and attempt to reattach as a child. * * @param[in] aEnabled TRUE to enable the Router Role, FALSE otherwise. * */ void SetRouterRoleEnabled(bool aEnabled); /** * This method indicates whether a node is the only router on the network. * * @retval TRUE It is the only router in the network. * @retval FALSE It is a child or is not a single router in the network. * */ bool IsSingleton(void); /** * This method generates an Address Solicit request for a Router ID. 
* * @param[in] aStatus The reason for requesting a Router ID. * * @retval OT_ERROR_NONE Successfully generated an Address Solicit message. * @retval OT_ERROR_NOT_CAPABLE Device is not capable of becoming a router * @retval OT_ERROR_INVALID_STATE Thread is not enabled * */ otError BecomeRouter(ThreadStatusTlv::Status aStatus); /** * This method causes the Thread interface to become a Leader and start a new partition. * * @retval OT_ERROR_NONE Successfully become a Leader and started a new partition. * @retval OT_ERROR_NOT_CAPABLE Device is not capable of becoming a leader * @retval OT_ERROR_INVALID_STATE Thread is not enabled * */ otError BecomeLeader(void); /** * This method returns the number of active routers. * * @returns The number of active routers. * */ uint8_t GetActiveRouterCount(void) const; /** * This method returns the time in seconds since the last Router ID Sequence update. * * @returns The time in seconds since the last Router ID Sequence update. * */ uint32_t GetLeaderAge(void) const; /** * This method returns the Leader Weighting value for this Thread interface. * * @returns The Leader Weighting value for this Thread interface. * */ uint8_t GetLeaderWeight(void) const { return mLeaderWeight; } /** * This method sets the Leader Weighting value for this Thread interface. * * @param[in] aWeight The Leader Weighting value. * */ void SetLeaderWeight(uint8_t aWeight) { mLeaderWeight = aWeight; } /** * This method returns the fixed Partition Id of Thread network partition for certification testing. * * @returns The Partition Id for this Thread network partition. * */ uint32_t GetLeaderPartitionId(void) const { return mFixedLeaderPartitionId; } /** * This method sets the fixed Partition Id for Thread network partition for certification testing. * * @param[in] aPartitionId The Leader Partition Id. * */ void SetLeaderPartitionId(uint32_t aPartitionId) { mFixedLeaderPartitionId = aPartitionId; } /** * This method sets the preferred Router Id. Upon becoming a router/leader the node * attempts to use this Router Id. If the preferred Router Id is not set or if it * can not be used, a randomly generated router Id is picked. * This property can be set when he device role is detached or disabled. * * @param[in] aRouterId The preferred Router Id. * * @retval OT_ERROR_NONE Successfully set the preferred Router Id. * @retval OT_ERROR_INVALID_STATE Could not set (role is other than detached and disabled) * */ otError SetPreferredRouterId(uint8_t aRouterId); /** * This method gets the Partition Id which the device joined successfully once. * */ uint32_t GetPreviousPartitionId(void) const { return mPreviousPartitionId; } /** * This method sets the Partition Id which the device joins successfully. * * @param[in] aPartitionId The Partition Id. * */ void SetPreviousPartitionId(uint32_t aPartitionId) { mPreviousPartitionId = aPartitionId; } /** * This method sets the Router Id. * * @param[in] aRouterId The Router Id. * */ void SetRouterId(uint8_t aRouterId); /** * This method returns the next hop towards an RLOC16 destination. * * @param[in] aDestination The RLOC16 of the destination. * * @returns A RLOC16 of the next hop if a route is known, kInvalidRloc16 otherwise. * */ uint16_t GetNextHop(uint16_t aDestination); /** * This method returns the NETWORK_ID_TIMEOUT value. * * @returns The NETWORK_ID_TIMEOUT value. * */ uint8_t GetNetworkIdTimeout(void) const { return mNetworkIdTimeout; } /** * This method sets the NETWORK_ID_TIMEOUT value. * * @param[in] aTimeout The NETWORK_ID_TIMEOUT value. 
* */ void SetNetworkIdTimeout(uint8_t aTimeout) { mNetworkIdTimeout = aTimeout; } /** * This method returns the route cost to a RLOC16. * * @param[in] aRloc16 The RLOC16 of the destination. * * @returns The route cost to a RLOC16. * */ uint8_t GetRouteCost(uint16_t aRloc16) const; /** * This method returns the link cost to the given Router. * * @param[in] aRouterId The Router ID. * * @returns The link cost to the Router. * */ uint8_t GetLinkCost(uint8_t aRouterId); /** * This method returns the minimum cost to the given router. * * @param[in] aRloc16 The short address of the given router. * * @returns The minimum cost to the given router (via direct link or forwarding). * */ uint8_t GetCost(uint16_t aRloc16); /** * This method returns the ROUTER_SELECTION_JITTER value. * * @returns The ROUTER_SELECTION_JITTER value. * */ uint8_t GetRouterSelectionJitter(void) const { return mRouterSelectionJitter; } /** * This method sets the ROUTER_SELECTION_JITTER value. * * @returns The ROUTER_SELECTION_JITTER value. * */ otError SetRouterSelectionJitter(uint8_t aRouterJitter); /** * This method returns the current Router ID Sequence value. * * @returns The current Router ID Sequence value. * */ uint8_t GetRouterIdSequence(void) const { return mRouterIdSequence; } /** * This method returns the ROUTER_UPGRADE_THRESHOLD value. * * @returns The ROUTER_UPGRADE_THRESHOLD value. * */ uint8_t GetRouterUpgradeThreshold(void) const { return mRouterUpgradeThreshold; } /** * This method sets the ROUTER_UPGRADE_THRESHOLD value. * * @returns The ROUTER_UPGRADE_THRESHOLD value. * */ void SetRouterUpgradeThreshold(uint8_t aThreshold) { mRouterUpgradeThreshold = aThreshold; } /** * This method returns the ROUTER_DOWNGRADE_THRESHOLD value. * * @returns The ROUTER_DOWNGRADE_THRESHOLD value. * */ uint8_t GetRouterDowngradeThreshold(void) const { return mRouterDowngradeThreshold; } /** * This method sets the ROUTER_DOWNGRADE_THRESHOLD value. * * @returns The ROUTER_DOWNGRADE_THRESHOLD value. * */ void SetRouterDowngradeThreshold(uint8_t aThreshold) { mRouterDowngradeThreshold = aThreshold; } /** * This method release a given Router ID. * * @param[in] aRouterId The Router ID to release. * * @retval OT_ERROR_NONE Successfully released the Router ID. * @retval OT_ERROR_INVALID_STATE The Router ID was not allocated. * */ otError ReleaseRouterId(uint8_t aRouterId); /** * This method removes a link to a neighbor. * * @param[in] aAddress The link address of the neighbor. * * @retval OT_ERROR_NONE Successfully removed the neighbor. * @retval OT_ERROR_NOT_FOUND Could not find the neighbor. * */ otError RemoveNeighbor(const Mac::Address &aAddress); /** * This method removes a link to a neighbor. * * @param[in] aNeighbor A reference to the neighbor object. * * @retval OT_ERROR_NONE Successfully removed the neighbor. * */ otError RemoveNeighbor(Neighbor &aNeighbor); /** * This method returns a pointer to a Child object. * * @param[in] aAddress The address of the Child. * * @returns A pointer to the Child object. * */ Child *GetChild(uint16_t aAddress); /** * This method returns a pointer to a Child object. * * @param[in] aAddress A reference to the address of the Child. * * @returns A pointer to the Child object. * */ Child *GetChild(const Mac::ExtAddress &aAddress); /** * This method returns a pointer to a Child object. * * @param[in] aAddress A reference to the address of the Child. * * @returns A pointer to the Child corresponding to @p aAddress, NULL otherwise. 
* */ Child *GetChild(const Mac::Address &aAddress); /** * This method returns a child index for the Child object. * * @param[in] aChild A reference to the Child object. * * @returns The index for the Child corresponding to @p aChild. * */ uint8_t GetChildIndex(const Child &aChild); /** * This method returns a pointer to a Child array. * * @param[out] aNumChildren A pointer to output the number of children. * * @returns A pointer to the Child array. * */ Child *GetChildren(uint8_t *aNumChildren); /** * This method sets the max children allowed value for this Thread interface. * * @param[in] aMaxChildren The max children allowed value. * * @retval OT_ERROR_NONE Successfully set the max. * @retval OT_ERROR_INVALID_ARGS If @p aMaxChildren is not in the range [1, kMaxChildren]. * @retval OT_ERROR_INVALID_STATE If MLE has already been started. * */ otError SetMaxAllowedChildren(uint8_t aMaxChildren); /** * This method restores children information from non-volatile memory. * * @retval OT_ERROR_NONE Successfully restored children information. * @retval OT_ERROR_FAILED The saved child info in non-volatile memory is invalid. * @retval OT_ERROR_NO_BUFS More children in settings than max children. * */ otError RestoreChildren(void); /** * This method remove a stored child information from non-volatile memory. * * @param[in] aChildRloc16 The child RLOC16 to remove. * * @retval OT_ERROR_NONE Successfully remove child. * @retval OT_ERROR_NOT_FOUND There is no specified child stored in non-volatile memory. * */ otError RemoveStoredChild(uint16_t aChildRloc16); /** * This method store a child information into non-volatile memory. * * @param[in] aChildRloc16 The child RLOC16 to store. * * @retval OT_ERROR_NONE Successfully store child. * @retval OT_ERROR_NO_BUFS Insufficient available buffers to store child. * */ otError StoreChild(uint16_t aChildRloc16); /** * This method refreshes all the saved children information in non-volatile memory by first erasing any saved * child information in non-volatile memory and then saving all children info. * * @retval OT_ERROR_NONE Successfully refreshed all children info in non-volatile memory * @retval OT_ERROR_NO_BUFS Insufficient available buffers to store child. * */ otError RefreshStoredChildren(void); /** * This method returns a pointer to a Neighbor object. * * @param[in] aAddress The address of the Neighbor. * * @returns A pointer to the Neighbor corresponding to @p aAddress, NULL otherwise. * */ Neighbor *GetNeighbor(uint16_t aAddress); /** * This method returns a pointer to a Neighbor object. * * @param[in] aAddress The address of the Neighbor. * * @returns A pointer to the Neighbor corresponding to @p aAddress, NULL otherwise. * */ Neighbor *GetNeighbor(const Mac::ExtAddress &aAddress); /** * This method returns a pointer to a Neighbor object. * * @param[in] aAddress The address of the Neighbor. * * @returns A pointer to the Neighbor corresponding to @p aAddress, NULL otherwise. * */ Neighbor *GetNeighbor(const Mac::Address &aAddress); /** * This method returns a pointer to a Neighbor object. * * @param[in] aAddress The address of the Neighbor. * * @returns A pointer to the Neighbor corresponding to @p aAddress, NULL otherwise. * */ Neighbor *GetNeighbor(const Ip6::Address &aAddress); /** * This method retains diagnostic information for an attached child by Child ID or RLOC16. * * @param[in] aChildId The Child ID or RLOC16 for an attached child. * @param[out] aChildInfo The child information. 
* */ otError GetChildInfoById(uint16_t aChildId, otChildInfo &aChildInfo); /** * This method retains diagnostic information for an attached child by the internal table index. * * @param[in] aChildIndex The table index. * @param[out] aChildInfo The child information. * */ otError GetChildInfoByIndex(uint8_t aChildIndex, otChildInfo &aChildInfo); /** * This method gets the next neighbor information. It is used to iterate through the entries of * the neighbor table. * * @param[inout] aIterator A reference to the iterator context. To get the first neighbor entry it should be set to OT_NEIGHBOR_INFO_ITERATOR_INIT. * @param[out] aNeighInfo The neighbor information. * * @retval OT_ERROR_NONE Successfully found the next neighbor entry in table. * @retval OT_ERROR_NOT_FOUND No subsequent neighbor entry exists in the table. * */ otError GetNextNeighborInfo(otNeighborInfoIterator &aIterator, otNeighborInfo &aNeighInfo); /** * This method returns a pointer to a Router array. * * @param[out] aNumRouters A pointer to output the number of routers. * * @returns A pointer to the Router array. * */ Router *GetRouters(uint8_t *aNumRouters); /** * This method returns a pointer to a Router entry. * * @param[in] aRouterId The Router ID. * * @returns A pointer to a Router entry or NULL if @p aRouterId is out-of-range. * */ Router *GetRouter(uint8_t aRouterId); /** * This method returns a pointer to a Router entry. * * @param[in] aRouterId The Router ID. * * @returns A pointer to a Router entry or NULL if @p aRouterId is out-of-range. * */ const Router *GetRouter(uint8_t aRouterId) const; /** * This method retains diagnostic information for a given router. * * @param[in] aRouterId The router ID or RLOC16 for a given router. * @param[out] aRouterInfo The router information. * */ otError GetRouterInfo(uint16_t aRouterId, otRouterInfo &aRouterInfo); /** * This method indicates whether or not the given Thread partition attributes are preferred. * * @param[in] aSingletonA Whether or not the Thread Partition A has a single router. * @param[in] aLeaderDataA A reference to Thread Partition A's Leader Data. * @param[in] aSingletonB Whether or not the Thread Partition B has a single router. * @param[in] aLeaderDataB A reference to Thread Partition B's Leader Data. * * @retval 1 If partition A is preferred. * @retval 0 If partition A and B have equal preference. * @retval -1 If partition B is preferred. * */ static int ComparePartitions(bool aSingletonA, const LeaderDataTlv &aLeaderDataA, bool aSingletonB, const LeaderDataTlv &aleaderDataB); /** * This method checks if the destination is reachable. * * @param[in] aMeshSource The RLOC16 of the source. * @param[in] aMeshDest The RLOC16 of the destination. * @param[in] aIp6Header A reference to the IPv6 header of the message. * * @retval OT_ERROR_NONE The destination is reachable. * @retval OT_ERROR_DROP The destination is not reachable and the message should be dropped. * */ otError CheckReachability(uint16_t aMeshSource, uint16_t aMeshDest, Ip6::Header &aIp6Header); /** * This method resolves 2-hop routing loops. * * @param[in] aSourceMac The RLOC16 of the previous hop. * @param[in] aDestRloc16 The RLOC16 of the final destination. * */ void ResolveRoutingLoops(uint16_t aSourceMac, uint16_t aDestRloc16); /** * This method checks if a given Router ID has correct value. * * @param[in] aRouterId The Router ID value. * * @retval TRUE If @p aRouterId is in correct range [0..62]. * @retval FALSE If @p aRouterId is not a valid Router ID. 
* */ static bool IsRouterIdValid(uint8_t aRouterId) { return aRouterId <= kMaxRouterId; } /** * This method fills an ConnectivityTlv. * * @param[out] aTlv A reference to the tlv to be filled. * */ void FillConnectivityTlv(ConnectivityTlv &aTlv); /** * This method fills an RouteTlv. * * @param[out] aTlv A reference to the tlv to be filled. * */ void FillRouteTlv(RouteTlv &aTlv); /** * This method generates an MLE Child Update Request message to be sent to the parent. * * @retval OT_ERROR_NONE Successfully generated an MLE Child Update Request message. * @retval OT_ERROR_NO_BUFS Insufficient buffers to generate the MLE Child Update Request message. * */ otError SendChildUpdateRequest(void) { return Mle::SendChildUpdateRequest(); } #if OPENTHREAD_CONFIG_ENABLE_STEERING_DATA_SET_OOB /** * This method sets steering data out of band * * @param[in] aExtAddress Value used to set steering data * All zeros clears steering data * All 0xFFs sets steering data to 0xFF * Anything else is used to compute the bloom filter * * @retval OT_ERROR_NONE Steering data was set * */ otError SetSteeringData(otExtAddress *aExtAddress); #endif // OPENTHREAD_CONFIG_ENABLE_STEERING_DATA_SET_OOB /** * This method gets the assigned parent priority. * * @returns The assigned parent priority value, -2 means not assigned. * */ int8_t GetAssignParentPriority(void) const; /** * This method sets the parent priority. * * @param[in] aParentPriority The parent priority value. * * @retval OT_ERROR_NONE Successfully set the parent priority. * @retval OT_ERROR_INVALID_ARGS If the parent priority value is not among 1, 0, -1 and -2. * */ otError SetAssignParentPriority(int8_t aParentPriority); /** * This method gets the longest MLE Timeout TLV for all active MTD children. * * @param[out] aTimeout A reference to where the information is placed. * * @retval OT_ERROR_NONE Successfully get the max child timeout * @retval OT_ERROR_INVALID_STATE Not an active router * @retval OT_ERROR_NOT_FOUND NO MTD child * */ otError GetMaxChildTimeout(uint32_t &aTimeout) const; private: enum { kDiscoveryMaxJitter = 250u, ///< Maximum jitter time used to delay Discovery Responses in milliseconds. kStateUpdatePeriod = 1000u, ///< State update period in milliseconds. kUnsolicitedDataResponseJitter = 500u, ///< Maximum delay before unsolicited Data Response in milliseconds. 
}; otError AppendConnectivity(Message &aMessage); otError AppendChildAddresses(Message &aMessage, Child &aChild); otError AppendRoute(Message &aMessage); otError AppendActiveDataset(Message &aMessage); otError AppendPendingDataset(Message &aMessage); otError GetChildInfo(Child &aChild, otChildInfo &aChildInfo); otError HandleDetachStart(void); otError HandleChildStart(AttachMode aMode); otError HandleLinkRequest(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo); otError HandleLinkAccept(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo, uint32_t aKeySequence); otError HandleLinkAccept(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo, uint32_t aKeySequence, bool request); otError HandleLinkAcceptAndRequest(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo, uint32_t aKeySequence); otError HandleAdvertisement(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo); otError HandleParentRequest(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo); otError HandleChildIdRequest(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo, uint32_t aKeySequence); otError HandleChildUpdateRequest(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo, uint32_t aKeySequence); otError HandleChildUpdateResponse(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo, uint32_t aKeySequence); otError HandleDataRequest(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo); otError HandleNetworkDataUpdateRouter(void); otError HandleDiscoveryRequest(const Message &aMessage, const Ip6::MessageInfo &aMessageInfo); otError ProcessRouteTlv(const RouteTlv &aRoute); void StopAdvertiseTimer(void); void ResetAdvertiseInterval(void); otError SendAddressSolicit(ThreadStatusTlv::Status aStatus); otError SendAddressRelease(void); void SendAddressSolicitResponse(const Coap::Header &aRequest, uint8_t aRouterId, const Ip6::MessageInfo &aMessageInfo); otError SendAdvertisement(void); otError SendLinkRequest(Neighbor *aNeighbor); otError SendLinkAccept(const Ip6::MessageInfo &aMessageInfo, Neighbor *aNeighbor, const TlvRequestTlv &aTlvRequest, const ChallengeTlv &aChallenge); otError SendParentResponse(Child *aChild, const ChallengeTlv &aChallenge, bool aRoutersOnlyRequest); otError SendChildIdResponse(Child *aChild); otError SendChildUpdateRequest(Child *aChild); otError SendChildUpdateResponse(Child *aChild, const Ip6::MessageInfo &aMessageInfo, const uint8_t *aTlvs, uint8_t aTlvsLength, const ChallengeTlv *challenge); otError SendDataResponse(const Ip6::Address &aDestination, const uint8_t *aTlvs, uint8_t aTlvsLength, uint16_t aDelay); otError SendDiscoveryResponse(const Ip6::Address &aDestination, uint16_t aPanId); otError SetStateRouter(uint16_t aRloc16); otError SetStateLeader(uint16_t aRloc16); void StopLeader(void); void SynchronizeChildNetworkData(void); otError UpdateChildAddresses(const AddressRegistrationTlv &aTlv, Child &aChild); void UpdateRoutes(const RouteTlv &aTlv, uint8_t aRouterId); static void HandleAddressSolicitResponse(void *aContext, otCoapHeader *aHeader, otMessage *aMessage, const otMessageInfo *aMessageInfo, otError result); void HandleAddressSolicitResponse(Coap::Header *aHeader, Message *aMessage, const Ip6::MessageInfo *aMessageInfo, otError result); static void HandleAddressRelease(void *aContext, otCoapHeader *aHeader, otMessage *aMessage, const otMessageInfo *aMessageInfo); void HandleAddressRelease(Coap::Header &aHeader, Message &aMessage, const Ip6::MessageInfo &aMessageInfo); static void 
HandleAddressSolicit(void *aContext, otCoapHeader *aHeader, otMessage *aMessage, const otMessageInfo *aMessageInfo); void HandleAddressSolicit(Coap::Header &aHeader, Message &aMessage, const Ip6::MessageInfo &aMessageInfo); static uint8_t LinkQualityToCost(uint8_t aLinkQuality); Child *NewChild(void); Child *FindChild(uint16_t aChildId); Child *FindChild(const Mac::ExtAddress &aMacAddr); void SetChildStateToValid(Child *aChild); bool HasChildren(void); void RemoveChildren(void); bool HasMinDowngradeNeighborRouters(void); bool HasOneNeighborwithComparableConnectivity(const RouteTlv &aRoute, uint8_t aRouterId); bool HasSmallNumberOfChildren(void); uint8_t GetMinDowngradeNeighborRouters(void); uint8_t AllocateRouterId(void); uint8_t AllocateRouterId(uint8_t aRouterId); bool InRouterIdMask(uint8_t aRouterId); static bool HandleAdvertiseTimer(TrickleTimer &aTimer); bool HandleAdvertiseTimer(void); static void HandleStateUpdateTimer(Timer &aTimer); void HandleStateUpdateTimer(void); static MleRouter &GetOwner(const Context &aContext); TrickleTimer mAdvertiseTimer; TimerMilli mStateUpdateTimer; Coap::Resource mAddressSolicit; Coap::Resource mAddressRelease; uint8_t mRouterIdSequence; uint32_t mRouterIdSequenceLastUpdated; Router mRouters[kMaxRouterId + 1]; uint8_t mMaxChildrenAllowed; Child mChildren[kMaxChildren]; uint8_t mChallengeTimeout; uint8_t mChallenge[8]; uint16_t mNextChildId; uint8_t mNetworkIdTimeout; uint8_t mRouterUpgradeThreshold; uint8_t mRouterDowngradeThreshold; uint8_t mLeaderWeight; uint32_t mFixedLeaderPartitionId; ///< only for certification testing bool mRouterRoleEnabled; bool mIsRouterRestoringChildren; uint8_t mRouterId; uint8_t mPreviousRouterId; uint32_t mPreviousPartitionId; uint8_t mRouterSelectionJitter; ///< The variable to save the assigned jitter value. uint8_t mRouterSelectionJitterTimeout; ///< The Timeout prior to request/release Router ID. int8_t mParentPriority; ///< The assigned parent priority value, -2 means not assigned. #if OPENTHREAD_CONFIG_ENABLE_STEERING_DATA_SET_OOB MeshCoP::SteeringDataTlv mSteeringData; #endif // OPENTHREAD_CONFIG_ENABLE_STEERING_DATA_SET_OOB }; } // namespace Mle /** * @} */ } // namespace ot #endif // MLE_ROUTER_HPP_
bsd-3-clause
cassioozarias/lojainstrumento
module/Loja/src/Loja/Entity/Musico.php
1718
<?php namespace Loja\Entity; use Doctrine\ORM\Mapping as ORM; /** * @ORM\Entity * @ORM\Table(name="musicos") * @ORM\Entity(repositoryClass="Loja\Entity\MusicoRepository") */ class Musico { /** * * @ORM\Id * @ORM\Column(type="integer") * @ORM\GeneratedValue * @var int */ protected $id; /** * * @ORM\Column(type="text") * @var */ protected $nome; /** * @ORM\ManyToOne(targetEntity="Loja\Entity\Categoria", inversedBy="musicos") * @ORM\JoinColumn(name="categoria_id", referencedColumnName="id") */ protected $categoria; /** * @ORM\Column(type="text") * @var string */ protected $cpf; /** * @ORM\Column(type="text") * @var string */ public function __construct($options = null) { Configurator::configure($this, $options); } public function getId() { return $this->id; } public function getNome() { return $this->nome; } public function getCategoria() { return $this->categoria; } public function getCpf() { return $this->cpf; } public function setId($id) { $this->id = $id; } public function setNome($nome) { $this->nome = $nome; } public function setCategoria($categoria) { $this->categoria = $categoria; } public function setCpf($cpf) { $this->cpf = $cpf; } public function toArray() { return array( 'id' => $this->getId(), 'nome' => $this->getNome(), 'cpf' => $this->getCpf(), 'categoria'=> $this->getCategoria()->getId() ); } }
bsd-3-clause
jonas747/discordgo
events_easyjson.go
40365
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT. package discordgo import ( json "encoding/json" easyjson "github.com/mailru/easyjson" jlexer "github.com/mailru/easyjson/jlexer" jwriter "github.com/mailru/easyjson/jwriter" ) // suppress unused package warning var ( _ *json.RawMessage _ *jlexer.Lexer _ *jwriter.Writer _ easyjson.Marshaler ) func easyjson692db02bDecodeGithubComJonas747Discordgo(in *jlexer.Lexer, out *PresenceUpdate) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "guild_id": out.GuildID = int64(in.Int64Str()) case "user": if in.IsNull() { in.Skip() out.User = nil } else { if out.User == nil { out.User = new(User) } easyjson692db02bDecodeGithubComJonas747Discordgo1(in, out.User) } case "status": out.Status = Status(in.String()) case "game": if in.IsNull() { in.Skip() out.Game = nil } else { if out.Game == nil { out.Game = new(Game) } easyjson692db02bDecodeGithubComJonas747Discordgo2(in, out.Game) } case "nick": out.Nick = string(in.String()) case "roles": if data := in.Raw(); in.Ok() { in.AddError((out.Roles).UnmarshalJSON(data)) } case "activities": if in.IsNull() { in.Skip() out.Activities = nil } else { in.Delim('[') if out.Activities == nil { if !in.IsDelim(']') { out.Activities = make(Activities, 0, 8) } else { out.Activities = Activities{} } } else { out.Activities = (out.Activities)[:0] } for !in.IsDelim(']') { var v1 *Game if in.IsNull() { in.Skip() v1 = nil } else { if v1 == nil { v1 = new(Game) } easyjson692db02bDecodeGithubComJonas747Discordgo2(in, v1) } out.Activities = append(out.Activities, v1) in.WantComma() } in.Delim(']') } case "since": out.Since = int64(in.Int64()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo(out *jwriter.Writer, in PresenceUpdate) { out.RawByte('{') first := true _ = first { const prefix string = ",\"guild_id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.GuildID)) } { const prefix string = ",\"user\":" out.RawString(prefix) if in.User == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo1(out, *in.User) } } { const prefix string = ",\"status\":" out.RawString(prefix) out.String(string(in.Status)) } { const prefix string = ",\"game\":" out.RawString(prefix) if in.Game == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo2(out, *in.Game) } } { const prefix string = ",\"nick\":" out.RawString(prefix) out.String(string(in.Nick)) } { const prefix string = ",\"roles\":" out.RawString(prefix) out.Raw((in.Roles).MarshalJSON()) } { const prefix string = ",\"activities\":" out.RawString(prefix) if in.Activities == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v2, v3 := range in.Activities { if v2 > 0 { out.RawByte(',') } if v3 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo2(out, *v3) } } out.RawByte(']') } } { const prefix string = ",\"since\":" out.RawString(prefix) out.Int64(int64(in.Since)) } out.RawByte('}') } // MarshalEasyJSON supports easyjson.Marshaler interface func (v PresenceUpdate) MarshalEasyJSON(w *jwriter.Writer) { easyjson692db02bEncodeGithubComJonas747Discordgo(w, v) } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v 
*PresenceUpdate) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson692db02bDecodeGithubComJonas747Discordgo(l, v) } func easyjson692db02bDecodeGithubComJonas747Discordgo2(in *jlexer.Lexer, out *Game) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "name": out.Name = string(in.String()) case "type": out.Type = GameType(in.Int()) case "url": out.URL = string(in.String()) case "details": out.Details = string(in.String()) case "state": out.State = string(in.String()) case "timestamps": if data := in.Raw(); in.Ok() { in.AddError((out.TimeStamps).UnmarshalJSON(data)) } case "assets": easyjson692db02bDecodeGithubComJonas747Discordgo3(in, &out.Assets) case "application_id": out.ApplicationID = string(in.String()) case "instance": out.Instance = int8(in.Int8()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo2(out *jwriter.Writer, in Game) { out.RawByte('{') first := true _ = first { const prefix string = ",\"name\":" out.RawString(prefix[1:]) out.String(string(in.Name)) } { const prefix string = ",\"type\":" out.RawString(prefix) out.Int(int(in.Type)) } if in.URL != "" { const prefix string = ",\"url\":" out.RawString(prefix) out.String(string(in.URL)) } if in.Details != "" { const prefix string = ",\"details\":" out.RawString(prefix) out.String(string(in.Details)) } if in.State != "" { const prefix string = ",\"state\":" out.RawString(prefix) out.String(string(in.State)) } if true { const prefix string = ",\"timestamps\":" out.RawString(prefix) easyjson692db02bEncodeGithubComJonas747Discordgo3(out, in.TimeStamps) } if true { const prefix string = ",\"assets\":" out.RawString(prefix) easyjson692db02bEncodeGithubComJonas747Discordgo4(out, in.Assets) } if in.ApplicationID != "" { const prefix string = ",\"application_id\":" out.RawString(prefix) out.String(string(in.ApplicationID)) } if in.Instance != 0 { const prefix string = ",\"instance\":" out.RawString(prefix) out.Int8(int8(in.Instance)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo4(in *jlexer.Lexer, out *TimeStamps) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "end": out.EndTimestamp = int64(in.Int64()) case "start": out.StartTimestamp = int64(in.Int64()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo3(out *jwriter.Writer, in TimeStamps) { out.RawByte('{') first := true _ = first if in.EndTimestamp != 0 { const prefix string = ",\"end\":" first = false out.RawString(prefix[1:]) out.Int64(int64(in.EndTimestamp)) } if in.StartTimestamp != 0 { const prefix string = ",\"start\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Int64(int64(in.StartTimestamp)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo3(in *jlexer.Lexer, out *Assets) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key 
{ case "large_image": out.LargeImageID = string(in.String()) case "small_image": out.SmallImageID = string(in.String()) case "large_text": out.LargeText = string(in.String()) case "small_text": out.SmallText = string(in.String()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo4(out *jwriter.Writer, in Assets) { out.RawByte('{') first := true _ = first if in.LargeImageID != "" { const prefix string = ",\"large_image\":" first = false out.RawString(prefix[1:]) out.String(string(in.LargeImageID)) } if in.SmallImageID != "" { const prefix string = ",\"small_image\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.SmallImageID)) } if in.LargeText != "" { const prefix string = ",\"large_text\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.LargeText)) } if in.SmallText != "" { const prefix string = ",\"small_text\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.SmallText)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo1(in *jlexer.Lexer, out *User) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "id": out.ID = int64(in.Int64Str()) case "username": out.Username = string(in.String()) case "avatar": out.Avatar = string(in.String()) case "locale": out.Locale = string(in.String()) case "discriminator": out.Discriminator = string(in.String()) case "bot": out.Bot = bool(in.Bool()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo1(out *jwriter.Writer, in User) { out.RawByte('{') first := true _ = first { const prefix string = ",\"id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.ID)) } { const prefix string = ",\"username\":" out.RawString(prefix) out.String(string(in.Username)) } { const prefix string = ",\"avatar\":" out.RawString(prefix) out.String(string(in.Avatar)) } { const prefix string = ",\"locale\":" out.RawString(prefix) out.String(string(in.Locale)) } { const prefix string = ",\"discriminator\":" out.RawString(prefix) out.String(string(in.Discriminator)) } { const prefix string = ",\"bot\":" out.RawString(prefix) out.Bool(bool(in.Bot)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo5(in *jlexer.Lexer, out *GuildCreate) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } out.Guild = new(Guild) in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "id": out.ID = int64(in.Int64Str()) case "name": out.Name = string(in.String()) case "description": out.Description = string(in.String()) case "preferred_locale": out.PreferredLocale = string(in.String()) case "icon": out.Icon = string(in.String()) case "region": out.Region = string(in.String()) case "afk_channel_id": out.AfkChannelID = int64(in.Int64Str()) case "embed_channel_id": out.EmbedChannelID = int64(in.Int64Str()) case "owner_id": out.OwnerID = int64(in.Int64Str()) case "joined_at": out.JoinedAt = Timestamp(in.String()) case "splash": out.Splash = string(in.String()) case 
"afk_timeout": out.AfkTimeout = int(in.Int()) case "member_count": out.MemberCount = int(in.Int()) case "verification_level": out.VerificationLevel = VerificationLevel(in.Int()) case "embed_enabled": out.EmbedEnabled = bool(in.Bool()) case "large": out.Large = bool(in.Bool()) case "default_message_notifications": out.DefaultMessageNotifications = int(in.Int()) case "roles": if in.IsNull() { in.Skip() out.Roles = nil } else { in.Delim('[') if out.Roles == nil { if !in.IsDelim(']') { out.Roles = make([]*Role, 0, 8) } else { out.Roles = []*Role{} } } else { out.Roles = (out.Roles)[:0] } for !in.IsDelim(']') { var v4 *Role if in.IsNull() { in.Skip() v4 = nil } else { if v4 == nil { v4 = new(Role) } easyjson692db02bDecodeGithubComJonas747Discordgo6(in, v4) } out.Roles = append(out.Roles, v4) in.WantComma() } in.Delim(']') } case "emojis": if in.IsNull() { in.Skip() out.Emojis = nil } else { in.Delim('[') if out.Emojis == nil { if !in.IsDelim(']') { out.Emojis = make([]*Emoji, 0, 8) } else { out.Emojis = []*Emoji{} } } else { out.Emojis = (out.Emojis)[:0] } for !in.IsDelim(']') { var v5 *Emoji if in.IsNull() { in.Skip() v5 = nil } else { if v5 == nil { v5 = new(Emoji) } easyjson692db02bDecodeGithubComJonas747Discordgo7(in, v5) } out.Emojis = append(out.Emojis, v5) in.WantComma() } in.Delim(']') } case "members": if in.IsNull() { in.Skip() out.Members = nil } else { in.Delim('[') if out.Members == nil { if !in.IsDelim(']') { out.Members = make([]*Member, 0, 8) } else { out.Members = []*Member{} } } else { out.Members = (out.Members)[:0] } for !in.IsDelim(']') { var v6 *Member if in.IsNull() { in.Skip() v6 = nil } else { if v6 == nil { v6 = new(Member) } easyjson692db02bDecodeGithubComJonas747Discordgo8(in, v6) } out.Members = append(out.Members, v6) in.WantComma() } in.Delim(']') } case "presences": if in.IsNull() { in.Skip() out.Presences = nil } else { in.Delim('[') if out.Presences == nil { if !in.IsDelim(']') { out.Presences = make([]*Presence, 0, 8) } else { out.Presences = []*Presence{} } } else { out.Presences = (out.Presences)[:0] } for !in.IsDelim(']') { var v7 *Presence if in.IsNull() { in.Skip() v7 = nil } else { if v7 == nil { v7 = new(Presence) } easyjson692db02bDecodeGithubComJonas747Discordgo9(in, v7) } out.Presences = append(out.Presences, v7) in.WantComma() } in.Delim(']') } case "channels": if in.IsNull() { in.Skip() out.Channels = nil } else { in.Delim('[') if out.Channels == nil { if !in.IsDelim(']') { out.Channels = make([]*Channel, 0, 8) } else { out.Channels = []*Channel{} } } else { out.Channels = (out.Channels)[:0] } for !in.IsDelim(']') { var v8 *Channel if in.IsNull() { in.Skip() v8 = nil } else { if v8 == nil { v8 = new(Channel) } easyjson692db02bDecodeGithubComJonas747Discordgo10(in, v8) } out.Channels = append(out.Channels, v8) in.WantComma() } in.Delim(']') } case "voice_states": if in.IsNull() { in.Skip() out.VoiceStates = nil } else { in.Delim('[') if out.VoiceStates == nil { if !in.IsDelim(']') { out.VoiceStates = make([]*VoiceState, 0, 8) } else { out.VoiceStates = []*VoiceState{} } } else { out.VoiceStates = (out.VoiceStates)[:0] } for !in.IsDelim(']') { var v9 *VoiceState if in.IsNull() { in.Skip() v9 = nil } else { if v9 == nil { v9 = new(VoiceState) } easyjson692db02bDecodeGithubComJonas747Discordgo11(in, v9) } out.VoiceStates = append(out.VoiceStates, v9) in.WantComma() } in.Delim(']') } case "max_presences": out.MaxPresences = int(in.Int()) case "max_members": out.MaxMembers = int(in.Int()) case "unavailable": out.Unavailable = bool(in.Bool()) case 
"explicit_content_filter": out.ExplicitContentFilter = ExplicitContentFilterLevel(in.Int()) case "features": if in.IsNull() { in.Skip() out.Features = nil } else { in.Delim('[') if out.Features == nil { if !in.IsDelim(']') { out.Features = make([]string, 0, 4) } else { out.Features = []string{} } } else { out.Features = (out.Features)[:0] } for !in.IsDelim(']') { var v10 string v10 = string(in.String()) out.Features = append(out.Features, v10) in.WantComma() } in.Delim(']') } case "mfa_level": out.MfaLevel = MfaLevel(in.Int()) case "widget_enabled": out.WidgetEnabled = bool(in.Bool()) case "widget_channel_id": out.WidgetChannelID = string(in.String()) case "system_channel_id": out.SystemChannelID = string(in.String()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo5(out *jwriter.Writer, in GuildCreate) { out.RawByte('{') first := true _ = first { const prefix string = ",\"id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.ID)) } { const prefix string = ",\"name\":" out.RawString(prefix) out.String(string(in.Name)) } { const prefix string = ",\"description\":" out.RawString(prefix) out.String(string(in.Description)) } { const prefix string = ",\"preferred_locale\":" out.RawString(prefix) out.String(string(in.PreferredLocale)) } { const prefix string = ",\"icon\":" out.RawString(prefix) out.String(string(in.Icon)) } { const prefix string = ",\"region\":" out.RawString(prefix) out.String(string(in.Region)) } { const prefix string = ",\"afk_channel_id\":" out.RawString(prefix) out.Int64Str(int64(in.AfkChannelID)) } { const prefix string = ",\"embed_channel_id\":" out.RawString(prefix) out.Int64Str(int64(in.EmbedChannelID)) } { const prefix string = ",\"owner_id\":" out.RawString(prefix) out.Int64Str(int64(in.OwnerID)) } { const prefix string = ",\"joined_at\":" out.RawString(prefix) out.String(string(in.JoinedAt)) } { const prefix string = ",\"splash\":" out.RawString(prefix) out.String(string(in.Splash)) } { const prefix string = ",\"afk_timeout\":" out.RawString(prefix) out.Int(int(in.AfkTimeout)) } { const prefix string = ",\"member_count\":" out.RawString(prefix) out.Int(int(in.MemberCount)) } { const prefix string = ",\"verification_level\":" out.RawString(prefix) out.Int(int(in.VerificationLevel)) } { const prefix string = ",\"embed_enabled\":" out.RawString(prefix) out.Bool(bool(in.EmbedEnabled)) } { const prefix string = ",\"large\":" out.RawString(prefix) out.Bool(bool(in.Large)) } { const prefix string = ",\"default_message_notifications\":" out.RawString(prefix) out.Int(int(in.DefaultMessageNotifications)) } { const prefix string = ",\"roles\":" out.RawString(prefix) if in.Roles == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v11, v12 := range in.Roles { if v11 > 0 { out.RawByte(',') } if v12 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo6(out, *v12) } } out.RawByte(']') } } { const prefix string = ",\"emojis\":" out.RawString(prefix) if in.Emojis == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v13, v14 := range in.Emojis { if v13 > 0 { out.RawByte(',') } if v14 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo7(out, *v14) } } out.RawByte(']') } } { const prefix string = ",\"members\":" out.RawString(prefix) if in.Members == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { 
out.RawString("null") } else { out.RawByte('[') for v15, v16 := range in.Members { if v15 > 0 { out.RawByte(',') } if v16 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo8(out, *v16) } } out.RawByte(']') } } { const prefix string = ",\"presences\":" out.RawString(prefix) if in.Presences == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v17, v18 := range in.Presences { if v17 > 0 { out.RawByte(',') } if v18 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo9(out, *v18) } } out.RawByte(']') } } { const prefix string = ",\"channels\":" out.RawString(prefix) if in.Channels == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v19, v20 := range in.Channels { if v19 > 0 { out.RawByte(',') } if v20 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo10(out, *v20) } } out.RawByte(']') } } { const prefix string = ",\"voice_states\":" out.RawString(prefix) if in.VoiceStates == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v21, v22 := range in.VoiceStates { if v21 > 0 { out.RawByte(',') } if v22 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo11(out, *v22) } } out.RawByte(']') } } { const prefix string = ",\"max_presences\":" out.RawString(prefix) out.Int(int(in.MaxPresences)) } { const prefix string = ",\"max_members\":" out.RawString(prefix) out.Int(int(in.MaxMembers)) } { const prefix string = ",\"unavailable\":" out.RawString(prefix) out.Bool(bool(in.Unavailable)) } { const prefix string = ",\"explicit_content_filter\":" out.RawString(prefix) out.Int(int(in.ExplicitContentFilter)) } { const prefix string = ",\"features\":" out.RawString(prefix) if in.Features == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v23, v24 := range in.Features { if v23 > 0 { out.RawByte(',') } out.String(string(v24)) } out.RawByte(']') } } { const prefix string = ",\"mfa_level\":" out.RawString(prefix) out.Int(int(in.MfaLevel)) } { const prefix string = ",\"widget_enabled\":" out.RawString(prefix) out.Bool(bool(in.WidgetEnabled)) } { const prefix string = ",\"widget_channel_id\":" out.RawString(prefix) out.String(string(in.WidgetChannelID)) } { const prefix string = ",\"system_channel_id\":" out.RawString(prefix) out.String(string(in.SystemChannelID)) } out.RawByte('}') } // MarshalEasyJSON supports easyjson.Marshaler interface func (v GuildCreate) MarshalEasyJSON(w *jwriter.Writer) { easyjson692db02bEncodeGithubComJonas747Discordgo5(w, v) } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *GuildCreate) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson692db02bDecodeGithubComJonas747Discordgo5(l, v) } func easyjson692db02bDecodeGithubComJonas747Discordgo11(in *jlexer.Lexer, out *VoiceState) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "user_id": out.UserID = int64(in.Int64Str()) case "session_id": out.SessionID = string(in.String()) case "channel_id": out.ChannelID = int64(in.Int64Str()) case "guild_id": out.GuildID = int64(in.Int64Str()) case "suppress": out.Suppress = bool(in.Bool()) case "self_mute": out.SelfMute = bool(in.Bool()) case 
"self_deaf": out.SelfDeaf = bool(in.Bool()) case "mute": out.Mute = bool(in.Bool()) case "deaf": out.Deaf = bool(in.Bool()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo11(out *jwriter.Writer, in VoiceState) { out.RawByte('{') first := true _ = first { const prefix string = ",\"user_id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.UserID)) } { const prefix string = ",\"session_id\":" out.RawString(prefix) out.String(string(in.SessionID)) } { const prefix string = ",\"channel_id\":" out.RawString(prefix) out.Int64Str(int64(in.ChannelID)) } { const prefix string = ",\"guild_id\":" out.RawString(prefix) out.Int64Str(int64(in.GuildID)) } { const prefix string = ",\"suppress\":" out.RawString(prefix) out.Bool(bool(in.Suppress)) } { const prefix string = ",\"self_mute\":" out.RawString(prefix) out.Bool(bool(in.SelfMute)) } { const prefix string = ",\"self_deaf\":" out.RawString(prefix) out.Bool(bool(in.SelfDeaf)) } { const prefix string = ",\"mute\":" out.RawString(prefix) out.Bool(bool(in.Mute)) } { const prefix string = ",\"deaf\":" out.RawString(prefix) out.Bool(bool(in.Deaf)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo10(in *jlexer.Lexer, out *Channel) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "id": out.ID = int64(in.Int64Str()) case "guild_id": out.GuildID = int64(in.Int64Str()) case "name": out.Name = string(in.String()) case "topic": out.Topic = string(in.String()) case "type": out.Type = ChannelType(in.Int()) case "last_message_id": out.LastMessageID = int64(in.Int64Str()) case "nsfw": out.NSFW = bool(in.Bool()) case "icon": out.Icon = string(in.String()) case "position": out.Position = int(in.Int()) case "bitrate": out.Bitrate = int(in.Int()) case "recipients": if in.IsNull() { in.Skip() out.Recipients = nil } else { in.Delim('[') if out.Recipients == nil { if !in.IsDelim(']') { out.Recipients = make([]*User, 0, 8) } else { out.Recipients = []*User{} } } else { out.Recipients = (out.Recipients)[:0] } for !in.IsDelim(']') { var v25 *User if in.IsNull() { in.Skip() v25 = nil } else { if v25 == nil { v25 = new(User) } easyjson692db02bDecodeGithubComJonas747Discordgo1(in, v25) } out.Recipients = append(out.Recipients, v25) in.WantComma() } in.Delim(']') } case "permission_overwrites": if in.IsNull() { in.Skip() out.PermissionOverwrites = nil } else { in.Delim('[') if out.PermissionOverwrites == nil { if !in.IsDelim(']') { out.PermissionOverwrites = make([]*PermissionOverwrite, 0, 8) } else { out.PermissionOverwrites = []*PermissionOverwrite{} } } else { out.PermissionOverwrites = (out.PermissionOverwrites)[:0] } for !in.IsDelim(']') { var v26 *PermissionOverwrite if in.IsNull() { in.Skip() v26 = nil } else { if v26 == nil { v26 = new(PermissionOverwrite) } easyjson692db02bDecodeGithubComJonas747Discordgo12(in, v26) } out.PermissionOverwrites = append(out.PermissionOverwrites, v26) in.WantComma() } in.Delim(']') } case "user_limit": out.UserLimit = int(in.Int()) case "parent_id": out.ParentID = int64(in.Int64Str()) case "rate_limit_per_user": out.RateLimitPerUser = int(in.Int()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo10(out *jwriter.Writer, in 
Channel) { out.RawByte('{') first := true _ = first { const prefix string = ",\"id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.ID)) } { const prefix string = ",\"guild_id\":" out.RawString(prefix) out.Int64Str(int64(in.GuildID)) } { const prefix string = ",\"name\":" out.RawString(prefix) out.String(string(in.Name)) } { const prefix string = ",\"topic\":" out.RawString(prefix) out.String(string(in.Topic)) } { const prefix string = ",\"type\":" out.RawString(prefix) out.Int(int(in.Type)) } { const prefix string = ",\"last_message_id\":" out.RawString(prefix) out.Int64Str(int64(in.LastMessageID)) } { const prefix string = ",\"nsfw\":" out.RawString(prefix) out.Bool(bool(in.NSFW)) } { const prefix string = ",\"icon\":" out.RawString(prefix) out.String(string(in.Icon)) } { const prefix string = ",\"position\":" out.RawString(prefix) out.Int(int(in.Position)) } { const prefix string = ",\"bitrate\":" out.RawString(prefix) out.Int(int(in.Bitrate)) } { const prefix string = ",\"recipients\":" out.RawString(prefix) if in.Recipients == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v27, v28 := range in.Recipients { if v27 > 0 { out.RawByte(',') } if v28 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo1(out, *v28) } } out.RawByte(']') } } { const prefix string = ",\"permission_overwrites\":" out.RawString(prefix) if in.PermissionOverwrites == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v29, v30 := range in.PermissionOverwrites { if v29 > 0 { out.RawByte(',') } if v30 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo12(out, *v30) } } out.RawByte(']') } } { const prefix string = ",\"user_limit\":" out.RawString(prefix) out.Int(int(in.UserLimit)) } { const prefix string = ",\"parent_id\":" out.RawString(prefix) out.Int64Str(int64(in.ParentID)) } { const prefix string = ",\"rate_limit_per_user\":" out.RawString(prefix) out.Int(int(in.RateLimitPerUser)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo12(in *jlexer.Lexer, out *PermissionOverwrite) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "id": out.ID = int64(in.Int64Str()) case "type": out.Type = string(in.String()) case "deny": out.Deny = int(in.Int()) case "allow": out.Allow = int(in.Int()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo12(out *jwriter.Writer, in PermissionOverwrite) { out.RawByte('{') first := true _ = first { const prefix string = ",\"id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.ID)) } { const prefix string = ",\"type\":" out.RawString(prefix) out.String(string(in.Type)) } { const prefix string = ",\"deny\":" out.RawString(prefix) out.Int(int(in.Deny)) } { const prefix string = ",\"allow\":" out.RawString(prefix) out.Int(int(in.Allow)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo9(in *jlexer.Lexer, out *Presence) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "user": if 
in.IsNull() { in.Skip() out.User = nil } else { if out.User == nil { out.User = new(User) } easyjson692db02bDecodeGithubComJonas747Discordgo1(in, out.User) } case "status": out.Status = Status(in.String()) case "game": if in.IsNull() { in.Skip() out.Game = nil } else { if out.Game == nil { out.Game = new(Game) } easyjson692db02bDecodeGithubComJonas747Discordgo2(in, out.Game) } case "nick": out.Nick = string(in.String()) case "roles": if data := in.Raw(); in.Ok() { in.AddError((out.Roles).UnmarshalJSON(data)) } case "activities": if in.IsNull() { in.Skip() out.Activities = nil } else { in.Delim('[') if out.Activities == nil { if !in.IsDelim(']') { out.Activities = make(Activities, 0, 8) } else { out.Activities = Activities{} } } else { out.Activities = (out.Activities)[:0] } for !in.IsDelim(']') { var v31 *Game if in.IsNull() { in.Skip() v31 = nil } else { if v31 == nil { v31 = new(Game) } easyjson692db02bDecodeGithubComJonas747Discordgo2(in, v31) } out.Activities = append(out.Activities, v31) in.WantComma() } in.Delim(']') } case "since": out.Since = int64(in.Int64()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo9(out *jwriter.Writer, in Presence) { out.RawByte('{') first := true _ = first { const prefix string = ",\"user\":" out.RawString(prefix[1:]) if in.User == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo1(out, *in.User) } } { const prefix string = ",\"status\":" out.RawString(prefix) out.String(string(in.Status)) } { const prefix string = ",\"game\":" out.RawString(prefix) if in.Game == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo2(out, *in.Game) } } { const prefix string = ",\"nick\":" out.RawString(prefix) out.String(string(in.Nick)) } { const prefix string = ",\"roles\":" out.RawString(prefix) out.Raw((in.Roles).MarshalJSON()) } { const prefix string = ",\"activities\":" out.RawString(prefix) if in.Activities == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v32, v33 := range in.Activities { if v32 > 0 { out.RawByte(',') } if v33 == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo2(out, *v33) } } out.RawByte(']') } } { const prefix string = ",\"since\":" out.RawString(prefix) out.Int64(int64(in.Since)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo8(in *jlexer.Lexer, out *Member) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "guild_id": out.GuildID = int64(in.Int64Str()) case "joined_at": out.JoinedAt = Timestamp(in.String()) case "nick": out.Nick = string(in.String()) case "deaf": out.Deaf = bool(in.Bool()) case "mute": out.Mute = bool(in.Bool()) case "user": if in.IsNull() { in.Skip() out.User = nil } else { if out.User == nil { out.User = new(User) } easyjson692db02bDecodeGithubComJonas747Discordgo1(in, out.User) } case "roles": if data := in.Raw(); in.Ok() { in.AddError((out.Roles).UnmarshalJSON(data)) } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo8(out *jwriter.Writer, in Member) { out.RawByte('{') first := true _ = first { const prefix string = ",\"guild_id\":" 
out.RawString(prefix[1:]) out.Int64Str(int64(in.GuildID)) } { const prefix string = ",\"joined_at\":" out.RawString(prefix) out.String(string(in.JoinedAt)) } { const prefix string = ",\"nick\":" out.RawString(prefix) out.String(string(in.Nick)) } { const prefix string = ",\"deaf\":" out.RawString(prefix) out.Bool(bool(in.Deaf)) } { const prefix string = ",\"mute\":" out.RawString(prefix) out.Bool(bool(in.Mute)) } { const prefix string = ",\"user\":" out.RawString(prefix) if in.User == nil { out.RawString("null") } else { easyjson692db02bEncodeGithubComJonas747Discordgo1(out, *in.User) } } { const prefix string = ",\"roles\":" out.RawString(prefix) out.Raw((in.Roles).MarshalJSON()) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo7(in *jlexer.Lexer, out *Emoji) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "id": out.ID = int64(in.Int64Str()) case "name": out.Name = string(in.String()) case "roles": if data := in.Raw(); in.Ok() { in.AddError((out.Roles).UnmarshalJSON(data)) } case "managed": out.Managed = bool(in.Bool()) case "require_colons": out.RequireColons = bool(in.Bool()) case "animated": out.Animated = bool(in.Bool()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo7(out *jwriter.Writer, in Emoji) { out.RawByte('{') first := true _ = first { const prefix string = ",\"id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.ID)) } { const prefix string = ",\"name\":" out.RawString(prefix) out.String(string(in.Name)) } { const prefix string = ",\"roles\":" out.RawString(prefix) out.Raw((in.Roles).MarshalJSON()) } { const prefix string = ",\"managed\":" out.RawString(prefix) out.Bool(bool(in.Managed)) } { const prefix string = ",\"require_colons\":" out.RawString(prefix) out.Bool(bool(in.RequireColons)) } { const prefix string = ",\"animated\":" out.RawString(prefix) out.Bool(bool(in.Animated)) } out.RawByte('}') } func easyjson692db02bDecodeGithubComJonas747Discordgo6(in *jlexer.Lexer, out *Role) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeString() in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "id": out.ID = int64(in.Int64Str()) case "name": out.Name = string(in.String()) case "managed": out.Managed = bool(in.Bool()) case "mentionable": out.Mentionable = bool(in.Bool()) case "hoist": out.Hoist = bool(in.Bool()) case "color": out.Color = int(in.Int()) case "position": out.Position = int(in.Int()) case "permissions": out.Permissions = int(in.Int()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson692db02bEncodeGithubComJonas747Discordgo6(out *jwriter.Writer, in Role) { out.RawByte('{') first := true _ = first { const prefix string = ",\"id\":" out.RawString(prefix[1:]) out.Int64Str(int64(in.ID)) } { const prefix string = ",\"name\":" out.RawString(prefix) out.String(string(in.Name)) } { const prefix string = ",\"managed\":" out.RawString(prefix) out.Bool(bool(in.Managed)) } { const prefix string = ",\"mentionable\":" out.RawString(prefix) out.Bool(bool(in.Mentionable)) } { const prefix string = ",\"hoist\":" out.RawString(prefix) out.Bool(bool(in.Hoist)) } { const prefix 
string = ",\"color\":" out.RawString(prefix) out.Int(int(in.Color)) } { const prefix string = ",\"position\":" out.RawString(prefix) out.Int(int(in.Position)) } { const prefix string = ",\"permissions\":" out.RawString(prefix) out.Int(int(in.Permissions)) } out.RawByte('}') }
bsd-3-clause
wfnex/openbras
src/ace/ACE_wrappers/tests/Compiler_Features_26_Test.cpp
1848
/** * This program checks if the compiler doesn't have a certain bug * that we encountered when testing C++11 features */ #include "test_config.h" #if defined (ACE_HAS_CPP11) #include <memory> int run_main (int, ACE_TCHAR *[]) { ACE_START_TEST (ACE_TEXT("Compiler_Features_26_Test")); int retval = 0; std::shared_ptr<int> a,b,c,d; a = std::make_shared<int> (10); b = std::make_shared<int> (10); c = b; if (!(a!=b) || (a==b)) { ACE_ERROR ((LM_ERROR, ACE_TEXT ("Problem using a!=b\n"))); ++retval; } if (!(b==c) || (b!=c)) { ACE_ERROR ((LM_ERROR, ACE_TEXT ("Problem using b==c\n"))); ++retval; } if ((c==d) || !(d!=c)) { ACE_ERROR ((LM_ERROR, ACE_TEXT ("Problem using c==d\n"))); ++retval; } if ((a==nullptr) || !(a!=nullptr)) { ACE_ERROR ((LM_ERROR, ACE_TEXT ("Problem using a==nullptr\n"))); ++retval; } if ((b==nullptr) || !(b!=nullptr)) { ACE_ERROR ((LM_ERROR, ACE_TEXT ("Problem using b==nullptr\n"))); ++retval; } if ((c==nullptr) || !(c!=nullptr)) { ACE_ERROR ((LM_ERROR, ACE_TEXT ("Problem using c==nullptr\n"))); ++retval; } if ((d!=nullptr) || !(d==nullptr)) { ACE_ERROR ((LM_ERROR, ACE_TEXT ("Problem using d!=nullptr\n"))); ++retval; } if (retval == 0) { ACE_DEBUG ((LM_INFO, ACE_TEXT ("Compiler Feature 26 Test does compile and run.\n"))); } ACE_END_TEST; return retval; } #else int run_main (int, ACE_TCHAR *[]) { ACE_START_TEST (ACE_TEXT("Compiler_Features_26_Test")); ACE_DEBUG ((LM_INFO, ACE_TEXT ("No C++11 support enabled\n"))); ACE_END_TEST; return 0; } #endif
bsd-3-clause
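The Compiler_Features_26_Test record above exercises the std::shared_ptr comparison operators. Below is a minimal standard-library sketch of the semantics the test relies on, written without the ACE test harness; the asserted outcomes are plain C++11 facts (shared_ptr comparison is by managed-pointer identity, not by pointed-to value, and an empty shared_ptr compares equal to nullptr), and the variable names simply mirror the test.

// Plain C++11 sketch (no ACE macros): the shared_ptr comparison semantics
// that Compiler_Features_26_Test verifies.
#include <cassert>
#include <memory>

int main()
{
    std::shared_ptr<int> a = std::make_shared<int>(10);
    std::shared_ptr<int> b = std::make_shared<int>(10);
    std::shared_ptr<int> c = b;   // c shares ownership with b
    std::shared_ptr<int> d;       // d is empty

    assert(a != b);               // distinct allocations, even though *a == *b
    assert(b == c && !(b != c));  // same managed object
    assert(d != c && !(c == d));  // empty vs. non-empty
    assert(a != nullptr);         // non-empty pointer is not null
    assert(d == nullptr);         // empty pointer compares equal to nullptr
    return 0;
}

Presumably the compiler bug the test guards against affected one of these comparisons; in any case the sketch builds with any conforming C++11 standard library.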
team-worthwhile/worthwhile
implementierung/src/worthwhile.ui/src/edu/kit/iti/formal/pse/worthwhile/ui/actions/WorthwhileRulerBreakpointPropertiesActionDelegate.java
674
package edu.kit.iti.formal.pse.worthwhile.ui.actions; import org.eclipse.jface.action.IAction; import org.eclipse.jface.text.source.IVerticalRulerInfo; import org.eclipse.ui.texteditor.AbstractRulerActionDelegate; import org.eclipse.ui.texteditor.ITextEditor; /** * A delegate for providing the "Breakpoint properties …" action in the ruler. * * @author Joachim * */ public class WorthwhileRulerBreakpointPropertiesActionDelegate extends AbstractRulerActionDelegate { @Override protected final IAction createAction(final ITextEditor editor, final IVerticalRulerInfo rulerInfo) { return new WorthwhileRulerBreakpointPropertiesAction(editor, rulerInfo); } }
bsd-3-clause
a7000q/lk
backend/views/user/gridLimitations.php
597
<?php use kartik\grid\GridView; ?> <?= GridView::widget([ 'dataProvider' => $dataProvider, 'id' => 'grid-limitations', 'columns' => [ 'field.table.rus_name', 'field.rus_name', 'operand', 'valueField', [ 'class' => 'yii\grid\ActionColumn', 'template' => '{delete}', 'controller' => 'limitation' ], ], 'pjax'=>true, 'pjaxSettings'=>[ 'neverTimeout'=>true, 'options' => [ 'id' => 'pjax-grid-limitation' ] ], 'panelBeforeTemplate' => '{before}' ]); ?>
bsd-3-clause
drhodes/rust-postgres
tests/get_all_row_test.rs
1111
import test_basic::*; import glue::*; #[test] fn GetAllRowTest() { let conn = TestConnect(); Assure(conn.Exec("drop table if exists movie2")); Assure(conn.Exec("create table movie2 (\ did serial,\ unique(did),\ title varchar(255),\ year int,\ director varchar(255)\ );" )); InsertStarWars(conn, "movie2"); let res = Assure(conn.Exec("select * from movie2")); unsafe { let rows = GetAllRows(res); assert rows == [ [Int32(1), VarChar("a new hope"), Int32(1977), VarChar("lucas"), ], [Int32(2), VarChar("the empire strikes back"), Int32(1980), VarChar("Kershner") ], [Int32(3), VarChar("return of the jedi"), Int32(1983), VarChar("lucas") ], ]; } conn.Exec("drop table if exists movie2"); }
bsd-3-clause
ierror/BeautifulMind.io
beautifulmind/mindmaptornado/decorators.py
1019
# -*- coding: utf-8 -*- from .exceptions import HTTPException class check_for_data(object): def __init__(self, *args, **kwargs): self.needed_data = args self.options = kwargs def __call__(self, method): def wrapped_method(*args, **kwargs): try: for data in self.needed_data: data_value = args[1][data] if self.options.get('force_int', False): try: args[1][data] = int(data_value) except (ValueError, TypeError): raise HTTPException(log_message='field "%s" for check_for_data decorated method "%s" with option "force_int" needs to be an integer' % (data, method.__name__)) except (KeyError, IndexError): raise HTTPException(log_message='check_for_data decorated method "%s" needs "%s" data' % (method.__name__, data)) return method(*args, **kwargs) return wrapped_method
bsd-3-clause
element-doo/beepo
code/scala/client/src/main/scala/hr/element/beepo/client/Task.scala
1403
package hr.element.beepo.client import email._ import sms._ import io._ import xml._ sealed trait Action case object Persist extends Action case object Send extends Action { def apply(id: String, otherIDs: String*): Send = Send(id +: otherIDs) } case class Send(val ids: Seq[String]) extends xml.SendXMLConverter with Communicator { def send(): String = send(Send) } object Task { def apply(email: Email): Task = Task(None, Seq(email), Nil) def apply(sms: Sms): Task = Task(None, Nil, Seq(sms)) def apply(email: Email, Sms: Sms): Task = Task(None, Seq(email), Seq(Sms)) def apply(requestID: String, email: Email): Task = Task(Some(requestID), Seq(email), Nil) def apply(requestID: String, Sms: Sms): Task = Task(Some(requestID), Nil, Seq(Sms)) def apply(requestID: String, email: Email, Sms: Sms): Task = Task(Some(requestID), Seq(email), Seq(Sms)) } case class Task( requestID: Option[String] , emails: Seq[Email] , smses: Seq[Sms]) extends xml.TaskXMLConverter with Communicator { def setRequestID(requestID: String) = copy(requestID = Some(requestID)) def add(email: Email, otherEmails: Email*) = copy(emails = (emails :+ email) ++ otherEmails) def add(sms: Sms, otherSmses: Sms*) = copy(smses = (smses :+ sms) ++ otherSmses) def persist(): String = send(Persist) def send(): String = send(Send) }
bsd-3-clause
NVIDIAGameWorks/Falcor
Source/RenderPasses/PathTracer/PathTracer.cpp
59486
/*************************************************************************** # Copyright (c) 2015-21, NVIDIA CORPORATION. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of NVIDIA CORPORATION nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS "AS IS" AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY # OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. **************************************************************************/ #include "PathTracer.h" #include "RenderGraph/RenderPassHelpers.h" #include "Rendering/Lights/EmissiveUniformSampler.h" const RenderPass::Info PathTracer::kInfo { "PathTracer", "Reference path tracer." }; namespace { const std::string kGeneratePathsFilename = "RenderPasses/PathTracer/GeneratePaths.cs.slang"; const std::string kTracePassFilename = "RenderPasses/PathTracer/TracePass.rt.slang"; const std::string kResolvePassFilename = "RenderPasses/PathTracer/ResolvePass.cs.slang"; const std::string kReflectTypesFile = "RenderPasses/PathTracer/ReflectTypes.cs.slang"; const std::string kShaderModel = "6_5"; // Render pass inputs and outputs. 
const std::string kInputVBuffer = "vbuffer"; const std::string kInputMotionVectors = "mvec"; const std::string kInputViewDir = "viewW"; const std::string kInputSampleCount = "sampleCount"; const Falcor::ChannelList kInputChannels = { { kInputVBuffer, "gVBuffer", "Visibility buffer in packed format" }, { kInputMotionVectors, "gMotionVectors", "Motion vector buffer (float format)", true /* optional */ }, { kInputViewDir, "gViewW", "World-space view direction (xyz float format)", true /* optional */ }, { kInputSampleCount, "gSampleCount", "Sample count buffer (integer format)", true /* optional */, ResourceFormat::R8Uint }, }; const std::string kOutputColor = "color"; const std::string kOutputAlbedo = "albedo"; const std::string kOutputSpecularAlbedo = "specularAlbedo"; const std::string kOutputIndirectAlbedo = "indirectAlbedo"; const std::string kOutputNormal = "normal"; const std::string kOutputReflectionPosW = "reflectionPosW"; const std::string kOutputNRDDiffuseRadianceHitDist = "nrdDiffuseRadianceHitDist"; const std::string kOutputNRDSpecularRadianceHitDist = "nrdSpecularRadianceHitDist"; const std::string kOutputNRDResidualRadianceHitDist = "nrdResidualRadianceHitDist"; const std::string kOutputNRDEmission = "nrdEmission"; const std::string kOutputNRDDiffuseReflectance = "nrdDiffuseReflectance"; const std::string kOutputNRDSpecularReflectance = "nrdSpecularReflectance"; const std::string kOutputRayCount = "rayCount"; const std::string kOutputPathLength = "pathLength"; const Falcor::ChannelList kOutputChannels = { { kOutputColor, "gOutputColor", "Output color (linear)", true /* optional */, ResourceFormat::RGBA32Float }, { kOutputAlbedo, "gOutputAlbedo", "Output albedo (linear)", true /* optional */, ResourceFormat::RGBA8Unorm }, { kOutputSpecularAlbedo, "gOutputSpecularAlbedo", "Output specular albedo (linear)", true /* optional */, ResourceFormat::RGBA8Unorm }, { kOutputIndirectAlbedo, "gOutputIndirectAlbedo", "Output indirect albedo (linear)", true /* optional */, ResourceFormat::RGBA8Unorm }, { kOutputNormal, "gOutputNormal", "Output normal (linear)", true /* optional */, ResourceFormat::RGBA16Float }, { kOutputReflectionPosW, "gOutputReflectionPosW", "Output reflection pos (world space)", true /* optional */, ResourceFormat::RGBA32Float }, { kOutputNRDDiffuseRadianceHitDist, "gOutputNRDDiffuseRadianceHitDist", "Output demodulated diffuse color (linear) and hit distance", true /* optional */, ResourceFormat::RGBA32Float }, { kOutputNRDSpecularRadianceHitDist, "gOutputNRDSpecularRadianceHitDist", "Output demodulated specular color (linear) and hit distance", true /* optional */, ResourceFormat::RGBA32Float }, { kOutputNRDResidualRadianceHitDist, "gOutputNRDResidualRadianceHitDist", "Output residual color (linear) and hit distance", true /* optional */, ResourceFormat::RGBA32Float }, { kOutputNRDEmission, "gOutputNRDEmission", "Output primary surface emission", true /* optional */, ResourceFormat::RGBA32Float }, { kOutputNRDDiffuseReflectance, "gOutputNRDDiffuseReflectance", "Output primary surface diffuse reflectance", true /* optional */, ResourceFormat::RGBA16Float }, { kOutputNRDSpecularReflectance, "gOutputNRDSpecularReflectance", "Output primary surface specular reflectance", true /* optional */, ResourceFormat::RGBA16Float }, { kOutputRayCount, "", "Per-pixel ray count", true /* optional */, ResourceFormat::R32Uint }, { kOutputPathLength, "", "Per-pixel path length", true /* optional */, ResourceFormat::R32Uint }, }; // UI variables. 
const Gui::DropdownList kColorFormatList = { { (uint32_t)ColorFormat::RGBA32F, "RGBA32F (128bpp)" }, { (uint32_t)ColorFormat::LogLuvHDR, "LogLuvHDR (32bpp)" }, }; const Gui::DropdownList kMISHeuristicList = { { (uint32_t)MISHeuristic::Balance, "Balance heuristic" }, { (uint32_t)MISHeuristic::PowerTwo, "Power heuristic (exp=2)" }, { (uint32_t)MISHeuristic::PowerExp, "Power heuristic" }, }; const Gui::DropdownList kEmissiveSamplerList = { { (uint32_t)EmissiveLightSamplerType::Uniform, "Uniform" }, { (uint32_t)EmissiveLightSamplerType::LightBVH, "LightBVH" }, { (uint32_t)EmissiveLightSamplerType::Power, "Power" }, }; const Gui::DropdownList kLODModeList = { { (uint32_t)TexLODMode::Mip0, "Mip0" }, { (uint32_t)TexLODMode::RayDiffs, "Ray Diffs" } }; // Scripting options. const std::string kSamplesPerPixel = "samplesPerPixel"; const std::string kMaxSurfaceBounces = "maxSurfaceBounces"; const std::string kMaxDiffuseBounces = "maxDiffuseBounces"; const std::string kMaxSpecularBounces = "maxSpecularBounces"; const std::string kMaxTransmissionBounces = "maxTransmissionBounces"; const std::string kSampleGenerator = "sampleGenerator"; const std::string kFixedSeed = "fixedSeed"; const std::string kUseBSDFSampling = "useBSDFSampling"; const std::string kUseRussianRoulette = "useRussianRoulette"; const std::string kUseNEE = "useNEE"; const std::string kUseMIS = "useMIS"; const std::string kMISHeuristic = "misHeuristic"; const std::string kMISPowerExponent = "misPowerExponent"; const std::string kEmissiveSampler = "emissiveSampler"; const std::string kLightBVHOptions = "lightBVHOptions"; const std::string kUseRTXDI = "useRTXDI"; const std::string kRTXDIOptions = "RTXDIOptions"; const std::string kUseAlphaTest = "useAlphaTest"; const std::string kAdjustShadingNormals = "adjustShadingNormals"; const std::string kMaxNestedMaterials = "maxNestedMaterials"; const std::string kUseLightsInDielectricVolumes = "useLightsInDielectricVolumes"; const std::string kDisableCaustics = "disableCaustics"; const std::string kSpecularRoughnessThreshold = "specularRoughnessThreshold"; const std::string kPrimaryLodMode = "primaryLodMode"; const std::string kLODBias = "lodBias"; const std::string kOutputSize = "outputSize"; const std::string kFixedOutputSize = "fixedOutputSize"; const std::string kColorFormat = "colorFormat"; const std::string kUseNRDDemodulation = "useNRDDemodulation"; } // Don't remove this. it's required for hot-reload to function properly extern "C" FALCOR_API_EXPORT const char* getProjDir() { return PROJECT_DIR; } extern "C" FALCOR_API_EXPORT void getPasses(Falcor::RenderPassLibrary& lib) { lib.registerPass(PathTracer::kInfo, PathTracer::create); ScriptBindings::registerBinding(PathTracer::registerBindings); } void PathTracer::registerBindings(pybind11::module& m) { pybind11::enum_<ColorFormat> colorFormat(m, "ColorFormat"); colorFormat.value("RGBA32F", ColorFormat::RGBA32F); colorFormat.value("LogLuvHDR", ColorFormat::LogLuvHDR); pybind11::enum_<MISHeuristic> misHeuristic(m, "MISHeuristic"); misHeuristic.value("Balance", MISHeuristic::Balance); misHeuristic.value("PowerTwo", MISHeuristic::PowerTwo); misHeuristic.value("PowerExp", MISHeuristic::PowerExp); pybind11::class_<PathTracer, RenderPass, PathTracer::SharedPtr> pass(m, "PathTracer"); pass.def_property_readonly("pixelStats", &PathTracer::getPixelStats); pass.def_property("useFixedSeed", [](const PathTracer* pt) { return pt->mParams.useFixedSeed ? true : false; }, [](PathTracer* pt, bool value) { pt->mParams.useFixedSeed = value ? 
1 : 0; } ); pass.def_property("fixedSeed", [](const PathTracer* pt) { return pt->mParams.fixedSeed; }, [](PathTracer* pt, uint32_t value) { pt->mParams.fixedSeed = value; } ); } PathTracer::SharedPtr PathTracer::create(RenderContext* pRenderContext, const Dictionary& dict) { return SharedPtr(new PathTracer(dict)); } PathTracer::PathTracer(const Dictionary& dict) : RenderPass(kInfo) { if (!gpDevice->isShaderModelSupported(Device::ShaderModel::SM6_5)) { throw RuntimeError("PathTracer: Shader Model 6.5 is not supported by the current device"); } if (!gpDevice->isFeatureSupported(Device::SupportedFeatures::RaytracingTier1_1)) { throw RuntimeError("PathTracer: Raytracing Tier 1.1 is not supported by the current device"); } parseDictionary(dict); validateOptions(); // Create sample generator. mpSampleGenerator = SampleGenerator::create(mStaticParams.sampleGenerator); // Create programs. auto defines = mStaticParams.getDefines(*this); mpGeneratePaths = ComputePass::create(Program::Desc(kGeneratePathsFilename).setShaderModel(kShaderModel).csEntry("main"), defines, false); mpResolvePass = ComputePass::create(Program::Desc(kResolvePassFilename).setShaderModel(kShaderModel).csEntry("main"), defines, false); mpReflectTypes = ComputePass::create(Program::Desc(kReflectTypesFile).setShaderModel(kShaderModel).csEntry("main"), defines, false); // Note: The trace pass program is lazily created in updatePrograms() because a scene needs to be present when creating it. mpPixelStats = PixelStats::create(); mpPixelDebug = PixelDebug::create(); } void PathTracer::parseDictionary(const Dictionary& dict) { for (const auto& [key, value] : dict) { // Rendering parameters if (key == kSamplesPerPixel) mStaticParams.samplesPerPixel = value; else if (key == kMaxSurfaceBounces) mStaticParams.maxSurfaceBounces = value; else if (key == kMaxDiffuseBounces) mStaticParams.maxDiffuseBounces = value; else if (key == kMaxSpecularBounces) mStaticParams.maxSpecularBounces = value; else if (key == kMaxTransmissionBounces) mStaticParams.maxTransmissionBounces = value; // Sampling parameters else if (key == kSampleGenerator) mStaticParams.sampleGenerator = value; else if (key == kFixedSeed) { mParams.fixedSeed = value; mParams.useFixedSeed = true; } else if (key == kUseBSDFSampling) mStaticParams.useBSDFSampling = value; else if (key == kUseRussianRoulette) mStaticParams.useRussianRoulette = value; else if (key == kUseNEE) mStaticParams.useNEE = value; else if (key == kUseMIS) mStaticParams.useMIS = value; else if (key == kMISHeuristic) mStaticParams.misHeuristic = value; else if (key == kMISPowerExponent) mStaticParams.misPowerExponent = value; else if (key == kEmissiveSampler) mStaticParams.emissiveSampler = value; else if (key == kLightBVHOptions) mLightBVHOptions = value; else if (key == kUseRTXDI) mStaticParams.useRTXDI = value; else if (key == kRTXDIOptions) mRTXDIOptions = value; // Material parameters else if (key == kUseAlphaTest) mStaticParams.useAlphaTest = value; else if (key == kAdjustShadingNormals) mStaticParams.adjustShadingNormals = value; else if (key == kMaxNestedMaterials) mStaticParams.maxNestedMaterials = value; else if (key == kUseLightsInDielectricVolumes) mStaticParams.useLightsInDielectricVolumes = value; else if (key == kDisableCaustics) mStaticParams.disableCaustics = value; else if (key == kSpecularRoughnessThreshold) mParams.specularRoughnessThreshold = value; else if (key == kPrimaryLodMode) mStaticParams.primaryLodMode = value; else if (key == kLODBias) mParams.lodBias = value; // Denoising parameters 
else if (key == kUseNRDDemodulation) mStaticParams.useNRDDemodulation = value; // Output parameters else if (key == kOutputSize) mOutputSizeSelection = value; else if (key == kFixedOutputSize) mFixedOutputSize = value; else if (key == kColorFormat) mStaticParams.colorFormat = value; else logWarning("Unknown field '{}' in PathTracer dictionary.", key); } if (dict.keyExists(kMaxSurfaceBounces)) { // Initialize bounce counts to 'maxSurfaceBounces' if they weren't explicitly set. if (!dict.keyExists(kMaxDiffuseBounces)) mStaticParams.maxDiffuseBounces = mStaticParams.maxSurfaceBounces; if (!dict.keyExists(kMaxSpecularBounces)) mStaticParams.maxSpecularBounces = mStaticParams.maxSurfaceBounces; if (!dict.keyExists(kMaxTransmissionBounces)) mStaticParams.maxTransmissionBounces = mStaticParams.maxSurfaceBounces; } else { // Initialize surface bounces. mStaticParams.maxSurfaceBounces = std::max(mStaticParams.maxDiffuseBounces, std::max(mStaticParams.maxSpecularBounces, mStaticParams.maxTransmissionBounces)); } bool maxSurfaceBouncesNeedsAdjustment = mStaticParams.maxSurfaceBounces < mStaticParams.maxDiffuseBounces || mStaticParams.maxSurfaceBounces < mStaticParams.maxSpecularBounces || mStaticParams.maxSurfaceBounces < mStaticParams.maxTransmissionBounces; // Show a warning if maxSurfaceBounces will be adjusted in validateOptions(). if (dict.keyExists(kMaxSurfaceBounces) && maxSurfaceBouncesNeedsAdjustment) { logWarning("'{}' is set lower than '{}', '{}' or '{}' and will be increased.", kMaxSurfaceBounces, kMaxDiffuseBounces, kMaxSpecularBounces, kMaxTransmissionBounces); } } void PathTracer::validateOptions() { if (mParams.specularRoughnessThreshold < 0.f || mParams.specularRoughnessThreshold > 1.f) { logWarning("'specularRoughnessThreshold' has invalid value. Clamping to range [0,1]."); mParams.specularRoughnessThreshold = clamp(mParams.specularRoughnessThreshold, 0.f, 1.f); } // Static parameters. if (mStaticParams.samplesPerPixel < 1 || mStaticParams.samplesPerPixel > kMaxSamplesPerPixel) { logWarning("'samplesPerPixel' must be in the range [1, {}]. Clamping to this range.", kMaxSamplesPerPixel); mStaticParams.samplesPerPixel = std::clamp(mStaticParams.samplesPerPixel, 1u, kMaxSamplesPerPixel); } auto clampBounces = [] (uint32_t& bounces, const std::string& name) { if (bounces > kMaxBounces) { logWarning("'{}' exceeds the maximum supported bounces. Clamping to {}.", name, kMaxBounces); bounces = kMaxBounces; } }; clampBounces(mStaticParams.maxSurfaceBounces, kMaxSurfaceBounces); clampBounces(mStaticParams.maxDiffuseBounces, kMaxDiffuseBounces); clampBounces(mStaticParams.maxSpecularBounces, kMaxSpecularBounces); clampBounces(mStaticParams.maxTransmissionBounces, kMaxTransmissionBounces); // Make sure maxSurfaceBounces is at least as many as any of diffuse, specular or transmission. uint32_t minSurfaceBounces = std::max(mStaticParams.maxDiffuseBounces, std::max(mStaticParams.maxSpecularBounces, mStaticParams.maxTransmissionBounces)); mStaticParams.maxSurfaceBounces = std::max(mStaticParams.maxSurfaceBounces, minSurfaceBounces); if (mStaticParams.primaryLodMode == TexLODMode::RayCones) { logWarning("Unsupported tex lod mode. 
Defaulting to Mip0."); mStaticParams.primaryLodMode = TexLODMode::Mip0; } } Dictionary PathTracer::getScriptingDictionary() { if (auto lightBVHSampler = std::dynamic_pointer_cast<LightBVHSampler>(mpEmissiveSampler)) { mLightBVHOptions = lightBVHSampler->getOptions(); } Dictionary d; // Rendering parameters d[kSamplesPerPixel] = mStaticParams.samplesPerPixel; d[kMaxSurfaceBounces] = mStaticParams.maxSurfaceBounces; d[kMaxDiffuseBounces] = mStaticParams.maxDiffuseBounces; d[kMaxSpecularBounces] = mStaticParams.maxSpecularBounces; d[kMaxTransmissionBounces] = mStaticParams.maxTransmissionBounces; // Sampling parameters d[kSampleGenerator] = mStaticParams.sampleGenerator; if (mParams.useFixedSeed) d[kFixedSeed] = mParams.fixedSeed; d[kUseBSDFSampling] = mStaticParams.useBSDFSampling; d[kUseRussianRoulette] = mStaticParams.useRussianRoulette; d[kUseNEE] = mStaticParams.useNEE; d[kUseMIS] = mStaticParams.useMIS; d[kMISHeuristic] = mStaticParams.misHeuristic; d[kMISPowerExponent] = mStaticParams.misPowerExponent; d[kEmissiveSampler] = mStaticParams.emissiveSampler; if (mStaticParams.emissiveSampler == EmissiveLightSamplerType::LightBVH) d[kLightBVHOptions] = mLightBVHOptions; d[kUseRTXDI] = mStaticParams.useRTXDI; d[kRTXDIOptions] = mRTXDIOptions; // Material parameters d[kUseAlphaTest] = mStaticParams.useAlphaTest; d[kAdjustShadingNormals] = mStaticParams.adjustShadingNormals; d[kMaxNestedMaterials] = mStaticParams.maxNestedMaterials; d[kUseLightsInDielectricVolumes] = mStaticParams.useLightsInDielectricVolumes; d[kDisableCaustics] = mStaticParams.disableCaustics; d[kSpecularRoughnessThreshold] = mParams.specularRoughnessThreshold; d[kPrimaryLodMode] = mStaticParams.primaryLodMode; d[kLODBias] = mParams.lodBias; // Denoising parameters d[kUseNRDDemodulation] = mStaticParams.useNRDDemodulation; // Output parameters d[kOutputSize] = mOutputSizeSelection; if (mOutputSizeSelection == RenderPassHelpers::IOSize::Fixed) d[kFixedOutputSize] = mFixedOutputSize; d[kColorFormat] = mStaticParams.colorFormat; return d; } RenderPassReflection PathTracer::reflect(const CompileData& compileData) { RenderPassReflection reflector; const uint2 sz = RenderPassHelpers::calculateIOSize(mOutputSizeSelection, mFixedOutputSize, compileData.defaultTexDims); addRenderPassInputs(reflector, kInputChannels); addRenderPassOutputs(reflector, kOutputChannels, ResourceBindFlags::UnorderedAccess, sz); return reflector; } void PathTracer::setFrameDim(const uint2 frameDim) { auto prevFrameDim = mParams.frameDim; auto prevScreenTiles = mParams.screenTiles; mParams.frameDim = frameDim; if (mParams.frameDim.x > kMaxFrameDimension || mParams.frameDim.y > kMaxFrameDimension) { throw RuntimeError("Frame dimensions up to {} pixels width/height are supported.", kMaxFrameDimension); } // Tile dimensions have to be powers-of-two. FALCOR_ASSERT(isPowerOf2(kScreenTileDim.x) && isPowerOf2(kScreenTileDim.y)); FALCOR_ASSERT(kScreenTileDim.x == (1 << kScreenTileBits.x) && kScreenTileDim.y == (1 << kScreenTileBits.y)); mParams.screenTiles = div_round_up(mParams.frameDim, kScreenTileDim); if (mParams.frameDim != prevFrameDim || mParams.screenTiles != prevScreenTiles) { mVarsChanged = true; } } void PathTracer::setScene(RenderContext* pRenderContext, const Scene::SharedPtr& pScene) { mpScene = pScene; mParams.frameCount = 0; mParams.frameDim = {}; mParams.screenTiles = {}; mpRTXDI = nullptr; // Need to recreate the trace pass because the shader binding table changes. 
mTracePass.pProgram = nullptr; resetLighting(); if (mpScene) { if (pScene->hasGeometryType(Scene::GeometryType::Custom)) { logWarning("PathTracer: This render pass does not support custom primitives."); } validateOptions(); mRecompile = true; } } void PathTracer::execute(RenderContext* pRenderContext, const RenderData& renderData) { if (!beginFrame(pRenderContext, renderData)) return; // Update shader program specialization. updatePrograms(); // Prepare resources. prepareResources(pRenderContext, renderData); // Prepare the path tracer parameter block. // This should be called after all resources have been created. preparePathTracer(renderData); // Generate paths at primary hits. generatePaths(pRenderContext, renderData); // Update RTXDI. if (mpRTXDI) { const auto& pMotionVectors = renderData[kInputMotionVectors]->asTexture(); mpRTXDI->update(pRenderContext, pMotionVectors); } // Trace pass. tracePass(pRenderContext, renderData); // Resolve pass. resolvePass(pRenderContext, renderData); endFrame(pRenderContext, renderData); } void PathTracer::renderUI(Gui::Widgets& widget) { bool dirty = false; // Rendering options. dirty |= renderRenderingUI(widget); // Stats and debug options. renderStatsUI(widget); dirty |= renderDebugUI(widget); if (dirty) { validateOptions(); mOptionsChanged = true; } } bool PathTracer::renderRenderingUI(Gui::Widgets& widget) { bool dirty = false; bool runtimeDirty = false; if (mFixedSampleCount) { dirty |= widget.var("Samples/pixel", mStaticParams.samplesPerPixel, 1u, kMaxSamplesPerPixel); } else widget.text("Samples/pixel: Variable"); widget.tooltip("Number of samples per pixel. One path is traced for each sample.\n\n" "When the '" + kInputSampleCount + "' input is connected, the number of samples per pixel is loaded from the texture."); if (widget.var("Max surface bounces", mStaticParams.maxSurfaceBounces, 0u, kMaxBounces)) { // Allow users to change the max surface bounce parameter in the UI to clamp all other surface bounce parameters. mStaticParams.maxDiffuseBounces = std::min(mStaticParams.maxDiffuseBounces, mStaticParams.maxSurfaceBounces); mStaticParams.maxSpecularBounces = std::min(mStaticParams.maxSpecularBounces, mStaticParams.maxSurfaceBounces); mStaticParams.maxTransmissionBounces = std::min(mStaticParams.maxTransmissionBounces, mStaticParams.maxSurfaceBounces); dirty = true; } widget.tooltip("Maximum number of surface bounces (diffuse + specular + transmission).\n" "Note that specular reflection events from a material with a roughness greater than specularRoughnessThreshold are also classified as diffuse events."); dirty |= widget.var("Max diffuse bounces", mStaticParams.maxDiffuseBounces, 0u, kMaxBounces); widget.tooltip("Maximum number of diffuse bounces.\n0 = direct only\n1 = one indirect bounce etc."); dirty |= widget.var("Max specular bounces", mStaticParams.maxSpecularBounces, 0u, kMaxBounces); widget.tooltip("Maximum number of specular bounces.\n0 = direct only\n1 = one indirect bounce etc."); dirty |= widget.var("Max transmission bounces", mStaticParams.maxTransmissionBounces, 0u, kMaxBounces); widget.tooltip("Maximum number of transmission bounces.\n0 = no transmission\n1 = one transmission bounce etc."); // Sampling options. 
if (widget.dropdown("Sample generator", SampleGenerator::getGuiDropdownList(), mStaticParams.sampleGenerator)) { mpSampleGenerator = SampleGenerator::create(mStaticParams.sampleGenerator); dirty = true; } dirty |= widget.checkbox("BSDF importance sampling", mStaticParams.useBSDFSampling); widget.tooltip("BSDF importance sampling should normally be enabled.\n\n" "If disabled, cosine-weighted hemisphere sampling is used for debugging purposes"); dirty |= widget.checkbox("Russian roulette", mStaticParams.useRussianRoulette); widget.tooltip("Use russian roulette to terminate low throughput paths."); dirty |= widget.checkbox("Next-event estimation (NEE)", mStaticParams.useNEE); widget.tooltip("Use next-event estimation.\nThis option enables direct illumination sampling at each path vertex."); if (mStaticParams.useNEE) { dirty |= widget.checkbox("Multiple importance sampling (MIS)", mStaticParams.useMIS); widget.tooltip("When enabled, BSDF sampling is combined with light sampling for the environment map and emissive lights.\n" "Note that MIS has currently no effect on analytic lights."); if (mStaticParams.useMIS) { dirty |= widget.dropdown("MIS heuristic", kMISHeuristicList, reinterpret_cast<uint32_t&>(mStaticParams.misHeuristic)); if (mStaticParams.misHeuristic == MISHeuristic::PowerExp) { dirty |= widget.var("MIS power exponent", mStaticParams.misPowerExponent, 0.01f, 10.f); } } if (mpScene && mpScene->useEmissiveLights()) { if (auto group = widget.group("Emissive sampler")) { if (widget.dropdown("Emissive sampler", kEmissiveSamplerList, (uint32_t&)mStaticParams.emissiveSampler)) { resetLighting(); dirty = true; } widget.tooltip("Selects which light sampler to use for importance sampling of emissive geometry.", true); if (mpEmissiveSampler) { if (mpEmissiveSampler->renderUI(group)) mOptionsChanged = true; } } } } if (auto group = widget.group("RTXDI")) { dirty |= widget.checkbox("Enabled", mStaticParams.useRTXDI); widget.tooltip("Use RTXDI for direct illumination."); if (mpRTXDI) dirty |= mpRTXDI->renderUI(group); } if (auto group = widget.group("Material controls")) { dirty |= widget.checkbox("Alpha test", mStaticParams.useAlphaTest); widget.tooltip("Use alpha testing on non-opaque triangles."); dirty |= widget.checkbox("Adjust shading normals on secondary hits", mStaticParams.adjustShadingNormals); widget.tooltip("Enables adjustment of the shading normals to reduce the risk of black pixels due to back-facing vectors.\nDoes not apply to primary hits which is configured in GBuffer.", true); dirty |= widget.var("Max nested materials", mStaticParams.maxNestedMaterials, 2u, 4u); widget.tooltip("Maximum supported number of nested materials."); dirty |= widget.checkbox("Use lights in dielectric volumes", mStaticParams.useLightsInDielectricVolumes); widget.tooltip("Use lights inside of volumes (transmissive materials). We typically don't want this because lights are occluded by the interface."); dirty |= widget.checkbox("Disable caustics", mStaticParams.disableCaustics); widget.tooltip("Disable sampling of caustic light paths (i.e. specular events after diffuse events)."); runtimeDirty |= widget.var("Specular roughness threshold", mParams.specularRoughnessThreshold, 0.f, 1.f); widget.tooltip("Specular reflection events are only classified as specular if the material's roughness value is equal or smaller than this threshold. 
Otherwise they are classified diffuse."); dirty |= widget.dropdown("Primary LOD Mode", kLODModeList, reinterpret_cast<uint32_t&>(mStaticParams.primaryLodMode)); widget.tooltip("Texture LOD mode at primary hit"); runtimeDirty |= widget.var("TexLOD bias", mParams.lodBias, -16.f, 16.f, 0.01f); } if (auto group = widget.group("Denoiser options")) { dirty |= widget.checkbox("Use NRD demodulation", mStaticParams.useNRDDemodulation); widget.tooltip("Global switch for NRD demodulation"); } if (auto group = widget.group("Output options")) { // Switch to enable/disable path tracer output. dirty |= widget.checkbox("Enable output", mEnabled); // Controls for output size. // When output size requirements change, we'll trigger a graph recompile to update the render pass I/O sizes. if (widget.dropdown("Output size", RenderPassHelpers::kIOSizeList, (uint32_t&)mOutputSizeSelection)) requestRecompile(); if (mOutputSizeSelection == RenderPassHelpers::IOSize::Fixed) { if (widget.var("Size in pixels", mFixedOutputSize, 32u, 16384u)) requestRecompile(); } dirty |= widget.dropdown("Color format", kColorFormatList, (uint32_t&)mStaticParams.colorFormat); widget.tooltip("Selects the color format used for internal per-sample color and denoiser buffers"); } if (dirty) mRecompile = true; return dirty || runtimeDirty; } bool PathTracer::renderDebugUI(Gui::Widgets& widget) { bool dirty = false; if (auto group = widget.group("Debugging")) { dirty |= group.checkbox("Use fixed seed", mParams.useFixedSeed); group.tooltip("Forces a fixed random seed for each frame.\n\n" "This should produce exactly the same image each frame, which can be useful for debugging."); if (mParams.useFixedSeed) { dirty |= group.var("Seed", mParams.fixedSeed); } mpPixelDebug->renderUI(group); } return dirty; } void PathTracer::renderStatsUI(Gui::Widgets& widget) { if (auto g = widget.group("Statistics")) { // Show ray stats mpPixelStats->renderUI(g); } } bool PathTracer::onMouseEvent(const MouseEvent& mouseEvent) { return mpPixelDebug->onMouseEvent(mouseEvent); } void PathTracer::updatePrograms() { FALCOR_ASSERT(mpScene); if (mRecompile == false) return; auto defines = mStaticParams.getDefines(*this); auto typeConformances = mStaticParams.getTypeConformances(*this); // Create trace pass lazily. if (!mTracePass.pProgram) { const uint32_t kRayTypeScatter = 0; const uint32_t kMissScatter = 0; RtProgram::Desc desc; desc.addShaderLibrary(kTracePassFilename); desc.setShaderModel(kShaderModel); desc.setMaxPayloadSize(160); // This is conservative but the required minimum is 140 bytes. desc.setMaxAttributeSize(mpScene->getRaytracingMaxAttributeSize()); desc.setMaxTraceRecursionDepth(1); if (!mpScene->hasProceduralGeometry()) desc.setPipelineFlags(RtPipelineFlags::SkipProceduralPrimitives); desc.addTypeConformances(typeConformances); mTracePass.pBindingTable = RtBindingTable::create(1, 1, mpScene->getGeometryCount()); mTracePass.pBindingTable->setRayGen(desc.addRayGen("rayGen")); mTracePass.pBindingTable->setMiss(kMissScatter, desc.addMiss("scatterMiss")); // Add hit groups for triangles. mTracePass.pBindingTable->setHitGroup(kRayTypeScatter, mpScene->getGeometryIDs(Scene::GeometryType::TriangleMesh), desc.addHitGroup("scatterTriangleClosestHit", "scatterTriangleAnyHit")); // Add hit groups for displaced triangle meshes. 
if (mpScene->hasGeometryType(Scene::GeometryType::DisplacedTriangleMesh)) { mTracePass.pBindingTable->setHitGroup(kRayTypeScatter, mpScene->getGeometryIDs(Scene::GeometryType::DisplacedTriangleMesh), desc.addHitGroup("scatterDisplacedTriangleMeshClosestHit", "", "displacedTriangleMeshIntersection")); } // Add hit groups for curves. if (mpScene->hasGeometryType(Scene::GeometryType::Curve)) { mTracePass.pBindingTable->setHitGroup(kRayTypeScatter, mpScene->getGeometryIDs(Scene::GeometryType::Curve), desc.addHitGroup("scatterCurveClosestHit", "", "curveIntersection")); } // Add hit groups for SDF grids. if (mpScene->hasGeometryType(Scene::GeometryType::SDFGrid)) { mTracePass.pBindingTable->setHitGroup(kRayTypeScatter, mpScene->getGeometryIDs(Scene::GeometryType::SDFGrid), desc.addHitGroup("scatterSdfGridClosestHit", "", "sdfGridIntersection")); } mTracePass.pProgram = RtProgram::create(desc, defines); } FALCOR_ASSERT(mTracePass.pProgram != nullptr && mTracePass.pBindingTable != nullptr); // Prepare other programs. auto prepareProgram = [&](Program::SharedPtr program) { program->addDefines(defines); program->setTypeConformances(typeConformances); }; prepareProgram(mpGeneratePaths->getProgram()); prepareProgram(mpResolvePass->getProgram()); prepareProgram(mpReflectTypes->getProgram()); prepareProgram(mTracePass.pProgram); mpGeneratePaths->setVars(nullptr); mpResolvePass->setVars(nullptr); mpReflectTypes->setVars(nullptr); mTracePass.pVars = RtProgramVars::create(mTracePass.pProgram, mTracePass.pBindingTable); mVarsChanged = true; mRecompile = false; } void PathTracer::prepareResources(RenderContext* pRenderContext, const RenderData& renderData) { // Compute allocation requirements for paths and output samples. // Note that the sample buffers are padded to whole tiles, while the max path count depends on actual frame dimension. // If we don't have a fixed sample count, assume the worst case. uint32_t spp = mFixedSampleCount ? mStaticParams.samplesPerPixel : kMaxSamplesPerPixel; uint32_t tileCount = mParams.screenTiles.x * mParams.screenTiles.y; const uint32_t sampleCount = tileCount * kScreenTileDim.x * kScreenTileDim.y * spp; const uint32_t screenPixelCount = mParams.frameDim.x * mParams.frameDim.y; const uint32_t pathCount = screenPixelCount * spp; // Allocate output sample offset buffer if needed. // This buffer stores the output offset to where the samples for each pixel are stored consecutively. // The offsets are local to the current tile, so 16-bit format is sufficient and reduces bandwidth usage. if (!mFixedSampleCount) { if (!mpSampleOffset || mpSampleOffset->getWidth() != mParams.frameDim.x || mpSampleOffset->getHeight() != mParams.frameDim.y) { FALCOR_ASSERT(kScreenTileDim.x * kScreenTileDim.y * kMaxSamplesPerPixel <= (1u << 16)); mpSampleOffset = Texture::create2D(mParams.frameDim.x, mParams.frameDim.y, ResourceFormat::R16Uint, 1, 1, nullptr, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess); mVarsChanged = true; } } auto var = mpReflectTypes->getRootVar(); // Allocate per-sample buffers. // For the special case of fixed 1 spp, the output is written out directly and this buffer is not needed. 
if (!mFixedSampleCount || mStaticParams.samplesPerPixel > 1) { if (!mpSampleColor || mpSampleColor->getElementCount() < sampleCount || mVarsChanged) { mpSampleColor = Buffer::createStructured(var["sampleColor"], sampleCount, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess, Buffer::CpuAccess::None, nullptr, false); mVarsChanged = true; } } if (mOutputGuideData && (!mpSampleGuideData || mpSampleGuideData->getElementCount() < sampleCount || mVarsChanged)) { mpSampleGuideData = Buffer::createStructured(var["sampleGuideData"], sampleCount, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess, Buffer::CpuAccess::None, nullptr, false); mVarsChanged = true; } if (mOutputNRDData && (!mpSampleNRDRadiance || mpSampleNRDRadiance->getElementCount() < sampleCount || mVarsChanged)) { mpSampleNRDRadiance = Buffer::createStructured(var["sampleNRDRadiance"], sampleCount, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess, Buffer::CpuAccess::None, nullptr, false); mpSampleNRDHitDist = Buffer::createStructured(var["sampleNRDHitDist"], sampleCount, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess, Buffer::CpuAccess::None, nullptr, false); mpSampleNRDEmission = Buffer::createStructured(var["sampleNRDEmission"], sampleCount, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess, Buffer::CpuAccess::None, nullptr, false); mpSampleNRDReflectance = Buffer::createStructured(var["sampleNRDReflectance"], sampleCount, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess, Buffer::CpuAccess::None, nullptr, false); mVarsChanged = true; } } void PathTracer::preparePathTracer(const RenderData& renderData) { // Create path tracer parameter block if needed. if (!mpPathTracerBlock || mVarsChanged) { auto reflector = mpReflectTypes->getProgram()->getReflector()->getParameterBlock("pathTracer"); mpPathTracerBlock = ParameterBlock::create(reflector); FALCOR_ASSERT(mpPathTracerBlock); mVarsChanged = true; } // Bind resources. auto var = mpPathTracerBlock->getRootVar(); setShaderData(var, renderData); } void PathTracer::resetLighting() { // Retain the options for the emissive sampler. if (auto lightBVHSampler = std::dynamic_pointer_cast<LightBVHSampler>(mpEmissiveSampler)) { mLightBVHOptions = lightBVHSampler->getOptions(); } mpEmissiveSampler = nullptr; mpEnvMapSampler = nullptr; mRecompile = true; } void PathTracer::prepareMaterials(RenderContext* pRenderContext) { // This functions checks for material changes and performs any necessary update. // For now all we need to do is to trigger a recompile so that the right defines get set. // In the future, we might want to do additional material-specific setup here. 
if (is_set(mpScene->getUpdates(), Scene::UpdateFlags::MaterialsChanged)) { mRecompile = true; } } bool PathTracer::prepareLighting(RenderContext* pRenderContext) { bool lightingChanged = false; if (is_set(mpScene->getUpdates(), Scene::UpdateFlags::RenderSettingsChanged)) { lightingChanged = true; mRecompile = true; } if (is_set(mpScene->getUpdates(), Scene::UpdateFlags::SDFGridConfigChanged)) { mRecompile = true; } if (is_set(mpScene->getUpdates(), Scene::UpdateFlags::EnvMapChanged)) { mpEnvMapSampler = nullptr; lightingChanged = true; mRecompile = true; } if (mpScene->useEnvLight()) { if (!mpEnvMapSampler) { mpEnvMapSampler = EnvMapSampler::create(pRenderContext, mpScene->getEnvMap()); lightingChanged = true; mRecompile = true; } } else { if (mpEnvMapSampler) { mpEnvMapSampler = nullptr; lightingChanged = true; mRecompile = true; } } // Request the light collection if emissive lights are enabled. if (mpScene->getRenderSettings().useEmissiveLights) { mpScene->getLightCollection(pRenderContext); } if (mpScene->useEmissiveLights()) { if (!mpEmissiveSampler) { const auto& pLights = mpScene->getLightCollection(pRenderContext); FALCOR_ASSERT(pLights && pLights->getActiveLightCount() > 0); FALCOR_ASSERT(!mpEmissiveSampler); switch (mStaticParams.emissiveSampler) { case EmissiveLightSamplerType::Uniform: mpEmissiveSampler = EmissiveUniformSampler::create(pRenderContext, mpScene); break; case EmissiveLightSamplerType::LightBVH: mpEmissiveSampler = LightBVHSampler::create(pRenderContext, mpScene, mLightBVHOptions); break; case EmissiveLightSamplerType::Power: mpEmissiveSampler = EmissivePowerSampler::create(pRenderContext, mpScene); break; default: throw RuntimeError("Unknown emissive light sampler type"); } lightingChanged = true; mRecompile = true; } } else { if (mpEmissiveSampler) { // Retain the options for the emissive sampler. if (auto lightBVHSampler = std::dynamic_pointer_cast<LightBVHSampler>(mpEmissiveSampler)) { mLightBVHOptions = lightBVHSampler->getOptions(); } mpEmissiveSampler = nullptr; lightingChanged = true; mRecompile = true; } } if (mpEmissiveSampler) { lightingChanged |= mpEmissiveSampler->update(pRenderContext); auto defines = mpEmissiveSampler->getDefines(); if (mTracePass.pProgram && mTracePass.pProgram->addDefines(defines)) mRecompile = true; } return lightingChanged; } void PathTracer::prepareRTXDI(RenderContext* pRenderContext) { if (mStaticParams.useRTXDI) { if (!mpRTXDI) mpRTXDI = RTXDI::create(mpScene, mRTXDIOptions); // Emit warning if enabled while using spp != 1. if (!mFixedSampleCount || mStaticParams.samplesPerPixel != 1) { logWarning("Using RTXDI with samples/pixel != 1 will only generate one RTXDI sample reused for all pixel samples."); } } else { mpRTXDI = nullptr; } } void PathTracer::setNRDData(const ShaderVar& var, const RenderData& renderData) const { var["sampleRadiance"] = mpSampleNRDRadiance; var["sampleHitDist"] = mpSampleNRDHitDist; var["sampleEmission"] = mpSampleNRDEmission; var["sampleReflectance"] = mpSampleNRDReflectance; var["primaryHitEmission"] = renderData[kOutputNRDEmission]->asTexture(); var["primaryHitDiffuseReflectance"] = renderData[kOutputNRDDiffuseReflectance]->asTexture(); var["primaryHitSpecularReflectance"] = renderData[kOutputNRDSpecularReflectance]->asTexture(); } void PathTracer::setShaderData(const ShaderVar& var, const RenderData& renderData, bool useLightSampling) const { // Bind static resources that don't change per frame. 
if (mVarsChanged) { if (useLightSampling && mpEnvMapSampler) mpEnvMapSampler->setShaderData(var["envMapSampler"]); var["sampleOffset"] = mpSampleOffset; // Can be nullptr var["sampleColor"] = mpSampleColor; var["sampleGuideData"] = mpSampleGuideData; } // Bind runtime data. setNRDData(var["outputNRD"], renderData); Texture::SharedPtr pViewDir; if (mpScene->getCamera()->getApertureRadius() > 0.f) { pViewDir = renderData[kInputViewDir]->asTexture(); if (!pViewDir) logWarning("Depth-of-field requires the '{}' input. Expect incorrect rendering.", kInputViewDir); } Texture::SharedPtr pSampleCount; if (!mFixedSampleCount) { pSampleCount = renderData[kInputSampleCount]->asTexture(); if (!pSampleCount) throw RuntimeError("PathTracer: Missing sample count input texture"); } var["params"].setBlob(mParams); var["vbuffer"] = renderData[kInputVBuffer]->asTexture(); var["viewDir"] = pViewDir; // Can be nullptr var["sampleCount"] = pSampleCount; // Can be nullptr var["outputColor"] = renderData[kOutputColor]->asTexture(); if (useLightSampling && mpEmissiveSampler) { // TODO: Do we have to bind this every frame? mpEmissiveSampler->setShaderData(var["emissiveSampler"]); } } bool PathTracer::beginFrame(RenderContext* pRenderContext, const RenderData& renderData) { const auto& pOutputColor = renderData[kOutputColor]->asTexture(); FALCOR_ASSERT(pOutputColor); // Set output frame dimension. setFrameDim(uint2(pOutputColor->getWidth(), pOutputColor->getHeight())); // Validate all I/O sizes match the expected size. // If not, we'll disable the path tracer to give the user a chance to fix the configuration before re-enabling it. bool resolutionMismatch = false; auto validateChannels = [&](const auto& channels) { for (const auto& channel : channels) { auto pTexture = renderData[channel.name]->asTexture(); if (pTexture && (pTexture->getWidth() != mParams.frameDim.x || pTexture->getHeight() != mParams.frameDim.y)) resolutionMismatch = true; } }; validateChannels(kInputChannels); validateChannels(kOutputChannels); if (mEnabled && resolutionMismatch) { logError("PathTracer I/O sizes don't match. The pass will be disabled."); mEnabled = false; } if (mpScene == nullptr || !mEnabled) { pRenderContext->clearUAV(pOutputColor->getUAV().get(), float4(0.f)); // Set refresh flag if changes that affect the output have occured. // This is needed to ensure other passes get notified when the path tracer is enabled/disabled. if (mOptionsChanged) { auto& dict = renderData.getDictionary(); auto flags = dict.getValue(kRenderPassRefreshFlags, Falcor::RenderPassRefreshFlags::None); if (mOptionsChanged) flags |= Falcor::RenderPassRefreshFlags::RenderOptionsChanged; dict[Falcor::kRenderPassRefreshFlags] = flags; } return false; } // Update materials. prepareMaterials(pRenderContext); // Update the env map and emissive sampler to the current frame. bool lightingChanged = prepareLighting(pRenderContext); // Prepare RTXDI. prepareRTXDI(pRenderContext); if (mpRTXDI) mpRTXDI->beginFrame(pRenderContext, mParams.frameDim); // Update refresh flag if changes that affect the output have occured. 
auto& dict = renderData.getDictionary(); if (mOptionsChanged || lightingChanged) { auto flags = dict.getValue(kRenderPassRefreshFlags, Falcor::RenderPassRefreshFlags::None); if (mOptionsChanged) flags |= Falcor::RenderPassRefreshFlags::RenderOptionsChanged; if (lightingChanged) flags |= Falcor::RenderPassRefreshFlags::LightingChanged; dict[Falcor::kRenderPassRefreshFlags] = flags; mOptionsChanged = false; } // Check if GBuffer has adjusted shading normals enabled. bool gbufferAdjustShadingNormals = dict.getValue(Falcor::kRenderPassGBufferAdjustShadingNormals, false); if (gbufferAdjustShadingNormals != mGBufferAdjustShadingNormals) { mGBufferAdjustShadingNormals = gbufferAdjustShadingNormals; mRecompile = true; } // Check if fixed sample count should be used. When the sample count input is connected we load the count from there instead. mFixedSampleCount = renderData[kInputSampleCount] == nullptr; // Check if guide data should be generated. mOutputGuideData = renderData[kOutputAlbedo] != nullptr || renderData[kOutputSpecularAlbedo] != nullptr || renderData[kOutputIndirectAlbedo] != nullptr || renderData[kOutputNormal] != nullptr || renderData[kOutputReflectionPosW] != nullptr; // Check if NRD data should be generated. mOutputNRDData = renderData[kOutputNRDDiffuseRadianceHitDist] != nullptr || renderData[kOutputNRDSpecularRadianceHitDist] != nullptr || renderData[kOutputNRDResidualRadianceHitDist] != nullptr || renderData[kOutputNRDEmission] != nullptr || renderData[kOutputNRDDiffuseReflectance] != nullptr || renderData[kOutputNRDSpecularReflectance] != nullptr; // Enable pixel stats if rayCount or pathLength outputs are connected. if (renderData[kOutputRayCount] != nullptr || renderData[kOutputPathLength] != nullptr) { mpPixelStats->setEnabled(true); } mpPixelStats->beginFrame(pRenderContext, mParams.frameDim); mpPixelDebug->beginFrame(pRenderContext, mParams.frameDim); // Update the random seed. mParams.seed = mParams.useFixedSeed ? mParams.fixedSeed : mParams.frameCount; return true; } void PathTracer::endFrame(RenderContext* pRenderContext, const RenderData& renderData) { mpPixelStats->endFrame(pRenderContext); mpPixelDebug->endFrame(pRenderContext); auto copyTexture = [pRenderContext](Texture* pDst, const Texture* pSrc) { if (pDst && pSrc) { FALCOR_ASSERT(pDst && pSrc); FALCOR_ASSERT(pDst->getFormat() == pSrc->getFormat()); FALCOR_ASSERT(pDst->getWidth() == pSrc->getWidth() && pDst->getHeight() == pSrc->getHeight()); pRenderContext->copyResource(pDst, pSrc); } else if (pDst) { pRenderContext->clearUAV(pDst->getUAV().get(), uint4(0, 0, 0, 0)); } }; // Copy pixel stats to outputs if available. copyTexture(renderData[kOutputRayCount]->asTexture().get(), mpPixelStats->getRayCountTexture(pRenderContext).get()); copyTexture(renderData[kOutputPathLength]->asTexture().get(), mpPixelStats->getPathLengthTexture().get()); if (mpRTXDI) mpRTXDI->endFrame(pRenderContext); mVarsChanged = false; mParams.frameCount++; } void PathTracer::generatePaths(RenderContext* pRenderContext, const RenderData& renderData) { FALCOR_PROFILE("generatePaths"); // Check shader assumptions. // We launch one thread group per screen tile, with threads linearly indexed. const uint32_t tileSize = kScreenTileDim.x * kScreenTileDim.y; FALCOR_ASSERT(kScreenTileDim.x == 16 && kScreenTileDim.y == 16); // TODO: Remove this temporary limitation when Slang bug has been fixed, see comments in shader. FALCOR_ASSERT(kScreenTileBits.x <= 4 && kScreenTileBits.y <= 4); // Since we use 8-bit deinterleave. 
FALCOR_ASSERT(mpGeneratePaths->getThreadGroupSize().x == tileSize); FALCOR_ASSERT(mpGeneratePaths->getThreadGroupSize().y == 1 && mpGeneratePaths->getThreadGroupSize().z == 1); // Additional specialization. This shouldn't change resource declarations. mpGeneratePaths->addDefine("USE_VIEW_DIR", (mpScene->getCamera()->getApertureRadius() > 0 && renderData[kInputViewDir] != nullptr) ? "1" : "0"); mpGeneratePaths->addDefine("OUTPUT_GUIDE_DATA", mOutputGuideData ? "1" : "0"); mpGeneratePaths->addDefine("OUTPUT_NRD_DATA", mOutputNRDData ? "1" : "0"); // Bind resources. auto var = mpGeneratePaths->getRootVar()["CB"]["gPathGenerator"]; setShaderData(var, renderData, false); mpGeneratePaths["gScene"] = mpScene->getParameterBlock(); if (mpRTXDI) mpRTXDI->setShaderData(mpGeneratePaths->getRootVar()); // Launch one thread per pixel. // The dimensions are padded to whole tiles to allow re-indexing the threads in the shader. mpGeneratePaths->execute(pRenderContext, { mParams.screenTiles.x * tileSize, mParams.screenTiles.y, 1u }); } void PathTracer::tracePass(RenderContext* pRenderContext, const RenderData& renderData) { FALCOR_PROFILE("tracePass"); // Additional specialization. This shouldn't change resource declarations. mTracePass.pProgram->addDefine("USE_VIEW_DIR", (mpScene->getCamera()->getApertureRadius() > 0 && renderData[kInputViewDir] != nullptr) ? "1" : "0"); mTracePass.pProgram->addDefine("OUTPUT_GUIDE_DATA", mOutputGuideData ? "1" : "0"); mTracePass.pProgram->addDefine("OUTPUT_NRD_DATA", mOutputNRDData ? "1" : "0"); // Bind global resources. auto var = mTracePass.pVars->getRootVar(); mpScene->setRaytracingShaderData(pRenderContext, var); if (mVarsChanged) mpSampleGenerator->setShaderData(var); if (mpRTXDI) mpRTXDI->setShaderData(var); mpPixelStats->prepareProgram(mTracePass.pProgram, var); mpPixelDebug->prepareProgram(mTracePass.pProgram, var); // Bind the path tracer. var["gPathTracer"] = mpPathTracerBlock; // Full screen dispatch. mpScene->raytrace(pRenderContext, mTracePass.pProgram.get(), mTracePass.pVars, uint3(mParams.frameDim, 1)); } void PathTracer::resolvePass(RenderContext* pRenderContext, const RenderData& renderData) { if (!mOutputGuideData && !mOutputNRDData && mFixedSampleCount && mStaticParams.samplesPerPixel == 1) return; FALCOR_PROFILE("resolvePass"); // This pass is executed when multiple samples per pixel are used. // We launch one thread per pixel that computes the resolved color by iterating over the samples. // The samples are arranged in tiles with pixels in Morton order, with samples stored consecutively for each pixel. // With adaptive sampling, an extra sample offset lookup table computed by the path generation pass is used to // locate the samples for each pixel. // Additional specialization. This shouldn't change resource declarations. mpResolvePass->addDefine("OUTPUT_GUIDE_DATA", mOutputGuideData ? "1" : "0"); mpResolvePass->addDefine("OUTPUT_NRD_DATA", mOutputNRDData ? "1" : "0"); // Bind resources. 
auto var = mpResolvePass->getRootVar()["CB"]["gResolvePass"]; var["params"].setBlob(mParams); var["sampleCount"] = renderData[kInputSampleCount]->asTexture(); // Can be nullptr var["outputColor"] = renderData[kOutputColor]->asTexture(); var["outputAlbedo"] = renderData[kOutputAlbedo]->asTexture(); var["outputSpecularAlbedo"] = renderData[kOutputSpecularAlbedo]->asTexture(); var["outputIndirectAlbedo"] = renderData[kOutputIndirectAlbedo]->asTexture(); var["outputNormal"] = renderData[kOutputNormal]->asTexture(); var["outputReflectionPosW"] = renderData[kOutputReflectionPosW]->asTexture(); var["outputNRDDiffuseRadianceHitDist"] = renderData[kOutputNRDDiffuseRadianceHitDist]->asTexture(); var["outputNRDSpecularRadianceHitDist"] = renderData[kOutputNRDSpecularRadianceHitDist]->asTexture(); var["outputNRDResidualRadianceHitDist"] = renderData[kOutputNRDResidualRadianceHitDist]->asTexture(); if (mVarsChanged) { var["sampleOffset"] = mpSampleOffset; // Can be nullptr var["sampleColor"] = mpSampleColor; var["sampleGuideData"] = mpSampleGuideData; var["sampleNRDRadiance"] = mpSampleNRDRadiance; var["sampleNRDHitDist"] = mpSampleNRDHitDist; var["sampleNRDEmission"] = mpSampleNRDEmission; var["sampleNRDReflectance"] = mpSampleNRDReflectance; } // Launch one thread per pixel. mpResolvePass->execute(pRenderContext, { mParams.frameDim, 1u }); } Program::DefineList PathTracer::StaticParams::getDefines(const PathTracer& owner) const { Program::DefineList defines; // Path tracer configuration. defines.add("SAMPLES_PER_PIXEL", (owner.mFixedSampleCount ? std::to_string(samplesPerPixel) : "0")); // 0 indicates a variable sample count defines.add("MAX_SURFACE_BOUNCES", std::to_string(maxSurfaceBounces)); defines.add("MAX_DIFFUSE_BOUNCES", std::to_string(maxDiffuseBounces)); defines.add("MAX_SPECULAR_BOUNCES", std::to_string(maxSpecularBounces)); defines.add("MAX_TRANSMISSON_BOUNCES", std::to_string(maxTransmissionBounces)); defines.add("ADJUST_SHADING_NORMALS", adjustShadingNormals ? "1" : "0"); defines.add("USE_BSDF_SAMPLING", useBSDFSampling ? "1" : "0"); defines.add("USE_NEE", useNEE ? "1" : "0"); defines.add("USE_MIS", useMIS ? "1" : "0"); defines.add("USE_RUSSIAN_ROULETTE", useRussianRoulette ? "1" : "0"); defines.add("USE_RTXDI", useRTXDI ? "1" : "0"); defines.add("USE_ALPHA_TEST", useAlphaTest ? "1" : "0"); defines.add("USE_LIGHTS_IN_DIELECTRIC_VOLUMES", useLightsInDielectricVolumes ? "1" : "0"); defines.add("DISABLE_CAUSTICS", disableCaustics ? "1" : "0"); defines.add("PRIMARY_LOD_MODE", std::to_string((uint32_t)primaryLodMode)); defines.add("USE_NRD_DEMODULATION", useNRDDemodulation ? "1" : "0"); defines.add("COLOR_FORMAT", std::to_string((uint32_t)colorFormat)); defines.add("MIS_HEURISTIC", std::to_string((uint32_t)misHeuristic)); defines.add("MIS_POWER_EXPONENT", std::to_string(misPowerExponent)); // Sampling utilities configuration. FALCOR_ASSERT(owner.mpSampleGenerator); defines.add(owner.mpSampleGenerator->getDefines()); if (owner.mpEmissiveSampler) defines.add(owner.mpEmissiveSampler->getDefines()); if (owner.mpRTXDI) defines.add(owner.mpRTXDI->getDefines()); defines.add("INTERIOR_LIST_SLOT_COUNT", std::to_string(maxNestedMaterials)); defines.add("GBUFFER_ADJUST_SHADING_NORMALS", owner.mGBufferAdjustShadingNormals ? "1" : "0"); // Scene-specific configuration. const auto& scene = owner.mpScene; if (scene) defines.add(scene->getSceneDefines()); defines.add("USE_ENV_LIGHT", scene && scene->useEnvLight() ? "1" : "0"); defines.add("USE_ANALYTIC_LIGHTS", scene && scene->useAnalyticLights() ? 
"1" : "0"); defines.add("USE_EMISSIVE_LIGHTS", scene && scene->useEmissiveLights() ? "1" : "0"); defines.add("USE_CURVES", scene && scene->hasGeometryType(Scene::GeometryType::Curve) ? "1" : "0"); defines.add("USE_SDF_GRIDS", scene && scene->hasGeometryType(Scene::GeometryType::SDFGrid) ? "1" : "0"); // Set default (off) values for additional features. defines.add("USE_VIEW_DIR", "0"); defines.add("OUTPUT_GUIDE_DATA", "0"); defines.add("OUTPUT_NRD_DATA", "0"); defines.add("OUTPUT_NRD_ADDITIONAL_DATA", "0"); return defines; } Program::TypeConformanceList PathTracer::StaticParams::getTypeConformances(const PathTracer& owner) const { Program::TypeConformanceList typeConformances; // Scene-specific configuration. FALCOR_ASSERT(owner.mpScene); typeConformances.add(owner.mpScene->getTypeConformances()); return typeConformances; }
bsd-3-clause
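The PathTracer options above expose an MIS heuristic selector plus a MIS_POWER_EXPONENT define for combining BSDF and light sampling. As a minimal illustration of what such weights compute — this is the standard balance/power heuristic sketch, not Falcor's actual implementation — consider:

```cpp
#include <cmath>

// Illustrative MIS weights (not Falcor's code). nf/ng are sample counts,
// fPdf/gPdf the sampling densities of the two strategies being combined.
float balanceHeuristic(int nf, float fPdf, int ng, float gPdf)
{
    float f = nf * fPdf, g = ng * gPdf;
    return (f + g) > 0.f ? f / (f + g) : 0.f;
}

// Power heuristic with an adjustable exponent; beta = 2 is the common default,
// playing the role of the MIS_POWER_EXPONENT define above.
float powerHeuristic(int nf, float fPdf, int ng, float gPdf, float beta = 2.f)
{
    float f = std::pow(nf * fPdf, beta);
    float g = std::pow(ng * gPdf, beta);
    return (f + g) > 0.f ? f / (f + g) : 0.f;
}
```

With beta = 1 the power heuristic reduces to the balance heuristic, which is consistent with the UI above only exposing the exponent when the PowerExp heuristic is selected.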
mhallin/hesitate-py
tests/django16/django16/wsgi.py
391
""" WSGI config for django16 project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django16.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
bsd-3-clause
tpltnt/SimpleCV
SimpleCV/examples/manipulation/threedee.py
1125
#!/usr/bin/env python from __future__ import print_function import sys import os from SimpleCV import * def threedee_me(left, right, offset): (r,g,b)=left.splitChannels() left_blue = left.mergeChannels(None,b,g); #left_blue.save("blue.png", sample=True) (r,g,b) = right.splitChannels() right_red = right.mergeChannels(r,None,None); #right_red.save("red.png", sample=True) sz = (left.width+offset[0],left.height+offset[1]) output = left_blue.embiggen(size=sz,pos=(0,0)) output = output.blit(right_red,alpha=0.5,pos=offset) output = output.crop(offset[0],y=offset[1],w=left.width-offset[0],h=left.height-offset[1]) return output print("Taking pictures. Please move your camera slightly to its right") print("after every picture.") c = Camera() time.sleep(1) images = [] for i in range(5): images.append(c.getImage()) print("Picture %d taken" % (i + 1)) time.sleep(1) offset = (0,0) for i in range(4): left = images[i] right = images[i+1] output = threedee_me(left, right, offset) print(output.save(temp = True)) output.show() time.sleep(2)
bsd-3-clause
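The threedee.py example above builds a red/cyan anaglyph: the left frame keeps its green/blue channels, the right frame keeps only red, and the two are blended with a small horizontal offset. A minimal per-pixel sketch of that channel merge (hypothetical RGB buffers, ignoring the alpha blend and the offset/crop step) could be:

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

struct RGB { std::uint8_t r, g, b; };

// Merge two equally sized frames into a red/cyan anaglyph: red comes from
// the right-eye image, green and blue from the left-eye image.
std::vector<RGB> makeAnaglyph(const std::vector<RGB>& left,
                              const std::vector<RGB>& right)
{
    std::vector<RGB> out(left.size());
    for (std::size_t i = 0; i < left.size() && i < right.size(); ++i)
    {
        out[i].r = right[i].r; // right eye contributes the red channel
        out[i].g = left[i].g;  // left eye contributes green
        out[i].b = left[i].b;  // and blue (together: cyan)
    }
    return out;
}
```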
ric2b/Vivaldi-browser
chromium/cc/trees/layer_tree_host_unittest_masks.cc
23829
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "cc/trees/layer_tree_host.h" #include "base/time/time.h" #include "cc/test/fake_picture_layer.h" #include "cc/test/fake_recording_source.h" #include "cc/test/layer_tree_test.h" #include "cc/test/property_tree_test_utils.h" #include "cc/trees/layer_tree_impl.h" #include "components/viz/common/quads/compositor_render_pass_draw_quad.h" namespace cc { namespace { class LayerTreeTestMaskLayerForSurfaceWithContentRectNotAtOrigin : public LayerTreeTest { protected: void SetupTree() override { // The masked layer has bounds 50x50, but it has a child that causes // the surface bounds to be larger. It also has a parent that clips the // masked layer and its surface. SetInitialRootBounds(gfx::Size(100, 100)); LayerTreeTest::SetupTree(); Layer* root = layer_tree_host()->root_layer(); scoped_refptr<FakePictureLayer> content_layer = FakePictureLayer::Create(&client_); std::unique_ptr<RecordingSource> recording_source = FakeRecordingSource::CreateFilledRecordingSource(gfx::Size(100, 100)); PaintFlags paint1, paint2; static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 0, 100, 90), paint1); static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 90, 100, 10), paint2); client_.set_fill_with_nonsolid_color(true); static_cast<FakeRecordingSource*>(recording_source.get())->Rerecord(); scoped_refptr<FakePictureLayer> mask_layer = FakePictureLayer::CreateWithRecordingSource( &client_, std::move(recording_source)); content_layer->SetMaskLayer(mask_layer); gfx::Size layer_size(100, 100); content_layer->SetBounds(layer_size); gfx::Size mask_size(100, 100); mask_layer->SetBounds(mask_size); mask_layer_id_ = mask_layer->id(); scoped_refptr<Layer> clip_layer = Layer::Create(); clip_layer->SetBounds(gfx::Size(50, 50)); clip_layer->SetMasksToBounds(true); scoped_refptr<Layer> scroll_layer = Layer::Create(); scroll_layer->SetBounds(layer_size); scroll_layer->SetScrollable(gfx::Size(50, 50)); scroll_layer->SetMasksToBounds(true); scroll_layer->SetElementId( LayerIdToElementIdForTesting(scroll_layer->id())); root->AddChild(clip_layer); clip_layer->AddChild(scroll_layer); scroll_layer->AddChild(content_layer); client_.set_bounds(root->bounds()); scroll_layer->SetScrollOffset(gfx::ScrollOffset(50, 50)); } void BeginTest() override { PostSetNeedsCommitToMainThread(); } DrawResult PrepareToDrawOnThread(LayerTreeHostImpl* host_impl, LayerTreeHostImpl::FrameData* frame_data, DrawResult draw_result) override { EXPECT_EQ(3u, frame_data->render_passes.size()); viz::CompositorRenderPass* root_pass = frame_data->render_passes.back().get(); EXPECT_EQ(2u, root_pass->quad_list.size()); // There's a solid color quad under everything. EXPECT_EQ(viz::DrawQuad::Material::kSolidColor, root_pass->quad_list.back()->material); EXPECT_EQ(viz::DrawQuad::Material::kCompositorRenderPass, root_pass->quad_list.front()->material); const viz::CompositorRenderPassDrawQuad* render_pass_quad = viz::CompositorRenderPassDrawQuad::MaterialCast( root_pass->quad_list.front()); gfx::Rect rect_in_target_space = MathUtil::MapEnclosingClippedRect( render_pass_quad->shared_quad_state->quad_to_target_transform, render_pass_quad->rect); EXPECT_EQ(gfx::Rect(0, 0, 50, 50), rect_in_target_space); // We use kDstIn blend mode instead of the mask feature of RenderPass. 
EXPECT_EQ(gfx::RectF(), render_pass_quad->mask_uv_rect); viz::CompositorRenderPass* mask_pass = frame_data->render_passes[1].get(); EXPECT_EQ(SkBlendMode::kDstIn, mask_pass->quad_list.front()->shared_quad_state->blend_mode); EndTest(); return draw_result; } int mask_layer_id_; FakeContentLayerClient client_; }; SINGLE_AND_MULTI_THREAD_TEST_F( LayerTreeTestMaskLayerForSurfaceWithContentRectNotAtOrigin); class LayerTreeTestMaskLayerForSurfaceWithContentRectNotAtOriginWithLayerList : public LayerTreeTest { protected: LayerTreeTestMaskLayerForSurfaceWithContentRectNotAtOriginWithLayerList() { SetUseLayerLists(); } void SetupTree() override { // The masked layer has bounds 50x50, but it has a child that causes // the surface bounds to be larger. It also has a parent that clips the // masked layer and its surface. SetInitialRootBounds(gfx::Size(100, 100)); LayerTreeTest::SetupTree(); Layer* root = layer_tree_host()->root_layer(); gfx::Size layer_size(100, 100); SetupViewport(root, gfx::Size(50, 50), layer_size); auto* scroll = layer_tree_host()->OuterViewportScrollLayerForTesting(); SetScrollOffset(scroll, gfx::ScrollOffset(50, 50)); client_.set_bounds(root->bounds()); auto content_layer = FakePictureLayer::Create(&client_); content_layer->SetBounds(layer_size); CopyProperties(scroll, content_layer.get()); root->AddChild(content_layer); std::unique_ptr<RecordingSource> recording_source = FakeRecordingSource::CreateFilledRecordingSource(gfx::Size(100, 100)); PaintFlags paint1, paint2; static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 0, 100, 90), paint1); static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 90, 100, 10), paint2); client_.set_fill_with_nonsolid_color(true); static_cast<FakeRecordingSource*>(recording_source.get())->Rerecord(); auto mask_layer = FakePictureLayer::CreateWithRecordingSource( &client_, std::move(recording_source)); SetupMaskProperties(content_layer.get(), mask_layer.get()); root->AddChild(mask_layer); mask_layer_id_ = mask_layer->id(); } void BeginTest() override { PostSetNeedsCommitToMainThread(); } DrawResult PrepareToDrawOnThread(LayerTreeHostImpl* host_impl, LayerTreeHostImpl::FrameData* frame_data, DrawResult draw_result) override { EXPECT_EQ(1u, frame_data->render_passes.size()); viz::CompositorRenderPass* pass = frame_data->render_passes.back().get(); EXPECT_EQ(3u, pass->quad_list.size()); // There's a solid color quad under everything. EXPECT_EQ(viz::DrawQuad::Material::kSolidColor, pass->quad_list.back()->material); EXPECT_EQ(viz::DrawQuad::Material::kTiledContent, pass->quad_list.ElementAt(1)->material); auto* mask_quad = pass->quad_list.front(); EXPECT_EQ(viz::DrawQuad::Material::kTiledContent, mask_quad->material); gfx::Rect rect_in_target_space = MathUtil::MapEnclosingClippedRect( mask_quad->shared_quad_state->quad_to_target_transform, mask_quad->rect); EXPECT_EQ(gfx::Rect(0, 0, 50, 50), rect_in_target_space); // We use kDstIn blend mode for mask. EXPECT_EQ(SkBlendMode::kDstIn, mask_quad->shared_quad_state->blend_mode); EndTest(); return draw_result; } int mask_layer_id_; FakeContentLayerClient client_; }; SINGLE_AND_MULTI_THREAD_TEST_F( LayerTreeTestMaskLayerForSurfaceWithContentRectNotAtOriginWithLayerList); class LayerTreeTestMaskLayerForSurfaceWithClippedLayer : public LayerTreeTest { protected: void SetupTree() override { // The masked layer has bounds 50x50, but it has a child that causes // the surface bounds to be larger. 
It also has a parent that clips the // masked layer and its surface. scoped_refptr<Layer> root = Layer::Create(); scoped_refptr<Layer> clipping_layer = Layer::Create(); root->AddChild(clipping_layer); scoped_refptr<FakePictureLayer> content_layer = FakePictureLayer::Create(&client_); clipping_layer->AddChild(content_layer); scoped_refptr<FakePictureLayer> content_child_layer = FakePictureLayer::Create(&client_); content_layer->AddChild(content_child_layer); std::unique_ptr<RecordingSource> recording_source = FakeRecordingSource::CreateFilledRecordingSource(gfx::Size(50, 50)); PaintFlags paint1, paint2; static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 0, 50, 40), paint1); static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 40, 50, 10), paint2); client_.set_fill_with_nonsolid_color(true); static_cast<FakeRecordingSource*>(recording_source.get())->Rerecord(); scoped_refptr<FakePictureLayer> mask_layer = FakePictureLayer::CreateWithRecordingSource( &client_, std::move(recording_source)); content_layer->SetMaskLayer(mask_layer); gfx::Size root_size(100, 100); root->SetBounds(root_size); gfx::PointF clipping_origin(20.f, 10.f); gfx::Size clipping_size(10, 20); clipping_layer->SetBounds(clipping_size); clipping_layer->SetPosition(clipping_origin); clipping_layer->SetMasksToBounds(true); gfx::Size layer_size(50, 50); content_layer->SetBounds(layer_size); content_layer->SetPosition(gfx::PointF() - clipping_origin.OffsetFromOrigin()); gfx::Size child_size(50, 50); content_child_layer->SetBounds(child_size); content_child_layer->SetPosition(gfx::PointF(20.f, 0.f)); gfx::Size mask_size(50, 50); mask_layer->SetBounds(mask_size); mask_layer_id_ = mask_layer->id(); layer_tree_host()->SetRootLayer(root); LayerTreeTest::SetupTree(); client_.set_bounds(root->bounds()); } void BeginTest() override { PostSetNeedsCommitToMainThread(); } DrawResult PrepareToDrawOnThread(LayerTreeHostImpl* host_impl, LayerTreeHostImpl::FrameData* frame_data, DrawResult draw_result) override { EXPECT_EQ(3u, frame_data->render_passes.size()); viz::CompositorRenderPass* root_pass = frame_data->render_passes.back().get(); EXPECT_EQ(2u, root_pass->quad_list.size()); // There's a solid color quad under everything. EXPECT_EQ(viz::DrawQuad::Material::kSolidColor, root_pass->quad_list.back()->material); // The surface is clipped to 10x20. EXPECT_EQ(viz::DrawQuad::Material::kCompositorRenderPass, root_pass->quad_list.front()->material); const viz::CompositorRenderPassDrawQuad* render_pass_quad = viz::CompositorRenderPassDrawQuad::MaterialCast( root_pass->quad_list.front()); gfx::Rect rect_in_target_space = MathUtil::MapEnclosingClippedRect( render_pass_quad->shared_quad_state->quad_to_target_transform, render_pass_quad->rect); EXPECT_EQ(gfx::Rect(20, 10, 10, 20).ToString(), rect_in_target_space.ToString()); // We use kDstIn blend mode instead of the mask feature of RenderPass. 
EXPECT_EQ(gfx::RectF(), render_pass_quad->mask_uv_rect); viz::CompositorRenderPass* mask_pass = frame_data->render_passes[1].get(); EXPECT_EQ(SkBlendMode::kDstIn, mask_pass->quad_list.front()->shared_quad_state->blend_mode); EndTest(); return draw_result; } int mask_layer_id_; FakeContentLayerClient client_; }; SINGLE_AND_MULTI_THREAD_TEST_F( LayerTreeTestMaskLayerForSurfaceWithClippedLayer); class LayerTreeTestMaskLayerForSurfaceWithDifferentScale : public LayerTreeTest { protected: void SetupTree() override { // The masked layer has bounds 50x50, but it has a child that causes // the surface bounds to be larger. It also has a parent that clips the // masked layer and its surface. scoped_refptr<Layer> root = Layer::Create(); scoped_refptr<Layer> clipping_scaling_layer = Layer::Create(); root->AddChild(clipping_scaling_layer); scoped_refptr<FakePictureLayer> content_layer = FakePictureLayer::Create(&client_); clipping_scaling_layer->AddChild(content_layer); scoped_refptr<FakePictureLayer> content_child_layer = FakePictureLayer::Create(&client_); content_layer->AddChild(content_child_layer); std::unique_ptr<RecordingSource> recording_source = FakeRecordingSource::CreateFilledRecordingSource(gfx::Size(50, 50)); PaintFlags paint1, paint2; static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 0, 50, 40), paint1); static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 40, 50, 10), paint2); client_.set_fill_with_nonsolid_color(true); static_cast<FakeRecordingSource*>(recording_source.get())->Rerecord(); scoped_refptr<FakePictureLayer> mask_layer = FakePictureLayer::CreateWithRecordingSource( &client_, std::move(recording_source)); content_layer->SetMaskLayer(mask_layer); gfx::Size root_size(100, 100); root->SetBounds(root_size); gfx::Transform scale; scale.Scale(2, 2); gfx::PointF clipping_origin(20.f, 10.f); gfx::Size clipping_size(10, 20); clipping_scaling_layer->SetBounds(clipping_size); clipping_scaling_layer->SetPosition(clipping_origin); // This changes scale between contributing layer and render surface to 2. clipping_scaling_layer->SetTransform(scale); clipping_scaling_layer->SetMasksToBounds(true); gfx::Size layer_size(50, 50); content_layer->SetBounds(layer_size); content_layer->SetPosition(gfx::PointF() - clipping_origin.OffsetFromOrigin()); gfx::Size child_size(50, 50); content_child_layer->SetBounds(child_size); content_child_layer->SetPosition(gfx::PointF(20.f, 0.f)); gfx::Size mask_size(50, 50); mask_layer->SetBounds(mask_size); mask_layer_id_ = mask_layer->id(); layer_tree_host()->SetRootLayer(root); LayerTreeTest::SetupTree(); client_.set_bounds(root->bounds()); } void BeginTest() override { PostSetNeedsCommitToMainThread(); } DrawResult PrepareToDrawOnThread(LayerTreeHostImpl* host_impl, LayerTreeHostImpl::FrameData* frame_data, DrawResult draw_result) override { EXPECT_EQ(3u, frame_data->render_passes.size()); viz::CompositorRenderPass* root_pass = frame_data->render_passes.back().get(); EXPECT_EQ(2u, root_pass->quad_list.size()); // There's a solid color quad under everything. EXPECT_EQ(viz::DrawQuad::Material::kSolidColor, root_pass->quad_list.back()->material); // The surface is clipped to 10x20, and then scaled by 2, which ends up // being 20x40. 
EXPECT_EQ(viz::DrawQuad::Material::kCompositorRenderPass, root_pass->quad_list.front()->material); const viz::CompositorRenderPassDrawQuad* render_pass_quad = viz::CompositorRenderPassDrawQuad::MaterialCast( root_pass->quad_list.front()); gfx::Rect rect_in_target_space = MathUtil::MapEnclosingClippedRect( render_pass_quad->shared_quad_state->quad_to_target_transform, render_pass_quad->rect); EXPECT_EQ(gfx::Rect(20, 10, 20, 40).ToString(), rect_in_target_space.ToString()); gfx::Rect visible_rect_in_target_space = MathUtil::MapEnclosingClippedRect( render_pass_quad->shared_quad_state->quad_to_target_transform, render_pass_quad->visible_rect); EXPECT_EQ(gfx::Rect(20, 10, 20, 40).ToString(), visible_rect_in_target_space.ToString()); // We use kDstIn blend mode instead of the mask feature of RenderPass. EXPECT_EQ(gfx::RectF(), render_pass_quad->mask_uv_rect); viz::CompositorRenderPass* mask_pass = frame_data->render_passes[1].get(); EXPECT_EQ(SkBlendMode::kDstIn, mask_pass->quad_list.front()->shared_quad_state->blend_mode); EndTest(); return draw_result; } int mask_layer_id_; FakeContentLayerClient client_; }; SINGLE_AND_MULTI_THREAD_TEST_F( LayerTreeTestMaskLayerForSurfaceWithDifferentScale); class LayerTreeTestMaskLayerWithScaling : public LayerTreeTest { protected: void SetupTree() override { // Root // | // +-- Scaling Layer (adds a 2x scale) // | // +-- Content Layer // +--Mask scoped_refptr<Layer> root = Layer::Create(); scoped_refptr<Layer> scaling_layer = Layer::Create(); root->AddChild(scaling_layer); scoped_refptr<FakePictureLayer> content_layer = FakePictureLayer::Create(&client_); scaling_layer->AddChild(content_layer); std::unique_ptr<RecordingSource> recording_source = FakeRecordingSource::CreateFilledRecordingSource(gfx::Size(100, 100)); PaintFlags paint1, paint2; static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 0, 100, 10), paint1); static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 10, 100, 90), paint2); client_.set_fill_with_nonsolid_color(true); static_cast<FakeRecordingSource*>(recording_source.get())->Rerecord(); scoped_refptr<FakePictureLayer> mask_layer = FakePictureLayer::CreateWithRecordingSource( &client_, std::move(recording_source)); content_layer->SetMaskLayer(mask_layer); gfx::Size root_size(100, 100); root->SetBounds(root_size); gfx::Size scaling_layer_size(50, 50); scaling_layer->SetBounds(scaling_layer_size); gfx::Transform scale; scale.Scale(2.f, 2.f); scaling_layer->SetTransform(scale); content_layer->SetBounds(scaling_layer_size); mask_layer->SetBounds(scaling_layer_size); layer_tree_host()->SetRootLayer(root); LayerTreeTest::SetupTree(); client_.set_bounds(root->bounds()); } void BeginTest() override { PostSetNeedsCommitToMainThread(); } DrawResult PrepareToDrawOnThread(LayerTreeHostImpl* host_impl, LayerTreeHostImpl::FrameData* frame_data, DrawResult draw_result) override { EXPECT_EQ(3u, frame_data->render_passes.size()); viz::CompositorRenderPass* root_pass = frame_data->render_passes.back().get(); EXPECT_EQ(2u, root_pass->quad_list.size()); // There's a solid color quad under everything. 
EXPECT_EQ(viz::DrawQuad::Material::kSolidColor, root_pass->quad_list.back()->material); EXPECT_EQ(viz::DrawQuad::Material::kCompositorRenderPass, root_pass->quad_list.front()->material); const viz::CompositorRenderPassDrawQuad* render_pass_quad = viz::CompositorRenderPassDrawQuad::MaterialCast( root_pass->quad_list.front()); gfx::Rect rect_in_target_space = MathUtil::MapEnclosingClippedRect( render_pass_quad->shared_quad_state->quad_to_target_transform, render_pass_quad->rect); // We use kDstIn blend mode instead of the mask feature of RenderPass. EXPECT_EQ(gfx::RectF(), render_pass_quad->mask_uv_rect); viz::CompositorRenderPass* mask_pass = frame_data->render_passes[1].get(); EXPECT_EQ(SkBlendMode::kDstIn, mask_pass->quad_list.front()->shared_quad_state->blend_mode); switch (host_impl->active_tree()->source_frame_number()) { case 0: // Check that the tree scaling is correctly taken into account for the // mask, that should fully map onto the quad. EXPECT_EQ(gfx::Rect(0, 0, 100, 100).ToString(), rect_in_target_space.ToString()); break; case 1: // Applying a DSF should change the render surface size, but won't // affect which part of the mask is used. EXPECT_EQ(gfx::Rect(0, 0, 200, 200).ToString(), rect_in_target_space.ToString()); EndTest(); break; } return draw_result; } void DidCommit() override { switch (layer_tree_host()->SourceFrameNumber()) { case 1: gfx::Size double_root_size(200, 200); GenerateNewLocalSurfaceId(); layer_tree_host()->SetViewportRectAndScale( gfx::Rect(double_root_size), 2.f, GetCurrentLocalSurfaceId()); break; } } FakeContentLayerClient client_; }; SINGLE_AND_MULTI_THREAD_TEST_F(LayerTreeTestMaskLayerWithScaling); class LayerTreeTestMaskWithNonExactTextureSize : public LayerTreeTest { protected: void SetupTree() override { // The masked layer has bounds 100x100, but is allocated a 120x150 texture. scoped_refptr<Layer> root = Layer::Create(); scoped_refptr<FakePictureLayer> content_layer = FakePictureLayer::Create(&client_); root->AddChild(content_layer); std::unique_ptr<RecordingSource> recording_source = FakeRecordingSource::CreateFilledRecordingSource(gfx::Size(100, 100)); PaintFlags paint1, paint2; static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 0, 100, 90), paint1); static_cast<FakeRecordingSource*>(recording_source.get()) ->add_draw_rect_with_flags(gfx::Rect(0, 90, 100, 10), paint2); client_.set_fill_with_nonsolid_color(true); static_cast<FakeRecordingSource*>(recording_source.get())->Rerecord(); scoped_refptr<FakePictureLayer> mask_layer = FakePictureLayer::CreateWithRecordingSource( &client_, std::move(recording_source)); content_layer->SetMaskLayer(mask_layer); gfx::Size root_size(100, 100); root->SetBounds(root_size); gfx::Size layer_size(100, 100); content_layer->SetBounds(layer_size); gfx::Size mask_size(100, 100); gfx::Size mask_texture_size(120, 150); mask_layer->SetBounds(mask_size); mask_layer->set_fixed_tile_size(mask_texture_size); layer_tree_host()->SetRootLayer(root); LayerTreeTest::SetupTree(); client_.set_bounds(root->bounds()); } void BeginTest() override { PostSetNeedsCommitToMainThread(); } DrawResult PrepareToDrawOnThread(LayerTreeHostImpl* host_impl, LayerTreeHostImpl::FrameData* frame_data, DrawResult draw_result) override { EXPECT_EQ(3u, frame_data->render_passes.size()); viz::CompositorRenderPass* root_pass = frame_data->render_passes.back().get(); EXPECT_EQ(2u, root_pass->quad_list.size()); // There's a solid color quad under everything. 
EXPECT_EQ(viz::DrawQuad::Material::kSolidColor, root_pass->quad_list.back()->material); // The surface is 100x100 EXPECT_EQ(viz::DrawQuad::Material::kCompositorRenderPass, root_pass->quad_list.front()->material); const viz::CompositorRenderPassDrawQuad* render_pass_quad = viz::CompositorRenderPassDrawQuad::MaterialCast( root_pass->quad_list.front()); EXPECT_EQ(gfx::Rect(0, 0, 100, 100).ToString(), render_pass_quad->rect.ToString()); // We use kDstIn blend mode instead of the mask feature of RenderPass. EXPECT_EQ(gfx::RectF(), render_pass_quad->mask_uv_rect); viz::CompositorRenderPass* mask_pass = frame_data->render_passes[1].get(); EXPECT_EQ(SkBlendMode::kDstIn, mask_pass->quad_list.front()->shared_quad_state->blend_mode); EndTest(); return draw_result; } int mask_layer_id_; FakeContentLayerClient client_; }; SINGLE_AND_MULTI_THREAD_TEST_F(LayerTreeTestMaskWithNonExactTextureSize); } // namespace } // namespace cc
bsd-3-clause
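The "different scale" test above expects the clipped 10x20 surface positioned at (20,10) to land as a 20x40 rect in target space once the 2x transform is applied. A standalone sketch of that enclosing-rect mapping — plain structs standing in for gfx::Rect and MathUtil::MapEnclosingClippedRect, assuming a pure scale-then-translate transform — is:

```cpp
#include <cmath>
#include <cstdio>

struct RectF { float x, y, w, h; };

// Map a rect by a uniform scale followed by a translation, then take the
// enclosing integer-aligned rect (simplified stand-in for cc's MathUtil).
RectF mapEnclosingRect(const RectF& r, float scale, float tx, float ty)
{
    float x0 = std::floor(r.x * scale + tx);
    float y0 = std::floor(r.y * scale + ty);
    float x1 = std::ceil((r.x + r.w) * scale + tx);
    float y1 = std::ceil((r.y + r.h) * scale + ty);
    return { x0, y0, x1 - x0, y1 - y0 };
}

int main()
{
    // Surface-local rect 10x20 at the origin; the clipping/scaling layer is
    // positioned at (20,10) and scales by 2 -> expected (20,10) 20x40.
    RectF mapped = mapEnclosingRect({0.f, 0.f, 10.f, 20.f}, 2.f, 20.f, 10.f);
    std::printf("(%g,%g) %gx%g\n", mapped.x, mapped.y, mapped.w, mapped.h);
    return 0;
}
```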
guillaumesoul/zf2-tutorial
module/Album/config/module.config.php
1172
<?php return array( 'controllers' => array( 'invokables' => array( 'Album\Controller\Album' => 'Album\Controller\AlbumController', ), ), 'router' => array( 'routes' => array( 'album' => array( 'type' => 'segment', 'options' => array( 'route' => '/album[/][:action][/:id]', 'constraints' => array( 'action' => '[a-zA-Z][a-zA-Z0-9_-]*', 'id' => '[0-9]+', ), 'defaults' => array( 'controller' => 'Album\Controller\Album', 'action' => 'index', ), ), ), ), ), 'view_manager' => array( 'template_path_stack' => array( 'album' => __DIR__ . '/../view', ), ), 'home' => array( 'type' => 'Zend\Mvc\Router\Http\Literal', 'options' => array( 'route' => '/', 'defaults' => array( 'controller' => 'Application\Controller\Album', 'action' => 'index', ), ), ), );
bsd-3-clause
mdn/webalyzer
webalyzer/collected/migrations/0001_initial.py
914
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import sorl.thumbnail.fields class Migration(migrations.Migration): dependencies = [ ('collector', '0001_initial'), ] operations = [ migrations.CreateModel( name='Screenshot', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('file', sorl.thumbnail.fields.ImageField(upload_to=b'screenshots')), ('width', models.PositiveIntegerField()), ('height', models.PositiveIntegerField()), ('engine', models.CharField(default=b'slimerjs', max_length=100)), ('added', models.DateTimeField(auto_now_add=True)), ('page', models.ForeignKey(to='collector.Page')), ], ), ]
bsd-3-clause
manpreetabhaypal/psdm
backend/models/Content.php
1345
<?php namespace app\models; use Yii; /** * This is the model class for table "content". * * @property integer $id * @property string $title * @property string $description * @property string $identifier * @property string $category * @property string $status * @property string $created_date * @property string $modified_date */ class Content extends \yii\db\ActiveRecord { /** * @inheritdoc */ public static function tableName() { return 'content'; } /** * @inheritdoc */ public function rules() { return [ [['title', 'description', 'identifier', 'category', 'status'], 'required'], [['description', 'status'], 'string'], [['created_date', 'modified_date'], 'safe'], [['title'], 'string', 'max' => 255], [['identifier', 'category'], 'string', 'max' => 50], ]; } /** * @inheritdoc */ public function attributeLabels() { return [ 'id' => 'ID', 'title' => 'Title', 'description' => 'Description', 'identifier' => 'Identifier', 'category' => 'Category', 'status' => 'Status', 'created_date' => 'Created Date', 'modified_date' => 'Modified Date', ]; } }
bsd-3-clause
thoughtbot/gitsh
lib/gitsh/tab_completion/matchers/unknown_option_matcher.rb
298
require 'gitsh/tab_completion/matchers/base_matcher' module Gitsh module TabCompletion module Matchers class UnknownOptionMatcher < BaseMatcher def match?(word) word =~ /\A--?[^-]/ end def name 'opt' end end end end end
bsd-3-clause
leapmotion/websocketpp
examples/wsperf/generic.cpp
5175
/* * Copyright (c) 2011, Peter Thorson. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the WebSocket++ Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL PETER THORSON BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ #include "generic.hpp" using wsperf::message_test; // Construct a message_test from a wscmd command /* Reads values from the wscmd object into member variables. The cmd object is * passed to the parent constructor for extracting values common to all test * cases. * * Any of the constructors may throw a `case_exception` if required parameters * are not found or default values don't make sense. * * Values that message_test checks for: * * size=[interger]; * Example: size=4096; * Size of messages to send in bytes. Valid values 0 - max size_t * * count=[integer]; * Example: count=1000; * Number of test messages to send. Valid values 0-2^64 * * timeout=[integer]; * Example: timeout=10000; * How long to wait (in ms) for a response before failing the test. * * binary=[bool]; * Example: binary=true; * Whether or not to use binary websocket frames. (true=binary, false=utf8) * * sync=[bool]; * Example: sync=true; * Syncronize messages. When sync is on wsperf will wait for a response before * sending the next message. When sync is off, messages will be sent as quickly * as possible. * * correctness=[string]; * Example: correctness=exact; * Example: correctness=length; * How to evaluate the correctness of responses. Exact checks each response for * exact correctness. Length checks only that the response has the correct * length. Length mode is faster but won't catch invalid implimentations. 
Length * mode can be used to test situations where you deliberately return incorrect * bytes in order to compare performance (ex: performance with/without masking) */ message_test::message_test(wscmd::cmd& cmd) : case_handler(cmd), m_message_size(extract_number<uint64_t>(cmd,"size")), m_message_count(extract_number<uint64_t>(cmd,"count")), m_timeout(extract_number<uint64_t>(cmd,"timeout")), m_binary(extract_bool(cmd,"binary")), m_sync(extract_bool(cmd,"sync")), m_acks(0) { if (cmd.args["correctness"] == "exact") { m_mode = EXACT; } else if (cmd.args["correctness"] == "length") { m_mode = LENGTH; } else { throw case_exception("Invalid correctness parameter."); } } void message_test::on_open(connection_ptr con) { con->alog()->at(websocketpp::log::alevel::DEVEL) << "message_test::on_open" << websocketpp::log::endl; m_msg = con->get_data_message(); m_data.reserve(static_cast<size_t>(m_message_size)); if (!m_binary) { fill_utf8(m_data,static_cast<size_t>(m_message_size),true); m_msg->reset(websocketpp::frame::opcode::TEXT); } else { fill_binary(m_data,static_cast<size_t>(m_message_size),true); m_msg->reset(websocketpp::frame::opcode::BINARY); } m_msg->set_payload(m_data); start(con,m_timeout); if (m_sync) { con->send(m_msg); } else { for (uint64_t i = 0; i < m_message_count; i++) { con->send(m_msg); } } } void message_test::on_message(connection_ptr con,websocketpp::message::data_ptr msg) { if ((m_mode == LENGTH && msg->get_payload().size() == m_data.size()) || (m_mode == EXACT && msg->get_payload() == m_data)) { m_acks++; m_bytes += m_message_size; mark(); } else { mark(); m_msg.reset(); m_pass = FAIL; this->end(con); } if (m_acks == m_message_count) { m_pass = PASS; m_msg.reset(); this->end(con); } else if (m_sync && m_pass == RUNNING) { con->send(m_msg); } }
bsd-3-clause
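The message_test constructor documented above reads its parameters from a wscmd command whose arguments are described as `key=value;` pairs (size, count, timeout, binary, sync, correctness). A minimal sketch of parsing that textual form into a lookup map — an illustration of the documented format, not wsperf's actual wscmd parser — could be:

```cpp
#include <map>
#include <sstream>
#include <string>

// Parse "size=4096;count=1000;sync=true;" style arguments into a map.
// Illustrative only; the real wscmd parser may behave differently.
std::map<std::string, std::string> parseArgs(const std::string& input)
{
    std::map<std::string, std::string> args;
    std::istringstream stream(input);
    std::string token;
    while (std::getline(stream, token, ';'))
    {
        const std::string::size_type eq = token.find('=');
        if (eq == std::string::npos || eq == 0)
            continue; // skip malformed or empty entries
        args[token.substr(0, eq)] = token.substr(eq + 1);
    }
    return args;
}

// e.g. parseArgs("size=4096;count=1000;binary=true;")["count"] == "1000"
```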
endlessm/chromium-browser
content/browser/android/selection/selection_popup_controller.cc
8653
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/android/selection/selection_popup_controller.h" #include "base/android/jni_android.h" #include "base/android/jni_string.h" #include "base/android/scoped_java_ref.h" #include "content/browser/android/selection/composited_touch_handle_drawable.h" #include "content/browser/renderer_host/render_widget_host_view_android.h" #include "content/browser/web_contents/web_contents_impl.h" #include "content/browser/web_contents/web_contents_view_android.h" #include "content/public/android/content_jni_headers/SelectionPopupControllerImpl_jni.h" #include "content/public/browser/context_menu_params.h" #include "third_party/blink/public/common/context_menu_data/edit_flags.h" #include "third_party/blink/public/common/context_menu_data/input_field_type.h" #include "ui/gfx/geometry/point_conversions.h" using base::android::AttachCurrentThread; using base::android::ConvertUTF16ToJavaString; using base::android::ConvertUTF8ToJavaString; using base::android::JavaParamRef; using base::android::ScopedJavaLocalRef; namespace content { jlong JNI_SelectionPopupControllerImpl_Init( JNIEnv* env, const JavaParamRef<jobject>& obj, const JavaParamRef<jobject>& jweb_contents) { WebContents* web_contents = WebContents::FromJavaWebContents(jweb_contents); DCHECK(web_contents); // Owns itself and gets destroyed when |WebContentsDestroyed| is called. auto* controller = new SelectionPopupController(env, obj, web_contents); controller->Initialize(); return reinterpret_cast<intptr_t>(controller); } SelectionPopupController::SelectionPopupController( JNIEnv* env, const JavaParamRef<jobject>& obj, WebContents* web_contents) : RenderWidgetHostConnector(web_contents) { java_obj_ = JavaObjectWeakGlobalRef(env, obj); auto* wcva = static_cast<WebContentsViewAndroid*>( static_cast<WebContentsImpl*>(web_contents)->GetView()); wcva->set_selection_popup_controller(this); } SelectionPopupController::~SelectionPopupController() { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (!obj.is_null()) { Java_SelectionPopupControllerImpl_nativeSelectionPopupControllerDestroyed( env, obj); } } ScopedJavaLocalRef<jobject> SelectionPopupController::GetContext() const { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return nullptr; return Java_SelectionPopupControllerImpl_getContext(env, obj); } void SelectionPopupController::SetTextHandlesTemporarilyHidden( JNIEnv* env, const JavaParamRef<jobject>& obj, jboolean hidden) { if (rwhva_) rwhva_->SetTextHandlesTemporarilyHidden(hidden); } std::unique_ptr<ui::TouchHandleDrawable> SelectionPopupController::CreateTouchHandleDrawable() { ScopedJavaLocalRef<jobject> activityContext = GetContext(); // If activityContext is null then Application context is used instead on // the java side in CompositedTouchHandleDrawable. 
auto* view = web_contents()->GetNativeView(); return std::unique_ptr<ui::TouchHandleDrawable>( new CompositedTouchHandleDrawable(view, activityContext)); } void SelectionPopupController::MoveRangeSelectionExtent( const gfx::PointF& extent) { auto* web_contents_impl = static_cast<WebContentsImpl*>(web_contents()); if (!web_contents_impl) return; web_contents_impl->MoveRangeSelectionExtent(gfx::ToRoundedPoint(extent)); } void SelectionPopupController::SelectBetweenCoordinates( const gfx::PointF& base, const gfx::PointF& extent) { auto* web_contents_impl = static_cast<WebContentsImpl*>(web_contents()); if (!web_contents_impl) return; gfx::Point base_point = gfx::ToRoundedPoint(base); gfx::Point extent_point = gfx::ToRoundedPoint(extent); if (base_point == extent_point) return; web_contents_impl->SelectRange(base_point, extent_point); } void SelectionPopupController::UpdateRenderProcessConnection( RenderWidgetHostViewAndroid* old_rwhva, RenderWidgetHostViewAndroid* new_rwhva) { if (old_rwhva) old_rwhva->set_selection_popup_controller(nullptr); if (new_rwhva) new_rwhva->set_selection_popup_controller(this); rwhva_ = new_rwhva; } void SelectionPopupController::OnSelectionEvent( ui::SelectionEventType event, const gfx::RectF& selection_rect) { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return; Java_SelectionPopupControllerImpl_onSelectionEvent( env, obj, event, selection_rect.x(), selection_rect.y(), selection_rect.right(), selection_rect.bottom()); } void SelectionPopupController::OnDragUpdate(const gfx::PointF& position) { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return; Java_SelectionPopupControllerImpl_onDragUpdate(env, obj, position.x(), position.y()); } void SelectionPopupController::OnSelectionChanged(const std::string& text) { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return; ScopedJavaLocalRef<jstring> jtext = ConvertUTF8ToJavaString(env, text); Java_SelectionPopupControllerImpl_onSelectionChanged(env, obj, jtext); } bool SelectionPopupController::ShowSelectionMenu( const ContextMenuParams& params, int handle_height) { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return false; // Display paste pop-up only when selection is empty and editable. const bool from_touch = params.source_type == ui::MENU_SOURCE_TOUCH || params.source_type == ui::MENU_SOURCE_LONG_PRESS || params.source_type == ui::MENU_SOURCE_TOUCH_HANDLE || params.source_type == ui::MENU_SOURCE_STYLUS; const bool from_selection_adjustment = params.source_type == ui::MENU_SOURCE_ADJUST_SELECTION || params.source_type == ui::MENU_SOURCE_ADJUST_SELECTION_RESET; // If source_type is not in the list then return. if (!from_touch && !from_selection_adjustment) return false; // Don't show paste pop-up for non-editable textarea. 
if (!params.is_editable && params.selection_text.empty()) return false; const bool can_select_all = !!(params.edit_flags & blink::ContextMenuDataEditFlags::kCanSelectAll); const bool can_edit_richly = !!(params.edit_flags & blink::ContextMenuDataEditFlags::kCanEditRichly); const bool is_password_type = params.input_field_type == blink::ContextMenuDataInputFieldType::kPassword; const ScopedJavaLocalRef<jstring> jselected_text = ConvertUTF16ToJavaString(env, params.selection_text); const bool should_suggest = params.source_type == ui::MENU_SOURCE_TOUCH || params.source_type == ui::MENU_SOURCE_LONG_PRESS; Java_SelectionPopupControllerImpl_showSelectionMenu( env, obj, params.selection_rect.x(), params.selection_rect.y(), params.selection_rect.right(), params.selection_rect.bottom(), handle_height, params.is_editable, is_password_type, jselected_text, params.selection_start_offset, can_select_all, can_edit_richly, should_suggest, params.source_type); return true; } void SelectionPopupController::OnSelectWordAroundCaretAck(bool did_select, int start_adjust, int end_adjust) { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return; Java_SelectionPopupControllerImpl_onSelectWordAroundCaretAck( env, obj, did_select, start_adjust, end_adjust); } void SelectionPopupController::HidePopupsAndPreserveSelection() { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return; Java_SelectionPopupControllerImpl_hidePopupsAndPreserveSelection(env, obj); } void SelectionPopupController::RestoreSelectionPopupsIfNecessary() { JNIEnv* env = AttachCurrentThread(); ScopedJavaLocalRef<jobject> obj = java_obj_.get(env); if (obj.is_null()) return; Java_SelectionPopupControllerImpl_restoreSelectionPopupsIfNecessary(env, obj); } } // namespace content
bsd-3-clause
nschloe/seacas
docs/ioss_html/structIoxf_1_1SideSet.js
931
var structIoxf_1_1SideSet = [ [ "SideSet", "structIoxf_1_1SideSet.html#a94a0cbb8dae2728b81a4b3440b6b2ca3", null ], [ "SideSet", "structIoxf_1_1SideSet.html#adda252ddff67460d2bd311a3950db0d2", null ], [ "operator!=", "structIoxf_1_1SideSet.html#ac4b6bb9f12a64189527316ba870f1c8d", null ], [ "operator==", "structIoxf_1_1SideSet.html#aa45f984be92ab8d1b212c7fa55f19e84", null ], [ "dfCount", "structIoxf_1_1SideSet.html#a521e7daa8f686868db41d47194bbc49e", null ], [ "elemCount", "structIoxf_1_1SideSet.html#a5ce922cb743097a48210bb939be3911c", null ], [ "id", "structIoxf_1_1SideSet.html#a92f4422a60a76cf99ffe513c604dbcc0", null ], [ "name", "structIoxf_1_1SideSet.html#ab5261bc99c80ebbdfc32deecb11a300e", null ], [ "nodesPerSideSet", "structIoxf_1_1SideSet.html#a3451856a85888a128ffdcabf6fd2b933", null ], [ "sideCount", "structIoxf_1_1SideSet.html#ad878e83c136f8c3cd485befda4534e48", null ] ];
bsd-3-clause
ychab/mymoney-client
test/e2e/specs/user/logout.spec.js
390
var Authentication = require('../../helpers/authentication.js'); describe('Logout', function() { var auth; beforeEach(function() { auth = new Authentication(); auth.logout(); }); it('redirect user on login page after being logout', function() { auth.login(); auth.logout(); var url = browser.getLocationAbsUrl(); expect(url).toMatch(/login$/); }); });
bsd-3-clause
infoscout/SurveyGizmo
surveygizmo/api/surveyoption.py
1555
from surveygizmo.api import base class SurveyOption(base.Resource): resource_fmt_str = 'survey/%(survey_id)s/surveyquestion/%(question_id)s/surveyoption/%(option_id)s' resource_id_keys = ['survey_id', 'question_id'] def list(self, survey_id, question_id, **kwargs): kwargs.update({ 'survey_id': survey_id, 'question_id': question_id, }) return super(SurveyOption, self).list(**kwargs) def get(self, survey_id, question_id, option_id, **kwargs): kwargs.update({ 'survey_id': survey_id, 'question_id': question_id, 'option_id': option_id, }) return super(SurveyOption, self).get(**kwargs) def create(self, survey_id, question_id, **kwargs): kwargs.update({ 'survey_id': survey_id, 'question_id': question_id, }) return super(SurveyOption, self).create(**kwargs) def update(self, survey_id, question_id, option_id, **kwargs): kwargs.update({ 'survey_id': survey_id, 'question_id': question_id, 'option_id': option_id, }) return super(SurveyOption, self).update(**kwargs) def copy(self, **kwargs): raise NotImplementedError() def delete(self, survey_id, question_id, option_id, **kwargs): kwargs.update({ 'survey_id': survey_id, 'question_id': question_id, 'option_id': option_id, }) return super(SurveyOption, self).delete(**kwargs)
bsd-3-clause
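A minimal usage sketch for the SurveyOption resource above. Nothing below comes from the file itself: the SurveyGizmo client constructor arguments, the client.api.surveyoption accessor, the placeholder IDs, and the extra create() keyword arguments are all assumptions shown for illustration.

# Hypothetical usage sketch (assumptions, not from the source file).
from surveygizmo import SurveyGizmo

client = SurveyGizmo(api_token="...", api_token_secret="...")  # placeholder credentials
client.api.surveyoption.list(survey_id=12345, question_id=1)
client.api.surveyoption.get(survey_id=12345, question_id=1, option_id=10001)
client.api.surveyoption.create(survey_id=12345, question_id=1, title="Yes", value="yes")  # extra kwargs are illustrative
client.api.surveyoption.delete(survey_id=12345, question_id=1, option_id=10001)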
rfyiamcool/fileporter
fileporter/server.py
2223
#!/usr/bin/env python # -*- coding: utf-8 -*- import os.path import os,sys import logging import tornado.httpserver import tornado.ioloop import tornado.options import tornado.web import tornado.template as template from tornado.options import define, options #define("port", default=8888,help="run on the given port", type=int) define("address", default='0.0.0.0',help="bind to the given address", type=str) logging.basicConfig(level=logging.INFO) class Application(tornado.web.Application): def __init__(self): handlers = [ (r"/", IndexHandler), (r"/([A-Za-z0-9\_\.\-]+)", UploadHandler), (r"/undefined", ErrorHandler), ] settings = dict( static_path=os.path.join(os.path.dirname(__file__), "static"), ) tornado.web.Application.__init__(self, handlers, **settings) class IndexHandler(tornado.web.RequestHandler): @tornado.web.asynchronous def get(self): items = [] for filename in os.listdir("."): items.append(filename) self.render('static/index.html', items=items) def post(self): file_content = self.request.files['file'][0]['body'] file_name = self.request.files['file'][0]['filename'] x = open("upload/" + file_name, 'wb') x.write(file_content) x.close() self.redirect("/") class UploadHandler(tornado.web.RequestHandler): @tornado.web.asynchronous def get(self, filename): x = open(os.getcwd()+'/' + filename, 'rb') self.set_header('Content-Type', 'text/csv') self.set_header('Content-Disposition', 'attachment; filename=' + filename) self.finish(x.read()) class ErrorHandler(tornado.web.RequestHandler): @tornado.web.asynchronous def get(self): self.redirect('/') def main(): if sys.argv[1:]: port = int(sys.argv[1]) else: port = 8000 tornado.options.parse_command_line() http_server = tornado.httpserver.HTTPServer(Application(), xheaders=True) logging.info('Serving HTTP on %s port %d ...' % (options.address, port)) http_server.listen(port,options.address) tornado.ioloop.IOLoop.instance().start() if __name__ == "__main__": main()
bsd-3-clause
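For context, a hedged client-side sketch of how the two fileporter endpoints above can be exercised once the server is running locally. It assumes the default port 8000, the third-party requests library, and a placeholder file name; none of that is mandated by the source.

# Sketch only: exercises IndexHandler's POST (upload) and UploadHandler's GET (download).
# Note the asymmetry in the handlers: uploads are written under upload/, while
# downloads are served from the server's working directory.
import requests

base = "http://127.0.0.1:8000"

with open("example.txt", "rb") as fh:
    requests.post(base + "/", files={"file": fh})  # multipart field must be named "file"

resp = requests.get(base + "/example.txt")  # returned as an attachment
print(resp.status_code, len(resp.content))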
kitech/gearmand
libgearman/aggregator.cc
2003
/* vim:expandtab:shiftwidth=2:tabstop=2:smarttab: * * Gearmand client and server library. * * Copyright (C) 2011 Data Differential, http://datadifferential.com/ * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * * The names of its contributors may not be used to endorse or * promote products derived from this software without specific prior * written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ #include <config.h> #include <libgearman/common.h> #include "libgearman/assert.hpp" #include <cstdlib> #include <limits> #include <memory> #include <libgearman/aggregator.hpp> void *gearman_aggegator_context(gearman_aggregator_st *self) { if (not self) return NULL; return self->context; }
bsd-3-clause
jrobhoward/SCADAbase
contrib/llvm/tools/lldb/source/Commands/CommandObjectSettings.cpp
48169
//===-- CommandObjectSettings.cpp -------------------------------*- C++ -*-===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// #include "lldb/lldb-python.h" #include "CommandObjectSettings.h" // C Includes // C++ Includes // Other libraries and framework includes // Project includes #include "lldb/Interpreter/CommandInterpreter.h" #include "lldb/Interpreter/CommandReturnObject.h" #include "lldb/Interpreter/CommandCompletions.h" using namespace lldb; using namespace lldb_private; #include "llvm/ADT/StringRef.h" //------------------------------------------------------------------------- // CommandObjectSettingsSet //------------------------------------------------------------------------- class CommandObjectSettingsSet : public CommandObjectRaw { public: CommandObjectSettingsSet (CommandInterpreter &interpreter) : CommandObjectRaw (interpreter, "settings set", "Set or change the value of a single debugger setting variable.", NULL), m_options (interpreter) { CommandArgumentEntry arg1; CommandArgumentEntry arg2; CommandArgumentData var_name_arg; CommandArgumentData value_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg1.push_back (var_name_arg); // Define the first (and only) variant of this arg. value_arg.arg_type = eArgTypeValue; value_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg2.push_back (value_arg); // Push the data for the first argument into the m_arguments vector. m_arguments.push_back (arg1); m_arguments.push_back (arg2); SetHelpLong ( "When setting a dictionary or array variable, you can set multiple entries \n\ at once by giving the values to the set command. For example: \n\ \n\ (lldb) settings set target.run-args value1 value2 value3 \n\ (lldb) settings set target.env-vars MYPATH=~/.:/usr/bin SOME_ENV_VAR=12345 \n\ \n\ (lldb) settings show target.run-args \n\ [0]: 'value1' \n\ [1]: 'value2' \n\ [3]: 'value3' \n\ (lldb) settings show target.env-vars \n\ 'MYPATH=~/.:/usr/bin'\n\ 'SOME_ENV_VAR=12345' \n\ \n\ Warning: The 'set' command re-sets the entire array or dictionary. If you \n\ just want to add, remove or update individual values (or add something to \n\ the end), use one of the other settings sub-commands: append, replace, \n\ insert-before or insert-after.\n"); } virtual ~CommandObjectSettingsSet () {} // Overrides base class's behavior where WantsCompletion = !WantsRawCommandString. 
virtual bool WantsCompletion() { return true; } virtual Options * GetOptions () { return &m_options; } class CommandOptions : public Options { public: CommandOptions (CommandInterpreter &interpreter) : Options (interpreter), m_global (false) { } virtual ~CommandOptions () {} virtual Error SetOptionValue (uint32_t option_idx, const char *option_arg) { Error error; const int short_option = m_getopt_table[option_idx].val; switch (short_option) { case 'g': m_global = true; break; default: error.SetErrorStringWithFormat ("unrecognized options '%c'", short_option); break; } return error; } void OptionParsingStarting () { m_global = false; } const OptionDefinition* GetDefinitions () { return g_option_table; } // Options table: Required for subclasses of Options. static OptionDefinition g_option_table[]; // Instance variables to hold the values for command options. bool m_global; }; virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); const size_t argc = input.GetArgumentCount(); const char *arg = NULL; int setting_var_idx; for (setting_var_idx = 1; setting_var_idx < argc; ++setting_var_idx) { arg = input.GetArgumentAtIndex(setting_var_idx); if (arg && arg[0] != '-') break; // We found our setting variable name index } if (cursor_index == setting_var_idx) { // Attempting to complete setting variable name CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); } else { arg = input.GetArgumentAtIndex(cursor_index); if (arg) { if (arg[0] == '-') { // Complete option name } else { // Complete setting value const char *setting_var_name = input.GetArgumentAtIndex(setting_var_idx); Error error; lldb::OptionValueSP value_sp (m_interpreter.GetDebugger().GetPropertyValue(&m_exe_ctx, setting_var_name, false, error)); if (value_sp) { value_sp->AutoComplete (m_interpreter, completion_str.c_str(), match_start_point, max_return_elements, word_complete, matches); } } } } return matches.GetSize(); } protected: virtual bool DoExecute (const char *command, CommandReturnObject &result) { Args cmd_args(command); // Process possible options. if (!ParseOptions (cmd_args, result)) return false; const size_t argc = cmd_args.GetArgumentCount (); if ((argc < 2) && (!m_options.m_global)) { result.AppendError ("'settings set' takes more arguments"); result.SetStatus (eReturnStatusFailed); return false; } const char *var_name = cmd_args.GetArgumentAtIndex (0); if ((var_name == NULL) || (var_name[0] == '\0')) { result.AppendError ("'settings set' command requires a valid variable name"); result.SetStatus (eReturnStatusFailed); return false; } // Split the raw command into var_name and value pair. 
llvm::StringRef raw_str(command); std::string var_value_string = raw_str.split(var_name).second.str(); const char *var_value_cstr = Args::StripSpaces(var_value_string, true, true, false); Error error; if (m_options.m_global) { error = m_interpreter.GetDebugger().SetPropertyValue (NULL, eVarSetOperationAssign, var_name, var_value_cstr); } if (error.Success()) { // FIXME this is the same issue as the one in commands script import // we could be setting target.load-script-from-symbol-file which would cause // Python scripts to be loaded, which could run LLDB commands // (e.g. settings set target.process.python-os-plugin-path) and cause a crash // if we did not clear the command's exe_ctx first ExecutionContext exe_ctx(m_exe_ctx); m_exe_ctx.Clear(); error = m_interpreter.GetDebugger().SetPropertyValue (&exe_ctx, eVarSetOperationAssign, var_name, var_value_cstr); } if (error.Fail()) { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); return false; } else { result.SetStatus (eReturnStatusSuccessFinishResult); } return result.Succeeded(); } private: CommandOptions m_options; }; OptionDefinition CommandObjectSettingsSet::CommandOptions::g_option_table[] = { { LLDB_OPT_SET_2, false, "global", 'g', OptionParser::eNoArgument, NULL, 0, eArgTypeNone, "Apply the new value to the global default value." }, { 0, false, NULL, 0, 0, NULL, 0, eArgTypeNone, NULL } }; //------------------------------------------------------------------------- // CommandObjectSettingsShow -- Show current values //------------------------------------------------------------------------- class CommandObjectSettingsShow : public CommandObjectParsed { public: CommandObjectSettingsShow (CommandInterpreter &interpreter) : CommandObjectParsed (interpreter, "settings show", "Show the specified internal debugger setting variable and its value, or show all the currently set variables and their values, if nothing is specified.", NULL) { CommandArgumentEntry arg1; CommandArgumentData var_name_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatOptional; // There is only one variant this argument could be; put it into the argument entry. arg1.push_back (var_name_arg); // Push the data for the first argument into the m_arguments vector. 
m_arguments.push_back (arg1); } virtual ~CommandObjectSettingsShow () {} virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (Args& args, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishResult); const size_t argc = args.GetArgumentCount (); if (argc > 0) { for (size_t i=0; i<argc; ++i) { const char *property_path = args.GetArgumentAtIndex (i); Error error(m_interpreter.GetDebugger().DumpPropertyValue (&m_exe_ctx, result.GetOutputStream(), property_path, OptionValue::eDumpGroupValue)); if (error.Success()) { result.GetOutputStream().EOL(); } else { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); } } } else { m_interpreter.GetDebugger().DumpAllPropertyValues (&m_exe_ctx, result.GetOutputStream(), OptionValue::eDumpGroupValue); } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectSettingsList -- List settable variables //------------------------------------------------------------------------- class CommandObjectSettingsList : public CommandObjectParsed { public: CommandObjectSettingsList (CommandInterpreter &interpreter) : CommandObjectParsed (interpreter, "settings list", "List and describe all the internal debugger settings variables that are available to the user to 'set' or 'show', or describe a particular variable or set of variables (by specifying the variable name or a common prefix).", NULL) { CommandArgumentEntry arg; CommandArgumentData var_name_arg; CommandArgumentData prefix_name_arg; // Define the first variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatOptional; // Define the second variant of this arg. prefix_name_arg.arg_type = eArgTypeSettingPrefix; prefix_name_arg.arg_repetition = eArgRepeatOptional; arg.push_back (var_name_arg); arg.push_back (prefix_name_arg); // Push the data for the first argument into the m_arguments vector. 
m_arguments.push_back (arg); } virtual ~CommandObjectSettingsList () {} virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (Args& args, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishResult); const bool will_modify = false; const size_t argc = args.GetArgumentCount (); if (argc > 0) { const bool dump_qualified_name = true; for (size_t i=0; i<argc; ++i) { const char *property_path = args.GetArgumentAtIndex (i); const Property *property = m_interpreter.GetDebugger().GetValueProperties()->GetPropertyAtPath (&m_exe_ctx, will_modify, property_path); if (property) { property->DumpDescription (m_interpreter, result.GetOutputStream(), 0, dump_qualified_name); } else { result.AppendErrorWithFormat ("invalid property path '%s'", property_path); result.SetStatus (eReturnStatusFailed); } } } else { m_interpreter.GetDebugger().DumpAllDescriptions (m_interpreter, result.GetOutputStream()); } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectSettingsRemove //------------------------------------------------------------------------- class CommandObjectSettingsRemove : public CommandObjectRaw { public: CommandObjectSettingsRemove (CommandInterpreter &interpreter) : CommandObjectRaw (interpreter, "settings remove", "Remove the specified element from an array or dictionary settings variable.", NULL) { CommandArgumentEntry arg1; CommandArgumentEntry arg2; CommandArgumentData var_name_arg; CommandArgumentData index_arg; CommandArgumentData key_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg1.push_back (var_name_arg); // Define the first variant of this arg. index_arg.arg_type = eArgTypeSettingIndex; index_arg.arg_repetition = eArgRepeatPlain; // Define the second variant of this arg. key_arg.arg_type = eArgTypeSettingKey; key_arg.arg_repetition = eArgRepeatPlain; // Push both variants into this arg arg2.push_back (index_arg); arg2.push_back (key_arg); // Push the data for the first argument into the m_arguments vector. 
m_arguments.push_back (arg1); m_arguments.push_back (arg2); } virtual ~CommandObjectSettingsRemove () {} virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); // Attempting to complete variable name if (cursor_index < 2) CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (const char *command, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishNoResult); Args cmd_args(command); // Process possible options. if (!ParseOptions (cmd_args, result)) return false; const size_t argc = cmd_args.GetArgumentCount (); if (argc == 0) { result.AppendError ("'settings set' takes an array or dictionary item, or an array followed by one or more indexes, or a dictionary followed by one or more key names to remove"); result.SetStatus (eReturnStatusFailed); return false; } const char *var_name = cmd_args.GetArgumentAtIndex (0); if ((var_name == NULL) || (var_name[0] == '\0')) { result.AppendError ("'settings set' command requires a valid variable name"); result.SetStatus (eReturnStatusFailed); return false; } // Split the raw command into var_name and value pair. llvm::StringRef raw_str(command); std::string var_value_string = raw_str.split(var_name).second.str(); const char *var_value_cstr = Args::StripSpaces(var_value_string, true, true, false); Error error (m_interpreter.GetDebugger().SetPropertyValue (&m_exe_ctx, eVarSetOperationRemove, var_name, var_value_cstr)); if (error.Fail()) { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); return false; } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectSettingsReplace //------------------------------------------------------------------------- class CommandObjectSettingsReplace : public CommandObjectRaw { public: CommandObjectSettingsReplace (CommandInterpreter &interpreter) : CommandObjectRaw (interpreter, "settings replace", "Replace the specified element from an internal debugger settings array or dictionary variable with the specified new value.", NULL) { CommandArgumentEntry arg1; CommandArgumentEntry arg2; CommandArgumentEntry arg3; CommandArgumentData var_name_arg; CommandArgumentData index_arg; CommandArgumentData key_arg; CommandArgumentData value_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg1.push_back (var_name_arg); // Define the first (variant of this arg. index_arg.arg_type = eArgTypeSettingIndex; index_arg.arg_repetition = eArgRepeatPlain; // Define the second (variant of this arg. key_arg.arg_type = eArgTypeSettingKey; key_arg.arg_repetition = eArgRepeatPlain; // Put both variants into this arg arg2.push_back (index_arg); arg2.push_back (key_arg); // Define the first (and only) variant of this arg. 
value_arg.arg_type = eArgTypeValue; value_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg3.push_back (value_arg); // Push the data for the first argument into the m_arguments vector. m_arguments.push_back (arg1); m_arguments.push_back (arg2); m_arguments.push_back (arg3); } virtual ~CommandObjectSettingsReplace () {} // Overrides base class's behavior where WantsCompletion = !WantsRawCommandString. virtual bool WantsCompletion() { return true; } virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); // Attempting to complete variable name if (cursor_index < 2) CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (const char *command, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishNoResult); Args cmd_args(command); const char *var_name = cmd_args.GetArgumentAtIndex (0); if ((var_name == NULL) || (var_name[0] == '\0')) { result.AppendError ("'settings replace' command requires a valid variable name; No value supplied"); result.SetStatus (eReturnStatusFailed); return false; } // Split the raw command into var_name, index_value, and value triple. llvm::StringRef raw_str(command); std::string var_value_string = raw_str.split(var_name).second.str(); const char *var_value_cstr = Args::StripSpaces(var_value_string, true, true, false); Error error(m_interpreter.GetDebugger().SetPropertyValue (&m_exe_ctx, eVarSetOperationReplace, var_name, var_value_cstr)); if (error.Fail()) { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); return false; } else { result.SetStatus (eReturnStatusSuccessFinishNoResult); } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectSettingsInsertBefore //------------------------------------------------------------------------- class CommandObjectSettingsInsertBefore : public CommandObjectRaw { public: CommandObjectSettingsInsertBefore (CommandInterpreter &interpreter) : CommandObjectRaw (interpreter, "settings insert-before", "Insert value(s) into an internal debugger settings array variable, immediately before the specified element.", NULL) { CommandArgumentEntry arg1; CommandArgumentEntry arg2; CommandArgumentEntry arg3; CommandArgumentData var_name_arg; CommandArgumentData index_arg; CommandArgumentData value_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg1.push_back (var_name_arg); // Define the first (variant of this arg. index_arg.arg_type = eArgTypeSettingIndex; index_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg2.push_back (index_arg); // Define the first (and only) variant of this arg. 
value_arg.arg_type = eArgTypeValue; value_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg3.push_back (value_arg); // Push the data for the first argument into the m_arguments vector. m_arguments.push_back (arg1); m_arguments.push_back (arg2); m_arguments.push_back (arg3); } virtual ~CommandObjectSettingsInsertBefore () {} // Overrides base class's behavior where WantsCompletion = !WantsRawCommandString. virtual bool WantsCompletion() { return true; } virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); // Attempting to complete variable name if (cursor_index < 2) CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (const char *command, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishNoResult); Args cmd_args(command); const size_t argc = cmd_args.GetArgumentCount (); if (argc < 3) { result.AppendError ("'settings insert-before' takes more arguments"); result.SetStatus (eReturnStatusFailed); return false; } const char *var_name = cmd_args.GetArgumentAtIndex (0); if ((var_name == NULL) || (var_name[0] == '\0')) { result.AppendError ("'settings insert-before' command requires a valid variable name; No value supplied"); result.SetStatus (eReturnStatusFailed); return false; } // Split the raw command into var_name, index_value, and value triple. llvm::StringRef raw_str(command); std::string var_value_string = raw_str.split(var_name).second.str(); const char *var_value_cstr = Args::StripSpaces(var_value_string, true, true, false); Error error(m_interpreter.GetDebugger().SetPropertyValue (&m_exe_ctx, eVarSetOperationInsertBefore, var_name, var_value_cstr)); if (error.Fail()) { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); return false; } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectSettingInsertAfter //------------------------------------------------------------------------- class CommandObjectSettingsInsertAfter : public CommandObjectRaw { public: CommandObjectSettingsInsertAfter (CommandInterpreter &interpreter) : CommandObjectRaw (interpreter, "settings insert-after", "Insert value(s) into an internal debugger settings array variable, immediately after the specified element.", NULL) { CommandArgumentEntry arg1; CommandArgumentEntry arg2; CommandArgumentEntry arg3; CommandArgumentData var_name_arg; CommandArgumentData index_arg; CommandArgumentData value_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg1.push_back (var_name_arg); // Define the first (variant of this arg. index_arg.arg_type = eArgTypeSettingIndex; index_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. 
arg2.push_back (index_arg); // Define the first (and only) variant of this arg. value_arg.arg_type = eArgTypeValue; value_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg3.push_back (value_arg); // Push the data for the first argument into the m_arguments vector. m_arguments.push_back (arg1); m_arguments.push_back (arg2); m_arguments.push_back (arg3); } virtual ~CommandObjectSettingsInsertAfter () {} // Overrides base class's behavior where WantsCompletion = !WantsRawCommandString. virtual bool WantsCompletion() { return true; } virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); // Attempting to complete variable name if (cursor_index < 2) CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (const char *command, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishNoResult); Args cmd_args(command); const size_t argc = cmd_args.GetArgumentCount (); if (argc < 3) { result.AppendError ("'settings insert-after' takes more arguments"); result.SetStatus (eReturnStatusFailed); return false; } const char *var_name = cmd_args.GetArgumentAtIndex (0); if ((var_name == NULL) || (var_name[0] == '\0')) { result.AppendError ("'settings insert-after' command requires a valid variable name; No value supplied"); result.SetStatus (eReturnStatusFailed); return false; } // Split the raw command into var_name, index_value, and value triple. llvm::StringRef raw_str(command); std::string var_value_string = raw_str.split(var_name).second.str(); const char *var_value_cstr = Args::StripSpaces(var_value_string, true, true, false); Error error(m_interpreter.GetDebugger().SetPropertyValue (&m_exe_ctx, eVarSetOperationInsertAfter, var_name, var_value_cstr)); if (error.Fail()) { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); return false; } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectSettingsAppend //------------------------------------------------------------------------- class CommandObjectSettingsAppend : public CommandObjectRaw { public: CommandObjectSettingsAppend (CommandInterpreter &interpreter) : CommandObjectRaw (interpreter, "settings append", "Append a new value to the end of an internal debugger settings array, dictionary or string variable.", NULL) { CommandArgumentEntry arg1; CommandArgumentEntry arg2; CommandArgumentData var_name_arg; CommandArgumentData value_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg1.push_back (var_name_arg); // Define the first (and only) variant of this arg. value_arg.arg_type = eArgTypeValue; value_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. 
arg2.push_back (value_arg); // Push the data for the first argument into the m_arguments vector. m_arguments.push_back (arg1); m_arguments.push_back (arg2); } virtual ~CommandObjectSettingsAppend () {} // Overrides base class's behavior where WantsCompletion = !WantsRawCommandString. virtual bool WantsCompletion() { return true; } virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); // Attempting to complete variable name if (cursor_index < 2) CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (const char *command, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishNoResult); Args cmd_args(command); const size_t argc = cmd_args.GetArgumentCount (); if (argc < 2) { result.AppendError ("'settings append' takes more arguments"); result.SetStatus (eReturnStatusFailed); return false; } const char *var_name = cmd_args.GetArgumentAtIndex (0); if ((var_name == NULL) || (var_name[0] == '\0')) { result.AppendError ("'settings append' command requires a valid variable name; No value supplied"); result.SetStatus (eReturnStatusFailed); return false; } // Do not perform cmd_args.Shift() since StringRef is manipulating the // raw character string later on. // Split the raw command into var_name and value pair. llvm::StringRef raw_str(command); std::string var_value_string = raw_str.split(var_name).second.str(); const char *var_value_cstr = Args::StripSpaces(var_value_string, true, true, false); Error error(m_interpreter.GetDebugger().SetPropertyValue (&m_exe_ctx, eVarSetOperationAppend, var_name, var_value_cstr)); if (error.Fail()) { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); return false; } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectSettingsClear //------------------------------------------------------------------------- class CommandObjectSettingsClear : public CommandObjectParsed { public: CommandObjectSettingsClear (CommandInterpreter &interpreter) : CommandObjectParsed (interpreter, "settings clear", "Erase all the contents of an internal debugger settings variables; this is only valid for variables with clearable types, i.e. strings, arrays or dictionaries.", NULL) { CommandArgumentEntry arg; CommandArgumentData var_name_arg; // Define the first (and only) variant of this arg. var_name_arg.arg_type = eArgTypeSettingVariableName; var_name_arg.arg_repetition = eArgRepeatPlain; // There is only one variant this argument could be; put it into the argument entry. arg.push_back (var_name_arg); // Push the data for the first argument into the m_arguments vector. 
m_arguments.push_back (arg); } virtual ~CommandObjectSettingsClear () {} virtual int HandleArgumentCompletion (Args &input, int &cursor_index, int &cursor_char_position, OptionElementVector &opt_element_vector, int match_start_point, int max_return_elements, bool &word_complete, StringList &matches) { std::string completion_str (input.GetArgumentAtIndex (cursor_index), cursor_char_position); // Attempting to complete variable name if (cursor_index < 2) CommandCompletions::InvokeCommonCompletionCallbacks (m_interpreter, CommandCompletions::eSettingsNameCompletion, completion_str.c_str(), match_start_point, max_return_elements, NULL, word_complete, matches); return matches.GetSize(); } protected: virtual bool DoExecute (Args& command, CommandReturnObject &result) { result.SetStatus (eReturnStatusSuccessFinishNoResult); const size_t argc = command.GetArgumentCount (); if (argc != 1) { result.AppendError ("'setttings clear' takes exactly one argument"); result.SetStatus (eReturnStatusFailed); return false; } const char *var_name = command.GetArgumentAtIndex (0); if ((var_name == NULL) || (var_name[0] == '\0')) { result.AppendError ("'settings clear' command requires a valid variable name; No value supplied"); result.SetStatus (eReturnStatusFailed); return false; } Error error (m_interpreter.GetDebugger().SetPropertyValue (&m_exe_ctx, eVarSetOperationClear, var_name, NULL)); if (error.Fail()) { result.AppendError (error.AsCString()); result.SetStatus (eReturnStatusFailed); return false; } return result.Succeeded(); } }; //------------------------------------------------------------------------- // CommandObjectMultiwordSettings //------------------------------------------------------------------------- CommandObjectMultiwordSettings::CommandObjectMultiwordSettings (CommandInterpreter &interpreter) : CommandObjectMultiword (interpreter, "settings", "A set of commands for manipulating internal settable debugger variables.", "settings <command> [<command-options>]") { LoadSubCommand ("set", CommandObjectSP (new CommandObjectSettingsSet (interpreter))); LoadSubCommand ("show", CommandObjectSP (new CommandObjectSettingsShow (interpreter))); LoadSubCommand ("list", CommandObjectSP (new CommandObjectSettingsList (interpreter))); LoadSubCommand ("remove", CommandObjectSP (new CommandObjectSettingsRemove (interpreter))); LoadSubCommand ("replace", CommandObjectSP (new CommandObjectSettingsReplace (interpreter))); LoadSubCommand ("insert-before", CommandObjectSP (new CommandObjectSettingsInsertBefore (interpreter))); LoadSubCommand ("insert-after", CommandObjectSP (new CommandObjectSettingsInsertAfter (interpreter))); LoadSubCommand ("append", CommandObjectSP (new CommandObjectSettingsAppend (interpreter))); LoadSubCommand ("clear", CommandObjectSP (new CommandObjectSettingsClear (interpreter))); } CommandObjectMultiwordSettings::~CommandObjectMultiwordSettings () { }
bsd-3-clause
zstars/booledeusto
EUS/boole_14_34.cpp
2398
// Do not edit. This file is machine generated by the Resource DLL Wizard. //--------------------------------------------------------------------------- #define PACKAGE __declspec(package) #define USERC(FileName) extern PACKAGE _Dummy #define USERES(FileName) extern PACKAGE _Dummy #define USEFORMRES(FileName, FormName, AncestorName) extern PACKAGE _Dummy #pragma hdrstop int _turboFloat; //--------------------------------------------------------------------------- /*ITE*/ /*LCID:00000C0A:0000042D*/ /**/ /*ITE*/ /*DFMFileType*/ /*src\Boole2\ayuda.dfm*/ /*ITE*/ /*RCFileType*/ /*exe\boole_DRC.rc*/ /*ITE*/ /*RCFileType*/ /*src\res\mensajes.rc*/ //--------------------------------------------------------------------------- #pragma resource "src\Boole1\app.dfm" #pragma resource "src\Boole1\calc.dfm" #pragma resource "src\Boole1\uKarnaugh.dfm" #pragma resource "src\Boole1\ExpBool.dfm" #pragma resource "src\Boole1\FormasN.dfm" #pragma resource "src\Boole1\FormSimp.dfm" #pragma resource "src\Boole1\Main.dfm" #pragma resource "src\Boole1\NandNor.dfm" #pragma resource "src\Boole1\NuevoSC.dfm" #pragma resource "src\Boole1\SCCompac.dfm" #pragma resource "src\Boole1\TVComple.dfm" #pragma resource "src\Boole1\TVManual.dfm" #pragma resource "src\Boole2\FCalculando.dfm" USEFORMRES("src\Boole2\ayuda.dfm", TForm); #pragma resource "src\Boole2\ayuda.dfm" #pragma resource "src\Boole2\uLog.dfm" #pragma resource "src\Boole2\Unit15.dfm" #pragma resource "src\Boole2\Unit11.dfm" #pragma resource "src\Boole2\Unit12.dfm" #pragma resource "src\Boole2\Unit13.dfm" #pragma resource "src\Boole2\Unit14.dfm" #pragma resource "src\Boole2\Unit10.dfm" #pragma resource "src\Boole2\V_Boole2.dfm" #pragma resource "src\Boole2\Unit2.dfm" #pragma resource "src\Boole2\Unit3.dfm" #pragma resource "src\Boole2\Unit4.dfm" #pragma resource "src\Boole2\Unit5.dfm" #pragma resource "src\Boole2\Unit6.dfm" #pragma resource "src\Boole2\Unit8.dfm" #pragma resource "src\Boole2\Unit9.dfm" #pragma resource "src\Boole2\uSimulacion.dfm" #pragma resource "src\Boole2\Unit16.dfm" #pragma resource "src\Circuito\V_Imprimir.dfm" #pragma resource "src\Circuito\V_Circuito.dfm" #pragma resource "src\Comun\uTextoAsoc.dfm" USERC("exe\boole_DRC.rc"); USERC("src\res\mensajes.rc"); //--------------------------------------------------------------------------- #define DllEntryPoint
bsd-3-clause
FreeCodeCamp/FreeCodeCamp
client/src/templates/Challenges/classic/action-row.tsx
1615
import React from 'react'; import { connect } from 'react-redux'; import BreadCrumb from '../components/bread-crumb'; import { resetChallenge } from '../redux'; import EditorTabs from './editor-tabs'; interface ActionRowProps { block: string; showConsole: boolean; showNotes?: boolean; showPreview: boolean; superBlock: string; switchDisplayTab: (displayTab: string) => void; resetChallenge: () => void; } const mapDispatchToProps = { resetChallenge }; const ActionRow = ({ switchDisplayTab, showPreview, showConsole, superBlock, block, resetChallenge }: ActionRowProps): JSX.Element => { return ( <div className='action-row'> <div className='breadcrumbs-demo'> <BreadCrumb block={block} superBlock={superBlock} /> </div> <div className='tabs-row'> <EditorTabs /> <button className='restart-step-tab' onClick={resetChallenge} role='tab' > Restart Step </button> <div className='panel-display-tabs'> <button className={showConsole ? 'active-tab' : ''} onClick={() => switchDisplayTab('showConsole')} role='tab' > JS Console </button> <button className={showPreview ? 'active-tab' : ''} onClick={() => switchDisplayTab('showPreview')} role='tab' > Show Preview </button> </div> </div> </div> ); }; ActionRow.displayName = 'ActionRow'; export default connect(null, mapDispatchToProps)(ActionRow);
bsd-3-clause
SaschaMester/delicium
tools/telemetry/telemetry/internal/browser/user_agent_unittest.py
1431
# Copyright 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry import decorators from telemetry.internal.browser import user_agent from telemetry.unittest_util import tab_test_case class MobileUserAgentTest(tab_test_case.TabTestCase): @classmethod def CustomizeBrowserOptions(cls, options): options.browser_user_agent_type = 'mobile' @decorators.Disabled('chromeos') # crbug.com/483212 def testUserAgent(self): ua = self._tab.EvaluateJavaScript('window.navigator.userAgent') self.assertEquals(ua, user_agent.UA_TYPE_MAPPING['mobile']) class TabletUserAgentTest(tab_test_case.TabTestCase): @classmethod def CustomizeBrowserOptions(cls, options): options.browser_user_agent_type = 'tablet' @decorators.Disabled('chromeos') # crbug.com/483212 def testUserAgent(self): ua = self._tab.EvaluateJavaScript('window.navigator.userAgent') self.assertEquals(ua, user_agent.UA_TYPE_MAPPING['tablet']) class DesktopUserAgentTest(tab_test_case.TabTestCase): @classmethod def CustomizeBrowserOptions(cls, options): options.browser_user_agent_type = 'desktop' @decorators.Disabled('chromeos') # crbug.com/483212 def testUserAgent(self): ua = self._tab.EvaluateJavaScript('window.navigator.userAgent') self.assertEquals(ua, user_agent.UA_TYPE_MAPPING['desktop'])
bsd-3-clause
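As a quick cross-check of the mapping the three test classes above compare against, the table can be inspected directly without launching a browser. This is a sketch and assumes the telemetry package from the same checkout is importable.

# Prints the fixed user-agent string registered for each browser_user_agent_type.
from telemetry.internal.browser import user_agent

for ua_type in ("mobile", "tablet", "desktop"):
    print(ua_type, "->", user_agent.UA_TYPE_MAPPING[ua_type])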
snowplow/snowplow-javascript-tracker
plugins/browser-plugin-media-tracking/src/findMediaElement.ts
1374
import { SEARCH_ERROR, TAG } from './constants'; import { SearchResult } from './types'; export function findMediaElem(id: string): SearchResult { let el: HTMLVideoElement | HTMLAudioElement | HTMLElement | null = document.getElementById(id); if (!el) return { err: SEARCH_ERROR.NOT_FOUND }; if (isAudioElement(el)) return { el: el }; if (isVideoElement(el)) { // Plyr loads in an initial blank video with currentSrc as https://cdn.plyr.io/static/blank.mp4 // so we need to check until currentSrc updates if (el.currentSrc === 'https://cdn.plyr.io/static/blank.mp4' && el.readyState === 0) { return { err: SEARCH_ERROR.PLYR_CURRENTSRC }; } return { el: el }; } return findMediaElementChild(el); } function findMediaElementChild(el: Element): SearchResult { for (let tag of Object.keys(TAG)) { let elem = el.getElementsByTagName(tag); if (elem.length === 1) { if (isAudioElement(elem[0])) return { el: elem[0] }; if (isVideoElement(elem[0])) return { el: elem[0] }; } else if (elem.length > 1) { return { err: SEARCH_ERROR.MULTIPLE_ELEMENTS }; } } return { err: SEARCH_ERROR.NOT_FOUND }; } function isAudioElement(el: Element): el is HTMLAudioElement { return el.tagName === TAG.AUDIO; } function isVideoElement(el: Element): el is HTMLVideoElement { return el.tagName === TAG.VIDEO; }
bsd-3-clause
parrt/cs601
lectures/code/db/TestDB.java
981
import java.sql.Connection; import java.sql.DriverManager; public class TestDB { public static void main(String[] args) throws Exception { String dbFile = "/tmp/test.db"; Connection db = null; long start = System.currentTimeMillis(); try { Class.forName("org.sqlite.JDBC"); // force load of driver db = DriverManager.getConnection("jdbc:sqlite:" + dbFile); // DO SOMETHING WITH db to read/write // Statement statement = db.createStatement(); // statement.executeUpdate("create table person (id integer, name string)"); // statement.executeUpdate("insert into person values(1, 'leo')"); // statement.close(); long stop = System.currentTimeMillis(); System.out.printf("SQL exe time %1.1f seconds\n", (stop-start)/1000.0); } finally { if ( db!=null ) { db.close(); } } } }
bsd-3-clause
seem-sky/treefrog-framework
src/tjsonutil.cpp
230
/* Copyright (c) 2013, AOYAMA Kazuharu * All rights reserved. * * This software may be used and distributed according to the terms of * the New BSD License, which is incorporated herein by reference. */ #include <TJsonUtil>
bsd-3-clause
ovh/cds
engine/api/services/cache.go
3941
package services import ( "context" "sync" "time" "github.com/lib/pq" "github.com/rockbears/log" "github.com/ovh/cds/engine/api/database/gorpmapping" "github.com/ovh/cds/engine/database" "github.com/ovh/cds/sdk" ) type event struct { f func(s sdk.Service) s sdk.Service } type iCache struct { dbConnFactory *database.DBConnectionFactory mutex sync.RWMutex data map[string][]sdk.Service chanEvent chan event } var internalCache iCache // Initialize the service package func Initialize(c context.Context, dbF *database.DBConnectionFactory, goRoutines *sdk.GoRoutines) { internalCache = iCache{ chanEvent: make(chan event), data: make(map[string][]sdk.Service), dbConnFactory: dbF, mutex: sync.RWMutex{}, } goRoutines.Run(c, "service.internalCache.doUpdateData", internalCache.doUpdateData) goRoutines.Run(c, "service.internalCache.doListenDatabase", internalCache.doListenDatabase) } func (c *iCache) updateCache(s sdk.Service) { ss, ok := c.data[s.Type] indexToUpdate := -1 if !ok { ss = make([]sdk.Service, 0, 1) } else { for i, sub := range ss { if sub.Name == s.Name { indexToUpdate = i break } } } if indexToUpdate == -1 { ss = append(ss, s) } else { ss[indexToUpdate] = s } c.data[s.Type] = ss } func (c *iCache) removeFromCache(s sdk.Service) { ss, ok := c.data[s.Type] if !ok || len(ss) == 0 { return } indexToSplit := 0 for i, sub := range ss { if sub.Name == s.Name { indexToSplit = i break } } ss = append(ss[:indexToSplit], ss[indexToSplit+1:]...) c.data[s.Type] = ss } func (c *iCache) getFromCache(s string) ([]sdk.Service, bool) { if c == nil { return nil, false } c.mutex.RLock() defer c.mutex.RUnlock() ss, ok := c.data[s] return ss, ok } func (c *iCache) doUpdateData(ctx context.Context) { for { select { case <-ctx.Done(): break case e, has := <-c.chanEvent: if e.f != nil { c.mutex.Lock() e.f(e.s) c.mutex.Unlock() } if !has { break } } } } func (c *iCache) doListenDatabase(ctx context.Context) { chanErr := make(chan error) eventCallback := func(ev pq.ListenerEventType, err error) { if err != nil { chanErr <- err } } listener := c.dbConnFactory.NewListener(time.Second, 10*time.Second, eventCallback) if err := listener.Listen("events"); err != nil { log.Error(ctx, "Unable to %v", err) } for { select { case <-ctx.Done(): break case err := <-chanErr: log.Error(ctx, "doListenDatabase> %v", err) listener := c.dbConnFactory.NewListener(time.Second, 10*time.Second, eventCallback) if err := listener.Listen("events"); err != nil { chanErr <- err } case n := <-listener.Notify: e := map[string]interface{}{} if err := sdk.JSONUnmarshal([]byte(n.Extra), &e); err != nil { log.Warn(ctx, "unable to unmarshal received event: %v", err) continue } iTableName, has := e["table"] if !has { continue } table, ok := iTableName.(string) if !ok || table != "services" { continue } iAction, has := e["action"] if !has { continue } action, ok := iAction.(string) if !ok { continue } data, has := e["data"] if !has { continue } dataAsObject, ok := data.(map[string]interface{}) if !ok { continue } name := dataAsObject["name"].(string) db := database.DBMap(gorpmapping.Mapper, c.dbConnFactory.DB()) switch action { case "UPDATE", "INSERT": srv, err := LoadByName(ctx, db, name) if err != nil { log.Error(ctx, "unable to find service %s: %v", name, err) continue } c.chanEvent <- event{c.updateCache, *srv} case "DELETE": c.chanEvent <- event{c.removeFromCache, sdk.Service{ CanonicalService: sdk.CanonicalService{ Name: name, }, }} } case <-time.After(90 * time.Second): go func() { listener.Ping() // nolint }() } } }
bsd-3-clause
tommienu/zf2appengine
module/Application/config/module.config.php
4508
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/ZendSkeletonApplication for the canonical source repository * @copyright Copyright (c) 2005-2013 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ return array( 'router' => array( 'routes' => array( 'home' => array( 'type' => 'Zend\Mvc\Router\Http\Literal', 'options' => array( 'route' => '/', 'defaults' => array( 'controller' => 'Application\Controller\Index', 'action' => 'index', ), ), ), 'appenginetest' => array( 'type' => 'Zend\Mvc\Router\Http\Literal', 'options' => array( 'route' => '/appenginetest', 'defaults' => array( 'controller' => 'Application\Controller\Index', 'action' => 'appenginetest', ), ), ), 'localstoragetest' => array( 'type' => 'Zend\Mvc\Router\Http\Literal', 'options' => array( 'route' => '/localstoragetest', 'defaults' => array( 'controller' => 'Application\Controller\Index', 'action' => 'localstoragetest', ), ), ), // The following is a route to simplify getting started creating // new controllers and actions without needing to create a new // module. Simply drop new controllers in, and you can access them // using the path /application/:controller/:action 'application' => array( 'type' => 'Literal', 'options' => array( 'route' => '/application', 'defaults' => array( '__NAMESPACE__' => 'Application\Controller', 'controller' => 'Index', 'action' => 'index', ), ), 'may_terminate' => true, 'child_routes' => array( 'default' => array( 'type' => 'Segment', 'options' => array( 'route' => '/[:controller[/:action]]', 'constraints' => array( 'controller' => '[a-zA-Z][a-zA-Z0-9_-]*', 'action' => '[a-zA-Z][a-zA-Z0-9_-]*', ), 'defaults' => array( ), ), ), ), ), ), ), 'service_manager' => array( 'abstract_factories' => array( 'Zend\Cache\Service\StorageCacheAbstractServiceFactory', 'Zend\Log\LoggerAbstractServiceFactory', ), 'aliases' => array( 'translator' => 'MvcTranslator', ), ), 'translator' => array( 'locale' => 'en_US', 'translation_file_patterns' => array( array( 'type' => 'gettext', 'base_dir' => __DIR__ . '/../language', 'pattern' => '%s.mo', ), ), ), 'controllers' => array( 'invokables' => array( 'Application\Controller\Index' => 'Application\Controller\IndexController' ), ), 'view_manager' => array( 'display_not_found_reason' => true, 'display_exceptions' => true, 'doctype' => 'HTML5', 'not_found_template' => 'error/404', 'exception_template' => 'error/index', 'template_map' => array( 'layout/layout' => __DIR__ . '/../view/layout/layout.phtml', 'application/index/index' => __DIR__ . '/../view/application/index/index.phtml', 'error/404' => __DIR__ . '/../view/error/404.phtml', 'error/index' => __DIR__ . '/../view/error/index.phtml', ), 'template_path_stack' => array( __DIR__ . '/../view', ), ), );
bsd-3-clause
kaaterskil/traveloti
module/Application/src/Application/View/Helper/NotificationIconPicker.php
665
<?php
/**
 * Traveloti Library
 *
 * @category Traveloti
 * @package Traveloti_Application
 * @copyright Copyright (c) 2009-2012 Kaaterskil Management, LLC
 * @version $Id: $
 */

namespace Application\View\Helper;

use Base\Model\Notification;
use Zend\Form\View\Helper\AbstractHelper;

/**
 * Returns the icon class for the specified notification type
 * @author Blair
 */
class NotificationIconPicker extends AbstractHelper
{
    public function __invoke(Notification $notification)
    {
        $type = $notification->getType();
        $clazz = 'notification';
        switch ($type) {
            case 'notification':
            default:
                $clazz = 'icon-notify';
        }
        return $clazz;
    }
}
bsd-3-clause
motech/MOTECH-Mobile
motech-mobile-omp/src/main/java/org/motechproject/mobile/omp/manager/intellivr/IVRCallSession.java
9635
/** * MOTECH PLATFORM OPENSOURCE LICENSE AGREEMENT * * Copyright (c) 2010-11 The Trustees of Columbia University in the City of * New York and Grameen Foundation USA. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of Grameen Foundation USA, Columbia University, or * their respective contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY GRAMEEN FOUNDATION USA, COLUMBIA UNIVERSITY * AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRAMEEN FOUNDATION * USA, COLUMBIA UNIVERSITY OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.motechproject.mobile.omp.manager.intellivr; import java.util.Date; import java.util.HashSet; import java.util.Set; import org.motechproject.mobile.core.model.MessageRequest; /** * Represents a group of MessageRequest to be delivered to a particular user at * a particular number at the same time. Tracks the number of time delivery has been attempted, * when it will be retried if at all, and if the session was the result of a user calling the system * or the reverse. * @author fcbrooks * */ public class IVRCallSession { public static String INBOUND = "IN"; public static String OUTBOUND = "OUT"; public static int OPEN = 0; public static int SEND_WAIT = 1; public static int REPORT_WAIT = 2; public static int CLOSED = 3; private long id; private int version; private String userId; private String phone; private String language; private String callDirection; private int attempts; private int days; private int state; private Date created; private Date nextAttempt; private Set<MessageRequest> messageRequests; private Set<IVRCall> calls; public IVRCallSession() {} public IVRCallSession(String userId, String phone, String language, String callDirection, int attempts, int days, int state, Date created, Date nextAttempt) { this.userId = userId; this.phone = phone; this.language = language; this.callDirection = callDirection; this.attempts = attempts; this.days = days; this.state = state; this.created = created == null ? 
new Date() : created; this.nextAttempt = nextAttempt; messageRequests = new HashSet<MessageRequest>(); calls = new HashSet<IVRCall>(); } public long getId() { return id; } public void setId(long id) { this.id = id; } public int getVersion() { return version; } public void setVersion(int version) { this.version = version; } /** * * @return identifier for the user */ public String getUserId() { return userId; } public void setUserId(String userId) { this.userId = userId; } /** * * @return phone number to call. Null for INBOUND calls */ public String getPhone() { return phone; } public void setPhone(String phone) { this.phone = phone; } /** * * @return language of call. Null for INBOUND */ public String getLanguage() { return language; } public void setLanguage(String language) { this.language = language; } /** * * @return OUT if system called user. IN if user called system */ public String getCallDirection() { return callDirection; } public void setCallDirection(String callDirection) { this.callDirection = callDirection; } /** * * @return number of attempts to call the user requested */ public int getAttempts() { return attempts; } public void setAttempts(int attempts) { this.attempts = attempts; } /** * * @return number of days call requests have been completed for */ public int getDays() { return days; } public void setDays(int days) { this.days = days; } public int getState() { return state; } public void setState(int state) { this.state = state; } public Date getCreated() { return created; } public void setCreated(Date created) { this.created = created; } /** * * @return next time a call rquest will be made */ public Date getNextAttempt() { return nextAttempt; } public void setNextAttempt(Date nextAttempt) { this.nextAttempt = nextAttempt; } /** * * @return the underlying {@link MessageRequest} */ public Set<MessageRequest> getMessageRequests(){ return messageRequests; } public void setMessageRequests(Set<MessageRequest> messageRequests) { this.messageRequests = messageRequests; } /** * * @return {@link IVRCall}. One for each OUTBOUND request or INBOUND call. */ public Set<IVRCall> getCalls() { return calls; } public void setCalls(Set<IVRCall> calls) { this.calls = calls; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + attempts; result = prime * result + ((callDirection == null) ? 0 : callDirection.hashCode()); result = prime * result + ((calls == null) ? 0 : calls.hashCode()); result = prime * result + ((created == null) ? 0 : created.hashCode()); result = prime * result + days; result = prime * result + ((language == null) ? 0 : language.hashCode()); result = prime * result + ((messageRequests == null) ? 0 : messageRequests.hashCode()); result = prime * result + ((nextAttempt == null) ? 0 : nextAttempt.hashCode()); result = prime * result + ((phone == null) ? 0 : phone.hashCode()); result = prime * result + state; result = prime * result + ((userId == null) ? 
0 : userId.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; IVRCallSession other = (IVRCallSession) obj; if (attempts != other.attempts) return false; if (callDirection == null) { if (other.callDirection != null) return false; } else if (!callDirection.equals(other.callDirection)) return false; if (calls == null) { if (other.calls != null) return false; } else if (!calls.equals(other.calls)) return false; if (created == null) { if (other.created != null) return false; } else if (!created.equals(other.created)) return false; if (days != other.days) return false; if (language == null) { if (other.language != null) return false; } else if (!language.equals(other.language)) return false; if (messageRequests == null) { if (other.messageRequests != null) return false; } else if (!messageRequests.equals(other.messageRequests)) return false; if (nextAttempt == null) { if (other.nextAttempt != null) return false; } else if (!nextAttempt.equals(other.nextAttempt)) return false; if (phone == null) { if (other.phone != null) return false; } else if (!phone.equals(other.phone)) return false; if (state != other.state) return false; if (userId == null) { if (other.userId != null) return false; } else if (!userId.equals(other.userId)) return false; return true; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append("[IVRCallSession"); builder.append(" id="); builder.append(id); builder.append(" version="); builder.append(version); builder.append(" userId="); builder.append(userId == null ? "null" : userId); builder.append(" phone="); builder.append(phone == null ? "null" : phone); builder.append(" language="); builder.append(language == null ? "null" : language); builder.append(" direction="); builder.append(callDirection == null ? "null" : callDirection); builder.append(" attempt="); builder.append(attempts); builder.append(" days="); builder.append(days); builder.append(" created="); builder.append(created == null ? "null" : created); builder.append(" nextAttempt="); builder.append(nextAttempt == null ? "null" : nextAttempt); builder.append(" state="); builder.append(state == OPEN ? "OPEN" : (state == SEND_WAIT ? "SEND_WAIT" : (state == REPORT_WAIT ? "REPORT_WAIT" : (state == CLOSED ? "CLOSED" : "INVALID")))); builder.append("]"); return builder.toString(); } }
bsd-3-clause
tschottdorf/rocksdb
table/block_based_table_reader.cc
45683
// Copyright (c) 2013, Facebook, Inc. All rights reserved. // This source code is licensed under the BSD-style license found in the // LICENSE file in the root directory of this source tree. An additional grant // of patent rights can be found in the PATENTS file in the same directory. // // Copyright (c) 2011 The LevelDB Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. See the AUTHORS file for names of contributors. #include "table/block_based_table_reader.h" #include <string> #include <utility> #include "db/dbformat.h" #include "rocksdb/cache.h" #include "rocksdb/comparator.h" #include "rocksdb/env.h" #include "rocksdb/filter_policy.h" #include "rocksdb/iterator.h" #include "rocksdb/options.h" #include "rocksdb/statistics.h" #include "rocksdb/table.h" #include "rocksdb/table_properties.h" #include "table/block.h" #include "table/filter_block.h" #include "table/block_hash_index.h" #include "table/block_prefix_index.h" #include "table/format.h" #include "table/meta_blocks.h" #include "table/two_level_iterator.h" #include "util/coding.h" #include "util/perf_context_imp.h" #include "util/stop_watch.h" namespace rocksdb { extern const uint64_t kBlockBasedTableMagicNumber; extern const std::string kHashIndexPrefixesBlock; extern const std::string kHashIndexPrefixesMetadataBlock; using std::unique_ptr; typedef BlockBasedTable::IndexReader IndexReader; namespace { // The longest the prefix of the cache key used to identify blocks can be. // We are using the fact that we know for Posix files the unique ID is three // varints. // For some reason, compiling for iOS complains that this variable is unused const size_t kMaxCacheKeyPrefixSize __attribute__((unused)) = kMaxVarint64Length * 3 + 1; // Read the block identified by "handle" from "file". // The only relevant option is options.verify_checksums for now. // On failure return non-OK. // On success fill *result and return OK - caller owns *result Status ReadBlockFromFile(RandomAccessFile* file, const Footer& footer, const ReadOptions& options, const BlockHandle& handle, Block** result, Env* env, bool do_uncompress = true) { BlockContents contents; Status s = ReadBlockContents(file, footer, options, handle, &contents, env, do_uncompress); if (s.ok()) { *result = new Block(contents); } return s; } // Delete the resource that is held by the iterator. template <class ResourceType> void DeleteHeldResource(void* arg, void* ignored) { delete reinterpret_cast<ResourceType*>(arg); } // Delete the entry resided in the cache. template <class Entry> void DeleteCachedEntry(const Slice& key, void* value) { auto entry = reinterpret_cast<Entry*>(value); delete entry; } // Release the cached entry and decrement its ref count. 
void ReleaseCachedEntry(void* arg, void* h) { Cache* cache = reinterpret_cast<Cache*>(arg); Cache::Handle* handle = reinterpret_cast<Cache::Handle*>(h); cache->Release(handle); } Slice GetCacheKey(const char* cache_key_prefix, size_t cache_key_prefix_size, const BlockHandle& handle, char* cache_key) { assert(cache_key != nullptr); assert(cache_key_prefix_size != 0); assert(cache_key_prefix_size <= kMaxCacheKeyPrefixSize); memcpy(cache_key, cache_key_prefix, cache_key_prefix_size); char* end = EncodeVarint64(cache_key + cache_key_prefix_size, handle.offset()); return Slice(cache_key, static_cast<size_t>(end - cache_key)); } Cache::Handle* GetEntryFromCache(Cache* block_cache, const Slice& key, Tickers block_cache_miss_ticker, Tickers block_cache_hit_ticker, Statistics* statistics) { auto cache_handle = block_cache->Lookup(key); if (cache_handle != nullptr) { PERF_COUNTER_ADD(block_cache_hit_count, 1); // overall cache hit RecordTick(statistics, BLOCK_CACHE_HIT); // block-type specific cache hit RecordTick(statistics, block_cache_hit_ticker); } else { // overall cache miss RecordTick(statistics, BLOCK_CACHE_MISS); // block-type specific cache miss RecordTick(statistics, block_cache_miss_ticker); } return cache_handle; } } // namespace // -- IndexReader and its subclasses // IndexReader is the interface that provide the functionality for index access. class BlockBasedTable::IndexReader { public: explicit IndexReader(const Comparator* comparator) : comparator_(comparator) {} virtual ~IndexReader() {} // Create an iterator for index access. // An iter is passed in, if it is not null, update this one and return it // If it is null, create a new Iterator virtual Iterator* NewIterator( BlockIter* iter = nullptr, bool total_order_seek = true) = 0; // The size of the index. virtual size_t size() const = 0; // Report an approximation of how much memory has been used other than memory // that was allocated in block cache. virtual size_t ApproximateMemoryUsage() const = 0; protected: const Comparator* comparator_; }; // Index that allows binary search lookup for the first key of each block. // This class can be viewed as a thin wrapper for `Block` class which already // supports binary search. class BinarySearchIndexReader : public IndexReader { public: // Read index from the file and create an intance for // `BinarySearchIndexReader`. // On success, index_reader will be populated; otherwise it will remain // unmodified. static Status Create(RandomAccessFile* file, const Footer& footer, const BlockHandle& index_handle, Env* env, const Comparator* comparator, IndexReader** index_reader) { Block* index_block = nullptr; auto s = ReadBlockFromFile(file, footer, ReadOptions(), index_handle, &index_block, env); if (s.ok()) { *index_reader = new BinarySearchIndexReader(comparator, index_block); } return s; } virtual Iterator* NewIterator( BlockIter* iter = nullptr, bool dont_care = true) override { return index_block_->NewIterator(comparator_, iter, true); } virtual size_t size() const override { return index_block_->size(); } virtual size_t ApproximateMemoryUsage() const override { assert(index_block_); return index_block_->ApproximateMemoryUsage(); } private: BinarySearchIndexReader(const Comparator* comparator, Block* index_block) : IndexReader(comparator), index_block_(index_block) { assert(index_block_ != nullptr); } std::unique_ptr<Block> index_block_; }; // Index that leverages an internal hash table to quicken the lookup for a given // key. 
class HashIndexReader : public IndexReader { public: static Status Create(const SliceTransform* hash_key_extractor, const Footer& footer, RandomAccessFile* file, Env* env, const Comparator* comparator, const BlockHandle& index_handle, Iterator* meta_index_iter, IndexReader** index_reader, bool hash_index_allow_collision) { Block* index_block = nullptr; auto s = ReadBlockFromFile(file, footer, ReadOptions(), index_handle, &index_block, env); if (!s.ok()) { return s; } // Note, failure to create prefix hash index does not need to be a // hard error. We can still fall back to the original binary search index. // So, Create will succeed regardless, from this point on. auto new_index_reader = new HashIndexReader(comparator, index_block); *index_reader = new_index_reader; // Get prefixes block BlockHandle prefixes_handle; s = FindMetaBlock(meta_index_iter, kHashIndexPrefixesBlock, &prefixes_handle); if (!s.ok()) { // TODO: log error return Status::OK(); } // Get index metadata block BlockHandle prefixes_meta_handle; s = FindMetaBlock(meta_index_iter, kHashIndexPrefixesMetadataBlock, &prefixes_meta_handle); if (!s.ok()) { // TODO: log error return Status::OK(); } // Read contents for the blocks BlockContents prefixes_contents; s = ReadBlockContents(file, footer, ReadOptions(), prefixes_handle, &prefixes_contents, env, true /* do decompression */); if (!s.ok()) { return s; } BlockContents prefixes_meta_contents; s = ReadBlockContents(file, footer, ReadOptions(), prefixes_meta_handle, &prefixes_meta_contents, env, true /* do decompression */); if (!s.ok()) { if (prefixes_contents.heap_allocated) { delete[] prefixes_contents.data.data(); } // TODO: log error return Status::OK(); } if (!hash_index_allow_collision) { // TODO: deprecate once hash_index_allow_collision proves to be stable. BlockHashIndex* hash_index = nullptr; s = CreateBlockHashIndex(hash_key_extractor, prefixes_contents.data, prefixes_meta_contents.data, &hash_index); // TODO: log error if (s.ok()) { new_index_reader->index_block_->SetBlockHashIndex(hash_index); new_index_reader->OwnPrefixesContents(prefixes_contents); } } else { BlockPrefixIndex* prefix_index = nullptr; s = BlockPrefixIndex::Create(hash_key_extractor, prefixes_contents.data, prefixes_meta_contents.data, &prefix_index); // TODO: log error if (s.ok()) { new_index_reader->index_block_->SetBlockPrefixIndex(prefix_index); } } // Always release prefix meta block if (prefixes_meta_contents.heap_allocated) { delete[] prefixes_meta_contents.data.data(); } // Release prefix content block if we don't own it. 
if (!new_index_reader->own_prefixes_contents_) { if (prefixes_contents.heap_allocated) { delete[] prefixes_contents.data.data(); } } return Status::OK(); } virtual Iterator* NewIterator( BlockIter* iter = nullptr, bool total_order_seek = true) override { return index_block_->NewIterator(comparator_, iter, total_order_seek); } virtual size_t size() const override { return index_block_->size(); } virtual size_t ApproximateMemoryUsage() const override { assert(index_block_); return index_block_->ApproximateMemoryUsage() + prefixes_contents_.data.size(); } private: HashIndexReader(const Comparator* comparator, Block* index_block) : IndexReader(comparator), index_block_(index_block), own_prefixes_contents_(false) { assert(index_block_ != nullptr); } ~HashIndexReader() { if (own_prefixes_contents_ && prefixes_contents_.heap_allocated) { delete[] prefixes_contents_.data.data(); } } void OwnPrefixesContents(const BlockContents& prefixes_contents) { prefixes_contents_ = prefixes_contents; own_prefixes_contents_ = true; } std::unique_ptr<Block> index_block_; bool own_prefixes_contents_; BlockContents prefixes_contents_; }; struct BlockBasedTable::Rep { Rep(const EnvOptions& storage_options, const BlockBasedTableOptions& table_opt, const InternalKeyComparator& internal_comparator) : soptions(storage_options), table_options(table_opt), filter_policy(table_opt.filter_policy.get()), internal_comparator(internal_comparator) {} Options options; const EnvOptions& soptions; const BlockBasedTableOptions& table_options; const FilterPolicy* const filter_policy; const InternalKeyComparator& internal_comparator; Status status; unique_ptr<RandomAccessFile> file; char cache_key_prefix[kMaxCacheKeyPrefixSize]; size_t cache_key_prefix_size = 0; char compressed_cache_key_prefix[kMaxCacheKeyPrefixSize]; size_t compressed_cache_key_prefix_size = 0; // Footer contains the fixed table information Footer footer; // index_reader and filter will be populated and used only when // options.block_cache is nullptr; otherwise we will get the index block via // the block cache. unique_ptr<IndexReader> index_reader; unique_ptr<FilterBlockReader> filter; std::shared_ptr<const TableProperties> table_properties; BlockBasedTableOptions::IndexType index_type; bool hash_index_allow_collision; // TODO(kailiu) It is very ugly to use internal key in table, since table // module should not be relying on db module. However to make things easier // and compatible with existing code, we introduce a wrapper that allows // block to extract prefix without knowing if a key is internal or not. unique_ptr<SliceTransform> internal_prefix_transform; }; BlockBasedTable::~BlockBasedTable() { delete rep_; } // CachableEntry represents the entries that *may* be fetched from block cache. // field `value` is the item we want to get. // field `cache_handle` is the cache handle to the block cache. If the value // was not read from cache, `cache_handle` will be nullptr. template <class TValue> struct BlockBasedTable::CachableEntry { CachableEntry(TValue* value, Cache::Handle* cache_handle) : value(value) , cache_handle(cache_handle) { } CachableEntry(): CachableEntry(nullptr, nullptr) { } void Release(Cache* cache) { if (cache_handle) { cache->Release(cache_handle); value = nullptr; cache_handle = nullptr; } } TValue* value = nullptr; // if the entry is from the cache, cache_handle will be populated. Cache::Handle* cache_handle = nullptr; }; // Helper function to setup the cache key's prefix for the Table. 
void BlockBasedTable::SetupCacheKeyPrefix(Rep* rep) { assert(kMaxCacheKeyPrefixSize >= 10); rep->cache_key_prefix_size = 0; rep->compressed_cache_key_prefix_size = 0; if (rep->table_options.block_cache != nullptr) { GenerateCachePrefix(rep->table_options.block_cache.get(), rep->file.get(), &rep->cache_key_prefix[0], &rep->cache_key_prefix_size); } if (rep->table_options.block_cache_compressed != nullptr) { GenerateCachePrefix(rep->table_options.block_cache_compressed.get(), rep->file.get(), &rep->compressed_cache_key_prefix[0], &rep->compressed_cache_key_prefix_size); } } void BlockBasedTable::GenerateCachePrefix(Cache* cc, RandomAccessFile* file, char* buffer, size_t* size) { // generate an id from the file *size = file->GetUniqueId(buffer, kMaxCacheKeyPrefixSize); // If the prefix wasn't generated or was too long, // create one from the cache. if (*size == 0) { char* end = EncodeVarint64(buffer, cc->NewId()); *size = static_cast<size_t>(end - buffer); } } void BlockBasedTable::GenerateCachePrefix(Cache* cc, WritableFile* file, char* buffer, size_t* size) { // generate an id from the file *size = file->GetUniqueId(buffer, kMaxCacheKeyPrefixSize); // If the prefix wasn't generated or was too long, // create one from the cache. if (*size == 0) { char* end = EncodeVarint64(buffer, cc->NewId()); *size = static_cast<size_t>(end - buffer); } } Status BlockBasedTable::Open(const Options& options, const EnvOptions& soptions, const BlockBasedTableOptions& table_options, const InternalKeyComparator& internal_comparator, unique_ptr<RandomAccessFile>&& file, uint64_t file_size, unique_ptr<TableReader>* table_reader) { table_reader->reset(); Footer footer(kBlockBasedTableMagicNumber); auto s = ReadFooterFromFile(file.get(), file_size, &footer); if (!s.ok()) return s; // We've successfully read the footer and the index block: we're // ready to serve requests. Rep* rep = new BlockBasedTable::Rep( soptions, table_options, internal_comparator); rep->options = options; rep->file = std::move(file); rep->footer = footer; rep->index_type = table_options.index_type; rep->hash_index_allow_collision = table_options.hash_index_allow_collision; SetupCacheKeyPrefix(rep); unique_ptr<BlockBasedTable> new_table(new BlockBasedTable(rep)); // Read meta index std::unique_ptr<Block> meta; std::unique_ptr<Iterator> meta_iter; s = ReadMetaBlock(rep, &meta, &meta_iter); // Read the properties bool found_properties_block = true; s = SeekToPropertiesBlock(meta_iter.get(), &found_properties_block); if (found_properties_block) { s = meta_iter->status(); TableProperties* table_properties = nullptr; if (s.ok()) { s = ReadProperties(meta_iter->value(), rep->file.get(), rep->footer, rep->options.env, rep->options.info_log.get(), &table_properties); } if (!s.ok()) { auto err_msg = "[Warning] Encountered error while reading data from properties " "block " + s.ToString(); Log(rep->options.info_log, "%s", err_msg.c_str()); } else { rep->table_properties.reset(table_properties); } } else { Log(WARN_LEVEL, rep->options.info_log, "Cannot find Properties block from file."); } // Will use block cache for index/filter blocks access? 
if (table_options.block_cache && table_options.cache_index_and_filter_blocks) { // Hack: Call NewIndexIterator() to implicitly add index to the block_cache unique_ptr<Iterator> iter(new_table->NewIndexIterator(ReadOptions())); s = iter->status(); if (s.ok()) { // Hack: Call GetFilter() to implicitly add filter to the block_cache auto filter_entry = new_table->GetFilter(); filter_entry.Release(table_options.block_cache.get()); } } else { // If we don't use block cache for index/filter blocks access, we'll // pre-load these blocks, which will kept in member variables in Rep // and with a same life-time as this table object. IndexReader* index_reader = nullptr; // TODO: we never really verify check sum for index block s = new_table->CreateIndexReader(&index_reader, meta_iter.get()); if (s.ok()) { rep->index_reader.reset(index_reader); // Set filter block if (rep->filter_policy) { std::string key = kFilterBlockPrefix; key.append(rep->filter_policy->Name()); BlockHandle handle; if (FindMetaBlock(meta_iter.get(), key, &handle).ok()) { rep->filter.reset(ReadFilter(handle, rep)); } } } else { delete index_reader; } } if (s.ok()) { *table_reader = std::move(new_table); } return s; } void BlockBasedTable::SetupForCompaction() { switch (rep_->options.access_hint_on_compaction_start) { case Options::NONE: break; case Options::NORMAL: rep_->file->Hint(RandomAccessFile::NORMAL); break; case Options::SEQUENTIAL: rep_->file->Hint(RandomAccessFile::SEQUENTIAL); break; case Options::WILLNEED: rep_->file->Hint(RandomAccessFile::WILLNEED); break; default: assert(false); } compaction_optimized_ = true; } std::shared_ptr<const TableProperties> BlockBasedTable::GetTableProperties() const { return rep_->table_properties; } size_t BlockBasedTable::ApproximateMemoryUsage() const { size_t usage = 0; if (rep_->filter) { usage += rep_->filter->ApproximateMemoryUsage(); } if (rep_->index_reader) { usage += rep_->index_reader->ApproximateMemoryUsage(); } return usage; } // Load the meta-block from the file. On success, return the loaded meta block // and its iterator. Status BlockBasedTable::ReadMetaBlock( Rep* rep, std::unique_ptr<Block>* meta_block, std::unique_ptr<Iterator>* iter) { // TODO(sanjay): Skip this if footer.metaindex_handle() size indicates // it is an empty block. // TODO: we never really verify check sum for meta index block Block* meta = nullptr; Status s = ReadBlockFromFile( rep->file.get(), rep->footer, ReadOptions(), rep->footer.metaindex_handle(), &meta, rep->options.env); if (!s.ok()) { auto err_msg = "[Warning] Encountered error while reading data from properties" "block " + s.ToString(); Log(rep->options.info_log, "%s", err_msg.c_str()); } if (!s.ok()) { delete meta; return s; } meta_block->reset(meta); // meta block uses bytewise comparator. 
iter->reset(meta->NewIterator(BytewiseComparator())); return Status::OK(); } Status BlockBasedTable::GetDataBlockFromCache( const Slice& block_cache_key, const Slice& compressed_block_cache_key, Cache* block_cache, Cache* block_cache_compressed, Statistics* statistics, const ReadOptions& read_options, BlockBasedTable::CachableEntry<Block>* block) { Status s; Block* compressed_block = nullptr; Cache::Handle* block_cache_compressed_handle = nullptr; // Lookup uncompressed cache first if (block_cache != nullptr) { block->cache_handle = GetEntryFromCache(block_cache, block_cache_key, BLOCK_CACHE_DATA_MISS, BLOCK_CACHE_DATA_HIT, statistics); if (block->cache_handle != nullptr) { block->value = reinterpret_cast<Block*>(block_cache->Value(block->cache_handle)); return s; } } // If not found, search from the compressed block cache. assert(block->cache_handle == nullptr && block->value == nullptr); if (block_cache_compressed == nullptr) { return s; } assert(!compressed_block_cache_key.empty()); block_cache_compressed_handle = block_cache_compressed->Lookup(compressed_block_cache_key); // if we found in the compressed cache, then uncompress and insert into // uncompressed cache if (block_cache_compressed_handle == nullptr) { RecordTick(statistics, BLOCK_CACHE_COMPRESSED_MISS); return s; } // found compressed block RecordTick(statistics, BLOCK_CACHE_COMPRESSED_HIT); compressed_block = reinterpret_cast<Block*>( block_cache_compressed->Value(block_cache_compressed_handle)); assert(compressed_block->compression_type() != kNoCompression); // Retrieve the uncompressed contents into a new buffer BlockContents contents; s = UncompressBlockContents(compressed_block->data(), compressed_block->size(), &contents); // Insert uncompressed block into block cache if (s.ok()) { block->value = new Block(contents); // uncompressed block assert(block->value->compression_type() == kNoCompression); if (block_cache != nullptr && block->value->cachable() && read_options.fill_cache) { block->cache_handle = block_cache->Insert(block_cache_key, block->value, block->value->size(), &DeleteCachedEntry<Block>); assert(reinterpret_cast<Block*>( block_cache->Value(block->cache_handle)) == block->value); } } // Release hold on compressed cache entry block_cache_compressed->Release(block_cache_compressed_handle); return s; } Status BlockBasedTable::PutDataBlockToCache( const Slice& block_cache_key, const Slice& compressed_block_cache_key, Cache* block_cache, Cache* block_cache_compressed, const ReadOptions& read_options, Statistics* statistics, CachableEntry<Block>* block, Block* raw_block) { assert(raw_block->compression_type() == kNoCompression || block_cache_compressed != nullptr); Status s; // Retrieve the uncompressed contents into a new buffer BlockContents contents; if (raw_block->compression_type() != kNoCompression) { s = UncompressBlockContents(raw_block->data(), raw_block->size(), &contents); } if (!s.ok()) { delete raw_block; return s; } if (raw_block->compression_type() != kNoCompression) { block->value = new Block(contents); // uncompressed block } else { block->value = raw_block; raw_block = nullptr; } // Insert compressed block into compressed block cache. // Release the hold on the compressed cache entry immediately. 
if (block_cache_compressed != nullptr && raw_block != nullptr && raw_block->cachable()) { auto cache_handle = block_cache_compressed->Insert( compressed_block_cache_key, raw_block, raw_block->size(), &DeleteCachedEntry<Block>); block_cache_compressed->Release(cache_handle); RecordTick(statistics, BLOCK_CACHE_COMPRESSED_MISS); // Avoid the following code to delete this cached block. raw_block = nullptr; } delete raw_block; // insert into uncompressed block cache assert((block->value->compression_type() == kNoCompression)); if (block_cache != nullptr && block->value->cachable()) { block->cache_handle = block_cache->Insert(block_cache_key, block->value, block->value->size(), &DeleteCachedEntry<Block>); RecordTick(statistics, BLOCK_CACHE_ADD); assert(reinterpret_cast<Block*>(block_cache->Value(block->cache_handle)) == block->value); } return s; } FilterBlockReader* BlockBasedTable::ReadFilter(const BlockHandle& filter_handle, BlockBasedTable::Rep* rep, size_t* filter_size) { // TODO: We might want to unify with ReadBlockFromFile() if we start // requiring checksum verification in Table::Open. ReadOptions opt; BlockContents block; if (!ReadBlockContents(rep->file.get(), rep->footer, opt, filter_handle, &block, rep->options.env, false).ok()) { return nullptr; } if (filter_size) { *filter_size = block.data.size(); } return new FilterBlockReader( rep->options, rep->table_options, block.data, block.heap_allocated); } BlockBasedTable::CachableEntry<FilterBlockReader> BlockBasedTable::GetFilter( bool no_io) const { // filter pre-populated if (rep_->filter != nullptr) { return {rep_->filter.get(), nullptr /* cache handle */}; } Cache* block_cache = rep_->table_options.block_cache.get(); if (rep_->filter_policy == nullptr /* do not use filter */ || block_cache == nullptr /* no block cache at all */) { return {nullptr /* filter */, nullptr /* cache handle */}; } // Fetching from the cache char cache_key[kMaxCacheKeyPrefixSize + kMaxVarint64Length]; auto key = GetCacheKey( rep_->cache_key_prefix, rep_->cache_key_prefix_size, rep_->footer.metaindex_handle(), cache_key ); Statistics* statistics = rep_->options.statistics.get(); auto cache_handle = GetEntryFromCache(block_cache, key, BLOCK_CACHE_FILTER_MISS, BLOCK_CACHE_FILTER_HIT, statistics); FilterBlockReader* filter = nullptr; if (cache_handle != nullptr) { filter = reinterpret_cast<FilterBlockReader*>( block_cache->Value(cache_handle)); } else if (no_io) { // Do not invoke any io. return CachableEntry<FilterBlockReader>(); } else { size_t filter_size = 0; std::unique_ptr<Block> meta; std::unique_ptr<Iterator> iter; auto s = ReadMetaBlock(rep_, &meta, &iter); if (s.ok()) { std::string filter_block_key = kFilterBlockPrefix; filter_block_key.append(rep_->filter_policy->Name()); BlockHandle handle; if (FindMetaBlock(iter.get(), filter_block_key, &handle).ok()) { filter = ReadFilter(handle, rep_, &filter_size); assert(filter); assert(filter_size > 0); cache_handle = block_cache->Insert( key, filter, filter_size, &DeleteCachedEntry<FilterBlockReader>); RecordTick(statistics, BLOCK_CACHE_ADD); } } } return { filter, cache_handle }; } Iterator* BlockBasedTable::NewIndexIterator(const ReadOptions& read_options, BlockIter* input_iter) { // index reader has already been pre-populated. 
if (rep_->index_reader) { return rep_->index_reader->NewIterator( input_iter, read_options.total_order_seek); } bool no_io = read_options.read_tier == kBlockCacheTier; Cache* block_cache = rep_->table_options.block_cache.get(); char cache_key[kMaxCacheKeyPrefixSize + kMaxVarint64Length]; auto key = GetCacheKey(rep_->cache_key_prefix, rep_->cache_key_prefix_size, rep_->footer.index_handle(), cache_key); Statistics* statistics = rep_->options.statistics.get(); auto cache_handle = GetEntryFromCache(block_cache, key, BLOCK_CACHE_INDEX_MISS, BLOCK_CACHE_INDEX_HIT, statistics); if (cache_handle == nullptr && no_io) { if (input_iter != nullptr) { input_iter->SetStatus(Status::Incomplete("no blocking io")); return input_iter; } else { return NewErrorIterator(Status::Incomplete("no blocking io")); } } IndexReader* index_reader = nullptr; if (cache_handle != nullptr) { index_reader = reinterpret_cast<IndexReader*>(block_cache->Value(cache_handle)); } else { // Create index reader and put it in the cache. Status s; s = CreateIndexReader(&index_reader); if (!s.ok()) { // make sure if something goes wrong, index_reader shall remain intact. assert(index_reader == nullptr); if (input_iter != nullptr) { input_iter->SetStatus(s); return input_iter; } else { return NewErrorIterator(s); } } cache_handle = block_cache->Insert(key, index_reader, index_reader->size(), &DeleteCachedEntry<IndexReader>); RecordTick(statistics, BLOCK_CACHE_ADD); } assert(cache_handle); auto* iter = index_reader->NewIterator( input_iter, read_options.total_order_seek); iter->RegisterCleanup(&ReleaseCachedEntry, block_cache, cache_handle); return iter; } // Convert an index iterator value (i.e., an encoded BlockHandle) // into an iterator over the contents of the corresponding block. // If input_iter is null, new a iterator // If input_iter is not null, update this iter and return it Iterator* BlockBasedTable::NewDataBlockIterator(Rep* rep, const ReadOptions& ro, const Slice& index_value, BlockIter* input_iter) { const bool no_io = (ro.read_tier == kBlockCacheTier); Cache* block_cache = rep->table_options.block_cache.get(); Cache* block_cache_compressed = rep->table_options.block_cache_compressed.get(); CachableEntry<Block> block; BlockHandle handle; Slice input = index_value; // We intentionally allow extra stuff in index_value so that we // can add more features in the future. Status s = handle.DecodeFrom(&input); if (!s.ok()) { if (input_iter != nullptr) { input_iter->SetStatus(s); return input_iter; } else { return NewErrorIterator(s); } } // If either block cache is enabled, we'll try to read from it. 
if (block_cache != nullptr || block_cache_compressed != nullptr) { Statistics* statistics = rep->options.statistics.get(); char cache_key[kMaxCacheKeyPrefixSize + kMaxVarint64Length]; char compressed_cache_key[kMaxCacheKeyPrefixSize + kMaxVarint64Length]; Slice key, /* key to the block cache */ ckey /* key to the compressed block cache */; // create key for block cache if (block_cache != nullptr) { key = GetCacheKey(rep->cache_key_prefix, rep->cache_key_prefix_size, handle, cache_key); } if (block_cache_compressed != nullptr) { ckey = GetCacheKey(rep->compressed_cache_key_prefix, rep->compressed_cache_key_prefix_size, handle, compressed_cache_key); } s = GetDataBlockFromCache(key, ckey, block_cache, block_cache_compressed, statistics, ro, &block); if (block.value == nullptr && !no_io && ro.fill_cache) { Block* raw_block = nullptr; { StopWatch sw(rep->options.env, statistics, READ_BLOCK_GET_MICROS); s = ReadBlockFromFile(rep->file.get(), rep->footer, ro, handle, &raw_block, rep->options.env, block_cache_compressed == nullptr); } if (s.ok()) { s = PutDataBlockToCache(key, ckey, block_cache, block_cache_compressed, ro, statistics, &block, raw_block); } } } // Didn't get any data from block caches. if (block.value == nullptr) { if (no_io) { // Could not read from block_cache and can't do IO if (input_iter != nullptr) { input_iter->SetStatus(Status::Incomplete("no blocking io")); return input_iter; } else { return NewErrorIterator(Status::Incomplete("no blocking io")); } } s = ReadBlockFromFile(rep->file.get(), rep->footer, ro, handle, &block.value, rep->options.env); } Iterator* iter; if (block.value != nullptr) { iter = block.value->NewIterator(&rep->internal_comparator, input_iter); if (block.cache_handle != nullptr) { iter->RegisterCleanup(&ReleaseCachedEntry, block_cache, block.cache_handle); } else { iter->RegisterCleanup(&DeleteHeldResource<Block>, block.value, nullptr); } } else { if (input_iter != nullptr) { input_iter->SetStatus(s); iter = input_iter; } else { iter = NewErrorIterator(s); } } return iter; } class BlockBasedTable::BlockEntryIteratorState : public TwoLevelIteratorState { public: BlockEntryIteratorState(BlockBasedTable* table, const ReadOptions& read_options) : TwoLevelIteratorState(table->rep_->options.prefix_extractor != nullptr), table_(table), read_options_(read_options) {} Iterator* NewSecondaryIterator(const Slice& index_value) override { return NewDataBlockIterator(table_->rep_, read_options_, index_value); } bool PrefixMayMatch(const Slice& internal_key) override { if (read_options_.total_order_seek) { return true; } return table_->PrefixMayMatch(internal_key); } private: // Don't own table_ BlockBasedTable* table_; const ReadOptions read_options_; }; // This will be broken if the user specifies an unusual implementation // of Options.comparator, or if the user specifies an unusual // definition of prefixes in BlockBasedTableOptions.filter_policy. // In particular, we require the following three properties: // // 1) key.starts_with(prefix(key)) // 2) Compare(prefix(key), key) <= 0. // 3) If Compare(key1, key2) <= 0, then Compare(prefix(key1), prefix(key2)) <= 0 // // Otherwise, this method guarantees no I/O will be incurred. // // REQUIRES: this method shouldn't be called while the DB lock is held. 
bool BlockBasedTable::PrefixMayMatch(const Slice& internal_key) { if (!rep_->filter_policy) { return true; } assert(rep_->options.prefix_extractor != nullptr); auto prefix = rep_->options.prefix_extractor->Transform( ExtractUserKey(internal_key)); InternalKey internal_key_prefix(prefix, 0, kTypeValue); auto internal_prefix = internal_key_prefix.Encode(); bool may_match = true; Status s; // To prevent any io operation in this method, we set `read_tier` to make // sure we always read index or filter only when they have already been // loaded to memory. ReadOptions no_io_read_options; no_io_read_options.read_tier = kBlockCacheTier; unique_ptr<Iterator> iiter(NewIndexIterator(no_io_read_options)); iiter->Seek(internal_prefix); if (!iiter->Valid()) { // we're past end of file // if it's incomplete, it means that we avoided I/O // and we're not really sure that we're past the end // of the file may_match = iiter->status().IsIncomplete(); } else if (ExtractUserKey(iiter->key()).starts_with( ExtractUserKey(internal_prefix))) { // we need to check for this subtle case because our only // guarantee is that "the key is a string >= last key in that data // block" according to the doc/table_format.txt spec. // // Suppose iiter->key() starts with the desired prefix; it is not // necessarily the case that the corresponding data block will // contain the prefix, since iiter->key() need not be in the // block. However, the next data block may contain the prefix, so // we return true to play it safe. may_match = true; } else { // iiter->key() does NOT start with the desired prefix. Because // Seek() finds the first key that is >= the seek target, this // means that iiter->key() > prefix. Thus, any data blocks coming // after the data block corresponding to iiter->key() cannot // possibly contain the key. Thus, the corresponding data block // is the only one which could potentially contain the prefix. Slice handle_value = iiter->value(); BlockHandle handle; s = handle.DecodeFrom(&handle_value); assert(s.ok()); auto filter_entry = GetFilter(true /* no io */); may_match = filter_entry.value == nullptr || filter_entry.value->PrefixMayMatch(handle.offset(), prefix); filter_entry.Release(rep_->table_options.block_cache.get()); } Statistics* statistics = rep_->options.statistics.get(); RecordTick(statistics, BLOOM_FILTER_PREFIX_CHECKED); if (!may_match) { RecordTick(statistics, BLOOM_FILTER_PREFIX_USEFUL); } return may_match; } Iterator* BlockBasedTable::NewIterator(const ReadOptions& read_options, Arena* arena) { return NewTwoLevelIterator(new BlockEntryIteratorState(this, read_options), NewIndexIterator(read_options), arena); } Status BlockBasedTable::Get( const ReadOptions& read_options, const Slice& key, void* handle_context, bool (*result_handler)(void* handle_context, const ParsedInternalKey& k, const Slice& v), void (*mark_key_may_exist_handler)(void* handle_context)) { Status s; BlockIter iiter; NewIndexIterator(read_options, &iiter); auto filter_entry = GetFilter(read_options.read_tier == kBlockCacheTier); FilterBlockReader* filter = filter_entry.value; bool done = false; for (iiter.Seek(key); iiter.Valid() && !done; iiter.Next()) { Slice handle_value = iiter.value(); BlockHandle handle; bool may_not_exist_in_filter = filter != nullptr && handle.DecodeFrom(&handle_value).ok() && !filter->KeyMayMatch(handle.offset(), ExtractUserKey(key)); if (may_not_exist_in_filter) { // Not found // TODO: think about interaction with Merge. If a user key cannot // cross one data block, we should be fine. 
RecordTick(rep_->options.statistics.get(), BLOOM_FILTER_USEFUL); break; } else { BlockIter biter; NewDataBlockIterator(rep_, read_options, iiter.value(), &biter); if (read_options.read_tier && biter.status().IsIncomplete()) { // couldn't get block from block_cache // Update Saver.state to Found because we are only looking for whether // we can guarantee the key is not there when "no_io" is set (*mark_key_may_exist_handler)(handle_context); break; } if (!biter.status().ok()) { s = biter.status(); break; } // Call the *saver function on each entry/block until it returns false for (biter.Seek(key); biter.Valid(); biter.Next()) { ParsedInternalKey parsed_key; if (!ParseInternalKey(biter.key(), &parsed_key)) { s = Status::Corruption(Slice()); } if (!(*result_handler)(handle_context, parsed_key, biter.value())) { done = true; break; } } s = biter.status(); } } filter_entry.Release(rep_->table_options.block_cache.get()); if (s.ok()) { s = iiter.status(); } return s; } bool BlockBasedTable::TEST_KeyInCache(const ReadOptions& options, const Slice& key) { std::unique_ptr<Iterator> iiter(NewIndexIterator(options)); iiter->Seek(key); assert(iiter->Valid()); CachableEntry<Block> block; BlockHandle handle; Slice input = iiter->value(); Status s = handle.DecodeFrom(&input); assert(s.ok()); Cache* block_cache = rep_->table_options.block_cache.get(); assert(block_cache != nullptr); char cache_key_storage[kMaxCacheKeyPrefixSize + kMaxVarint64Length]; Slice cache_key = GetCacheKey(rep_->cache_key_prefix, rep_->cache_key_prefix_size, handle, cache_key_storage); Slice ckey; s = GetDataBlockFromCache(cache_key, ckey, block_cache, nullptr, nullptr, options, &block); assert(s.ok()); bool in_cache = block.value != nullptr; if (in_cache) { ReleaseCachedEntry(block_cache, block.cache_handle); } return in_cache; } // REQUIRES: The following fields of rep_ should have already been populated: // 1. file // 2. index_handle, // 3. options // 4. internal_comparator // 5. index_type Status BlockBasedTable::CreateIndexReader(IndexReader** index_reader, Iterator* preloaded_meta_index_iter) { // Some old version of block-based tables don't have index type present in // table properties. If that's the case we can safely use the kBinarySearch. auto index_type_on_file = BlockBasedTableOptions::kBinarySearch; if (rep_->table_properties) { auto& props = rep_->table_properties->user_collected_properties; auto pos = props.find(BlockBasedTablePropertyNames::kIndexType); if (pos != props.end()) { index_type_on_file = static_cast<BlockBasedTableOptions::IndexType>( DecodeFixed32(pos->second.c_str())); } } auto file = rep_->file.get(); auto env = rep_->options.env; auto comparator = &rep_->internal_comparator; const Footer& footer = rep_->footer; if (index_type_on_file == BlockBasedTableOptions::kHashSearch && rep_->options.prefix_extractor == nullptr) { Log(rep_->options.info_log, "BlockBasedTableOptions::kHashSearch requires " "options.prefix_extractor to be set." 
" Fall back to binary seach index."); index_type_on_file = BlockBasedTableOptions::kBinarySearch; } switch (index_type_on_file) { case BlockBasedTableOptions::kBinarySearch: { return BinarySearchIndexReader::Create( file, footer, footer.index_handle(), env, comparator, index_reader); } case BlockBasedTableOptions::kHashSearch: { std::unique_ptr<Block> meta_guard; std::unique_ptr<Iterator> meta_iter_guard; auto meta_index_iter = preloaded_meta_index_iter; if (meta_index_iter == nullptr) { auto s = ReadMetaBlock(rep_, &meta_guard, &meta_iter_guard); if (!s.ok()) { // we simply fall back to binary search in case there is any // problem with prefix hash index loading. Log(rep_->options.info_log, "Unable to read the metaindex block." " Fall back to binary seach index."); return BinarySearchIndexReader::Create( file, footer, footer.index_handle(), env, comparator, index_reader); } meta_index_iter = meta_iter_guard.get(); } // We need to wrap data with internal_prefix_transform to make sure it can // handle prefix correctly. rep_->internal_prefix_transform.reset( new InternalKeySliceTransform(rep_->options.prefix_extractor.get())); return HashIndexReader::Create( rep_->internal_prefix_transform.get(), footer, file, env, comparator, footer.index_handle(), meta_index_iter, index_reader, rep_->hash_index_allow_collision); } default: { std::string error_message = "Unrecognized index type: " + std::to_string(rep_->index_type); return Status::InvalidArgument(error_message.c_str()); } } } uint64_t BlockBasedTable::ApproximateOffsetOf(const Slice& key) { unique_ptr<Iterator> index_iter(NewIndexIterator(ReadOptions())); index_iter->Seek(key); uint64_t result; if (index_iter->Valid()) { BlockHandle handle; Slice input = index_iter->value(); Status s = handle.DecodeFrom(&input); if (s.ok()) { result = handle.offset(); } else { // Strange: we can't decode the block handle in the index block. // We'll just return the offset of the metaindex block, which is // close to the whole file size for this case. result = rep_->footer.metaindex_handle().offset(); } } else { // key is past the last key in the file. If table_properties is not // available, approximate the offset by returning the offset of the // metaindex block (which is right near the end of the file). result = 0; if (rep_->table_properties) { result = rep_->table_properties->data_size; } // table_properties is not present in the table. if (result == 0) { result = rep_->footer.metaindex_handle().offset(); } } return result; } bool BlockBasedTable::TEST_filter_block_preloaded() const { return rep_->filter != nullptr; } bool BlockBasedTable::TEST_index_reader_preloaded() const { return rep_->index_reader != nullptr; } } // namespace rocksdb
bsd-3-clause
DashaLadatko/Diplom
frontend/views/workshop/_form.php
5072
<?php use yii\helpers\Html; use yii\widgets\ActiveForm; use dosamigos\ckeditor\CKEditor; use yii\helpers\ArrayHelper; use common\models\Topic; use yii\helpers\Url; /* @var $this yii\web\View */ /* @var $model common\models\Workshop */ /* @var $form yii\widgets\ActiveForm */ ?> <div class="workshop-form"> <?php $form = ActiveForm::begin(['options' => ['enctype' => 'multipart/form-data']]) ?> <?= $form->field($model, 'topic_id')->dropDownList(ArrayHelper::map(Topic::find()->all(), 'id', 'name')) ?> <?= $form->field($model, 'name')->textInput(['maxlength' => true]) ?> <!-- --><?//= $form->field($model, 'description')->textarea(['rows' => 6]) ?> <?= $form->field($model, 'description')->widget(CKEditor::className(), [ 'options' => ['rows' => 6], 'preset' => 'basic' ]) ?> <?= $form->field($model, 'type')->dropDownList([ 'practical' => 'Практична робота', 'laboratory' => 'Лабораторна робота', 'lecture' => 'Лекція', ], ['prompt' => 'Виберіть тип роботи...']) ?> <?php $initialPreview = []; $initialPreviewConfig = []; if ($model->attachments) { foreach ($model->attachments as $item) { $initialPreview[] = Html::img($item->miniature, [ 'class' => 'file-preview-image', 'alt' => $item->name, 'title' => $item->name, 'head' => '100px', 'width' => '100px' ]); $initialPreviewConfig[] = [ 'width' => '120px', 'url' => Url::to(['attachment/delete']), 'key' => "$item->id\" name=\"$item->type", 'caption' => $item->name ]; } } $config = [ 'options' => ['multiple' => true], 'pluginOptions' => [ 'uploadAsync' => false, 'showRemove' => true, 'showUpload' => false, 'overwriteInitial' => false, 'initialPreviewShowDelete' => true, 'initialPreview' => $initialPreview, 'initialPreviewConfig' => $initialPreviewConfig, ], 'pluginEvents' => [ 'filepredelete' => "function(event, key) { return (!confirm('Вы уверены, что хотите удалить?')); }", 'filedelete' => 'function(event, key) { console.log(\'File is delete\'); }', ] ]; if (!$model->isNewRecord) { $config['pluginOptions']['otherActionButtons'] = ' <button type="button" class="btn-download btn btn-xs btn-default" id="Download" title="Download" {dataKey} ><i class="glyphicon glyphicon-download"></i></button>'; } echo $form->field($model, 'files[]')->widget(\kartik\file\FileInput::classname(), $config); ?> <div class="form-group"> <?= Html::submitButton($model->isNewRecord ? 'Створити' : 'Редагувати', ['class' => $model->isNewRecord ? 'btn btn-success' : 'btn btn-primary']) ?> </div> <?php ActiveForm::end(); $this->registerJs(" var index; var elements = document.getElementsByName(\"unknown\"); for (index = 0; index < elements.length; index++) { if ($(elements[index]).attr(\"id\") == 'Print') { elements[index].remove(); } if ($(elements[index]).attr(\"id\") == 'View') { elements[index].setAttribute(\"disabled\",\"disabled\"); } } var ind; var doc = document.getElementsByName(\"document\"); for (ind = 0; ind < doc.length; ind++) { if ($(doc[ind]).attr(\"id\") == 'Print') { doc[ind].remove(); } } $('.btn-download').click(function () { window.open('" . Url::toRoute('attachment/download') . "?id='+$(this).attr('data-key')); }); $('.btn-print').click(function () { window.open('" . Url::toRoute('attachment/print') . "?id='+$(this).attr('data-key')); }); $('.btn-view').click(function () { $.ajax({ url: \"" . Url::toRoute('attachment/view') . 
"\", type: \"POST\", data: {'key': $(this).attr('data-key')}, success: function (response) { var data = JSON.parse(response); if(data.open){ document.getElementById(\"pop-up-image\").src = data.url; document.getElementById('modalHeader').innerHTML = ' <h4>' + data.name + '</h4> '; $('#modal').data('bs.modal').isShown ? $('#modal').find('#modalContent').load($(this).attr('value')) : $('#modal').modal('show').find('#modalContent').load($(this).attr('value')); } else if (data && !data.open) { window.open(data.url); } } })});"); ?> </div>
bsd-3-clause
matthieuriolo/cii
modules/cii/vendor/cii/fields/BooleanField.php
416
<?php

namespace cii\fields;

use Yii;
use cii\widgets\Toggler;

class BooleanField extends AbstractField {
    public function getView($model) {
        return Yii::$app->formatter->asBoolean($this->getRaw($model));
    }

    public function getEditable($model, $form) {
        return Toggler::widget([
            'model' => $model,
            'property' => $this->attribute,
            'form' => $form
        ]);
    }
}
bsd-3-clause
erictj/protean
modules/thirdparty/elastica/lib/Elastica/Filter/Term.php
1136
<?php
/**
 * Term filter
 *
 * @uses Elastica_Filter_Abstract
 * @category Xodoa
 * @package Elastica
 * @author Nicolas Ruflin <spam@ruflin.com>
 * @link http://www.elasticsearch.com/docs/elasticsearch/rest_api/query_dsl/term_query/
 */
class Elastica_Filter_Term extends Elastica_Filter_Abstract
{
    /**
     * Construct term filter
     *
     * @param array $term Term array
     */
    public function __construct(array $term = array())
    {
        $this->setRawTerm($term);
    }

    /**
     * Sets/overwrites key and term directly
     *
     * @param array $term Key value pair
     * @return Elastica_Filter_Term Filter object
     */
    public function setRawTerm(array $term)
    {
        return $this->setParams($term);
    }

    /**
     * Adds a term to the term filter
     *
     * @param string $key Key to query
     * @param string|array $value Value(s) for the query. Boost can be set with array
     * @return Elastica_Filter_Term Filter object
     */
    public function setTerm($key, $value)
    {
        return $this->setRawTerm(array($key => $value));
    }
}
bsd-3-clause
Phenomics/ontolib
ontolib-io/src/main/java/com/github/phenomics/ontolib/io/scoredist/H2ScoreDistributionReader.java
7039
package com.github.phenomics.ontolib.io.scoredist; import com.github.phenomics.ontolib.base.OntoLibException; import com.github.phenomics.ontolib.ontology.scoredist.ObjectScoreDistribution; import com.github.phenomics.ontolib.ontology.scoredist.ScoreDistribution; import java.io.IOException; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; /** * Read score distributions from H2 database. * * <h4>H2 Dependency Notes</h4> * * <p> * The class itself only uses JDBC. Thus, the ontolib module does not depend on H2 via maven but * your calling code has to depend on H2. * </p> * * @author <a href="mailto:manuel.holtgrewe@bihealth.de">Manuel Holtgrewe</a> */ public class H2ScoreDistributionReader implements ScoreDistributionReader { /** Path to database. */ private final String pathDb; /** Name of the table to use. */ private final String tableName; /** Connection of the database to use. */ private final Connection conn; /** H2 query for selecting all term counts. */ private final static String H2_SELECT_TERM_COUNTS = "SELECT DISTINCT (term_count) from %s"; /** H2 query for selecting by term count. */ private final static String H2_SELECT_BY_TERM_COUNT_STATEMENT = "SELECT (term_count, object_id, scores, p_values) FROM % WHERE (term_count = ?)"; /** H2 query for selecting by term count and object ID. */ private final static String H2_SELECT_BY_TERM_COUNT_AND_OBJECT_STATEMENT = "SELECT (term_count, object_id, scores, p_values) FROM % WHERE (term_count = ? AND object_id = ?)"; /** * Create new reader object. * * @param pathDb Path to H2 database to read from. * @param tableName Name of table to use for scores. * @throws OntoLibException If there was a problem opening the H2 database connection. */ public H2ScoreDistributionReader(String pathDb, String tableName) throws OntoLibException { super(); this.pathDb = pathDb; this.tableName = tableName; this.conn = openConnection(); } /** * Open connection and perform checks. * * @return New {@link Connection} to H2 database. * @throws OntoLibException In the case of problem with connecting. */ private Connection openConnection() throws OntoLibException { // Open connection. final Connection result; try { Class.forName("org.h2.Driver"); result = DriverManager.getConnection("jdbc:h2:" + pathDb, "", ""); } catch (ClassNotFoundException e) { throw new OntoLibException("H2 driver class could not be found", e); } catch (SQLException e) { throw new OntoLibException("Could not open database at " + pathDb, e); } // Check whether the table already exists. 
final boolean tableExists; try (final ResultSet rs = result.getMetaData().getTables(null, null, tableName, new String[] {"TABLE"})) { tableExists = rs.next(); if (!tableExists) { throw new OntoLibException("Table of name " + tableName + " does not exist in database!"); } } catch (SQLException e) { throw new OntoLibException("Checking for table of name " + tableName + " failed", e); } return result; } @Override public ObjectScoreDistribution readForTermCountAndObject(int termCount, int objectId) throws OntoLibException { try (final PreparedStatement stmt = conn .prepareStatement(String.format(H2_SELECT_BY_TERM_COUNT_AND_OBJECT_STATEMENT, tableName))) { stmt.setInt(1, termCount); stmt.setInt(2, objectId); try (final ResultSet rs = stmt.executeQuery()) { while (rs.next()) { return objectScoreDistributionFromResultSet(rs); } } } catch (SQLException e) { throw new OntoLibException("Problem with getting object score distribution for termCount: " + termCount + ", objectId: " + objectId); } throw new OntoLibException( "Found no object for termCount: " + termCount + ", objectId: " + objectId); } /** * Build {@link ObjectScoreDistribution} from {@link ResultSet}. * * @param rs {@link ResultSet} to get data from. * @return {@link ObjectScoreDistribution} constructed from {@code rs}. * @throws SQLException In the case of a problem with retrieving the data. */ private ObjectScoreDistribution objectScoreDistributionFromResultSet(ResultSet rs) throws SQLException { final int termCount = rs.getInt(1); final int objectId = rs.getInt(2); final int sampleSize = rs.getInt(3); final double[] scores = (double[]) rs.getObject(4); final double[] pValues = (double[]) rs.getObject(5); final TreeMap<Double, Double> scoreDist = new TreeMap<Double, Double>(); for (int i = 0; i < scores.length; ++i) { scoreDist.put(scores[i], pValues[i]); } return new ObjectScoreDistribution(termCount, objectId, sampleSize, scoreDist); } @Override public ScoreDistribution readForTermCount(int termCount) throws OntoLibException { final Map<Integer, ObjectScoreDistribution> dists = new HashMap<>(); try (final PreparedStatement stmt = conn.prepareStatement(String.format(H2_SELECT_BY_TERM_COUNT_STATEMENT, tableName))) { stmt.setInt(1, termCount); try (final ResultSet rs = stmt.executeQuery()) { while (rs.next()) { final ObjectScoreDistribution objScoreDist = objectScoreDistributionFromResultSet(rs); dists.put(objScoreDist.getObjectId(), objScoreDist); } } } catch (SQLException e) { throw new OntoLibException( "Problem with getting object score distributions for termCount: " + termCount); } if (dists.size() == 0) { throw new OntoLibException("Found no score distributions for termCount: " + termCount); } else { return new ScoreDistribution(termCount, dists); } } @Override public Map<Integer, ScoreDistribution> readAll() throws OntoLibException { // Get all term counts. final List<Integer> termCounts = new ArrayList<>(); try ( final PreparedStatement stmt = conn.prepareStatement(String.format(H2_SELECT_TERM_COUNTS, tableName)); final ResultSet rs = stmt.executeQuery()) { while (rs.next()) { termCounts.add(rs.getInt(1)); } } catch (SQLException e) { throw new OntoLibException("Problem querying the database for term counts", e); } // Query for all term counts. 
final Map<Integer, ScoreDistribution> result = new HashMap<>(); for (int termCount : termCounts) { result.put(termCount, readForTermCount(termCount)); } return result; } @Override public void close() throws IOException { try { conn.close(); } catch (SQLException e) { throw new IOException("Problem closing connection to database", e); } } }
bsd-3-clause
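A minimal usage sketch for the H2ScoreDistributionReader above (not part of the original sources): the database path "path/to/scoredist" and table name "score_distribution" are made-up placeholders, and, as the class Javadoc notes, the H2 driver has to be on the calling code's classpath.

import com.github.phenomics.ontolib.base.OntoLibException;
import com.github.phenomics.ontolib.io.scoredist.H2ScoreDistributionReader;
import com.github.phenomics.ontolib.ontology.scoredist.ObjectScoreDistribution;
import com.github.phenomics.ontolib.ontology.scoredist.ScoreDistribution;

import java.io.IOException;

public class ScoreDistReaderDemo {
  public static void main(String[] args) throws OntoLibException, IOException {
    // Placeholder path and table name; point these at the database the score writer produced.
    H2ScoreDistributionReader reader =
        new H2ScoreDistributionReader("path/to/scoredist", "score_distribution");
    try {
      // All object score distributions precomputed for queries with five terms.
      ScoreDistribution dist = reader.readForTermCount(5);
      System.out.println("distributions for 5 terms: " + dist);
      // A single object's distribution; object ID 42 is only an example.
      ObjectScoreDistribution objDist = reader.readForTermCountAndObject(5, 42);
      System.out.println("read distribution for object " + objDist.getObjectId());
    } finally {
      reader.close();
    }
  }
}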
harikb/yacr
writer_test.go
3006
// Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package yacr_test import ( "bytes" "errors" "testing" "time" . "github.com/gwenn/yacr" ) func writeRow(w *Writer, row []string) { for _, field := range row { if !w.WriteString(field) { break } } w.EndOfRecord() } // Stolen/adapted from $GOROOT/src/pkg/encoding/csv/writer_test.go var writeTests = []struct { Input [][]string Output string UseCRLF bool }{ {Input: [][]string{{"abc"}}, Output: "abc\n"}, {Input: [][]string{{"abc"}}, Output: "abc\r\n", UseCRLF: true}, {Input: [][]string{{`"abc"`}}, Output: `"""abc"""` + "\n"}, {Input: [][]string{{`a"b`}}, Output: `"a""b"` + "\n"}, {Input: [][]string{{`"a"b"`}}, Output: `"""a""b"""` + "\n"}, {Input: [][]string{{" abc"}}, Output: " abc\n"}, // differs {Input: [][]string{{"abc,def"}}, Output: `"abc,def"` + "\n"}, {Input: [][]string{{"abc", "def"}}, Output: "abc,def\n"}, {Input: [][]string{{"abc"}, {"def"}}, Output: "abc\ndef\n"}, {Input: [][]string{{"abc\ndef"}}, Output: "\"abc\ndef\"\n"}, {Input: [][]string{{"abc\ndef"}}, Output: "\"abc\ndef\"\r\n", UseCRLF: true}, // differs {Input: [][]string{{"abc\rdef"}}, Output: "\"abc\rdef\"\r\n", UseCRLF: true}, // differs {Input: [][]string{{"abc\rdef"}}, Output: "\"abc\rdef\"\n", UseCRLF: false}, {Input: [][]string{{"a", "b,\n", "c\"d"}}, Output: "a,\"b,\n\",\"c\"\"d\"\n"}, {Input: [][]string{{"à", "é", "è", "ù"}}, Output: "à,é,è,ù\n"}, } func TestWrite(t *testing.T) { for n, tt := range writeTests { b := &bytes.Buffer{} f := DefaultWriter(b) f.UseCRLF = tt.UseCRLF for _, row := range tt.Input { writeRow(f, row) } f.Flush() err := f.Err() if err != nil { t.Errorf("Unexpected error: %s\n", err) } out := b.String() if out != tt.Output { t.Errorf("#%d: out=%q want %q", n, out, tt.Output) } } } type errorWriter struct{} func (e errorWriter) Write(b []byte) (int, error) { return 0, errors.New("Test") } func TestError(t *testing.T) { b := &bytes.Buffer{} f := DefaultWriter(b) writeRow(f, []string{"abc"}) f.Flush() err := f.Err() if err != nil { t.Errorf("Unexpected error: %s\n", err) } f = DefaultWriter(errorWriter{}) writeRow(f, []string{"abc"}) f.Flush() err = f.Err() if err == nil { t.Error("Error should not be nil") } } var writeRecordTests = []struct { Input []interface{} Output string }{ {Input: []interface{}{"abc"}, Output: "abc\n"}, {Input: []interface{}{nil, "nil", 123, 3.14, time.Unix(0, 0).UTC()}, Output: ",nil,123,3.14,1970-01-01T00:00:00Z\n"}, } func TestWriteRecord(t *testing.T) { for n, tt := range writeRecordTests { b := &bytes.Buffer{} w := DefaultWriter(b) w.WriteRecord(tt.Input...) w.Flush() err := w.Err() if err != nil { t.Errorf("Unexpected error: %s\n", err) } out := b.String() if out != tt.Output { t.Errorf("#%d: out=%q want %q", n, out, tt.Output) } } }
bsd-3-clause
gaberger/pybvc
samples/samplenetconf/cmds/show_dpifcfg.py
3050
#!/usr/bin/python """ Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. @authors: Sergei Garbuzov @status: Development @version: 1.1.0 """ import json from pybvc.controller.controller import Controller from pybvc.netconfdev.vrouter.vrouter5600 import VRouter5600 from pybvc.common.status import STATUS from pybvc.common.utils import load_dict_from_file if __name__ == "__main__": f = "cfg.yml" d = {} if(load_dict_from_file(f, d) == False): print("Config file '%s' read error: " % f) exit() try: ctrlIpAddr = d['ctrlIpAddr'] ctrlPortNum = d['ctrlPortNum'] ctrlUname = d['ctrlUname'] ctrlPswd = d['ctrlPswd'] nodeName = d['nodeName'] nodeIpAddr = d['nodeIpAddr'] nodePortNum = d['nodePortNum'] nodeUname = d['nodeUname'] nodePswd = d['nodePswd'] except: print ("Failed to get Controller device attributes") exit(0) ctrl = Controller(ctrlIpAddr, ctrlPortNum, ctrlUname, ctrlPswd) vrouter = VRouter5600(ctrl, nodeName, nodeIpAddr, nodePortNum, nodeUname, nodePswd) print ("<<< 'Controller': %s, '%s': %s" % (ctrlIpAddr, nodeName, nodeIpAddr)) result = vrouter.get_dataplane_interfaces_cfg() status = result.get_status() if(status.eq(STATUS.OK) == True): print "Dataplane interfaces config:" dpIfCfg = result.get_data() print json.dumps(dpIfCfg, indent=4) else: print ("\n") print ("!!!Failed, reason: %s" % status.brief().lower()) print ("%s" % status.detailed()) exit(0)
bsd-3-clause
CosmosOS/XSharp
source/XSharp/XSharp/Assembler/Gen1/x86/_Infra/IInstructionWithCondition.cs
177
namespace XSharp.Assembler.x86 { public interface IInstructionWithCondition { ConditionalTestEnum Condition { get; set; } } }
bsd-3-clause
oliverlietz/bd-j
AuthoringTools/com.hdcookbook.grin.grinviewer/src/main/java/com/hdcookbook/grin/test/RyanDirector.java
8355
/* * Copyright (c) 2007, Sun Microsystems, Inc. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Sun Microsystems nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * Note: In order to comply with the binary form redistribution * requirement in the above license, the licensee may include * a URL reference to a copy of the required copyright notice, * the list of conditions and the disclaimer in a human readable * file with the binary form of the code that is subject to the * above license. For example, such file could be put on a * Blu-ray disc containing the binary form of the code or could * be put in a JAR file that is broadcast via a digital television * broadcast medium. In any event, you must include in any end * user licenses governing any code that includes the code subject * to the above license (in source and/or binary form) a disclaimer * that is at least as protective of Sun as the disclaimers in the * above license. * * A copy of the required copyright notice, the list of conditions and * the disclaimer will be maintained at * https://hdcookbook.dev.java.net/misc/license.html . * Thus, licensees may comply with the binary form redistribution * requirement with a text file that contains the following text: * * A copy of the license(s) governing this code is located * at https://hdcookbook.dev.java.net/misc/license.html */ package com.hdcookbook.grin.test; import java.net.URL; import java.io.IOException; import java.io.BufferedReader; import java.io.InputStreamReader; import com.hdcookbook.grin.Director; import com.hdcookbook.grin.SEShow; import com.hdcookbook.grin.Show; import com.hdcookbook.grin.Feature; import com.hdcookbook.grin.features.Assembly; import com.hdcookbook.grin.io.text.ShowParser; import com.hdcookbook.grin.io.ShowBuilder; import com.hdcookbook.grin.util.AssetFinder; import com.hdcookbook.grin.util.Debug; /** * This is part of the "Ryan's life" test show. It's mostly of * historical interest; it still works, but some of the ways of * structuring and using a show are passe. 
* * @author Bill Foote (http://jovial.com) */ public abstract class RyanDirector extends Director { private Assembly[] commentaryIndicators; private Feature[] commentaryOnIndicators; private Feature[] commentaryOffIndicators; private Assembly commentaryDirector; private Feature[] commentaryDirectors; // [0] is no director private int directorNumber; private boolean commentaryOn = false; /** */ public RyanDirector() { } protected void init() { String[] nm = { "F_commentary_menu_count_up", "F_commentary_menu_active", "F_commentary_menu_count_down" }; commentaryIndicators = new Assembly[nm.length]; commentaryOnIndicators = new Feature[nm.length]; commentaryOffIndicators = new Feature[nm.length]; for (int i = 0; i < nm.length; i++) { commentaryIndicators[i] = (Assembly) getShow().getFeature(nm[i]); commentaryOnIndicators[i] = commentaryIndicators[i].findPart("on"); commentaryOffIndicators[i] =commentaryIndicators[i].findPart("off"); if (Debug.ASSERT && (commentaryIndicators[i] == null || commentaryOnIndicators[i] == null || commentaryOffIndicators[i] == null)) { Debug.assertFail(); } } commentaryDirector = (Assembly) getShow().getFeature("F_commentary_director"); if (Debug.ASSERT && commentaryDirector == null) { Debug.assertFail(); } commentaryDirectors = new Feature[7]; for (int i = 0; i < commentaryDirectors.length; i++) { commentaryDirectors[i] = commentaryDirector.findPart("director_"+i); if (Debug.ASSERT && commentaryDirectors[i] == null) { Debug.assertFail(); } } } public Show createShow() { String showName = "ryan_show.txt"; SEShow show = new SEShow(this); URL source = null; BufferedReader rdr = null; try { source = AssetFinder.getURL(showName); if (source == null) { throw new IOException("Can't find resource " + showName); } rdr = new BufferedReader( new InputStreamReader(source.openStream(), "UTF-8")); ShowBuilder builder = new ShowBuilder(); builder.setExtensionParser(new RyanExtensionParser(this)); ShowParser p = new ShowParser(rdr, showName, show, builder); p.parse(); rdr.close(); } catch (IOException ex) { ex.printStackTrace(); System.out.println(); System.out.println(ex.getMessage()); System.out.println(); System.out.println("Error trying to parse " + showName); System.out.println(" URL: " + source); Debug.assertFail(); } finally { if (rdr != null) { try { rdr.close(); } catch (IOException ex) { } } } return show; } /** * Called when initialization is done, so it's OK to start * the video. **/ abstract public void startVideo(); /** * Called when the user selectes interactive or movie mode **/ abstract public void setInteractiveMode(boolean on); /** * Called when the user toggles commentary with the remote control. * The UI state is changed to reflect the new state. This may only * be done within a command. **/ void toggleCommentary() { commentaryOn = !commentaryOn; setCommentaryUI(); } protected void setDirectorNumber(int num) { directorNumber = num; setCommentaryUI(); } abstract protected void startCommentary(); /** * Called to set the state of the commentary UI to the right state, * depending on whether commentary is on or off. **/ void setCommentaryUI() { for (int i = 0; i < commentaryIndicators.length; i++) { Feature f; if (commentaryOn) { f = commentaryOnIndicators[i]; } else { f = commentaryOffIndicators[i]; } commentaryIndicators[i].setCurrentFeature(f); } if (commentaryOn) { commentaryDirector .setCurrentFeature(commentaryDirectors[directorNumber]); } else { commentaryDirector.setCurrentFeature(commentaryDirectors[0]); } } }
bsd-3-clause
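RyanDirector is abstract; to run the Ryan test show, a concrete subclass has to supply the player-control callbacks. A purely illustrative sketch of such a subclass (not part of the original code, and assuming the parent Director class leaves no further abstract methods to implement):

package com.hdcookbook.grin.test;

public class LoggingRyanDirector extends RyanDirector {

    // Called once initialization is done and it is OK to start the video.
    public void startVideo() {
        System.out.println("startVideo()");
    }

    // Called when the user selects interactive or movie mode.
    public void setInteractiveMode(boolean on) {
        System.out.println("setInteractiveMode(" + on + ")");
    }

    // Called when the director's commentary should start playing.
    protected void startCommentary() {
        System.out.println("startCommentary()");
    }
}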
tmk-ti/cursozf2
config/application.config.php
3018
<?php /** * If you need an environment-specific system or application configuration, * there is an example in the documentation * @see http://framework.zend.com/manual/current/en/tutorials/config.advanced.html#environment-specific-system-configuration * @see http://framework.zend.com/manual/current/en/tutorials/config.advanced.html#environment-specific-application-configuration */ return array( // This should be an array of module namespaces used in the application. 'modules' => array( 'Application', 'Market', 'Search', 'ZendDeveloperTools', ), // These are various options for the listeners attached to the ModuleManager 'module_listener_options' => array( // This should be an array of paths in which modules reside. // If a string key is provided, the listener will consider that a module // namespace, the value of that key the specific path to that module's // Module class. 'module_paths' => array( './module', './vendor', ), // An array of paths from which to glob configuration files after // modules are loaded. These effectively override configuration // provided by modules themselves. Paths may use GLOB_BRACE notation. 'config_glob_paths' => array( 'config/autoload/{{,*.}global,{,*.}local}.php', ), // Whether or not to enable a configuration cache. // If enabled, the merged configuration will be cached and used in // subsequent requests. //'config_cache_enabled' => $booleanValue, // The key used to create the configuration cache file name. //'config_cache_key' => $stringKey, // Whether or not to enable a module class map cache. // If enabled, creates a module class map cache which will be used // by in future requests, to reduce the autoloading process. //'module_map_cache_enabled' => $booleanValue, // The key used to create the class map cache file name. //'module_map_cache_key' => $stringKey, // The path in which to cache merged configuration. //'cache_dir' => $stringPath, // Whether or not to enable modules dependency checking. // Enabled by default, prevents usage of modules that depend on other modules // that weren't loaded. // 'check_dependencies' => true, ), // Used to create an own service manager. May contain one or more child arrays. //'service_listener_options' => array( // array( // 'service_manager' => $stringServiceManagerName, // 'config_key' => $stringConfigKey, // 'interface' => $stringOptionalInterface, // 'method' => $stringRequiredMethodName, // ), // ), // Initial configuration with which to seed the ServiceManager. // Should be compatible with Zend\ServiceManager\Config. // 'service_manager' => array(), );
bsd-3-clause
tmk-ti/cursozf2
module/Application/src/Application/Helper/LeftLinks.php
501
<?php /** * Created by PhpStorm. * User: filipe * Date: 03/10/15 * Time: 21:05 */ namespace Application\Helper; use Zend\View\Helper\AbstractHelper; class LeftLinks extends AbstractHelper { /** * Renders an unordered list of links, one <li> per value, each pointing to "$urlPrefix/$value". * * @param array $values Link labels, also used as URL suffixes * @param string $urlPrefix URL prefix shared by all links * @return string HTML markup for the list */ public function __invoke($values, $urlPrefix) { $lista = "<ul>".PHP_EOL; foreach($values as $value) { $lista .= sprintf("<li><a href=\"%s/%s\">%s</a></li>",$urlPrefix,$value,$value); } $lista .= "</ul>".PHP_EOL; return $lista; } }
bsd-3-clause
laroque/couchdb-python3
couchdb/multipart.py
8872
# -*- coding: utf-8 -*- # # Copyright (C) 2008-2009 Christopher Lenz # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. """Support for streamed reading and writing of multipart MIME content.""" from base64 import b64encode from cgi import parse_header try: from hashlib import md5 except ImportError: from md5 import new as md5 import sys __all__ = ['read_multipart', 'write_multipart'] __docformat__ = 'restructuredtext en' CRLF = '\r\n' def read_multipart(fileobj, boundary=None): """Simple streaming MIME multipart parser. This function takes a file-like object reading a MIME envelope, and yields a ``(headers, is_multipart, payload)`` tuple for every part found, where ``headers`` is a dictionary containing the MIME headers of that part (with names lower-cased), ``is_multipart`` is a boolean indicating whether the part is itself multipart, and ``payload`` is either a string (if ``is_multipart`` is false), or an iterator over the nested parts. Note that the iterator produced for nested multipart payloads MUST be fully consumed, even if you wish to skip over the content. :param fileobj: a file-like object :param boundary: the part boundary string, will generally be determined automatically from the headers of the outermost multipart envelope :return: an iterator over the parts :since: 0.5 """ headers = {} buf = [] outer = in_headers = boundary is None next_boundary = boundary and '--' + boundary + '\n' or None last_boundary = boundary and '--' + boundary + '--\n' or None def _current_part(): payload = ''.join(buf) if payload.endswith('\r\n'): payload = payload[:-2] elif payload.endswith('\n'): payload = payload[:-1] content_md5 = headers.get('content-md5') if content_md5: h = b64encode(md5(payload).digest()) if content_md5 != h: raise ValueError('data integrity check failed') return headers, False, payload for line in fileobj: if in_headers: line = line.replace(CRLF, '\n') if line != '\n': name, value = line.split(':', 1) headers[name.lower().strip()] = value.strip() else: in_headers = False mimetype, params = parse_header(headers.get('content-type')) if mimetype.startswith('multipart/'): sub_boundary = params['boundary'] sub_parts = read_multipart(fileobj, boundary=sub_boundary) if boundary is not None: yield headers, True, sub_parts headers.clear() del buf[:] else: for part in sub_parts: yield part return elif line.replace(CRLF, '\n') == next_boundary: # We've reached the start of a new part, as indicated by the # boundary if headers: if not outer: yield _current_part() else: outer = False headers.clear() del buf[:] in_headers = True elif line.replace(CRLF, '\n') == last_boundary: # We're done with this multipart envelope break else: buf.append(line) if not outer and headers: yield _current_part() class MultipartWriter(object): def __init__(self, fileobj, headers=None, subtype='mixed', boundary=None): self.fileobj = fileobj if boundary is None: boundary = self._make_boundary() self.boundary = boundary if headers is None: headers = {} headers['Content-Type'] = 'multipart/%s; boundary="%s"' % ( subtype, self.boundary ) self._write_headers(headers) def open(self, headers=None, subtype='mixed', boundary=None): self.fileobj.write('--') self.fileobj.write(self.boundary) self.fileobj.write(CRLF) return MultipartWriter(self.fileobj, headers=headers, subtype=subtype, boundary=boundary) def add(self, mimetype, content, headers=None): self.fileobj.write('--') self.fileobj.write(self.boundary) 
self.fileobj.write(CRLF) if headers is None: headers = {} if isinstance(content, str): ctype, params = parse_header(mimetype) if 'charset' in params: content = content.encode(params['charset']) else: content = content.encode('utf-8') mimetype = mimetype + ';charset=utf-8' headers['Content-Type'] = mimetype if content: headers['Content-Length'] = str(len(content)) headers['Content-MD5'] = b64encode(md5(content).digest()) self._write_headers(headers) if content: # XXX: throw an exception if a boundary appears in the content?? self.fileobj.write(content.decode() if isinstance(content, bytes) else content) self.fileobj.write(CRLF) def close(self): self.fileobj.write('--') self.fileobj.write(self.boundary) self.fileobj.write('--') self.fileobj.write(CRLF) def _make_boundary(self): try: from uuid import uuid4 return '==' + uuid4().hex + '==' except ImportError: from random import randrange token = randrange(sys.maxsize) format = '%%0%dd' % len(repr(sys.maxsize - 1)) return '===============' + (format % token) + '==' def _write_headers(self, headers): if headers: for name, value in sorted(headers.items()): self.fileobj.write(name) self.fileobj.write(': ') self.fileobj.write(value.decode() if isinstance(value, bytes) else value) self.fileobj.write(CRLF) self.fileobj.write(CRLF) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def write_multipart(fileobj, subtype='mixed', boundary=None): r"""Simple streaming MIME multipart writer. This function returns a `MultipartWriter` object that has a few methods to control the nested MIME parts. For example, to write a flat multipart envelope you call the ``add(mimetype, content, [headers])`` method for every part, and finally call the ``close()`` method. >>> from StringIO import StringIO >>> buf = StringIO() >>> envelope = write_multipart(buf, boundary='==123456789==') >>> envelope.add('text/plain', 'Just testing') >>> envelope.close() >>> print buf.getvalue().replace('\r\n', '\n') Content-Type: multipart/mixed; boundary="==123456789==" <BLANKLINE> --==123456789== Content-Length: 12 Content-MD5: nHmX4a6el41B06x2uCpglQ== Content-Type: text/plain <BLANKLINE> Just testing --==123456789==-- <BLANKLINE> Note that an explicit boundary is only specified for testing purposes. If the `boundary` parameter is omitted, the multipart writer will generate a random string for the boundary. To write nested structures, call the ``open([headers])`` method on the respective envelope, and finish each envelope using the ``close()`` method: >>> buf = StringIO() >>> envelope = write_multipart(buf, boundary='==123456789==') >>> part = envelope.open(boundary='==abcdefghi==') >>> part.add('text/plain', 'Just testing') >>> part.close() >>> envelope.close() >>> print buf.getvalue().replace('\r\n', '\n') #:doctest +ELLIPSIS Content-Type: multipart/mixed; boundary="==123456789==" <BLANKLINE> --==123456789== Content-Type: multipart/mixed; boundary="==abcdefghi==" <BLANKLINE> --==abcdefghi== Content-Length: 12 Content-MD5: nHmX4a6el41B06x2uCpglQ== Content-Type: text/plain <BLANKLINE> Just testing --==abcdefghi==-- --==123456789==-- <BLANKLINE> :param fileobj: a writable file-like object that the output should get written to :param subtype: the subtype of the multipart MIME type (e.g. "mixed") :param boundary: the boundary to use to separate the different parts :since: 0.6 """ return MultipartWriter(fileobj, subtype=subtype, boundary=boundary)
bsd-3-clause
NCIP/nci-term-browser
software/browser-util/src/gov/nih/nci/evs/browser/bean/ValueSetConfig.java
1440
package gov.nih.nci.evs.browser.bean; import java.io.*; import java.util.*; import java.net.*; public class ValueSetConfig { // Variable declaration private String name; private String uri; private String reportURI; private String extractionRule; // Default constructor public ValueSetConfig() { } // Constructor public ValueSetConfig( String name, String uri, String reportURI, String extractionRule) { this.name = name; this.uri = uri; this.reportURI = reportURI; this.extractionRule = extractionRule; } // Set methods public void setName(String name) { this.name = name; } public void setUri(String uri) { this.uri = uri; } public void setReportURI(String reportURI) { this.reportURI = reportURI; } public void setExtractionRule(String extractionRule) { this.extractionRule = extractionRule; } // Get methods public String getName() { return this.name; } public String getUri() { return this.uri; } public String getReportURI() { return this.reportURI; } public String getExtractionRule() { return this.extractionRule; } public String toString() { StringBuffer buf = new StringBuffer(); buf.append("name: ").append(name); buf.append("\n\turi: ").append(uri); String report_uri = reportURI.replaceAll(" ", "%20"); buf.append("\n\treportURI: ").append(report_uri); buf.append("\n\textractionRule: ").append(extractionRule); return buf.toString(); } }
bsd-3-clause
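A short, purely illustrative sketch of how the ValueSetConfig bean above might be populated; all values are made-up placeholders. Note that toString() percent-encodes spaces in the report URI.

import gov.nih.nci.evs.browser.bean.ValueSetConfig;

public class ValueSetConfigDemo {
    public static void main(String[] args) {
        ValueSetConfig cfg = new ValueSetConfig(
            "Example Value Set",                          // name (placeholder)
            "http://example.org/valueset/example",        // uri (placeholder)
            "http://example.org/reports/example report",  // reportURI; the space becomes %20 in toString()
            "byCode");                                    // extractionRule (placeholder)
        System.out.println(cfg.toString());
    }
}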
dimichspb/doc
migrations/m160327_143207_adding_price_supplier_tables.php
1534
<?php use yii\db\Migration; class m160327_143207_adding_price_supplier_tables extends Migration { public function up() { $this->createTable('{{%price}}', [ 'id' => $this->primaryKey(11), 'status' => $this->smallInteger()->notNull()->defaultValue(10), 'created_at' => $this->integer()->notNull(), 'updated_at' => $this->integer()->notNull(), 'started_at' => $this->integer()->notNull(), 'expire_at' => $this->integer(), 'product' => $this->integer(11)->notNull(), 'supplier' => $this->integer(11)->notNull(), 'quantity' => $this->integer(11)->notNull()->defaultValue(0), 'value' => $this->float()->notNull(), ], "DEFAULT CHARSET=utf8"); $this->createTable('{{%supplier}}', [ 'id' => $this->primaryKey(11), 'status' => $this->smallInteger()->notNull()->defaultValue(10), 'name' => $this->string(255), ], "DEFAULT CHARSET=utf8"); $this->addForeignKey('fk_price_product_id', '{{%price}}', 'product', '{{%product}}', 'id', 'RESTRICT', 'CASCADE'); $this->addForeignKey('fk_price_supplier_id', '{{%price}}', 'supplier', '{{%supplier}}', 'id', 'CASCADE', 'CASCADE'); } public function down() { $this->dropForeignKey('fk_price_supplier_id', 'price'); $this->dropForeignKey('fk_price_product_id', 'price'); $this->dropTable('{{%supplier}}'); $this->dropTable('{{%price}}'); } }
bsd-3-clause