repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
williambai/beyond-webapp
unicom/app/scripts/userApp/me/views/MeBonus.js
1840
var _ = require('underscore'); var $ = require('jquery'); var Backbone = require('backbone'); var ListView = require('../../_base/__ListView'); var config = require('../../conf'); var Utils = require('../../_base/__Util'); Backbone.$ = $; //** 模型 var Bonus = Backbone.Model.extend({ idAttribute: '_id', urlRoot: config.api.host + '/private/finance/bonuses', defaults: { }, }); //** 集合 var BonusCollection = Backbone.Collection.extend({ model: Bonus, url: config.api.host + '/private/finance/bonuses', }); //** 列表子视图 var BonusListView = ListView.extend({ el: '#list', template: _.template($('#tpl-me-bonus-item').html()), initialize: function(options){ this.collection = new BonusCollection(); ListView.prototype.initialize.apply(this,options); }, getNewItemView: function(model){ return this.template({model: model.toJSON()}); }, }); //** 页面主视图 exports = module.exports = Backbone.View.extend({ el: '#content', template: _.template($('#tpl-me-bonus-index').html()), initialize: function(options) { this.router = options.router; this.on('load', this.load, this); }, events: { 'scroll': 'scroll', 'click .back': 'back', 'click .item': 'itemView', }, load: function() { var that = this; this.loaded = true; this.render(); this.listView = new BonusListView({ el: '#list', }); this.listView.trigger('load'); }, scroll: function() { this.listView.scroll(); return false; }, back: function(){ this.router.navigate('me/index',{trigger: true, replace: true}); return false; }, itemView: function(evt){ var id = this.$(evt.currentTarget).closest('.item').attr('id'); this.router.navigate('me/bonus/'+ id, {trigger: true, replace: true}); return false; }, render: function() { this.$el.html(this.template()); return this; }, });
mit
3DRealms/jrpg
dominio/src/main/java/mensaje/MensajeAutenticacion.java
894
package mensaje; public class MensajeAutenticacion { private String username; private String password; private boolean registro; private String casta; private String raza; public MensajeAutenticacion(String username, String password, boolean registro, String casta, String raza) { this.username = username; this.password = password; this.registro = registro; this.casta = casta; this.raza = raza; } public MensajeAutenticacion(String username, String password, boolean registrado) { this.username = username; this.password = password; this.registro = registrado; this.casta = ""; this.raza = ""; } public String getUsername() { return username; } public String getPassword() { return password; } public boolean isRegistro() { return registro; } public String getCasta() { return casta; } public String getRaza() { return raza; } }
mit
maxfoow/EventSourcingPoc
src/Processing/EventSourcing.Poc.CommandProcessing/ActionDispatcher.cs
1477
using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using EventSourcing.Poc.EventSourcing; using EventSourcing.Poc.EventSourcing.Command; using EventSourcing.Poc.EventSourcing.Jobs; using EventSourcing.Poc.EventSourcing.Wrapper; using EventSourcing.Poc.Messages; namespace EventSourcing.Poc.Processing { public class ActionDispatcher : IActionDispatcher { private readonly ICommandQueue _commandQueue; private readonly ICommandStore _commandStore; private readonly IJobHandler _jobHandler; public ActionDispatcher(ICommandStore commandStore, ICommandQueue commandQueue, IJobHandler jobHandler) { _commandStore = commandStore; _commandQueue = commandQueue; _jobHandler = jobHandler; } public async Task Send(IEventWrapper eventParent, IAction action) { await Send(eventParent, new[] {action}); } public async Task Send(IEventWrapper eventParent, IReadOnlyCollection<IAction> actions) { var wrappedActions = actions .Select(c => c.Wrap()) .ToArray(); if (eventParent.IsLinkToJob) { await _jobHandler.Associate(eventParent, wrappedActions); } await _commandStore.Save(wrappedActions); foreach (var wrappedAction in wrappedActions) { await _commandQueue.Send(wrappedAction); } } } }
mit
sh-sh-dev/ir-studies
assets/crispRipple.js
1900
(function($) { 'use strict'; var els = $('.has-crr'); return els.each(function () { var el = $(this), color = el.css('background-color'); el.attr('data-crr-hold', 1); var data = el.data('crr-hold'); el.append('<div class="crisp-container" />').click(function(e){ var container = el.find('.crisp-container'); // ['button', 'a'].map(function(b){ // if (e.target.tagName.toLowerCase() == b) { // return false; // } // }) if ( e.target.tagName.toLowerCase() == 'a' || e.target.tagName.toLowerCase() == 'button' || container.find('.crisp-ripple').length > 1 ) { return false; } data++; if ( data == 4 ) data = 1; el.attr('data-crr-hold', data); var pos = { x: e.pageX - el.offset().left, y: e.pageY - el.offset().top }, crisp = $('<div class="crisp-ripple" />'), size = Math.max( el.height(), el.width() ) * 3, bg, colors = el.data('crr-color').split('|'); if ( data%3 == 0 ) { bg = colors.pop(); } else if ( data%3 == 2 ) { bg = colors.shift(); } else if ( data%3 == 1 ) { bg = color; } crisp.appendTo(container).css({ 'left': pos.x, 'top': pos.y, 'background-color': bg }) setTimeout(function () { crisp.css({ 'height': size, 'width': size }).on('transitionend', function () { el.css('background-color', bg ); crisp.remove() }) }, 0); }) }); }(jQuery));
mit
CupOfTea696/prism
components/prism-asciidoc.js
9031
(function (Prism) { var attributes = { pattern: /(^[ \t]*)\[(?!\[)(?:(["'$`])(?:(?!\2)[^\\]|\\.)*\2|\[(?:[^\]\\]|\\.)*\]|[^\]\\]|\\.)*\]/m, lookbehind: true, inside: { 'quoted': { pattern: /([$`])(?:(?!\1)[^\\]|\\.)*\1/, inside: { 'punctuation': /^[$`]|[$`]$/ } }, 'interpreted': { pattern: /'(?:[^'\\]|\\.)*'/, inside: { 'punctuation': /^'|'$/ // See rest below } }, 'string': /"(?:[^"\\]|\\.)*"/, 'variable': /\w+(?==)/, 'punctuation': /^\[|\]$|,/, 'operator': /=/, // The negative look-ahead prevents blank matches 'attr-value': /(?!^\s+$).+/ } }; Prism.languages.asciidoc = { 'comment-block': { pattern: /^(\/{4,})(?:\r?\n|\r)(?:[\s\S]*(?:\r?\n|\r))??\1/m, alias: 'comment' }, 'table': { pattern: /^\|={3,}(?:(?:\r?\n|\r).*)*?(?:\r?\n|\r)\|={3,}$/m, inside: { 'specifiers': { pattern: /(?!\|)(?:(?:(?:\d+(?:\.\d+)?|\.\d+)[+*])?(?:[<^>](?:\.[<^>])?|\.[<^>])?[a-z]*)(?=\|)/, alias: 'attr-value' }, 'punctuation': { pattern: /(^|[^\\])[|!]=*/, lookbehind: true } // See rest below } }, 'passthrough-block': { pattern: /^(\+{4,})(?:\r?\n|\r)(?:[\s\S]*(?:\r?\n|\r))??\1$/m, inside: { 'punctuation': /^\++|\++$/ // See rest below } }, // Literal blocks and listing blocks 'literal-block': { pattern: /^(-{4,}|\.{4,})(?:\r?\n|\r)(?:[\s\S]*(?:\r?\n|\r))??\1$/m, inside: { 'punctuation': /^(?:-+|\.+)|(?:-+|\.+)$/ // See rest below } }, // Sidebar blocks, quote blocks, example blocks and open blocks 'other-block': { pattern: /^(--|\*{4,}|_{4,}|={4,})(?:\r?\n|\r)(?:[\s\S]*(?:\r?\n|\r))??\1$/m, inside: { 'punctuation': /^(?:-+|\*+|_+|=+)|(?:-+|\*+|_+|=+)$/ // See rest below } }, // list-punctuation and list-label must appear before indented-block 'list-punctuation': { pattern: /(^[ \t]*)(?:-|\*{1,5}|\.{1,5}|(?:[a-z]|\d+)\.|[xvi]+\))(?= )/im, lookbehind: true, alias: 'punctuation' }, 'list-label': { pattern: /(^[ \t]*)[a-z\d].+(?::{2,4}|;;)(?=\s)/im, lookbehind: true, alias: 'symbol' }, 'indented-block': { pattern: /((\r?\n|\r)\2)([ \t]+)\S.*(?:(?:\r?\n|\r)\3.+)*(?=\2{2}|$)/, lookbehind: true 
}, 'comment': /^\/\/.*/m, 'title': { pattern: /^.+(?:\r?\n|\r)(?:={3,}|-{3,}|~{3,}|\^{3,}|\+{3,})$|^={1,5} +.+|^\.(?![\s.]).*/m, alias: 'important', inside: { 'punctuation': /^(?:\.|=+)|(?:=+|-+|~+|\^+|\++)$/ // See rest below } }, 'attribute-entry': { pattern: /^:[^:\r\n]+:(?: .*?(?: \+(?:\r?\n|\r).*?)*)?$/m, alias: 'tag' }, 'attributes': attributes, 'hr': { pattern: /^'{3,}$/m, alias: 'punctuation' }, 'page-break': { pattern: /^<{3,}$/m, alias: 'punctuation' }, 'admonition': { pattern: /^(?:TIP|NOTE|IMPORTANT|WARNING|CAUTION):/m, alias: 'keyword' }, 'callout': [ { pattern: /(^[ \t]*)<?\d*>/m, lookbehind: true, alias: 'symbol' }, { pattern: /<\d+>/, alias: 'symbol' } ], 'macro': { pattern: /\b[a-z\d][a-z\d-]*::?(?:(?:\S+)??\[(?:[^\]\\"]|(["'])(?:(?!\1)[^\\]|\\.)*\1|\\.)*\])/, inside: { 'function': /^[a-z\d-]+(?=:)/, 'punctuation': /^::?/, 'attributes': { pattern: /(?:\[(?:[^\]\\"]|(["'])(?:(?!\1)[^\\]|\\.)*\1|\\.)*\])/, inside: attributes.inside } } }, 'inline': { /* The initial look-behind prevents the highlighting of escaped quoted text. Quoted text can be multi-line but cannot span an empty line. All quoted text can have attributes before [foobar, 'foobar', baz="bar"]. First, we handle the constrained quotes. Those must be bounded by non-word chars and cannot have spaces between the delimiter and the first char. They are, in order: _emphasis_, ``double quotes'', `single quotes', `monospace`, 'emphasis', *strong*, +monospace+ and #unquoted# Then we handle the unconstrained quotes. Those do not have the restrictions of the constrained quotes. 
They are, in order: __emphasis__, **strong**, ++monospace++, +++passthrough+++, ##unquoted##, $$passthrough$$, ~subscript~, ^superscript^, {attribute-reference}, [[anchor]], [[[bibliography anchor]]], <<xref>>, (((indexes))) and ((indexes)) */ pattern: /(^|[^\\])(?:(?:\B\[(?:[^\]\\"]|(["'])(?:(?!\2)[^\\]|\\.)*\2|\\.)*\])?(?:\b_(?!\s)(?: _|[^_\\\r\n]|\\.)+(?:(?:\r?\n|\r)(?: _|[^_\\\r\n]|\\.)+)*_\b|\B``(?!\s).+?(?:(?:\r?\n|\r).+?)*''\B|\B`(?!\s)(?: ['`]|.)+?(?:(?:\r?\n|\r)(?: ['`]|.)+?)*['`]\B|\B(['*+#])(?!\s)(?: \3|(?!\3)[^\\\r\n]|\\.)+(?:(?:\r?\n|\r)(?: \3|(?!\3)[^\\\r\n]|\\.)+)*\3\B)|(?:\[(?:[^\]\\"]|(["'])(?:(?!\4)[^\\]|\\.)*\4|\\.)*\])?(?:(__|\*\*|\+\+\+?|##|\$\$|[~^]).+?(?:(?:\r?\n|\r).+?)*\5|\{[^}\r\n]+\}|\[\[\[?.+?(?:(?:\r?\n|\r).+?)*\]?\]\]|<<.+?(?:(?:\r?\n|\r).+?)*>>|\(\(\(?.+?(?:(?:\r?\n|\r).+?)*\)?\)\)))/m, lookbehind: true, inside: { 'attributes': attributes, 'url': { pattern: /^(?:\[\[\[?.+?\]?\]\]|<<.+?>>)$/, inside: { 'punctuation': /^(?:\[\[\[?|<<)|(?:\]\]\]?|>>)$/ } }, 'attribute-ref': { pattern: /^\{.+\}$/, inside: { 'variable': { pattern: /(^\{)[a-z\d,+_-]+/, lookbehind: true }, 'operator': /^[=?!#%@$]|!(?=[:}])/, 'punctuation': /^\{|\}$|::?/ } }, 'italic': { pattern: /^(['_])[\s\S]+\1$/, inside: { 'punctuation': /^(?:''?|__?)|(?:''?|__?)$/ } }, 'bold': { pattern: /^\*[\s\S]+\*$/, inside: { punctuation: /^\*\*?|\*\*?$/ } }, 'punctuation': /^(?:``?|\+{1,3}|##?|\$\$|[~^]|\(\(\(?)|(?:''?|\+{1,3}|##?|\$\$|[~^`]|\)?\)\))$/ } }, 'replacement': { pattern: /\((?:C|TM|R)\)/, alias: 'builtin' }, 'entity': /&#?[\da-z]{1,8};/i, 'line-continuation': { pattern: /(^| )\+$/m, lookbehind: true, alias: 'punctuation' } }; // Allow some nesting. There is no recursion though, so cloning should not be needed. 
attributes.inside['interpreted'].inside.rest = { 'macro': Prism.languages.asciidoc['macro'], 'inline': Prism.languages.asciidoc['inline'], 'replacement': Prism.languages.asciidoc['replacement'], 'entity': Prism.languages.asciidoc['entity'] }; Prism.languages.asciidoc['passthrough-block'].inside.rest = { 'macro': Prism.languages.asciidoc['macro'] }; Prism.languages.asciidoc['literal-block'].inside.rest = { 'callout': Prism.languages.asciidoc['callout'] }; Prism.languages.asciidoc['table'].inside.rest = { 'comment-block': Prism.languages.asciidoc['comment-block'], 'passthrough-block': Prism.languages.asciidoc['passthrough-block'], 'literal-block': Prism.languages.asciidoc['literal-block'], 'other-block': Prism.languages.asciidoc['other-block'], 'list-punctuation': Prism.languages.asciidoc['list-punctuation'], 'indented-block': Prism.languages.asciidoc['indented-block'], 'comment': Prism.languages.asciidoc['comment'], 'title': Prism.languages.asciidoc['title'], 'attribute-entry': Prism.languages.asciidoc['attribute-entry'], 'attributes': Prism.languages.asciidoc['attributes'], 'hr': Prism.languages.asciidoc['hr'], 'page-break': Prism.languages.asciidoc['page-break'], 'admonition': Prism.languages.asciidoc['admonition'], 'list-label': Prism.languages.asciidoc['list-label'], 'callout': Prism.languages.asciidoc['callout'], 'macro': Prism.languages.asciidoc['macro'], 'inline': Prism.languages.asciidoc['inline'], 'replacement': Prism.languages.asciidoc['replacement'], 'entity': Prism.languages.asciidoc['entity'], 'line-continuation': Prism.languages.asciidoc['line-continuation'] }; Prism.languages.asciidoc['other-block'].inside.rest = { 'table': Prism.languages.asciidoc['table'], 'list-punctuation': Prism.languages.asciidoc['list-punctuation'], 'indented-block': Prism.languages.asciidoc['indented-block'], 'comment': Prism.languages.asciidoc['comment'], 'attribute-entry': Prism.languages.asciidoc['attribute-entry'], 'attributes': Prism.languages.asciidoc['attributes'], 
'hr': Prism.languages.asciidoc['hr'], 'page-break': Prism.languages.asciidoc['page-break'], 'admonition': Prism.languages.asciidoc['admonition'], 'list-label': Prism.languages.asciidoc['list-label'], 'macro': Prism.languages.asciidoc['macro'], 'inline': Prism.languages.asciidoc['inline'], 'replacement': Prism.languages.asciidoc['replacement'], 'entity': Prism.languages.asciidoc['entity'], 'line-continuation': Prism.languages.asciidoc['line-continuation'] }; Prism.languages.asciidoc['title'].inside.rest = { 'macro': Prism.languages.asciidoc['macro'], 'inline': Prism.languages.asciidoc['inline'], 'replacement': Prism.languages.asciidoc['replacement'], 'entity': Prism.languages.asciidoc['entity'] }; // Plugin to make entity title show the real entity, idea by Roman Komarov Prism.hooks.add('wrap', function(env) { if (env.type === 'entity') { env.attributes['title'] = env.content.replace(/&amp;/, '&'); } }); Prism.languages.adoc = Prism.languages.asciidoc; }(Prism));
mit
ZEROKISEKI/cube-generator
gulp/assets.js
1118
import gulp from 'gulp' import path from 'path' import { Path } from './config' (function () { 'use strict' gulp.task('fonts', () => { return gulp.src(path.join(Path.source, '/fonts/**/*.*')) .pipe(gulp.dest(path.join('..', Path.theme, Path.source, '/fonts'))) }) gulp.task('languages', () => { return gulp.src(path.join(Path.languages, '/*.yml')) .pipe(gulp.dest(path.join('..', Path.theme, Path.languages))) }) gulp.task('img', () => { return gulp.src(path.join(Path.source, '/images/**/*.*')) .pipe(gulp.dest(path.join('..', Path.theme, Path.source, '/images'))) }) gulp.task('music', () => { return gulp.src(path.join(Path.source, '/music/**/*.*')) .pipe(gulp.dest(path.join('..', Path.theme, Path.source, '/music'))) }) gulp.task('lrc', () => { return gulp.src(path.join(Path.source, '/lrc/**/*.*')) .pipe(gulp.dest(path.join('..', Path.theme, Path.source, '/lrc'))) }) gulp.task('assets', ['fonts', 'languages', 'img', 'music', 'lrc'], () => { }) })()
mit
javadev/underscore-java
spring-boot-example/src/test/java/com/example/demo/controller/XmlToJsonControllerTest.java
2342
package com.example.demo.controller; import static org.junit.jupiter.api.Assertions.assertSame; import com.github.underscore.lodash.U; import java.util.Map; import org.junit.jupiter.api.Test; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; class XmlToJsonControllerTest { private XmlToJsonController testObj = new XmlToJsonController(); @Test void xmltojson() { ResponseEntity<Map<String, Object>> result = testObj.xmltojson(U.objectBuilder().add("xml", "<a/>").build()); assertSame(HttpStatus.OK, result.getStatusCode()); } @Test void xmltojsonError() { ResponseEntity<Map<String, Object>> result = testObj.xmltojson(U.objectBuilder().add("xml", "<a/>1").build()); assertSame(HttpStatus.BAD_REQUEST, result.getStatusCode()); } @Test void formatxml() { ResponseEntity<Map<String, Object>> result = testObj.formatxml(U.objectBuilder().add("xml", "<a/>").build()); assertSame(HttpStatus.OK, result.getStatusCode()); } @Test void formatxmlError() { ResponseEntity<Map<String, Object>> result = testObj.formatxml(U.objectBuilder().add("xml", "<a/>1").build()); assertSame(HttpStatus.BAD_REQUEST, result.getStatusCode()); } @Test void jsontoxml() { ResponseEntity<Map<String, Object>> result = testObj.jsontoxml(U.objectBuilder().add("json", "{\"a\":1}").build()); assertSame(HttpStatus.OK, result.getStatusCode()); } @Test void jsontoxmlError() { ResponseEntity<Map<String, Object>> result = testObj.jsontoxml(U.objectBuilder().add("json", "{\"a\":1}1").build()); assertSame(HttpStatus.BAD_REQUEST, result.getStatusCode()); } @Test void formatjson() { ResponseEntity<Map<String, Object>> result = testObj.formatjson(U.objectBuilder().add("json", "{\"a\":1}").build()); assertSame(HttpStatus.OK, result.getStatusCode()); } @Test void formatjsonError() { ResponseEntity<Map<String, Object>> result = testObj.formatjson(U.objectBuilder().add("json", "{\"a\":1}1").build()); assertSame(HttpStatus.BAD_REQUEST, result.getStatusCode()); } }
mit
AaronFriesen/Trydent
src/main/java/edu/gatech/cs2340/trydent/animation/AnimationEvent.java
521
package edu.gatech.cs2340.trydent.animation; import edu.gatech.cs2340.trydent.GameObject; /** * Stores information about an animation event. */ public class AnimationEvent { /** The GameObject associated with this event. */ public final GameObject gameObject; /** The Animation associated with this event. */ public final Animation animation; public AnimationEvent(GameObject gameObject, Animation animation) { this.gameObject = gameObject; this.animation = animation; } }
mit
uniplaces/ember-cli-uniq
addon/components/uni-auth-modal.js
342
import Component from '@ember/component'; import layout from '../templates/components/uni-auth-modal'; export default Component.extend({ tagName: '', layout, isOpen: false, baseCssClass: 'uni-auth-modal', hasSeparator: false, onCloseModal() {}, actions: { onCloseModal() { this.get('onCloseModal')(); } } });
mit
reasonMix/netfoxpack
LuaFunction.cpp
3447
#include "LuaFunction.hpp" LuaRef::LuaRef(): L(nullptr), ref_(LUA_NOREF) { } LuaRef::LuaRef(lua_State* aL, int index) : L(aL), ref_(LUA_NOREF) { lua_pushvalue(L, index); ref_ = luaL_ref(L, LUA_REGISTRYINDEX); } LuaRef::~LuaRef() { unref(); } LuaRef::LuaRef(const LuaRef& other): L(nullptr), ref_(LUA_NOREF) { *this = other; } LuaRef& LuaRef::operator=(const LuaRef& rhs) { if (this != &rhs) { rhs.push(); reset(rhs.L, -1); lua_pop(L, 1); } return *this; } LuaRef::LuaRef(LuaRef&& other): L(nullptr), ref_(LUA_NOREF) { *this = std::move(other); } LuaRef& LuaRef::operator=(LuaRef&& rhs) { if (this != &rhs) { unref(); L = rhs.L; ref_ = rhs.ref_; rhs.L = nullptr; rhs.ref_ = LUA_NOREF; } return *this; } LuaRef::operator bool() const { return ref_ != LUA_NOREF; } void LuaRef::reset(lua_State* aL, int index) { unref(); if (aL != nullptr) { L = aL; lua_pushvalue(L, index); ref_ = luaL_ref(L, LUA_REGISTRYINDEX); } } void LuaRef::push() const { lua_rawgeti(L, LUA_REGISTRYINDEX, ref_); } lua_State* LuaRef::state() const { return L; } void LuaRef::unref() const { if (L && ref_ != LUA_NOREF && ref_ != LUA_REFNIL) luaL_unref(L, LUA_REGISTRYINDEX, ref_); } LuaFunction::LuaFunction(): LuaRef(), trackback_(0),returnCnt_(0) { } LuaFunction::LuaFunction(lua_State* aL, int index) : LuaRef(aL, index), trackback_(0),returnCnt_(0) { luaL_checktype(aL, index, LUA_TFUNCTION); } LuaFunction::LuaFunction(const LuaFunction& other): LuaRef() { *this = other; } LuaFunction& LuaFunction::operator=(const LuaFunction& rhs) { if (this != &rhs) { returnCnt_ = rhs.returnCnt_; checkReturn_ = rhs.checkReturn_; rhs.push(); luaL_checktype(rhs.L, -1, LUA_TFUNCTION); reset(rhs.L, -1); lua_pop(L, 1); } return *this; } LuaFunction::LuaFunction(LuaFunction&& other): LuaRef() { *this = std::move(other); } LuaFunction& LuaFunction::operator=(LuaFunction&& rhs) { if (this != &rhs) { unref(); L = rhs.L; ref_ = rhs.ref_; returnCnt_ = rhs.returnCnt_; checkReturn_ = rhs.checkReturn_; rhs.L = nullptr; rhs.ref_ = LUA_NOREF; 
rhs.checkReturn_ = nullptr; } return *this; } void LuaFunction::operator()() const { ppush(); pcall(); } void LuaFunction::ppush() const { lua_getglobal(L, "__G__TRACKBACK__"); trackback_ = lua_gettop(L); push(); luaL_checktype(L, -1, LUA_TFUNCTION); } void LuaFunction::pcall() const { int argc = lua_gettop(L) - trackback_ - 1; lua_pcall(L, argc, returnCnt_, trackback_); if (checkReturn_) checkReturn_(L); lua_settop(L, trackback_ - 1); // remove trackback and any thing above it. } void LuaFunction::pusharg(bool v) const { lua_pushboolean(L, v); } void LuaFunction::pusharg(float v) const { lua_pushnumber(L, v); } void LuaFunction::pusharg(double v) const { lua_pushnumber(L, v); } void LuaFunction::pusharg(int v) const { lua_pushinteger(L, v); } void LuaFunction::pusharg(const std::string& v) const { lua_pushlstring(L, v.data(), v.size()); } void LuaFunction::pusharg(const char* v) const { lua_pushstring(L, v); } void LuaFunction::pusharg(const Data& v) const { lua_pushlstring(L, v.buf, v.len); }
mit
7odri9o/Sunshine
app/src/main/java/com/dreamdevs/sunshine/fragment/DetailFragment.java
9586
package com.dreamdevs.sunshine.fragment; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.support.v4.view.MenuItemCompat; import android.support.v7.widget.ShareActionProvider; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import com.dreamdevs.sunshine.R; import com.dreamdevs.sunshine.data.WeatherContract; import com.dreamdevs.sunshine.util.Utility; import com.dreamdevs.sunshine.view.WindCompass; public class DetailFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> { private static final String LOG_TAG = DetailFragment.class.getSimpleName(); // These indices are tied to DETAIL_COLUMNS. If DETAIL_COLUMNS changes, these // must change. public static final int COL_WEATHER_ID = 0; public static final int COL_WEATHER_DATE = 1; public static final int COL_WEATHER_DESC = 2; public static final int COL_WEATHER_MAX_TEMP = 3; public static final int COL_WEATHER_MIN_TEMP = 4; public static final int COL_WEATHER_HUMIDITY = 5; public static final int COL_WEATHER_PRESSURE = 6; public static final int COL_WEATHER_WIND_SPEED = 7; public static final int COL_WEATHER_DEGREES = 8; public static final int COL_WEATHER_CONDITION_ID = 9; private static final String FORECAST_SHARE_HASHTAG = " #SunshineApp"; private static final int DETAIL_LOADER = 0; private Uri mUri; public static final String DETAIL_URI = "URI"; private static final String[] DETAIL_COLUMNS = { WeatherContract.WeatherEntry.TABLE_NAME + "." 
+ WeatherContract.WeatherEntry._ID, WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.WeatherEntry.COLUMN_SHORT_DESC, WeatherContract.WeatherEntry.COLUMN_MAX_TEMP, WeatherContract.WeatherEntry.COLUMN_MIN_TEMP, WeatherContract.WeatherEntry.COLUMN_HUMIDITY, WeatherContract.WeatherEntry.COLUMN_PRESSURE, WeatherContract.WeatherEntry.COLUMN_WIND_SPEED, WeatherContract.WeatherEntry.COLUMN_DEGREES, WeatherContract.WeatherEntry.COLUMN_WEATHER_ID, // This works because the WeatherProvider returns location data joined with // weather data, even though they're stored in two different tables. WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING }; private ShareActionProvider mShareActionProvider; private String mForecast; private ImageView mIconView; private TextView mFriendlyDateView; private TextView mDateView; private TextView mDescriptionView; private TextView mHighTempView; private TextView mLowTempView; private TextView mHumidityView; private TextView mWindView; private TextView mPressureView; private WindCompass mCompass; public DetailFragment() { setHasOptionsMenu(true); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { Bundle arguments = getArguments(); if (arguments != null) { mUri = arguments.getParcelable(DetailFragment.DETAIL_URI); } View rootView = inflater.inflate(R.layout.fragment_detail, container, false); mIconView = (ImageView) rootView.findViewById(R.id.detail_icon); mDateView = (TextView) rootView.findViewById(R.id.detail_date_textview); mFriendlyDateView = (TextView) rootView.findViewById(R.id.detail_day_textview); mDescriptionView = (TextView) rootView.findViewById(R.id.detail_forecast_textview); mHighTempView = (TextView) rootView.findViewById(R.id.detail_high_textview); mLowTempView = (TextView) rootView.findViewById(R.id.detail_low_textview); mHumidityView = (TextView) rootView.findViewById(R.id.detail_humidity_textview); mWindView = (TextView) 
rootView.findViewById(R.id.detail_wind_textview); mPressureView = (TextView) rootView.findViewById(R.id.detail_pressure_textview); mCompass = (WindCompass) rootView.findViewById(R.id.wind_compass); return rootView; } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { // Inflate the menu; this adds items to the action bar if it is present. inflater.inflate(R.menu.detailfragment, menu); // Retrieve the share menu item MenuItem menuItem = menu.findItem(R.id.action_share); // Get the provider and hold onto it to set/change the share intent. mShareActionProvider = (ShareActionProvider) MenuItemCompat.getActionProvider(menuItem); // If onLoadFinished happens before this, we can go ahead and set the share intent now. if (mForecast != null) { mShareActionProvider.setShareIntent(createShareForecastIntent()); } } private Intent createShareForecastIntent() { Intent shareIntent = new Intent(Intent.ACTION_SEND); shareIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_DOCUMENT); shareIntent.setType("text/plain"); shareIntent.putExtra(Intent.EXTRA_TEXT, mForecast + FORECAST_SHARE_HASHTAG); return shareIntent; } @Override public void onActivityCreated(Bundle savedInstanceState) { getLoaderManager().initLoader(DETAIL_LOADER, null, this); super.onActivityCreated(savedInstanceState); } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args) { if (null != mUri) { // Now create and return a CursorLoader that will take care of // creating a Cursor for the data being displayed. 
return new CursorLoader( getActivity(), mUri, DETAIL_COLUMNS, null, null, null ); } return null; } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor data) { if (data != null && data.moveToFirst()) { // Read date from cursor and update views for day of week and date long date = data.getLong(COL_WEATHER_DATE); String friendlyDateText = Utility.getDayName(getActivity(), date); String dateText = Utility.getFormattedMonthDay(getActivity(), date); mFriendlyDateView.setText(friendlyDateText); mDateView.setText(dateText); // Read description from cursor and update view String description = data.getString(COL_WEATHER_DESC); mDescriptionView.setText(description); // Read weather condition ID from cursor int weatherId = data.getInt(COL_WEATHER_CONDITION_ID); // Use weather art image mIconView.setImageResource(Utility.getArtResourceForWeatherCondition(weatherId)); mIconView.setContentDescription(description); double high = data.getDouble(COL_WEATHER_MAX_TEMP); String highString = Utility.formatTemperature(getActivity(), high); mHighTempView.setText(highString); // Read low temperature from cursor and update view double low = data.getDouble(COL_WEATHER_MIN_TEMP); String lowString = Utility.formatTemperature(getActivity(), low); mLowTempView.setText(lowString); // Read humidity from cursor and update view float humidity = data.getFloat(COL_WEATHER_HUMIDITY); mHumidityView.setText(getActivity().getString(R.string.format_humidity, humidity)); // Read wind speed and direction from cursor and update view float windSpeedStr = data.getFloat(COL_WEATHER_WIND_SPEED); float windDirStr = data.getFloat(COL_WEATHER_DEGREES); mWindView.setText(Utility.getFormattedWind(getActivity(), windSpeedStr, windDirStr)); // Read pressure from cursor and update view float pressure = data.getFloat(COL_WEATHER_PRESSURE); mPressureView.setText(getActivity().getString(R.string.format_pressure, pressure)); mCompass.updateDirection(windDirStr); mCompass.setVisibility(View.VISIBLE); // We 
still need this for the share intent mForecast = String.format("%s - %s - %s/%s", dateText, description, high, low); // If onCreateOptionsMenu has already happened, we need to update the share intent now. if (mShareActionProvider != null) { mShareActionProvider.setShareIntent(createShareForecastIntent()); } } } public void onLocationChanged(String newLocation) { // replace the uri, since the location has changed Uri uri = mUri; if (null != uri) { long date = WeatherContract.WeatherEntry.getDateFromUri(uri); Uri updatedUri = WeatherContract.WeatherEntry.buildWeatherLocationWithDate(newLocation, date); mUri = updatedUri; getLoaderManager().restartLoader(DETAIL_LOADER, null, this); } } @Override public void onLoaderReset(Loader<Cursor> loader) { } }
mit
themichaelhall/bluemvc-core
src/Collections/SessionItemCollection.php
4667
<?php /** * This file is a part of the bluemvc-core package. * * Read more at https://bluemvc.com/ */ declare(strict_types=1); namespace BlueMvc\Core\Collections; use BlueMvc\Core\Interfaces\Collections\SessionItemCollectionInterface; /** * Class representing a collection of session items. * * @since 1.0.0 */ class SessionItemCollection implements SessionItemCollectionInterface { /** * SessionItemCollection constructor. * * @since 2.1.0 * * @param array $options The options to pass to session_start() method. */ public function __construct(array $options = []) { $this->options = $options; $this->isInitialized = false; } /** * Returns the number of session items. * * @since 1.0.0 * * @return int The number of session items. */ public function count(): int { $this->doInit(); return count($_SESSION); } /** * Returns the current session item value. * * @since 1.0.0 * * @return mixed The current session item value. */ public function current() { $this->doInit(); return current($_SESSION); } /** * Returns the session item value by session item name if it exists, null otherwise. * * @since 1.0.0 * * @param string $name The session item name. * * @return mixed|null The session item value by session item name if it exists, null otherwise. */ public function get(string $name) { $this->doInit(); if (!isset($_SESSION[$name])) { return null; } return $_SESSION[$name]; } /** * Returns the current session item name. * * @since 1.0.0 * * @return string The current session item name. */ public function key(): string { $this->doInit(); return strval(key($_SESSION)); } /** * Moves forwards to the next session item. * * @since 1.0.0 */ public function next(): void { $this->doInit(); next($_SESSION); } /** * Removes a session item by session item name. * * @since 1.0.0 * * @param string $name The session item name. 
*/ public function remove(string $name): void { if ($this->hasNoSession()) { return; } $this->doInit(true); unset($_SESSION[$name]); } /** * Rewinds the session item collection to first element. * * @since 1.0.0 */ public function rewind(): void { $this->doInit(); reset($_SESSION); } /** * Sets a session item value by session item name. * * @since 1.0.0 * * @param string $name The session item name. * @param mixed $value The session item value. */ public function set(string $name, $value): void { $this->doInit(true); $_SESSION[$name] = $value; } /** * Returns true if the current session item is valid. * * @since 1.0.0 * * @return bool True if the current session item is valid. */ public function valid(): bool { $this->doInit(); return key($_SESSION) !== null; } /** * Initializes session if it is not already initialized. * * @param bool $write If true, initialize for write, if false initialize for read-only. */ private function doInit(bool $write = false): void { if ($this->isInitialized) { return; } if (!$write && $this->hasNoSession()) { $_SESSION = []; return; } if (session_status() !== PHP_SESSION_ACTIVE) { session_start($this->options); } $this->isInitialized = true; } /** * Checks if there is no session available, without unnecessary creation of session cookie. * * @return bool True if there is no session, false otherwise. */ private function hasNoSession(): bool { if ($this->isInitialized) { return false; } if (!boolval(ini_get('session.use_only_cookies'))) { // Can't tell for sure, since only cookie is checked. return false; } if (isset($_COOKIE[session_name()])) { return false; } return true; } /** * @var array The options to pass to session_start() method. */ private $options; /** * @var bool True if session is initialized, false otherwise. */ private $isInitialized; }
mit
Choosue/lintcode
src/test/chushu/lintcode/datastructure/LongestConsecutiveSequence.java
2359
package test.chushu.lintcode.datastructure; import test.chushu.util.ArrayUtils; import java.util.Arrays; import java.util.HashSet; /** * Created by liuchushu on 4/14/16. */ public class LongestConsecutiveSequence { /** * @param num: A list of integers * @return an integer */ public int longestConsecutive(int[] num) { // write you code here if (num == null || num.length == 0) { return 0; } // 1. Add numbers into a hash set. O(n); HashSet<Integer> set = new HashSet<Integer>(); for (int i = 0; i < num.length; i++) { set.add(num[i]); } // 2. Count the longest consecutive sequence from front to end. O(n) int max = 1; for (int i = 0; i < num.length; i++) { if (set.isEmpty()) { break; } if (!set.contains(num[i])) { continue; } // Calculate consecutive sequence from num[i] up int up = 0; int curNum = num[i]; set.remove(curNum); while (set.contains(curNum + 1)) { set.remove(curNum + 1); up++; curNum++; } // Calculate consecutive sequence from num[i] down int down = 0; curNum = num[i]; while (set.contains(curNum - 1)) { set.remove(curNum - 1); down++; curNum--; } max = up + down + 1 > max ? up + down + 1 : max; } return max; } /** * @param num: A list of integers * @return an integer */ public int longestConsecutivePresort(int[] num) { // write you code here if (num == null || num.length == 0) { return 0; } // 1. Sort the array in O(n * log(n)) time. Arrays.sort(num); // 2. Count the longest consecutive sequence from front to end. O(n) int cur = 1; int max = 1; for (int i = 0; i < num.length - 1; i++) { if (num[i + 1] == num[i] + 1) { cur += 1; } else { // record current max, reset cur 0; max = cur > max ? cur : max; cur = 1; } } return max; } }
mit
qvazzler/Flexget
flexget/plugins/input/ftp_list.py
7937
from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin from past.utils import old_div from future.moves.urllib.parse import quote import logging import ftplib import os import re from flexget.utils.tools import native_str_to_text from flexget import plugin from flexget.event import event from flexget.entry import Entry from flexget.config_schema import one_or_more log = logging.getLogger('ftp_list') class InputFtpList(object): """ Generate entries from a ftp listing Configuration: ftp_list: config: name: <ftp name> username: <username> password: <password> host: <host to connect> port: <port> use-ssl: <yes/no> encoding: <auto/utf8/ascii> files-only: <yes/no> recursive: <yes/no> get-size: <yes/no> dirs: - <directory 1> - <directory 2> - .... """ encodings = ['auto', 'utf8', 'ascii'] schema = { 'type': 'object', 'properties': { 'config': { 'type': 'object', 'properties': { 'name': {'type': 'string'}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'host': {'type': 'string'}, 'port': {'type': 'integer'}, 'use-ssl': {'type': 'boolean', 'default': False}, 'encoding': {'type': 'string', 'enum': encodings, 'default': 'auto'}, 'files-only': {'type': 'boolean', 'default': False}, 'recursive': {'type': 'boolean', 'default': False}, 'get-size': {'type': 'boolean', 'default': True} }, 'additionProperties': False, 'required': ['name', 'username', 'password', 'host', 'port'], }, 'dirs': one_or_more({'type': 'string'}), }, 'required': ['config'], 'additionalProperties': False } def on_task_input(self, task, config): connection_config = config['config'] if connection_config['use-ssl']: ftp = ftplib.FTP_TLS() else: ftp = ftplib.FTP() # ftp.set_debuglevel(2) log.debug('Trying connecting to: %s', (connection_config['host'])) try: ftp.connect(connection_config['host'], connection_config['port']) ftp.login(connection_config['username'], connection_config['password']) if 
connection_config['use-ssl']: ftp.prot_p() except ftplib.all_errors as e: raise plugin.PluginError(e) log.debug('Connected.') encoding = connection_config['encoding'] files_only = connection_config['files-only'] recursive = connection_config['recursive'] get_size = connection_config['get-size'] mlst_supported = False feat_response = ftp.sendcmd('FEAT').splitlines() supported_extensions = [feat_item.strip().upper() for feat_item in feat_response[1:len(feat_response) - 1]] if encoding.lower() == 'auto' and 'UTF8' in supported_extensions: encoding = 'utf8' else: encoding = 'ascii' for supported_extension in supported_extensions: if supported_extension.startswith('MLST'): mlst_supported = True break if not mlst_supported: log.warning('MLST Command is not supported by the FTP server %s@%s:%s', connection_config['username'], connection_config['host'], connection_config['port']) ftp.sendcmd('TYPE I') ftp.set_pasv(True) entries = [] for path in config['dirs']: baseurl = "ftp://%s:%s@%s:%s/" % (connection_config['username'], connection_config['password'], connection_config['host'], connection_config['port']) self._handle_path(entries, ftp, baseurl, path, mlst_supported, files_only, recursive, get_size, encoding) ftp.close() return entries def _handle_path(self, entries, ftp, baseurl, path='', mlst_supported=False, files_only=False, recursive=False, get_size=True, encoding=None): dirs = self.list_directory(ftp, path) for p in dirs: if encoding: p = native_str_to_text(p, encoding=encoding) # Clean file list when subdirectories are used p = p.replace(path + '/', '') mlst = {} if mlst_supported: mlst_output = ftp.sendcmd('MLST ' + path + '/' + p) clean_mlst_output = [line.strip().lower() for line in mlst_output.splitlines()][1] mlst = self.parse_mlst(clean_mlst_output) else: element_is_directory = self.is_directory(ftp, path + '/' + p) if element_is_directory: mlst['type'] = 'dir' log.debug('%s is a directory', p) else: mlst['type'] = 'file' log.debug('%s is a file', p) if 
recursive and mlst.get('type') == 'dir': self._handle_path(entries, ftp, baseurl, path + '/' + p, mlst_supported, files_only, recursive, get_size, encoding) if not files_only or mlst.get('type') == 'file': url = baseurl + quote(path) + '/' + quote(p) log.debug("Encoded URL: " + url) title = os.path.basename(p) log.info('Accepting entry "%s" [%s]' % (path + '/' + p, mlst.get('type') or "unknown",)) entry = Entry(title, url) if get_size and 'size' not in mlst: if mlst.get('type') == 'file': entry['content_size'] = old_div(ftp.size(path + '/' + p), (1024 * 1024)) log.debug('(FILE) Size = %s', entry['content_size']) elif mlst.get('type') == 'dir': entry['content_size'] = self.get_folder_size(ftp, path, p) log.debug('(DIR) Size = %s', entry['content_size']) elif get_size: entry['content_size'] = old_div(float(mlst.get('size')), (1024 * 1024)) entries.append(entry) def parse_mlst(self, mlst): re_results = re.findall('(.*?)=(.*?);', mlst) parsed = {} for k, v in re_results: parsed[k] = v return parsed def is_directory(self, ftp, elementpath): try: original_wd = ftp.pwd() ftp.cwd(elementpath) ftp.cwd(original_wd) return True except ftplib.error_perm: return False def list_directory(self, ftp, path): try: dirs = ftp.nlst(path) except ftplib.error_perm as e: # ftp returns 550 on empty dirs if str(e).startswith('550 '): log.debug('Directory %s is empty.', path) dirs = [] else: raise plugin.PluginError(e) return dirs def get_folder_size(self, ftp, path, p): size = 0 dirs = self.list_directory(ftp, path + '/' + p) for filename in dirs: filename = filename.replace(path + '/' + p + '/', '') try: size += old_div(ftp.size(path + '/' + p + '/' + filename), (1024 * 1024)) except ftplib.error_perm: size += self.get_folder_size(ftp, path + '/' + p, filename) return size @event('plugin.register') def register_plugin(): plugin.register(InputFtpList, 'ftp_list', api_ver=2, groups=['list'])
mit
mRabitsky/RIPmrHorn
RIP Mr Horn/src/pThree/DLinkedList.java
2950
package pThree; public class DLinkedList<T> { private DListNode<T> firstNode; private DListNode<T> lastNode; private int size; /** * Construct an empty list */ public DLinkedList() { firstNode = null; lastNode = null; size=0; } /** * Returns true if the list contains no elements */ public boolean isEmpty() { return size==0; } /** * Inserts the argument as the first element of this list. */ public void addFirst(T t) { DListNode <T> temp = new DListNode <T> (t, firstNode, null); firstNode = temp; size++; if(null==lastNode) lastNode=firstNode; } /** * Inserts the argument as the last element of this list. */ public void addLast(T t) { DListNode <T> temp = new DListNode <T> (t, null, lastNode); lastNode = temp; size++; if(null==firstNode) firstNode=lastNode; } public DListNode<T> getHead() { return firstNode; } /** * Removes and returns the first element of this list. */ public T removeFirst() { size--; T temp=firstNode.getValue(); firstNode=firstNode.getNext(); firstNode.setPrevious(null); return temp; } /** * Removes and returns the last element of this list. */ public T removeLast() { size--; T temp=lastNode.getValue(); lastNode = lastNode.getPrevious(); lastNode.setNext(null); return temp; } /** * Returns a String representation of the list. */ public String toString() { if(null==firstNode) return "[]"; if(firstNode==lastNode) return "["+firstNode.getValue()+"]"; String str = "["; for(DListNode<T> temp=firstNode; temp.getNext()!=null; temp=temp.getNext()) str+=temp.getValue()+", "; str+=lastNode.getValue()+"]"; return str; } /** * Returns the number of elements in the list as an int. */ public int size() { return size; } /** * Removes all of the elements from this list. */ private void clear() { firstNode=null; lastNode=null; size=0; } /** * Returns a DListIterator. */ public DListIterator<T> iterator() { return new DListIterator<T>(this); } public static void main(String[] args) { DLinkedList<String> dll=new DLinkedList<>(); System.out.println("Empty? 
"+dll.isEmpty()); dll.addFirst("a"); System.out.println(dll); dll.addLast("b"); System.out.println(dll); dll.addLast("c"); dll.addLast("d"); dll.addLast("e"); dll.addLast("f"); System.out.println(dll); System.out.println(dll.size()); System.out.println(dll.removeFirst()); System.out.println("Remove first: "+dll); System.out.println(dll.removeLast()); System.out.println("Remove last: "+dll); DListIterator<String> dli=dll.iterator(); System.out.println("Iterator has next? "+dli.hasNext()+" ("+dli.next()+")"); dli.previous(); System.out.println("Iterator has previous? "+dli.hasPrevious()+" ("+dli.previous()+")"); } }
mit
EverlessDrop41/NumberGuesser
NumberGuesser/Properties/AssemblyInfo.cs
996
using System.Reflection; using System.Runtime.CompilerServices; // Information about this assembly is defined by the following attributes. // Change them to the values specific to your project. [assembly: AssemblyTitle ("NumberGuesser")] [assembly: AssemblyDescription ("")] [assembly: AssemblyConfiguration ("")] [assembly: AssemblyCompany ("")] [assembly: AssemblyProduct ("")] [assembly: AssemblyCopyright ("everless-drop")] [assembly: AssemblyTrademark ("")] [assembly: AssemblyCulture ("")] // The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}". // The form "{Major}.{Minor}.*" will automatically update the build and revision, // and "{Major}.{Minor}.{Build}.*" will update just the revision. [assembly: AssemblyVersion ("1.0.*")] // The following attributes are used to specify the signing key for the assembly, // if desired. See the Mono documentation for more information about signing. //[assembly: AssemblyDelaySign(false)] //[assembly: AssemblyKeyFile("")]
mit
samphippen/spe
src/uk/me/graphe/shared/graphmanagers/GraphManager2dImpl.java
12073
package uk.me.graphe.shared.graphmanagers; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import uk.me.graphe.client.Console; import uk.me.graphe.client.EdgeDrawable; import uk.me.graphe.client.VertexDrawable; import uk.me.graphe.shared.Edge; import uk.me.graphe.shared.Graph; import uk.me.graphe.shared.Vertex; import uk.me.graphe.shared.VertexDirection; import com.google.gwt.core.client.GWT; public class GraphManager2dImpl implements GraphManager2d { private List<Edge> mEdges = new ArrayList<Edge>(); private List<Runnable> mRedrawCallbacks = new ArrayList<Runnable>(); private Map<Vertex, List<Edge>> mVertexEdgeMap = new HashMap<Vertex, List<Edge>>(); private Map<Vertex, VertexDrawable> mVertexRenderMap = new HashMap<Vertex, VertexDrawable>(); private Map<Edge, EdgeDrawable> mEdgeRenderMap = new HashMap<Edge, EdgeDrawable>(); private List<Vertex> mVertices = new ArrayList<Vertex>(); protected GraphManager2dImpl() { if (GWT.isClient()) { Console.log("graphmanager2d constructed"); } mName = "Untitled graph"; } @Override public boolean isEdgeBetween(Vertex v1, Vertex v2) { boolean result = false; for (Edge e : mVertexEdgeMap.get(v1)) { if (e.getToVertex().equals(v2)) { result = true; break; } } return result; } @Override public void addEdge(Vertex v1, Vertex v2, VertexDirection dir, int weight) { Edge e = new Edge(v1, v2, dir); e.setWeight(weight); if (!mEdges.contains(e)) { mEdges.add(e); mVertexEdgeMap.get(v1).add(e); mVertexEdgeMap.get(v2).add(e); VertexDrawable vd1 = mVertexRenderMap.get(e.getFromVertex()); VertexDrawable vd2 = mVertexRenderMap.get(e.getToVertex()); int l1 = vd1.getCenterX(); int l2 = vd2.getCenterX(); int t1 = vd1.getCenterY(); int t2 = vd2.getCenterY(); // swap l1 and t1 with l2 and t2 if we're entering the "from" node // NOTE: that's an in place swap algorithm using xor if (e.enters(e.getFromVertex())) { l1 ^= l2; l2 ^= l1; l1 ^= l2; t1 
^= t2; t2 ^= t1; t1 ^= t2; } mEdgeRenderMap .put(e, new EdgeDrawable(l1, t1, l2, t2, e.getWeight(), e.getDirection())); } this.invalidate(); } @Override public void addRedrawCallback(Runnable r) { mRedrawCallbacks.add(r); } @Override public void addVertex(Vertex v, int xPosition, int yPosition, int size) { if (!mVertexEdgeMap.containsKey(v)) { if (GWT.isClient()) { Console.log("adding a vertex in graphmanager2dimpl: " + this); Console.log("before size is: " + mVertices.size()); } mVertices.add(v); if (GWT.isClient()) { Console.log("after size is: " + mVertices.size()); } // left and top are x and y - size/2 int halfSize = size / 2; int left = xPosition - halfSize; int top = yPosition - halfSize; mVertexRenderMap.put(v, new VertexDrawable(left, top, size, size, v.getLabel())); mVertexEdgeMap.put(v, new ArrayList<Edge>()); if (GWT.isClient()) { Console.log("vertices size:" + mVertices.size()); } } this.invalidate(); } @Override public VertexDrawable getVertexDrawableAt(int x, int y) { for (VertexDrawable vd : mVertexRenderMap.values()) { if (vd.contains(x, y)) return vd; } return null; } @Override public EdgeDrawable getEdgeDrawableAt(int x, int y) { for (EdgeDrawable ed : mEdgeRenderMap.values()) { if (ed.contains(x, y)) return ed; } return null; } @Override public Collection<EdgeDrawable> getEdgeDrawables() { return Collections.unmodifiableCollection(mEdgeRenderMap.values()); } @Override public EdgeDrawable getDrawableFromEdge(Edge e) { return mEdgeRenderMap.get(e); } @Override public VertexDrawable getDrawableFromVertex(Vertex v) { return mVertexRenderMap.get(v); } @Override public Graph getUnderlyingGraph() { return new Graph(mEdges, mVertices); } @Override public Collection<VertexDrawable> getVertexDrawables() { return Collections.unmodifiableCollection(mVertexRenderMap.values()); } @Override public void moveVertexTo(Vertex v, int xPosition, int yPosition) { VertexDrawable vd = mVertexRenderMap.get(v); if (GWT.isClient()) Console.log("names are same: " + 
v.getLabel().equals(sName1)); int halfWidth = vd.getWidth() / 2; int halfHeight = vd.getHeight() / 2; int left = xPosition - halfWidth; int top = yPosition - halfHeight; vd.updateBoundingRectangle(left, top, vd.getWidth(), vd.getHeight()); if (GWT.isClient()) Console.log(v + " " + mVertexEdgeMap.get(v)); // update edges // VertexDrawable vd1 = mVertexRenderMap.get(e.getFromVertex()); for (Edge e : mVertexEdgeMap.get(v)) { if (GWT.isClient()) Console.log("in move g1"); EdgeDrawable ed = mEdgeRenderMap.get(e); if (GWT.isClient()) Console.log("in move g2"); if (v.equals(e.getFromVertex())) { if (GWT.isClient()) Console.log("in move g3"); ed.setStartX(vd.getCenterX()); ed.setStartY(vd.getCenterY()); } else { if (GWT.isClient()) Console.log("in move g4"); ed.setEndX(vd.getCenterX()); ed.setEndY(vd.getCenterY()); } } if (GWT.isClient()) Console.log("in move h"); this.invalidate(); } @Override public void removeAllEdges(Vertex v1, Vertex v2) { List<Edge> toDelete = new ArrayList<Edge>(); for (Edge e : mEdges) { if ((e.enters(v1) || e.exits(v1)) && (e.enters(v2) || e.exits(v2))) { toDelete.add(e); } } mEdges.removeAll(toDelete); mVertexEdgeMap.get(v1).clear(); mVertexEdgeMap.get(v2).clear(); this.invalidate(); } @Override public void removeEdge(Edge e) { if (GWT.isClient()) Console.log("removing edge e: " + e); mEdges.remove(e); mEdgeRenderMap.remove(e); mVertexEdgeMap.get(e.getFromVertex()).remove(e); mVertexEdgeMap.get(e.getToVertex()).remove(e); this.invalidate(); } @Override public void removeVertex(Vertex v) { mVertices.remove(v); mVertexRenderMap.remove(v); if (mVertexEdgeMap.containsKey(v)) { if (GWT.isClient()) Console.log("Vertex " + v.getLabel() + "has " + String.valueOf(mVertexEdgeMap.get(v).size()) + " edges"); for (Edge e : mVertexEdgeMap.get(v)) { if (GWT.isClient()) Console.log("Remove edge: " + e.getFromVertex().getLabel() + " to " + e.getToVertex().getLabel()); mEdges.remove(e); if (GWT.isClient()) Console.log("Removed from edges list"); 
mEdgeRenderMap.remove(e); if (e.getToVertex().equals(v)) { mVertexEdgeMap.get(e.getFromVertex()).remove(e); } else { mVertexEdgeMap.get(e.getToVertex()).remove(e); } } mVertexEdgeMap.remove(v); } this.invalidate(); } @Override public void scaleVertex(Vertex v, int newSize) { VertexDrawable vd = mVertexRenderMap.get(v); int newLeft = vd.getLeft() - newSize / 2; int newTop = vd.getTop() - newSize / 2; int newWidth = newSize; int newHeight = newSize; vd.updateBoundingRectangle(newLeft, newTop, newWidth, newHeight); this.invalidate(); } public Vertex getVertexFromDrawable(VertexDrawable vd) { for (Vertex v : mVertices) { if (mVertexRenderMap.get(v) == vd) { return v; } } return null; } public Edge getEdgeFromDrawable(EdgeDrawable ed) { for (Edge e : mEdges) { if (mEdgeRenderMap.get(e) == ed) { return e; } } return null; } public boolean isDirectedEdgeBetweenVertices(Vertex v1, Vertex v2) { boolean b = false; for (Edge e : mEdges) { if ((e.getFromVertex() == v1 && e.getToVertex() == v2 && e.getDirection() == VertexDirection.fromTo) || (e.getFromVertex() == v2 && e.getToVertex() == v1 && e.getDirection() == VertexDirection.toFrom)) { b = true; break; } } return b; } public void invalidate() { for (final Runnable r : mRedrawCallbacks) { r.run(); } } @Override public VertexDrawable getVertexDrawable(String s) { return mVertexRenderMap.get(new Vertex(s)); } /* * checks if a vertex name is already taken */ public boolean isVertexNameAvailable(String s) { boolean b = true; for (Vertex v : mVertices) { if (v.toString().equals(s)) { b = false; break; } } return b; } @Override public void setVertexStyle(Vertex node, int mStyle) { mVertexRenderMap.get(node).setStyle(mStyle); this.invalidate(); } private String mName; @Override public String getName() { return mName; } @Override public void setName(String s) { mName = s; } @Override public void setEdgeWeight(EdgeDrawable ed, int weight) { for (Edge e : mEdgeRenderMap.keySet()) { if (mEdgeRenderMap.get(e).equals(ed)) { 
e.setWeight(weight); mEdgeRenderMap.remove(e); ed.setWeight(weight); mEdgeRenderMap.put(e, ed); } } } private static String sName1; @Override public void renameVertex(String label, String name) { if (mVertices.contains(new Vertex(label))) { sName1 = name; mVertices.remove(new Vertex(label)); mVertices.add(new Vertex(name)); VertexDrawable vd = mVertexRenderMap.get(new Vertex(label)); vd.rename(name); mVertexRenderMap.remove(new Vertex(label)); mVertexRenderMap.put(new Vertex(name), vd); List<Edge> edges = mVertexEdgeMap.get(new Vertex(label)); List<Edge> newEdges = new ArrayList<Edge>(); for (Edge e : edges) { mEdgeRenderMap.remove(e); e.replaceVertex(label, name); VertexDrawable v1 = mVertexRenderMap.get(e.getFromVertex()); VertexDrawable v2 = mVertexRenderMap.get(e.getToVertex()); mEdgeRenderMap.put(e, new EdgeDrawable(v1.getCenterX(), v1.getCenterY(), v2.getCenterX(), v2.getCenterY(), e.getWeight(), e.getDirection())); newEdges.add(e); } mVertexEdgeMap.remove(new Vertex(label)); if (GWT.isClient()) Console.log("putting array under name: " + name + " " + newEdges); mVertexEdgeMap.put(new Vertex(name), newEdges); if (GWT.isClient()) { Console.log("getting array back:" + mVertexEdgeMap.get(new Vertex(name))); } } } }
mit
tirpitz-verus/enigma-in-scala
core/src/main/scala/mlesiewski/enigmainscala/core/Plugboard.scala
1338
package mlesiewski.enigmainscala.core object Plugboard { def apply (pluggedPairs: Seq[(Char, Char)]): Plugboard = new Plugboard (pluggedPairs) } class Plugboard private[core] ( /** pairs of letters that will be swapped during encryption */ pluggedPairs: Seq[(Char, Char)] ) extends Part { /** a name of the part */ override val partName: String = "Plugboard" override val description: String = "Plugboard (Steckerbrett in German) was first introduced in 1930. It allowed " + /** describes what the part does */ "to swap one letter for another before and after the signal was passed from rotors. It had a stronger cryptographic " + "effect then adding a next rotor." val mappings: Map[Char, Char] = pluggedPairs /** mappings that the Plugboard will use to encode letters */ .filter (pair => Enigma.acceptedLetters.contains (pair._1) && Enigma.acceptedLetters.contains (pair._2)) .flatMap (pair => Seq ((pair._1, pair._2), (pair._2, pair._1))) .toMap require (mappings.size == pluggedPairs.size * 2, "mappings size should twice the length of plugged pairs but " + s"${pluggedPairs.size} pairs resulted in only $mappings") def encode (key: Char): Char = mappings.getOrElse (key.toUpper, key) }
mit
safesoftware/shout_at
lib/shout_at/shouter.rb
1214
# frozen_string_literal: true module ShoutAt class Shouter class << self attr_writer :logger def logger @logger ||= Logger.new($stdout) end end def initialize(group, level, **_options) @group = group.to_s.camelize @level = level.to_s.camelize end def logger Shouter.logger end # Common arguments, see subclasses for other allowed parameters # # @param subject [String] brief description of the incident # @param url [String] URL providing more information about the incident # @param exception [Exception] original exception, if any def shout(message, subject: nil, url: nil, exception: nil, **args) @subject = subject || (message.length > 33 ? "#{message[0..30]}..." : message) @url = url @exception = exception log_message(message, args) end private def log_message(message, args) logger.info "Shouter::#{@group}::#{@level}: #{@subject}: #{message}" logger.warn "Exception: #{@exception}" if @exception logger.warn @exception.backtrace.join("\n") if @exception&.backtrace logger.debug "Shouter::#{@group}::#{@level}: Args: #{args}" end end end
mit
qious/ss-panel
src/api/nodes/detail.js
301
const errors = require('../../lib/errors') const nodeService = require('../../service/node') module.exports = async (ctx) => { let { nodeId } = ctx.params let node = await nodeService.getAsync(nodeId) if (!node) { throw new errors.NotFound('未找到相关节点') } ctx.body = node }
mit
2B5/ia-3B5
module1/app/requester.js
767
var request = require('request'); class Requester{ constructor(address){ this.options = address; } doRequest(hasResponseInBody, cb){ request(this.options, function(error, response, body){ if(error){ console.log(error); cb("Sorry, cannot answer that."); } var resp = {}; try { console.log(body); resp = JSON.parse(body); if(hasResponseInBody) cb(resp.response); else cb(resp); } catch (error) { console.log(error) cb("Sorry, cannot answer that."); } }) } } module.exports = Requester;
mit
rosshendrickson-wf/education
talks/2015-10-21-debug/debug/slide4.go
449
package main import ( "log" "strconv" ) func main() { a := generateStrings(10) sendToLog(a) } func mutate(s string) string { l := sumLength(s) s += strconv.Itoa(l) return s } func sendToLog(s string) { b := mutate(s) log.Print(b[len(s)+10]) } func sumLength(s string) int { return len(s) } func generateStrings(n int) string { s := "" for i := 0; i < n; i++ { if i%2 == 0 { s += "A" } else { s += "H" } } return s }
mit
LocalJoost/WpWinNl
portable-win81+wpa81/WpWinNl/Converters/NullToCollapsedConverter.cs
804
using System; using System.Globalization; using System.Windows; #if !WINDOWS_PHONE using Windows.UI.Xaml; #endif namespace WpWinNl.Converters { /// <summary> /// Returns Visibility.Collapsed for any value that is null, empty, only whitespaces or an empty guid. /// </summary> public class NullToCollapsedConverter : BaseValueConverter { public override object Convert(object value, Type targetType, object parameter, CultureInfo culture) { return value == null || string.IsNullOrWhiteSpace(value.ToString()) || value.ToString() == Guid.Empty.ToString() ? Visibility.Collapsed : Visibility.Visible; } public override object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture) { return DependencyProperty.UnsetValue; } } }
mit
NiallBegley/nestgraph
db/migrate/20170722182703_add_humidity_and_state.rb
304
class AddHumidityAndState < ActiveRecord::Migration[5.1] def change add_column :records, :hvac_state, :string add_column :records, :humidity, :float add_column :records, :name, :string add_column :records, :device_id, :string add_column :records, :time_to_target, :string end end
mit
JayH117/Assignment-3
ZombieTower/Assets/Scripts/Bullet.cs
691
using UnityEngine; using System.Collections; public class Bullet : MonoBehaviour { public float speed = 6; public Transform target; void FixedUpdate() { if (target) { Vector3 dir = target.position - transform.position; GetComponent<Rigidbody>().velocity = dir.normalized * speed; } else { Destroy(gameObject); } } void OnTriggerEnter(Collider co) { HealthScript health = co.GetComponentInChildren<HealthScript>(); if (health) { health.decreaseZom(); Destroy(gameObject); } } }
mit
dai-shi/es-beautifier
tests/lib/rules/jsx-separateline-literals.js
856
const { RuleTester } = require('eslint'); const rule = require('../../../lib/rules/jsx-separateline-literals.js'); const ruleTester = new RuleTester(); const parserOptions = { ecmaVersion: 2015, ecmaFeatures: { jsx: true }, }; ruleTester.run('jsx-separateline-literals', rule, { valid: [{ code: 'x=(<div>aaa</div>)', parserOptions, }, { code: 'x=(\n <div>\n aaa\n </div>\n)', parserOptions, }], invalid: [{ code: 'x=(\n <div>aaa\n</div>\n);', errors: [1, 2].map(() => ({ message: 'JSXText in JSX Element should be on a separate line.', type: 'JSXElement', })), parserOptions, }, { code: 'x=(\n <div>\naaa</div>\n);', errors: [1, 2].map(() => ({ message: 'JSXText in JSX Element should be on a separate line.', type: 'JSXElement', })), parserOptions, }], });
mit
smeagonline-developers/OnlineEducationPlatform---SMEAGonline
src/Topxia/AdminBundle/Resources/public/js/controller/analysis/register.js
2017
define(function(require, exports, module) { var Morris=require("morris"); require("jquery.bootstrap-datetimepicker"); var autoSubmitCondition=require("./autoSubmitCondition.js"); var Validator = require('bootstrap.validator'); require('common/validator-rules').inject(Validator); var now = new Date(); exports.run = function() { if($('#data').length > 0){ var data = eval ("(" + $('#data').attr("value") + ")"); Morris.Line({ element: 'line-data', data: data, xkey: 'date', ykeys: ['count'], labels: ['注册人数'], xLabels:"day" }); } $("[name=endTime]").datetimepicker({ language: 'zh-CN', autoclose: true, format: 'yyyy-mm-dd', minView: 'month' }); $('[name=endTime]').datetimepicker('setEndDate', now); $('[name=endTime]').datetimepicker('setStartDate', $('#registerStartDate').attr("value")); $("[name=startTime]").datetimepicker({ language: 'zh-CN', autoclose: true, format: 'yyyy-mm-dd', minView: 'month' }); $('[name=startTime]').datetimepicker('setEndDate', now); $('[name=startTime]').datetimepicker('setStartDate', $('#registerStartDate').attr("value")); var validator = new Validator({ element: '#operation-form'}); validator.addItem({ element: '[name=startTime]', required: true, rule:'date_check' }); validator.addItem({ element: '[name=endTime]', required: true, rule:'date_check' }); validator.addItem({ element: '[name=analysisDateType]', required: true }); autoSubmitCondition.autoSubmitCondition(); }; });
mit
hybridgroup/taskmapper-redmine
lib/provider/redmine.rb
866
module TaskMapper::Provider # This is the Yoursystem Provider for taskmapper module Redmine include TaskMapper::Provider::Base PROJECT_API = RedmineAPI::Project TICKET_API = RedmineAPI::Issue # This is for cases when you want to instantiate using TaskMapper::Provider::Yoursystem.new(auth) def self.new(auth = {}) TaskMapper.new(:redmine, auth) end # declare needed overloaded methods here def authorize(auth = {}) @authentication ||= TaskMapper::Authenticator.new(auth) auth = @authentication if auth.server.blank? and auth.token.blank? raise "Please you should provide server and token" end RedmineAPI.authenticate(auth.server, auth.token) end def valid? begin RedmineAPI::Project.find(:all).size >= 0 rescue false end end end end
mit
Bojo966/SoftUni-JS-Fundamentals
Arrays-And-Matrices/add-remove-elements-from-array.js
541
function interpredCommands(input) { let initialValue = 1 let valuesArray = [1] for (var index = 1; index < input.length; index++) { var element = input[index] initialValue++ if (element === 'add') { valuesArray.push(initialValue) } else if (element === 'remove') { valuesArray.pop() } } if (valuesArray.length === 0) { console.log('Empty') } else { for (let i in valuesArray) { console.log(valuesArray[i]) } } }
mit
simplyspoke/archiproject
config/env/default.js
2418
'use strict'; module.exports = { app: { title: 'AchiProject', description: 'A project manager and archive for architects.', keywords: 'mongodb, express, angularjs, node.js, mongoose, passport', googleAnalyticsTrackingID: process.env.GOOGLE_ANALYTICS_TRACKING_ID || 'GOOGLE_ANALYTICS_TRACKING_ID' }, db: { promise: global.Promise }, port: process.env.PORT || 3000, host: process.env.HOST || '0.0.0.0', // DOMAIN config should be set to the fully qualified application accessible // URL. For example: https://www.myapp.com (including port if required). domain: process.env.DOMAIN, // Session Cookie settings sessionCookie: { // session expiration is set by default to 24 hours maxAge: 24 * (60 * 60 * 1000), // httpOnly flag makes sure the cookie is only accessed // through the HTTP protocol and not JS/browser httpOnly: true, // secure cookie should be turned to true to provide additional // layer of security so that the cookie is set only when working // in HTTPS mode. secure: false }, // sessionSecret should be changed for security measures and concerns sessionSecret: process.env.SESSION_SECRET || 'asdfs678sdg68sd', // sessionKey is the cookie session name sessionKey: 'sessionId', sessionCollection: 'sessions', // Lusca config csrf: { csrf: false, csp: false, xframe: 'SAMEORIGIN', p3p: 'ABCDEF', xssProtection: true }, logo: 'modules/core/client/img/brand/logo.png', favicon: 'modules/core/client/img/brand/favicon.ico', illegalUsernames: ['meanjs', 'administrator', 'password', 'admin', 'user', 'unknown', 'anonymous', 'null', 'undefined', 'api' ], uploads: { profile: { image: { dest: './modules/users/client/img/profile/uploads/', limits: { fileSize: 1 * 1024 * 1024 // Max file size in bytes (1 MB) } } } }, harvest: { subdomain: process.env.HARVEST_SUBDOMAIN || '', authorizationURL: process.env.HARVEST_AUTHORIZATIONURL || '', tokenURL: process.env.HARVEST_TOKENURL || '', clientID: process.env.HARVEST_ID || '', clientSecret: process.env.HARVEST_SECRET || '', callbackURL: 
process.env.HARVEST_CALLBACKURL || '' }, shared: { owasp: { allowPassphrases: true, maxLength: 128, minLength: 10, minPhraseLength: 20, minOptionalTestsToPass: 4 } } };
mit
karim/adila
database/src/main/java/adila/db/angus3a41_lenovo20a2020a40.java
238
// This file is automatically generated. package adila.db; /* * Lenovo A2020a40 * * DEVICE: angus3A41 * MODEL: Lenovo A2020a40 */ final class angus3a41_lenovo20a2020a40 { public static final String DATA = "Lenovo|A2020a40|"; }
mit
cncgl/springboot-doma-todo
src/main/java/com/example/DemoApplication.java
882
package com.example;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

/**
 * Spring Boot entry point that also declares reservation lookup handlers
 * backed by a Doma DAO.
 *
 * NOTE(review): {@code @RestController} below is commented out, so the
 * {@code @RequestMapping} methods in this class are presumably NOT registered
 * as HTTP endpoints in this configuration — confirm whether the mappings were
 * intentionally moved to a dedicated controller, or whether the annotation
 * should be restored.
 */
@SpringBootApplication
// @RestController
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }

    // Field injection of the Doma DAO.
    // NOTE(review): constructor injection is the generally recommended style,
    // but switching would change this class's construction interface.
    @Autowired
    ReservationDao reservationDao;

    /**
     * GET / — returns every reservation.
     */
    @RequestMapping(path = "/")
    List<Reservation> all() {
        return reservationDao.selectAll();
    }

    /**
     * GET /?name=... — returns reservations matching the given name
     * (matching semantics are defined by {@code ReservationDao.selectByName}).
     */
    @RequestMapping(path = "/", params = "name")
    List<Reservation> name(@RequestParam String name) {
        return reservationDao.selectByName(name);
    }
}
mit
KlausBrunner/solarpositioning
src/main/java/net/e175/klaus/solarpositioning/PSA.java
6456
package net.e175.klaus.solarpositioning;

import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;

/**
 * Compute sun position for a given date/time and longitude/latitude.
 *
 * This is a simple Java port of the "PSA" solar positioning algorithm, as documented in:
 *
 * Blanco-Muriel et al.: Computing the Solar Vector. Solar Energy Vol 70 No 5 pp 431-441.
 * http://dx.doi.org/10.1016/S0038-092X(00)00156-0
 *
 * According to the paper, "The algorithm allows .. the true solar vector to be determined with an accuracy of 0.5
 * minutes of arc for the period 1999–2015."
 *
 * @author Klaus A. Brunner
 * @deprecated PSA shouldn't be used after the year 2015.
 */
public final class PSA {

    private static final double D_EARTH_MEAN_RADIUS = 6371.01; // in km
    private static final double D_ASTRONOMICAL_UNIT = 149597890; // in km
    private static final double PI = Math.PI;
    private static final double TWOPI = (2 * PI);
    private static final double RAD = (PI / 180); // degrees -> radians conversion factor

    // Utility class; no instances.
    private PSA() {
    }

    /**
     * Calculate sun position for a given time and location.
     *
     * @param date Note that it's unclear how well the algorithm performs before the year 1990 or after the year 2015.
     * @param latitude in degrees (positive north of equator)
     * @param longitude in degrees (positive east of Greenwich)
     * @return Topocentric solar position (azimuth measured eastward from north)
     */
    public static AzimuthZenithAngle calculateSolarPosition(final GregorianCalendar date, final double latitude,
            final double longitude) {
        // Work on a UTC copy of the input so the caller's calendar (and its
        // original time zone) is never mutated.
        final Calendar utcTime = new GregorianCalendar(TimeZone.getTimeZone("GMT"));
        utcTime.setTimeInMillis(date.getTimeInMillis());

        // Main variables
        double dElapsedJulianDays;
        double dDecimalHours;
        double dEclipticLongitude;
        double dEclipticObliquity;
        double dRightAscension;
        double dDeclination;

        // Auxiliary variables
        double dY;
        double dX;

        // Calculate difference in days between the current Julian Day
        // and JD 2451545.0, which is noon 1 January 2000 Universal Time
        {
            long liAux1;
            long liAux2;
            double dJulianDate;
            // Calculate time of the day in UT decimal hours
            dDecimalHours = utcTime.get(Calendar.HOUR_OF_DAY)
                    + (utcTime.get(Calendar.MINUTE) + utcTime.get(Calendar.SECOND) / 60.0) / 60.0;
            // Calculate current Julian Day. The truncating integer division in
            // liAux1/liAux2 is intentional: it implements the standard
            // Gregorian-calendar-to-Julian-day-number conversion formula.
            liAux1 = (utcTime.get(Calendar.MONTH) + 1 - 14) / 12;
            liAux2 = (1461 * (utcTime.get(Calendar.YEAR) + 4800 + liAux1)) / 4
                    + (367 * (utcTime.get(Calendar.MONTH) + 1 - 2 - 12 * liAux1)) / 12
                    - (3 * ((utcTime.get(Calendar.YEAR) + 4900 + liAux1) / 100)) / 4
                    + utcTime.get(Calendar.DAY_OF_MONTH) - 32075;
            // -0.5 shifts from the calendar day number (noon-based) to the
            // continuous Julian date at 0h UT before adding the day fraction.
            dJulianDate = (liAux2) - 0.5 + dDecimalHours / 24.0;
            // Calculate difference between current Julian Day and JD 2451545.0
            dElapsedJulianDays = dJulianDate - 2451545.0;
        }

        // Calculate ecliptic coordinates (ecliptic longitude and obliquity of the
        // ecliptic in radians but without limiting the angle to be less than 2*Pi
        // (i.e., the result may be greater than 2*Pi)
        {
            double dMeanLongitude;
            double dMeanAnomaly;
            double dOmega;
            dOmega = 2.1429 - 0.0010394594 * dElapsedJulianDays;
            dMeanLongitude = 4.8950630 + 0.017202791698 * dElapsedJulianDays; // Radians
            dMeanAnomaly = 6.2400600 + 0.0172019699 * dElapsedJulianDays;
            // All coefficients below are the fitted constants from the PSA
            // paper; do not "simplify" them.
            dEclipticLongitude = dMeanLongitude + 0.03341607 * Math.sin(dMeanAnomaly)
                    + 0.00034894 * Math.sin(2 * dMeanAnomaly) - 0.0001134 - 0.0000203 * Math.sin(dOmega);
            dEclipticObliquity = 0.4090928 - 6.2140e-9 * dElapsedJulianDays + 0.0000396 * Math.cos(dOmega);
        }

        // Calculate celestial coordinates ( right ascension and declination ) in radians
        // but without limiting the angle to be less than 2*Pi (i.e., the result
        // may be greater than 2*Pi)
        {
            double dSinEclipticLongitude;
            dSinEclipticLongitude = Math.sin(dEclipticLongitude);
            dY = Math.cos(dEclipticObliquity) * dSinEclipticLongitude;
            dX = Math.cos(dEclipticLongitude);
            dRightAscension = Math.atan2(dY, dX);
            // Normalize right ascension into [0, 2*Pi).
            if (dRightAscension < 0.0) {
                dRightAscension = dRightAscension + 2 * Math.PI;
            }
            dDeclination = Math.asin(Math.sin(dEclipticObliquity) * dSinEclipticLongitude);
        }

        // Calculate local coordinates ( azimuth and zenith angle ) in degrees
        {
            double dGreenwichMeanSiderealTime;
            double dLocalMeanSiderealTime;
            double dLatitudeInRadians;
            double dHourAngle;
            double dCosLatitude;
            double dSinLatitude;
            double dCosHourAngle;
            double dParallax;
            dGreenwichMeanSiderealTime = 6.6974243242 + 0.0657098283 * dElapsedJulianDays + dDecimalHours;
            // GMST is in (sidereal) hours; * 15 converts hours to degrees
            // before adding the observer's longitude.
            dLocalMeanSiderealTime = (dGreenwichMeanSiderealTime * 15 + longitude) * RAD;
            dHourAngle = dLocalMeanSiderealTime - dRightAscension;
            dLatitudeInRadians = latitude * RAD;
            dCosLatitude = Math.cos(dLatitudeInRadians);
            dSinLatitude = Math.sin(dLatitudeInRadians);
            dCosHourAngle = Math.cos(dHourAngle);
            double zenithAngle = (Math.acos(dCosLatitude * dCosHourAngle * Math.cos(dDeclination)
                    + Math.sin(dDeclination) * dSinLatitude));
            dY = -Math.sin(dHourAngle);
            dX = Math.tan(dDeclination) * dCosLatitude - dSinLatitude * dCosHourAngle;
            double azimuth = Math.atan2(dY, dX);
            // Normalize azimuth into [0, 2*Pi) radians, then convert to degrees.
            if (azimuth < 0.0) {
                azimuth = azimuth + TWOPI;
            }
            azimuth = azimuth / RAD;
            // Parallax Correction
            // (geocentric -> topocentric shift; zenithAngle is still in
            // radians here and is converted to degrees on the next line)
            dParallax = (D_EARTH_MEAN_RADIUS / D_ASTRONOMICAL_UNIT) * Math.sin(zenithAngle);
            zenithAngle = (zenithAngle + dParallax) / RAD;

            return new AzimuthZenithAngle(azimuth, zenithAngle);
        }
    }
}
mit
bugknightyyp/yyper
public/js/common/underscore/1.6.0/underscore.js
45496
// Underscore.js 1.6.0 // http://underscorejs.org // (c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors // Underscore may be freely distributed under the MIT license. (function() { // Baseline setup // -------------- // Establish the root object, `window` in the browser, or `exports` on the server. var root = this; // Save the previous value of the `_` variable. var previousUnderscore = root._; // Establish the object that gets returned to break out of a loop iteration. var breaker = {}; // Save bytes in the minified (but not gzipped) version: var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; // Create quick reference variables for speed access to core prototypes. var push = ArrayProto.push, slice = ArrayProto.slice, concat = ArrayProto.concat, toString = ObjProto.toString, hasOwnProperty = ObjProto.hasOwnProperty; // All **ECMAScript 5** native function implementations that we hope to use // are declared here. var nativeForEach = ArrayProto.forEach, nativeMap = ArrayProto.map, nativeReduce = ArrayProto.reduce, nativeReduceRight = ArrayProto.reduceRight, nativeFilter = ArrayProto.filter, nativeEvery = ArrayProto.every, nativeSome = ArrayProto.some, nativeIndexOf = ArrayProto.indexOf, nativeLastIndexOf = ArrayProto.lastIndexOf, nativeIsArray = Array.isArray, nativeKeys = Object.keys, nativeBind = FuncProto.bind; // Create a safe reference to the Underscore object for use below. var _ = function(obj) { if (obj instanceof _) return obj; if (!(this instanceof _)) return new _(obj); this._wrapped = obj; }; // Export the Underscore object for **Node.js**, with // backwards-compatibility for the old `require()` API. If we're in // the browser, add `_` as a global object via a string identifier, // for Closure Compiler "advanced" mode. 
if (typeof exports !== 'undefined') { if (typeof module !== 'undefined' && module.exports) { exports = module.exports = _; } exports._ = _; } else { root._ = _; } // Current version. _.VERSION = '1.6.0'; // Collection Functions // -------------------- // The cornerstone, an `each` implementation, aka `forEach`. // Handles objects with the built-in `forEach`, arrays, and raw objects. // Delegates to **ECMAScript 5**'s native `forEach` if available. var each = _.each = _.forEach = function(obj, iterator, context) { if (obj == null) return obj; if (nativeForEach && obj.forEach === nativeForEach) { obj.forEach(iterator, context); } else if (obj.length === +obj.length) { for (var i = 0, length = obj.length; i < length; i++) { if (iterator.call(context, obj[i], i, obj) === breaker) return; } } else { var keys = _.keys(obj); for (var i = 0, length = keys.length; i < length; i++) { if (iterator.call(context, obj[keys[i]], keys[i], obj) === breaker) return; } } return obj; }; // Return the results of applying the iterator to each element. // Delegates to **ECMAScript 5**'s native `map` if available. _.map = _.collect = function(obj, iterator, context) { var results = []; if (obj == null) return results; if (nativeMap && obj.map === nativeMap) return obj.map(iterator, context); each(obj, function(value, index, list) { results.push(iterator.call(context, value, index, list)); }); return results; }; var reduceError = 'Reduce of empty array with no initial value'; // **Reduce** builds up a single result from a list of values, aka `inject`, // or `foldl`. Delegates to **ECMAScript 5**'s native `reduce` if available. _.reduce = _.foldl = _.inject = function(obj, iterator, memo, context) { var initial = arguments.length > 2; if (obj == null) obj = []; if (nativeReduce && obj.reduce === nativeReduce) { if (context) iterator = _.bind(iterator, context); return initial ? 
obj.reduce(iterator, memo) : obj.reduce(iterator); } each(obj, function(value, index, list) { if (!initial) { memo = value; initial = true; } else { memo = iterator.call(context, memo, value, index, list); } }); if (!initial) throw new TypeError(reduceError); return memo; }; // The right-associative version of reduce, also known as `foldr`. // Delegates to **ECMAScript 5**'s native `reduceRight` if available. _.reduceRight = _.foldr = function(obj, iterator, memo, context) { var initial = arguments.length > 2; if (obj == null) obj = []; if (nativeReduceRight && obj.reduceRight === nativeReduceRight) { if (context) iterator = _.bind(iterator, context); return initial ? obj.reduceRight(iterator, memo) : obj.reduceRight(iterator); } var length = obj.length; if (length !== +length) { var keys = _.keys(obj); length = keys.length; } each(obj, function(value, index, list) { index = keys ? keys[--length] : --length; if (!initial) { memo = obj[index]; initial = true; } else { memo = iterator.call(context, memo, obj[index], index, list); } }); if (!initial) throw new TypeError(reduceError); return memo; }; // Return the first value which passes a truth test. Aliased as `detect`. _.find = _.detect = function(obj, predicate, context) { var result; any(obj, function(value, index, list) { if (predicate.call(context, value, index, list)) { result = value; return true; } }); return result; }; // Return all the elements that pass a truth test. // Delegates to **ECMAScript 5**'s native `filter` if available. // Aliased as `select`. _.filter = _.select = function(obj, predicate, context) { var results = []; if (obj == null) return results; if (nativeFilter && obj.filter === nativeFilter) return obj.filter(predicate, context); each(obj, function(value, index, list) { if (predicate.call(context, value, index, list)) results.push(value); }); return results; }; // Return all the elements for which a truth test fails. 
_.reject = function(obj, predicate, context) { return _.filter(obj, function(value, index, list) { return !predicate.call(context, value, index, list); }, context); }; // Determine whether all of the elements match a truth test. // Delegates to **ECMAScript 5**'s native `every` if available. // Aliased as `all`. _.every = _.all = function(obj, predicate, context) { predicate || (predicate = _.identity); var result = true; if (obj == null) return result; if (nativeEvery && obj.every === nativeEvery) return obj.every(predicate, context); each(obj, function(value, index, list) { if (!(result = result && predicate.call(context, value, index, list))) return breaker; }); return !!result; }; // Determine if at least one element in the object matches a truth test. // Delegates to **ECMAScript 5**'s native `some` if available. // Aliased as `any`. var any = _.some = _.any = function(obj, predicate, context) { predicate || (predicate = _.identity); var result = false; if (obj == null) return result; if (nativeSome && obj.some === nativeSome) return obj.some(predicate, context); each(obj, function(value, index, list) { if (result || (result = predicate.call(context, value, index, list))) return breaker; }); return !!result; }; // Determine if the array or object contains a given value (using `===`). // Aliased as `include`. _.contains = _.include = function(obj, target) { if (obj == null) return false; if (nativeIndexOf && obj.indexOf === nativeIndexOf) return obj.indexOf(target) != -1; return any(obj, function(value) { return value === target; }); }; // Invoke a method (with arguments) on every item in a collection. _.invoke = function(obj, method) { var args = slice.call(arguments, 2); var isFunc = _.isFunction(method); return _.map(obj, function(value) { return (isFunc ? method : value[method]).apply(value, args); }); }; // Convenience version of a common use case of `map`: fetching a property. 
_.pluck = function(obj, key) { return _.map(obj, _.property(key)); }; // Convenience version of a common use case of `filter`: selecting only objects // containing specific `key:value` pairs. _.where = function(obj, attrs) { return _.filter(obj, _.matches(attrs)); }; // Convenience version of a common use case of `find`: getting the first object // containing specific `key:value` pairs. _.findWhere = function(obj, attrs) { return _.find(obj, _.matches(attrs)); }; // Return the maximum element or (element-based computation). // Can't optimize arrays of integers longer than 65,535 elements. // See [WebKit Bug 80797](https://bugs.webkit.org/show_bug.cgi?id=80797) _.max = function(obj, iterator, context) { if (!iterator && _.isArray(obj) && obj[0] === +obj[0] && obj.length < 65535) { return Math.max.apply(Math, obj); } var result = -Infinity, lastComputed = -Infinity; each(obj, function(value, index, list) { var computed = iterator ? iterator.call(context, value, index, list) : value; if (computed > lastComputed) { result = value; lastComputed = computed; } }); return result; }; // Return the minimum element (or element-based computation). _.min = function(obj, iterator, context) { if (!iterator && _.isArray(obj) && obj[0] === +obj[0] && obj.length < 65535) { return Math.min.apply(Math, obj); } var result = Infinity, lastComputed = Infinity; each(obj, function(value, index, list) { var computed = iterator ? iterator.call(context, value, index, list) : value; if (computed < lastComputed) { result = value; lastComputed = computed; } }); return result; }; // Shuffle an array, using the modern version of the // [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle). _.shuffle = function(obj) { var rand; var index = 0; var shuffled = []; each(obj, function(value) { rand = _.random(index++); shuffled[index - 1] = shuffled[rand]; shuffled[rand] = value; }); return shuffled; }; // Sample **n** random values from a collection. 
// If **n** is not specified, returns a single random element. // The internal `guard` argument allows it to work with `map`. _.sample = function(obj, n, guard) { if (n == null || guard) { if (obj.length !== +obj.length) obj = _.values(obj); return obj[_.random(obj.length - 1)]; } return _.shuffle(obj).slice(0, Math.max(0, n)); }; // An internal function to generate lookup iterators. var lookupIterator = function(value) { if (value == null) return _.identity; if (_.isFunction(value)) return value; return _.property(value); }; // Sort the object's values by a criterion produced by an iterator. _.sortBy = function(obj, iterator, context) { iterator = lookupIterator(iterator); return _.pluck(_.map(obj, function(value, index, list) { return { value: value, index: index, criteria: iterator.call(context, value, index, list) }; }).sort(function(left, right) { var a = left.criteria; var b = right.criteria; if (a !== b) { if (a > b || a === void 0) return 1; if (a < b || b === void 0) return -1; } return left.index - right.index; }), 'value'); }; // An internal function used for aggregate "group by" operations. var group = function(behavior) { return function(obj, iterator, context) { var result = {}; iterator = lookupIterator(iterator); each(obj, function(value, index) { var key = iterator.call(context, value, index, obj); behavior(result, key, value); }); return result; }; }; // Groups the object's values by a criterion. Pass either a string attribute // to group by, or a function that returns the criterion. _.groupBy = group(function(result, key, value) { _.has(result, key) ? result[key].push(value) : result[key] = [value]; }); // Indexes the object's values by a criterion, similar to `groupBy`, but for // when you know that your index values will be unique. _.indexBy = group(function(result, key, value) { result[key] = value; }); // Counts instances of an object that group by a certain criterion. 
Pass // either a string attribute to count by, or a function that returns the // criterion. _.countBy = group(function(result, key) { _.has(result, key) ? result[key]++ : result[key] = 1; }); // Use a comparator function to figure out the smallest index at which // an object should be inserted so as to maintain order. Uses binary search. _.sortedIndex = function(array, obj, iterator, context) { iterator = lookupIterator(iterator); var value = iterator.call(context, obj); var low = 0, high = array.length; while (low < high) { var mid = (low + high) >>> 1; iterator.call(context, array[mid]) < value ? low = mid + 1 : high = mid; } return low; }; // Safely create a real, live array from anything iterable. _.toArray = function(obj) { if (!obj) return []; if (_.isArray(obj)) return slice.call(obj); if (obj.length === +obj.length) return _.map(obj, _.identity); return _.values(obj); }; // Return the number of elements in an object. _.size = function(obj) { if (obj == null) return 0; return (obj.length === +obj.length) ? obj.length : _.keys(obj).length; }; // Array Functions // --------------- // Get the first element of an array. Passing **n** will return the first N // values in the array. Aliased as `head` and `take`. The **guard** check // allows it to work with `_.map`. _.first = _.head = _.take = function(array, n, guard) { if (array == null) return void 0; if ((n == null) || guard) return array[0]; if (n < 0) return []; return slice.call(array, 0, n); }; // Returns everything but the last entry of the array. Especially useful on // the arguments object. Passing **n** will return all the values in // the array, excluding the last N. The **guard** check allows it to work with // `_.map`. _.initial = function(array, n, guard) { return slice.call(array, 0, array.length - ((n == null) || guard ? 1 : n)); }; // Get the last element of an array. Passing **n** will return the last N // values in the array. The **guard** check allows it to work with `_.map`. 
_.last = function(array, n, guard) { if (array == null) return void 0; if ((n == null) || guard) return array[array.length - 1]; return slice.call(array, Math.max(array.length - n, 0)); }; // Returns everything but the first entry of the array. Aliased as `tail` and `drop`. // Especially useful on the arguments object. Passing an **n** will return // the rest N values in the array. The **guard** // check allows it to work with `_.map`. _.rest = _.tail = _.drop = function(array, n, guard) { return slice.call(array, (n == null) || guard ? 1 : n); }; // Trim out all falsy values from an array. _.compact = function(array) { return _.filter(array, _.identity); }; // Internal implementation of a recursive `flatten` function. var flatten = function(input, shallow, output) { if (shallow && _.every(input, _.isArray)) { return concat.apply(output, input); } each(input, function(value) { if (_.isArray(value) || _.isArguments(value)) { shallow ? push.apply(output, value) : flatten(value, shallow, output); } else { output.push(value); } }); return output; }; // Flatten out an array, either recursively (by default), or just one level. _.flatten = function(array, shallow) { return flatten(array, shallow, []); }; // Return a version of the array that does not contain the specified value(s). _.without = function(array) { return _.difference(array, slice.call(arguments, 1)); }; // Split an array into two arrays: one whose elements all satisfy the given // predicate, and one whose elements all do not satisfy the predicate. _.partition = function(array, predicate) { var pass = [], fail = []; each(array, function(elem) { (predicate(elem) ? pass : fail).push(elem); }); return [pass, fail]; }; // Produce a duplicate-free version of the array. If the array has already // been sorted, you have the option of using a faster algorithm. // Aliased as `unique`. 
_.uniq = _.unique = function(array, isSorted, iterator, context) { if (_.isFunction(isSorted)) { context = iterator; iterator = isSorted; isSorted = false; } var initial = iterator ? _.map(array, iterator, context) : array; var results = []; var seen = []; each(initial, function(value, index) { if (isSorted ? (!index || seen[seen.length - 1] !== value) : !_.contains(seen, value)) { seen.push(value); results.push(array[index]); } }); return results; }; // Produce an array that contains the union: each distinct element from all of // the passed-in arrays. _.union = function() { return _.uniq(_.flatten(arguments, true)); }; // Produce an array that contains every item shared between all the // passed-in arrays. _.intersection = function(array) { var rest = slice.call(arguments, 1); return _.filter(_.uniq(array), function(item) { return _.every(rest, function(other) { return _.contains(other, item); }); }); }; // Take the difference between one array and a number of other arrays. // Only the elements present in just the first array will remain. _.difference = function(array) { var rest = concat.apply(ArrayProto, slice.call(arguments, 1)); return _.filter(array, function(value){ return !_.contains(rest, value); }); }; // Zip together multiple lists into a single array -- elements that share // an index go together. _.zip = function() { var length = _.max(_.pluck(arguments, 'length').concat(0)); var results = new Array(length); for (var i = 0; i < length; i++) { results[i] = _.pluck(arguments, '' + i); } return results; }; // Converts lists into objects. Pass either a single array of `[key, value]` // pairs, or two parallel arrays of the same length -- one of keys, and one of // the corresponding values. 
_.object = function(list, values) { if (list == null) return {}; var result = {}; for (var i = 0, length = list.length; i < length; i++) { if (values) { result[list[i]] = values[i]; } else { result[list[i][0]] = list[i][1]; } } return result; }; // If the browser doesn't supply us with indexOf (I'm looking at you, **MSIE**), // we need this function. Return the position of the first occurrence of an // item in an array, or -1 if the item is not included in the array. // Delegates to **ECMAScript 5**'s native `indexOf` if available. // If the array is large and already in sort order, pass `true` // for **isSorted** to use binary search. _.indexOf = function(array, item, isSorted) { if (array == null) return -1; var i = 0, length = array.length; if (isSorted) { if (typeof isSorted == 'number') { i = (isSorted < 0 ? Math.max(0, length + isSorted) : isSorted); } else { i = _.sortedIndex(array, item); return array[i] === item ? i : -1; } } if (nativeIndexOf && array.indexOf === nativeIndexOf) return array.indexOf(item, isSorted); for (; i < length; i++) if (array[i] === item) return i; return -1; }; // Delegates to **ECMAScript 5**'s native `lastIndexOf` if available. _.lastIndexOf = function(array, item, from) { if (array == null) return -1; var hasIndex = from != null; if (nativeLastIndexOf && array.lastIndexOf === nativeLastIndexOf) { return hasIndex ? array.lastIndexOf(item, from) : array.lastIndexOf(item); } var i = (hasIndex ? from : array.length); while (i--) if (array[i] === item) return i; return -1; }; // Generate an integer Array containing an arithmetic progression. A port of // the native Python `range()` function. See // [the Python documentation](http://docs.python.org/library/functions.html#range). 
_.range = function(start, stop, step) { if (arguments.length <= 1) { stop = start || 0; start = 0; } step = arguments[2] || 1; var length = Math.max(Math.ceil((stop - start) / step), 0); var idx = 0; var range = new Array(length); while(idx < length) { range[idx++] = start; start += step; } return range; }; // Function (ahem) Functions // ------------------ // Reusable constructor function for prototype setting. var ctor = function(){}; // Create a function bound to a given object (assigning `this`, and arguments, // optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if // available. _.bind = function(func, context) { var args, bound; if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1)); if (!_.isFunction(func)) throw new TypeError; args = slice.call(arguments, 2); return bound = function() { if (!(this instanceof bound)) return func.apply(context, args.concat(slice.call(arguments))); ctor.prototype = func.prototype; var self = new ctor; ctor.prototype = null; var result = func.apply(self, args.concat(slice.call(arguments))); if (Object(result) === result) return result; return self; }; }; // Partially apply a function by creating a version that has had some of its // arguments pre-filled, without changing its dynamic `this` context. _ acts // as a placeholder, allowing any combination of arguments to be pre-filled. _.partial = function(func) { var boundArgs = slice.call(arguments, 1); return function() { var position = 0; var args = boundArgs.slice(); for (var i = 0, length = args.length; i < length; i++) { if (args[i] === _) args[i] = arguments[position++]; } while (position < arguments.length) args.push(arguments[position++]); return func.apply(this, args); }; }; // Bind a number of an object's methods to that object. Remaining arguments // are the method names to be bound. Useful for ensuring that all callbacks // defined on an object belong to it. 
_.bindAll = function(obj) { var funcs = slice.call(arguments, 1); if (funcs.length === 0) throw new Error('bindAll must be passed function names'); each(funcs, function(f) { obj[f] = _.bind(obj[f], obj); }); return obj; }; // Memoize an expensive function by storing its results. _.memoize = function(func, hasher) { var memo = {}; hasher || (hasher = _.identity); return function() { var key = hasher.apply(this, arguments); return _.has(memo, key) ? memo[key] : (memo[key] = func.apply(this, arguments)); }; }; // Delays a function for the given number of milliseconds, and then calls // it with the arguments supplied. _.delay = function(func, wait) { var args = slice.call(arguments, 2); return setTimeout(function(){ return func.apply(null, args); }, wait); }; // Defers a function, scheduling it to run after the current call stack has // cleared. _.defer = function(func) { return _.delay.apply(_, [func, 1].concat(slice.call(arguments, 1))); }; // Returns a function, that, when invoked, will only be triggered at most once // during a given window of time. Normally, the throttled function will run // as much as it can, without ever going more than once per `wait` duration; // but if you'd like to disable the execution on the leading edge, pass // `{leading: false}`. To disable execution on the trailing edge, ditto. _.throttle = function(func, wait, options) { var context, args, result; var timeout = null; var previous = 0; options || (options = {}); var later = function() { previous = options.leading === false ? 
0 : _.now(); timeout = null; result = func.apply(context, args); context = args = null; }; return function() { var now = _.now(); if (!previous && options.leading === false) previous = now; var remaining = wait - (now - previous); context = this; args = arguments; if (remaining <= 0) { clearTimeout(timeout); timeout = null; previous = now; result = func.apply(context, args); context = args = null; } else if (!timeout && options.trailing !== false) { timeout = setTimeout(later, remaining); } return result; }; }; // Returns a function, that, as long as it continues to be invoked, will not // be triggered. The function will be called after it stops being called for // N milliseconds. If `immediate` is passed, trigger the function on the // leading edge, instead of the trailing. _.debounce = function(func, wait, immediate) { var timeout, args, context, timestamp, result; var later = function() { var last = _.now() - timestamp; if (last < wait) { timeout = setTimeout(later, wait - last); } else { timeout = null; if (!immediate) { result = func.apply(context, args); context = args = null; } } }; return function() { context = this; args = arguments; timestamp = _.now(); var callNow = immediate && !timeout; if (!timeout) { timeout = setTimeout(later, wait); } if (callNow) { result = func.apply(context, args); context = args = null; } return result; }; }; // Returns a function that will be executed at most one time, no matter how // often you call it. Useful for lazy initialization. _.once = function(func) { var ran = false, memo; return function() { if (ran) return memo; ran = true; memo = func.apply(this, arguments); func = null; return memo; }; }; // Returns the first function passed as an argument to the second, // allowing you to adjust arguments, run code before and after, and // conditionally execute the original function. 
_.wrap = function(func, wrapper) { return _.partial(wrapper, func); }; // Returns a function that is the composition of a list of functions, each // consuming the return value of the function that follows. _.compose = function() { var funcs = arguments; return function() { var args = arguments; for (var i = funcs.length - 1; i >= 0; i--) { args = [funcs[i].apply(this, args)]; } return args[0]; }; }; // Returns a function that will only be executed after being called N times. _.after = function(times, func) { return function() { if (--times < 1) { return func.apply(this, arguments); } }; }; // Object Functions // ---------------- // Retrieve the names of an object's properties. // Delegates to **ECMAScript 5**'s native `Object.keys` _.keys = function(obj) { if (!_.isObject(obj)) return []; if (nativeKeys) return nativeKeys(obj); var keys = []; for (var key in obj) if (_.has(obj, key)) keys.push(key); return keys; }; // Retrieve the values of an object's properties. _.values = function(obj) { var keys = _.keys(obj); var length = keys.length; var values = new Array(length); for (var i = 0; i < length; i++) { values[i] = obj[keys[i]]; } return values; }; // Convert an object into a list of `[key, value]` pairs. _.pairs = function(obj) { var keys = _.keys(obj); var length = keys.length; var pairs = new Array(length); for (var i = 0; i < length; i++) { pairs[i] = [keys[i], obj[keys[i]]]; } return pairs; }; // Invert the keys and values of an object. The values must be serializable. _.invert = function(obj) { var result = {}; var keys = _.keys(obj); for (var i = 0, length = keys.length; i < length; i++) { result[obj[keys[i]]] = keys[i]; } return result; }; // Return a sorted list of the function names available on the object. // Aliased as `methods` _.functions = _.methods = function(obj) { var names = []; for (var key in obj) { if (_.isFunction(obj[key])) names.push(key); } return names.sort(); }; // Extend a given object with all the properties in passed-in object(s). 
_.extend = function(obj) { each(slice.call(arguments, 1), function(source) { if (source) { for (var prop in source) { obj[prop] = source[prop]; } } }); return obj; }; // Return a copy of the object only containing the whitelisted properties. _.pick = function(obj) { var copy = {}; var keys = concat.apply(ArrayProto, slice.call(arguments, 1)); each(keys, function(key) { if (key in obj) copy[key] = obj[key]; }); return copy; }; // Return a copy of the object without the blacklisted properties. _.omit = function(obj) { var copy = {}; var keys = concat.apply(ArrayProto, slice.call(arguments, 1)); for (var key in obj) { if (!_.contains(keys, key)) copy[key] = obj[key]; } return copy; }; // Fill in a given object with default properties. _.defaults = function(obj) { each(slice.call(arguments, 1), function(source) { if (source) { for (var prop in source) { if (obj[prop] === void 0) obj[prop] = source[prop]; } } }); return obj; }; // Create a (shallow-cloned) duplicate of an object. _.clone = function(obj) { if (!_.isObject(obj)) return obj; return _.isArray(obj) ? obj.slice() : _.extend({}, obj); }; // Invokes interceptor with the obj, and then returns obj. // The primary purpose of this method is to "tap into" a method chain, in // order to perform operations on intermediate results within the chain. _.tap = function(obj, interceptor) { interceptor(obj); return obj; }; // Internal recursive comparison function for `isEqual`. var eq = function(a, b, aStack, bStack) { // Identical objects are equal. `0 === -0`, but they aren't identical. // See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal). if (a === b) return a !== 0 || 1 / a == 1 / b; // A strict comparison is necessary because `null == undefined`. if (a == null || b == null) return a === b; // Unwrap any wrapped objects. if (a instanceof _) a = a._wrapped; if (b instanceof _) b = b._wrapped; // Compare `[[Class]]` names. 
var className = toString.call(a); if (className != toString.call(b)) return false; switch (className) { // Strings, numbers, dates, and booleans are compared by value. case '[object String]': // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is // equivalent to `new String("5")`. return a == String(b); case '[object Number]': // `NaN`s are equivalent, but non-reflexive. An `egal` comparison is performed for // other numeric values. return a != +a ? b != +b : (a == 0 ? 1 / a == 1 / b : a == +b); case '[object Date]': case '[object Boolean]': // Coerce dates and booleans to numeric primitive values. Dates are compared by their // millisecond representations. Note that invalid dates with millisecond representations // of `NaN` are not equivalent. return +a == +b; // RegExps are compared by their source patterns and flags. case '[object RegExp]': return a.source == b.source && a.global == b.global && a.multiline == b.multiline && a.ignoreCase == b.ignoreCase; } if (typeof a != 'object' || typeof b != 'object') return false; // Assume equality for cyclic structures. The algorithm for detecting cyclic // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`. var length = aStack.length; while (length--) { // Linear search. Performance is inversely proportional to the number of // unique nested structures. if (aStack[length] == a) return bStack[length] == b; } // Objects with different constructors are not equivalent, but `Object`s // from different frames are. var aCtor = a.constructor, bCtor = b.constructor; if (aCtor !== bCtor && !(_.isFunction(aCtor) && (aCtor instanceof aCtor) && _.isFunction(bCtor) && (bCtor instanceof bCtor)) && ('constructor' in a && 'constructor' in b)) { return false; } // Add the first object to the stack of traversed objects. aStack.push(a); bStack.push(b); var size = 0, result = true; // Recursively compare objects and arrays. 
if (className == '[object Array]') { // Compare array lengths to determine if a deep comparison is necessary. size = a.length; result = size == b.length; if (result) { // Deep compare the contents, ignoring non-numeric properties. while (size--) { if (!(result = eq(a[size], b[size], aStack, bStack))) break; } } } else { // Deep compare objects. for (var key in a) { if (_.has(a, key)) { // Count the expected number of properties. size++; // Deep compare each member. if (!(result = _.has(b, key) && eq(a[key], b[key], aStack, bStack))) break; } } // Ensure that both objects contain the same number of properties. if (result) { for (key in b) { if (_.has(b, key) && !(size--)) break; } result = !size; } } // Remove the first object from the stack of traversed objects. aStack.pop(); bStack.pop(); return result; }; // Perform a deep comparison to check if two objects are equal. _.isEqual = function(a, b) { return eq(a, b, [], []); }; // Is a given array, string, or object empty? // An "empty" object has no enumerable own-properties. _.isEmpty = function(obj) { if (obj == null) return true; if (_.isArray(obj) || _.isString(obj)) return obj.length === 0; for (var key in obj) if (_.has(obj, key)) return false; return true; }; // Is a given value a DOM element? _.isElement = function(obj) { return !!(obj && obj.nodeType === 1); }; // Is a given value an array? // Delegates to ECMA5's native Array.isArray _.isArray = nativeIsArray || function(obj) { return toString.call(obj) == '[object Array]'; }; // Is a given variable an object? _.isObject = function(obj) { return obj === Object(obj); }; // Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp. each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp'], function(name) { _['is' + name] = function(obj) { return toString.call(obj) == '[object ' + name + ']'; }; }); // Define a fallback version of the method in browsers (ahem, IE), where // there isn't any inspectable "Arguments" type. 
if (!_.isArguments(arguments)) { _.isArguments = function(obj) { return !!(obj && _.has(obj, 'callee')); }; } // Optimize `isFunction` if appropriate. if (typeof (/./) !== 'function') { _.isFunction = function(obj) { return typeof obj === 'function'; }; } // Is a given object a finite number? _.isFinite = function(obj) { return isFinite(obj) && !isNaN(parseFloat(obj)); }; // Is the given value `NaN`? (NaN is the only number which does not equal itself). _.isNaN = function(obj) { return _.isNumber(obj) && obj != +obj; }; // Is a given value a boolean? _.isBoolean = function(obj) { return obj === true || obj === false || toString.call(obj) == '[object Boolean]'; }; // Is a given value equal to null? _.isNull = function(obj) { return obj === null; }; // Is a given variable undefined? _.isUndefined = function(obj) { return obj === void 0; }; // Shortcut function for checking if an object has a given property directly // on itself (in other words, not on a prototype). _.has = function(obj, key) { return hasOwnProperty.call(obj, key); }; // Utility Functions // ----------------- // Run Underscore.js in *noConflict* mode, returning the `_` variable to its // previous owner. Returns a reference to the Underscore object. _.noConflict = function() { root._ = previousUnderscore; return this; }; // Keep the identity function around for default iterators. _.identity = function(value) { return value; }; _.constant = function(value) { return function () { return value; }; }; _.property = function(key) { return function(obj) { return obj[key]; }; }; // Returns a predicate for checking whether an object has a given set of `key:value` pairs. _.matches = function(attrs) { return function(obj) { if (obj === attrs) return true; //avoid comparing an object to itself. for (var key in attrs) { if (attrs[key] !== obj[key]) return false; } return true; } }; // Run a function **n** times. 
_.times = function(n, iterator, context) { var accum = Array(Math.max(0, n)); for (var i = 0; i < n; i++) accum[i] = iterator.call(context, i); return accum; }; // Return a random integer between min and max (inclusive). _.random = function(min, max) { if (max == null) { max = min; min = 0; } return min + Math.floor(Math.random() * (max - min + 1)); }; // A (possibly faster) way to get the current timestamp as an integer. _.now = Date.now || function() { return new Date().getTime(); }; // List of HTML entities for escaping. var entityMap = { escape: { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#x27;' } }; entityMap.unescape = _.invert(entityMap.escape); // Regexes containing the keys and values listed immediately above. var entityRegexes = { escape: new RegExp('[' + _.keys(entityMap.escape).join('') + ']', 'g'), unescape: new RegExp('(' + _.keys(entityMap.unescape).join('|') + ')', 'g') }; // Functions for escaping and unescaping strings to/from HTML interpolation. _.each(['escape', 'unescape'], function(method) { _[method] = function(string) { if (string == null) return ''; return ('' + string).replace(entityRegexes[method], function(match) { return entityMap[method][match]; }); }; }); // If the value of the named `property` is a function then invoke it with the // `object` as context; otherwise, return it. _.result = function(object, property) { if (object == null) return void 0; var value = object[property]; return _.isFunction(value) ? value.call(object) : value; }; // Add your own custom functions to the Underscore object. _.mixin = function(obj) { each(_.functions(obj), function(name) { var func = _[name] = obj[name]; _.prototype[name] = function() { var args = [this._wrapped]; push.apply(args, arguments); return result.call(this, func.apply(_, args)); }; }); }; // Generate a unique integer id (unique within the entire client session). // Useful for temporary DOM ids. 
var idCounter = 0; _.uniqueId = function(prefix) { var id = ++idCounter + ''; return prefix ? prefix + id : id; }; // By default, Underscore uses ERB-style template delimiters, change the // following template settings to use alternative delimiters. _.templateSettings = { evaluate : /<%([\s\S]+?)%>/g, interpolate : /<%=([\s\S]+?)%>/g, escape : /<%-([\s\S]+?)%>/g }; // When customizing `templateSettings`, if you don't want to define an // interpolation, evaluation or escaping regex, we need one that is // guaranteed not to match. var noMatch = /(.)^/; // Certain characters need to be escaped so that they can be put into a // string literal. var escapes = { "'": "'", '\\': '\\', '\r': 'r', '\n': 'n', '\t': 't', '\u2028': 'u2028', '\u2029': 'u2029' }; var escaper = /\\|'|\r|\n|\t|\u2028|\u2029/g; // JavaScript micro-templating, similar to John Resig's implementation. // Underscore templating handles arbitrary delimiters, preserves whitespace, // and correctly escapes quotes within interpolated code. _.template = function(text, data, settings) { var render; settings = _.defaults({}, settings, _.templateSettings); // Combine delimiters into one regular expression via alternation. var matcher = new RegExp([ (settings.escape || noMatch).source, (settings.interpolate || noMatch).source, (settings.evaluate || noMatch).source ].join('|') + '|$', 'g'); // Compile the template source, escaping string literals appropriately. 
var index = 0; var source = "__p+='"; text.replace(matcher, function(match, escape, interpolate, evaluate, offset) { source += text.slice(index, offset) .replace(escaper, function(match) { return '\\' + escapes[match]; }); if (escape) { source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'"; } if (interpolate) { source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'"; } if (evaluate) { source += "';\n" + evaluate + "\n__p+='"; } index = offset + match.length; return match; }); source += "';\n"; // If a variable is not specified, place data values in local scope. if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n'; source = "var __t,__p='',__j=Array.prototype.join," + "print=function(){__p+=__j.call(arguments,'');};\n" + source + "return __p;\n"; try { render = new Function(settings.variable || 'obj', '_', source); } catch (e) { e.source = source; throw e; } if (data) return render(data, _); var template = function(data) { return render.call(this, data, _); }; // Provide the compiled function source as a convenience for precompilation. template.source = 'function(' + (settings.variable || 'obj') + '){\n' + source + '}'; return template; }; // Add a "chain" function, which will delegate to the wrapper. _.chain = function(obj) { return _(obj).chain(); }; // OOP // --------------- // If Underscore is called as a function, it returns a wrapped object that // can be used OO-style. This wrapper holds altered versions of all the // underscore functions. Wrapped objects may be chained. // Helper function to continue chaining intermediate results. var result = function(obj) { return this._chain ? _(obj).chain() : obj; }; // Add all of the Underscore functions to the wrapper object. _.mixin(_); // Add all mutator Array functions to the wrapper. 
each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) { var method = ArrayProto[name]; _.prototype[name] = function() { var obj = this._wrapped; method.apply(obj, arguments); if ((name == 'shift' || name == 'splice') && obj.length === 0) delete obj[0]; return result.call(this, obj); }; }); // Add all accessor Array functions to the wrapper. each(['concat', 'join', 'slice'], function(name) { var method = ArrayProto[name]; _.prototype[name] = function() { return result.call(this, method.apply(this._wrapped, arguments)); }; }); _.extend(_.prototype, { // Start chaining a wrapped Underscore object. chain: function() { this._chain = true; return this; }, // Extracts the result from a wrapped and chained object. value: function() { return this._wrapped; } }); // AMD registration happens at the end for compatibility with AMD loaders // that may not enforce next-turn semantics on modules. Even though general // practice for AMD registration is to be anonymous, underscore registers // as a named module because, like jQuery, it is a base library that is // popular enough to be bundled in a third party lib, but not be part of // an AMD load request. Those cases could generate an error when an // anonymous define() is called outside of a loader request. if (typeof define === 'function') { define('underscore/1.6.0/underscore.cmd', [], function() { return _; }); } }).call(this);
mit
bkerley/r509-cert-validator
lib/r509/cert/validator/version.rb
87
module R509 class Cert class Validator VERSION = "0.0.4" end end end
mit
axodox/AxoTools
AxoCover/Models/Testing/Data/ITestResult.cs
259
using System; namespace AxoCover.Models.Testing.Data { public interface ITestResult { TimeSpan Duration { get; } string ErrorMessage { get; } TestMethod Method { get; } TestState Outcome { get; } StackItem[] StackTrace { get; } } }
mit
motorcyclets/motorcycle
packages/stream/src/sources/createProxy/index.ts
84
export * from './types' export * from './createProxy' export * from './ProxyStream'
mit
telcocoin-project/telcocoin
src/net.cpp
57397
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "irc.h" #include "db.h" #include "net.h" #include "init.h" #include "addrman.h" #include "ui_interface.h" #include "script.h" #ifdef WIN32 #include <string.h> #endif #ifdef USE_UPNP #include <miniupnpc/miniwget.h> #include <miniupnpc/miniupnpc.h> #include <miniupnpc/upnpcommands.h> #include <miniupnpc/upnperrors.h> #endif // Dump addresses to peers.dat every 15 minutes (900s) #define DUMP_ADDRESSES_INTERVAL 900 using namespace std; using namespace boost; static const int MAX_OUTBOUND_CONNECTIONS = 8; bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound = NULL, const char *strDest = NULL, bool fOneShot = false); struct LocalServiceInfo { int nScore; int nPort; }; // // Global state variables // bool fDiscover = true; uint64 nLocalServices = NODE_NETWORK; static CCriticalSection cs_mapLocalHost; static map<CNetAddr, LocalServiceInfo> mapLocalHost; static bool vfReachable[NET_MAX] = {}; static bool vfLimited[NET_MAX] = {}; static CNode* pnodeLocalHost = NULL; static CNode* pnodeSync = NULL; uint64 nLocalHostNonce = 0; static std::vector<SOCKET> vhListenSocket; CAddrMan addrman; int nMaxConnections = 125; vector<CNode*> vNodes; CCriticalSection cs_vNodes; map<CInv, CDataStream> mapRelay; deque<pair<int64, CInv> > vRelayExpiration; CCriticalSection cs_mapRelay; limitedmap<CInv, int64> mapAlreadyAskedFor(MAX_INV_SZ); static deque<string> vOneShots; CCriticalSection cs_vOneShots; set<CNetAddr> setservAddNodeAddresses; CCriticalSection cs_setservAddNodeAddresses; vector<std::string> vAddedNodes; CCriticalSection cs_vAddedNodes; static CSemaphore *semOutbound = NULL; void AddOneShot(string strDest) { LOCK(cs_vOneShots); vOneShots.push_back(strDest); } unsigned short GetListenPort() { return 
(unsigned short)(GetArg("-port", GetDefaultPort())); } void CNode::PushGetBlocks(CBlockIndex* pindexBegin, uint256 hashEnd) { // Filter out duplicate requests if (pindexBegin == pindexLastGetBlocksBegin && hashEnd == hashLastGetBlocksEnd) return; pindexLastGetBlocksBegin = pindexBegin; hashLastGetBlocksEnd = hashEnd; PushMessage("getblocks", CBlockLocator(pindexBegin), hashEnd); } // find 'best' local address for a particular peer bool GetLocal(CService& addr, const CNetAddr *paddrPeer) { if (fNoListen) return false; int nBestScore = -1; int nBestReachability = -1; { LOCK(cs_mapLocalHost); for (map<CNetAddr, LocalServiceInfo>::iterator it = mapLocalHost.begin(); it != mapLocalHost.end(); it++) { int nScore = (*it).second.nScore; int nReachability = (*it).first.GetReachabilityFrom(paddrPeer); if (nReachability > nBestReachability || (nReachability == nBestReachability && nScore > nBestScore)) { addr = CService((*it).first, (*it).second.nPort); nBestReachability = nReachability; nBestScore = nScore; } } } return nBestScore >= 0; } // get best local address for a particular peer as a CAddress CAddress GetLocalAddress(const CNetAddr *paddrPeer) { CAddress ret(CService("0.0.0.0",0),0); CService addr; if (GetLocal(addr, paddrPeer)) { ret = CAddress(addr); ret.nServices = nLocalServices; ret.nTime = GetAdjustedTime(); } return ret; } bool RecvLine(SOCKET hSocket, string& strLine) { strLine = ""; loop { char c; int nBytes = recv(hSocket, &c, 1, 0); if (nBytes > 0) { if (c == '\n') continue; if (c == '\r') return true; strLine += c; if (strLine.size() >= 9000) return true; } else if (nBytes <= 0) { boost::this_thread::interruption_point(); if (nBytes < 0) { int nErr = WSAGetLastError(); if (nErr == WSAEMSGSIZE) continue; if (nErr == WSAEWOULDBLOCK || nErr == WSAEINTR || nErr == WSAEINPROGRESS) { MilliSleep(10); continue; } } if (!strLine.empty()) return true; if (nBytes == 0) { // socket closed printf("socket closed\n"); return false; } else { // socket error int nErr = 
WSAGetLastError(); printf("recv failed: %d\n", nErr); return false; } } } } // used when scores of local addresses may have changed // pushes better local address to peers void static AdvertizeLocal() { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) { if (pnode->fSuccessfullyConnected) { CAddress addrLocal = GetLocalAddress(&pnode->addr); if (addrLocal.IsRoutable() && (CService)addrLocal != (CService)pnode->addrLocal) { pnode->PushAddress(addrLocal); pnode->addrLocal = addrLocal; } } } } void SetReachable(enum Network net, bool fFlag) { LOCK(cs_mapLocalHost); vfReachable[net] = fFlag; if (net == NET_IPV6 && fFlag) vfReachable[NET_IPV4] = true; } // learn a new local address bool AddLocal(const CService& addr, int nScore) { if (!addr.IsRoutable()) return false; if (!fDiscover && nScore < LOCAL_MANUAL) return false; if (IsLimited(addr)) return false; printf("AddLocal(%s,%i)\n", addr.ToString().c_str(), nScore); { LOCK(cs_mapLocalHost); bool fAlready = mapLocalHost.count(addr) > 0; LocalServiceInfo &info = mapLocalHost[addr]; if (!fAlready || nScore >= info.nScore) { info.nScore = nScore + (fAlready ? 
1 : 0); info.nPort = addr.GetPort(); } SetReachable(addr.GetNetwork()); } AdvertizeLocal(); return true; } bool AddLocal(const CNetAddr &addr, int nScore) { return AddLocal(CService(addr, GetListenPort()), nScore); } /** Make a particular network entirely off-limits (no automatic connects to it) */ void SetLimited(enum Network net, bool fLimited) { if (net == NET_UNROUTABLE) return; LOCK(cs_mapLocalHost); vfLimited[net] = fLimited; } bool IsLimited(enum Network net) { LOCK(cs_mapLocalHost); return vfLimited[net]; } bool IsLimited(const CNetAddr &addr) { return IsLimited(addr.GetNetwork()); } /** vote for a local address */ bool SeenLocal(const CService& addr) { { LOCK(cs_mapLocalHost); if (mapLocalHost.count(addr) == 0) return false; mapLocalHost[addr].nScore++; } AdvertizeLocal(); return true; } /** check whether a given address is potentially local */ bool IsLocal(const CService& addr) { LOCK(cs_mapLocalHost); return mapLocalHost.count(addr) > 0; } /** check whether a given address is in a network we can probably connect to */ bool IsReachable(const CNetAddr& addr) { LOCK(cs_mapLocalHost); enum Network net = addr.GetNetwork(); return vfReachable[net] && !vfLimited[net]; } bool GetMyExternalIP2(const CService& addrConnect, const char* pszGet, const char* pszKeyword, CNetAddr& ipRet) { SOCKET hSocket; if (!ConnectSocket(addrConnect, hSocket)) return error("GetMyExternalIP() : connection to %s failed", addrConnect.ToString().c_str()); send(hSocket, pszGet, strlen(pszGet), MSG_NOSIGNAL); string strLine; while (RecvLine(hSocket, strLine)) { if (strLine.empty()) // HTTP response is separated from headers by blank line { loop { if (!RecvLine(hSocket, strLine)) { closesocket(hSocket); return false; } if (pszKeyword == NULL) break; if (strLine.find(pszKeyword) != string::npos) { strLine = strLine.substr(strLine.find(pszKeyword) + strlen(pszKeyword)); break; } } closesocket(hSocket); if (strLine.find("<") != string::npos) strLine = strLine.substr(0, strLine.find("<")); 
strLine = strLine.substr(strspn(strLine.c_str(), " \t\n\r")); while (strLine.size() > 0 && isspace(strLine[strLine.size()-1])) strLine.resize(strLine.size()-1); CService addr(strLine,0,true); printf("GetMyExternalIP() received [%s] %s\n", strLine.c_str(), addr.ToString().c_str()); if (!addr.IsValid() || !addr.IsRoutable()) return false; ipRet.SetIP(addr); return true; } } closesocket(hSocket); return error("GetMyExternalIP() : connection closed"); } bool GetMyExternalIP(CNetAddr& ipRet) { CService addrConnect; const char* pszGet; const char* pszKeyword; for (int nLookup = 0; nLookup <= 1; nLookup++) for (int nHost = 1; nHost <= 2; nHost++) { // We should be phasing out our use of sites like these. If we need // replacements, we should ask for volunteers to put this simple // php file on their web server that prints the client IP: // <?php echo $_SERVER["REMOTE_ADDR"]; ?> if (nHost == 1) { addrConnect = CService("91.198.22.70", 80); // checkip.dyndns.org if (nLookup == 1) { CService addrIP("checkip.dyndns.org", 80, true); if (addrIP.IsValid()) addrConnect = addrIP; } pszGet = "GET / HTTP/1.1\r\n" "Host: checkip.dyndns.org\r\n" "User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n" "Connection: close\r\n" "\r\n"; pszKeyword = "Address:"; } else if (nHost == 2) { addrConnect = CService("74.208.43.192", 80); // www.showmyip.com if (nLookup == 1) { CService addrIP("www.showmyip.com", 80, true); if (addrIP.IsValid()) addrConnect = addrIP; } pszGet = "GET /simple/ HTTP/1.1\r\n" "Host: www.showmyip.com\r\n" "User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)\r\n" "Connection: close\r\n" "\r\n"; pszKeyword = NULL; // Returns just IP address } if (GetMyExternalIP2(addrConnect, pszGet, pszKeyword, ipRet)) return true; } return false; } void ThreadGetMyExternalIP(void* parg) { // Make this thread recognisable as the external IP detection thread RenameThread("bitcoin-ext-ip"); CNetAddr addrLocalHost; if (GetMyExternalIP(addrLocalHost)) { 
printf("GetMyExternalIP() returned %s\n", addrLocalHost.ToStringIP().c_str()); AddLocal(addrLocalHost, LOCAL_HTTP); } } void AddressCurrentlyConnected(const CService& addr) { addrman.Connected(addr); } CNode* FindNode(const CNetAddr& ip) { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) if ((CNetAddr)pnode->addr == ip) return (pnode); return NULL; } CNode* FindNode(std::string addrName) { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) if (pnode->addrName == addrName) return (pnode); return NULL; } CNode* FindNode(const CService& addr) { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) if ((CService)pnode->addr == addr) return (pnode); return NULL; } CNode* ConnectNode(CAddress addrConnect, const char *pszDest) { if (pszDest == NULL) { if (IsLocal(addrConnect)) return NULL; // Look for an existing connection CNode* pnode = FindNode((CService)addrConnect); if (pnode) { pnode->AddRef(); return pnode; } } /// debug print printf("trying connection %s lastseen=%.1fhrs\n", pszDest ? pszDest : addrConnect.ToString().c_str(), pszDest ? 0 : (double)(GetAdjustedTime() - addrConnect.nTime)/3600.0); // Connect SOCKET hSocket; if (pszDest ? ConnectSocketByName(addrConnect, hSocket, pszDest, GetDefaultPort()) : ConnectSocket(addrConnect, hSocket)) { addrman.Attempt(addrConnect); /// debug print printf("connected %s\n", pszDest ? pszDest : addrConnect.ToString().c_str()); // Set to non-blocking #ifdef WIN32 u_long nOne = 1; if (ioctlsocket(hSocket, FIONBIO, &nOne) == SOCKET_ERROR) printf("ConnectSocket() : ioctlsocket non-blocking setting failed, error %d\n", WSAGetLastError()); #else if (fcntl(hSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR) printf("ConnectSocket() : fcntl non-blocking setting failed, error %d\n", errno); #endif // Add node CNode* pnode = new CNode(hSocket, addrConnect, pszDest ? 
pszDest : "", false); pnode->AddRef(); { LOCK(cs_vNodes); vNodes.push_back(pnode); } pnode->nTimeConnected = GetTime(); return pnode; } else { return NULL; } } void CNode::CloseSocketDisconnect() { fDisconnect = true; if (hSocket != INVALID_SOCKET) { printf("disconnecting node %s\n", addrName.c_str()); closesocket(hSocket); hSocket = INVALID_SOCKET; } // in case this fails, we'll empty the recv buffer when the CNode is deleted TRY_LOCK(cs_vRecvMsg, lockRecv); if (lockRecv) vRecvMsg.clear(); // if this was the sync node, we'll need a new one if (this == pnodeSync) pnodeSync = NULL; } void CNode::Cleanup() { } void CNode::PushVersion() { /// when NTP implemented, change to just nTime = GetAdjustedTime() int64 nTime = (fInbound ? GetAdjustedTime() : GetTime()); CAddress addrYou = (addr.IsRoutable() && !IsProxy(addr) ? addr : CAddress(CService("0.0.0.0",0))); CAddress addrMe = GetLocalAddress(&addr); RAND_bytes((unsigned char*)&nLocalHostNonce, sizeof(nLocalHostNonce)); printf("send version message: version %d, blocks=%d, us=%s, them=%s, peer=%s\n", PROTOCOL_VERSION, nBestHeight, addrMe.ToString().c_str(), addrYou.ToString().c_str(), addr.ToString().c_str()); PushMessage("version", PROTOCOL_VERSION, nLocalServices, nTime, addrYou, addrMe, nLocalHostNonce, FormatSubVersion(CLIENT_NAME, CLIENT_VERSION, std::vector<string>()), nBestHeight); } std::map<CNetAddr, int64> CNode::setBanned; CCriticalSection CNode::cs_setBanned; void CNode::ClearBanned() { setBanned.clear(); } bool CNode::IsBanned(CNetAddr ip) { bool fResult = false; { LOCK(cs_setBanned); std::map<CNetAddr, int64>::iterator i = setBanned.find(ip); if (i != setBanned.end()) { int64 t = (*i).second; if (GetTime() < t) fResult = true; } } return fResult; } bool CNode::Misbehaving(int howmuch) { if (addr.IsLocal()) { printf("Warning: Local node %s misbehaving (delta: %d)!\n", addrName.c_str(), howmuch); return false; } nMisbehavior += howmuch; if (nMisbehavior >= GetArg("-banscore", 100)) { int64 banTime = 
GetTime()+GetArg("-bantime", 60*60*24); // Default 24-hour ban printf("Misbehaving: %s (%d -> %d) DISCONNECTING\n", addr.ToString().c_str(), nMisbehavior-howmuch, nMisbehavior); { LOCK(cs_setBanned); if (setBanned[addr] < banTime) setBanned[addr] = banTime; } CloseSocketDisconnect(); return true; } else printf("Misbehaving: %s (%d -> %d)\n", addr.ToString().c_str(), nMisbehavior-howmuch, nMisbehavior); return false; } #undef X #define X(name) stats.name = name void CNode::copyStats(CNodeStats &stats) { X(nServices); X(nLastSend); X(nLastRecv); X(nTimeConnected); X(addrName); X(nVersion); X(cleanSubVer); X(fInbound); X(nStartingHeight); X(nMisbehavior); X(nSendBytes); X(nRecvBytes); X(nBlocksRequested); stats.fSyncNode = (this == pnodeSync); } #undef X // requires LOCK(cs_vRecvMsg) bool CNode::ReceiveMsgBytes(const char *pch, unsigned int nBytes) { while (nBytes > 0) { // get current incomplete message, or create a new one if (vRecvMsg.empty() || vRecvMsg.back().complete()) vRecvMsg.push_back(CNetMessage(SER_NETWORK, nRecvVersion)); CNetMessage& msg = vRecvMsg.back(); // absorb network data int handled; if (!msg.in_data) handled = msg.readHeader(pch, nBytes); else handled = msg.readData(pch, nBytes); if (handled < 0) return false; pch += handled; nBytes -= handled; } return true; } int CNetMessage::readHeader(const char *pch, unsigned int nBytes) { // copy data to temporary parsing buffer unsigned int nRemaining = 24 - nHdrPos; unsigned int nCopy = std::min(nRemaining, nBytes); memcpy(&hdrbuf[nHdrPos], pch, nCopy); nHdrPos += nCopy; // if header incomplete, exit if (nHdrPos < 24) return nCopy; // deserialize to CMessageHeader try { hdrbuf >> hdr; } catch (std::exception &e) { return -1; } // reject messages larger than MAX_SIZE if (hdr.nMessageSize > MAX_SIZE) return -1; // switch state to reading message data in_data = true; vRecv.resize(hdr.nMessageSize); return nCopy; } int CNetMessage::readData(const char *pch, unsigned int nBytes) { unsigned int nRemaining = 
hdr.nMessageSize - nDataPos; unsigned int nCopy = std::min(nRemaining, nBytes); memcpy(&vRecv[nDataPos], pch, nCopy); nDataPos += nCopy; return nCopy; } // requires LOCK(cs_vSend) void SocketSendData(CNode *pnode) { std::deque<CSerializeData>::iterator it = pnode->vSendMsg.begin(); while (it != pnode->vSendMsg.end()) { const CSerializeData &data = *it; assert(data.size() > pnode->nSendOffset); int nBytes = send(pnode->hSocket, &data[pnode->nSendOffset], data.size() - pnode->nSendOffset, MSG_NOSIGNAL | MSG_DONTWAIT); if (nBytes > 0) { pnode->nLastSend = GetTime(); pnode->nSendBytes += nBytes; pnode->nSendOffset += nBytes; if (pnode->nSendOffset == data.size()) { pnode->nSendOffset = 0; pnode->nSendSize -= data.size(); it++; } else { // could not send full message; stop sending more break; } } else { if (nBytes < 0) { // error int nErr = WSAGetLastError(); if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS) { printf("socket send error %d\n", nErr); pnode->CloseSocketDisconnect(); } } // couldn't send anything at all break; } } if (it == pnode->vSendMsg.end()) { assert(pnode->nSendOffset == 0); assert(pnode->nSendSize == 0); } pnode->vSendMsg.erase(pnode->vSendMsg.begin(), it); } static list<CNode*> vNodesDisconnected; void ThreadSocketHandler() { unsigned int nPrevNodeCount = 0; loop { // // Disconnect nodes // { LOCK(cs_vNodes); // Disconnect unused nodes vector<CNode*> vNodesCopy = vNodes; BOOST_FOREACH(CNode* pnode, vNodesCopy) { if (pnode->fDisconnect || (pnode->GetRefCount() <= 0 && pnode->vRecvMsg.empty() && pnode->nSendSize == 0 && pnode->ssSend.empty())) { // remove from vNodes vNodes.erase(remove(vNodes.begin(), vNodes.end(), pnode), vNodes.end()); // release outbound grant (if any) pnode->grantOutbound.Release(); // close socket and cleanup pnode->CloseSocketDisconnect(); pnode->Cleanup(); // hold in disconnected pool until all refs are released if (pnode->fNetworkNode || pnode->fInbound) pnode->Release(); 
vNodesDisconnected.push_back(pnode); } } // Delete disconnected nodes list<CNode*> vNodesDisconnectedCopy = vNodesDisconnected; BOOST_FOREACH(CNode* pnode, vNodesDisconnectedCopy) { // wait until threads are done using it if (pnode->GetRefCount() <= 0) { bool fDelete = false; { TRY_LOCK(pnode->cs_vSend, lockSend); if (lockSend) { TRY_LOCK(pnode->cs_vRecvMsg, lockRecv); if (lockRecv) { TRY_LOCK(pnode->cs_inventory, lockInv); if (lockInv) fDelete = true; } } } if (fDelete) { vNodesDisconnected.remove(pnode); delete pnode; } } } } if (vNodes.size() != nPrevNodeCount) { nPrevNodeCount = vNodes.size(); uiInterface.NotifyNumConnectionsChanged(vNodes.size()); } // // Find which sockets have data to receive // struct timeval timeout; timeout.tv_sec = 0; timeout.tv_usec = 50000; // frequency to poll pnode->vSend fd_set fdsetRecv; fd_set fdsetSend; fd_set fdsetError; FD_ZERO(&fdsetRecv); FD_ZERO(&fdsetSend); FD_ZERO(&fdsetError); SOCKET hSocketMax = 0; bool have_fds = false; BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket) { FD_SET(hListenSocket, &fdsetRecv); hSocketMax = max(hSocketMax, hListenSocket); have_fds = true; } { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) { if (pnode->hSocket == INVALID_SOCKET) continue; FD_SET(pnode->hSocket, &fdsetError); hSocketMax = max(hSocketMax, pnode->hSocket); have_fds = true; // Implement the following logic: // * If there is data to send, select() for sending data. As this only // happens when optimistic write failed, we choose to first drain the // write buffer in this case before receiving more. This avoids // needlessly queueing received data, if the remote peer is not themselves // receiving data. This means properly utilizing TCP flow control signalling. // * Otherwise, if there is no (complete) message in the receive buffer, // or there is space left in the buffer, select() for receiving data. // * (if neither of the above applies, there is certainly one message // in the receiver buffer ready to be processed). 
// Together, that means that at least one of the following is always possible, // so we don't deadlock: // * We send some data. // * We wait for data to be received (and disconnect after timeout). // * We process a message in the buffer (message handler thread). { TRY_LOCK(pnode->cs_vSend, lockSend); if (lockSend && !pnode->vSendMsg.empty()) { FD_SET(pnode->hSocket, &fdsetSend); continue; } } { TRY_LOCK(pnode->cs_vRecvMsg, lockRecv); if (lockRecv && ( pnode->vRecvMsg.empty() || !pnode->vRecvMsg.front().complete() || pnode->GetTotalRecvSize() <= ReceiveFloodSize())) FD_SET(pnode->hSocket, &fdsetRecv); } } } int nSelect = select(have_fds ? hSocketMax + 1 : 0, &fdsetRecv, &fdsetSend, &fdsetError, &timeout); boost::this_thread::interruption_point(); if (nSelect == SOCKET_ERROR) { if (have_fds) { int nErr = WSAGetLastError(); printf("socket select error %d\n", nErr); for (unsigned int i = 0; i <= hSocketMax; i++) FD_SET(i, &fdsetRecv); } FD_ZERO(&fdsetSend); FD_ZERO(&fdsetError); MilliSleep(timeout.tv_usec/1000); } // // Accept new connections // BOOST_FOREACH(SOCKET hListenSocket, vhListenSocket) if (hListenSocket != INVALID_SOCKET && FD_ISSET(hListenSocket, &fdsetRecv)) { #ifdef USE_IPV6 struct sockaddr_storage sockaddr; #else struct sockaddr sockaddr; #endif socklen_t len = sizeof(sockaddr); SOCKET hSocket = accept(hListenSocket, (struct sockaddr*)&sockaddr, &len); CAddress addr; int nInbound = 0; if (hSocket != INVALID_SOCKET) if (!addr.SetSockAddr((const struct sockaddr*)&sockaddr)) printf("Warning: Unknown socket family\n"); { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) if (pnode->fInbound) nInbound++; } if (hSocket == INVALID_SOCKET) { int nErr = WSAGetLastError(); if (nErr != WSAEWOULDBLOCK) printf("socket error accept failed: %d\n", nErr); } else if (nInbound >= nMaxConnections - MAX_OUTBOUND_CONNECTIONS) { { LOCK(cs_setservAddNodeAddresses); if (!setservAddNodeAddresses.count(addr)) closesocket(hSocket); } } else if (CNode::IsBanned(addr)) { 
printf("connection from %s dropped (banned)\n", addr.ToString().c_str()); closesocket(hSocket); } else { printf("accepted connection %s\n", addr.ToString().c_str()); CNode* pnode = new CNode(hSocket, addr, "", true); pnode->AddRef(); { LOCK(cs_vNodes); vNodes.push_back(pnode); } } } // // Service each socket // vector<CNode*> vNodesCopy; { LOCK(cs_vNodes); vNodesCopy = vNodes; BOOST_FOREACH(CNode* pnode, vNodesCopy) pnode->AddRef(); } BOOST_FOREACH(CNode* pnode, vNodesCopy) { boost::this_thread::interruption_point(); // // Receive // if (pnode->hSocket == INVALID_SOCKET) continue; if (FD_ISSET(pnode->hSocket, &fdsetRecv) || FD_ISSET(pnode->hSocket, &fdsetError)) { TRY_LOCK(pnode->cs_vRecvMsg, lockRecv); if (lockRecv) { { // typical socket buffer is 8K-64K char pchBuf[0x10000]; int nBytes = recv(pnode->hSocket, pchBuf, sizeof(pchBuf), MSG_DONTWAIT); if (nBytes > 0) { if (!pnode->ReceiveMsgBytes(pchBuf, nBytes)) pnode->CloseSocketDisconnect(); pnode->nLastRecv = GetTime(); pnode->nRecvBytes += nBytes; } else if (nBytes == 0) { // socket closed gracefully if (!pnode->fDisconnect) printf("socket closed\n"); pnode->CloseSocketDisconnect(); } else if (nBytes < 0) { // error int nErr = WSAGetLastError(); if (nErr != WSAEWOULDBLOCK && nErr != WSAEMSGSIZE && nErr != WSAEINTR && nErr != WSAEINPROGRESS) { if (!pnode->fDisconnect) printf("socket recv error %d\n", nErr); pnode->CloseSocketDisconnect(); } } } } } // // Send // if (pnode->hSocket == INVALID_SOCKET) continue; if (FD_ISSET(pnode->hSocket, &fdsetSend)) { TRY_LOCK(pnode->cs_vSend, lockSend); if (lockSend) SocketSendData(pnode); } // // Inactivity checking // if (pnode->vSendMsg.empty()) pnode->nLastSendEmpty = GetTime(); if (GetTime() - pnode->nTimeConnected > 60) { if (pnode->nLastRecv == 0 || pnode->nLastSend == 0) { printf("socket no message in first 60 seconds, %d %d\n", pnode->nLastRecv != 0, pnode->nLastSend != 0); pnode->fDisconnect = true; } else if (GetTime() - pnode->nLastSend > 90*60 && GetTime() - 
pnode->nLastSendEmpty > 90*60) { printf("socket not sending\n"); pnode->fDisconnect = true; } else if (GetTime() - pnode->nLastRecv > 90*60) { printf("socket inactivity timeout\n"); pnode->fDisconnect = true; } } } { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodesCopy) pnode->Release(); } MilliSleep(10); } } #ifdef USE_UPNP void ThreadMapPort() { std::string port = strprintf("%u", GetListenPort()); const char * multicastif = 0; const char * minissdpdpath = 0; struct UPNPDev * devlist = 0; char lanaddr[64]; #ifndef UPNPDISCOVER_SUCCESS /* miniupnpc 1.5 */ devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0); #else /* miniupnpc 1.6 */ int error = 0; devlist = upnpDiscover(2000, multicastif, minissdpdpath, 0, 0, &error); #endif struct UPNPUrls urls; struct IGDdatas data; int r; r = UPNP_GetValidIGD(devlist, &urls, &data, lanaddr, sizeof(lanaddr)); if (r == 1) { if (fDiscover) { char externalIPAddress[40]; r = UPNP_GetExternalIPAddress(urls.controlURL, data.first.servicetype, externalIPAddress); if(r != UPNPCOMMAND_SUCCESS) printf("UPnP: GetExternalIPAddress() returned %d\n", r); else { if(externalIPAddress[0]) { printf("UPnP: ExternalIPAddress = %s\n", externalIPAddress); AddLocal(CNetAddr(externalIPAddress), LOCAL_UPNP); } else printf("UPnP: GetExternalIPAddress failed.\n"); } } string strDesc = "TelcoCoin " + FormatFullVersion(); try { loop { #ifndef UPNPDISCOVER_SUCCESS /* miniupnpc 1.5 */ r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype, port.c_str(), port.c_str(), lanaddr, strDesc.c_str(), "TCP", 0); #else /* miniupnpc 1.6 */ r = UPNP_AddPortMapping(urls.controlURL, data.first.servicetype, port.c_str(), port.c_str(), lanaddr, strDesc.c_str(), "TCP", 0, "0"); #endif if(r!=UPNPCOMMAND_SUCCESS) printf("AddPortMapping(%s, %s, %s) failed with code %d (%s)\n", port.c_str(), port.c_str(), lanaddr, r, strupnperror(r)); else printf("UPnP Port Mapping successful.\n");; MilliSleep(20*60*1000); // Refresh every 20 minutes } } catch 
(boost::thread_interrupted) { r = UPNP_DeletePortMapping(urls.controlURL, data.first.servicetype, port.c_str(), "TCP", 0); printf("UPNP_DeletePortMapping() returned : %d\n", r); freeUPNPDevlist(devlist); devlist = 0; FreeUPNPUrls(&urls); throw; } } else { printf("No valid UPnP IGDs found\n"); freeUPNPDevlist(devlist); devlist = 0; if (r != 0) FreeUPNPUrls(&urls); } } void MapPort(bool fUseUPnP) { static boost::thread* upnp_thread = NULL; if (fUseUPnP) { if (upnp_thread) { upnp_thread->interrupt(); upnp_thread->join(); delete upnp_thread; } upnp_thread = new boost::thread(boost::bind(&TraceThread<boost::function<void()> >, "upnp", &ThreadMapPort)); } else if (upnp_thread) { upnp_thread->interrupt(); upnp_thread->join(); delete upnp_thread; upnp_thread = NULL; } } #else void MapPort(bool) { // Intentionally left blank. } #endif // DNS seeds // Each pair gives a source name and a seed name. // The first name is used as information source for addrman. // The second name should resolve to a list of seed addresses. static const char *strMainNetDNSSeed[][2] = { {"cryptolife.net","wallet.cryptolife.net"}, {"cryptolife.net","explore.cryptolife.net"}, {"cryptolife.net","seed1.cryptolife.net"}, {"cryptolife.net","seed2.cryptolife.net"}, {NULL, NULL} }; static const char *strTestNetDNSSeed[][2] = { {NULL, NULL} }; void ThreadDNSAddressSeed() { static const char *(*strDNSSeed)[2] = fTestNet ? 
strTestNetDNSSeed : strMainNetDNSSeed; int found = 0; printf("Loading addresses from DNS seeds (could take a while)\n"); for (unsigned int seed_idx = 0; strDNSSeed[seed_idx][0] != NULL; seed_idx++) { if (HaveNameProxy()) { AddOneShot(strDNSSeed[seed_idx][1]); } else { vector<CNetAddr> vaddr; vector<CAddress> vAdd; if (LookupHost(strDNSSeed[seed_idx][1], vaddr)) { BOOST_FOREACH(CNetAddr& ip, vaddr) { int nOneDay = 24*3600; CAddress addr = CAddress(CService(ip, GetDefaultPort())); addr.nTime = GetTime() - 3*nOneDay - GetRand(4*nOneDay); // use a random age between 3 and 7 days old vAdd.push_back(addr); found++; } } addrman.Add(vAdd, CNetAddr(strDNSSeed[seed_idx][0], true)); } } printf("%d addresses found from DNS seeds\n", found); } unsigned int pnSeed[] = { 0x119caa6b // 0x92B9B572, 0xA2F3716E, 0x5F551D90 }; void DumpAddresses() { int64 nStart = GetTimeMillis(); CAddrDB adb; adb.Write(addrman); printf("Flushed %d addresses to peers.dat %"PRI64d"ms\n", addrman.size(), GetTimeMillis() - nStart); } void static ProcessOneShot() { string strDest; { LOCK(cs_vOneShots); if (vOneShots.empty()) return; strDest = vOneShots.front(); vOneShots.pop_front(); } CAddress addr; CSemaphoreGrant grant(*semOutbound, true); if (grant) { if (!OpenNetworkConnection(addr, &grant, strDest.c_str(), true)) AddOneShot(strDest); } } void ThreadOpenConnections() { // Connect to specific addresses if (mapArgs.count("-connect") && mapMultiArgs["-connect"].size() > 0) { for (int64 nLoop = 0;; nLoop++) { ProcessOneShot(); BOOST_FOREACH(string strAddr, mapMultiArgs["-connect"]) { CAddress addr; OpenNetworkConnection(addr, NULL, strAddr.c_str()); for (int i = 0; i < 10 && i < nLoop; i++) { MilliSleep(500); } } MilliSleep(500); } } // Initiate network connections int64 nStart = GetTime(); loop { ProcessOneShot(); MilliSleep(500); CSemaphoreGrant grant(*semOutbound); boost::this_thread::interruption_point(); // Add seed nodes if IRC isn't working if (addrman.size()==0 && (GetTime() - nStart > 60) && 
!fTestNet) { std::vector<CAddress> vAdd; for (unsigned int i = 0; i < ARRAYLEN(pnSeed); i++) { // It'll only connect to one or two seed nodes because once it connects, // it'll get a pile of addresses with newer timestamps. // Seed nodes are given a random 'last seen time' of between one and two // weeks ago. const int64 nOneWeek = 7*24*60*60; struct in_addr ip; memcpy(&ip, &pnSeed[i], sizeof(ip)); CAddress addr(CService(ip, GetDefaultPort())); addr.nTime = GetTime()-GetRand(nOneWeek)-nOneWeek; vAdd.push_back(addr); } addrman.Add(vAdd, CNetAddr("127.0.0.1")); } // // Choose an address to connect to based on most recently seen // CAddress addrConnect; // Only connect out to one peer per network group (/16 for IPv4). // Do this here so we don't have to critsect vNodes inside mapAddresses critsect. int nOutbound = 0; set<vector<unsigned char> > setConnected; { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) { if (!pnode->fInbound) { setConnected.insert(pnode->addr.GetGroup()); nOutbound++; } } } int64 nANow = GetAdjustedTime(); int nTries = 0; loop { // use an nUnkBias between 10 (no outgoing connections) and 90 (8 outgoing connections) CAddress addr = addrman.Select(10 + min(nOutbound,8)*10); // if we selected an invalid address, restart if (!addr.IsValid() || setConnected.count(addr.GetGroup()) || IsLocal(addr)) break; // If we didn't find an appropriate destination after trying 100 addresses fetched from addrman, // stop this loop, and let the outer loop run again (which sleeps, adds seed nodes, recalculates // already-connected network ranges, ...) before trying new addrman addresses. 
nTries++; if (nTries > 100) break; if (IsLimited(addr)) continue; // only consider very recently tried nodes after 30 failed attempts if (nANow - addr.nLastTry < 600 && nTries < 30) continue; // do not allow non-default ports, unless after 50 invalid addresses selected already if (addr.GetPort() != GetDefaultPort() && nTries < 50) continue; addrConnect = addr; break; } if (addrConnect.IsValid()) OpenNetworkConnection(addrConnect, &grant); } } void ThreadOpenAddedConnections() { { LOCK(cs_vAddedNodes); vAddedNodes = mapMultiArgs["-addnode"]; } if (HaveNameProxy()) { while(true) { list<string> lAddresses(0); { LOCK(cs_vAddedNodes); BOOST_FOREACH(string& strAddNode, vAddedNodes) lAddresses.push_back(strAddNode); } BOOST_FOREACH(string& strAddNode, lAddresses) { CAddress addr; CSemaphoreGrant grant(*semOutbound); OpenNetworkConnection(addr, &grant, strAddNode.c_str()); MilliSleep(500); } MilliSleep(120000); // Retry every 2 minutes } } for (unsigned int i = 0; true; i++) { list<string> lAddresses(0); { LOCK(cs_vAddedNodes); BOOST_FOREACH(string& strAddNode, vAddedNodes) lAddresses.push_back(strAddNode); } list<vector<CService> > lservAddressesToAdd(0); BOOST_FOREACH(string& strAddNode, lAddresses) { vector<CService> vservNode(0); if(Lookup(strAddNode.c_str(), vservNode, GetDefaultPort(), fNameLookup, 0)) { lservAddressesToAdd.push_back(vservNode); { LOCK(cs_setservAddNodeAddresses); BOOST_FOREACH(CService& serv, vservNode) setservAddNodeAddresses.insert(serv); } } } // Attempt to connect to each IP for each addnode entry until at least one is successful per addnode entry // (keeping in mind that addnode entries can have many IPs if fNameLookup) { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) for (list<vector<CService> >::iterator it = lservAddressesToAdd.begin(); it != lservAddressesToAdd.end(); it++) BOOST_FOREACH(CService& addrNode, *(it)) if (pnode->addr == addrNode) { it = lservAddressesToAdd.erase(it); it--; break; } } BOOST_FOREACH(vector<CService>& vserv, 
lservAddressesToAdd) { CSemaphoreGrant grant(*semOutbound); OpenNetworkConnection(CAddress(vserv[i % vserv.size()]), &grant); MilliSleep(500); } MilliSleep(120000); // Retry every 2 minutes } } // if successful, this moves the passed grant to the constructed node bool OpenNetworkConnection(const CAddress& addrConnect, CSemaphoreGrant *grantOutbound, const char *strDest, bool fOneShot) { // // Initiate outbound network connection // boost::this_thread::interruption_point(); if (!strDest) if (IsLocal(addrConnect) || FindNode((CNetAddr)addrConnect) || CNode::IsBanned(addrConnect) || FindNode(addrConnect.ToStringIPPort().c_str())) return false; if (strDest && FindNode(strDest)) return false; CNode* pnode = ConnectNode(addrConnect, strDest); boost::this_thread::interruption_point(); if (!pnode) return false; if (grantOutbound) grantOutbound->MoveTo(pnode->grantOutbound); pnode->fNetworkNode = true; if (fOneShot) pnode->fOneShot = true; return true; } // for now, use a very simple selection metric: the node from which we received // most recently double static NodeSyncScore(const CNode *pnode) { return -pnode->nLastRecv; } void static StartSync(const vector<CNode*> &vNodes) { CNode *pnodeNewSync = NULL; double dBestScore = 0; // fImporting and fReindex are accessed out of cs_main here, but only // as an optimization - they are checked again in SendMessages. if (fImporting || fReindex) return; // Iterate over all nodes BOOST_FOREACH(CNode* pnode, vNodes) { // check preconditions for allowing a sync if (!pnode->fClient && !pnode->fOneShot && !pnode->fDisconnect && pnode->fSuccessfullyConnected && (pnode->nStartingHeight > (nBestHeight - 144)) && (pnode->nVersion < NOBLKS_VERSION_START || pnode->nVersion >= NOBLKS_VERSION_END)) { // if ok, compare node's score with the best so far double dScore = NodeSyncScore(pnode); if (pnodeNewSync == NULL || dScore > dBestScore) { pnodeNewSync = pnode; dBestScore = dScore; } } } // if a new sync candidate was found, start sync! 
if (pnodeNewSync) { pnodeNewSync->fStartSync = true; pnodeSync = pnodeNewSync; } } void ThreadMessageHandler() { SetThreadPriority(THREAD_PRIORITY_BELOW_NORMAL); while (true) { bool fHaveSyncNode = false; vector<CNode*> vNodesCopy; { LOCK(cs_vNodes); vNodesCopy = vNodes; BOOST_FOREACH(CNode* pnode, vNodesCopy) { pnode->AddRef(); if (pnode == pnodeSync) fHaveSyncNode = true; } } if (!fHaveSyncNode) StartSync(vNodesCopy); // Poll the connected nodes for messages CNode* pnodeTrickle = NULL; if (!vNodesCopy.empty()) pnodeTrickle = vNodesCopy[GetRand(vNodesCopy.size())]; bool fSleep = true; BOOST_FOREACH(CNode* pnode, vNodesCopy) { if (pnode->fDisconnect) continue; // Receive messages { TRY_LOCK(pnode->cs_vRecvMsg, lockRecv); if (lockRecv) { if (!ProcessMessages(pnode)) pnode->CloseSocketDisconnect(); if (pnode->nSendSize < SendBufferSize()) { if (!pnode->vRecvGetData.empty() || (!pnode->vRecvMsg.empty() && pnode->vRecvMsg[0].complete())) { fSleep = false; } } } } boost::this_thread::interruption_point(); // Send messages { TRY_LOCK(pnode->cs_vSend, lockSend); if (lockSend) SendMessages(pnode, pnode == pnodeTrickle); } boost::this_thread::interruption_point(); } { LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodesCopy) pnode->Release(); } if (fSleep) MilliSleep(100); } } bool BindListenPort(const CService &addrBind, string& strError) { strError = ""; int nOne = 1; // Create socket for listening for incoming connections #ifdef USE_IPV6 struct sockaddr_storage sockaddr; #else struct sockaddr sockaddr; #endif socklen_t len = sizeof(sockaddr); if (!addrBind.GetSockAddr((struct sockaddr*)&sockaddr, &len)) { strError = strprintf("Error: bind address family for %s not supported", addrBind.ToString().c_str()); printf("%s\n", strError.c_str()); return false; } SOCKET hListenSocket = socket(((struct sockaddr*)&sockaddr)->sa_family, SOCK_STREAM, IPPROTO_TCP); if (hListenSocket == INVALID_SOCKET) { strError = strprintf("Error: Couldn't open socket for incoming connections (socket 
returned error %d)", WSAGetLastError()); printf("%s\n", strError.c_str()); return false; } #ifdef SO_NOSIGPIPE // Different way of disabling SIGPIPE on BSD setsockopt(hListenSocket, SOL_SOCKET, SO_NOSIGPIPE, (void*)&nOne, sizeof(int)); #endif #ifndef WIN32 // Allow binding if the port is still in TIME_WAIT state after // the program was closed and restarted. Not an issue on windows. setsockopt(hListenSocket, SOL_SOCKET, SO_REUSEADDR, (void*)&nOne, sizeof(int)); #endif #ifdef WIN32 // Set to non-blocking, incoming connections will also inherit this if (ioctlsocket(hListenSocket, FIONBIO, (u_long*)&nOne) == SOCKET_ERROR) #else if (fcntl(hListenSocket, F_SETFL, O_NONBLOCK) == SOCKET_ERROR) #endif { strError = strprintf("Error: Couldn't set properties on socket for incoming connections (error %d)", WSAGetLastError()); printf("%s\n", strError.c_str()); return false; } #ifdef USE_IPV6 // some systems don't have IPV6_V6ONLY but are always v6only; others do have the option // and enable it by default or not. Try to enable it, if possible. if (addrBind.IsIPv6()) { #ifdef IPV6_V6ONLY #ifdef WIN32 setsockopt(hListenSocket, IPPROTO_IPV6, IPV6_V6ONLY, (const char*)&nOne, sizeof(int)); #else setsockopt(hListenSocket, IPPROTO_IPV6, IPV6_V6ONLY, (void*)&nOne, sizeof(int)); #endif #endif #ifdef WIN32 int nProtLevel = 10 /* PROTECTION_LEVEL_UNRESTRICTED */; int nParameterId = 23 /* IPV6_PROTECTION_LEVEl */; // this call is allowed to fail setsockopt(hListenSocket, IPPROTO_IPV6, nParameterId, (const char*)&nProtLevel, sizeof(int)); #endif } #endif if (::bind(hListenSocket, (struct sockaddr*)&sockaddr, len) == SOCKET_ERROR) { int nErr = WSAGetLastError(); if (nErr == WSAEADDRINUSE) strError = strprintf(_("Unable to bind to %s on this computer. 
TelcoCoin is probably already running."), addrBind.ToString().c_str()); else strError = strprintf(_("Unable to bind to %s on this computer (bind returned error %d, %s)"), addrBind.ToString().c_str(), nErr, strerror(nErr)); printf("%s\n", strError.c_str()); return false; } printf("Bound to %s\n", addrBind.ToString().c_str()); // Listen for incoming connections if (listen(hListenSocket, SOMAXCONN) == SOCKET_ERROR) { strError = strprintf("Error: Listening for incoming connections failed (listen returned error %d)", WSAGetLastError()); printf("%s\n", strError.c_str()); return false; } vhListenSocket.push_back(hListenSocket); if (addrBind.IsRoutable() && fDiscover) AddLocal(addrBind, LOCAL_BIND); return true; } void static Discover() { if (!fDiscover) return; #ifdef WIN32 // Get local host IP char pszHostName[1000] = ""; if (gethostname(pszHostName, sizeof(pszHostName)) != SOCKET_ERROR) { vector<CNetAddr> vaddr; if (LookupHost(pszHostName, vaddr)) { BOOST_FOREACH (const CNetAddr &addr, vaddr) { AddLocal(addr, LOCAL_IF); } } } #else // Get local host ip struct ifaddrs* myaddrs; if (getifaddrs(&myaddrs) == 0) { for (struct ifaddrs* ifa = myaddrs; ifa != NULL; ifa = ifa->ifa_next) { if (ifa->ifa_addr == NULL) continue; if ((ifa->ifa_flags & IFF_UP) == 0) continue; if (strcmp(ifa->ifa_name, "lo") == 0) continue; if (strcmp(ifa->ifa_name, "lo0") == 0) continue; if (ifa->ifa_addr->sa_family == AF_INET) { struct sockaddr_in* s4 = (struct sockaddr_in*)(ifa->ifa_addr); CNetAddr addr(s4->sin_addr); if (AddLocal(addr, LOCAL_IF)) printf("IPv4 %s: %s\n", ifa->ifa_name, addr.ToString().c_str()); } #ifdef USE_IPV6 else if (ifa->ifa_addr->sa_family == AF_INET6) { struct sockaddr_in6* s6 = (struct sockaddr_in6*)(ifa->ifa_addr); CNetAddr addr(s6->sin6_addr); if (AddLocal(addr, LOCAL_IF)) printf("IPv6 %s: %s\n", ifa->ifa_name, addr.ToString().c_str()); } #endif } freeifaddrs(myaddrs); } #endif // Don't use external IPv4 discovery, when -onlynet="IPv6" if (!IsLimited(NET_IPV4)) 
NewThread(ThreadGetMyExternalIP, NULL); } void StartNode(boost::thread_group& threadGroup) { if (semOutbound == NULL) { // initialize semaphore int nMaxOutbound = min(MAX_OUTBOUND_CONNECTIONS, nMaxConnections); semOutbound = new CSemaphore(nMaxOutbound); } if (pnodeLocalHost == NULL) pnodeLocalHost = new CNode(INVALID_SOCKET, CAddress(CService("127.0.0.1", 0), nLocalServices)); Discover(); // // Start threads // if (!GetBoolArg("-dnsseed", true)) printf("DNS seeding disabled\n"); else threadGroup.create_thread(boost::bind(&TraceThread<boost::function<void()> >, "dnsseed", &ThreadDNSAddressSeed)); #ifdef USE_UPNP // Map ports with UPnP MapPort(GetBoolArg("-upnp", USE_UPNP)); #endif // Get addresses from IRC and advertise ours threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "irc", &ThreadIRCSeed)); // Send and receive from sockets, accept connections threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "net", &ThreadSocketHandler)); // Initiate outbound connections from -addnode threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "addcon", &ThreadOpenAddedConnections)); // Initiate outbound connections threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "opencon", &ThreadOpenConnections)); // Process messages threadGroup.create_thread(boost::bind(&TraceThread<void (*)()>, "msghand", &ThreadMessageHandler)); // Dump network addresses threadGroup.create_thread(boost::bind(&LoopForever<void (*)()>, "dumpaddr", &DumpAddresses, DUMP_ADDRESSES_INTERVAL * 1000)); } bool StopNode() { printf("StopNode()\n"); GenerateBitcoins(false, NULL); MapPort(false); nTransactionsUpdated++; if (semOutbound) for (int i=0; i<MAX_OUTBOUND_CONNECTIONS; i++) semOutbound->post(); MilliSleep(50); DumpAddresses(); return true; } class CNetCleanup { public: CNetCleanup() { } ~CNetCleanup() { // Close sockets BOOST_FOREACH(CNode* pnode, vNodes) if (pnode->hSocket != INVALID_SOCKET) closesocket(pnode->hSocket); BOOST_FOREACH(SOCKET hListenSocket, 
vhListenSocket) if (hListenSocket != INVALID_SOCKET) if (closesocket(hListenSocket) == SOCKET_ERROR) printf("closesocket(hListenSocket) failed with error %d\n", WSAGetLastError()); // clean up some globals (to help leak detection) BOOST_FOREACH(CNode *pnode, vNodes) delete pnode; BOOST_FOREACH(CNode *pnode, vNodesDisconnected) delete pnode; vNodes.clear(); vNodesDisconnected.clear(); delete semOutbound; semOutbound = NULL; delete pnodeLocalHost; pnodeLocalHost = NULL; #ifdef WIN32 // Shutdown Windows Sockets WSACleanup(); #endif } } instance_of_cnetcleanup; void RelayTransaction(const CTransaction& tx, const uint256& hash) { CDataStream ss(SER_NETWORK, PROTOCOL_VERSION); ss.reserve(10000); ss << tx; RelayTransaction(tx, hash, ss); } void RelayTransaction(const CTransaction& tx, const uint256& hash, const CDataStream& ss) { CInv inv(MSG_TX, hash); { LOCK(cs_mapRelay); // Expire old relay messages while (!vRelayExpiration.empty() && vRelayExpiration.front().first < GetTime()) { mapRelay.erase(vRelayExpiration.front().second); vRelayExpiration.pop_front(); } // Save original serialized message so newer versions are preserved mapRelay.insert(std::make_pair(inv, ss)); vRelayExpiration.push_back(std::make_pair(GetTime() + 15 * 60, inv)); } LOCK(cs_vNodes); BOOST_FOREACH(CNode* pnode, vNodes) { if(!pnode->fRelayTxes) continue; LOCK(pnode->cs_filter); if (pnode->pfilter) { if (pnode->pfilter->IsRelevantAndUpdate(tx, hash)) pnode->PushInventory(inv); } else pnode->PushInventory(inv); } }
mit
svenberra/NewtonUnityPlugin
Demos/Assets/Scripts/RopeComponent.cs
1769
using System.Collections; using System.Collections.Generic; using UnityEngine; public class RopeComponent : MonoBehaviour { public int Segments = 10; public float SegmentSize = 0.3f; private List<GameObject> gameObjects = new List<GameObject>(); private NewtonWorld world; private LineRenderer lineRenderer; public Material material; private Vector3[] points; void Awake() { world = FindObjectOfType(typeof(NewtonWorld)) as NewtonWorld; for (var i = 0; i < Segments; i++) { var go = new GameObject("Segment" + i.ToString()); go.transform.localPosition = new Vector3(i * SegmentSize, 0, 0); var rb = go.AddComponent<NewtonBody>(); rb.m_world = world; rb.m_mass = 1.0f; var col = go.AddComponent<NewtonSphereCollider>(); col.m_radius = SegmentSize / 2.0f; var joint = go.AddComponent<NewtonBallAndSocket>(); joint.m_posit = new Vector3(-(SegmentSize / 2.0f), 0, 0); if (i > 0) { joint.m_otherBody = gameObjects[i - 1].GetComponent<NewtonBody>(); } go.transform.SetParent(transform, false); gameObjects.Add(go); } lineRenderer = transform.gameObject.AddComponent<LineRenderer>(); lineRenderer.numPositions = Segments; lineRenderer.widthMultiplier = SegmentSize; lineRenderer.sharedMaterial = material; } void Start() { points = new Vector3[Segments]; } void Update() { for (int i = 0; i < Segments; i++) { points[i] = gameObjects[i].transform.position; } lineRenderer.SetPositions(points); } }
mit
xtina-starr/reaction
src/__generated__/PricingContextStoryQuery.graphql.ts
6918
/* tslint:disable */ import { ConcreteRequest } from "relay-runtime"; import { PricingContext_artwork$ref } from "./PricingContext_artwork.graphql"; export type PricingContextStoryQueryVariables = {}; export type PricingContextStoryQueryResponse = { readonly artwork: ({ readonly " $fragmentRefs": PricingContext_artwork$ref; }) | null; }; export type PricingContextStoryQuery = { readonly response: PricingContextStoryQueryResponse; readonly variables: PricingContextStoryQueryVariables; }; /* query PricingContextStoryQuery { artwork(id: "unused") { ...PricingContext_artwork __id } } fragment PricingContext_artwork on Artwork { priceCents { min max } artists { id __id } category pricingContext { appliedFiltersDisplay appliedFilters { dimension category } bins { maxPrice maxPriceCents minPrice minPriceCents numArtworks } } __id } */ const node: ConcreteRequest = (function(){ var v0 = [ { "kind": "Literal", "name": "id", "value": "unused", "type": "String!" } ], v1 = { "kind": "ScalarField", "alias": null, "name": "__id", "args": null, "storageKey": null }, v2 = { "kind": "ScalarField", "alias": null, "name": "category", "args": null, "storageKey": null }; return { "kind": "Request", "operationKind": "query", "name": "PricingContextStoryQuery", "id": null, "text": "query PricingContextStoryQuery {\n artwork(id: \"unused\") {\n ...PricingContext_artwork\n __id\n }\n}\n\nfragment PricingContext_artwork on Artwork {\n priceCents {\n min\n max\n }\n artists {\n id\n __id\n }\n category\n pricingContext {\n appliedFiltersDisplay\n appliedFilters {\n dimension\n category\n }\n bins {\n maxPrice\n maxPriceCents\n minPrice\n minPriceCents\n numArtworks\n }\n }\n __id\n}\n", "metadata": {}, "fragment": { "kind": "Fragment", "name": "PricingContextStoryQuery", "type": "Query", "metadata": null, "argumentDefinitions": [], "selections": [ { "kind": "LinkedField", "alias": null, "name": "artwork", "storageKey": "artwork(id:\"unused\")", "args": v0, "concreteType": "Artwork", 
"plural": false, "selections": [ { "kind": "FragmentSpread", "name": "PricingContext_artwork", "args": null }, v1 ] } ] }, "operation": { "kind": "Operation", "name": "PricingContextStoryQuery", "argumentDefinitions": [], "selections": [ { "kind": "LinkedField", "alias": null, "name": "artwork", "storageKey": "artwork(id:\"unused\")", "args": v0, "concreteType": "Artwork", "plural": false, "selections": [ { "kind": "LinkedField", "alias": null, "name": "priceCents", "storageKey": null, "args": null, "concreteType": "PriceCents", "plural": false, "selections": [ { "kind": "ScalarField", "alias": null, "name": "min", "args": null, "storageKey": null }, { "kind": "ScalarField", "alias": null, "name": "max", "args": null, "storageKey": null } ] }, { "kind": "LinkedField", "alias": null, "name": "artists", "storageKey": null, "args": null, "concreteType": "Artist", "plural": true, "selections": [ { "kind": "ScalarField", "alias": null, "name": "id", "args": null, "storageKey": null }, v1 ] }, v2, { "kind": "LinkedField", "alias": null, "name": "pricingContext", "storageKey": null, "args": null, "concreteType": "AnalyticsPricingContext", "plural": false, "selections": [ { "kind": "ScalarField", "alias": null, "name": "appliedFiltersDisplay", "args": null, "storageKey": null }, { "kind": "LinkedField", "alias": null, "name": "appliedFilters", "storageKey": null, "args": null, "concreteType": "AnalyticsPriceContextFilterType", "plural": false, "selections": [ { "kind": "ScalarField", "alias": null, "name": "dimension", "args": null, "storageKey": null }, v2 ] }, { "kind": "LinkedField", "alias": null, "name": "bins", "storageKey": null, "args": null, "concreteType": "AnalyticsHistogramBin", "plural": true, "selections": [ { "kind": "ScalarField", "alias": null, "name": "maxPrice", "args": null, "storageKey": null }, { "kind": "ScalarField", "alias": null, "name": "maxPriceCents", "args": null, "storageKey": null }, { "kind": "ScalarField", "alias": null, "name": 
"minPrice", "args": null, "storageKey": null }, { "kind": "ScalarField", "alias": null, "name": "minPriceCents", "args": null, "storageKey": null }, { "kind": "ScalarField", "alias": null, "name": "numArtworks", "args": null, "storageKey": null } ] } ] }, v1 ] } ] } }; })(); (node as any).hash = '82437c2a0e40f80f11ce8355a882fe17'; export default node;
mit
jaimegarcia/vega-examples
app/scripts/main.js
285
console.log('\'Allo \'Allo!'); function parse(spec) { console.log(spec); vg.parse.spec(spec, function(chart) { console.log(chart) var view = chart({ el:"#vis" }); view.viewport(null) .renderer("svg") .update(); }); } parse("charts-specs/node-io-9.json");
mit
kobayashi-tbn/watarase
tmp/dummy/db/migrate/20130316163018_create_users.rb
223
class CreateUsers < ActiveRecord::Migration def change create_table(:users, id: false) do |t| t.string :username t.timestamps end execute "ALTER TABLE users ADD PRIMARY KEY(username);" end end
mit
twin/synergy
app/decorators/project_decorator.rb
713
class ProjectDecorator < Draper::Decorator delegate_all translates :name, :description, :summary, :location, :age_restriction, :capacity decorates_association :category delegate :to_s def title h.smarty_pants [name, category.try(:name), h.date_range(begins_on, ends_on)].join(", ") end def description h.markdown super end def summary h.markdown (super.to_s.rstrip + " (#{h.link_to("#{I18n.t("words.more")}", object, class: "read-more")})") end def duration h.days (ends_on - begins_on).to_i + 1 end def date [begins_on.to_s(:long), ends_on.to_s(:long)].join(" – ") end def attachments object.attachments.available_in(I18n.locale).decorate end end
mit
goodwinxp/Yorozuya
library/ATF/_apex_send_trans.hpp
365
// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually #pragma once #include <common/common.h> START_ATF_NAMESPACE struct _apex_send_trans { int m_nRet; public: int size(); }; static_assert(ATF::checkSize<_apex_send_trans, 4>(), "_apex_send_trans"); END_ATF_NAMESPACE
mit
WSlatton/ftc-nav
src/main/java/io/github/wslatton/ftc_nav/motion/Rotation.java
1685
package io.github.wslatton.ftc_nav.motion; import io.github.wslatton.ftc_nav.control.Correction; import io.github.wslatton.ftc_nav.util.Angle; /** * basic rotation */ public class Rotation { private Robot robot; private Correction<Double, Double> correction; private double tolerance; /** * create rotation handler with given correction function * * @param robot wrapper around primitive hardware motions * @param correction correction function to use * @param tolerance angle tolerance in degrees */ public Rotation(Robot robot, Correction<Double, Double> correction, double tolerance) { this.robot = robot; this.correction = correction; this.tolerance = tolerance; } /** * perform rotation by given amount * * @param theta signed angle in degrees */ public void rotateBy(double theta) { correction.setTarget(theta); while (correction.getError() * theta >= tolerance) robot.rotate(correction.update(robot.getYaw())); } /** * perform rotation to given heading relative to starting location in shortest direction * * @param theta signed angle in degrees in range [0, 360) */ public void rotateTo(double theta) { double yaw = robot.getYaw(); double cw = Angle.clockwiseDelta(theta % 360, yaw); double ccw = Angle.clockwiseDelta(theta % 360, yaw); if (cw < ccw) { // turn clockwise rotateTo(yaw + cw); } else { // turn counter-clockwise rotateTo(yaw - ccw); } rotateBy(theta - robot.getYaw()); } }
mit
vanHeemstraSystems/libraries
library/rethinkdbdash.js
124
//import RethinkDBDash from 'rethinkdbdash'; // //export default RethinkDBDash; module.exports = require('rethinkdbdash');
mit
neilferreira/sf
src/NeilF/AdminBundle/Entity/Session.php
1682
<?php namespace NeilF\AdminBundle\Entity; /** * NeilF\AdminBundle\Entity\Session */ class Session { /** * @var integer $id */ private $id; /** * @var string $session_id */ private $session_id; /** * @var string $session_value */ private $session_value; /** * @var integer $session_time */ private $session_time; /** * Get id * * @return integer */ public function getId() { return $this->id; } /** * Set session_id * * @param string $sessionId * @return Session */ public function setSessionId($sessionId) { $this->session_id = $sessionId; return $this; } /** * Get session_id * * @return string */ public function getSessionId() { return $this->session_id; } /** * Set session_value * * @param string $sessionValue * @return Session */ public function setSessionValue($sessionValue) { $this->session_value = $sessionValue; return $this; } /** * Get session_value * * @return string */ public function getSessionValue() { return $this->session_value; } /** * Set session_time * * @param integer $sessionTime * @return Session */ public function setSessionTime($sessionTime) { $this->session_time = $sessionTime; return $this; } /** * Get session_time * * @return integer */ public function getSessionTime() { return $this->session_time; } }
mit
bsampietro/weirdy
test/dummy/config/application.rb
2570
require File.expand_path('../boot', __FILE__) require 'rails/all' Bundler.require(*Rails.groups) require "weirdy" module Dummy class Application < Rails::Application # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers # -- all .rb files in that directory are automatically loaded. # Custom directories with classes and modules you want to be autoloadable. # config.autoload_paths += %W(#{config.root}/extras) # Only load the plugins named here, in the order given (default is alphabetical). # :all can be used as a placeholder for all plugins not explicitly named. # config.plugins = [ :exception_notification, :ssl_requirement, :all ] # Activate observers that should always be running. # config.active_record.observers = :cacher, :garbage_collector, :forum_observer # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. # config.time_zone = 'Central Time (US & Canada)' # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] # config.i18n.default_locale = :de # Configure the default encoding used in templates for Ruby 1.9. config.encoding = "utf-8" # Configure sensitive parameters which will be filtered from the log file. config.filter_parameters += [:password] # Enable escaping HTML in JSON. config.active_support.escape_html_entities_in_json = true # Use SQL instead of Active Record's schema dumper when creating the database. # This is necessary if your schema can't be completely dumped by the schema dumper, # like if you have constraints or database-specific column types # config.active_record.schema_format = :sql # Enforce whitelist mode for mass assignment. 
# This will create an empty whitelist of attributes available for mass-assignment for all models # in your app. As such, your models will need to explicitly whitelist or blacklist accessible # parameters by using an attr_accessible or attr_protected declaration. config.active_record.whitelist_attributes = true # Enable the asset pipeline config.assets.enabled = true # Version of your assets, change this if you want to expire all your assets config.assets.version = '1.0' end end
mit
aichbauer/node-semantic-git-release-cli
src/helpers/cleanNodeModules.js
408
import execa from "execa"; import hasYarn from "has-yarn"; import fs from "fs-extra"; import path from "path"; const deleteNodeModules = () => { const cwd = process.cwd(); return fs.remove(path.join(cwd, "node_modules")); }; const installNodeModules = () => { if (hasYarn()) { return execa.shell("yarn"); } return execa.shell("npm i"); }; export { deleteNodeModules, installNodeModules };
mit
mdraganov/Telerik-Academy
Java Script/JavaScript Fundmentals/Functions/Functions/06.Larger than neighbours.js
793
function checkNeighbours(arr, i) { var result, inputArray, position; if (arr == null) { inputArray = document.getElementById('numbersArray2').value.split(' '), position = +document.getElementById('num6').value; } else { inputArray = arr, position = i; } if ((position === 0 || position === inputArray.length) && arr == null) { result = 'Not enough neighbours!'; document.getElementById('result6').innerHTML = result; return result; } if ((+inputArray[position] > +inputArray[position - 1]) && (+inputArray[position] > +inputArray[position + 1])) result = true; else result = false; if (arr == null) document.getElementById('result6').innerHTML = result; return result; }
mit
nitely/Spirit
spirit/comment/migrations/0001_initial.py
1847
# -*- coding: utf-8 -*- from django.db import models, migrations import django.utils.timezone from django.conf import settings class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('spirit_topic', '0001_initial'), ] operations = [ migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)), ('comment', models.TextField(verbose_name='comment', max_length=3000)), ('comment_html', models.TextField(verbose_name='comment html')), ('action', models.IntegerField(default=0, verbose_name='action', choices=[(0, 'comment'), (1, 'topic moved'), (2, 'topic closed'), (3, 'topic unclosed'), (4, 'topic pinned'), (5, 'topic unpinned')])), ('date', models.DateTimeField(default=django.utils.timezone.now)), ('is_removed', models.BooleanField(default=False)), ('is_modified', models.BooleanField(default=False)), ('ip_address', models.GenericIPAddressField(null=True, blank=True)), ('modified_count', models.PositiveIntegerField(default=0, verbose_name='modified count')), ('likes_count', models.PositiveIntegerField(default=0, verbose_name='likes count')), ('topic', models.ForeignKey(to='spirit_topic.Topic', on_delete=models.CASCADE)), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='user', on_delete=models.CASCADE)), ], options={ 'verbose_name_plural': 'comments', 'verbose_name': 'comment', 'ordering': ['-date', '-pk'], }, ), ]
mit
yangra/SoftUni
DataStructures/RopeTrieExercise/src/main/java/Main.java
3884
import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; public class Main { public static void main(String[] args) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); TextEditor editor = new TextEditorImpl(); Map<String, Boolean> users = new LinkedHashMap<>(); Pattern regex = Pattern.compile("\"(.*)\""); String line = reader.readLine(); while (true) { if (line.equals("end")) { break; } Matcher match = regex.matcher(line); String[] commandArgs = line.split(" "); commandArgs = Arrays.stream(commandArgs).filter(s -> s.length() > 0).toArray(String[]::new); try { switch (commandArgs[0]) { case "login": users.put(commandArgs[1], true); editor.login(commandArgs[1]); break; case "logout": users.put(commandArgs[1], false); editor.logout(commandArgs[1]); break; case "users": if (commandArgs.length == 2) { Iterable<String> result = editor.users(commandArgs[1]); for (String username : users.keySet()) { for (String user : result) { if (username.equals(user) && users.get(user)) { System.out.println(user); } } } break; } users.keySet().stream().filter(users::get).forEach(System.out::println); break; } String username = commandArgs[0]; if (!users.containsKey(username) || !users.get(username)) { line = reader.readLine(); continue; } String str = ""; if (match.find()) { str = match.group(1); } switch (commandArgs[1]) { case "insert": editor.insert(username, Integer.parseInt(commandArgs[2]), str); break; case "prepend": editor.prepend(username, str); break; case "substring": editor.substring(username, Integer.parseInt(commandArgs[2]), Integer.parseInt(commandArgs[3])); break; case "delete": editor.delete(username, Integer.parseInt(commandArgs[2]), Integer.parseInt(commandArgs[3])); break; case "clear": editor.clear(username); break; case "length": 
System.out.println(editor.length(username)); break; case "print": System.out.println(editor.print(username)); break; case "undo": editor.undo(username); break; } } catch (Exception ex) { } line = reader.readLine(); } } }
mit
tjpranic/Win33
Win33/Win33/Window.cpp
9776
#include "Window.h" #include "Application.h" #include "Error.h" #include "Misc.h" namespace Win33 { const Point Window::DefaultPosition = { CW_USEDEFAULT, CW_USEDEFAULT }; const Size Window::DefaultSize = { CW_USEDEFAULT, CW_USEDEFAULT }; Window::Window( const Point& position, const Size& size, WindowStyle style, ExWindowStyle exStyle, const std::wstring& className ): Window( nullptr, position, size, style, exStyle, className ) { } Window::Window( Window* parent, const Point& position, const Size& size, WindowStyle style, ExWindowStyle exStyle, const std::wstring& className ): mHandle ( nullptr ), mParent ( parent ), mInitialPosition ( position ), mInitialSize ( size ), mMinimumSize ( { 0, 0 } ), mMaximumSize ( Monitor::getSize( ) ), mPosition ( position ), mSize ( size ) { mHandle = CreateWindowEx( static_cast<DWORD>( exStyle ), className.c_str( ), L"", static_cast<DWORD>( style ), position.getX( ), position.getY( ), size.getWidth( ), size.getHeight( ), parent ? parent->mHandle : nullptr, nullptr, GetModuleHandle( nullptr ), nullptr ); if( !mHandle ) { throw EXCEPTION( L"Unable to create window." 
); } Application::get( ).mWindows[mHandle] = this; } void Window::show( ) { ShowWindow( mHandle, SW_SHOW ); } void Window::hide( ) { ShowWindow( mHandle, SW_HIDE ); } void Window::close( ) { SendMessage( mHandle, WM_CLOSE, 0, 0 ); } void Window::minimize( ) { CloseWindow( mHandle ); } void Window::maximize( ) { ShowWindow( mHandle, SW_MAXIMIZE ); } void Window::restore( ) { ShowWindow( mHandle, SW_RESTORE ); } void Window::toggleVisibility( ) { if( getVisible( ) ) { hide( ); } else { show( ); } } Point Window::getInitialPosition( ) const { return mInitialPosition; } Size Window::getInitialSize( ) const { return mInitialSize; } Size Window::getMinimumSize( ) const { return mMinimumSize; } Size Window::getMaximumSize( ) const { return mMaximumSize; } bool Window::getEnabled( ) const { return IsWindowEnabled( mHandle ) != 0; } int Window::getX( ) const { RECT cr = { }; GetClientRect( mHandle, &cr ); MapWindowPoints( mHandle, HWND_DESKTOP, reinterpret_cast<LPPOINT>( &cr ), 2 ); RECT wr = { }; GetWindowRect( mHandle, &wr ); POINT p = { wr.left, wr.top }; ScreenToClient( mHandle, &p ); return cr.left + p.x; } int Window::getY( ) const { RECT cr = { }; GetClientRect( mHandle, &cr ); MapWindowPoints( mHandle, HWND_DESKTOP, reinterpret_cast<LPPOINT>( &cr ), 2 ); RECT wr = { }; GetWindowRect( mHandle, &wr ); POINT p = { wr.left, wr.top }; ScreenToClient( mHandle, &p ); return cr.top + p.y; } Point Window::getPosition( ) const { return { getX( ), getY( ) }; } int Window::getWidth( ) const { RECT wr = { }; GetWindowRect( mHandle, &wr ); return wr.right - wr.left; } int Window::getHeight( ) const { RECT wr = { }; GetWindowRect( mHandle, &wr ); return wr.bottom - wr.top; } Size Window::getSize( ) const { return { getWidth( ), getHeight( ) }; } bool Window::getVisible( ) const { return IsWindowVisible( mHandle ) != 0; } std::wstring Window::getTitle( ) const { static wchar_t text[256]; GetWindowText( mHandle, text, 256 ); return std::wstring( text ); } bool Window::getResizable( 
) const { return ( GetWindowLong( mHandle, GWL_STYLE ) & WS_THICKFRAME ) == WS_THICKFRAME; } bool Window::getMaximizable( ) const { return ( GetWindowLong( mHandle, GWL_STYLE ) & WS_MAXIMIZEBOX ) == WS_MAXIMIZEBOX; } bool Window::getMinimizable( ) const { return ( GetWindowLong( mHandle, GWL_STYLE ) & WS_MINIMIZEBOX ) == WS_MINIMIZEBOX; } bool Window::getMinimized( ) const { auto wp = WINDOWPLACEMENT { }; GetWindowPlacement( mHandle, &wp ); return wp.showCmd == SW_SHOWMINIMIZED; } bool Window::getMaximized( ) const { auto wp = WINDOWPLACEMENT { }; GetWindowPlacement( mHandle, &wp ); return wp.showCmd == SW_SHOWMAXIMIZED; } void Window::setMinimumSize( const Size& size ) { mMinimumSize = size; } void Window::setMaximumSize( const Size& size ) { mMaximumSize = size; } void Window::setEnabled( bool enabled ) { EnableWindow( mHandle, enabled ); } void Window::setX( int x ) { SetWindowPos( mHandle, HWND_TOP, x, getY( ), 0, 0, SWP_NOSIZE ); } void Window::setY( int y ) { SetWindowPos( mHandle, HWND_TOP, getX( ), y, 0, 0, SWP_NOSIZE ); } void Window::setPosition( const Point& position ) { SetWindowPos( mHandle, HWND_TOP, position.getX( ), position.getY( ), 0, 0, SWP_NOSIZE ); } void Window::setWidth( int width ) { SetWindowPos( mHandle, HWND_TOP, 0, 0, width, getHeight( ), SWP_NOMOVE ); } void Window::setHeight( int height ) { SetWindowPos( mHandle, HWND_TOP, 0, 0, getWidth( ), height, SWP_NOMOVE ); } void Window::setSize( const Size& size ) { SetWindowPos( mHandle, HWND_TOP, 0, 0, size.getWidth( ), size.getHeight( ), SWP_NOMOVE ); } void Window::setVisible( bool visible ) { ShowWindow( mHandle, visible ? 
SW_SHOW : SW_HIDE ); } void Window::setTitle( const std::wstring& title ) { SetWindowText( mHandle, title.c_str( ) ); } void Window::setResizable( bool resizable ) { if( !resizable ) { SetWindowLong( mHandle, GWL_STYLE, GetWindowLong( mHandle, GWL_STYLE ) & ~WS_THICKFRAME ); } else { SetWindowLong( mHandle, GWL_STYLE, GetWindowLong( mHandle, GWL_STYLE ) | WS_THICKFRAME ); } } void Window::setIcon( Icon* icon ) { ASSERT_TRUE( icon != nullptr, L"icon cannot be null" ); SendMessage( mHandle, WM_SETICON, ICON_SMALL, reinterpret_cast<LONG_PTR>( icon ) ); } void Window::setMaximizable( bool maximizable ) { if( !maximizable ) { SetWindowLong( mHandle, GWL_STYLE, GetWindowLong( mHandle, GWL_STYLE ) & ~WS_MAXIMIZEBOX ); } else { SetWindowLong( mHandle, GWL_STYLE, GetWindowLong( mHandle, GWL_STYLE ) | WS_MAXIMIZEBOX ); } } void Window::setMinimizable( bool minimizable ) { if( !minimizable ) { SetWindowLong( mHandle, GWL_STYLE, GetWindowLong( mHandle, GWL_STYLE ) & ~WS_MINIMIZEBOX ); } else { SetWindowLong( mHandle, GWL_STYLE, GetWindowLong( mHandle, GWL_STYLE ) | WS_MINIMIZEBOX ); } } void Window::setMinimzed( bool minimized ) { if( !getMinimized( ) ) { minimize( ); } } void Window::setMaximized( bool maximized ) { if( !getMaximized( ) ) { maximize( ); } } LRESULT CALLBACK Window::windowProcessor( HWND handle, UINT message, WPARAM wordParameter, LPARAM longParameter ) { switch( message ) { case WM_PAINT: { DrawMenuBar( handle ); break; } case WM_SIZE: { onResize.trigger( getSize( ) ); break; } case WM_MOVE: { onMove.trigger( getPosition( ) ); break; } case WM_GETMINMAXINFO: { auto* mmi = reinterpret_cast<MINMAXINFO*>( longParameter ); mmi->ptMinTrackSize.x = getMinimumSize( ).getWidth( ); mmi->ptMinTrackSize.y = getMinimumSize( ).getHeight( ); mmi->ptMaxTrackSize.x = getMaximumSize( ).getWidth( ); mmi->ptMaxTrackSize.y = getMaximumSize( ).getHeight( ); break; } case WM_LBUTTONUP: { onLeftClick.trigger( Cursor::getPosition( ) ); break; } case WM_RBUTTONUP: { 
onRightClick.trigger( Cursor::getPosition( ) ); break; } case WM_SYSKEYDOWN: case WM_KEYDOWN: { onKeyDown.trigger( Keys::virtualKeyCodeToKey( static_cast<VirtualKeyCode>( wordParameter ) ) ); break; } case WM_SYSKEYUP: case WM_KEYUP: { onKeyUp.trigger( Keys::virtualKeyCodeToKey( static_cast<VirtualKeyCode>( wordParameter ) ) ); break; } case WM_CLOSE: { const auto cancelled = false; onClose.trigger( cancelled ); if( cancelled ) { return true; } break; } case WM_DESTROY: { onDestroy.trigger( ); break; } default: { break; } } return DefWindowProc( handle, message, wordParameter, longParameter ); } Window::operator HWND( ) const { return mHandle; } }
mit
CoderDojoPL/cd-sms
test/UserTest.php
7163
<?php /* * This file is part of the HMS project. * * (c) CoderDojo Polska Foundation * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Test; require_once __DIR__.'/../common/WebTestCaseHelper.php'; use Common\WebTestCaseHelper; use Entity\User; use Entity\Location; use Entity\Role; /** * @package Test * @author Michal Tomczak (m.tomczak@coderdojo.org.pl) */ class UserTest extends WebTestCaseHelper{ public function testIndexUnautheticate(){ $client=$this->createClient(); $url=$client->loadPage('/user') ->getUrl(); $this->assertEquals('/login',$url); } public function testIndex(){ $em=$this->getService('doctrine')->getEntityManager(); $location=new Location(); $location->setName('Location name'); $location->setCity('Location city'); $location->setStreet('Location street'); $location->setNumber('Location number'); $location->setApartment('Location apartment'); $location->setPostal('00-000'); $location->setPhone('+48100000000'); $location->setEmail('email@email.pl'); $this->persist($location); $user=$this->user; $this->flush(); $session=$this->createSession(); $session->set('user.id',$user->getId()); $client=$this->createClient($session); $client->loadPage('/user'); $this->assertEquals(200,$client->getResponse()->getStatusCode(),'Invalid status code.'); $tr=$client->getElement('table')->getElement('tbody')->findElements('tr'); $this->assertCount(1,$tr,'Invalid number records in grid'); $td=$tr[0]->findElements('td'); $this->assertCount(6,$td,'Invalid number columns in grid'); $this->assertEquals($user->getId(),$td[0]->getText(),'Invalid data columns id'); $this->assertEquals($user->getEmail(),$td[1]->getText(),'Invalid data columns email'); $this->assertEquals($user->getFirstName(),$td[2]->getText(),'Invalid data columns first name'); $this->assertEquals($user->getLastName(),$td[3]->getText(),'Invalid data columns last name'); 
$this->assertEquals($user->getLocation()->getName(),$td[4]->getText(),'Invalid data columns location'); $actionButtons=$td[5]->findElements('a'); $this->assertCount(1,$actionButtons,'Invalid number action buttons in grid'); $this->assertEquals('Edit',$actionButtons[0]->getText(),'Invalid label for edit button'); $actionButtons[0]->click(); $this->assertEquals('/user/edit/'.$user->getId(), $client->getUrl(),'Invalid edit url'); } public function testEditUnautheticate(){ $em=$this->getService('doctrine')->getEntityManager(); $location=new Location(); $location->setName('Location name'); $location->setCity('Location city'); $location->setStreet('Location street'); $location->setNumber('Location number'); $location->setApartment('Location apartment'); $location->setPostal('00-000'); $location->setPhone('+48100000000'); $location->setEmail('email@email.pl'); $this->persist($location); $user=new User(); $user->setEmail('test@coderdojo.org.pl'); $user->setFirstName('first name'); $user->setLastName('last name'); $user->setLocation($location); $this->persist($user); $this->flush(); $client=$this->createClient(); $url=$client->loadPage('/user/edit/'.$user->getId()) ->getUrl(); $this->assertEquals('/login',$url); } public function testEdit(){ $em=$this->getService('doctrine')->getEntityManager(); $location1=new Location(); $location1->setName('Location name'); $location1->setCity('Location city'); $location1->setStreet('Location street'); $location1->setNumber('Location number'); $location1->setApartment('Location apartment'); $location1->setPostal('00-000'); $location1->setPhone('+48100000000'); $location1->setEmail('email@email.pl'); $this->persist($location1); $location2=new Location(); $location2->setName('Location 2 name'); $location2->setCity('Location 2 city'); $location2->setStreet('Location 2 street'); $location2->setNumber('Location 2 number'); $location2->setApartment('Location 2 apartment'); $location2->setPostal('02-000'); $location2->setPhone('+28100000000'); 
$location2->setEmail('email2@email.pl'); $this->persist($location2); $role=new Role(); $role->setName('Admin'); foreach($em->getRepository('Entity\Functionality')->findAll() as $functionality){ $role->getFunctionalities()->add($functionality); } $this->persist($role); $user=new User(); $user->setEmail('test@coderdojo.org.pl'); $user->setFirstName('first name'); $user->setLastName('last name'); $user->setLocation($location1); $user->setRole($role); $this->persist($user); $this->flush(); $session=$this->createSession(); $session->set('user.id',$user->getId()); $client=$this->createClient($session); $client->loadPage('/user/edit/'.$user->getId()); $this->assertEquals(200,$client->getResponse()->getStatusCode(),'Invalid status code.'); $form=$client->getElement('form'); $fields=$form->getFields(); $this->assertCount(5,$fields,'Invalid number fields'); $this->assertEquals('test@coderdojo.org.pl',$fields[0]->getData(),'Invalid field value for email'); $this->assertEquals('first name',$fields[1]->getData(),'Invalid field value for first name'); $this->assertEquals('last name',$fields[2]->getData(),'Invalid field value for last name'); $this->assertEquals($location1->getId(),$fields[3]->getData(),'Invalid field value for location'); $fields[0]->setData(''); $fields[1]->setData(''); $fields[2]->setData(''); $fields[3]->setData(''); $form->submit(); $form=$client->getElement('form'); $fields=$form->getFields(); $this->assertEquals('/user/edit/'.$user->getId(),$client->getUrl(),'Invalid url form after submited location'); $this->assertCount(5,$fields,'Invalid number fields'); $this->assertFalse($fields[0]->getParent()->hasElement('label'),'Redundant error message for email'); $this->assertEquals('Value can not empty',$fields[1]->getParent()->getElement('label')->getText(),'Invalid error message for first name'); $this->assertEquals('Value can not empty',$fields[2]->getParent()->getElement('label')->getText(),'Invalid error message for last name'); $this->assertEquals('Value 
can not empty',$fields[3]->getParent()->getElement('label')->getText(),'Invalid error message for location'); $fields[0]->setData('chang@coderdojo.org.pl'); $fields[1]->setData('First name edit'); $fields[2]->setData('Last name edit'); $fields[3]->setData($location2->getId()); $form->submit(); $this->assertEquals('/user',$client->getUrl(),'Invalid url form after submited location'); $em->clear(); $users=$em->getRepository('Entity\User')->findAll(); $this->assertCount(2,$users, 'Invalid number users'); $user=$users[1]; $this->assertEquals('test@coderdojo.org.pl',$user->getEmail(),'Invalid user email'); $this->assertEquals('First name edit',$user->getFirstName(),'Invalid user first name'); $this->assertEquals('Last name edit',$user->getLastName(),'Invalid user last name'); $this->assertEquals($location2->getId(),$user->getLocation()->getId(),'Invalid user location'); } }
mit
gabrielprallon/SunProjectV2TheRiseOfTheTrueSun
SPV2TROTTS/Assets/Particle Playground/Extensions/Playground Recorder/PlaygroundRecorder.cs
34720
using UnityEngine; using System; using System.Collections; using System.Collections.Generic; namespace ParticlePlayground { /// <summary> /// The Particle Playground Recorder enables playback and scrubbing of recorded particles. /// The method used is storing built-in particle arrays as keyframes, then interpolate between current keyframe and next upon playback. /// /// Usage: /// Drag the PlaygroundRecorder.cs onto a particle system you wish to record. /// Use StartRecording(float keyframeInterval) to start record the particle system then StopRecording() to stop. /// Use RecordOneFrame() to add a recorded frame, use InsertOneFrame(int frame) to insert a recorded frame. /// To replay a recording use Play(float fromNormalizedTime, float speed, bool repeat) then Stop() to stop. This is essentially the same as using Scrub() with an increasing time step. /// Use Pause() to pause during replay. /// Use Scrub() to scrub to a normalized time (where all recorded frames are measured between 0f to 1f). /// </summary> [ExecuteInEditMode()] public class PlaygroundRecorder : MonoBehaviour { /// <summary> /// Determines if the particle playback and serialization should run asynchronously on another thread. /// </summary> public bool multithreading = true; /// <summary> /// The interval between keyframes while recording. /// </summary> public float keyframeInterval = .1f; /// <summary> /// The speed of the playback. Set this to a negative value to make the playback go backwards. /// </summary> public float playbackSpeed = 1f; /// <summary> /// The current position of the playhead (scrubber). /// </summary> public float playHead = 0; /// <summary> /// Determines if the playback should loop when reaching the end of the recording. /// </summary> public bool loopPlayback = true; /// <summary> /// Determines if playback particles should fade in at appearance. This will interpolate from birth color with 0 alpha to the upcoming keyframe's color. 
/// </summary> public bool fadeIn = true; /// <summary> /// Determines if playback particles should grow from 0 size at appearance. This will interpolate from 0 size to the upcoming keyframe's size. /// </summary> public bool sizeIn = true; /// <summary> /// Determines if interpolation should be skipped on a recorded end-frame. This will make the playback let particles instantly jump/cut to the next recorded frame. /// </summary> public bool skipInterpolationOnEndFrames = true; /// <summary> /// Determines if the particle system should switch simulation space to local upon playback. This enabled you to move the recording around freely in the scene based on its transform. /// </summary> public bool localSpaceOnPlayback = true; /// <summary> /// The playground system to record and playback into. /// </summary> [HideInInspector] public PlaygroundParticlesC playgroundSystem; /// <summary> /// The recorded data for serialization (data storage). /// </summary> [HideInInspector] public PlaygroundRecorderData recorderData; /// <summary> /// The list of recorded frames. Each frame has its own list of particle data, where playback will interpolate between these based on the playhead. 
/// </summary> [NonSerialized] public List<RecordedFrame> recordedFrames; private bool _inPlayback = false; private bool _isReplaying = false; private bool _isRecording = false; private bool _hasRecorderData = false; private bool _hasPlaygroundSystem = false; private bool _hasEditedRecordData = false; private float _recordingStarted; private float _lastRecordedFrameTime; private int _recordingStartFrame; private int _recordingEndFrame; private ParticleSystem.Particle[] _playbackParticles; private ParticleSystemSimulationSpace _previousSimulationSpace; private object locker = new object(); /**************************************************************************** Monobehaviours ****************************************************************************/ void OnEnable () { if (playgroundSystem == null) playgroundSystem = GetComponent<PlaygroundParticlesC>(); if (Application.isPlaying && recordedFrames == null) { if (recorderData != null) { _hasRecorderData = true; if (multithreading) LoadAsync(); else Load(); } else { recordedFrames = new List<RecordedFrame>(); _hasEditedRecordData = true; } } _hasPlaygroundSystem = playgroundSystem != null; } void OnDisable () { if (!_hasPlaygroundSystem) return; playgroundSystem.inPlayback = false; playgroundSystem.calculate = false; } /**************************************************************************** Public functions ****************************************************************************/ /// <summary> /// Determines if this Playground Recorder has recorded frames. /// </summary> /// <returns><c>true</c> if this Playground Recorder has recorded frames; otherwise, <c>false</c>.</returns> public bool HasRecordedFrames () { return recordedFrames!=null && recordedFrames.Count>0; } /// <summary> /// Determines if this Playground Recorder is recording. 
/// </summary> /// <returns><c>true</c> if this Playground Recorder is recording; otherwise, <c>false</c>.</returns> public bool IsRecording () { return _isRecording; } /// <summary> /// Determines if this Playground Recorder is currently replaying recorded data. /// </summary> /// <returns><c>true</c> if this Playground Recorder is replaying; otherwise, <c>false</c>.</returns> public bool IsReplaying () { return _isReplaying; } /// <summary> /// Determines whether this Playground Recorder is in playback mode (_isReplaying can be in a stopped state but _inPlayback can still be true). /// </summary> /// <returns><c>true</c> if this Playground Recorder is in playback; otherwise, <c>false</c>.</returns> public bool IsInPlayback () { return _inPlayback; } /// <summary> /// Returns the amount of recorded frames. /// </summary> /// <returns>The number of recorded frames.</returns> public int FrameCount () { if (recordedFrames == null) return -1; return recordedFrames.Count-1<0? 0 : recordedFrames.Count-1; } /// <summary> /// Gets the frame at normalized time. This is always a floored value to the closest frame of the normalizedTime. /// </summary> /// <returns>The frame at time.</returns> /// <param name="normalizedTime">Normalized time.</param> public int GetFrameAtTime (float normalizedTime) { if (recordedFrames == null || recordedFrames.Count==0) return 0; return Mathf.Clamp (Mathf.FloorToInt((recordedFrames.Count-1) * Mathf.Clamp01(normalizedTime)), 0, recordedFrames.Count-1); } /// <summary> /// Gets the floating frame number at normalized time. Example: Passing in normalizedTime of 0.5 and total FrameCount is 3, the floating frame would return 1.5. 
/// </summary> /// <returns>The floating frame at time.</returns> /// <param name="normalizedTime">Normalized time.</param> public float GetFloatingFrameAtTime (float normalizedTime) { if (recordedFrames == null || recordedFrames.Count<1) return 0; return (recordedFrames.Count-1) * Mathf.Clamp01(normalizedTime); } /// <summary> /// Gets the normalized time at specified recorded frame. /// </summary> /// <returns>The normalized time at frame.</returns> /// <param name="frame">The recorded frame.</param> public float GetTimeAtFrame (int frame) { frame = Mathf.Clamp (frame, 0, recordedFrames.Count); return (frame*1f)/(recordedFrames.Count-1); } /// <summary> /// Gets the keyframe interval at specified frame. /// </summary> /// <returns>The keyframe interval at frame.</returns> /// <param name="frame">Frame.</param> public float GetKeyframeIntervalAtFrame (int frame) { if (recordedFrames==null || frame<0 || frame>=recordedFrames.Count) return 0; return recordedFrames[frame].keyframeInterval; } /// <summary> /// Returns when the recording started using Time.realtimeSinceStartup. /// </summary> /// <returns>The started.</returns> public float RecordingStarted () { return _recordingStarted; } /// <summary> /// Gets the time (real time since startup) when the last frame was recorded. /// </summary> /// <returns>The last recorded frame time.</returns> public float GetLastRecordedFrameTime () { return _lastRecordedFrameTime; } /// <summary> /// Starts a recording until StopRecording() is called. This overload will by default use the previously set keyframe interval during recording. 
/// </summary> public void StartRecording () { if (playgroundSystem == null) return; playgroundSystem.inPlayback = false; if (_isRecording) StopRecording(); _isRecording = true; _isReplaying = false; _inPlayback = false; _recordingStarted = Time.realtimeSinceStartup; if (Application.isPlaying) StartCoroutine (RecordInternal(keyframeInterval)); } /// <summary> /// Starts a recording until StopRecording() is called. This overload takes a keyframe interval as parameter. /// </summary> /// <param name="keyframeInterval">The Keyframe Interval determines the rate of created keyframes (measured in seconds where 1f is 1 second).</param> public void StartRecording (float keyframeInterval) { this.keyframeInterval = keyframeInterval; StartRecording(); } /// <summary> /// Starts a recording with specified length or until StopRecording() is called. This overload takes a recording length and keyframe interval as parameter. /// </summary> /// <param name="recordingLength">The amount of seconds the recording should be.</param> /// <param name="keyframeInterval">The Keyframe Interval determines the rate of created keyframes (measured in seconds where 1f is 1 second).</param> public void StartRecording (float recordingLength, float keyframeInterval) { if (playgroundSystem == null) return; StartCoroutine(StartRecordingInternal(recordingLength, keyframeInterval)); } /// <summary> /// Records one frame. This can be useful if you want exact control of when keyframes should be created. /// </summary> public void RecordOneFrame () { RecFrame(); } /// <summary> /// Inserts a recorded frame into the specified frame index. This can be useful if you want to add frames into the recording that shouldn't be placed last. Use FrameCount() to determine how many frames you currently have. 
/// </summary> /// <param name="frame">The index of where the frame should be inserted.</param> /// <param name="frameType">The type of the inserted frame (by default FrameType.Middle).</param> public void InsertOneFrame (int frame, FrameType frameType = FrameType.Middle) { InsertRecFrame(frame, frameType); } /// <summary> /// Stops the ongoing recording. /// </summary> public void StopRecording () { if (_isRecording) { _isRecording = false; _recordingEndFrame = recordedFrames.Count-1; CancelInvoke("RecFrame"); if (recordedFrames.Count > 0) { _recordingStartFrame = Mathf.Clamp (_recordingStartFrame, 0, recordedFrames.Count); _recordingEndFrame = Mathf.Clamp (_recordingEndFrame, 0, recordedFrames.Count); recordedFrames[_recordingStartFrame].frameType = FrameType.Start; recordedFrames[_recordingEndFrame].frameType = FrameType.End; } } } /// <summary> /// Clears out the current recorded frames. /// </summary> public void ClearRecording () { _isReplaying = false; StopRecording(); StopPlayback(); recordedFrames = null; recordedFrames = new List<RecordedFrame>(); if (recorderData != null) recorderData.Clear(); _hasEditedRecordData = true; _recordingStartFrame = 0; _recordingEndFrame = 0; if (_hasPlaygroundSystem) { playgroundSystem.inPlayback = false; playgroundSystem.calculate = false; } } /// <summary> /// Starts the playback of this Playground Recorder. /// </summary> public void Play () { Play (playHead, playbackSpeed, loopPlayback); } /// <summary> /// Starts the playback of this Playground Recorder with specified playback speed. /// </summary> /// <param name="speed">The speed of the playback.</param> public void Play (float speed) { playbackSpeed = speed; Play (playHead, speed, loopPlayback); } /// <summary> /// Starts the playback of this Playground Recorder with specified starting point, playback speed and if looping should occur. 
/// </summary> /// <param name="fromNormalizedTime">From normalized time in recording.</param> /// <param name="speed">The speed of the playback.</param> /// <param name="repeat">If set to <c>true</c> then enable looping.</param> public void Play (float fromNormalizedTime, float speed, bool repeat) { if (!_hasPlaygroundSystem) return; if (!_isReplaying && localSpaceOnPlayback) { #if UNITY_5_5_OR_NEWER ParticleSystem.MainModule mainModule = playgroundSystem.shurikenParticleSystem.main; _previousSimulationSpace = mainModule.simulationSpace; mainModule.simulationSpace = ParticleSystemSimulationSpace.Local; #else _previousSimulationSpace = playgroundSystem.shurikenParticleSystem.simulationSpace; playgroundSystem.shurikenParticleSystem.simulationSpace = ParticleSystemSimulationSpace.Local; #endif } playgroundSystem.inPlayback = true; playbackSpeed = speed; loopPlayback = repeat; playHead = fromNormalizedTime; _isReplaying = true; StopRecording(); StartPlayback(); if (playHead >= 1f) playHead = 0; if (Application.isPlaying) StartCoroutine(PlayRecordedFrames(playHead)); } /// <summary> /// Pauses the playback of this Playground Recorder. /// </summary> public void Pause () { if (!_hasPlaygroundSystem) return; playgroundSystem.inPlayback = true; _isReplaying = false; if (_isRecording) StopRecording(); StartPlayback(); } /// <summary> /// Stops the playback and recording of this Playground Recorder. 
/// </summary> public void Stop () { if (!_hasPlaygroundSystem) return; if (_isReplaying && localSpaceOnPlayback) { #if UNITY_5_5_OR_NEWER ParticleSystem.MainModule mainModule = playgroundSystem.shurikenParticleSystem.main; mainModule.simulationSpace = _previousSimulationSpace; #else playgroundSystem.shurikenParticleSystem.simulationSpace = _previousSimulationSpace; #endif } playgroundSystem.inPlayback = false; _isReplaying = false; if (_isRecording) StopRecording(); StopPlayback(); } /// <summary> /// Stops the playback and recording of this Playground Recorder and serializes data into Recorder Data. If multithreading is enabled then the serialization will be asynchronous. /// </summary> public void StopAndSerialize () { if (!_hasPlaygroundSystem) return; playgroundSystem.inPlayback = false; _isReplaying = false; if (_isRecording) { StopRecording(); Serialize(); } StopPlayback(); } /// <summary> /// Serializes the current recorded frames into the Recorder Data. If multithreading is enabled then the serialization will be asynchronous. /// </summary> public void Serialize () { if (_hasRecorderData && _hasEditedRecordData) { if (multithreading) recorderData.SerializeAsync(recordedFrames); else recorderData.Serialize(recordedFrames); _hasEditedRecordData = false; } } /// <summary> /// Loads frames from the Recorder Data. /// </summary> public void Load () { if (recorderData != null) { recordedFrames = recorderData.CloneAsRecordedFrames(); _hasEditedRecordData = true; } else Debug.Log ("No Playground Recorder Data to load from!", gameObject); } /// <summary> /// Loads frames from the Recorder Data asynchronously. /// </summary> public void LoadAsync () { if (recorderData == null) { Debug.Log ("No Playground Recorder Data to load from!", gameObject); return; } PlaygroundC.RunAsync(() => { recordedFrames = recorderData.CloneAsRecordedFrames(); _hasEditedRecordData = true; }); } /// <summary> /// Scrub to specified time in particle recording. 
This will linearly interpolate between the closest recorded frames of the passed in time (normalized between 0f - 1f). /// </summary> /// <param name="normalizedTime">The normalized time (0f to 1f).</param> public void Scrub (float normalizedTime) { if (!HasRecordedFrames()) return; if (_isRecording) StopRecording(); if (!_inPlayback) StartPlayback(); if (multithreading) PlaygroundC.RunAsync(() => { lock (locker) { ScrubInternal(normalizedTime); } }); else ScrubInternal (normalizedTime); } /// <summary> /// Trims (remove) the specified frames outside of normalized leftTime to rightTime. Returns true if trimming occurred. /// </summary> /// <param name="leftTime">The normalized left time (0 - 1).</param> /// <param name="rightTime">The normalized right time (0 - 1).</param> public bool Trim (float leftTime, float rightTime) { if (recordedFrames.Count == 0) return false; int leftFrame = GetFrameAtTime(leftTime); int rightFrame = GetFrameAtTime(rightTime); bool didTrim = false; if (leftFrame>0) { recordedFrames.RemoveRange(0, leftFrame); didTrim = true; } if (rightFrame<recordedFrames.Count-1) { int rFrames = (rightFrame - leftFrame); recordedFrames.RemoveRange(rFrames, recordedFrames.Count-rFrames); didTrim = true; } if (didTrim) { _hasEditedRecordData = true; Serialize(); } return didTrim; } /// <summary> /// Trims (removes) the specified frames inside of normalized leftTime to rightTime. /// </summary> /// <param name="leftTime">The normalized left time (0 - 1).</param> /// <param name="rightTime">The normalized right time (0 - 1).</param> public void TrimInner (float leftTime, float rightTime) { if (recordedFrames.Count == 0) return; int leftFrame = GetFrameAtTime(leftTime); int rightFrame = GetFrameAtTime(rightTime); recordedFrames.RemoveRange(leftFrame, rightFrame-leftFrame); _hasEditedRecordData = true; Serialize(); } /// <summary> /// Sets the particle system's live particles at the current position in playback (using the playhead) of this Playground Recorder. 
If multithreading is enabled this operation will run asynchronously. /// </summary> public void SetParticleSystemAsCurrentPlayback () { SetParticleSystemAsRecording (playHead); } /// <summary> /// Sets the particle system's live particles at normalized time of the recorded frames. If multithreading is enabled this operation will run asynchronously. /// </summary> public void SetParticleSystemAsRecording (float normalizedTime) { if (playgroundSystem == null || _playbackParticles==null) return; if (multithreading) { PlaygroundC.RunAsync(() => { lock (locker) { SetParticleSystemAsRecordingInternal(normalizedTime); } }); } else { SetParticleSystemAsRecordingInternal(normalizedTime); } } /**************************************************************************** Internal functions ****************************************************************************/ #if UNITY_EDITOR float lastFrameTime; void Update () { _hasPlaygroundSystem = playgroundSystem != null; _hasRecorderData = recorderData != null; if (!_hasRecorderData) _hasEditedRecordData = true; // Enables recording in Editor non Play-mode if (!Application.isPlaying) { if (_isRecording) { if (Time.realtimeSinceStartup >= _lastRecordedFrameTime + keyframeInterval) RecFrame(); } else if (_isReplaying) { playHead += (playbackSpeed/(FrameCount()*keyframeInterval))*(Time.realtimeSinceStartup-lastFrameTime); if (playHead>1f) playHead = 1f; else if (playHead<0) playHead = 0; Scrub (playHead); if (playHead == 1f) { if (loopPlayback) playHead = 0; else _isReplaying = false; } else if (playHead == 0) { if (loopPlayback) playHead = 1f; else _isReplaying = false; } } } lastFrameTime = Time.realtimeSinceStartup; } #endif void RecFrame () { if (!_isRecording) return; if (!playgroundSystem.calculate) { CancelInvoke ("RecFrame"); return; } _lastRecordedFrameTime = Time.realtimeSinceStartup; if (playgroundSystem.particleCache != null) { if (recordedFrames == null) recordedFrames = new List<RecordedFrame>(); recordedFrames.Add 
(new RecordedFrame(playgroundSystem, keyframeInterval)); _hasEditedRecordData = true; } } void InsertRecFrame (int frame, FrameType frameType) { if (playgroundSystem.particleCache != null) { if (recordedFrames == null) recordedFrames = new List<RecordedFrame>(); recordedFrames.Insert (frame, new RecordedFrame(playgroundSystem, keyframeInterval)); recordedFrames[frame].frameType = frameType; _hasEditedRecordData = true; } } void StartPlayback () { if (!_inPlayback && recordedFrames!=null && recordedFrames.Count>0 && playgroundSystem!=null) { _inPlayback = true; playgroundSystem.calculate = false; playgroundSystem.inPlayback = true; if (recordedFrames[0].particles != null) _playbackParticles = (ParticleSystem.Particle[])recordedFrames[0].CloneAsParticles(); StartCoroutine (Playback()); } } void StopPlayback () { _inPlayback = false; if (_hasPlaygroundSystem) { playgroundSystem.calculate = true; } } void ScrubInternal (float normalizedTime) { // Set time parameters normalizedTime = Mathf.Clamp01(normalizedTime); int normalizedFrame = GetFrameAtTime(normalizedTime); // Interpolation skipping if this is an end-frame if (skipInterpolationOnEndFrames && normalizedTime < 1f && recordedFrames[normalizedFrame].frameType == FrameType.End) { normalizedTime = Mathf.Clamp01(normalizedTime + (GetTimeAtFrame(normalizedFrame+1)-normalizedTime)); playHead = normalizedTime; normalizedFrame = GetFrameAtTime(normalizedTime); } int targetFrame = Mathf.Clamp (normalizedFrame+1, 0, recordedFrames.Count); int rebirthRotationFrame = Mathf.Clamp (targetFrame+1, 0, recordedFrames.Count); // Set live particles in case the length doesn't add up if (_playbackParticles == null || _playbackParticles.Length != recordedFrames[normalizedFrame].particles.Length) { if (recordedFrames[normalizedFrame].particles != null) _playbackParticles = (ParticleSystem.Particle[])recordedFrames[normalizedFrame].CloneAsParticles(); else return; } // No need to interpolate if (normalizedFrame >= recordedFrames.Count-1 
|| normalizedTime == 0 && !loopPlayback) return; // Delta time is the time between the normalized frame and next based on normalized time // float deltaTime = Mathf.Lerp (0, (recordedFrames.Count-1)*1f, normalizedTime)%1f; float deltaTime = 1f-(targetFrame-(GetFloatingFrameAtTime(normalizedTime))); // Set particle values for (int i = 0; i<_playbackParticles.Length; i++) { // If particle is between death/birth blend in differently if (recordedFrames[normalizedFrame].particles[i].lifetime < recordedFrames[targetFrame].particles[i].lifetime) { Color32 inColor = fadeIn? new Color32(recordedFrames[normalizedFrame].particles[i].color.r, recordedFrames[normalizedFrame].particles[i].color.g, recordedFrames[normalizedFrame].particles[i].color.b, 0) : new Color32(); _playbackParticles[i].position = Vector3.Lerp (recordedFrames[normalizedFrame].particles[i].sourcePosition, recordedFrames[targetFrame].particles[i].position, deltaTime); #if UNITY_4_3 || UNITY_4_5 || UNITY_4_6 || UNITY_4_7 || UNITY_4_8 || UNITY_4_9 || UNITY_5_0 || UNITY_5_1 || UNITY_5_2 _playbackParticles[i].size = Mathf.Lerp (!sizeIn? recordedFrames[normalizedFrame].particles[i].startingSize : 0, recordedFrames[targetFrame].particles[i].size, deltaTime); _playbackParticles[i].color = Color.Lerp (!fadeIn? recordedFrames[normalizedFrame].particles[i].color : inColor, recordedFrames[targetFrame].particles[i].color, deltaTime); #else _playbackParticles[i].startSize = Mathf.Lerp (!sizeIn? recordedFrames[normalizedFrame].particles[i].startingSize : 0, recordedFrames[targetFrame].particles[i].size, deltaTime); _playbackParticles[i].startColor = Color.Lerp (!fadeIn? 
recordedFrames[normalizedFrame].particles[i].color : inColor, recordedFrames[targetFrame].particles[i].color, deltaTime); #endif _playbackParticles[i].rotation = Mathf.Lerp (recordedFrames[targetFrame].particles[i].rotation - (recordedFrames[rebirthRotationFrame].particles[i].rotation * deltaTime), recordedFrames[targetFrame].particles[i].rotation, deltaTime); } // ...otherwise interpolate else { _playbackParticles[i].position = Vector3.Lerp (recordedFrames[normalizedFrame].particles[i].position, recordedFrames[targetFrame].particles[i].position, deltaTime); #if UNITY_4_3 || UNITY_4_5 || UNITY_4_6 || UNITY_4_7 || UNITY_4_8 || UNITY_4_9 || UNITY_5_0 || UNITY_5_1 || UNITY_5_2 _playbackParticles[i].size = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].size, recordedFrames[targetFrame].particles[i].size, deltaTime); _playbackParticles[i].color = Color.Lerp (recordedFrames[normalizedFrame].particles[i].color, recordedFrames[targetFrame].particles[i].color, deltaTime); #else _playbackParticles[i].startSize = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].size, recordedFrames[targetFrame].particles[i].size, deltaTime); _playbackParticles[i].startColor = Color.Lerp (recordedFrames[normalizedFrame].particles[i].color, recordedFrames[targetFrame].particles[i].color, deltaTime); #endif _playbackParticles[i].rotation = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].rotation, recordedFrames[targetFrame].particles[i].rotation, deltaTime); _playbackParticles[i].remainingLifetime = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].lifetime, recordedFrames[targetFrame].particles[i].lifetime, deltaTime); } } } void SetParticleSystemAsRecordingInternal (float normalizedTime) { normalizedTime = Mathf.Clamp01(normalizedTime); int normalizedFrame = GetFrameAtTime(normalizedTime); int targetFrame = Mathf.Clamp (normalizedFrame+1, 0, recordedFrames.Count-1); float deltaTime = 1f-(targetFrame-(GetFloatingFrameAtTime(normalizedTime))); float tStamp = 
recordedFrames[normalizedFrame].timeStamp; float currentTime = PlaygroundC.globalTime; int pCount = playgroundSystem.playgroundCache.position.Length; for (int i = 0; i<_playbackParticles.Length; i++) { playgroundSystem.playgroundCache.position[i%pCount] = Vector3.Lerp (recordedFrames[normalizedFrame].particles[i].position, recordedFrames[targetFrame].particles[i].position, deltaTime); playgroundSystem.playgroundCache.velocity[i%pCount] = Vector3.Lerp (recordedFrames[normalizedFrame].particles[i].velocity, recordedFrames[targetFrame].particles[i].velocity, deltaTime); playgroundSystem.playgroundCache.size[i%pCount] = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].size, recordedFrames[targetFrame].particles[i].size, deltaTime); playgroundSystem.playgroundCache.color[i%pCount] = Color.Lerp (recordedFrames[normalizedFrame].particles[i].color, recordedFrames[targetFrame].particles[i].color, deltaTime); playgroundSystem.playgroundCache.rotation[i%pCount] = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].rotation, recordedFrames[targetFrame].particles[i].rotation, deltaTime); playgroundSystem.playgroundCache.life[i%pCount] = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].playgroundLife, recordedFrames[targetFrame].particles[i].playgroundLife, deltaTime); playgroundSystem.playgroundCache.lifetimeSubtraction[i%pCount] = Mathf.Lerp (recordedFrames[normalizedFrame].particles[i].playgroundLifetimeSubtraction, recordedFrames[targetFrame].particles[i].playgroundLifetimeSubtraction, deltaTime); playgroundSystem.playgroundCache.birth[i%pCount] = currentTime + (recordedFrames[normalizedFrame].particles[i].playgroundStartLifetime-tStamp); playgroundSystem.playgroundCache.death[i%pCount] = currentTime + (recordedFrames[normalizedFrame].particles[i].playgroundEndLifetime-tStamp); } playgroundSystem.localTime = currentTime; playgroundSystem.LastTimeUpdated = currentTime; playgroundSystem.LocalDeltaTime = .001f; playgroundSystem.cameFromNonCalculatedFrame = 
false; playgroundSystem.cameFromNonEmissionFrame = false; playgroundSystem.loopExceeded = false; playgroundSystem.loopExceededOnParticle = -1; playgroundSystem.hasActiveParticles = true; StopAndSerialize(); } /**************************************************************************** Internal enumerators ****************************************************************************/ IEnumerator StartRecordingInternal (float recordingLength, float keyframeInterval) { _isReplaying = false; _inPlayback = false; _isRecording = true; playgroundSystem.inPlayback = false; StartCoroutine(RecordInternal(keyframeInterval)); yield return new WaitForSeconds(recordingLength); StopRecording(); } IEnumerator RecordInternal (float keyframeInterval) { _recordingStartFrame = recordedFrames.Count>0? recordedFrames.Count-1 : 0; while (_isRecording) { RecFrame(); yield return new WaitForSeconds(keyframeInterval); } } IEnumerator Playback () { while (_inPlayback) { if (_playbackParticles != null) playgroundSystem.shurikenParticleSystem.SetParticles(_playbackParticles, _playbackParticles.Length); yield return null; } } IEnumerator PlayRecordedFrames (float fromNormalizedTime) { float t = fromNormalizedTime; while (_isReplaying) { t += (playbackSpeed/(FrameCount()*keyframeInterval))*Time.deltaTime; if (t>1f) t = 1f; else if (t<0) t = 0; Scrub (t); if (t == 1f) { if (loopPlayback) t = 0; else _isReplaying = false; } else if (t == 0) { if (loopPlayback) t = 1f; else _isReplaying = false; } playHead = t; yield return null; } } } /// <summary> /// A Playback Particle is a struct for keeping information about one single particle in a Playground Recorder's recorded data. 
/// </summary> public struct PlaybackParticle { [HideInInspector] public Vector3 position; [HideInInspector] public Vector3 velocity; [HideInInspector] public float rotation; [HideInInspector] public float size; [HideInInspector] public float lifetime; [HideInInspector] public float startLifetime; [HideInInspector] public float playgroundLife; [HideInInspector] public float playgroundStartLifetime; [HideInInspector] public float playgroundEndLifetime; [HideInInspector] public float playgroundLifetimeSubtraction; [HideInInspector] public Color32 color; [HideInInspector] public Vector3 sourcePosition; [HideInInspector] public float startingSize; public PlaybackParticle (Vector3 position, Vector3 velocity, float rotation, float size, float lifetime, float startLifetime, float playgroundLife, float playgroundStartLifetime, float playgroundEndLifetime, float playgroundLifetimeSubtraction, Color32 color, Vector3 sourcePosition, float startingSize ) { this.position = position; this.velocity = velocity; this.rotation = rotation; this.size = size; this.lifetime = lifetime; this.startLifetime = startLifetime; this.playgroundLife = playgroundLife; this.playgroundStartLifetime = playgroundStartLifetime; this.playgroundEndLifetime = playgroundEndLifetime; this.playgroundLifetimeSubtraction = playgroundLifetimeSubtraction; this.color = color; this.sourcePosition = sourcePosition; this.startingSize = startingSize; } public ParticleSystem.Particle CloneAsParticle (bool includeVelocity = false) { ParticleSystem.Particle particle = new ParticleSystem.Particle(); particle.position = position; particle.rotation = rotation; particle.remainingLifetime = lifetime; particle.startLifetime = startLifetime; #if UNITY_4_3 || UNITY_4_5 || UNITY_4_6 || UNITY_4_7 || UNITY_4_8 || UNITY_4_9 || UNITY_5_0 || UNITY_5_1 || UNITY_5_2 particle.size = size; particle.color = color; #else particle.startSize = size; particle.startColor = color; #endif if (includeVelocity) particle.velocity = velocity; 
return particle; } public SerializedParticle CloneAsSerializedParticle () { SerializedParticle particle = new SerializedParticle( position, velocity, rotation, size, lifetime, startLifetime, playgroundLife, playgroundStartLifetime, playgroundEndLifetime, playgroundLifetimeSubtraction, color, sourcePosition, startingSize ); return particle; } public PlaybackParticle Clone () { PlaybackParticle particle = new PlaybackParticle( position, velocity, rotation, size, lifetime, startLifetime, playgroundLife, playgroundStartLifetime, playgroundEndLifetime, playgroundLifetimeSubtraction, color, sourcePosition, startingSize ); return particle; } } /// <summary> /// A frame type describes where in the total recording the recorded frame was made. /// </summary> public enum FrameType { /// <summary> /// The frame was created at the start of recording. /// </summary> Start, /// <summary> /// The frame was created at the middle of recording. /// </summary> Middle, /// <summary> /// The frame was created at the end of recording. /// </summary> End } }
mit
Tuxion/tx.cms-autologin
Views.php
144
<?php namespace components\autologin; if(!defined('TX')) die('No direct access.'); class View extends \dependencies\BaseComponent { }
mit
LiquidFire/jisho-quick-radicals
aliases.js
21067
aliases = { // Strokes: 1 // ----------- // 一 1: [ "一", "-", "1", "ichi", "iti", "one", // WaniKani ], // | 2: [ "|", "line", ], // 丶 3: [ "丶", "`", "drop", // WaniKani "tick", ], // ノ or 丿 4: [ "ノ", "丿", "/", "no", "slide", // WaniKani ], // 乙 5: [ "乙", "z", "otome", // as in 乙女 "nose", // WaniKani ], // 亅 6: [ "亅", "]", "barb", // WaniKani ], // Strokes: 2 // ----------- // 二 7: [ "二", "=", "2", "ni", "two", // WaniKani ], // 亠 8: [ "亠", "lid", // WaniKani ], // 人 9: [ "人", "hito", "person", // WaniKani ], // ⺅ 10: [ "⺅", "イ", "i", "ninben", "personleft", "hitoleft", "leader", // WaniKani ], // 𠆢 11: [ "𠆢", "亼", "^", "persontop", "hitotop", "hitoyane", "roof", // WaniKani ], // 儿 12: [ "儿", "legs", // WaniKani ], // 入 13: [ "入", "nyuu", "hairu", "enter", // WaniKani ], // ハ 14: [ "ハ", "/\\", "ha", "fins", // WaniKani ], // 丷 15: [ "丷", "''", "`'", "``", "\"", "horns", // WaniKani ], // 冂 16: [ "冂", "upside-down box", "upside down box", "head", // WaniKani ], // 冖 17: [ "冖", "[", "wa", "wakannmuri", "cover", "forehead", // WaniKani ], // 冫 18: [ "冫", ";", "ice", // WaniKani ], // 几 19: [ "几", "tukue", "desk", "table", // WaniKani ], // 凵 20: [ "凵", "box", // WaniKani "open box", ], // 刀 21: [ "刀", "katana", "sword", // WaniKani ], // ⺉ 22: [ "⺉", "リ", "ri", "ribs", // WaniKani ], // 力 23: [ "力", "tikara", "chikara", "ryoku", "power", // WaniKani ], // 勹 24: [ "勹", "wrap", "enclosure", // WaniKani ], // 匕 25: [ "匕", "hi", "saji", "spoon", // WaniKani ], // 匚 26: [ "匚", "hakogamae", "side box", "cage", // WaniKani ], // 十 27: [ "十", "10", "juu", "jyuu", "zyuu", "ten", "cross", // WaniKani ], // 卜 28: [ "卜", "to", "toe", // WaniKani ], // 卩 29: [ "卩", "seal", "stamp", // WaniKani ], // 厂 30: [ "厂", "cliff", // WaniKani ], // 厶 31: [ "厶", "mu", "pile", // WaniKani ], // 又 32: [ "又", "mata", "again", "stool", // WaniKani ], // マ 33: [ "マ", "ma", "mama", // WaniKani ], // 九 34: [ "九", "9", "nine", // WaniKani ], // ユ 35: [ "ユ", "yu", "hook", // WaniKani ], // 乃 36: [ 
"乃", "stairs", // WaniKani ], // Strokes: 3 // ----------- // ⻌ 37: [ "⻌", "water slide", // WaniKani ], // 口 38: [ "口", "kuti", "kuchi", "mouth", // WaniKani ], // 囗 39: [ "囗", "kunigamae", "surround", ], // 土 40: [ "土", "tuti", "tsuchi", "soil", "earth", "grave", // WaniKani ], // 士 41: [ "士", "si", "shi", "scholar", "bachelor", "samurai", // WaniKani ], // 夂 42: [ "夂", "fuyu", "winter", // WaniKani ], // 夕 43: [ "夕", "タ", "ta", "yuu", "yuube", "sunset", "evening", // WaniKani ], // 大 44: [ "大", "oo", "ookii", "tai", "dai", "big", // WaniKani ], // 女 45: [ "女", "jo", "onna", "onnna", "female", "woman", // WaniKani ], // 子 46: [ "子", "ko", "seed", "child", // WaniKani ], // 宀 47: [ "宀", "crown", // "roof", // conflicts with WaniKani "helmet", // WaniKani ], // 寸 48: [ "寸", "mamori", // as in 守 "mamoru", "measure", "measurement", // WaniKani ], // 小 49: [ "小", "syou", "shou", "small", // WaniKani ], // ⺌ 50: [ "⺌", "'''", "```", "triceratops", // WaniKani ], // 尢 51: [ "尢", "dainomageasi", "dainomageashi", "sizu", // as in 沈 "shizu", "sizumu", "shizumu", "lame", "pirate", // WaniKani ], // 尸 52: [ "尸", "p", "P", "corpse", "flag", // WaniKani ], // 屮 53: [ "屮", "gyaku", // as in 逆 "sprout", ], // 山 54: [ "山", "yama", "mountain", // WaniKani ], // 川 55: [ "川", "kawa", "river", // WaniKani ], // 巛 56: [ "巛", "<<<", "magarigawa", "curving river", "arrows", // WaniKani ], // 工 57: [ "工", "エ", "e", "I", "kou", "work", "construction", // WaniKani ], // 已 58: [ "已", "己", "巳", "onore", "snake", // WaniKani ], // 巾 59: [ "巾", "si2", "shi2", "city", "town", "market", "two face", // WaniKani ], // 干 60: [ "干", "hosu", "dry", "antenna", // WaniKani ], // 幺 61: [ "幺", "kei", // as in 系 "itogasira", "itogashira", "poop", // WaniKani ], // 广 62: [ "广", "dotted cliff", "mullet", // WaniKani ], // 廴 63: [ "廴", "long stride", "big bird", // WaniKani ], // 廾 64: [ "廾", "h", "H", "twenty", // WaniKani ], // 弋 65: [ "弋", "siki", // as in 式 "shiki", "ceremony", // WaniKani ], // 弓 66: [ 
"弓", "bow", "yumi", "spring", // WaniKani ], // ヨ 67: [ "ヨ", "E", "yo", "wolverine", // WaniKani ], // 彑 68: [ "彑", "go2", // as in 互 "tagai", "mutual", "shuriken", // WaniKani ], // 彡 69: [ "彡", "///", "ミ", "3", "mi", "beard", "hair", // WaniKani ], // 彳 70: [ "彳", "step", "going man", "loiter", // WaniKani ], // ⺖ 71: [ "⺖", "heart left", "left heart", "fish stick", // WaniKani ], // ⺘ 72: [ "⺘", "motu", // as in 持 "motsu", "utu", // as in 打 "utsu", "hand left", "left hand", "nailbat", // WaniKani ], // ⺡ 73: [ "⺡", "氵", "water left", "left water", "tunami", "tsunami", // WaniKani ], // ⺨ 74: [ "⺨", "犭", "dog left", "left dog", "animal", // WaniKani ], // ⺾ 75: [ "⺾", "艹", "hana", // as in 花 "grass", "flowers", // WaniKani ], // ⻏ 76: [ "⻏", "b", "B", "bu", // as in 部 "butcher", // WaniKani ], // ⻖ 77: [ "⻖", "fusegu", // as in 防 "left b", "b left", "butcher left", "left butcher", ], // 也 78: [ "也", "hoka", // as in 他 "ike", // as in 池 "other", "lake", "alligator", // WaniKani ], // 亡 79: [ "亡", "bou", "death", // WaniKani ], // 及 80: [ "及", "kyuu", "escalator", // WaniKani ], // 久 81: [ "久", "hisa", "hisasiburi", "hisashiburi", "long time", "raptor cage", // WaniKani ], // Strokes: 4 // ----------- // ⺹ 82: [ "⺹", "mono", // as in 者 "coffin", // WaniKani ], // 心 83: [ "心", "kokoro", "heart", // WaniKani ], // 戈 84: [ "戈", "halberd", "drunkard", // WaniKani ], // 戸 85: [ "戸", "to2", "door", // WaniKani ], // 手 86: [ "手", "te", "hand", // WaniKani ], // 支 87: [ "支", "si3", "shi3", "branch", "sasaeru", "frog", // WaniKani ], // 攵 88: [ "攵", "strike", ], // 文 89: [ "文", "bun", "bunn", "script", "doll", // WaniKani ], // 斗 90: [ "斗", "dipper", "chopsticks", // WaniKani ], // 斤 91: [ "斤", "ono", "axe", // WaniKani ], // 方 92: [ "方", "hou", "kata", "square", "direction", // WaniKani ], // 无 93: [ "无", "negative", ], // 日 94: [ "日", "hi2", "niti", "nichi", "day", "sun", // WaniKani ], // 曰 95: [ "曰", "hirabi", ], // 月 96: [ "月", "tuki", "tsuki", "getu", "getsu", "gatu", 
"gatsu", "month", "moon", // WaniKani ], // 木 97: [ "木", "ki", "moku", "tree", // WaniKani ], // 欠 98: [ "欠", "tugi", // as in 次 "tsugi", "yawn", "lack", // WaniKani ], // 止 99: [ "止", "tome", "tomeru", "yame", "yameru", "stop", // WaniKani ], // 歹 100: [ "歹", "retu", // as in 列 "retsu", "tatoe", // as in 例 "decay", "bad", // WaniKani ], // 殳 101: [ "殳", "pike", "yaku", // as in 役 "setu", // as in 設 "setsu", "ikea", // WaniKani ], // 比 102: [ "比", "hi3", "ratio", "compare", // WaniKani ], // 毛 103: [ "毛", "mou", "wool", "hair2", "fur", // WaniKani ], // 氏 104: [ "氏", "si4", "shi4", "karesi", "kareshi", "clan", "duck", // WaniKani ], // 气 105: [ "气", "ki2", "steam", // WaniKani ], // 水 106: [ "水", "sui", "mizu", "water", // WaniKani ], // 火 107: [ "火", "ka", "hi4", "fire", // WaniKani ], // ⺣ 108: [ "⺣", "netu", // as in 熱 "netsu", "fish tail", // WaniKani ], // 爪 109: [ "爪", "tume", "tsume", "nail", "talon", "claw", // WaniKani ], // 父 110: [ "父", "titi", "chichi", "kou2", // as in 交 "kawasu", "exchange", "father", // WaniKani ], // 爻 111: [ "爻", "divination", ], // 爿 112: [ "爿", "kita", // as in 北 "left kata2", "kata2 left", ], // 片 113: [ "片", "kata2", "waiter", // WaniKani ], // 牛 114: [ "牛", "usi", "ushi", "gyuu", "cow", // WaniKani ], // 犬 115: [ "犬", "inu", "dog", // WaniKani ], // ⺭ 116: [ "⺭", "ネ", "ne", "pelican", // WaniKani ], // 王 117: [ "王", "ou", "king", // WaniKani ], // 元 118: [ "元", "gen", "genn", "moto", "origin", // WaniKani ], // 井 119: [ "井", "i2", "well", // WaniKani ], // 勿 120: [ "勿", "mono2", // as in 物 "not", "wing", // WaniKani ], // 尤 121: [ "尤", "mottomo", ], // 五 122: [ "五", "5", "five", // WaniKani ], // 屯 123: [ "屯", "jun", // as in 純 "junn", ], // 巴 124: [ "巴", "tomoe", ], // 毋 125: [ "毋", "mai", // as in 毎 "do not", ], // Strokes: 5 // ----------- // 玄 126: [ "玄", "gen2", "genn2", "genkan", // as in 玄関 "gennkann", "dark", "mysterious", // WaniKani ], // 瓦 127: [ "瓦", "tile", "ga", "kawara", ], // 甘 128: [ "甘", "ama", "amai", 
"sweet", // WaniKani ], // 生 129: [ "生", "sei", "raw", "birth", "life", // WaniKani ], // 用 130: [ "用", "you", "use", "task", "elephant", // WaniKani ], // 田 131: [ "田", "ta2", "rice field", "rice paddy", // WaniKani ], // 疋 132: [ "疋", "bolt of cloth", "hiki", "coat rack", // WaniKani ], // 疒 133: [ "疒", "byou", // as in 病気 "sickness", "sick", // WaniKani ], // 癶 134: [ "癶", "hatu", "hatsu", "footsteps", "dotted tent", "tent", // WaniKani ], // 白 135: [ "白", "haku", "siro", "shiro", "white", // WaniKani ], // 皮 136: [ "皮", "hi5", "hifu", "kawa2", "hide", "skin", // WaniKani ], // 皿 137: [ "皿", "sara", "dish", "plate", // WaniKani ], // 目 138: [ "目", "me", "eye", // WaniKani "eyes", ], // 矛 139: [ "矛", "spear", "hoko", "soldier", // WaniKani ], // 矢 140: [ "矢", "ya", "arrow", // WaniKani ], // 石 141: [ "石", "isi", "ishi", "seki", "stone", // WaniKani ], // 示 142: [ "示", "ancestor", "veneration", "simesu", "shimesu", "jackhammer", // WaniKani ], // 禸 143: [ "禸", "rump", "track", "juu2", ], // 禾 144: [ "禾", "grain", "two-branch tree", "two branch tree", "ine", "nogi", "nogihen", "pine", // WaniKani ], // 穴 145: [ "穴", "ana", "cave", "hole", // WaniKani ], // 立 146: [ "立", "ritu", "ritsu", "tatu", "tatsu", "erect", "stand", // WaniKani ], // ⻂ 147: [ "⻂", "ネ2", "ne2", "ne with tail", "ne tail", "pelican2", ], // 世 148: [ "世", "yo2", "se", "sei2", "sekai", "seiki", "yononaka", "generation", // WaniKani ], // 巨 149: [ "巨", "kyo", "giant", // WaniKani ], // 冊 150: [ "冊", "satu", "satsu", "tomes", "books", // WaniKani ], // 母 151: [ "母", "haha", "mother", "drawer", // WaniKani ], // ⺲ 152: [ "⺲", "net", "amigasira", "amigashira", "sauron", // WaniKani ], // 牙 153: [ "牙", "kiba", "tooth", "fang", // WaniKani ], // Strokes: 6 // ----------- // 瓜 154: [ "瓜", "ka2", "suika", "kabocha", "kabotya", "pumpkin", "watermelon", "melon", // WaniKani ], // 竹 155: [ "竹", "take", "bamboo", // WaniKani ], // 米 156: [ "米", "bei", "kome", "rice", // WaniKani ], // 糸 157: [ "糸", "ito", 
"silk", "yarn", "string", "thread", // WaniKani ], // 缶 158: [ "缶", "kan", "hotogi", "tin", "jar", "can", // WaniKani ], // 羊 159: [ "羊", "hitsuji", "hituji", "hituzi", "you2", "sheep", // WaniKani ], // 羽 160: [ "羽", "hane", "feather", "feathers", // WaniKani ], // 而 161: [ "而", "shikashite", "sikasite", "and", "but", "rake", // WaniKani ], // 耒 162: [ "耒", "raisuki", "plough", "redwood", // WaniKani ], // 耳 163: [ "耳", "mimi", "ears", "ear", // WaniKani ], // 聿 164: [ "聿", "fudedukuri", "fudedzukuri", "fudezukuri", "ink brush", "brush", // WaniKani ], // 肉 165: [ "肉", "niku", "meat", // WaniKani ], // 自 166: [ "自", "ji", "mizukara", "self", // WaniKani ], // 至 167: [ "至", "arrive", "itaru", "boob grave", // WaniKani ], // 臼 168: [ "臼", "usu", "mortar", "grenade", // WaniKani ], // 舌 169: [ "舌", "sita", "shita", "zetu", "zetsu", "tongue", ], // 舟 170: [ "舟", "fune", "boat", "sail", // WaniKani ], // 艮 171: [ "艮", "ryou", "yoi", "ii", "stilness", "stopping", "good", // WaniKani ], // 色 172: [ "色", "iro", "colour", "color", // WaniKani ], // 虍 173: [ "虍", "tora", "torakanmuri", "tiger", // WaniKani ], // 虫 174: [ "虫", "musi", "mushi", "worm", "insect", // WaniKani ], // 血 175: [ "血", "ti", "chi", "ketu", "ketsu", "blood", // WaniKani ], // 行 176: [ "行", "kou3", "gyou", "iku", "yuku", "go", // WaniKani ], // 衣 177: [ "衣", "i3", "koromo", "clothes", // WaniKani ], // 西 178: [ "西", "nisi", "nishi", "west", // WaniKani ], // Strokes: 7 // ----------- // 臣 179: [ "臣", "omi", "retainer", "minister", "official", "servant", // WaniKani ], // 見 180: [ "見", "mi2", "miru", "view", "look", "see", // WaniKani ], // 角 181: [ "角", "tuno", "tsuno", "kado", "horn", "angle", // WaniKani ], // 言 182: [ "言", "iu", "speech", "say", // WaniKani ], // 谷 183: [ "谷", "tani", "valley", // WaniKani ], // 豆 184: [ "豆", "mame", "tou", "bean", "beans", // WaniKani ], // 豕 185: [ "豕", "buta", "inoko", "pig", // WaniKani ], // 豸 186: [ "豸", "mujina", "cat", "badger", // WaniKani ], // 貝 187: [ 
"貝", "kai", "shell", "clam", // WaniKani ], // 赤 188: [ "赤", "aka", "red", // WaniKani ], // 走 189: [ "走", "hasiru", "hashiru", "run", // WaniKani ], // 足 190: [ "足", "asi", "ashi", "foot", // WaniKani ], // 身 191: [ "身", "mi3", "sin2", "shin2", "body", "somebody", // WaniKani ], // 車 192: [ "車", "kuruma", "sha", "sya", "cart", "car", // WaniKani ], // 辛 193: [ "辛", "karai", "turai", "tsurai", "bitter", "spicy", // WaniKani ], // 辰 194: [ "辰", "shinnotatsu", "sinnotatu", "morning", "superman", // WaniKani ], // 酉 195: [ "酉", "hiyominotori", "wine", "sake", "alcohol", // WaniKani ], // 釆 196: [ "釆", "wakareru", "distinguish", "sickle", // WaniKani ], // 里 197: [ "里", "sato", "village", "sunflower", // WaniKani ], // 舛 198: [ "舛", "maiasi", "maiashi", "oppose", "protester", // WaniKani ], // 麦 199: [ "麦", "mugi", "wheat", "oat", ], // Strokes: 8 // ----------- // 金 200: [ "金", "kane", "kin", "kinn", "metal", "gold", // WaniKani ], // 長 201: [ "長", "naga", "nagai", "chou", "tyou", "long", // WaniKani ], // 門 202: [ "門", "mon", "monn", "gate", // WaniKani ], // 隶 203: [ "隶", "rei", "slave", ], // 隹 204: [ "隹", "furutori", "small bird", "sin3", "shin3", "turkey", // WaniKani ], // 雨 205: [ "雨", "ame", "rain", // WaniKani ], // 青 206: [ "青", "ao", "aoi", "blue", // WaniKani ], // 非 207: [ "非", "hi6", "wrong", "injustice", // WaniKani ], // 奄 208: [ "奄", "ore", ], // 岡 209: [ "岡", "oka", "hill", // WaniKani ], // 免 210: [ "免", "men", "menn", "excuse", ], // 斉 211: [ "斉", "sai", "stilts", // WaniKani ], // Strokes: 9 // ----------- // 面 212: [ "面", ], // 革 213: [ "革", ], // 韭 214: [ "韭", ], // 音 215: [ "音", ], // 頁 216: [ "頁", ], // 風 217: [ "風", ], // 飛 218: [ "飛", ], // 食 219: [ "食", ], // 首 220: [ "首", ], // 香 221: [ "香", ], // 品 222: [ "品", ], // Strokes: 10 // ----------- // 馬 223: [ "馬", ], // 骨 224: [ "骨", ], // 高 225: [ "高", ], // 髟 226: [ "髟", ], // 鬥 227: [ "鬥", ], // 鬯 228: [ "鬯", ], // 鬲 229: [ "鬲", ], // 鬼 230: [ "鬼", ], // 竜 231: [ "竜", ], // 韋 232: [ "韋", ], 
// Strokes: 11 // ----------- // 魚 233: [ "魚", ], // 鳥 234: [ "鳥", ], // 鹵 235: [ "鹵", ], // 鹿 236: [ "鹿", ], // 麻 237: [ "麻", ], // 亀 238: [ "亀", ], // 啇 239: [ "啇", ], // 黄 240: [ "黄", ], // 黒 241: [ "黒", ], // Strokes: 12 // ----------- // 黍 242: [ "黍", ], // 黹 243: [ "黹", ], // 無 244: [ "無", ], // 歯 245: [ "歯", ], // Strokes: 13 // ----------- // 黽 246: [ "黽", ], // 鼎 247: [ "鼎", ], // 鼓 248: [ "鼓", ], // 鼠 249: [ "鼠", ], // Strokes: 14 // ----------- // 鼻 250: [ "鼻", ], // 齊 251: [ "齊", ], // Strokes: 17 // ----------- // 龠 252: [ "龠", ], };
mit
VR-Robotica/AvatarComponents
Assets/VR_Robotica/SimpleEyeGaze/Scripts/Objects/VRR_Frustum_Object.cs
7965
using System.Collections; using UnityEngine; using com.VR_Robotica.AvatarComponents.Controllers; /// <summary> /// This script simply adds and removes potential ObjectsOfInterest from the Controller_Interest Script /// when objects enter or exit the collision area of the Frustum Collider Geometry /// </summary> namespace com.VR_Robotica.AvatarComponents.Objects { public class VRR_Frustum_Object : MonoBehaviour { [Tooltip("You can place script here, or it will be discovered if left null")] // create reference to Control Script to change list values public VRR_Interest_Controller InterestController; [Space] public bool ShowDebugLog; [Tooltip("Width, Height, Focus Distance")] public Vector3 FrustumSize = new Vector3(1.0f, 0.5f, 2.5f); public Vector3 FrustumOffSet; [HideInInspector] public Vector3 FrustumScale = new Vector3(1, 1, 1);//(150, 100, 200); private bool _isReady; #region CREATE FRUSTUM public IEnumerator Create() { if (!_isReady) { createFrustum(FrustumSize.x, FrustumSize.y, FrustumSize.z); setupFrustum(); getReferences(); _isReady = true; } yield return null; } private void createFrustum(float width, float height, float distance) { MeshCollider collider = this.gameObject.AddComponent<MeshCollider>(); collider.convex = true; collider.isTrigger = true; Mesh colliderMesh; colliderMesh = new Mesh(); colliderMesh.name = "Frustum Mesh"; Vector3[] frustumOriginPlane = new Vector3[4]; frustumOriginPlane[0] = new Vector3(-width * 0.1f, height * 0.1f, 0); frustumOriginPlane[1] = new Vector3(width * 0.1f, height * 0.1f, 0); frustumOriginPlane[2] = new Vector3(-width * 0.1f, -height * 0.1f, 0); frustumOriginPlane[3] = new Vector3(width * 0.1f, -height * 0.1f, 0); Vector3[] frustumDistantPlane = new Vector3[4]; frustumDistantPlane[0] = new Vector3(-width, height, distance); frustumDistantPlane[1] = new Vector3(width, height, distance); frustumDistantPlane[2] = new Vector3(-width, -height, distance); frustumDistantPlane[3] = new Vector3(width, -height, distance); 
colliderMesh.vertices = new Vector3[] { // bottom plane frustumDistantPlane[2], frustumDistantPlane[3], frustumOriginPlane[3], frustumOriginPlane[2], // left plane frustumOriginPlane[0], frustumDistantPlane[0], frustumDistantPlane[2], frustumOriginPlane[2], // front plane - Distant frustumDistantPlane[0], frustumDistantPlane[1], frustumDistantPlane[3], frustumDistantPlane[2], // back plane - Origin frustumOriginPlane[1], frustumOriginPlane[0], frustumOriginPlane[2], frustumOriginPlane[3], // right plane frustumDistantPlane[1], frustumOriginPlane[1], frustumOriginPlane[3], frustumDistantPlane[3], // top plane frustumOriginPlane[0], frustumOriginPlane[1], frustumDistantPlane[1], frustumDistantPlane[0] }; colliderMesh.triangles = new int[] { // Bottom 3, 1, 0, 3, 2, 1, // Left 3 + 4 * 1, 1 + 4 * 1, 0 + 4 * 1, 3 + 4 * 1, 2 + 4 * 1, 1 + 4 * 1, // Front 3 + 4 * 2, 1 + 4 * 2, 0 + 4 * 2, 3 + 4 * 2, 2 + 4 * 2, 1 + 4 * 2, // Back 3 + 4 * 3, 1 + 4 * 3, 0 + 4 * 3, 3 + 4 * 3, 2 + 4 * 3, 1 + 4 * 3, // Right 3 + 4 * 4, 1 + 4 * 4, 0 + 4 * 4, 3 + 4 * 4, 2 + 4 * 4, 1 + 4 * 4, // Top 3 + 4 * 5, 1 + 4 * 5, 0 + 4 * 5, 3 + 4 * 5, 2 + 4 * 5, 1 + 4 * 5, }; collider.sharedMesh = colliderMesh; } private void setupFrustum() { this.gameObject.name = "Frustum"; // Set to Layer[2] = Ignore Ray Cast this.gameObject.layer = 2; // Align and Scale Frustum this.transform.localEulerAngles = Vector3.zero; this.transform.localScale = FrustumScale; this.transform.localPosition = FrustumOffSet; Debug.Log("Frustum Offset: " + FrustumOffSet); this.gameObject.AddComponent<Rigidbody>(); this.gameObject.GetComponent<Rigidbody>().useGravity = false; this.gameObject.GetComponent<Rigidbody>().mass = 0.0f; // Add collision triggering script VRR_Frustum_Object of = this.gameObject.GetComponent<VRR_Frustum_Object>(); if (of == null) { Debug.Log("Adding Frustum Controller"); of = this.gameObject.AddComponent<VRR_Frustum_Object>(); } // setting reference of.InterestController = 
this.gameObject.GetComponent<VRR_Interest_Controller>(); } #endregion private void getReferences() { if (InterestController == null) { if (ShowDebugLog) { Debug.Log("Frustum: interest Controller Not Set, searching Parent Object for component"); } InterestController = transform.parent.gameObject.GetComponent<VRR_Interest_Controller>(); if (InterestController == null) { if (ShowDebugLog) { Debug.Log("Frustum: interest Controller Not Set, searching Grand Parent Object for component"); } InterestController = transform.parent.parent.gameObject.GetComponent<VRR_Interest_Controller>(); if (InterestController == null) { Debug.LogWarning("Controller_Interest Component Not Found in Parent or GrandParent Object!"); return; } } } } void OnTriggerEnter(Collider col) { if (_isReady) { if (col.gameObject != InterestController.gameObject) { VRR_ObjectOfInterest ooi = col.gameObject.GetComponent<VRR_ObjectOfInterest>(); if (ooi != null) { //Debug.Log(col.name + " is an object of interest."); // add game object to primary list InterestController.ObjectsOfInterest.Add(col.gameObject); // interupt cycle InterestController.InteruptCycle(col.gameObject); } } } } /// <summary> /// Remove Exited Object from the Objects Of Interest List in the Controller_Interest script /// If the exited object was being looked at, refresh the focus (clear points of interest and /// the currently looked at objects) /// </summary> /// <param name="col"></param> void OnTriggerExit(Collider col) { if (_isReady) { VRR_ObjectOfInterest ooi = col.gameObject.GetComponent<VRR_ObjectOfInterest>(); if (ooi != null) { // IF the object exiting is the CURRENT OBJECT being looked at // clear everything... if (ooi.gameObject == InterestController.CurrentObject) { InterestController.CurrentlyLookingAt = null; InterestController.CurrentObject = null; } // remove object from list InterestController.ObjectsOfInterest.Remove(col.gameObject); InterestController.ChangeObjectOfFocus(); } } } } }
mit
TekMonks/monkshu
backend/server/lib/apiregistry.js
8188
/** * (C) 2015, 2016, 2017, 2018, 2019, 2020, 2021. TekMonks. All rights reserved. * License: MIT - see enclosed LICENSE file. * * This is our main API Manager class. */ const fs = require("fs"); const path = require("path"); const querystring = require("querystring"); const app = require(`${CONSTANTS.LIBDIR}/app.js`); const API_REG_DISTM_KEY = "__org_monkshu_apiregistry_key"; let decoders, encoders, headermanagers, securitycheckers; function initSync() { let apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY) || JSON.parse(fs.readFileSync(CONSTANTS.API_REGISTRY)); LOG.info(`Read API registry: ${JSON.stringify(apireg)}`); if (!CLUSTER_MEMORY.get(API_REG_DISTM_KEY)) CLUSTER_MEMORY.set(API_REG_DISTM_KEY, apireg); const decoderPathAndRoots = [{path: `${CONSTANTS.ROOTDIR}/${CONSTANTS.API_MANAGER_DECODERS_CONF}`, root: CONSTANTS.ROOTDIR}]; const encoderPathAndRoots = [{path: `${CONSTANTS.ROOTDIR}/${CONSTANTS.API_MANAGER_ENCODERS_CONF}`, root: CONSTANTS.ROOTDIR}]; const headermanagersPathAndRoots = [{path: `${CONSTANTS.ROOTDIR}/${CONSTANTS.API_MANAGER_HEADERMANAGERS_CONF}`, root: CONSTANTS.ROOTDIR}]; const securitycheckersPathAndRoots = [{path: `${CONSTANTS.ROOTDIR}/${CONSTANTS.API_MANAGER_SECURITYCHECKERS_CONF}`, root: CONSTANTS.ROOTDIR}]; const apps = app.getApps(); for (const appObj of apps) { const app = Object.keys(appObj)[0]; if (fs.existsSync(`${CONSTANTS.APPROOTDIR}/${app}/conf/apiregistry.json`)) { let regThisRaw = fs.readFileSync(`${CONSTANTS.APPROOTDIR}/${app}/conf/apiregistry.json`); LOG.info(`Read App API registry for app ${app}: ${regThisRaw}`); let regThis = JSON.parse(regThisRaw); Object.keys(regThis).forEach(key => regThis[key] = (`../apps/${app}/${regThis[key]}`)); apireg = {...apireg, ...regThis}; } const appRoot = appObj[app]; if (fs.existsSync(`${appRoot}/${CONSTANTS.API_MANAGER_DECODERS_CONF}`)) decoderPathAndRoots.push( {path: `${appRoot}/${CONSTANTS.API_MANAGER_DECODERS_CONF}`, root: appRoot}); if 
(fs.existsSync(`${appRoot}/${CONSTANTS.API_MANAGER_ENCODERS_CONF}`)) encoderPathAndRoots.push( {path: `${appRoot}/${CONSTANTS.API_MANAGER_ENCODERS_CONF}`, root: appRoot}); if (fs.existsSync(`${appRoot}/${CONSTANTS.API_MANAGER_HEADERMANAGERS_CONF}`)) headermanagersPathAndRoots.push( {path: `${appRoot}/${CONSTANTS.API_MANAGER_HEADERMANAGERS_CONF}`, root: appRoot}); if (fs.existsSync(`${appRoot}/${CONSTANTS.API_MANAGER_SECURITYCHECKERS_CONF}`)) securitycheckersPathAndRoots.push( {path: `${appRoot}/${CONSTANTS.API_MANAGER_SECURITYCHECKERS_CONF}`, root: appRoot}); } CLUSTER_MEMORY.set(API_REG_DISTM_KEY, apireg); decoders = _loadSortedConfOjbects(decoderPathAndRoots); encoders = _loadSortedConfOjbects(encoderPathAndRoots); headermanagers = _loadSortedConfOjbects(headermanagersPathAndRoots); securitycheckers = _loadSortedConfOjbects(securitycheckersPathAndRoots); for (const decoderThis of decoders) if (decoderThis.initSync) decoderThis.initSync(apireg); for (const securitycheckerThis of securitycheckers) if (securitycheckerThis.initSync) securitycheckerThis.initSync(apireg); for (const headermanagerThis of headermanagers) if (headermanagerThis.initSync) headermanagerThis.initSync(apireg); for (const encoderThis of encoders) if (encoderThis.initSync) encoderThis.initSync(apireg); global.APIREGISTRY = this; } function getAPI(url) { const endPoint = new URL(url).pathname; const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); if (apireg[endPoint]) return path.resolve(`${CONSTANTS.ROOTDIR}/${_getAPIRegEntryAsURL(apireg[endPoint]).rawpathname}`); else return; } function decodeIncomingData(url, data, headers, servObject) { const endPoint = new URL(url).pathname; const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); let apiregentry = apireg[endPoint]; if (!apiregentry) return false; apiregentry = _getAPIRegEntryAsURL(apireg[endPoint]); let decoded = data; for (const decoderThis of decoders) decoded = decoderThis.decodeIncomingData(apiregentry, url, decoded, headers, servObject); 
return decoded; } function encodeResponse(url, respObj, reqHeaders, respHeaders, servObject) { const endPoint = new URL(url).pathname; const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); let apiregentry = apireg[endPoint]; if (!apiregentry) return false; apiregentry = _getAPIRegEntryAsURL(apireg[endPoint]); let encoded = respObj; for (const encoderThis of encoders) encoded = encoderThis.encodeResponse(apiregentry, endPoint, encoded, reqHeaders, respHeaders, servObject); return encoded; } function checkSecurity(url, req, headers, servObject, reason) { const endPoint = new URL(url).pathname; const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); let apiregentry = apireg[endPoint]; if (!apiregentry) { reason = {reason:"API endpoint missing", code:403}; return false; } apiregentry = _getAPIRegEntryAsURL(apireg[endPoint]); for (const securitycheckerThis of securitycheckers) if (!securitycheckerThis.checkSecurity(apiregentry, endPoint, req, headers, servObject, reason)) { reason.reason += ` ---- Failed on: ${securitycheckerThis.__org_monkshu_apiregistry_conf_modulename}`; return false; } return true; } function injectResponseHeaders(url, response, requestHeaders, responseHeaders, servObject) { const endPoint = new URL(url).pathname; const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); let apiregentry = apireg[endPoint]; if (!apiregentry) return; apiregentry = _getAPIRegEntryAsURL(apireg[endPoint]); for (const headermanagerThis of headermanagers) headermanagerThis.injectResponseHeaders(apiregentry, endPoint, response, requestHeaders, responseHeaders, servObject); } function listAPIs() { const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); return [...Object.keys(apireg)]; // clone for security } async function addAPI(path, apiregentry, app) { const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); apireg[path] = app?`../apps/${app}/${apiregentry}`:apiregentry; CLUSTER_MEMORY.set(API_REG_DISTM_KEY, apireg); const regFile = 
app?`${CONSTANTS.APPROOTDIR}/${app}/conf/apiregistry.json`:CONSTANTS.API_REGISTRY; const regFileObj = JSON.parse(await fs.promises.readFile(regFile)); regFileObj[path] = apiregentry; await fs.promises.writeFile(regFile, JSON.stringify(regFileObj, null, 4)); } const editAPI = addAPI; async function deleteAPI(path, app) { const apireg = CLUSTER_MEMORY.get(API_REG_DISTM_KEY); if (apireg[path]) delete apireg[path]; CLUSTER_MEMORY.set(API_REG_DISTM_KEY, apireg); const regFile = app?`${CONSTANTS.APPROOTDIR}/${app}/conf/apiregistry.json`:CONSTANTS.API_REGISTRY; const regFileObj = JSON.parse(await fs.promises.readFile(regFile)); if (regFileObj[path]) delete regFileObj[path]; await fs.promises.writeFile(regFile, JSON.stringify(regFileObj, null, 4)); } const getExtension = name => require(`${CONSTANTS.LIBDIR}/apiregistry_extensions/${name.toLowerCase()}.js`); function _loadSortedConfOjbects(pathAndRoots) { const sortedConfObjects = []; for (const {path, root} of pathAndRoots) { const rawObject = require(path); for (const key of Object.keys(rawObject)) sortedConfObjects.push( {"module":`${root}/lib/apiregistry_extensions/${key.toLowerCase()}.js`, "priority":rawObject[key]} ); } sortedConfObjects.sort((a,b) => (a.priority < b.priority) ? -1 : (a.priority > b.priority) ? 
1 : 0); for (const [i, confObject] of sortedConfObjects.entries()) { sortedConfObjects[i] = require(confObject.module); sortedConfObjects[i].__org_monkshu_apiregistry_conf_modulename = path.basename(confObject.module); } return sortedConfObjects; } function _getAPIRegEntryAsURL(endPoint) { // parses endpoint and converts to URL + legacy properties from url.parse we need const retURL = new URL(endPoint, "http://dummyhost/"); retURL.query = querystring.parse(retURL.search!=""?retURL.search.substring(1):""); retURL.rawpathname = retURL.search!=""?endPoint.substring(0, endPoint.indexOf("?")):endPoint; retURL.path = retURL.rawpathname+retURL.search; return retURL; } module.exports = {initSync, getAPI, listAPIs, addAPI, editAPI, deleteAPI, decodeIncomingData, checkSecurity, injectResponseHeaders, encodeResponse, getExtension};
mit
shuang1330/tf-faster-rcnn
lib/datasets/pascal_voc.py
11996
# -------------------------------------------------------- # Fast R-CNN # Copyright (c) 2015 Microsoft # Licensed under The MIT License [see LICENSE for details] # Written by Ross Girshick and Xinlei Chen # -------------------------------------------------------- from __future__ import absolute_import from __future__ import division from __future__ import print_function import os from datasets.imdb import imdb import datasets.ds_utils as ds_utils import xml.etree.ElementTree as ET import numpy as np import scipy.sparse import scipy.io as sio import utils.cython_bbox import pickle import subprocess import uuid from .voc_eval import voc_eval from model.config import cfg class pascal_voc(imdb): def __init__(self, image_set, year, devkit_path=None): imdb.__init__(self, 'voc_' + year + '_' + image_set) self._year = year self._image_set = image_set self._devkit_path = self._get_default_path() if devkit_path is None \ else devkit_path self._data_path = os.path.join(self._devkit_path, 'VOC' + self._year) self._classes = ('__background__', # always index 0 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor') self._class_to_ind = dict(list(zip(self.classes, list(range(self.num_classes))))) self._image_ext = '.jpg' self._image_index = self._load_image_set_index() # Default to roidb handler self._roidb_handler = self.gt_roidb self._salt = str(uuid.uuid4()) self._comp_id = 'comp4' # PASCAL specific config options self.config = {'cleanup': True, 'use_salt': True, 'use_diff': False, 'matlab_eval': False, 'rpn_file': None} assert os.path.exists(self._devkit_path), \ 'VOCdevkit path does not exist: {}'.format(self._devkit_path) assert os.path.exists(self._data_path), \ 'Path does not exist: {}'.format(self._data_path) def image_path_at(self, i): """ Return the absolute path to image i in the image sequence. 
""" return self.image_path_from_index(self._image_index[i]) def image_path_from_index(self, index): """ Construct an image path from the image's "index" identifier. """ image_path = os.path.join(self._data_path, 'JPEGImages', index + self._image_ext) assert os.path.exists(image_path), \ 'Path does not exist: {}'.format(image_path) return image_path def _load_image_set_index(self): """ Load the indexes listed in this dataset's image set file. """ # Example path to image set file: # self._devkit_path + /VOCdevkit2007/VOC2007/ImageSets/Main/val.txt image_set_file = os.path.join(self._data_path, 'ImageSets', 'Main', self._image_set + '.txt') assert os.path.exists(image_set_file), \ 'Path does not exist: {}'.format(image_set_file) with open(image_set_file) as f: image_index = [x.strip() for x in f.readlines()] return image_index def _get_default_path(self): """ Return the default path where PASCAL VOC is expected to be installed. """ return os.path.join(cfg.DATA_DIR, 'VOCdevkit' + self._year) def gt_roidb(self): """ Return the database of ground-truth regions of interest. This function loads/saves from/to a cache file to speed up future calls. 
""" cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl') if os.path.exists(cache_file): with open(cache_file, 'rb') as fid: try: roidb = pickle.load(fid) except: roidb = pickle.load(fid, encoding='bytes') print('{} gt roidb loaded from {}'.format(self.name, cache_file)) return roidb gt_roidb = [self._load_pascal_annotation(index) for index in self.image_index] with open(cache_file, 'wb') as fid: pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL) print('wrote gt roidb to {}'.format(cache_file)) return gt_roidb def rpn_roidb(self): if int(self._year) == 2007 or self._image_set != 'test': gt_roidb = self.gt_roidb() rpn_roidb = self._load_rpn_roidb(gt_roidb) roidb = imdb.merge_roidbs(gt_roidb, rpn_roidb) else: roidb = self._load_rpn_roidb(None) return roidb def _load_rpn_roidb(self, gt_roidb): filename = self.config['rpn_file'] print('loading {}'.format(filename)) assert os.path.exists(filename), \ 'rpn data not found at: {}'.format(filename) with open(filename, 'rb') as f: box_list = pickle.load(f) return self.create_roidb_from_box_list(box_list, gt_roidb) def _load_pascal_annotation(self, index): """ Load image and bounding boxes info from XML file in the PASCAL VOC format. """ filename = os.path.join(self._data_path, 'Annotations', index + '.xml') tree = ET.parse(filename) objs = tree.findall('object') if not self.config['use_diff']: # Exclude the samples labeled as difficult non_diff_objs = [ obj for obj in objs if int(obj.find('difficult').text) == 0] # if len(non_diff_objs) != len(objs): # print 'Removed {} difficult objects'.format( # len(objs) - len(non_diff_objs)) objs = non_diff_objs num_objs = len(objs) boxes = np.zeros((num_objs, 4), dtype=np.uint16) gt_classes = np.zeros((num_objs), dtype=np.int32) overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32) # "Seg" area for pascal is just the box area seg_areas = np.zeros((num_objs), dtype=np.float32) # Load object bounding boxes into a data frame. 
for ix, obj in enumerate(objs): bbox = obj.find('bndbox') # Make pixel indexes 0-based x1 = float(bbox.find('xmin').text) - 1 y1 = float(bbox.find('ymin').text) - 1 x2 = float(bbox.find('xmax').text) - 1 y2 = float(bbox.find('ymax').text) - 1 cls = self._class_to_ind[obj.find('name').text.lower().strip()] boxes[ix, :] = [x1, y1, x2, y2] gt_classes[ix] = cls overlaps[ix, cls] = 1.0 seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1) overlaps = scipy.sparse.csr_matrix(overlaps) return {'boxes': boxes, 'gt_classes': gt_classes, 'gt_overlaps': overlaps, 'flipped': False, 'seg_areas': seg_areas} def _get_comp_id(self): comp_id = (self._comp_id + '_' + self._salt if self.config['use_salt'] else self._comp_id) return comp_id def _get_voc_results_file_template(self): # VOCdevkit/results/VOC2007/Main/<comp_id>_det_test_aeroplane.txt filename = self._get_comp_id() + '_det_' + self._image_set + '_{:s}.txt' path = os.path.join( self._devkit_path, 'results', 'VOC' + self._year, 'Main', filename) # temporary_folder = os.path.join( # cfg.ROOT_DIR, # 'temp_output', # 'results', # 'VOC' + self._year, # 'Main') # if not os.path.exists(temporary_folder): # os.makedirs(temporary_folder) # path = os.path.join( # temporary_folder, # filename) return path def _write_voc_results_file(self, all_boxes): for cls_ind, cls in enumerate(self.classes): if cls == '__background__': continue print('Writing {} VOC results file'.format(cls)) filename = self._get_voc_results_file_template().format(cls) with open(filename, 'wt') as f: for im_ind, index in enumerate(self.image_index): # for im_ind, index in enumerate(self.image_index[:2]): dets = all_boxes[cls_ind][im_ind] if dets == []: continue # the VOCdevkit expects 1-based indices for k in range(dets.shape[0]): f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'. 
format(index, dets[k, -1], dets[k, 0] + 1, dets[k, 1] + 1, dets[k, 2] + 1, dets[k, 3] + 1)) def _do_python_eval(self, output_dir='output',experiment_setup='.'): annopath = os.path.join( self._devkit_path, 'VOC' + self._year, 'Annotations', '{:s}.xml') imagesetfile = os.path.join( self._devkit_path, 'VOC' + self._year, 'ImageSets', 'Main', self._image_set + '.txt') cachedir = os.path.join(self._devkit_path, 'annotations_cache') # cachedir = os.path.join(cfg.ROOT_DIR,'temp_output','annotation_cache') aps = [] # The PASCAL VOC metric changed in 2010 use_07_metric = True if int(self._year) < 2010 else False print('VOC07 metric? ' + ('Yes' if use_07_metric else 'No')) if not os.path.isdir(output_dir): os.mkdir(output_dir) for i, cls in enumerate(self._classes): if cls == '__background__': continue filename = self._get_voc_results_file_template().format(cls) rec, prec, ap = voc_eval( filename, annopath, imagesetfile, cls, cachedir, ovthresh=0.5, use_07_metric=use_07_metric) aps += [ap] print(('AP for {} = {:.4f}'.format(cls, ap))) if not os.path.exists(os.path.join(output_dir, '%s'%experiment_setup)): os.makedirs(os.path.join(output_dir, '%s'%experiment_setup)) with open(os.path.join(output_dir, '%s'%experiment_setup, cls + '_pr.pkl'), 'wb') as f: pickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f) # print('saved results in %s'%os.path.join(output_dir, '%s'%experiment_setup)) print(('Mean AP = {:.4f}'.format(np.mean(aps)))) print('~~~~~~~~') print('Results:') for ap in aps: print(('{:.3f}'.format(ap))) print(('{:.3f}'.format(np.mean(aps)))) print('~~~~~~~~') print('') print('--------------------------------------------------------------') print('Results computed with the **unofficial** Python eval code.') print('Results should be very close to the official MATLAB eval code.') print('Recompute with `./tools/reval.py --matlab ...` for your paper.') print('-- Thanks, The Management') print('--------------------------------------------------------------') def 
_do_matlab_eval(self, output_dir='output'): print('-----------------------------------------------------') print('Computing results with the official MATLAB eval code.') print('-----------------------------------------------------') path = os.path.join(cfg.ROOT_DIR, 'lib', 'datasets', 'VOCdevkit-matlab-wrapper') cmd = 'cd {} && '.format(path) cmd += '{:s} -nodisplay -nodesktop '.format(cfg.MATLAB) cmd += '-r "dbstop if error; ' cmd += 'voc_eval(\'{:s}\',\'{:s}\',\'{:s}\',\'{:s}\'); quit;"' \ .format(self._devkit_path, self._get_comp_id(), self._image_set, output_dir) print(('Running:\n{}'.format(cmd))) status = subprocess.call(cmd, shell=True) def evaluate_detections(self, all_boxes, output_dir,experiment_setup=None): self._write_voc_results_file(all_boxes) self._do_python_eval(output_dir,experiment_setup) if self.config['matlab_eval']: self._do_matlab_eval(output_dir) if self.config['cleanup']: for cls in self._classes: if cls == '__background__': continue filename = self._get_voc_results_file_template().format(cls) os.remove(filename) # delete temporary results def competition_mode(self, on): if on: self.config['use_salt'] = False self.config['cleanup'] = False else: self.config['use_salt'] = True self.config['cleanup'] = True if __name__ == '__main__': from datasets.pascal_voc import pascal_voc d = pascal_voc('trainval', '2007') res = d.roidb from IPython import embed; embed()
mit
tealinuxos/tealinuxos-blog
database/migrations/2016_05_07_141929_create_taggings_table.php
467
<?php use Illuminate\Database\Schema\Blueprint; use Illuminate\Database\Migrations\Migration; class CreateTaggingsTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('taggings', function(Blueprint $table) { $table->increments('id'); $table->timestamps(); }); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::drop('taggings'); } }
mit
TheOrangeAllianceNet/TheOrangeAlliance
src/app/ftc/regions/region.component.spec.ts
805
import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { RegionComponent } from './region.component'; import { RouterTestingModule } from '@angular/router/testing'; import { HttpClientModule } from '@angular/common/http'; describe('RegionComponent', () => { let component: RegionComponent; let fixture: ComponentFixture<RegionComponent>; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ RegionComponent ], imports: [RouterTestingModule, HttpClientModule] }) .compileComponents(); })); beforeEach(() => { fixture = TestBed.createComponent(RegionComponent); component = fixture.componentInstance; fixture.detectChanges(); }); it('should create', () => { expect(component).toBeTruthy(); }); });
mit
whatever555/countries
components/ReadMore/ReadMore.js
4526
import React, { Component } from "react"; import styled from "styled-components"; const MAXIMUM_CONTAINER_HEIGHT = 2999; const ReadMoreCheckboxLabel = styled.label` color: #333334; cursor: pointer; top: 0px; font-size: 16px; font-weight: 700; line-height: 1.4; `; const ReadMoreButton = styled.button` position: relative; background: none; display: block; outline: 0; border: 0; margin: 0 auto; width: 100%; color: #333334; cursor: pointer; font-size: 16px; font-weight: 700; line-height: 1.4; margin-top: 10px; text-shadow: 0 1px #964444; text-align: center; `; const Holder = styled.div` > ${ReadMoreButton} { > ${ReadMoreCheckboxLabel} { :after { content: '${props => props.labelText}'; } } } input { display: none; } input[type='checkbox']:checked ~ div { max-height: ${MAXIMUM_CONTAINER_HEIGHT}px; } input[type='checkbox']:checked ~ ${ReadMoreButton} { > ${ReadMoreCheckboxLabel} { :after { content: 'Less'; } } }`; const ReadMoreWrapper = styled.div` transition: max-height ${props => props.transitionTime}s ${props => props.transitionType}; max-height: ${props => props.settings.isOpen ? 
props.settings.contentHeight : props.settings.maxCollapsedHeight}px; overflow: hidden; word-break: break-word; white-space: pre-wrap; `; const defaultProps = { transitionType: "linear", transitionTime: 0.2, label: "Read more", maxCollapsedHeight: 200 }; const ReadMore = class extends Component { checkRef = React.createRef(); contentWrapperRef = React.createRef(); static defaultProps = defaultProps; constructor(props = defaultProps) { super(props); const { maxCollapsedHeight } = this.props; this.state = { settings: { maxCollapsedHeight, contentHeight: maxCollapsedHeight, isOpen: false }, isMounted: false }; } componentWillUnmount() { window.removeEventListener( "orientationchange", this.handleOrientationChange ); this.setState({ isMounted: false }); } componentDidMount() { this.setState({ isMounted: true }); const { settings } = this.state; if (this.checkRef.current) { settings.isOpen = this.checkRef.current.checked; this.checkRef.current.checked = false; this.checkRef.current.disabled = true; } window.addEventListener("orientationchange", this.handleOrientationChange); this.setState({ settings }, () => this.setHeights()); } setHeights = () => { if (this.contentWrapperRef.current) { const contentHeight = this.contentWrapperRef.current.clientHeight; const maxCollapsedHeight = this.props.maxCollapsedHeight < contentHeight ? 
this.props.maxCollapsedHeight : MAXIMUM_CONTAINER_HEIGHT; const { settings } = this.state; settings.contentHeight = contentHeight; settings.maxCollapsedHeight = maxCollapsedHeight; this.setState({ settings }); } }; toggleIsOpen = () => { this.setHeights(); const { settings } = this.state; settings.isOpen = !settings.isOpen; this.setState({ settings }); }; handleOrientationChange = () => { window.addEventListener("resize", this.onResize); }; onResize = () => { window.removeEventListener("resize", this.onResize); this.setHeights(); }; render() { const { children, label, renderOnServer, transitionTime, transitionType } = this.props; const { settings, isMounted } = this.state; return ( <Holder labelText={label}> <input ref={this.checkRef} type="checkbox" name="readMoreCheckbox" id="readMoreCheckbox" /> <ReadMoreWrapper settings={settings} transitionTime={transitionTime} transitionType={transitionType} > <div ref={this.contentWrapperRef}>{children}</div> </ReadMoreWrapper> {settings.contentHeight > settings.maxCollapsedHeight ? ( <ReadMoreButton onClick={this.toggleIsOpen}> {settings.isOpen ? "Less" : <>{label}</>} </ReadMoreButton> ) : ( renderOnServer && !isMounted && ( <ReadMoreButton> <ReadMoreCheckboxLabel htmlFor={"readMoreCheckbox"} /> </ReadMoreButton> ) )} </Holder> ); } }; export default ReadMore;
mit
niarora/Playground
Problems/BuildOrder.cs
4101
namespace Playground.Problems { using System; using System.Collections.Generic; using System.Linq; public class BuildDependency { public string Build { get; private set; } public string DependentOn { get; private set; } public BuildDependency(string build, string dependentOn) { this.Build = build; this.DependentOn = dependentOn; } } public class BuildOrder { private string[] Builds { get; set; } private IEnumerable<BuildDependency> BuildDependencies { get; set; } public BuildOrder(string[] builds, IEnumerable<BuildDependency> dependecies) { this.Builds = builds; this.BuildDependencies = dependecies; } // An O(N + M) algorithm for establishing a build order where builds can have dependencies on each other. // N is number of builds and M is the number of dependencies. // This method public IEnumerable<string> GetBuildOrder() { // Create two dictionaries. One for build 'foo' depends on 'b1, b2. // Other 'bar' can build 'b3, b4'. var dependentOn = new Dictionary<string, ICollection<string>>(); var canBuild = new Dictionary<string, ICollection<string>>(); foreach (var build in this.Builds) // O(N) { dependentOn.Add(build, new List<string>()); canBuild.Add(build, new List<string>()); } foreach (var dependency in BuildDependencies) // O(M) { dependentOn[dependency.Build].Add(dependency.DependentOn); canBuild[dependency.DependentOn].Add(dependency.Build); } // Can build builds where there are no dependecies. O(N) var canBuildBuilds = dependentOn .Where(dependency => dependency.Value.Count == 0) .Select(pair => pair.Key).ToList(); var buildsBuilt = new List<string>(); bool buildsAdded; do { buildsAdded = false; var newBuildsToBuild = new List<string>(); // Remove builds that are now built (added to buildsBuilt list) from the dependentOn dictionary. // For each build 'X' that can be built, 'canBuild[X]' value is the list of builds that depend on 'X'. // For each 'canBuild' value 'Y' for X, update 'dependentOn[Y]' by removing 'X' from the dependentOn[Y] value. 
// If a newly updated dependentOn[Y] has zero values, that build is not ready to be built in the next pass. if (canBuildBuilds.Count > 0) { buildsAdded = true; buildsBuilt.AddRange(canBuildBuilds); // Building the new builds. // This inner loop seems like N * M, but it only executes M times. // Since dictionary access is constant and we are only flowing through the dependency edges once, // the total iterations is M. foreach (var build in canBuildBuilds) { dependentOn.Remove(build); // Remove builds that were built. // Update the depends on list to remove the dependecies just built. foreach (var b in canBuild[build]) { dependentOn[b].Remove(build); if (dependentOn[b].Count == 0) { newBuildsToBuild.Add(b); } } } } canBuildBuilds = newBuildsToBuild; } while (buildsAdded); if (buildsBuilt.Count != this.Builds.Length) { throw new InvalidOperationException("Builds have a circular dependency."); } return buildsBuilt; } } }
mit
akshaybabloo/Car-ND
Term_1/TensorFlow_3/cross_entropy_8.py
637
import tensorflow as tf def cross_entropy(): r""" Cross Entropy finds the distance between two probability vectors. The equation is given .. math:: D(S,L)=-\sum_{i}^{n}L_{i}~log(S_{i}) Returns ------- """ softmax_data = [0.7, 0.2, 0.1] one_hot_data = [1.0, 0.0, 0.0] softmax = tf.placeholder(tf.float32) one_hot = tf.placeholder(tf.float32) cross_entro = -tf.reduce_sum(tf.mul(one_hot, tf.log(softmax))) with tf.Session() as sess: print(sess.run(cross_entro, feed_dict={softmax: softmax_data, one_hot: one_hot_data})) if __name__ == '__main__': cross_entropy()
mit
TsvetanMilanov/TelerikAcademyHW
08_HQC/11_TestDrivenDevelopment/TestDrivenDevelopment/Poker.Tests/PockerHandsCheckerTests.cs
33716
namespace Poker.Tests { using System; using System.Collections.Generic; using NUnit.Framework; [TestFixture] public class PockerHandsCheckerTests { private PokerHandsChecker pockerHandsChecker; private IList<ICard> allCards; private Dictionary<PockerHands, IHand> allPockerHands; [TestFixtureSetUp] public void InitializeData() { pockerHandsChecker = new PokerHandsChecker(); IList<CardSuit> allCardSuits = new List<CardSuit>(); IList<CardFace> allCardFaces = new List<CardFace>(); allCards = new List<ICard>(); allPockerHands = new Dictionary<PockerHands, IHand>(); foreach (CardSuit suit in Enum.GetValues(typeof(CardSuit))) { allCardSuits.Add(suit); } foreach (CardFace face in Enum.GetValues(typeof(CardFace))) { allCardFaces.Add(face); } foreach (var suit in allCardSuits) { foreach (var face in allCardFaces) { allCards.Add(new Card(face, suit)); } } addAllValidHandsToDictionary(allPockerHands); } [Test] public void IsValidHandShouldReturnTrueWhenHandHasValidCards() { IList<ICard> listOfCards = new List<ICard>(); for (int i = 0; i < 5; i++) { listOfCards.Add(allCards[i]); } IHand hand = new Hand(listOfCards); bool isHandValid = pockerHandsChecker.IsValidHand(hand); Assert.IsTrue(isHandValid); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsValdHandShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isHandValid = pockerHandsChecker.IsValidHand(nullHand); } [Test] public void IsValdHandShouldShouldReturnFalseWhenHandHasRepeatingCards() { IList<ICard> listOfCards = new List<ICard>(); for (int i = 0; i < 4; i++) { listOfCards.Add(allCards[i]); } listOfCards.Add(listOfCards[0]); IHand nullHand = new Hand(listOfCards); bool isHandValid = pockerHandsChecker.IsValidHand(nullHand); Assert.IsFalse(isHandValid); } [TestCase(0)] [TestCase(2)] [TestCase(4)] [TestCase(6)] [TestCase(7)] public void IsValidHandShouldReturnFalseWhenHandHasNotFiveCards(int cardsCount) { IList<ICard> listOfCards = new List<ICard>(); for (int i = 0; i < 
cardsCount; i++) { listOfCards.Add(allCards[i]); } IHand hand = new Hand(listOfCards); bool isHandValid = pockerHandsChecker.IsValidHand(hand); Assert.IsFalse(isHandValid); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsFlushShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isHandFlush = pockerHandsChecker.IsFlush(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsFlushShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isHandFlush = pockerHandsChecker.IsFlush(hand); } [Test] public void IsFlushShouldReturnTrueWhenGivenValidFlushHand() { IHand validFlushHand = allPockerHands[PockerHands.Flush]; bool isHandFlush = pockerHandsChecker.IsFlush(validFlushHand); Assert.IsTrue(isHandFlush); } [Test] public void IsFlushShouldReturnFalseWhenGivenOnlyOneInvalidCard() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Three, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Diamonds) }); bool isValidFlushHand = pockerHandsChecker.IsFlush(hand); Assert.IsFalse(isValidFlushHand); } [Test] public void IsFlushShouldReturnFalseWhenGivenMoreThanOneInvalidCards() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Three, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Spades), new Card(CardFace.Ten, CardSuit.Clubs), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Diamonds) }); bool isValidFlushHand = pockerHandsChecker.IsFlush(hand); Assert.IsFalse(isValidFlushHand); } [Test] public void IsFlushShouldReturnFalseWhenGivenStraightFlushHand() { IHand hand = 
allPockerHands[PockerHands.StraightFlush]; bool isValidFlushHand = pockerHandsChecker.IsFlush(hand); Assert.IsFalse(isValidFlushHand); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsFourOfAKindShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isHandFourOfAKind = pockerHandsChecker.IsFourOfAKind(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsFourOfAKindShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isHandFourOfAKind = pockerHandsChecker.IsFourOfAKind(hand); } [Test] public void IsFourOfAKindShouldReturnTrueWhenGivenValidFourOfAKindHand() { IHand validFlushHand = allPockerHands[PockerHands.FourOfAKind]; bool isHandFourOfAKind = pockerHandsChecker.IsFourOfAKind(validFlushHand); Assert.IsTrue(isHandFourOfAKind); } [Test] public void IsFourOfAKindShouldReturnFalseWhenGivenThreeOfAKindHand() { IHand hand = allPockerHands[PockerHands.ThreeOfAKind]; bool isValidFourOfAKindHand = pockerHandsChecker.IsFourOfAKind(hand); Assert.IsFalse(isValidFourOfAKindHand); } [Test] public void IsFourOfAKindShouldReturnFalseWhenGivenOnePairHand() { IHand hand = allPockerHands[PockerHands.OnePair]; bool isValidFourOfAKindHand = pockerHandsChecker.IsFourOfAKind(hand); Assert.IsFalse(isValidFourOfAKindHand); } [Test] public void IsFourOfAKindShouldReturnFalseWhenGivenTwoPairHand() { IHand hand = allPockerHands[PockerHands.TwoPair]; bool isValidFourOfAKindHand = pockerHandsChecker.IsFourOfAKind(hand); Assert.IsFalse(isValidFourOfAKindHand); } [Test] public void IsFourOfAKindShouldReturnFalseWhenGivenFullHouse() { IHand hand = allPockerHands[PockerHands.FullHouse]; bool isValidFourOfAKindHand = pockerHandsChecker.IsFourOfAKind(hand); 
Assert.IsFalse(isValidFourOfAKindHand); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsHighCardShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isHighCardHand = pockerHandsChecker.IsHighCard(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsHighCardShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); } [Test] public void IsHighCardShouldReturnTrueWhenGivenValidHighCardHand() { IHand validHighCardHand = allPockerHands[PockerHands.HighCard]; bool isHighCardHand = pockerHandsChecker.IsHighCard(validHighCardHand); Assert.IsTrue(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenOnePairHand() { IHand hand = allPockerHands[PockerHands.OnePair]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); Assert.IsFalse(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenTwoPairHand() { IHand hand = allPockerHands[PockerHands.TwoPair]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); Assert.IsFalse(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenThreeOfAKindHand() { IHand hand = allPockerHands[PockerHands.ThreeOfAKind]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); Assert.IsFalse(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenStraightHand() { IHand hand = allPockerHands[PockerHands.Straight]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); Assert.IsFalse(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenFlushHand() { IHand hand = allPockerHands[PockerHands.Flush]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); 
Assert.IsFalse(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenFullHouseHand() { IHand hand = allPockerHands[PockerHands.FullHouse]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); Assert.IsFalse(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenFourOfAKindHand() { IHand hand = allPockerHands[PockerHands.FourOfAKind]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); Assert.IsFalse(isHighCardHand); } [Test] public void IsHighCardShouldReturnFalseWhenGivenStraightFlushHand() { IHand hand = allPockerHands[PockerHands.StraightFlush]; bool isHighCardHand = pockerHandsChecker.IsHighCard(hand); Assert.IsFalse(isHighCardHand); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsOnePairShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isOnePairHand = pockerHandsChecker.IsOnePair(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsOnePairShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isOnePairHand = pockerHandsChecker.IsOnePair(hand); } [Test] public void IsOnePairShouldReturnTrueWhenGivenValidOnePairHand() { IHand validOnePairHand = allPockerHands[PockerHands.OnePair]; bool isOnePairHand = pockerHandsChecker.IsOnePair(validOnePairHand); Assert.IsTrue(isOnePairHand); } [Test] public void IsOnePairShouldReturnFalseWhenGivenTwoPairHand() { IHand hand = allPockerHands[PockerHands.TwoPair]; bool isOnePairHand = pockerHandsChecker.IsOnePair(hand); Assert.IsFalse(isOnePairHand); } [Test] public void IsOnePairShouldReturnFalseWhenGivenFullHouseHand() { IHand hand = allPockerHands[PockerHands.FullHouse]; bool isOnePairHand = pockerHandsChecker.IsOnePair(hand); 
Assert.IsFalse(isOnePairHand); } [Test] public void IsOnePairShouldReturnFalseWhenGivenThreeOfAKindHand() { IHand hand = allPockerHands[PockerHands.ThreeOfAKind]; bool isOnePairHand = pockerHandsChecker.IsOnePair(hand); Assert.IsFalse(isOnePairHand); } [Test] public void IsOnePairShouldReturnFalseWhenGivenFourOfAKindHand() { IHand hand = allPockerHands[PockerHands.FourOfAKind]; bool isOnePairHand = pockerHandsChecker.IsOnePair(hand); Assert.IsFalse(isOnePairHand); } [Test] public void IsOnePairShouldReturnFalseWhenGivenStraightHand() { IHand hand = allPockerHands[PockerHands.Straight]; bool isOnePairHand = pockerHandsChecker.IsOnePair(hand); Assert.IsFalse(isOnePairHand); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsTwoPairShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isTwoPairHand = pockerHandsChecker.IsTwoPair(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsTwoPairShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isTwoPairHand = pockerHandsChecker.IsTwoPair(hand); } [Test] public void IsTwoPairShouldReturnTrueWhenGivenValidTwoPairHand() { IHand validOnePairHand = allPockerHands[PockerHands.TwoPair]; bool isTwoPairHand = pockerHandsChecker.IsTwoPair(validOnePairHand); Assert.IsTrue(isTwoPairHand); } [Test] public void IsTwoPairShouldReturnFalseWhenGivenOnePairHand() { IHand hand = allPockerHands[PockerHands.OnePair]; bool isTwoPairHand = pockerHandsChecker.IsTwoPair(hand); Assert.IsFalse(isTwoPairHand); } [Test] public void IsTwoPairShouldReturnFalseWhenGivenFullHouseHand() { IHand hand = allPockerHands[PockerHands.FullHouse]; bool isTwoPairHand = pockerHandsChecker.IsTwoPair(hand); Assert.IsFalse(isTwoPairHand); } 
[Test] public void IsTwoPairShouldReturnFalseWhenGivenThreeOfAKindHand() { IHand hand = allPockerHands[PockerHands.ThreeOfAKind]; bool isTwoPairHand = pockerHandsChecker.IsTwoPair(hand); Assert.IsFalse(isTwoPairHand); } [Test] public void IsTwoPairShouldReturnFalseWhenGivenFourOfAKindHand() { IHand hand = allPockerHands[PockerHands.FourOfAKind]; bool isTwoPairHand = pockerHandsChecker.IsTwoPair(hand); Assert.IsFalse(isTwoPairHand); } [Test] public void IsTwoPairShouldReturnFalseWhenGivenStraightHand() { IHand hand = allPockerHands[PockerHands.Straight]; bool isTwoPairHand = pockerHandsChecker.IsTwoPair(hand); Assert.IsFalse(isTwoPairHand); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsThreeOfAKindShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isThreeOfAKind = pockerHandsChecker.IsThreeOfAKind(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsThreeOfAKindShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isThreeOfAKind = pockerHandsChecker.IsThreeOfAKind(hand); } [Test] public void IsThreeOfAKindShouldReturnTrueWhenGivenValidThreeOfAKindHand() { IHand validThreeOfAKindHand = allPockerHands[PockerHands.ThreeOfAKind]; bool isThreeOfAKind = pockerHandsChecker.IsThreeOfAKind(validThreeOfAKindHand); Assert.IsTrue(isThreeOfAKind); } [Test] public void IsThreeOfAKindShouldReturnFalseWhenGivenOnePairHand() { IHand hand = allPockerHands[PockerHands.OnePair]; bool isThreeOfAKind = pockerHandsChecker.IsThreeOfAKind(hand); Assert.IsFalse(isThreeOfAKind); } [Test] public void IsThreeOfAKindShouldReturnFalseWhenGivenFullHouseHand() { IHand hand = allPockerHands[PockerHands.FullHouse]; bool isThreeOfAKind = 
pockerHandsChecker.IsThreeOfAKind(hand); Assert.IsFalse(isThreeOfAKind); } [Test] public void IsThreeOfAKindShouldReturnFalseWhenGivenTwoPairHand() { IHand hand = allPockerHands[PockerHands.TwoPair]; bool isThreeOfAKind = pockerHandsChecker.IsThreeOfAKind(hand); Assert.IsFalse(isThreeOfAKind); } [Test] public void IsThreeOfAKindShouldReturnFalseWhenGivenFourOfAKindHand() { IHand hand = allPockerHands[PockerHands.FourOfAKind]; bool isThreeOfAKind = pockerHandsChecker.IsThreeOfAKind(hand); Assert.IsFalse(isThreeOfAKind); } [Test] public void IsThreeOfAKindShouldReturnFalseWhenGivenStraightHand() { IHand hand = allPockerHands[PockerHands.Straight]; bool isThreeOfAKind = pockerHandsChecker.IsThreeOfAKind(hand); Assert.IsFalse(isThreeOfAKind); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsStraightShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isStraightHand = pockerHandsChecker.IsStraight(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsStraightShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isStraightHand = pockerHandsChecker.IsStraight(hand); } [Test] public void IsStraightShouldReturnTrueWhenGivenValidStraightHand() { IHand validThreeOfAKindHand = allPockerHands[PockerHands.Straight]; bool isStraightHand = pockerHandsChecker.IsStraight(validThreeOfAKindHand); Assert.IsTrue(isStraightHand); } [Test] public void IsStraightShouldReturnFalseWhenGivenStraightFlushHand() { IHand hand = allPockerHands[PockerHands.StraightFlush]; bool isStraightHand = pockerHandsChecker.IsStraight(hand); Assert.IsFalse(isStraightHand); } [Test] public void IsStraightShouldReturnFalseWhenGivenFlushHand() { IHand hand = 
allPockerHands[PockerHands.Flush]; bool isStraightHand = pockerHandsChecker.IsStraight(hand); Assert.IsFalse(isStraightHand); } [Test] public void IsStraightShouldReturnFalseWhenGivenHighCardHand() { IHand hand = allPockerHands[PockerHands.HighCard]; bool isStraightHand = pockerHandsChecker.IsStraight(hand); Assert.IsFalse(isStraightHand); } [Test] public void IsStraightShouldReturnFalseWhenGivenFourEqualyIncreasingCardsAndOneNotEqualyIncreasingCardHand() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Seven, CardSuit.Clubs), new Card(CardFace.Eight, CardSuit.Hearts), new Card(CardFace.Nine, CardSuit.Spades), new Card(CardFace.Ten, CardSuit.Diamonds), new Card(CardFace.Queen, CardSuit.Clubs), }); bool isStraightHand = pockerHandsChecker.IsStraight(hand); Assert.IsFalse(isStraightHand); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsFullHouseShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isFullHouseHand = pockerHandsChecker.IsFullHouse(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsFullHouseShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isFullHouseHand = pockerHandsChecker.IsFullHouse(hand); } [Test] public void IsFullHouseShouldReturnTrueWhenGivenValidFullHouseHand() { IHand validOnePairHand = allPockerHands[PockerHands.FullHouse]; bool isFullHouseHand = pockerHandsChecker.IsFullHouse(validOnePairHand); Assert.IsTrue(isFullHouseHand); } [Test] public void IsFullHouseShouldReturnFalseWhenGivenOnePairHand() { IHand hand = allPockerHands[PockerHands.OnePair]; bool isFullHouseHand = pockerHandsChecker.IsFullHouse(hand); Assert.IsFalse(isFullHouseHand); } [Test] public void 
IsFullHouseShouldReturnFalseWhenGivenTwoPairHand() { IHand hand = allPockerHands[PockerHands.TwoPair]; bool isFullHouseHand = pockerHandsChecker.IsFullHouse(hand); Assert.IsFalse(isFullHouseHand); } [Test] public void IsFullHouseShouldReturnFalseWhenGivenThreeOfAKindHand() { IHand hand = allPockerHands[PockerHands.ThreeOfAKind]; bool isFullHouseHand = pockerHandsChecker.IsFullHouse(hand); Assert.IsFalse(isFullHouseHand); } [Test] public void IsFullHouseShouldReturnFalseWhenGivenFourOfAKindHand() { IHand hand = allPockerHands[PockerHands.FourOfAKind]; bool isFullHouseHand = pockerHandsChecker.IsFullHouse(hand); Assert.IsFalse(isFullHouseHand); } [Test] public void IsFullHouseShouldReturnFalseWhenGivenStraightHand() { IHand hand = allPockerHands[PockerHands.Straight]; bool isFullHouseHand = pockerHandsChecker.IsFullHouse(hand); Assert.IsFalse(isFullHouseHand); } [Test] [ExpectedException(typeof(ArgumentNullException))] public void IsStraightFlushShouldThrowArgumentNullExceptionWhenHandIsNull() { IHand nullHand = null; bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(nullHand); } [Test] [ExpectedException(typeof(ArgumentException))] public void IsStraightFlushShouldThrowArgumentExceptionWhenHandIsInvalid() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Hearts), new Card(CardFace.Queen, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Hearts) }); bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(hand); } [Test] public void IsStraightFlushShouldReturnTrueWhenGivenValidStraightFlushHand() { IHand validFlushHand = allPockerHands[PockerHands.StraightFlush]; bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(validFlushHand); Assert.IsTrue(isHandStraightFlush); } [Test] public void IsStraightFlushShouldReturnFalseWhenGivenOnlyOneInvalidSuitCard() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Three, 
CardSuit.Hearts), new Card(CardFace.Four, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Six, CardSuit.Hearts), new Card(CardFace.Seven, CardSuit.Diamonds) }); bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(hand); Assert.IsFalse(isHandStraightFlush); } [Test] public void IsStraightFlushShouldReturnFalseWhenGivenMoreThanOneInvalidSuitCards() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Three, CardSuit.Hearts), new Card(CardFace.Four, CardSuit.Spades), new Card(CardFace.Five, CardSuit.Clubs), new Card(CardFace.Six, CardSuit.Hearts), new Card(CardFace.Seven, CardSuit.Diamonds) }); bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(hand); Assert.IsFalse(isHandStraightFlush); } [Test] public void IsStraightFlushShouldReturnFalseWhenGivenFlushHand() { IHand hand = allPockerHands[PockerHands.Flush]; bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(hand); Assert.IsFalse(isHandStraightFlush); } [Test] public void IsStraightFlushShouldReturnFalseWhenGivenHighCardHand() { IHand hand = allPockerHands[PockerHands.HighCard]; bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(hand); Assert.IsFalse(isHandStraightFlush); } [Test] public void IsStraightFlushShouldReturnFalseWhenGivenOnlyOneInvalidFaceCard() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Three, CardSuit.Hearts), new Card(CardFace.Four, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Hearts), new Card(CardFace.Six, CardSuit.Hearts), new Card(CardFace.Eight, CardSuit.Hearts) }); bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(hand); Assert.IsFalse(isHandStraightFlush); } [Test] public void IsStraightFlushShouldReturnFalseWhenGivenMoreThanOneInvalidFaceCards() { IHand hand = new Hand(new List<ICard>() { new Card(CardFace.Four, CardSuit.Hearts), new Card(CardFace.Five, CardSuit.Spades), new Card(CardFace.Seven, CardSuit.Hearts), new Card(CardFace.Eight, CardSuit.Hearts), new Card(CardFace.Ten, 
CardSuit.Hearts) }); bool isHandStraightFlush = pockerHandsChecker.IsStraightFlush(hand); Assert.IsFalse(isHandStraightFlush); } private void addAllValidHandsToDictionary(Dictionary<PockerHands, IHand> allPockerHands) { IHand straightFlushHand = new Hand(new List<ICard>() { new Card(CardFace.Five, CardSuit.Spades), new Card(CardFace.Six, CardSuit.Spades), new Card(CardFace.Seven, CardSuit.Spades), new Card(CardFace.Eight, CardSuit.Spades), new Card(CardFace.Nine, CardSuit.Spades) }); allPockerHands.Add(PockerHands.StraightFlush, straightFlushHand); IHand fourOfAKindHand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Spades), new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Clubs), new Card(CardFace.Ace, CardSuit.Diamonds), new Card(CardFace.Nine, CardSuit.Spades) }); allPockerHands.Add(PockerHands.FourOfAKind, fourOfAKindHand); IHand fullHouseHand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Spades), new Card(CardFace.Ace, CardSuit.Hearts), new Card(CardFace.Ace, CardSuit.Clubs), new Card(CardFace.Nine, CardSuit.Diamonds), new Card(CardFace.Nine, CardSuit.Spades) }); allPockerHands.Add(PockerHands.FullHouse, fullHouseHand); IHand flushHand = new Hand(new List<ICard>() { new Card(CardFace.Five, CardSuit.Spades), new Card(CardFace.Three, CardSuit.Spades), new Card(CardFace.Ace, CardSuit.Spades), new Card(CardFace.Seven, CardSuit.Spades), new Card(CardFace.Jack, CardSuit.Spades) }); allPockerHands.Add(PockerHands.Flush, flushHand); IHand straightHand = new Hand(new List<ICard>() { new Card(CardFace.Seven, CardSuit.Spades), new Card(CardFace.Eight, CardSuit.Hearts), new Card(CardFace.Nine, CardSuit.Clubs), new Card(CardFace.Ten, CardSuit.Diamonds), new Card(CardFace.Jack, CardSuit.Spades) }); allPockerHands.Add(PockerHands.Straight, straightHand); IHand threeOfAKindHand = new Hand(new List<ICard>() { new Card(CardFace.Seven, CardSuit.Spades), new Card(CardFace.Seven, CardSuit.Hearts), new Card(CardFace.Seven, 
CardSuit.Clubs), new Card(CardFace.Ten, CardSuit.Diamonds), new Card(CardFace.Jack, CardSuit.Spades) }); allPockerHands.Add(PockerHands.ThreeOfAKind, threeOfAKindHand); IHand twoPairHand = new Hand(new List<ICard>() { new Card(CardFace.Seven, CardSuit.Spades), new Card(CardFace.Seven, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Clubs), new Card(CardFace.Ten, CardSuit.Diamonds), new Card(CardFace.Jack, CardSuit.Spades) }); allPockerHands.Add(PockerHands.TwoPair, twoPairHand); IHand onePairHand = new Hand(new List<ICard>() { new Card(CardFace.Seven, CardSuit.Spades), new Card(CardFace.Seven, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Clubs), new Card(CardFace.Two, CardSuit.Diamonds), new Card(CardFace.Jack, CardSuit.Spades) }); allPockerHands.Add(PockerHands.OnePair, onePairHand); IHand highCardHand = new Hand(new List<ICard>() { new Card(CardFace.Ace, CardSuit.Spades), new Card(CardFace.Seven, CardSuit.Hearts), new Card(CardFace.Ten, CardSuit.Clubs), new Card(CardFace.Two, CardSuit.Diamonds), new Card(CardFace.Jack, CardSuit.Spades) }); allPockerHands.Add(PockerHands.HighCard, highCardHand); } } }
mit
3pillarlabs/hailstorm-sdk
hailstorm-client-exchange/src/test/java/com/tpg/labs/hailstorm/clientexchange/HailstormClientExchangeApplicationTests.java
394
package com.tpg.labs.hailstorm.clientexchange; import org.junit.jupiter.api.Test; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ContextConfiguration; @ContextConfiguration(classes = HailstormClientExchangeApplicationTestsConfig.class) @SpringBootTest class HailstormClientExchangeApplicationTests { @Test void contextLoads() { } }
mit
algolia/instantsearch.js
src/connectors/query-rules/connectQueryRules.ts
7800
import type { AlgoliaSearchHelper as Helper, SearchParameters, } from 'algoliasearch-helper'; import type { Connector, TransformItems, WidgetRenderState } from '../../types'; import { checkRendering, createDocumentationMessageGenerator, warning, getRefinements, isEqual, noop, } from '../../lib/utils'; import type { Refinement as InternalRefinement, NumericRefinement as InternalNumericRefinement, } from '../../lib/utils/getRefinements'; type TrackedFilterRefinement = string | number | boolean; export type ParamTrackedFilters = { [facetName: string]: ( facetValues: TrackedFilterRefinement[] ) => TrackedFilterRefinement[]; }; export type ParamTransformRuleContexts = (ruleContexts: string[]) => string[]; export type QueryRulesConnectorParams = { trackedFilters?: ParamTrackedFilters; transformRuleContexts?: ParamTransformRuleContexts; transformItems?: TransformItems<any>; }; export type QueryRulesRenderState = { items: any[]; }; const withUsage = createDocumentationMessageGenerator({ name: 'query-rules', connector: true, }); function hasStateRefinements(state: SearchParameters): boolean { return [ state.disjunctiveFacetsRefinements, state.facetsRefinements, state.hierarchicalFacetsRefinements, state.numericRefinements, ].some((refinement) => Boolean(refinement && Object.keys(refinement).length > 0) ); } // A context rule must consist only of alphanumeric characters, hyphens, and underscores. 
// See https://www.algolia.com/doc/guides/managing-results/refine-results/merchandising-and-promoting/in-depth/implementing-query-rules/#context function escapeRuleContext(ruleName: string): string { return ruleName.replace(/[^a-z0-9-_]+/gi, '_'); } function getRuleContextsFromTrackedFilters({ helper, sharedHelperState, trackedFilters, }: { helper: Helper; sharedHelperState: SearchParameters; trackedFilters: ParamTrackedFilters; }): string[] { const ruleContexts = Object.keys(trackedFilters).reduce<string[]>( (facets, facetName) => { const facetRefinements: TrackedFilterRefinement[] = getRefinements( helper.lastResults || {}, sharedHelperState, true ) .filter( (refinement: InternalRefinement) => refinement.attribute === facetName ) .map( (refinement: InternalRefinement) => (refinement as InternalNumericRefinement).numericValue || refinement.name ); const getTrackedFacetValues = trackedFilters[facetName]; const trackedFacetValues = getTrackedFacetValues(facetRefinements); return [ ...facets, ...facetRefinements .filter((facetRefinement) => trackedFacetValues.includes(facetRefinement) ) .map((facetValue) => escapeRuleContext(`ais-${facetName}-${facetValue}`) ), ]; }, [] ); return ruleContexts; } function applyRuleContexts( this: { helper: Helper; initialRuleContexts: string[]; trackedFilters: ParamTrackedFilters; transformRuleContexts: ParamTransformRuleContexts; }, event: { state: SearchParameters } ): void { const { helper, initialRuleContexts, trackedFilters, transformRuleContexts } = this; const sharedHelperState = event.state; const previousRuleContexts: string[] = sharedHelperState.ruleContexts || []; const newRuleContexts = getRuleContextsFromTrackedFilters({ helper, sharedHelperState, trackedFilters, }); const nextRuleContexts = [...initialRuleContexts, ...newRuleContexts]; warning( nextRuleContexts.length <= 10, ` The maximum number of \`ruleContexts\` is 10. They have been sliced to that limit. 
Consider using \`transformRuleContexts\` to minimize the number of rules sent to Algolia. ` ); const ruleContexts = transformRuleContexts(nextRuleContexts).slice(0, 10); if (!isEqual(previousRuleContexts, ruleContexts)) { helper.overrideStateWithoutTriggeringChangeEvent({ ...sharedHelperState, ruleContexts, }); } } export type QueryRulesWidgetDescription = { $$type: 'ais.queryRules'; renderState: QueryRulesRenderState; indexRenderState: { queryRules: WidgetRenderState< QueryRulesRenderState, QueryRulesConnectorParams >; }; }; export type QueryRulesConnector = Connector< QueryRulesWidgetDescription, QueryRulesConnectorParams >; const connectQueryRules: QueryRulesConnector = function connectQueryRules( render, unmount = noop ) { checkRendering(render, withUsage()); return (widgetParams) => { const { trackedFilters = {} as ParamTrackedFilters, transformRuleContexts = ((rules) => rules) as ParamTransformRuleContexts, transformItems = ((items) => items) as NonNullable< QueryRulesConnectorParams['transformItems'] >, } = widgetParams || {}; Object.keys(trackedFilters).forEach((facetName) => { if (typeof trackedFilters[facetName] !== 'function') { throw new Error( withUsage( `'The "${facetName}" filter value in the \`trackedFilters\` option expects a function.` ) ); } }); const hasTrackedFilters = Object.keys(trackedFilters).length > 0; // We store the initial rule contexts applied before creating the widget // so that we do not override them with the rules created from `trackedFilters`. 
let initialRuleContexts: string[] = []; let onHelperChange: (event: { state: SearchParameters }) => void; return { $$type: 'ais.queryRules', init(initOptions) { const { helper, state, instantSearchInstance } = initOptions; initialRuleContexts = state.ruleContexts || []; onHelperChange = applyRuleContexts.bind({ helper, initialRuleContexts, trackedFilters, transformRuleContexts, }); if (hasTrackedFilters) { // We need to apply the `ruleContexts` based on the `trackedFilters` // before the helper changes state in some cases: // - Some filters are applied on the first load (e.g. using `configure`) // - The `transformRuleContexts` option sets initial `ruleContexts`. if ( hasStateRefinements(state) || Boolean(widgetParams.transformRuleContexts) ) { onHelperChange({ state }); } // We track every change in the helper to override its state and add // any `ruleContexts` needed based on the `trackedFilters`. helper.on('change', onHelperChange); } render( { ...this.getWidgetRenderState(initOptions), instantSearchInstance, }, true ); }, render(renderOptions) { const { instantSearchInstance } = renderOptions; render( { ...this.getWidgetRenderState(renderOptions), instantSearchInstance, }, false ); }, getWidgetRenderState({ results }) { const { userData = [] } = results || {}; const items = transformItems(userData, { results }); return { items, widgetParams, }; }, getRenderState(renderState, renderOptions) { return { ...renderState, queryRules: this.getWidgetRenderState(renderOptions), }; }, dispose({ helper, state }) { unmount(); if (hasTrackedFilters) { helper.removeListener('change', onHelperChange); return state.setQueryParameter('ruleContexts', initialRuleContexts); } return state; }, }; }; }; export default connectQueryRules;
mit
anishathalye/gitlive
slurp/src/main/scala/slurp/Slurp.scala
1167
package slurp import java.util.concurrent.{ BlockingQueue, LinkedBlockingQueue } import com.typesafe.config.ConfigFactory import dispatch._, Defaults._ import morph.ast._, DSL._, Implicits._ object Slurp { def main(args: Array[String]) { val config = ConfigFactory.load() val clientId = config.getString("github.clientId") val clientSecret = config.getString("github.clientSecret") val ghs = new GitHubStream(clientId, clientSecret) val queue = new LinkedBlockingQueue[String]() val qd = new QueueDumper(queue) qd.start() mainLoop(ghs, queue) } def mainLoop(ghs: GitHubStream, queue: BlockingQueue[String]) { while (true) { try { val (events, pollInterval) = ghs.getEvents() val timeStep = pollInterval * 1000 if (events.nonEmpty) { events foreach { event => queue.offer(event) Thread.sleep(timeStep) } } else { Thread.sleep(EMPTY_SLEEP) } } catch { case e: Exception => { e.printStackTrace() Thread.sleep(EMPTY_SLEEP) } } } } private val EMPTY_SLEEP = 5000 }
mit
SchizoCat3D/Ampersand
Assets/Scripts/Mechanics/UpgradeProp.cs
1084
using System.Collections; using System.Collections.Generic; using UnityEngine; public class UpgradeProp : MonoBehaviour { enum Tipo { Tronco, Planta, Comida } public int puntuacion; [Header("Características")] [SerializeField] Tipo tipo; public GameObject destroyed; public GameObject growed; private void OnTriggerEnter(Collider other) { if(other.tag == "Tronco") { Destroy(other.gameObject, 0.5f); Destroy(this.gameObject, 0.5f); growed.gameObject.SetActive(true); destroyed.gameObject.SetActive(false); // ENVIAR PUNTUACION } } private void OnDrawGizmos() { if(tipo == 0) { Gizmos.color = new Color(1, 0.5f, 0.5f, 0.5f); }/* else if(tipo == 1) { Gizmos.color = new Color(1, 0.5f, 0.5f, 0.5f); }*/ else { Gizmos.color = new Color(1, 1, 0.5f, 0.5f); } Gizmos.DrawCube(this.transform.position, this.transform.localScale); } }
mit
lehins/django-wepay
djwepay/management/commands/wepay_callback_update.py
1959
from optparse import make_option from django.core.management.base import BaseCommand from djwepay.api import get_wepay_model, DEFAULT_MODELS from wepay.exceptions import WePayHTTPError, WePayConnectionError class Command(BaseCommand): option_list = BaseCommand.option_list + ( make_option('--objects', default='', dest='objects', help="Comma separated object names that need their callback_uri updated"), ) supported_objects = [ 'user', 'account', 'checkout', 'preapproval', 'withdrawal', 'subscription_plan', 'subscription' ] def handle(self, objects='', **kwargs): update_errors = [] if objects: objects = [o.strip() for o in objects.split(',') if o] unknown_objects = set(objects) - set(self.supported_objects) assert not unknown_objects, "Unknown objects: %s" % ','.join(unknown_objects) else: objects = self.supported_objects models = [] for obj_name in objects: try: models.append((obj_name, get_wepay_model(obj_name))) except LookupError: pass for obj_name, model in models: obj = None for obj in model.objects.accessible(): try: api_modify = getattr(obj, "api_%s_modify" % obj_name) print("Modified: %s" % api_modify(callback_uri=obj.get_callback_uri())[0]) except (WePayHTTPError, WePayConnectionError) as e: print("Error: %s" % e) update_errors.append({ 'call': "api_%s_modify" % obj_name, 'object': obj, 'params': "callback_uri=%s" % obj.get_callback_uri(), 'error': e }) if update_errors: print("THERE WERE ERRORS:") print(update_errors)
mit
larryjiang/cs_study
compilers/book_compilers/src/cptt/chapter2/easyexpressiontranslator/Postfix.java
1145
package cptt.chapter2.easyexpressiontranslator; import java.io.*; public class Postfix{ public static void main(String args[]) throws IOException{ Parser parser = new Parser(); parser.expr(); System.out.println(); } } class Parser{ private int lookahead; private InputStreamReader inputStreamReader; public Parser() throws IOException{ inputStreamReader = new InputStreamReader(System.in); lookahead = inputStreamReader.read(); } void expr() throws IOException{ term(); while (true) { if('+' == lookahead){ match('+');term();System.out.print('+'); }else if ('-' == lookahead) { match('-');term();System.out.print('-'); }else{ return; } } } void term() throws IOException{ if(Character.isDigit((char) lookahead)){ System.out.print((char) lookahead); match(lookahead); }else { throw new Error("Syntax Error"); } } void match(int t) throws IOException{ if(lookahead == t){ lookahead = inputStreamReader.read(); }else { throw new Error("Syntax Error"); } } }
mit
PinkyJie/angular1-webpack-starter
source/app/pages/dashboard/dashboard.controller.js
684
class DashboardController { constructor (UserAPI) { Object.assign(this, {UserAPI}); this.colors = ['indigo', 'red', 'pink']; const userInfo = this.UserAPI.getUserInfo(); this.welcomeMessage = `Welcome ${userInfo.name}!`; this._getProductsSummary(); } _getProductsSummary () { this.UserAPI.getProductSummary() .then((data) => { this.products = data; this.products.forEach((product) => { product.link = `root.layout.${product.name}`; }); }); } } DashboardController.$inject = ['UserAPI']; export default DashboardController;
mit
wbellang/cloudy
imports/ui/layouts/account/account.js
68
import './account.html'; import '../../layouts/header/header.html';
mit
rkeshmir/persian-angular-ui-bootstrap
src/pagination/docs/demo.js
436
angular.module('ui.bootstrap.demo').controller('PaginationDemoCtrl', function ($scope, $log) { $scope.totalItems = 64; $scope.currentPage = 4; $scope.setPage = function (pageNo) { $scope.currentPage = pageNo; }; $scope.pageChanged = function () { $log.log('Page changed to: ' + $scope.currentPage); }; $scope.maxSize = 5; $scope.bigTotalItems = 175; $scope.bigCurrentPage = 1; });
mit
selvasingh/azure-sdk-for-java
sdk/network/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/network/v2019_11_01/implementation/ConnectionResetSharedKeyImpl.java
962
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2019_11_01.implementation; import com.microsoft.azure.management.network.v2019_11_01.ConnectionResetSharedKey; import com.microsoft.azure.arm.model.implementation.WrapperImpl; class ConnectionResetSharedKeyImpl extends WrapperImpl<ConnectionResetSharedKeyInner> implements ConnectionResetSharedKey { private final NetworkManager manager; ConnectionResetSharedKeyImpl(ConnectionResetSharedKeyInner inner, NetworkManager manager) { super(inner); this.manager = manager; } @Override public NetworkManager manager() { return this.manager; } @Override public int keyLength() { return this.inner().keyLength(); } }
mit
crossgovernmentservices/csdigital-prototype
application/objectives/forms.py
1725
import datetime from flask.ext.wtf import Form from wtforms.validators import DataRequired, Required from wtforms.fields import StringField, TextAreaField from application.models import create_log_entry from application.utils import a_year_from_now class ObjectiveForm(Form): title = StringField('Title', validators=[Required()]) what = TextAreaField('What is your objective?', validators=[Required()]) how = TextAreaField('How will you achieve this?', validators=[Required()]) measures = TextAreaField('Measurements') outcomes = TextAreaField('Outcomes (optional)') deliverables = TextAreaField('Deliverables (optional)') progress = TextAreaField('What progress have you made? (optional - for project work only)') def update(self, objective): objective.entry.update( title=self.title.data, what=self.what.data, measures=self.measures.data, outcomes=self.outcomes.data, deliverables=self.deliverables.data, progress=self.progress.data, how=self.how.data) def create(self): objective = create_log_entry( 'objective', title=self.title.data, what=self.what.data, how=self.how.data, measures=self.measures.data, outcomes=self.outcomes.data, deliverables=self.deliverables.data, progress=self.progress.data, started_on=datetime.datetime.utcnow(), due_by=a_year_from_now()) objective.add_tag('Objective') return objective class EvidenceForm(Form): title = StringField('Title', validators=[DataRequired()]) content = TextAreaField('Content')
mit
brett-harvey/Smart-Contracts
Ethereum-based-Roll4Win/node_modules/h2x-parse/lib/index.js
365
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _jsdom = require("jsdom"); var _h2xTypes = require("h2x-types"); /* eslint-disable no-restricted-syntax */ function parse(code) { return (0, _h2xTypes.fromHtmlElement)(_jsdom.JSDOM.fragment(code)); } var _default = parse; exports.default = _default;
mit
daogames/docs.daogames.com
api_docs/daoclient-csharp/daounity/documentation/html/search/functions_0.js
899
var searchData= [ ['addauthenticationcallback',['AddAuthenticationCallback',['../../../../daonet/documentation/html/class_dao_net_1_1_dao_access.html#a79fa5e28a78727c2e61895cfcf3a5f13',1,'DaoNet::DaoAccess']]], ['addlogger',['AddLogger',['../../../../daonet/documentation/html/class_dao_net_1_1_dao_client.html#a7591d883f80179ce21fca6b494922fa8',1,'DaoNet::DaoClient']]], ['addstatecallback',['AddStateCallback',['../../../../daonet/documentation/html/class_dao_net_1_1_dao_access.html#a00bc796a9f4c184c101c3cec2764d727',1,'DaoNet::DaoAccess']]], ['addzonecallback',['AddZoneCallback',['../../../../daonet/documentation/html/class_dao_net_1_1_dao_access.html#a4cf8c1b5aa622189dbded9fd833467a4',1,'DaoNet::DaoAccess']]], ['authenticate',['Authenticate',['../../../../daonet/documentation/html/class_dao_net_1_1_dao_client.html#a455303105a6ef62d0717384aecbe0797',1,'DaoNet::DaoClient']]] ];
mit
jonathanvdc/Pixie
Pixie.Terminal/TerminalBase.cs
2694
namespace Pixie.Terminal { /// <summary> /// A base type for a terminal which can be used by Pixie /// to render markup nodes as text. /// </summary> public abstract class TerminalBase { /// <summary> /// Gets the style manager for this terminal. /// </summary> /// <returns>The style manager.</returns> public abstract StyleManager Style { get; } /// <summary> /// Gets the terminal's width, in characters. /// </summary> /// <returns>The terminal's width.</returns> public abstract int Width { get; } /// <summary> /// Tells if this terminal can render a particular /// character string. /// </summary> /// <param name="text">A string to render.</param> /// <returns> /// <c>true</c> if the text can be rendered; otherwise, <c>false</c>. /// </returns> public abstract bool CanRender(string text); /// <summary> /// Prints a string of characters to the terminal. /// </summary> /// <param name="text">The text to print.</param> public abstract void Write(string text); /// <summary> /// Prints an end-of-line sequence to the terminal. /// </summary> public abstract void WriteLine(); /// <summary> /// Separates the current text from the next /// by at least the given number of newlines. /// </summary> /// <param name="lineCount">The minimum number of newlines.</param> public abstract void WriteSeparator(int lineCount); /// <summary> /// Prints a character to the terminal. /// </summary> /// <param name="character">The character to write.</param> public virtual void Write(char character) { Write(character.ToString()); } /// <summary> /// Gets the first renderable string in a sequence of strings. /// If the sequence is empty or no string is renderable, <c>null</c> /// is returned. /// </summary> /// <param name="options">A sequence of strings.</param> /// <returns> /// The first renderable string in a sequence of strings. /// If the sequence is empty or no string is renderable, <c>null</c> /// is returned. 
/// </returns> public string GetFirstRenderableString(params string[] options) { foreach (var opt in options) { if (CanRender(opt)) { return opt; } } return null; } } }
mit
jaspervdm/pogoprotos-php
src/POGOProtos/Networking/Responses/GetInboxResponse/ClientInbox/Notification.php
18138
<?php /** * Generated by Protobuf protoc plugin. * * File descriptor : POGOProtos/Networking/Responses/GetInboxResponse.proto */ namespace POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox; /** * Protobuf message : * POGOProtos.Networking.Responses.GetInboxResponse.ClientInbox.Notification */ class Notification extends \Protobuf\AbstractMessage { /** * @var \Protobuf\UnknownFieldSet */ protected $unknownFieldSet = null; /** * @var \Protobuf\Extension\ExtensionFieldMap */ protected $extensions = null; /** * notification_id optional string = 1 * * @var string */ protected $notification_id = null; /** * title_key optional string = 2 * * @var string */ protected $title_key = null; /** * category optional string = 3 * * @var string */ protected $category = null; /** * create_timestamp_ms optional int64 = 4 * * @var int */ protected $create_timestamp_ms = null; /** * variables repeated message = 5 * * @var \Protobuf\Collection<\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\TemplateVariable> */ protected $variables = null; /** * labels repeated enum = 6 * * @var \Protobuf\Collection<\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\Notification\Label> */ protected $labels = null; /** * Check if 'notification_id' has a value * * @return bool */ public function hasNotificationId() { return $this->notification_id !== null; } /** * Get 'notification_id' value * * @return string */ public function getNotificationId() { return $this->notification_id; } /** * Set 'notification_id' value * * @param string $value */ public function setNotificationId($value = null) { $this->notification_id = $value; } /** * Check if 'title_key' has a value * * @return bool */ public function hasTitleKey() { return $this->title_key !== null; } /** * Get 'title_key' value * * @return string */ public function getTitleKey() { return $this->title_key; } /** * Set 'title_key' value * * @param string $value */ public function setTitleKey($value = null) { 
$this->title_key = $value; } /** * Check if 'category' has a value * * @return bool */ public function hasCategory() { return $this->category !== null; } /** * Get 'category' value * * @return string */ public function getCategory() { return $this->category; } /** * Set 'category' value * * @param string $value */ public function setCategory($value = null) { $this->category = $value; } /** * Check if 'create_timestamp_ms' has a value * * @return bool */ public function hasCreateTimestampMs() { return $this->create_timestamp_ms !== null; } /** * Get 'create_timestamp_ms' value * * @return int */ public function getCreateTimestampMs() { return $this->create_timestamp_ms; } /** * Set 'create_timestamp_ms' value * * @param int $value */ public function setCreateTimestampMs($value = null) { $this->create_timestamp_ms = $value; } /** * Check if 'variables' has a value * * @return bool */ public function hasVariablesList() { return $this->variables !== null; } /** * Get 'variables' value * * @return \Protobuf\Collection<\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\TemplateVariable> */ public function getVariablesList() { return $this->variables; } /** * Set 'variables' value * * @param \Protobuf\Collection<\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\TemplateVariable> $value */ public function setVariablesList(\Protobuf\Collection $value = null) { $this->variables = $value; } /** * Add a new element to 'variables' * * @param * \POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\TemplateVariable * $value */ public function addVariables(\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\TemplateVariable $value) { if ($this->variables === null) { $this->variables = new \Protobuf\MessageCollection(); } $this->variables->add($value); } /** * Check if 'labels' has a value * * @return bool */ public function hasLabelsList() { return $this->labels !== null; } /** * Get 'labels' value * * @return 
\Protobuf\Collection<\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\Notification\Label> */ public function getLabelsList() { return $this->labels; } /** * Set 'labels' value * * @param \Protobuf\Collection<\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\Notification\Label> $value */ public function setLabelsList(\Protobuf\Collection $value = null) { $this->labels = $value; } /** * Add a new element to 'labels' * * @param * \POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\Notification\Label * $value */ public function addLabels(\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\Notification\Label $value) { if ($this->labels === null) { $this->labels = new \Protobuf\EnumCollection(); } $this->labels->add($value); } /** * {@inheritdoc} */ public function extensions() { if ( $this->extensions !== null) { return $this->extensions; } return $this->extensions = new \Protobuf\Extension\ExtensionFieldMap(__CLASS__); } /** * {@inheritdoc} */ public function unknownFieldSet() { return $this->unknownFieldSet; } /** * {@inheritdoc} */ public static function fromStream($stream, \Protobuf\Configuration $configuration = null) { return new self($stream, $configuration); } /** * {@inheritdoc} */ public static function fromArray(array $values) { $message = new self(); $values = array_merge([ 'notification_id' => null, 'title_key' => null, 'category' => null, 'create_timestamp_ms' => null, 'variables' => [], 'labels' => [] ], $values); $message->setNotificationId($values['notification_id']); $message->setTitleKey($values['title_key']); $message->setCategory($values['category']); $message->setCreateTimestampMs($values['create_timestamp_ms']); foreach ($values['variables'] as $item) { $message->addVariables($item); } foreach ($values['labels'] as $item) { $message->addLabels($item); } return $message; } /** * {@inheritdoc} */ public static function descriptor() { return \google\protobuf\DescriptorProto::fromArray([ 'name' => 
'Notification', 'field' => [ \google\protobuf\FieldDescriptorProto::fromArray([ 'number' => 1, 'name' => 'notification_id', 'type' => \google\protobuf\FieldDescriptorProto\Type::TYPE_STRING(), 'label' => \google\protobuf\FieldDescriptorProto\Label::LABEL_OPTIONAL() ]), \google\protobuf\FieldDescriptorProto::fromArray([ 'number' => 2, 'name' => 'title_key', 'type' => \google\protobuf\FieldDescriptorProto\Type::TYPE_STRING(), 'label' => \google\protobuf\FieldDescriptorProto\Label::LABEL_OPTIONAL() ]), \google\protobuf\FieldDescriptorProto::fromArray([ 'number' => 3, 'name' => 'category', 'type' => \google\protobuf\FieldDescriptorProto\Type::TYPE_STRING(), 'label' => \google\protobuf\FieldDescriptorProto\Label::LABEL_OPTIONAL() ]), \google\protobuf\FieldDescriptorProto::fromArray([ 'number' => 4, 'name' => 'create_timestamp_ms', 'type' => \google\protobuf\FieldDescriptorProto\Type::TYPE_INT64(), 'label' => \google\protobuf\FieldDescriptorProto\Label::LABEL_OPTIONAL() ]), \google\protobuf\FieldDescriptorProto::fromArray([ 'number' => 5, 'name' => 'variables', 'type' => \google\protobuf\FieldDescriptorProto\Type::TYPE_MESSAGE(), 'label' => \google\protobuf\FieldDescriptorProto\Label::LABEL_REPEATED(), 'type_name' => '.POGOProtos.Networking.Responses.GetInboxResponse.ClientInbox.TemplateVariable' ]), \google\protobuf\FieldDescriptorProto::fromArray([ 'number' => 6, 'name' => 'labels', 'type' => \google\protobuf\FieldDescriptorProto\Type::TYPE_ENUM(), 'label' => \google\protobuf\FieldDescriptorProto\Label::LABEL_REPEATED(), 'type_name' => '.POGOProtos.Networking.Responses.GetInboxResponse.ClientInbox.Notification.Label' ]), ], ]); } /** * {@inheritdoc} */ public function toStream(\Protobuf\Configuration $configuration = null) { $config = $configuration ?: \Protobuf\Configuration::getInstance(); $context = $config->createWriteContext(); $stream = $context->getStream(); $this->writeTo($context); $stream->seek(0); return $stream; } /** * {@inheritdoc} */ public function 
writeTo(\Protobuf\WriteContext $context) { $stream = $context->getStream(); $writer = $context->getWriter(); $sizeContext = $context->getComputeSizeContext(); if ($this->notification_id !== null) { $writer->writeVarint($stream, 10); $writer->writeString($stream, $this->notification_id); } if ($this->title_key !== null) { $writer->writeVarint($stream, 18); $writer->writeString($stream, $this->title_key); } if ($this->category !== null) { $writer->writeVarint($stream, 26); $writer->writeString($stream, $this->category); } if ($this->create_timestamp_ms !== null) { $writer->writeVarint($stream, 32); $writer->writeVarint($stream, $this->create_timestamp_ms); } if ($this->variables !== null) { foreach ($this->variables as $val) { $writer->writeVarint($stream, 42); $writer->writeVarint($stream, $val->serializedSize($sizeContext)); $val->writeTo($context); } } if ($this->labels !== null) { foreach ($this->labels as $val) { $writer->writeVarint($stream, 48); $writer->writeVarint($stream, $val->value()); } } if ($this->extensions !== null) { $this->extensions->writeTo($context); } return $stream; } /** * {@inheritdoc} */ public function readFrom(\Protobuf\ReadContext $context) { $reader = $context->getReader(); $length = $context->getLength(); $stream = $context->getStream(); $limit = ($length !== null) ? 
($stream->tell() + $length) : null; while ($limit === null || $stream->tell() < $limit) { if ($stream->eof()) { break; } $key = $reader->readVarint($stream); $wire = \Protobuf\WireFormat::getTagWireType($key); $tag = \Protobuf\WireFormat::getTagFieldNumber($key); if ($stream->eof()) { break; } if ($tag === 1) { \Protobuf\WireFormat::assertWireType($wire, 9); $this->notification_id = $reader->readString($stream); continue; } if ($tag === 2) { \Protobuf\WireFormat::assertWireType($wire, 9); $this->title_key = $reader->readString($stream); continue; } if ($tag === 3) { \Protobuf\WireFormat::assertWireType($wire, 9); $this->category = $reader->readString($stream); continue; } if ($tag === 4) { \Protobuf\WireFormat::assertWireType($wire, 3); $this->create_timestamp_ms = $reader->readVarint($stream); continue; } if ($tag === 5) { \Protobuf\WireFormat::assertWireType($wire, 11); $innerSize = $reader->readVarint($stream); $innerMessage = new \POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\TemplateVariable(); if ($this->variables === null) { $this->variables = new \Protobuf\MessageCollection(); } $this->variables->add($innerMessage); $context->setLength($innerSize); $innerMessage->readFrom($context); $context->setLength($length); continue; } if ($tag === 6) { $innerSize = $reader->readVarint($stream); $innerLimit = $stream->tell() + $innerSize; if ($this->labels === null) { $this->labels = new \Protobuf\EnumCollection(); } while ($stream->tell() < $innerLimit) { $this->labels->add(\POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\Notification\Label::valueOf($reader->readVarint($stream))); } continue; } $extensions = $context->getExtensionRegistry(); $extension = $extensions ? 
$extensions->findByNumber(__CLASS__, $tag) : null; if ($extension !== null) { $this->extensions()->add($extension, $extension->readFrom($context, $wire)); continue; } if ($this->unknownFieldSet === null) { $this->unknownFieldSet = new \Protobuf\UnknownFieldSet(); } $data = $reader->readUnknown($stream, $wire); $unknown = new \Protobuf\Unknown($tag, $wire, $data); $this->unknownFieldSet->add($unknown); } } /** * {@inheritdoc} */ public function serializedSize(\Protobuf\ComputeSizeContext $context) { $calculator = $context->getSizeCalculator(); $size = 0; if ($this->notification_id !== null) { $size += 1; $size += $calculator->computeStringSize($this->notification_id); } if ($this->title_key !== null) { $size += 1; $size += $calculator->computeStringSize($this->title_key); } if ($this->category !== null) { $size += 1; $size += $calculator->computeStringSize($this->category); } if ($this->create_timestamp_ms !== null) { $size += 1; $size += $calculator->computeVarintSize($this->create_timestamp_ms); } if ($this->variables !== null) { foreach ($this->variables as $val) { $innerSize = $val->serializedSize($context); $size += 1; $size += $innerSize; $size += $calculator->computeVarintSize($innerSize); } } if ($this->labels !== null) { foreach ($this->labels as $val) { $size += 1; $size += $calculator->computeVarintSize($val->value()); } } if ($this->extensions !== null) { $size += $this->extensions->serializedSize($context); } return $size; } /** * {@inheritdoc} */ public function clear() { $this->notification_id = null; $this->title_key = null; $this->category = null; $this->create_timestamp_ms = null; $this->variables = null; $this->labels = null; } /** * {@inheritdoc} */ public function merge(\Protobuf\Message $message) { if ( ! 
$message instanceof \POGOProtos\Networking\Responses\GetInboxResponse\ClientInbox\Notification) { throw new \InvalidArgumentException(sprintf('Argument 1 passed to %s must be a %s, %s given', __METHOD__, __CLASS__, get_class($message))); } $this->notification_id = ($message->notification_id !== null) ? $message->notification_id : $this->notification_id; $this->title_key = ($message->title_key !== null) ? $message->title_key : $this->title_key; $this->category = ($message->category !== null) ? $message->category : $this->category; $this->create_timestamp_ms = ($message->create_timestamp_ms !== null) ? $message->create_timestamp_ms : $this->create_timestamp_ms; $this->variables = ($message->variables !== null) ? $message->variables : $this->variables; $this->labels = ($message->labels !== null) ? $message->labels : $this->labels; } }
mit
lancetw/react-isomorphic-bundle
src/shared/components/AppContainer.js
550
import React, { Component, PropTypes } from 'react' const Translate = require('react-translate-component') export default class AppContainer extends Component { static propTypes = { children: PropTypes.any, translator: Translate.translatorType } static childContextTypes = { translator: Translate.translatorType } constructor (props) { super(props) } getChildContext () { return { translator: this.props.translator } } render () { const { children } = this.props return children() } }
mit
rlishtaba/algorithmable
lib/algorithmable/data_structs/ordered_symbol_table.rb
443
module Algorithmable module DataStructs class OrderedSymbolTable extend Forwardable def_delegators :@imp, :[]=, :[], :key?, :empty?, :size, :keys, :max, :min, :floor, :ceiling, :rank, :delete def initialize(key_type, value_type) search_strategy_factory = Object.new.extend Algorithmable::Searches @imp = search_strategy_factory.new_binary_search_tree(key_type, value_type) end end end end
mit
martijnvermaat/wiggelen
tests/test_wiggle.py
8867
""" Tests for the wiggle module. """ import os from itertools import chain from nose.tools import * import wiggelen from wiggelen.index import INDEX_SUFFIX, clear_cache DATA_DIR = os.path.join(os.path.dirname(__file__), 'data') def open_(filename, mode='r'): """ Open a file from the test data. """ return open(os.path.join(DATA_DIR, filename), mode) def remove_indices(keep_cache=False): """ Cleanup any index files for the test data. """ if not keep_cache: clear_cache() for file in os.listdir(DATA_DIR): if file.endswith(INDEX_SUFFIX): os.unlink(os.path.join(DATA_DIR, file)) def sparse(region, positions): """ Create a walker for one region with given defined positions (values are same as position). """ return ((region, p, p) for p in positions) def filled(region, start, stop, none=[]): """ Create a walker for one region with all positions between start and stop (inclusive). Values are same as position, except for those listed in none (they are None). """ return ((region, p, None if p in none else p) for p in range(start, stop + 1)) class TestWiggle(object): """ Tests for the wiggle module. """ @classmethod def setup_class(cls): remove_indices() def teardown(self): remove_indices() def test_walk_fixed_step(self): """ Walk over a fixed step wiggle track. """ c = [('chr8', 1, 11), ('chr8', 2, 11), ('chr8', 6, 33), ('chr8', 7, 33), ('chr8', 11, 44), ('chr8', 12, 44)] walker = wiggelen.walk(open_('fixedstep.wig')) for expected, item in zip(c, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) def test_walk_fixed_step_without_step(self): """ Walk over a fixed step wiggle track without `step` arguments. According to the spec, `fixedStep` definitions require the `step` argument. However, there seems to be real-world data where it is missing and the UCSC Genome Browser can still work with it. So we also support it. 
Issue: https://github.com/martijnvermaat/wiggelen/issues/1 """ c = [('chr', 1, 64.), ('chr', 2, 64.), ('chr', 3, 65.), ('chr', 4, 66.), ('chr', 5, 66.), ('chr', 6, 66.), ('chr', 7, 69.), ('chr', 8, 70.), ('chr', 9, 71.), ('chr', 10, 71.), ('chr', 11, 71.), ('chr', 12, 71.), ('chr', 13, 71.), ('chr', 14, 71.), ('chr', 15, 71.), ('chr', 16, 71.), ('chr', 17, 71.), ('chr', 18, 71.), ('chr', 19, 73.), ('chr', 20, 73.), ('chr', 21, 73.), ('chr', 22, 73.), ('chr', 23, 73.), ('chr', 24, 73.), ('chr', 25, 73.), ('chr', 26, 74.), ('chr', 27, 75.), ('chr', 28, 75.), ('chr', 29, 75.), ('chr', 30, 75.), ('chr', 31, 76.)] walker = wiggelen.walk(open_('fixedstep-without-step.wig')) for expected, item in zip(c, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) def test_walk_single_region(self): """ Walk over a track with a single region. """ c = [('MT', 1, 364.0), ('MT', 6, 435.0), ('MT', 10, 485.0)] walker = wiggelen.walk(open_('c.wig')) for expected, item in zip(c, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) def test_walk_multiple_regions(self): """ Walk over a track with multiple regions. """ values = [(2, 392.0), (3, 408.0), (4, 420.0), (5, 452.0), (7, 466.0), (8, 474.0), (9, 479.0)] b = [(r, p, v) for r in ('MT', '1', '13') for (p, v) in values] walker = wiggelen.walk(open_('b.wig')) for expected, item in zip(b, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) def test_sort_multiple_regions(self): """ Walk over a track with multiple regions and index. """ values = [(2, 392.0), (3, 408.0), (4, 420.0), (5, 452.0), (7, 466.0), (8, 474.0), (9, 479.0)] b = [(r, p, v) for r in ('1', '13', 'MT') for (p, v) in values] walker = wiggelen.walk(open_('b.wig'), force_index=True) for expected, item in zip(b, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) def test_store_index(self): """ Walk over a track after the index has been made. 
""" values = [(2, 392.0), (3, 408.0), (4, 420.0), (5, 452.0), (7, 466.0), (8, 474.0), (9, 479.0)] b = [(r, p, v) for r in ('1', '13', 'MT') for (p, v) in values] walker = wiggelen.walk(open_('b.wig'), force_index=True) for expected, item in zip(b, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) walker = wiggelen.walk(open_('b.wig')) for expected, item in zip(b, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) def test_cache_index(self): """ Walk over a track after the index has been made but has been removed from the filesystem. """ values = [(2, 392.0), (3, 408.0), (4, 420.0), (5, 452.0), (7, 466.0), (8, 474.0), (9, 479.0)] b = [(r, p, v) for r in ('1', '13', 'MT') for (p, v) in values] walker = wiggelen.walk(open_('b.wig'), force_index=True) for expected, item in zip(b, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) remove_indices(keep_cache=True) walker = wiggelen.walk(open_('b.wig')) for expected, item in zip(b, walker): assert_equal(expected, item) assert_raises(StopIteration, next, walker) def test_walk_complex(self): """ Walk over a complex track. """ walker = wiggelen.walk(open_('complex.wig')) for _ in walker: pass def test_fill_open(self): """ Test filling undefined positions. """ walker = sparse('a', [3, 5, 6, 8]) expected = list(filled('a', 3, 8, [4, 7])) assert_equal(list(wiggelen.fill(walker)), expected) def test_fill_closed(self): """ Test filling undefined positions with start and stop. """ walker = sparse('a', [3, 5, 6, 8]) expected = list(filled('a', 1, 10, [1, 2, 4, 7, 9, 10])) assert_equal(list(wiggelen.fill(walker, regions={'a': (1, 10)})), expected) def test_fill_subset(self): """ Test filling undefined positions on a subset. 
""" walker = sparse('a', [1, 3, 5, 6, 8, 10]) expected = [('a', 1, 1)] + list(filled('a', 3, 8, [4, 7])) + [('a', 10, 10)] assert_equal(list(wiggelen.fill(walker, regions={'a': (3, 8)})), expected) def test_fill_regions(self): """ Test filling undefined positions over multiple regions. """ a = sparse('a', [3, 5, 6, 8]) b = sparse('b', [3, 5, 6, 8]) c = sparse('c', [1, 3, 5, 6, 8, 10]) walker = chain(a, b, c) e_a = list(sparse('a', [3, 5, 6, 8])) e_b = list(filled('b', 1, 10, [1, 2, 4, 7, 9, 10])) e_c = [('c', 1, 1)] + list(filled('c', 3, 8, [4, 7])) + [('c', 10, 10)] expected = list(chain(e_a, e_b, e_c)) assert_equal(list(wiggelen.fill(walker, regions={'b': (1, 10), 'c': (3, 8)})), expected) def test_fill_only_edges(self): """ Test filling edges of undefined positions. """ walker = sparse('a', [3, 5, 6, 14]) expected = [('a', 3, 3), ('a', 4, None), ('a', 5, 5), ('a', 6, 6), ('a', 7, None), ('a', 13, None), ('a', 14, 14)] assert_equal(list(wiggelen.fill(walker, only_edges=True)), expected)
mit
cloudfoundry-community/firehose-to-syslog
vendor/code.cloudfoundry.org/go-loggregator/servers_test.go
2170
package loggregator_test import ( "crypto/tls" "crypto/x509" "io/ioutil" "net" "golang.org/x/net/context" "google.golang.org/grpc" "google.golang.org/grpc/credentials" "code.cloudfoundry.org/go-loggregator/rpc/loggregator_v2" ) type testIngressServer struct { receivers chan loggregator_v2.Ingress_BatchSenderServer sendReceiver chan *loggregator_v2.EnvelopeBatch addr string tlsConfig *tls.Config grpcServer *grpc.Server grpc.Stream } func newTestIngressServer(serverCert, serverKey, caCert string) (*testIngressServer, error) { cert, err := tls.LoadX509KeyPair(serverCert, serverKey) if err != nil { return nil, err } tlsConfig := &tls.Config{ Certificates: []tls.Certificate{cert}, ClientAuth: tls.RequestClientCert, InsecureSkipVerify: false, } caCertBytes, err := ioutil.ReadFile(caCert) if err != nil { return nil, err } caCertPool := x509.NewCertPool() caCertPool.AppendCertsFromPEM(caCertBytes) tlsConfig.RootCAs = caCertPool return &testIngressServer{ tlsConfig: tlsConfig, receivers: make(chan loggregator_v2.Ingress_BatchSenderServer), sendReceiver: make(chan *loggregator_v2.EnvelopeBatch, 100), addr: "localhost:0", }, nil } func (*testIngressServer) Sender(srv loggregator_v2.Ingress_SenderServer) error { return nil } func (t *testIngressServer) BatchSender(srv loggregator_v2.Ingress_BatchSenderServer) error { t.receivers <- srv <-srv.Context().Done() return nil } func (t *testIngressServer) Send(_ context.Context, b *loggregator_v2.EnvelopeBatch) (*loggregator_v2.SendResponse, error) { t.sendReceiver <- b return &loggregator_v2.SendResponse{}, nil } func (t *testIngressServer) start() error { listener, err := net.Listen("tcp4", t.addr) if err != nil { return err } t.addr = listener.Addr().String() var opts []grpc.ServerOption if t.tlsConfig != nil { opts = append(opts, grpc.Creds(credentials.NewTLS(t.tlsConfig))) } t.grpcServer = grpc.NewServer(opts...) 
loggregator_v2.RegisterIngressServer(t.grpcServer, t) go t.grpcServer.Serve(listener) return nil } func (t *testIngressServer) stop() { t.grpcServer.Stop() }
mit
Vectorface/auth
src/Plugin/SuccessPlugin.php
781
<?php namespace Vectorface\Auth\Plugin; use Vectorface\Auth\Auth; /** * An auth plugin that always succeeds. Useful in development. */ class SuccessPlugin extends BaseAuthPlugin { /** * Auth plugin hook to be fired on login. * * @param string $username * @param string $password * @return int */ public function login($username, $password) { return Auth::RESULT_SUCCESS; } /** * Auth plugin hook to be fired on auth verification. * * @return int */ public function verify() { return Auth::RESULT_SUCCESS; } /** * Auth plugin hook to be fired on logout. * * @return int */ public function logout() { return Auth::RESULT_SUCCESS; } }
mit
sasha240100/three.js
examples/js/exporters/GLTFExporter.js
29102
/** * @author fernandojsg / http://fernandojsg.com */ //------------------------------------------------------------------------------ // Constants //------------------------------------------------------------------------------ var WEBGL_CONSTANTS = { POINTS: 0x0000, LINES: 0x0001, LINE_LOOP: 0x0002, LINE_STRIP: 0x0003, TRIANGLES: 0x0004, TRIANGLE_STRIP: 0x0005, TRIANGLE_FAN: 0x0006, UNSIGNED_BYTE: 0x1401, UNSIGNED_SHORT: 0x1403, FLOAT: 0x1406, UNSIGNED_INT: 0x1405, ARRAY_BUFFER: 0x8892, ELEMENT_ARRAY_BUFFER: 0x8893, NEAREST: 0x2600, LINEAR: 0x2601, NEAREST_MIPMAP_NEAREST: 0x2700, LINEAR_MIPMAP_NEAREST: 0x2701, NEAREST_MIPMAP_LINEAR: 0x2702, LINEAR_MIPMAP_LINEAR: 0x2703 }; var THREE_TO_WEBGL = { // @TODO Replace with computed property name [THREE.*] when available on es6 1003: WEBGL_CONSTANTS.NEAREST, 1004: WEBGL_CONSTANTS.NEAREST_MIPMAP_NEAREST, 1005: WEBGL_CONSTANTS.NEAREST_MIPMAP_LINEAR, 1006: WEBGL_CONSTANTS.LINEAR, 1007: WEBGL_CONSTANTS.LINEAR_MIPMAP_NEAREST, 1008: WEBGL_CONSTANTS.LINEAR_MIPMAP_LINEAR }; var PATH_PROPERTIES = { scale: 'scale', position: 'translation', quaternion: 'rotation', morphTargetInfluences: 'weights' }; //------------------------------------------------------------------------------ // GLTF Exporter //------------------------------------------------------------------------------ THREE.GLTFExporter = function () {}; THREE.GLTFExporter.prototype = { constructor: THREE.GLTFExporter, /** * Parse scenes and generate GLTF output * @param {THREE.Scene or [THREE.Scenes]} input THREE.Scene or Array of THREE.Scenes * @param {Function} onDone Callback on completed * @param {Object} options options */ parse: function ( input, onDone, options ) { var DEFAULT_OPTIONS = { trs: false, onlyVisible: true, truncateDrawRange: true, embedImages: true, animations: [] }; options = Object.assign( {}, DEFAULT_OPTIONS, options ); if ( options.animations.length > 0 ) { // Only TRS properties, and not matrices, may be targeted by animation. 
options.trs = true; } var outputJSON = { asset: { version: "2.0", generator: "THREE.GLTFExporter" } }; var byteOffset = 0; var dataViews = []; var nodeMap = {}; var skins = []; var cachedData = { images: {}, materials: {} }; var cachedCanvas; /** * Compare two arrays */ /** * Compare two arrays * @param {Array} array1 Array 1 to compare * @param {Array} array2 Array 2 to compare * @return {Boolean} Returns true if both arrays are equal */ function equalArray( array1, array2 ) { return ( array1.length === array2.length ) && array1.every( function ( element, index ) { return element === array2[ index ]; } ); } /** * Converts a string to an ArrayBuffer. * @param {string} text * @return {ArrayBuffer} */ function stringToArrayBuffer( text ) { if ( window.TextEncoder !== undefined ) { return new TextEncoder().encode( text ).buffer; } var buffer = new ArrayBuffer( text.length ); var bufferView = new Uint8Array( buffer ); for ( var i = 0; i < text.length; ++ i ) { bufferView[ i ] = text.charCodeAt( i ); } return buffer; } /** * Get the min and max vectors from the given attribute * @param {THREE.BufferAttribute} attribute Attribute to find the min/max * @return {Object} Object containing the `min` and `max` values (As an array of attribute.itemSize components) */ function getMinMax( attribute ) { var output = { min: new Array( attribute.itemSize ).fill( Number.POSITIVE_INFINITY ), max: new Array( attribute.itemSize ).fill( Number.NEGATIVE_INFINITY ) }; for ( var i = 0; i < attribute.count; i ++ ) { for ( var a = 0; a < attribute.itemSize; a ++ ) { var value = attribute.array[ i * attribute.itemSize + a ]; output.min[ a ] = Math.min( output.min[ a ], value ); output.max[ a ] = Math.max( output.max[ a ], value ); } } return output; } /** * Process a buffer to append to the default one. 
* @param {THREE.BufferAttribute} attribute Attribute to store * @param {Integer} componentType Component type (Unsigned short, unsigned int or float) * @return {Integer} Index of the buffer created (Currently always 0) */ function processBuffer( attribute, componentType, start, count ) { if ( ! outputJSON.buffers ) { outputJSON.buffers = [ { byteLength: 0, uri: '' } ]; } var offset = 0; var componentSize = componentType === WEBGL_CONSTANTS.UNSIGNED_SHORT ? 2 : 4; // Create a new dataview and dump the attribute's array into it var byteLength = count * attribute.itemSize * componentSize; var dataView = new DataView( new ArrayBuffer( byteLength ) ); for ( var i = start; i < start + count; i ++ ) { for ( var a = 0; a < attribute.itemSize; a ++ ) { var value = attribute.array[ i * attribute.itemSize + a ]; if ( componentType === WEBGL_CONSTANTS.FLOAT ) { dataView.setFloat32( offset, value, true ); } else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_INT ) { dataView.setUint8( offset, value, true ); } else if ( componentType === WEBGL_CONSTANTS.UNSIGNED_SHORT ) { dataView.setUint16( offset, value, true ); } offset += componentSize; } } // We just use one buffer dataViews.push( dataView ); // Always using just one buffer return 0; } /** * Process and generate a BufferView * @param {THREE.BufferAttribute} data * @param {number} componentType * @param {number} start * @param {number} count * @param {number} target (Optional) Target usage of the BufferView * @return {Object} */ function processBufferView( data, componentType, start, count, target ) { if ( ! outputJSON.bufferViews ) { outputJSON.bufferViews = []; } var componentSize = componentType === WEBGL_CONSTANTS.UNSIGNED_SHORT ? 
2 : 4; // Create a new dataview and dump the attribute's array into it var byteLength = count * data.itemSize * componentSize; var gltfBufferView = { buffer: processBuffer( data, componentType, start, count ), byteOffset: byteOffset, byteLength: byteLength }; if ( target !== undefined ) gltfBufferView.target = target; if ( target === WEBGL_CONSTANTS.ARRAY_BUFFER ) { // Only define byteStride for vertex attributes. gltfBufferView.byteStride = data.itemSize * componentSize; } byteOffset += byteLength; outputJSON.bufferViews.push( gltfBufferView ); // @TODO Ideally we'll have just two bufferviews: 0 is for vertex attributes, 1 for indices var output = { id: outputJSON.bufferViews.length - 1, byteLength: 0 }; return output; } /** * Process attribute to generate an accessor * @param {THREE.BufferAttribute} attribute Attribute to process * @param {THREE.BufferGeometry} geometry (Optional) Geometry used for truncated draw range * @return {Integer} Index of the processed accessor on the "accessors" array */ function processAccessor( attribute, geometry ) { if ( ! outputJSON.accessors ) { outputJSON.accessors = []; } var types = { 1: 'SCALAR', 2: 'VEC2', 3: 'VEC3', 4: 'VEC4', 16: 'MAT4' }; var componentType; // Detect the component type of the attribute array (float, uint or ushort) if ( attribute.array.constructor === Float32Array ) { componentType = WEBGL_CONSTANTS.FLOAT; } else if ( attribute.array.constructor === Uint32Array ) { componentType = WEBGL_CONSTANTS.UNSIGNED_INT; } else if ( attribute.array.constructor === Uint16Array ) { componentType = WEBGL_CONSTANTS.UNSIGNED_SHORT; } else { throw new Error( 'THREE.GLTFExporter: Unsupported bufferAttribute component type.' 
); } var minMax = getMinMax( attribute ); var start = 0; var count = attribute.count; // @TODO Indexed buffer geometry with drawRange not supported yet if ( options.truncateDrawRange && geometry !== undefined && geometry.index === null ) { start = geometry.drawRange.start; count = geometry.drawRange.count !== Infinity ? geometry.drawRange.count : attribute.count; } var bufferViewTarget; // If geometry isn't provided, don't infer the target usage of the bufferView. For // animation samplers, target must not be set. if ( geometry !== undefined ) { var isVertexAttributes = componentType === WEBGL_CONSTANTS.FLOAT; bufferViewTarget = isVertexAttributes ? WEBGL_CONSTANTS.ARRAY_BUFFER : WEBGL_CONSTANTS.ELEMENT_ARRAY_BUFFER; } var bufferView = processBufferView( attribute, componentType, start, count, bufferViewTarget ); var gltfAccessor = { bufferView: bufferView.id, byteOffset: bufferView.byteOffset, componentType: componentType, count: count, max: minMax.max, min: minMax.min, type: types[ attribute.itemSize ] }; outputJSON.accessors.push( gltfAccessor ); return outputJSON.accessors.length - 1; } /** * Process image * @param {Texture} map Texture to process * @return {Integer} Index of the processed texture in the "images" array */ function processImage( map ) { if ( cachedData.images[ map.uuid ] !== undefined ) { return cachedData.images[ map.uuid ]; } if ( ! outputJSON.images ) { outputJSON.images = []; } var mimeType = map.format === THREE.RGBAFormat ? 'image/png' : 'image/jpeg'; var gltfImage = {mimeType: mimeType}; if ( options.embedImages ) { var canvas = cachedCanvas = cachedCanvas || document.createElement( 'canvas' ); canvas.width = map.image.width; canvas.height = map.image.height; var ctx = canvas.getContext( '2d' ); if ( map.flipY === true ) { ctx.translate( 0, map.image.height ); ctx.scale( 1, -1 ); } ctx.drawImage( map.image, 0, 0 ); // @TODO Embed in { bufferView } if options.binary set. 
gltfImage.uri = canvas.toDataURL( mimeType ); } else { gltfImage.uri = map.image.src; } outputJSON.images.push( gltfImage ); var index = outputJSON.images.length - 1; cachedData.images[ map.uuid ] = index; return index; } /** * Process sampler * @param {Texture} map Texture to process * @return {Integer} Index of the processed texture in the "samplers" array */ function processSampler( map ) { if ( ! outputJSON.samplers ) { outputJSON.samplers = []; } var gltfSampler = { magFilter: THREE_TO_WEBGL[ map.magFilter ], minFilter: THREE_TO_WEBGL[ map.minFilter ], wrapS: THREE_TO_WEBGL[ map.wrapS ], wrapT: THREE_TO_WEBGL[ map.wrapT ] }; outputJSON.samplers.push( gltfSampler ); return outputJSON.samplers.length - 1; } /** * Process texture * @param {Texture} map Map to process * @return {Integer} Index of the processed texture in the "textures" array */ function processTexture( map ) { if ( ! outputJSON.textures ) { outputJSON.textures = []; } var gltfTexture = { sampler: processSampler( map ), source: processImage( map ) }; outputJSON.textures.push( gltfTexture ); return outputJSON.textures.length - 1; } /** * Process material * @param {THREE.Material} material Material to process * @return {Integer} Index of the processed material in the "materials" array */ function processMaterial( material ) { if ( cachedData.materials[ material.uuid ] !== undefined ) { return cachedData.materials[ material.uuid ]; } if ( ! outputJSON.materials ) { outputJSON.materials = []; } if ( material instanceof THREE.ShaderMaterial ) { console.warn( 'GLTFExporter: THREE.ShaderMaterial not supported.' ); return null; } if ( ! ( material instanceof THREE.MeshStandardMaterial ) ) { console.warn( 'GLTFExporter: Currently just THREE.StandardMaterial is supported. Material conversion may lose information.' ); } // @QUESTION Should we avoid including any attribute that has the default value? 
var gltfMaterial = { pbrMetallicRoughness: {} }; // pbrMetallicRoughness.baseColorFactor var color = material.color.toArray().concat( [ material.opacity ] ); if ( ! equalArray( color, [ 1, 1, 1, 1 ] ) ) { gltfMaterial.pbrMetallicRoughness.baseColorFactor = color; } if ( material instanceof THREE.MeshStandardMaterial ) { gltfMaterial.pbrMetallicRoughness.metallicFactor = material.metalness; gltfMaterial.pbrMetallicRoughness.roughnessFactor = material.roughness; } else { gltfMaterial.pbrMetallicRoughness.metallicFactor = 0.5; gltfMaterial.pbrMetallicRoughness.roughnessFactor = 0.5; } // pbrMetallicRoughness.baseColorTexture if ( material.map ) { gltfMaterial.pbrMetallicRoughness.baseColorTexture = { index: processTexture( material.map ) }; } if ( material instanceof THREE.MeshBasicMaterial || material instanceof THREE.LineBasicMaterial || material instanceof THREE.PointsMaterial ) { } else { // emissiveFactor var emissive = material.emissive.clone().multiplyScalar( material.emissiveIntensity ).toArray(); if ( ! equalArray( emissive, [ 0, 0, 0 ] ) ) { gltfMaterial.emissiveFactor = emissive; } // emissiveTexture if ( material.emissiveMap ) { gltfMaterial.emissiveTexture = { index: processTexture( material.emissiveMap ) }; } } // normalTexture if ( material.normalMap ) { gltfMaterial.normalTexture = { index: processTexture( material.normalMap ) }; if ( material.normalScale.x !== - 1 ) { if ( material.normalScale.x !== material.normalScale.y ) { console.warn( 'THREE.GLTFExporter: Normal scale components are different, ignoring Y and exporting X.' ); } gltfMaterial.normalTexture.scale = material.normalScale.x; } } // occlusionTexture if ( material.aoMap ) { gltfMaterial.occlusionTexture = { index: processTexture( material.aoMap ) }; if ( material.aoMapIntensity !== 1.0 ) { gltfMaterial.occlusionTexture.strength = material.aoMapIntensity; } } // alphaMode if ( material.transparent || material.alphaTest > 0.0 ) { gltfMaterial.alphaMode = material.opacity < 1.0 ? 
'BLEND' : 'MASK'; // Write alphaCutoff if it's non-zero and different from the default (0.5). if ( material.alphaTest > 0.0 && material.alphaTest !== 0.5 ) { gltfMaterial.alphaCutoff = material.alphaTest; } } // doubleSided if ( material.side === THREE.DoubleSide ) { gltfMaterial.doubleSided = true; } if ( material.name ) { gltfMaterial.name = material.name; } outputJSON.materials.push( gltfMaterial ); var index = outputJSON.materials.length - 1; cachedData.materials[ material.uuid ] = index; return index; } /** * Process mesh * @param {THREE.Mesh} mesh Mesh to process * @return {Integer} Index of the processed mesh in the "meshes" array */ function processMesh( mesh ) { if ( ! outputJSON.meshes ) { outputJSON.meshes = []; } var geometry = mesh.geometry; var mode; // Use the correct mode if ( mesh instanceof THREE.LineSegments ) { mode = WEBGL_CONSTANTS.LINES; } else if ( mesh instanceof THREE.LineLoop ) { mode = WEBGL_CONSTANTS.LINE_LOOP; } else if ( mesh instanceof THREE.Line ) { mode = WEBGL_CONSTANTS.LINE_STRIP; } else if ( mesh instanceof THREE.Points ) { mode = WEBGL_CONSTANTS.POINTS; } else { if ( ! geometry.isBufferGeometry ) { var geometryTemp = new THREE.BufferGeometry(); geometryTemp.fromGeometry( geometry ); geometry = geometryTemp; } if ( mesh.drawMode === THREE.TriangleFanDrawMode ) { console.warn( 'GLTFExporter: TriangleFanDrawMode and wireframe incompatible.' ); mode = WEBGL_CONSTANTS.TRIANGLE_FAN; } else if ( mesh.drawMode === THREE.TriangleStripDrawMode ) { mode = mesh.material.wireframe ? WEBGL_CONSTANTS.LINE_STRIP : WEBGL_CONSTANTS.TRIANGLE_STRIP; } else { mode = mesh.material.wireframe ? 
WEBGL_CONSTANTS.LINES : WEBGL_CONSTANTS.TRIANGLES; } } var gltfMesh = { primitives: [ { mode: mode, attributes: {}, } ] }; var material = processMaterial( mesh.material ); if ( material !== null ) { gltfMesh.primitives[ 0 ].material = material; } if ( geometry.index ) { gltfMesh.primitives[ 0 ].indices = processAccessor( geometry.index, geometry ); } // We've just one primitive per mesh var gltfAttributes = gltfMesh.primitives[ 0 ].attributes; // Conversion between attributes names in threejs and gltf spec var nameConversion = { uv: 'TEXCOORD_0', uv2: 'TEXCOORD_1', color: 'COLOR_0', skinWeight: 'WEIGHTS_0', skinIndex: 'JOINTS_0' }; // @QUESTION Detect if .vertexColors = THREE.VertexColors? // For every attribute create an accessor for ( var attributeName in geometry.attributes ) { var attribute = geometry.attributes[ attributeName ]; attributeName = nameConversion[ attributeName ] || attributeName.toUpperCase(); if ( attributeName.substr( 0, 5 ) !== 'MORPH' ) { gltfAttributes[ attributeName ] = processAccessor( attribute, geometry ); } } // Morph targets if ( mesh.morphTargetInfluences !== undefined && mesh.morphTargetInfluences.length > 0 ) { gltfMesh.primitives[ 0 ].targets = []; for ( var i = 0; i < mesh.morphTargetInfluences.length; ++ i ) { var target = {}; for ( var attributeName in geometry.morphAttributes ) { var attribute = geometry.morphAttributes[ attributeName ][ i ]; attributeName = nameConversion[ attributeName ] || attributeName.toUpperCase(); target[ attributeName ] = processAccessor( attribute, geometry ); } gltfMesh.primitives[ 0 ].targets.push( target ); } } outputJSON.meshes.push( gltfMesh ); return outputJSON.meshes.length - 1; } /** * Process camera * @param {THREE.Camera} camera Camera to process * @return {Integer} Index of the processed mesh in the "camera" array */ function processCamera( camera ) { if ( ! 
outputJSON.cameras ) { outputJSON.cameras = []; } var isOrtho = camera instanceof THREE.OrthographicCamera; var gltfCamera = { type: isOrtho ? 'orthographic' : 'perspective' }; if ( isOrtho ) { gltfCamera.orthographic = { xmag: camera.right * 2, ymag: camera.top * 2, zfar: camera.far, znear: camera.near }; } else { gltfCamera.perspective = { aspectRatio: camera.aspect, yfov: THREE.Math.degToRad( camera.fov ) / camera.aspect, zfar: camera.far, znear: camera.near }; } if ( camera.name ) { gltfCamera.name = camera.type; } outputJSON.cameras.push( gltfCamera ); return outputJSON.cameras.length - 1; } /** * Creates glTF animation entry from AnimationClip object. * * Status: * - Only properties listed in PATH_PROPERTIES may be animated. * - Only LINEAR and STEP interpolation currently supported. * * @param {THREE.AnimationClip} clip * @param {THREE.Object3D} root * @return {number} */ function processAnimation ( clip, root ) { if ( ! outputJSON.animations ) { outputJSON.animations = []; } var channels = []; var samplers = []; for ( var i = 0; i < clip.tracks.length; ++ i ) { var track = clip.tracks[ i ]; var trackBinding = THREE.PropertyBinding.parseTrackName( track.name ); var trackNode = THREE.PropertyBinding.findNode( root, trackBinding.nodeName ); var trackProperty = PATH_PROPERTIES[ trackBinding.propertyName ]; if ( trackBinding.objectName === 'bones' ) { if ( trackNode.isSkinnedMesh === true ) { trackNode = trackNode.skeleton.getBoneByName( trackBinding.objectIndex ); } else { trackNode = undefined; } } if ( ! trackNode || ! 
trackProperty ) { console.warn( 'THREE.GLTFExporter: Could not export animation track "%s".', track.name ); return null; } var inputItemSize = 1; var outputItemSize = track.values.length / track.times.length; if ( trackProperty === PATH_PROPERTIES.morphTargetInfluences ) { outputItemSize /= trackNode.morphTargetInfluences.length; } samplers.push( { input: processAccessor( new THREE.BufferAttribute( track.times, inputItemSize ) ), output: processAccessor( new THREE.BufferAttribute( track.values, outputItemSize ) ), interpolation: track.interpolation === THREE.InterpolateDiscrete ? 'STEP' : 'LINEAR' } ); channels.push( { sampler: samplers.length - 1, target: { node: nodeMap[ trackNode.uuid ], path: trackProperty } } ); } outputJSON.animations.push( { name: clip.name || 'clip_' + outputJSON.animations.length, samplers: samplers, channels: channels } ); return outputJSON.animations.length - 1; } function processSkin( object ) { var node = outputJSON.nodes[ nodeMap[ object.uuid ] ]; var skeleton = object.skeleton; var rootJoint = object.skeleton.bones[ 0 ]; if ( rootJoint === undefined ) return null; var joints = []; var inverseBindMatrices = new Float32Array( skeleton.bones.length * 16 ); for ( var i = 0; i < skeleton.bones.length; ++ i ) { joints.push( nodeMap[ skeleton.bones[ i ].uuid ] ); skeleton.boneInverses[ i ].toArray( inverseBindMatrices, i * 16 ); } if ( outputJSON.skins === undefined ) { outputJSON.skins = []; } outputJSON.skins.push( { inverseBindMatrices: processAccessor( new THREE.BufferAttribute( inverseBindMatrices, 16 ) ), joints: joints, skeleton: nodeMap[ rootJoint.uuid ] } ); var skinIndex = node.skin = outputJSON.skins.length - 1; return skinIndex; } /** * Process Object3D node * @param {THREE.Object3D} node Object3D to processNode * @return {Integer} Index of the node in the nodes list */ function processNode( object ) { if ( object instanceof THREE.Light ) { console.warn( 'GLTFExporter: Unsupported node type:', object.constructor.name ); return 
null; } if ( ! outputJSON.nodes ) { outputJSON.nodes = []; } var gltfNode = {}; if ( options.trs ) { var rotation = object.quaternion.toArray(); var position = object.position.toArray(); var scale = object.scale.toArray(); if ( ! equalArray( rotation, [ 0, 0, 0, 1 ] ) ) { gltfNode.rotation = rotation; } if ( ! equalArray( position, [ 0, 0, 0 ] ) ) { gltfNode.translation = position; } if ( ! equalArray( scale, [ 1, 1, 1 ] ) ) { gltfNode.scale = scale; } } else { object.updateMatrix(); if ( ! equalArray( object.matrix.elements, [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ] ) ) { gltfNode.matrix = object.matrix.elements; } } if ( object.name ) { gltfNode.name = object.name; } if ( object.userData && Object.keys( object.userData ).length > 0 ) { try { gltfNode.extras = JSON.parse( JSON.stringify( object.userData ) ); } catch ( e ) { throw new Error( 'THREE.GLTFExporter: userData can\'t be serialized' ); } } if ( object instanceof THREE.Mesh || object instanceof THREE.Line || object instanceof THREE.Points ) { gltfNode.mesh = processMesh( object ); } else if ( object instanceof THREE.Camera ) { gltfNode.camera = processCamera( object ); } if ( object instanceof THREE.SkinnedMesh ) { skins.push( object ); } if ( object.children.length > 0 ) { var children = []; for ( var i = 0, l = object.children.length; i < l; i ++ ) { var child = object.children[ i ]; if ( child.visible || options.onlyVisible === false ) { var node = processNode( child ); if ( node !== null ) { children.push( node ); } } } if ( children.length > 0 ) { gltfNode.children = children; } } outputJSON.nodes.push( gltfNode ); var nodeIndex = nodeMap[ object.uuid ] = outputJSON.nodes.length - 1; return nodeIndex; } /** * Process Scene * @param {THREE.Scene} node Scene to process */ function processScene( scene ) { if ( ! 
outputJSON.scenes ) { outputJSON.scenes = []; outputJSON.scene = 0; } var gltfScene = { nodes: [] }; if ( scene.name ) { gltfScene.name = scene.name; } outputJSON.scenes.push( gltfScene ); var nodes = []; for ( var i = 0, l = scene.children.length; i < l; i ++ ) { var child = scene.children[ i ]; if ( child.visible || options.onlyVisible === false ) { var node = processNode( child ); if ( node !== null ) { nodes.push( node ); } } } if ( nodes.length > 0 ) { gltfScene.nodes = nodes; } } /** * Creates a THREE.Scene to hold a list of objects and parse it * @param {Array} objects List of objects to process */ function processObjects( objects ) { var scene = new THREE.Scene(); scene.name = 'AuxScene'; for ( var i = 0; i < objects.length; i ++ ) { // We push directly to children instead of calling `add` to prevent // modify the .parent and break its original scene and hierarchy scene.children.push( objects[ i ] ); } processScene( scene ); } function processInput( input ) { input = input instanceof Array ? 
input : [ input ]; var objectsWithoutScene = []; for ( var i = 0; i < input.length; i ++ ) { if ( input[ i ] instanceof THREE.Scene ) { processScene( input[ i ] ); } else { objectsWithoutScene.push( input[ i ] ); } } if ( objectsWithoutScene.length > 0 ) { processObjects( objectsWithoutScene ); } for ( var i = 0; i < skins.length; ++ i ) { processSkin( skins[ i ] ); } for ( var i = 0; i < options.animations.length; ++ i ) { processAnimation( options.animations[ i ], input[ 0 ] ); } } processInput( input ); // Generate buffer // Create a new blob with all the dataviews from the buffers var blob = new Blob( dataViews, { type: 'application/octet-stream' } ); // Update the bytlength of the only main buffer and update the uri with the base64 representation of it if ( outputJSON.buffers && outputJSON.buffers.length > 0 ) { outputJSON.buffers[ 0 ].byteLength = blob.size; var reader = new window.FileReader(); if ( options.binary === true ) { // https://github.com/KhronosGroup/glTF/blob/master/specification/2.0/README.md#glb-file-format-specification var GLB_HEADER_BYTES = 12; var GLB_HEADER_MAGIC = 0x46546C67; var GLB_VERSION = 2; var GLB_CHUNK_PREFIX_BYTES = 8; var GLB_CHUNK_TYPE_JSON = 0x4E4F534A; var GLB_CHUNK_TYPE_BIN = 0x004E4942; reader.readAsArrayBuffer( blob ); reader.onloadend = function () { // Binary chunk. var binaryChunk = reader.result; var binaryChunkPrefix = new DataView( new ArrayBuffer( GLB_CHUNK_PREFIX_BYTES ) ); binaryChunkPrefix.setUint32( 0, binaryChunk.byteLength, true ); binaryChunkPrefix.setUint32( 4, GLB_CHUNK_TYPE_BIN, true ); // JSON chunk. delete outputJSON.buffers[ 0 ].uri; // Omitted URI indicates use of binary chunk. var jsonChunk = stringToArrayBuffer( JSON.stringify( outputJSON ) ); var jsonChunkPrefix = new DataView( new ArrayBuffer( GLB_CHUNK_PREFIX_BYTES ) ); jsonChunkPrefix.setUint32( 0, jsonChunk.byteLength, true ); jsonChunkPrefix.setUint32( 4, GLB_CHUNK_TYPE_JSON, true ); // GLB header. 
var header = new ArrayBuffer( GLB_HEADER_BYTES ); var headerView = new DataView( header ); headerView.setUint32( 0, GLB_HEADER_MAGIC, true ); headerView.setUint32( 4, GLB_VERSION, true ); var totalByteLength = GLB_HEADER_BYTES + jsonChunkPrefix.byteLength + jsonChunk.byteLength + binaryChunkPrefix.byteLength + binaryChunk.byteLength; headerView.setUint32( 8, totalByteLength, true ); var glbBlob = new Blob( [ header, jsonChunkPrefix, jsonChunk, binaryChunkPrefix, binaryChunk ], { type: 'application/octet-stream' } ); var glbReader = new window.FileReader(); glbReader.readAsArrayBuffer( glbBlob ); glbReader.onloadend = function () { onDone( glbReader.result ); }; }; } else { reader.readAsDataURL( blob ); reader.onloadend = function () { var base64data = reader.result; outputJSON.buffers[ 0 ].uri = base64data; onDone( outputJSON ); }; } } else { onDone( outputJSON ); } } };
mit
koreamic/gitlabhq
spec/requests/api/users_spec.rb
26936
require 'spec_helper' describe API::API, api: true do include ApiHelpers let(:user) { create(:user) } let(:admin) { create(:admin) } let(:key) { create(:key, user: user) } let(:email) { create(:email, user: user) } let(:omniauth_user) { create(:omniauth_user) } describe "GET /users" do context "when unauthenticated" do it "should return authentication error" do get api("/users") expect(response.status).to eq(401) end end context "when authenticated" do it "should return an array of users" do get api("/users", user) expect(response.status).to eq(200) expect(json_response).to be_an Array username = user.username expect(json_response.detect do |user| user['username'] == username end['username']).to eq(username) end it "should return one user" do get api("/users?username=#{omniauth_user.username}", user) expect(response.status).to eq(200) expect(json_response).to be_an Array expect(json_response.first['username']).to eq(omniauth_user.username) end end context "when admin" do it "should return an array of users" do get api("/users", admin) expect(response.status).to eq(200) expect(json_response).to be_an Array expect(json_response.first.keys).to include 'email' expect(json_response.first.keys).to include 'identities' expect(json_response.first.keys).to include 'can_create_project' expect(json_response.first.keys).to include 'two_factor_enabled' end end end describe "GET /users/:id" do it "should return a user by id" do get api("/users/#{user.id}", user) expect(response.status).to eq(200) expect(json_response['username']).to eq(user.username) end it "should return a 401 if unauthenticated" do get api("/users/9998") expect(response.status).to eq(401) end it "should return a 404 error if user id not found" do get api("/users/9999", user) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 Not found') end it "should return a 404 if invalid ID" do get api("/users/1ASDF", user) expect(response.status).to eq(404) end end describe "POST /users" do 
before{ admin } it "should create user" do expect do post api("/users", admin), attributes_for(:user, projects_limit: 3) end.to change { User.count }.by(1) end it "should create user with correct attributes" do post api('/users', admin), attributes_for(:user, admin: true, can_create_group: true) expect(response.status).to eq(201) user_id = json_response['id'] new_user = User.find(user_id) expect(new_user).not_to eq(nil) expect(new_user.admin).to eq(true) expect(new_user.can_create_group).to eq(true) end it "should create non-admin user" do post api('/users', admin), attributes_for(:user, admin: false, can_create_group: false) expect(response.status).to eq(201) user_id = json_response['id'] new_user = User.find(user_id) expect(new_user).not_to eq(nil) expect(new_user.admin).to eq(false) expect(new_user.can_create_group).to eq(false) end it "should create non-admin users by default" do post api('/users', admin), attributes_for(:user) expect(response.status).to eq(201) user_id = json_response['id'] new_user = User.find(user_id) expect(new_user).not_to eq(nil) expect(new_user.admin).to eq(false) end it "should return 201 Created on success" do post api("/users", admin), attributes_for(:user, projects_limit: 3) expect(response.status).to eq(201) end it "should not create user with invalid email" do post api('/users', admin), email: 'invalid email', password: 'password', name: 'test' expect(response.status).to eq(400) end it 'should return 400 error if name not given' do post api('/users', admin), attributes_for(:user).except(:name) expect(response.status).to eq(400) end it 'should return 400 error if password not given' do post api('/users', admin), attributes_for(:user).except(:password) expect(response.status).to eq(400) end it 'should return 400 error if email not given' do post api('/users', admin), attributes_for(:user).except(:email) expect(response.status).to eq(400) end it 'should return 400 error if username not given' do post api('/users', admin), 
attributes_for(:user).except(:username) expect(response.status).to eq(400) end it 'should return 400 error if user does not validate' do post api('/users', admin), password: 'pass', email: 'test@example.com', username: 'test!', name: 'test', bio: 'g' * 256, projects_limit: -1 expect(response.status).to eq(400) expect(json_response['message']['password']). to eq(['is too short (minimum is 8 characters)']) expect(json_response['message']['bio']). to eq(['is too long (maximum is 255 characters)']) expect(json_response['message']['projects_limit']). to eq(['must be greater than or equal to 0']) expect(json_response['message']['username']). to eq([Gitlab::Regex.namespace_regex_message]) end it "shouldn't available for non admin users" do post api("/users", user), attributes_for(:user) expect(response.status).to eq(403) end context 'with existing user' do before do post api('/users', admin), email: 'test@example.com', password: 'password', username: 'test', name: 'foo' end it 'should return 409 conflict error if user with same email exists' do expect do post api('/users', admin), name: 'foo', email: 'test@example.com', password: 'password', username: 'foo' end.to change { User.count }.by(0) expect(response.status).to eq(409) expect(json_response['message']).to eq('Email has already been taken') end it 'should return 409 conflict error if same username exists' do expect do post api('/users', admin), name: 'foo', email: 'foo@example.com', password: 'password', username: 'test' end.to change { User.count }.by(0) expect(response.status).to eq(409) expect(json_response['message']).to eq('Username has already been taken') end end end describe "GET /users/sign_up" do it "should redirect to sign in page" do get "/users/sign_up" expect(response.status).to eq(302) expect(response).to redirect_to(new_user_session_path) end end describe "PUT /users/:id" do let!(:admin_user) { create(:admin) } before { admin } it "should update user with new bio" do put api("/users/#{user.id}", 
admin), { bio: 'new test bio' } expect(response.status).to eq(200) expect(json_response['bio']).to eq('new test bio') expect(user.reload.bio).to eq('new test bio') end it 'should update user with his own email' do put api("/users/#{user.id}", admin), email: user.email expect(response.status).to eq(200) expect(json_response['email']).to eq(user.email) expect(user.reload.email).to eq(user.email) end it 'should update user with his own username' do put api("/users/#{user.id}", admin), username: user.username expect(response.status).to eq(200) expect(json_response['username']).to eq(user.username) expect(user.reload.username).to eq(user.username) end it "should update user's existing identity" do put api("/users/#{omniauth_user.id}", admin), provider: 'ldapmain', extern_uid: '654321' expect(response.status).to eq(200) expect(omniauth_user.reload.identities.first.extern_uid).to eq('654321') end it 'should update user with new identity' do put api("/users/#{user.id}", admin), provider: 'github', extern_uid: '67890' expect(response.status).to eq(200) expect(user.reload.identities.first.extern_uid).to eq('67890') expect(user.reload.identities.first.provider).to eq('github') end it "should update admin status" do put api("/users/#{user.id}", admin), { admin: true } expect(response.status).to eq(200) expect(json_response['is_admin']).to eq(true) expect(user.reload.admin).to eq(true) end it "should not update admin status" do put api("/users/#{admin_user.id}", admin), { can_create_group: false } expect(response.status).to eq(200) expect(json_response['is_admin']).to eq(true) expect(admin_user.reload.admin).to eq(true) expect(admin_user.can_create_group).to eq(false) end it "should not allow invalid update" do put api("/users/#{user.id}", admin), { email: 'invalid email' } expect(response.status).to eq(400) expect(user.reload.email).not_to eq('invalid email') end it "shouldn't available for non admin users" do put api("/users/#{user.id}", user), attributes_for(:user) 
expect(response.status).to eq(403) end it "should return 404 for non-existing user" do put api("/users/999999", admin), { bio: 'update should fail' } expect(response.status).to eq(404) expect(json_response['message']).to eq('404 Not found') end it "should raise error for invalid ID" do expect{put api("/users/ASDF", admin) }.to raise_error(ActionController::RoutingError) end it 'should return 400 error if user does not validate' do put api("/users/#{user.id}", admin), password: 'pass', email: 'test@example.com', username: 'test!', name: 'test', bio: 'g' * 256, projects_limit: -1 expect(response.status).to eq(400) expect(json_response['message']['password']). to eq(['is too short (minimum is 8 characters)']) expect(json_response['message']['bio']). to eq(['is too long (maximum is 255 characters)']) expect(json_response['message']['projects_limit']). to eq(['must be greater than or equal to 0']) expect(json_response['message']['username']). to eq([Gitlab::Regex.namespace_regex_message]) end context "with existing user" do before do post api("/users", admin), { email: 'test@example.com', password: 'password', username: 'test', name: 'test' } post api("/users", admin), { email: 'foo@bar.com', password: 'password', username: 'john', name: 'john' } @user = User.all.last end it 'should return 409 conflict error if email address exists' do put api("/users/#{@user.id}", admin), email: 'test@example.com' expect(response.status).to eq(409) expect(@user.reload.email).to eq(@user.email) end it 'should return 409 conflict error if username taken' do @user_id = User.all.last.id put api("/users/#{@user.id}", admin), username: 'test' expect(response.status).to eq(409) expect(@user.reload.username).to eq(@user.username) end end end describe "POST /users/:id/keys" do before { admin } it "should not create invalid ssh key" do post api("/users/#{user.id}/keys", admin), { title: "invalid key" } expect(response.status).to eq(400) expect(json_response['message']).to eq('400 (Bad request) 
"key" not given') end it 'should not create key without title' do post api("/users/#{user.id}/keys", admin), key: 'some key' expect(response.status).to eq(400) expect(json_response['message']).to eq('400 (Bad request) "title" not given') end it "should create ssh key" do key_attrs = attributes_for :key expect do post api("/users/#{user.id}/keys", admin), key_attrs end.to change{ user.keys.count }.by(1) end it "should return 405 for invalid ID" do post api("/users/ASDF/keys", admin) expect(response.status).to eq(405) end end describe 'GET /user/:uid/keys' do before { admin } context 'when unauthenticated' do it 'should return authentication error' do get api("/users/#{user.id}/keys") expect(response.status).to eq(401) end end context 'when authenticated' do it 'should return 404 for non-existing user' do get api('/users/999999/keys', admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 User Not Found') end it 'should return array of ssh keys' do user.keys << key user.save get api("/users/#{user.id}/keys", admin) expect(response.status).to eq(200) expect(json_response).to be_an Array expect(json_response.first['title']).to eq(key.title) end it "should return 405 for invalid ID" do get api("/users/ASDF/keys", admin) expect(response.status).to eq(405) end end end describe 'DELETE /user/:uid/keys/:id' do before { admin } context 'when unauthenticated' do it 'should return authentication error' do delete api("/users/#{user.id}/keys/42") expect(response.status).to eq(401) end end context 'when authenticated' do it 'should delete existing key' do user.keys << key user.save expect do delete api("/users/#{user.id}/keys/#{key.id}", admin) end.to change { user.keys.count }.by(-1) expect(response.status).to eq(200) end it 'should return 404 error if user not found' do user.keys << key user.save delete api("/users/999999/keys/#{key.id}", admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 User Not Found') end it 
'should return 404 error if key not foud' do delete api("/users/#{user.id}/keys/42", admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 Key Not Found') end end end describe "POST /users/:id/emails" do before { admin } it "should not create invalid email" do post api("/users/#{user.id}/emails", admin), {} expect(response.status).to eq(400) expect(json_response['message']).to eq('400 (Bad request) "email" not given') end it "should create email" do email_attrs = attributes_for :email expect do post api("/users/#{user.id}/emails", admin), email_attrs end.to change{ user.emails.count }.by(1) end it "should raise error for invalid ID" do post api("/users/ASDF/emails", admin) expect(response.status).to eq(405) end end describe 'GET /user/:uid/emails' do before { admin } context 'when unauthenticated' do it 'should return authentication error' do get api("/users/#{user.id}/emails") expect(response.status).to eq(401) end end context 'when authenticated' do it 'should return 404 for non-existing user' do get api('/users/999999/emails', admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 User Not Found') end it 'should return array of emails' do user.emails << email user.save get api("/users/#{user.id}/emails", admin) expect(response.status).to eq(200) expect(json_response).to be_an Array expect(json_response.first['email']).to eq(email.email) end it "should raise error for invalid ID" do put api("/users/ASDF/emails", admin) expect(response.status).to eq(405) end end end describe 'DELETE /user/:uid/emails/:id' do before { admin } context 'when unauthenticated' do it 'should return authentication error' do delete api("/users/#{user.id}/emails/42") expect(response.status).to eq(401) end end context 'when authenticated' do it 'should delete existing email' do user.emails << email user.save expect do delete api("/users/#{user.id}/emails/#{email.id}", admin) end.to change { user.emails.count }.by(-1) 
expect(response.status).to eq(200) end it 'should return 404 error if user not found' do user.emails << email user.save delete api("/users/999999/emails/#{email.id}", admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 User Not Found') end it 'should return 404 error if email not foud' do delete api("/users/#{user.id}/emails/42", admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 Email Not Found') end it "should raise error for invalid ID" do expect{delete api("/users/ASDF/emails/bar", admin) }.to raise_error(ActionController::RoutingError) end end end describe "DELETE /users/:id" do before { admin } it "should delete user" do delete api("/users/#{user.id}", admin) expect(response.status).to eq(200) expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound expect(json_response['email']).to eq(user.email) end it "should not delete for unauthenticated user" do delete api("/users/#{user.id}") expect(response.status).to eq(401) end it "shouldn't available for non admin users" do delete api("/users/#{user.id}", user) expect(response.status).to eq(403) end it "should return 404 for non-existing user" do delete api("/users/999999", admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 User Not Found') end it "should raise error for invalid ID" do expect{delete api("/users/ASDF", admin) }.to raise_error(ActionController::RoutingError) end end describe "GET /user" do it "should return current user" do get api("/user", user) expect(response.status).to eq(200) expect(json_response['email']).to eq(user.email) expect(json_response['is_admin']).to eq(user.is_admin?) expect(json_response['can_create_project']).to eq(user.can_create_project?) expect(json_response['can_create_group']).to eq(user.can_create_group?) 
expect(json_response['projects_limit']).to eq(user.projects_limit) end it "should return 401 error if user is unauthenticated" do get api("/user") expect(response.status).to eq(401) end end describe "GET /user/keys" do context "when unauthenticated" do it "should return authentication error" do get api("/user/keys") expect(response.status).to eq(401) end end context "when authenticated" do it "should return array of ssh keys" do user.keys << key user.save get api("/user/keys", user) expect(response.status).to eq(200) expect(json_response).to be_an Array expect(json_response.first["title"]).to eq(key.title) end end end describe "GET /user/keys/:id" do it "should return single key" do user.keys << key user.save get api("/user/keys/#{key.id}", user) expect(response.status).to eq(200) expect(json_response["title"]).to eq(key.title) end it "should return 404 Not Found within invalid ID" do get api("/user/keys/42", user) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 Not found') end it "should return 404 error if admin accesses user's ssh key" do user.keys << key user.save admin get api("/user/keys/#{key.id}", admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 Not found') end it "should return 404 for invalid ID" do get api("/users/keys/ASDF", admin) expect(response.status).to eq(404) end end describe "POST /user/keys" do it "should create ssh key" do key_attrs = attributes_for :key expect do post api("/user/keys", user), key_attrs end.to change{ user.keys.count }.by(1) expect(response.status).to eq(201) end it "should return a 401 error if unauthorized" do post api("/user/keys"), title: 'some title', key: 'some key' expect(response.status).to eq(401) end it "should not create ssh key without key" do post api("/user/keys", user), title: 'title' expect(response.status).to eq(400) expect(json_response['message']).to eq('400 (Bad request) "key" not given') end it 'should not create ssh key without title' do 
post api('/user/keys', user), key: 'some key' expect(response.status).to eq(400) expect(json_response['message']).to eq('400 (Bad request) "title" not given') end it "should not create ssh key without title" do post api("/user/keys", user), key: "somekey" expect(response.status).to eq(400) end end describe "DELETE /user/keys/:id" do it "should delete existed key" do user.keys << key user.save expect do delete api("/user/keys/#{key.id}", user) end.to change{user.keys.count}.by(-1) expect(response.status).to eq(200) end it "should return success if key ID not found" do delete api("/user/keys/42", user) expect(response.status).to eq(200) end it "should return 401 error if unauthorized" do user.keys << key user.save delete api("/user/keys/#{key.id}") expect(response.status).to eq(401) end it "should raise error for invalid ID" do expect{delete api("/users/keys/ASDF", admin) }.to raise_error(ActionController::RoutingError) end end describe "GET /user/emails" do context "when unauthenticated" do it "should return authentication error" do get api("/user/emails") expect(response.status).to eq(401) end end context "when authenticated" do it "should return array of emails" do user.emails << email user.save get api("/user/emails", user) expect(response.status).to eq(200) expect(json_response).to be_an Array expect(json_response.first["email"]).to eq(email.email) end end end describe "GET /user/emails/:id" do it "should return single email" do user.emails << email user.save get api("/user/emails/#{email.id}", user) expect(response.status).to eq(200) expect(json_response["email"]).to eq(email.email) end it "should return 404 Not Found within invalid ID" do get api("/user/emails/42", user) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 Not found') end it "should return 404 error if admin accesses user's email" do user.emails << email user.save admin get api("/user/emails/#{email.id}", admin) expect(response.status).to eq(404) 
expect(json_response['message']).to eq('404 Not found') end it "should return 404 for invalid ID" do get api("/users/emails/ASDF", admin) expect(response.status).to eq(404) end end describe "POST /user/emails" do it "should create email" do email_attrs = attributes_for :email expect do post api("/user/emails", user), email_attrs end.to change{ user.emails.count }.by(1) expect(response.status).to eq(201) end it "should return a 401 error if unauthorized" do post api("/user/emails"), email: 'some email' expect(response.status).to eq(401) end it "should not create email with invalid email" do post api("/user/emails", user), {} expect(response.status).to eq(400) expect(json_response['message']).to eq('400 (Bad request) "email" not given') end end describe "DELETE /user/emails/:id" do it "should delete existed email" do user.emails << email user.save expect do delete api("/user/emails/#{email.id}", user) end.to change{user.emails.count}.by(-1) expect(response.status).to eq(200) end it "should return success if email ID not found" do delete api("/user/emails/42", user) expect(response.status).to eq(200) end it "should return 401 error if unauthorized" do user.emails << email user.save delete api("/user/emails/#{email.id}") expect(response.status).to eq(401) end it "should raise error for invalid ID" do expect{delete api("/users/emails/ASDF", admin) }.to raise_error(ActionController::RoutingError) end end describe 'PUT /user/:id/block' do before { admin } it 'should block existing user' do put api("/users/#{user.id}/block", admin) expect(response.status).to eq(200) expect(user.reload.state).to eq('blocked') end it 'should not be available for non admin users' do put api("/users/#{user.id}/block", user) expect(response.status).to eq(403) expect(user.reload.state).to eq('active') end it 'should return a 404 error if user id not found' do put api('/users/9999/block', admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 User Not Found') end 
end describe 'PUT /user/:id/unblock' do before { admin } it 'should unblock existing user' do put api("/users/#{user.id}/unblock", admin) expect(response.status).to eq(200) expect(user.reload.state).to eq('active') end it 'should unblock a blocked user' do put api("/users/#{user.id}/block", admin) expect(response.status).to eq(200) expect(user.reload.state).to eq('blocked') put api("/users/#{user.id}/unblock", admin) expect(response.status).to eq(200) expect(user.reload.state).to eq('active') end it 'should not be available for non admin users' do put api("/users/#{user.id}/unblock", user) expect(response.status).to eq(403) expect(user.reload.state).to eq('active') end it 'should return a 404 error if user id not found' do put api('/users/9999/block', admin) expect(response.status).to eq(404) expect(json_response['message']).to eq('404 User Not Found') end it "should raise error for invalid ID" do expect{put api("/users/ASDF/block", admin) }.to raise_error(ActionController::RoutingError) end end end
mit
pauldubois777/VendingMachineKataA2
src/app/app.component.spec.ts
1766
/* tslint:disable:no-unused-variable */ import { TestBed, async } from '@angular/core/testing'; // Services import { CoinReturnService } from './services/coin-return/coin-return.service'; import { MessageService } from './services/message/message.service'; import { InitialInventory } from './services/inventory/initial-inventory'; import { InventoryService } from './services/inventory/inventory.service'; import { InitialBankCoins } from './services/bank/initial-bank-coins'; import { BankService } from './services/bank/bank.service'; import { InsertedCoinsService } from './services/inserted-coins/inserted-coins.service'; import { PurchaseService } from './services/purchase/purchase.service'; // Components import { AppComponent } from './app.component'; import { CoinSlotComponent } from './components/coin-slot/coin-slot.component'; import { CoinComponent } from './components/coin/coin.component'; import { MessageDisplayComponent } from './components/message-display/message-display.component'; describe('App: VendingMachineKataA2', () => { // beforeEach(() => { // TestBed.configureTestingModule({ // declarations: [ // AppComponent, // CoinSlotComponent, // CoinComponent, // MessageDisplayComponent // ], // providers: [ // CoinReturnService, // MessageService, // InitialInventory, // InventoryService, // BankService, // InitialBankCoins, // InsertedCoinsService, // PurchaseService // ] // }); // }); // it('should create the app', async(() => { // let fixture = TestBed.createComponent(AppComponent); // let app = fixture.debugElement.componentInstance; // expect(app).toBeTruthy(); // })); });
mit
fgborja/CivisAnalysis
routes/camara.js
6772
/* * GET camara resources and save to DB * NOTE: this functions will make a GET to camara.gov and update the mongo db. For regular queries use camaramongo.js */ exports.obterDeputados = function(requestify,xml2js,db){ return function(req, res){ requestify.post('http://www.camara.leg.br/SitCamaraWS/Deputados.asmx/ObterDeputados?').then(function(response) { xml2js.parseString(response.body, function(err,json){ db.collection('obterDeputados').insert(json, function(err, result){ res.json( (err === null) ? json : { msg: err } ); }); }) }); }; }; // Get the list of all 'articles' voted in plenary (representatives chamber = camara dos deputados) exports.listarProposicoesVotadasEmPlenario = function(requestify,xml2js,db){ return function(req, res){ var ano = req.params.ano; // get the list of roll calls in the year(==ano) // GET from camara the response requestify.get('http://www.camara.leg.br/SitCamaraWS/Proposicoes.asmx/ListarProposicoesVotadasEmPlenario?ano='+ano+'&tipo=').then(function(response) { // parse the recieved xml to JSON xml2js.parseString(response.body, function(err,json){ // update/insert the collection: if there is already one list with {ano} then update {json} else insert {ano,json}. db.collection('listarProposicoesVotadasEmPlenario').update({ano:ano},{ano:ano,data:json},{upsert:true}, function(err, result){ res.json( (err === null) ? 
{ano:ano, data:json} : { msg: err } ); }); }) }); }; }; // // INSERT in the new entries => datetime = new Date(year, month, day, hours, minutes, seconds, milliseconds); //http://www.camara.leg.br/SitCamaraWS/Proposicoes.asmx/ObterVotacaoProposicao?tipo=PL&numero=1992&ano=2007 // exports.obterVotacaoProposicao = function(requestify,xml2js,db){ return function(req, res){ var ano = req.params.ano; var tipo = req.params.tipo; var numero = req.params.numero; requestify.get('http://www.camara.leg.br/SitCamaraWS/Proposicoes.asmx/ObterVotacaoProposicao?tipo='+tipo+'&numero='+numero+'&ano='+ano).then(function(response) { xml2js.parseString(response.body, function(err,json){ // fix and add variables json = fixFormatObterVotacaoProposicao(json); // add the datetimeRollCallsMotion entry reference to the motion for (var i = 0; i < json.proposicao.Votacoes.Votacao.length; i++) { db.collection('datetimeRollCallsMotion') .update({'datetime':json.proposicao.Votacoes.Votacao[i].datetime,'tipo':tipo,'numero':numero,'ano':ano}, //query {'datetime':json.proposicao.Votacoes.Votacao[i].datetime,'tipo':tipo,'numero':numero,'ano':ano}, //insert/update {upsert:true}, // param function(err, result){ if(err != null){console.log(err)} } // callback ); }; // add to the collection of motionRollCalls and return the json; db.collection('obterVotacaoProposicao') .update({'proposicao.Sigla':tipo,'proposicao.Numero':numero,'proposicao.Ano':ano}, //query json, //insert/update {upsert:true}, // param function(err, result){ res.json( (err === null) ? 
json : { msg: err } ) } // callback ); }) }) // requestify }; }; exports.obterProposicao = function(requestify,xml2js,db){ return function(req, res){ var ano = req.params.ano; var tipo = req.params.tipo; var numero = req.params.numero; requestify.get('http://www.camara.leg.br/SitCamaraWS/Proposicoes.asmx/ObterProposicao?tipo='+tipo+'&numero='+numero+'&ano='+ano).then(function(response) { xml2js.parseString(response.body, function(err,json){ //FIX the proposicao.tipo => sometimes with whitespaces++ //console.log(json.proposicao.tipo); json.proposicao.tipo = json.proposicao.tipo.trim(); db.collection('obterProposicao') .update({'proposicao.tipo':tipo,'proposicao.numero':numero,'proposicao.ano':ano}, json,{upsert:true}, function(err, result){ res.json( (err === null) ? json : { msg: err } ); }); }) }); }; }; exports.obterDetalhesDeputado = function(requestify,xml2js,db){ return function(req, res){ var ideCadastro = req.params.ideCadastro; requestify.get('http://www.camara.leg.br/SitCamaraWS/Deputados.asmx/ObterDetalhesDeputado?ideCadastro='+ideCadastro+'&numLegislatura=').then(function(response) { xml2js.parseString(response.body, function(err,json){ json.ideCadastro = ideCadastro; db.collection('obterDetalhesDeputado') .update({ideCadastro:ideCadastro}, json,{upsert:true}, function(err, result){ res.json( (err === null) ? 
json : { msg: err } ); }); }) }); }; }; function fixFormatObterVotacaoProposicao(json){ //FIX the proposicao.tipo => sometimes with whitespaces++ json.proposicao.Sigla = json.proposicao.Sigla.trim(); // fix the object/array to array if(!isArray(json.proposicao.Votacoes.Votacao)){ json.proposicao.Votacoes.Votacao = [ json.proposicao.Votacoes.Votacao ]; } // ADD datetime Date() for (var i = 0; i < json.proposicao.Votacoes.Votacao.length; i++) { var day_month_year = json.proposicao.Votacoes.Votacao[i].Data.match(/\d+/g); var hour_minutes = json.proposicao.Votacoes.Votacao[i].Hora.match(/\d+/g); json.proposicao.Votacoes.Votacao[i].datetime = new Date(day_month_year[2], day_month_year[1]-1, day_month_year[0], hour_minutes[0], hour_minutes[1], 0, 0); }; return json; } // function to check size of properties of an object Object.size = function(obj) { var size = 0, key; for (key in obj) { if (obj.hasOwnProperty(key)) size++; } return size; }; // check is the Object is an Arrayroposicoes2012 function isArray(obj) { return Object.prototype.toString.call(obj) === '[object Array]'; }
mit
a-ignatov-parc/Marrow
static/js/core/core.observatory.js
9371
// version: 1.1.4 // ------------- // // Объект реализующий паттерн _pub/sub_ с полной поддержкой событий _jQuery_ но работающий на много // быстрее (http://jsperf.com/custom-pub-sub-test/3) // // __Пример:__ // // 1. Выстреливает глобальное событие `event_name` и передает в обработчик два аргумента; // // core.observatory.trigger('event_name', [argument1, argument2]); // // 1. Создание обработчика на глобальное событие `event_name`, который принимает в качестве // аргументов объект _jQuery_ события и два аргумента переданные при выстреливании события // // core.observatory.on('event_name', function(event, argument1, argument2) { // code here // }); // 1. Создание одного обработчика на несколько глобальных событий `event_name1` и `event_name2`. // // core.observatory.on('event_name1 event_name2', function(event) { // code here // }); // // 1. Создание обработчика на глобальное событие `event_name`, который принимает в качестве // аргументов объект _jQuery_ события и два аргумента переданные при выстреливании события, // а так же обработчик вызывается с указаным контекстом (передается третьим аргументом) // // core.observatory.on('event_name', function(event, argument1, argument2) { // code here // }, this); // // 1. Отписываемся от события // // core.observatory.off('event_name'); // // __Набор тестов на производительность:__ // // 1. http://jsperf.com/marrow-observatory/4 - Самый суровый кейс. // Каждый цикл навешивание событий -> Выстреливание -> Удаление события -> Выстреливание. // // 1. http://jsperf.com/marrow-observatory-vs-jquery-trigger-events/4 - Самый простой кейс. // Навешивание событий и в каждом цыкле только выстреливание. // // 1. http://jsperf.com/marrow-observatory-vs-jquery-bind-events-create-test - Тест на скорость // создания обсерватории и работы с событиями. // // __История версий:__ // // * `1.1.4` - Оптимизация методов для более быстрой работы. 
// * `1.1.3` - Fixed a bug where the context was swapped between identical
//   handlers belonging to different instances of the same class.
// * `1.1.2` - Handler invocation is wrapped in `try..catch` so that an error
//   thrown inside one handler cannot break the chain of remaining handlers.
// * `1.1.1` - Fixed a bug where `off` ignored the handler and context arguments.
// * `1.1.0` - All methods fully rewritten for speed and bug fixes.

window.WebApp || (window.WebApp = {});

// Lightweight pub/sub ("observatory") object. Event names may be
// space-separated lists and may carry a jQuery-style `.namespace` suffix
// (the part after the LAST dot). Internally each subscription gets a
// numeric handler id; `eventMap` maps event keys to arrays of those ids,
// while `handlers`/`contexts` are parallel arrays indexed by id.
window.WebApp.Observatory = function() {
  var regex = /\s+/,
    contexts = [],          // contexts[id]  -> `this` for handlers[id]
    handlers = [],          // handlers[id]  -> callback, or null when off/muted
    eventMap = {},          // event key     -> [handler ids]
    mutedNamesMap = {},     // event key     -> 1 while muted
    mutedHandlerMap = {};   // handler id    -> stashed callback while muted

  // Subscribe `handler` (optionally bound to `context`) to one or more
  // space-separated event names. For a name like "save.form" the id is
  // indexed three times: under "save.form", under ".form" (namespace key,
  // leading dot kept) and under "save", so any of those keys can trigger
  // or mute it. Returns `this` for chaining.
  this.on = function(eventName, handler, context) {
    var handlerId = handlers.length,
      name,
      event,
      events,
      namespace,
      delimiterIndex;

    if (!eventName || !handler || typeof(handler) !== 'function') {
      console.error('No event name or handler! Skipping...');
      return this;
    }

    if (eventName.indexOf(' ') >= 0) {
      events = eventName.split(regex);
    } else {
      events = [eventName];
    }

    // One shared id for all names in this call:
    // add the handler to the handlers collection...
    handlers.push(handler);

    // ...and its context to the contexts collection.
    contexts.push(context);

    for (;events.length;) {
      name = null;
      namespace = null;
      event = events.shift();
      delimiterIndex = event.lastIndexOf('.');

      if (delimiterIndex >= 0) {
        name = event.substring(0, delimiterIndex);
        // Note: substring(delimiterIndex) keeps the leading '.' in the key.
        namespace = event.substring(delimiterIndex);
      } else {
        name = event;
      }

      if (name && namespace) {
        if (!eventMap[event]) {
          eventMap[event] = [];
        }
        eventMap[event].push(handlerId);
      }

      if (namespace) {
        if (!eventMap[namespace]) {
          eventMap[namespace] = [];
        }
        eventMap[namespace].push(handlerId);
      }

      if (name) {
        if (!eventMap[name]) {
          eventMap[name] = [];
        }
        eventMap[name].push(handlerId);
      }

      // If this event (full key, bare name or namespace) is currently
      // muted, stash and mute the new handler immediately as well.
      if (mutedNamesMap[event] || mutedNamesMap[name] || mutedNamesMap[namespace]) {
        mutedHandlerMap[handlerId] = handlers[handlerId];
        handlers[handlerId] = null;
      }
    }
    return this;
  };

  // Unsubscribe from one or more space-separated event names. If `handler`
  // and/or `context` are given, only matching subscriptions are removed.
  // Removal nulls the handler/context slots; the ids stay in eventMap and
  // are simply skipped on trigger. Returns `this` for chaining.
  this.off = function(eventName, handler, context) {
    var name, event, events, handlerId;

    if (eventName.indexOf(' ') >= 0) {
      events = eventName.split(regex);
    } else {
      events = [eventName];
    }

    for (;events.length;) {
      name = events.shift();
      event = eventMap[name];

      if (event) {
        for (var i = 0, length = event.length; i < length; i++) {
          handlerId = event[i];

          if (handlers[handlerId]) {
            if (!handler || handler == handlers[handlerId]) {
              if (!context || contexts[handlerId] == context) {
                // Drop both the handler and its context.
                handlers[handlerId] = contexts[handlerId] = null;
              }
            }
          }
        }
      }
    }
    return this;
  };

  // Fire `eventName`, calling each live handler as handler(eventName, ...).
  // If `params` is a non-empty array it is spread as extra arguments;
  // otherwise it is passed through as a single second argument.
  // Returns `this` for chaining.
  this.trigger = function(eventName, params) {
    var events, handler, handlerId;

    if (!eventName) {
      console.error('No event name! Skipping...');
      return this;
    }

    if (eventName.indexOf(' ') >= 0) {
      // NOTE(review): String#replace with a string pattern removes only the
      // FIRST space, and gluing names together ("a b" -> "ab") looks up a
      // key that was never registered. Looks like a bug — confirm intent.
      eventName = eventName.replace(' ', '');
    }

    events = eventMap[eventName];

    if (events) {
      for (var i = 0, length = events.length; i < length; i++) {
        handlerId = events[i];
        handler = handlers[handlerId];

        if (handler) {
          // Invoke the handler with the prepared arguments; a throwing
          // handler must not break the rest of the notification chain.
          try {
            // Duck-type check for "non-empty array": only then take the
            // slower apply() path that spreads `params`.
            if (typeof(params) === 'object' && typeof(params.concat) === 'function' && typeof(params.length) === 'number' && params.length) {
              handler.apply(contexts[handlerId], [eventName].concat(params));
            } else {
              handler.call(contexts[handlerId], eventName, params);
            }
          } catch(e) {
            // Log and continue with the remaining handlers.
            console.error(e);
          }
        }
      }
    }
    return this;
  };

  // Temporarily silence all current handlers of the given event name(s);
  // their callbacks are stashed in mutedHandlerMap so unmute can restore
  // them. Returns `this` for chaining.
  this.mute = function(eventName) {
    var name, event, events, handlerId;

    if (!eventName) {
      console.error('No event name! Skipping...');
      return this;
    }

    if (eventName.indexOf(' ') >= 0) {
      events = eventName.split(regex);
    } else {
      events = [eventName];
    }

    for (;events.length;) {
      name = events.shift();

      if (!mutedNamesMap[name]) {
        mutedNamesMap[name] = 1;
        event = eventMap[name];

        if (event) {
          for (var i = 0, length = event.length; i < length; i++) {
            handlerId = event[i];

            if (!mutedHandlerMap[handlerId]) {
              mutedHandlerMap[handlerId] = handlers[handlerId];
              handlers[handlerId] = null;
            }
          }
        }
      }
    }
    return this;
  };

  // Restore handlers previously silenced by mute() for the given event
  // name(s). Returns `this` for chaining.
  // NOTE(review): mutedNamesMap[name] is never cleared here, so after an
  // unmute a second mute() of the same name is a no-op (the `if
  // (!mutedNamesMap[name])` guard fails) and new on() subscriptions for
  // that name start out muted. Looks like a bug — confirm before relying
  // on repeated mute/unmute cycles.
  this.unmute = function(eventName) {
    var name, event, events, handlerId;

    if (!eventName) {
      console.error('No event name! Skipping...');
      return this;
    }

    if (eventName.indexOf(' ') >= 0) {
      events = eventName.split(regex);
    } else {
      events = [eventName];
    }

    for (;events.length;) {
      name = events.shift();
      event = eventMap[name];

      if (event) {
        for (var i = 0, length = event.length; i < length; i++) {
          handlerId = event[i];

          if (mutedHandlerMap[handlerId]) {
            handlers[handlerId] = mutedHandlerMap[handlerId];
            delete mutedHandlerMap[handlerId];
          }
        }
      }
    }
    return this;
  };
};
mit
tonysneed/GK.Patterns.160718
Review/DecoratorReview/DecoratorReview/Properties/AssemblyInfo.cs
1406
// Assembly-level metadata for the DecoratorReview sample project
// (standard Visual Studio AssemblyInfo boilerplate).
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("DecoratorReview")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("DecoratorReview")]
[assembly: AssemblyCopyright("Copyright © 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components.  If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("01d6e5a3-8771-4b30-b31f-20cd7df4ccc7")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
mit
Jingru/CLRS
C15-Dynamic-Programming/rodcutting.cpp
1490
using namespace std; #include <iostream> #include <stdio.h> #include <algorithm> // this is the most basic recursive version implementation int cut_rod(int p[] , int n) { int q = -999; if (n == 0) { return 0; } for (int i=0; i<n; i++){ int res=p[i] + cut_rod(p,n - (i+1) ); q= max(q,res); } return q; } // memoized version of cut_rod int memoized_cut_rod (int p[] , int n) { // create an array table to store memoized values int temp[100]={}; int q=-1; if ( n== 0) { return 0; } if (temp[n]>0) { return temp[n]; } else { for (int i=0; i<n; i++) { int res=p[i] + cut_rod(p,n - (i+1) ); q= max(q,res); } return q; } } // cut_rod using bottom up approach int r[100]= {}; int bottom_cut_rod (int p[],int n) { r[0]=0; for (int j=0; j<n; j++) { int q=-5; for(int i=1; i-1<=j; i++) { // cout << "i: " << i << "j : " << j << "\n"; q=max(q,p[i - 1] + r[j - i + 1]); cout << q << "\n"; } r[j+1]=q; r[1]=0; cout << "r : " << r[j] << "\n" ; } return r[n]; } int main() { int arr[]={1,5,8,9,10,17,17,20,24,30,31,32,33,34,35,36,37,38}; // cout << cut_rod(arr,18) ; // cout << memoized_cut_rod(arr,10); cout << bottom_cut_rod(arr,9); return 0; }
mit
mattleib/o365api-simple-code-flow
SimpleWebAppCodeFlow/SimpleWebAppCodeFlow/Controllers/HomeController.cs
15385
//Copyright (c) Microsoft. All rights reserved. Licensed under the MIT license. See full license at the bottom of this file. // using Microsoft.IdentityModel.Clients.ActiveDirectory; using System; using System.Collections.Generic; using System.Linq; using System.Security.Cryptography.X509Certificates; using System.Web; using System.Web.Mvc; using System.Reflection; using System.Threading.Tasks; using System.Text.RegularExpressions; using Newtonsoft.Json; using Newtonsoft.Json.Linq; using System.Net.Http; using System.Text; using System.Globalization; using System.Security.Cryptography; using System.Net.Http.Headers; using System.Security.Claims; using SimpleWebAppCodeFlow.App_Classes; using SimpleWebAppCodeFlow.Models; namespace SimpleWebAppCodeFlow.Controllers { // From: Jason Johnston@https://github.com/jasonjoh/office365-azure-guides/blob/master/code/parse-token.cs static class Base64UrlEncoder { static char Base64PadCharacter = '='; static string DoubleBase64PadCharacter = String.Format(CultureInfo.InvariantCulture, "{0}{0}", Base64PadCharacter); static char Base64Character62 = '+'; static char Base64Character63 = '/'; static char Base64UrlCharacter62 = '-'; static char Base64UrlCharacter63 = '_'; public static byte[] DecodeBytes(string arg) { string s = arg; s = s.Replace(Base64UrlCharacter62, Base64Character62); // 62nd char of encoding s = s.Replace(Base64UrlCharacter63, Base64Character63); // 63rd char of encoding switch (s.Length % 4) // Pad { case 0: break; // No pad chars in this case case 2: s += DoubleBase64PadCharacter; break; // Two pad chars case 3: s += Base64PadCharacter; break; // One pad char default: throw new ArgumentException("Illegal base64url string!", arg); } return Convert.FromBase64String(s); // Standard base64 decoder } public static string Decode(string arg) { return Encoding.UTF8.GetString(DecodeBytes(arg)); } } public class HomeController : Controller { private static AppConfig appConfig = new AppConfig(); 
[AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = true)] public class MultipleButtonAttribute : ActionNameSelectorAttribute { public string Name { get; set; } public string Argument { get; set; } public override bool IsValidName(ControllerContext controllerContext, string actionName, MethodInfo methodInfo) { var isValidName = false; var keyValue = string.Format("{0}:{1}", Name, Argument); var value = controllerContext.Controller.ValueProvider.GetValue(keyValue); if (value != null) { controllerContext.Controller.ControllerContext.RouteData.Values[Name] = Argument; isValidName = true; } return isValidName; } } // // GET: /Home/ public async Task<ActionResult> Index() { // Force SSL if (!Request.IsSecureConnection) { string httplength = "http"; string nonsecureurl = Request.Url.AbsoluteUri.Substring(httplength.Length); string secureurl = String.Format("https{0}", nonsecureurl); RedirectResult result = Redirect(secureurl); result.ExecuteResult(this.ControllerContext); } // This is where state of the app is maintained and data passed between view and controller AppState appState = new AppState(); // Authorization back from AAD in a form post as requested in the authorize request if (!Request.Form.HasKeys()) { return View(appState); } // Cool we have a form post // Did it return with an error? if (!String.IsNullOrEmpty(Request.Form["error"])) { appState.ErrorMessage = Request.Form["error"]; return View(appState); } // Was it correlated with authorize request var authstate = Session[AppSessionVariables.AuthState] as String; Session[AppSessionVariables.AuthState] = null; if (String.IsNullOrEmpty(authstate)) { appState.ErrorMessage = "Oops. Something went wrong with the authorization state (No auth state). Please retry."; return View(appState); } if (!Request.Form["state"].Equals(authstate)) { appState.ErrorMessage = "Oops. Something went wrong with the authorization state (Invalid auth state). 
Please retry."; return View(appState); } // Authorized without error: Check to see if we have an ID token and code if (String.IsNullOrEmpty(Request.Form["id_token"]) || String.IsNullOrEmpty(Request.Form["code"])) { return View(appState); } try { // Get the TenantId out of the ID Token to address tenant specific token endpoint. // No validation of ID Token as the only info we need is the tenantID // If for any case your app wants to use the ID Token to authenticate // it must be validated. JwtToken openIDToken = GetTenantId(Request.Form["id_token"]); appState.TenantId = openIDToken.tid; appState.TenantDomain = openIDToken.domain; appState.LoggedOnUser = openIDToken.upn; // Get an app-only access token for the AAD Graph Rest APIs var accessToken = await GetAccessTokenByCode( Request.Form["code"], appState.TenantId); appState.AccessToken = accessToken; appState.AccessTokenAquiredWithoutError = true; appState.AppIsAuthorized = true; SetSessionInProgress(); } catch (Exception ex) { appState.ErrorMessage = ex.Message; } return View(appState); } private void SetSessionInProgress() { Session[AppSessionVariables.IsAuthorized] = true; } private bool IsSessionInProgress() { bool? 
inprogress = Session[AppSessionVariables.IsAuthorized] as bool?; if (null == inprogress) return false; return (bool)inprogress; } private ViewResult RedirectHome() { RedirectResult result = Redirect(appConfig.RedirectUri); result.ExecuteResult(this.ControllerContext); return View("Index", new AppState()); } [HttpPost] [MultipleButton(Name = "action", Argument = "StartOver")] public ActionResult StartOver(AppState passedAppState) { if (!IsSessionInProgress()) { return RedirectHome(); } AppState appState = new AppState(); Session.Clear(); UriBuilder signOutRequest = new UriBuilder(appConfig.SignoutUri.Replace("common", passedAppState.TenantId)); signOutRequest.Query = "post_logout_redirect_uri=" + HttpUtility.UrlEncode(appConfig.RedirectUri); RedirectResult result = Redirect(signOutRequest.Uri.ToString()); result.ExecuteResult(this.ControllerContext); return View("Index", appState); } [HttpPost] [MultipleButton(Name = "action", Argument = "Authorize")] public ActionResult Auhorize(AppState passedAppState) { passedAppState.AppIsAuthorized = false; // hit the common endpoint for authorization, // after authorization we will use the tenant specific endpoint for getting app-only tokens UriBuilder authorizeRequest = new UriBuilder(appConfig.AuthorizationUri); // Maintain state for authorize request to prvenet cross forgery attacks var authstate = Guid.NewGuid().ToString(); Session[AppSessionVariables.AuthState] = authstate; authorizeRequest.Query = "state=" + authstate + "&response_type=code+id_token" + "&scope=openid" + "&nonce=" + Guid.NewGuid().ToString() + "&client_id=" + appConfig.ClientId + "&redirect_uri=" + HttpUtility.UrlEncode(appConfig.RedirectUri) + "&resource=" + HttpUtility.UrlEncode(appConfig.ExchangeResourceUri) + #if DEBUG "&login_hint=" + "some-user@some-tenant.onmicrosoft.com" + #endif "&prompt=consent" + "&response_mode=form_post"; RedirectResult result = Redirect(authorizeRequest.Uri.ToString()); result.ExecuteResult(this.ControllerContext); return 
View("Index", passedAppState); } private string Base64UrlDecodeJwtTokenPayload(string base64UrlEncodedJwtToken) { string payload = base64UrlEncodedJwtToken.Split('.')[1]; return Base64UrlEncoder.Decode(payload); } public class JwtToken { public string tid { get; set; } public string upn { get; set; } public string domain { get { return (string.IsNullOrEmpty(upn)) ? "string.Empty" : upn.Split('@')[1]; } } } private JwtToken GetTenantId(string id_token) { string encodedOpenIdToken = id_token; string decodedToken = Base64UrlDecodeJwtTokenPayload(encodedOpenIdToken); JwtToken token = JsonConvert.DeserializeObject<JwtToken>(decodedToken); return token; } public class AADCodeFlowSuccessResponse { //{ // "token_type": "Bearer", // "expires_in": "3600", // "expires_on": "1423336547", // "not_before": "1423332647", // "resource": "https://outlook.office365.com/", // "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciO....ZVvynkUXjZPNg1oJWDKBymPL-U0WA" // "refresh_token": "eyJ0eXAiOiJKV1QiLCJhbGciO....ZVvynkUXjZPNg1oJWDKBymPL-U0WA" // "scope": "Mail.Read" //} public string token_type; public string expires_in; public string expires_on; public string not_before; public string resource; public string access_token; public string refresh_token; public string scope; }; public class AADErrorResponse { //{ // "error": "invalid_client", // "error_description": "AADSTS70002: Error ...", // "error_codes": [ // 70002, // 50012 // ], // "timestamp": "2015-02-07 18:44:09Z", // "trace_id": "dabcfa26-ea8d-46c5-81bc-ff57a0895629", // "correlation_id": "8e270f2d-ba05-42fb-a7ab-e819d142c843", // "submit_url": null, // "context": null //} public string error; public string error_description; public string[] error_codes; public string timestamp; public string trace_id; public string correlation_id; public string submit_url; public string context; } private async Task<String> GetAccessTokenByCode(string code, string tenantId) { string tokenIssueEndpoint = appConfig.TokenIssueingUri.Replace("common", 
tenantId); /** * build the request payload */ FormUrlEncodedContent tokenRequestForm; tokenRequestForm = new FormUrlEncodedContent( new[] { new KeyValuePair<string,string>("grant_type","authorization_code"), new KeyValuePair<string,string>("code",code), new KeyValuePair<string,string>("client_id", appConfig.ClientId), new KeyValuePair<string,string>("client_secret", appConfig.ClientSecret), new KeyValuePair<string,string>("redirect_uri", appConfig.RedirectUri) } ); /* * Do the web request */ HttpClient client = new HttpClient(); Task<string> requestString = tokenRequestForm.ReadAsStringAsync(); StringContent requestContent = new StringContent(requestString.Result); requestContent.Headers.ContentType = new MediaTypeHeaderValue("application/x-www-form-urlencoded"); requestContent.Headers.Add("client-request-id", System.Guid.NewGuid().ToString()); requestContent.Headers.Add("return-client-request-id", "true"); requestContent.Headers.Add("UserAgent", "MatthiasLeibmannsWebAppCodeFlow/0.1"); HttpResponseMessage response = client.PostAsync(tokenIssueEndpoint, requestContent).Result; JObject jsonResponse = JObject.Parse(response.Content.ReadAsStringAsync().Result); JsonSerializer jsonSerializer = new JsonSerializer(); if(response.IsSuccessStatusCode == true) { AADCodeFlowSuccessResponse s = (AADCodeFlowSuccessResponse)jsonSerializer.Deserialize(new JTokenReader(jsonResponse), typeof(AADCodeFlowSuccessResponse)); return s.access_token; } AADErrorResponse e = (AADErrorResponse)jsonSerializer.Deserialize(new JTokenReader(jsonResponse), typeof(AADErrorResponse)); throw new Exception(e.error_description); } } } // MIT License: // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // ""Software""), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // 
permit persons to whom the Software is furnished to do so, subject to // the following conditions: // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
mit
ma-lijun/jinghuan
static/h-ui.admin/js/H-ui.admin.js
7111
/* -----------H-ui前端框架------------- * H-ui.admin.js v2.4 * http://www.h-ui.net/ * Created & Modified by guojunhui * Date modified 15:42 2016.03.14 * * Copyright 2013-2016 北京颖杰联创科技有限公司 All rights reserved. * Licensed under MIT license. * http://opensource.org/licenses/MIT * */ var num=0,oUl=$("#min_title_list"),hide_nav=$("#Hui-tabNav"); /*获取顶部选项卡总长度*/ function tabNavallwidth(){ var taballwidth=0, $tabNav = hide_nav.find(".acrossTab"), $tabNavWp = hide_nav.find(".Hui-tabNav-wp"), $tabNavitem = hide_nav.find(".acrossTab li"), $tabNavmore =hide_nav.find(".Hui-tabNav-more"); if (!$tabNav[0]){return} $tabNavitem.each(function(index, element) { taballwidth+=Number(parseFloat($(this).width()+60)) }); $tabNav.width(taballwidth+25); var w = $tabNavWp.width(); if(taballwidth+25>w){ $tabNavmore.show()} else{ $tabNavmore.hide(); $tabNav.css({left:0}) } } /*左侧菜单响应式*/ function Huiasidedisplay(){ if($(window).width()>=768){ $(".Hui-aside").show() } } function getskincookie(){ var v = getCookie("Huiskin"); var hrefStr=$("#skin").attr("href"); if(v==null||v==""){ v="default"; } if(hrefStr!=undefined){ var hrefRes=hrefStr.substring(0,hrefStr.lastIndexOf('skin/'))+'skin/'+v+'/skin.css'; $("#skin").attr("href",hrefRes); } } function Hui_admin_tab(obj){ if($(obj).attr('_href')){ var bStop=false; var bStopIndex=0; var _href=$(obj).attr('_href'); var _titleName=$(obj).attr("data-title"); var topWindow=$(window.parent.document); var show_navLi=topWindow.find("#min_title_list li"); show_navLi.each(function() { if($(this).find('span').attr("data-href")==_href){ bStop=true; bStopIndex=show_navLi.index($(this)); return false; } }); if(!bStop){ creatIframe(_href,_titleName); min_titleList(); } else{ show_navLi.removeClass("active").eq(bStopIndex).addClass("active"); var iframe_box=topWindow.find("#iframe_box"); iframe_box.find(".show_iframe").hide().eq(bStopIndex).show().find("iframe").attr("src",_href); } } } function min_titleList(){ var topWindow=$(window.parent.document); var 
show_nav=topWindow.find("#min_title_list"); var aLi=show_nav.find("li"); }; function creatIframe(href,titleName){ var topWindow=$(window.parent.document); var show_nav=topWindow.find('#min_title_list'); show_nav.find('li').removeClass("active"); var iframe_box=topWindow.find('#iframe_box'); show_nav.append('<li class="active"><span data-href="'+href+'">'+titleName+'</span><i></i><em></em></li>'); var taballwidth=0, $tabNav = topWindow.find(".acrossTab"), $tabNavWp = topWindow.find(".Hui-tabNav-wp"), $tabNavitem = topWindow.find(".acrossTab li"), $tabNavmore =topWindow.find(".Hui-tabNav-more"); if (!$tabNav[0]){return} $tabNavitem.each(function(index, element) { taballwidth+=Number(parseFloat($(this).width()+60)) }); $tabNav.width(taballwidth+25); var w = $tabNavWp.width(); if(taballwidth+25>w){ $tabNavmore.show()} else{ $tabNavmore.hide(); $tabNav.css({left:0}) } var iframeBox=iframe_box.find('.show_iframe'); iframeBox.hide(); iframe_box.append('<div class="show_iframe"><div class="loading"></div><iframe frameborder="0" src='+href+'></iframe></div>'); var showBox=iframe_box.find('.show_iframe:visible'); showBox.find('iframe').load(function(){ showBox.find('.loading').hide(); }); } function removeIframe(){ var topWindow = $(window.parent.document); var iframe = topWindow.find('#iframe_box .show_iframe'); var tab = topWindow.find(".acrossTab li"); var showTab = topWindow.find(".acrossTab li.active"); var showBox=topWindow.find('.show_iframe:visible'); var i = showTab.index(); tab.eq(i-1).addClass("active"); iframe.eq(i-1).show(); tab.eq(i).remove(); iframe.eq(i).remove(); } /*弹出层*/ /* 参数解释: title 标题 url 请求的url id 需要操作的数据id w 弹出层宽度(缺省调默认值) h 弹出层高度(缺省调默认值) */ function layer_show(title,url,w,h){ if (title == null || title == '') { title=false; }; if (url == null || url == '') { url="404.html"; }; if (w == null || w == '') { w=800; }; if (h == null || h == '') { h=($(window).height() - 50); }; layer.open({ type: 2, area: [w+'px', h +'px'], fix: false, //不固定 maxmin: true, 
shade:0.4, title: title, content: url }); } /*关闭弹出框口*/ function layer_close(){ var index = parent.layer.getFrameIndex(window.name); parent.layer.close(index); } $(function(){ getskincookie(); //layer.config({extend: 'extend/layer.ext.js'}); Huiasidedisplay(); var resizeID; $(window).resize(function(){ clearTimeout(resizeID); resizeID = setTimeout(function(){ Huiasidedisplay(); },500); }); $(".nav-toggle").click(function(){ $(".Hui-aside").slideToggle(); }); $(".Hui-aside").on("click",".menu_dropdown dd li a",function(){ if($(window).width()<768){ $(".Hui-aside").slideToggle(); } }); /*左侧菜单*/ $.Huifold(".menu_dropdown dl dt",".menu_dropdown dl dd","fast",1,"click"); /*选项卡导航*/ $(".Hui-aside").on("click",".menu_dropdown a",function(){ Hui_admin_tab(this); }); $(document).on("click","#min_title_list li",function(){ var bStopIndex=$(this).index(); var iframe_box=$("#iframe_box"); $("#min_title_list li").removeClass("active").eq(bStopIndex).addClass("active"); iframe_box.find(".show_iframe").hide().eq(bStopIndex).show(); }); $(document).on("click","#min_title_list li i",function(){ var aCloseIndex=$(this).parents("li").index(); $(this).parent().remove(); $('#iframe_box').find('.show_iframe').eq(aCloseIndex).remove(); num==0?num=0:num--; tabNavallwidth(); }); $(document).on("dblclick","#min_title_list li",function(){ var aCloseIndex=$(this).index(); var iframe_box=$("#iframe_box"); if(aCloseIndex>0){ $(this).remove(); $('#iframe_box').find('.show_iframe').eq(aCloseIndex).remove(); num==0?num=0:num--; $("#min_title_list li").removeClass("active").eq(aCloseIndex-1).addClass("active"); iframe_box.find(".show_iframe").hide().eq(aCloseIndex-1).show(); tabNavallwidth(); }else{ return false; } }); tabNavallwidth(); $('#js-tabNav-next').click(function(){ num==oUl.find('li').length-1?num=oUl.find('li').length-1:num++; toNavPos(); }); $('#js-tabNav-prev').click(function(){ num==0?num=0:num--; toNavPos(); }); function toNavPos(){ oUl.stop().animate({'left':-num*100},100); } /*换肤*/ 
// $("#Hui-skin .dropDown-menu a").click(function(){ // var v = $(this).attr("data-val"); // setCookie("Huiskin", v); // var hrefStr=$("#skin").attr("href"); // var hrefRes=hrefStr.substring(0,hrefStr.lastIndexOf('skin/'))+'skin/'+v+'/skin.css'; // $(window.frames.document).contents().find("#skin").attr("href",hrefRes); // }); }); //datagrid 表格宽带自适应 function fixWidth(percent) { return (document.body.clientWidth) * percent ; } function fixHeight(percent) { return (document.body.clientHeight) * percent ; }
mit
andrewsomething/droplet_kit
lib/droplet_kit/models/droplet.rb
1070
module DropletKit
  # Resource model for a Droplet. Attribute accessors are declared
  # dynamically through BaseModel's `attribute` helper rather than
  # written out by hand.
  class Droplet < BaseModel
    # Attributes read back from the API.
    [:id, :name, :memory, :vcpus, :disk, :locked, :created_at, :status, :backup_ids,
     :snapshot_ids, :action_ids, :features, :region, :image, :networks, :kernel,
     :size_slug, :tags, :volume_ids].each do |key|
      attribute(key)
    end

    # Used for creates
    attribute :names
    attribute :volumes
    attribute :ssh_keys
    attribute :backups
    attribute :monitoring
    attribute :size
    attribute :ipv6
    attribute :user_data
    attribute :private_networking
    attribute :vpc_uuid

    # Public IPv4 address as a string, or nil when no public v4 network
    # is attached.
    def public_ip
      network = network_for(:v4, 'public')
      network && network.ip_address
    end

    # Private IPv4 address as a string, or nil when no private v4 network
    # is attached.
    def private_ip
      network = network_for(:v4, 'private')
      network && network.ip_address
    end

    private

    # First network of the given IP version (:v4 or :v6) whose `type`
    # matches `publicity` ('public' or 'private'); nil if none matches.
    def network_for(type, publicity)
      networks = case type
      when :v4 then self.networks.v4
      when :v6 then self.networks.v6
      end

      networks.find do |network|
        network.type == publicity
      end
    end
  end
end
mit
Teddy-Zhu/SilentGo-JSON
src/main/java/com/silentgo/json/parser/Reader.java
691
package com.silentgo.json.parser; /** * Project : SilentGo * Package : com.silentgo.json * * @author <a href="mailto:teddyzhu15@gmail.com" target="_blank">teddyzhu</a> * <p> * Created by teddyzhu on 2017/1/4. */ public abstract class Reader { public static final char NULL = 0; public int pos; public int end; public abstract char prev(); public abstract char next(); public abstract boolean hasNext(); public abstract char peek(); public abstract char peek(int position); public abstract char peekNext(); public abstract String peekRange(int start, int length); public abstract Reader expand(int pos, int end); }
mit
fahad19/tydel
test/collection/Collection.toJS.spec.js
867
/* global describe, it */

import { expect } from 'chai';

import Types from '../../src/Types';
import createModel from '../../src/createModel';
import createCollection from '../../src/createCollection';
import isModel from '../../src/isModel';

// Spec for Collection#toJS(): a collection of Model instances must
// serialize back to a plain array of plain objects.
describe('Collection :: toJS()', function () {
  it('converts to plain array of plain objects', function () {
    const Person = createModel({
      name: Types.string.isRequired
    });
    const People = createCollection(Person);

    const people = new People([
      { name: 'Harry' },
      { name: 'Hermione' },
      { name: 'Ron' }
    ]);

    // Internally every element is wrapped in a Model instance...
    expect(isModel(people.at(0))).to.eql(true);
    expect(isModel(people.at(1))).to.eql(true);
    expect(isModel(people.at(2))).to.eql(true);

    // ...but toJS() unwraps them to the original plain data.
    expect(people.toJS()).to.eql([
      { name: 'Harry' },
      { name: 'Hermione' },
      { name: 'Ron' }
    ]);
  });
});
mit
oobj/oobj-directives
src/oobj-login/oobj-login.directive.js
693
/**
 * Created by ATILLA on 20/10/2015.
 *
 * oobjLogin: reusable login-form directive (usable as element or attribute)
 * with an isolate scope; all labels and callbacks are supplied by the host.
 */
(function () {
  'use strict';

  angular
    .module('oobj-directives')
    .directive('oobjLogin', oobjLogin);

  /** @ngInject */
  function oobjLogin() {
    return {
      restrict: 'EA',
      // Template path is resolved relative to the consuming app's base URL.
      templateUrl: 'js/directives/oobj-login/oobj-login.html',
      scope: {
        logo: '@?',                // optional logo image URL (string binding)
        labelBtnLogin: '@?',       // optional label for the login button
        login: '&',                // expression invoked to perform the login
        labelForgotPassword: '@?', // optional "forgot password" link label
        labelNewUser: '@?',        // optional "new user" link label
        forgotPassword: '&',       // expression for the "forgot password" action
        newUser: '&',              // expression for the "new user" action
        username: '=?',            // optional two-way bound username model
        password: '=?'             // optional two-way bound password model
      }
    };
  }
})();
mit
Destevit/Convay
docs/html/search/enumvalues_4.js
223
// Generated search-index data (Doxygen HTML search output, judging by the
// docs/html/search path and the mangled anchor names). Do not edit by hand;
// regenerate the documentation instead.
// Format per entry: [search key, [display name, [anchor URL, flag, qualified scope]]].
var searchData=
[
  ['safedrawing',['safeDrawing',['../class_convay_1_1_win_forms_game_1_1_settings_form.html#a7918e2a1b1587472aa065d4db7a13537a580b802bad2f7fd52207c99b2f4b61cf',1,'Convay::WinFormsGame::SettingsForm']]]
];
mit