repo_name
stringlengths 4
116
| path
stringlengths 4
379
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
AndreyAttr/Evo
|
ClientWeb/Evolution/Services/di-interaction-service-tokens.ts
|
2970
|
import {IEvent, IInteractEvent} from "./event-interaction.service";
import {Observable} from "rxjs/Observable";
//Abstract base token classes for 2 possible kinds of event:
//Class used as a "narrowing" interface that exposes properties of real instance of service(which is hidden under the token) and the same time it
//may hide properties which must be hidden. So, it's really narrowing: https://angular.io/guide/dependency-injection-in-action#class-interface
//But here we use this "narrowing" interface classes also as base classes in token hierarchy(basically to reduce duplication of code which would
//be caused by implementation real interfaces: IEvent<T>, IInteractEvent<T1,T2> in each token class).
//When you use a class this way, it's called a class-interface. The key benefit of a class-interface is that you can get the strong-typing of an
//interface and you can use it as a provider token in the way you would a normal class.
/**
 * Class-interface token for one-way events. It exposes the observable of
 * generated values and the `event()` trigger of whatever service is registered
 * under the token. The throwing body is never executed: the token is only used
 * as a DI key, and Angular substitutes a real service instance at injection time.
 */
abstract class BaseEventToken<T> implements IEvent<T>{
    // Stream of values emitted by the underlying event service.
    generated$: Observable<T>;
    event(value: T): void {
        throw new Error('Method not implemented.');
    }
}
/**
 * Class-interface token for two-way (request/confirm) interactions: adds the
 * confirmation stream and trigger on top of the one-way event contract.
 * As with the base token, the throwing body is a placeholder — DI provides a
 * real implementation.
 */
abstract class BaseInteractionToken<T1,T2> extends BaseEventToken<T1> implements IInteractEvent<T1,T2>{
    // Stream of confirmation values produced in response to generated events.
    confirmed$: Observable<T2>;
    confirm(value: T2): void {
        throw new Error('Method not implemented.');
    }
}
//Abstract provider token classes to register SINGLETON event instance with each of them:
// DI token under which the singleton multicast "remove gene" event service is registered.
abstract class RemoveGeneInteractionMultiCastEventToken<T> extends BaseEventToken<T>{}
//Question: When you IMPLEMENT(not extends) base class: https://angular.io/guide/dependency-injection-in-action#the-parent-class-interface
/*It's also possible to use base class as a class-interface("narrowing" interface) but that leads to neccessarity to implement all the members
* of that base class as it serves as interface. So, to reduce code dublication we do not use that approach(but it's possible):*/
// abstract class RemoveGeneInteractionMultiCastEventToken<T> implements BaseEventToken<T>{
// generated$: Observable<T>;
//
// event(value: T): void {
// throw new Error('Method not implemented.');
// }
// }
//Remark: The key benefit of an abstract class-interface is that you can get the strong-typing of an interface and you can use it
//Remark: as a provider token in the way you would a normal class(interface can't be used as provider token, because
//Remark: in JS there are no interfaces). https://angular.io/guide/dependency-injection-in-action#class-interface
//Note: You can't use an interface as a provider token because interfaces are not JavaScript objects. They exist only in the TypeScript design space.
// Concrete DI tokens, one per interaction kind; each is registered with its own singleton service.
abstract class SiteInteractionToken<TInput,TOutput> extends BaseInteractionToken<TInput, TOutput>{}
abstract class GeneInteractionToken<TInput,TOutput> extends BaseInteractionToken<TInput,TOutput>{}
export {SiteInteractionToken, GeneInteractionToken, RemoveGeneInteractionMultiCastEventToken}
|
mit
|
CriticalPathTraining/GSA2016
|
Modules/13_NodeJS/Lab/Solution/project1/gulpfile.js
|
1089
|
var gulp = require('gulp');
var clean = require('gulp-clean');
var ts = require("gulp-typescript");
var tsProject = ts.createProject("tsconfig.json");
var browserSync = require('browser-sync');
// Remove the previous build output so every build starts from a clean slate.
gulp.task('clean', function () {
    console.log("Running clean task");
    var distContents = gulp.src('dist/', { read: false });
    return distContents.pipe(clean());
});
// Copy static assets and compile TypeScript into dist/.
gulp.task('build', ['clean'], function() {
    // NOTE(review): the three copy streams below are neither returned nor
    // merged, so the task may signal completion before the HTML/CSS/image
    // copies have finished. Gulp 3 cannot merge streams without an extra
    // dependency (e.g. merge-stream) — confirm whether the race matters here.
    console.log("Running build task");
    gulp.src('src/**/*.html')
        .pipe(gulp.dest('dist'));
    gulp.src('src/css/**/*.css')
        .pipe(gulp.dest('dist/css'));
    gulp.src('src/css/img/**/*.png')
        .pipe(gulp.dest('dist/css/img'));
    // Only the TypeScript compile stream is awaited by gulp.
    return tsProject.src()
        .pipe(tsProject())
        .js.pipe(gulp.dest("."));
});
// Build first, then serve the dist folder through BrowserSync.
gulp.task('start', ['build'], function () {
    console.log("Running start task");
    var serverConfig = { server: { baseDir: 'dist' } };
    return browserSync.init(serverConfig);
});
// Rebuild, then tell every connected browser to reload.
gulp.task('refresh', ['build'], function () {
    console.log("Running refresh task");
    return browserSync.reload();
});
// Serve the app and rebuild + refresh the browser whenever a source file changes.
gulp.task('serve', ['start'], function() {
    // Bug fix: this task previously logged "Running build task" (copy-paste).
    console.log("Running serve task");
    gulp.watch("src/**/*", ['refresh']);
});
|
mit
|
RealityFactory/CEGUI
|
samples/browser/src/CEGuiOpenGL3BaseApplication.cpp
|
3521
|
/***********************************************************************
created: 24/9/2004
author: Paul D Turner
*************************************************************************/
/***************************************************************************
* Copyright (C) 2004 - 2009 Paul D Turner & The CEGUI Development Team
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
***************************************************************************/
// We need to include windows.h here before glfw is included (via
// CEGuiOpenGL3BaseApplication.h) or there will be a warning when GL.h includes
// windows.h (via GL3Renderer.h)
#if defined(CEGUI_USE_GLEW) && (defined( __WIN32__ ) || defined( _WIN32 ))
#include "windows.h"
#endif
#include "CEGuiOpenGL3BaseApplication.h"
#include "CEGUI/RendererModules/OpenGL/GL3Renderer.h"
//----------------------------------------------------------------------------//
// Constructor: order matters — GLFW must be initialised and the window (and
// thus the GL context) created before the renderer can attach to it.
CEGuiOpenGL3BaseApplication::CEGuiOpenGL3BaseApplication()
{
    initGLFW();
    setGLFWWindowCreationHints();
    createGLFWWindow();
    setGLFWAppConfiguration();
    // create() hands back a renderer owned by CEGUI; we keep a pointer in the
    // base-class member for later bootstrapping/cleanup.
    d_renderer = &CEGUI::OpenGL3Renderer::create();
}
//----------------------------------------------------------------------------//
// Request an OpenGL 3.2 core-profile context. The GLFW 2 and GLFW 3 hint APIs
// differ, hence the version-gated branches below.
void CEGuiOpenGL3BaseApplication::setGLFWWindowCreationHints()
{
#if GLFW_VERSION_MAJOR >= 3
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// GLFW >= 3.2 lets us choose how the context is created (EGL vs native);
// the build system selects exactly one via the CEGUI_SAMPLES_* macros.
#if ( (GLFW_VERSION_MAJOR >= 4) \
    || ((GLFW_VERSION_MAJOR == 3) && (GLFW_VERSION_MINOR >= 2)))
#if CEGUI_SAMPLES_DESKTOP_OPENGL_CONTEXT_CREATION_API == CEGUI_SAMPLES_OPENGL_CONTEXT_CREATION_API_EGL
    glfwWindowHint(GLFW_CONTEXT_CREATION_API, GLFW_EGL_CONTEXT_API);
#elif CEGUI_SAMPLES_DESKTOP_OPENGL_CONTEXT_CREATION_API == CEGUI_SAMPLES_OPENGL_CONTEXT_CREATION_API_NATIVE
    glfwWindowHint(GLFW_CONTEXT_CREATION_API, GLFW_NATIVE_CONTEXT_API);
#else
#error Invalid "CEGUI_SAMPLES_DESKTOP_OPENGL_CONTEXT_CREATION_API"
#endif
#endif
#else // GLFW_VERSION_MAJOR <= 2
    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
    glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#endif
}
//----------------------------------------------------------------------------//
|
mit
|
andion/CursoiOS
|
academy/db/schema.rb
|
1820
|
# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20120719101530) do
  # Class sessions: a named time slot with a free-text description.
  create_table "aecomo_classes", :force => true do |t|
    t.string "name"
    t.datetime "starts_at"
    t.datetime "ends_at"
    t.text "description"
    t.datetime "created_at", :null => false
    t.datetime "updated_at", :null => false
  end
  # Shared links/materials; the photo_* columns follow an attachment-library
  # (Paperclip-style) naming convention — verify against the app's models.
  create_table "resources", :force => true do |t|
    t.string "name"
    t.string "link"
    t.string "description"
    t.datetime "created_at", :null => false
    t.datetime "updated_at", :null => false
    t.string "photo_file_name"
    t.string "photo_content_type"
    t.integer "photo_file_size"
    t.datetime "photo_updated_at"
  end
  # NOTE(review): "stundents" looks like a typo for "students", but it matches
  # the migration that created the table; renaming requires a new migration,
  # never a hand edit of this auto-generated file.
  create_table "stundents", :force => true do |t|
    t.string "name"
    t.string "lastname"
    t.string "city"
    t.string "email"
    t.datetime "created_at", :null => false
    t.datetime "updated_at", :null => false
    t.string "avatar_file_name"
    t.string "avatar_content_type"
    t.integer "avatar_file_size"
    t.datetime "avatar_updated_at"
  end
end
|
mit
|
datalogics-robb/scons
|
bin/scons-unzip.py
|
1802
|
#!/usr/bin/env python
#
# A quick script to unzip a .zip archive and put the files in a
# subdirectory that matches the basename of the .zip file.
#
# This is actually generic functionality, it's not SCons-specific, but
# I'm using this to make it more convenient to manage working on multiple
# changes on Windows, where I don't have access to my Aegis tools.
#
import getopt
import os.path
import sys
import zipfile
helpstr = """\
Usage: scons-unzip.py [-o outdir] zipfile
Options:
-o DIR, --out DIR Change output directory name to DIR
-v, --verbose Print file names when extracting
"""
opts, args = getopt.getopt(sys.argv[1:],
"o:v",
['out=', 'verbose'])
outdir = None
printname = lambda x: x
for o, a in opts:
if o == '-o' or o == '--out':
outdir = a
elif o == '-v' or o == '--verbose':
def printname(x):
print x
if len(args) != 1:
sys.stderr.write("scons-unzip.py: \n")
sys.exit(1)
zf = zipfile.ZipFile(str(args[0]), 'r')

# Default the output directory to the archive's basename without its extension.
if outdir is None:
    outdir, _ = os.path.splitext(os.path.basename(args[0]))
def outname(n, outdir=outdir):
    """Return archive member path `n` re-rooted under `outdir`.

    NOTE(review): the loop drops the archive's top-most path component (the
    final tail produced when the head becomes empty) and substitutes outdir
    in its place — this looks intentional for single-rooted archives; a
    member with no directory part maps to outdir itself.
    """
    parts = []
    while 1:
        n, tail = os.path.split(n)
        if not n:
            break
        parts.append(tail)
    parts.append(outdir)
    parts.reverse()
    # Bug fix (idiom): apply() is deprecated and removed in Python 3;
    # argument unpacking is equivalent and works in Python 2 as well.
    return os.path.join(*parts)
# Extract every member, creating directories as needed.
for name in zf.namelist():
    dest = outname(name)
    dir = os.path.dirname(dest)
    try:
        os.makedirs(dir)
    except OSError:
        # Narrowed from a bare except: the directory already exists (or dest
        # has no directory component); other errors should not be swallowed.
        pass
    printname(dest)
    # If the file exists, delete it before writing to it so that we don't
    # end up writing through a stale symlink.
    if os.path.isfile(dest) or os.path.islink(dest):
        os.unlink(dest)
    if not os.path.isdir(dest):
        # Bug fix: open in binary mode — archive members are arbitrary bytes
        # and text mode corrupts them on Windows.
        open(dest, 'wb').write(zf.read(name))
|
mit
|
Kunstmaan/BootstrapCK4-Skin
|
plugins/image/lang/en-gb.js
|
954
|
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// British-English strings for the CKEditor "image" plugin dialog. Keys map
// 1:1 onto the plugin's UI slots; the values are user-facing runtime text.
CKEDITOR.plugins.setLang( 'image', 'en-gb', {
alertUrl: 'Please type the image URL',
alt: 'Alternative Text',
border: 'Border',
btnUpload: 'Send it to the Server',
button2Img: 'Do you want to transform the selected image button on a simple image?',
hSpace: 'HSpace',
img2Button: 'Do you want to transform the selected image on a image button?',
infoTab: 'Image Info',
linkTab: 'Link',
lockRatio: 'Lock Ratio',
menu: 'Image Properties',
resetSize: 'Reset Size',
title: 'Image Properties',
titleButton: 'Image Button Properties',
upload: 'Upload',
urlMissing: 'Image source URL is missing.',
vSpace: 'VSpace',
validateBorder: 'Border must be a whole number.',
validateHSpace: 'HSpace must be a whole number.',
validateVSpace: 'VSpace must be a whole number.'
} );
|
mit
|
phpexpertsinc/DoctrineDetective
|
PHPExperts/DoctrineDetectiveBundle/DoctrineDetectiveBundle.php
|
153
|
<?php
namespace PHPExperts\DoctrineDetectiveBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
/**
 * Symfony bundle entry point for Doctrine Detective.
 *
 * The class body is intentionally empty: registering the bundle class is all
 * Symfony needs to discover the bundle's resources and DI configuration.
 */
class DoctrineDetectiveBundle extends Bundle
{
}
|
mit
|
anjali-ui-dev/JavaScriptPaintApp
|
app.js
|
3685
|
// Paint App Javascript
'use strict';
(function () {
    var canvas = document.getElementById('paintArea');
    var context = canvas.getContext('2d');
    var drawnCircles = [];           // all committed circles, in draw order
    var currentDrawingcircle ;       // circle being sized between mousedown and mouseup
    var isDebugMode = false;         // gates console tracing
    var refreshInterval = 20;        // repaint period in ms
    var currentSelectedIndex = -1;   // index of the circle being dragged, -1 if none
    var isDrag = false;              // true while dragging an existing circle
    // Prevent text-selection from hijacking drag gestures on the canvas.
    canvas.onselectstart = function () { return false; }
// A fillable circle anchored at a centre point. New circles start pink with a
// small default radius and are marked not-yet-drawable until sized by the user.
function Circle (cx, cy) {
    this.center = { x: cx, y: cy };
    this.fill = "pink";
    this.radius = 10;
    this.drawable = false;
}
// Size the circle so its edge passes through (px, py); logs when debugging.
Circle.prototype.setRadius = function (px, py) {
    var dx = px - this.center.x;
    var dy = py - this.center.y;
    this.radius = Math.sqrt(Math.pow(dx, 2) + Math.pow(dy, 2));
    if (isDebugMode) {
        console.log("Radius og Circle:", this.radius);
    }
};
// Render this circle onto the given context; when addToTheList is true the
// circle is also committed to the persistent drawnCircles list.
Circle.prototype.draw = function (context, addToTheList) {
    context.beginPath();
    // Bug fix: the arc previously started at 2*Math.PI with the boolean
    // `false` coerced to 0 as the end angle, a zero-length sweep. Sweep
    // from 0 to 2*Math.PI to draw the full circle.
    context.arc(this.center.x, this.center.y, this.radius, 0, 2 * Math.PI, false);
    context.save();
    context.fillStyle = this.fill;
    context.fill();
    context.restore();
    if (addToTheList) {
        // Bug fix: push `this` instead of the module-level
        // currentDrawingcircle so the method works for any instance.
        drawnCircles.push(this);
    }
}
// Change the fill colour used when this circle is rendered.
Circle.prototype.setColor = function (newFill) {
    this.fill = newFill;
}
// A circle is only worth committing once it has a positive radius.
Circle.prototype.isDrawable = function () {
    return this.radius > 0;
}
// Hit-test: does the mouse event, mapped to canvas coordinates, land inside
// this circle?
Circle.prototype.isPointInCircle = function (e) {
    var x = parseInt(e.clientX - canvas.offsetLeft);
    var y = parseInt(e.clientY - canvas.offsetTop);
    if (isDebugMode) {
        // Bug fix: this block referenced undefined xCal/yCal/rCal and threw
        // a ReferenceError whenever debug mode was enabled.
        console.log("value of x:" + x);
        console.log("value of y:" + y);
        console.log("value of r:" + this.radius);
    }
    return (Math.pow((x - this.center.x), 2) + Math.pow((y - this.center.y), 2)) <= Math.pow(this.radius, 2);
}
// Produce a random colour in "#RRGGBB" form, one hex digit at a time.
function getRandomColor () {
    var letters = '0123456789ABCDEF'.split('');
    var color = '#';
    for (var i = 0; i < 6; i++) {
        // Bug fix: Math.round(Math.random() * 15) picks the first and last
        // digits only half as often as the others; flooring over 16 equal
        // buckets is uniform.
        color += letters[Math.floor(Math.random() * 16)];
    }
    return color;
}
canvas.onmousedown = function (e) {
var x = parseInt(e.clientX - canvas.offsetLeft);
var y = parseInt(e.clientY - canvas.offsetTop);
for (var i = drawnCircles.length-1; i >= 0; i--) {
if(drawnCircles[i].isPointInCircle(e)) {
isDrag = true;
currentSelectedIndex = i;
canvas.onmousemove = mouseMove;
break;
}
}
if(!isDrag){
currentDrawingcircle = new Circle(x,y);
currentDrawingcircle.setColor(getRandomColor());
}
}
// While a drag is active, keep the selected circle's centre under the cursor.
function mouseMove(e) {
    if (!isDrag) {
        return;
    }
    var dragged = drawnCircles[currentSelectedIndex];
    dragged.center.x = parseInt(e.clientX - canvas.offsetLeft);
    dragged.center.y = parseInt(e.clientY - canvas.offsetTop);
}
canvas.onmouseup = function (e) {
if(currentDrawingcircle != null && isDrag == false) {
var x = parseInt(e.clientX - canvas.offsetLeft);
var y = parseInt(e.clientY - canvas.offsetTop);
currentDrawingcircle.setRadius(x,y);
if(currentDrawingcircle.isDrawable()) {
currentDrawingcircle.draw(context,true);
}
}
currentDrawingcircle =null;
canvas.onmousemove = null;
currentSelectedIndex = -1;
//inValidate = false;
isDrag = false;
}
// Double-click deletes the topmost circle under the cursor and repaints.
canvas.ondblclick = function (e) {
    var idx;
    for (idx = drawnCircles.length - 1; idx >= 0; idx--) {
        if (drawnCircles[idx].isPointInCircle(e)) {
            drawnCircles.splice(idx, 1);
            allCircleDraw();
            break;
        }
    }
    currentDrawingcircle = null;
}
var reset = document.getElementById('reset');
reset.onclick = clearAll;
function clearCanvas () {
context.clearRect(0,0,canvas.width,canvas.height);
}
function clearAll () {
clearCanvas();
drawnCircles = [];
}
setInterval(allCircleDraw,refreshInterval);
function allCircleDraw() {
clearCanvas();
for (var i = 0; i < drawnCircles.length ; i++) {
drawnCircles[i].draw(context,false);
};
}
})();
|
mit
|
entrotech/deployapp
|
Sabio.Web/Assets/Admin/js/form-wizards-validation.demo.min.js
|
755
|
/*
Template Name: Color Admin - Responsive Admin Dashboard Template build with Twitter Bootstrap 3.3.7
Version: 2.1.0
Author: Sean Ngu
Website: http://www.seantheme.com/color-admin-v2.1/admin/html/
// NOTE: minified build artifact — edit the unminified source, not this file.
// Wires bwizard step changes to Parsley validation groups "wizard-step-1..3"
// and exposes FormWizardValidation.init() to run the wiring.
*/var handleBootstrapWizardsValidation=function(){"use strict";$("#wizard").bwizard({validating:function(e,t){if(t.index==0){if(false===$('form[name="form-wizard"]').parsley().validate("wizard-step-1")){return false}}else if(t.index==1){if(false===$('form[name="form-wizard"]').parsley().validate("wizard-step-2")){return false}}else if(t.index==2){if(false===$('form[name="form-wizard"]').parsley().validate("wizard-step-3")){return false}}}})};var FormWizardValidation=function(){"use strict";return{init:function(){handleBootstrapWizardsValidation()}}}()
|
mit
|
princesust/SUN-TASK
|
resources/views/partials/not-found.php
|
279
|
<!-- Empty-state partial: rendered only once loading has finished and no result pages exist. -->
<div v-if="totalPage == 0 && loading == false">
    <div id="content">
        <div class="container">
            <div class="not-found">
                <h1>No Record Found</h1>
            </div>
        </div>
    </div>
</div>
|
mit
|
aspose-cells/Aspose.Cells-for-Cloud
|
Examples/Java/SDK/src/main/java/com/aspose/cells/cloud/examples/worksheet/UnhideWorksheet.java
|
1791
|
package com.aspose.cells.cloud.examples.worksheet;
import com.aspose.cells.api.CellsApi;
import com.aspose.cells.cloud.examples.Configuration;
import com.aspose.cells.cloud.examples.Utils;
import com.aspose.storage.api.StorageApi;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
/**
 * Demonstrates changing a worksheet's visibility through the Aspose Cells
 * cloud API: upload the sample workbook, switch "Sheet1" to visible, then
 * download the updated workbook next to the input file.
 */
public class UnhideWorksheet {

    public static void main(String... args) throws IOException {
        // ExStart: unhide-worksheets
        try {
            // API clients: storage handles upload/download, cells the worksheet ops.
            StorageApi storageApi = new StorageApi(Configuration.apiKey, Configuration.appSID, true);
            CellsApi cellsApi = new CellsApi(Configuration.apiKey, Configuration.appSID, true);

            String input = "Sample1.xlsx";
            String output = "Sample2.xlsx";
            String sheet = "Sheet1";
            boolean isVisible = true;

            Path inputFile = Utils.getPath(UnhideWorksheet.class, input);
            Path outputFile = Utils.getPath(UnhideWorksheet.class, output);

            // Upload the workbook, flip the sheet's visibility, and report it.
            storageApi.PutCreate(input, null, Utils.STORAGE, inputFile.toFile());
            com.aspose.cells.model.WorksheetResponse wr = cellsApi.PutChangeVisibilityWorksheet(input, sheet, isVisible,
                    Utils.STORAGE, null);
            System.out.println("Visibility Type: " + wr.getWorksheet().getVisibilityType());

            // Download the modified workbook from storage and save it locally.
            com.aspose.storage.model.ResponseMessage sr = storageApi.GetDownload(input, null, Utils.STORAGE);
            Files.copy(sr.getInputStream(), outputFile, StandardCopyOption.REPLACE_EXISTING);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        // ExEnd: unhide-worksheets
    }
}
|
mit
|
lingxyd/nmqtt
|
nMQTT/ConnectionHandling/MqttConnection.cs
|
13038
|
/*
* nMQTT, a .Net MQTT v3 client implementation.
* http://wiki.github.com/markallanson/nmqtt
*
* Copyright (c) 2009 Mark Allanson (mark@markallanson.net) & Contributors
*
* Licensed under the MIT License. You may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/mit-license.php
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Sockets;
using Common.Logging;
namespace Nmqtt
{
internal class MqttConnection : IDisposable
{
private static readonly ILog Log = LogManager.GetCurrentClassLogger();
/// <summary>
/// The TcpClient that maintains the connection to the MQTT broker.
/// </summary>
private readonly TcpClient tcpClient;
/// <summary>
/// Sync lock object to ensure that only a single message is sent through the connection handler at once.
/// </summary>
private readonly object sendPadlock = new object();
/// <summary>
/// Initializes a new instance of the <see cref="MqttConnection" /> class,
/// opening the TCP connection and starting the first asynchronous read.
/// </summary>
/// <param name="server">The server to connect to.</param>
/// <param name="port">The port on the server.</param>
private MqttConnection(string server, int port) {
    try {
        Log.Debug(m => m("Connecting to message broker running on {0}:{1}", server, port));
        // connect and save off the stream.
        tcpClient = new TcpClient(server, port);
    } catch (SocketException ex) {
        // Surface connection failures as ConnectionException so callers see a
        // single exception type for connectivity problems.
        String message = String.Format("The connection to the message broker {0}:{1} could not be made.", server, port);
        Log.Error(message, ex);
        throw new ConnectionException(message, ConnectionState.Faulted, ex);
    }
    // Kick off the read loop: each completed read re-arms the next one in
    // ReadHeaderComplete, so this single BeginRead drives all inbound traffic.
    var readWrapper = new ReadWrapper(tcpClient.GetStream());
    readWrapper.Stream.BeginRead(readWrapper.Buffer, 0, readWrapper.NextReadSize, ReadHeaderComplete, readWrapper);
}
/// <summary>
/// Initiates a new connection to a message broker.
/// </summary>
/// <param name="server">The broker host name or address.</param>
/// <param name="port">The broker TCP port.</param>
/// <returns>A connected <see cref="MqttConnection"/> that is already reading from the wire.</returns>
public static MqttConnection Connect(string server, int port) {
    return new MqttConnection(server, port);
}
/// <summary>
/// Disconnects from the message broker by closing the underlying TCP client.
/// Any in-flight async read is expected to fault and be handled in
/// ReadHeaderComplete's IOException branch.
/// </summary>
private void Disconnect() {
    tcpClient.Close();
}
/// <summary>
/// Sends the message in the stream to the broker.
/// </summary>
/// <param name="message">The message to send; read from its current position to the end.</param>
public void Send(Stream message) {
    var messageBytes = new byte[message.Length];
    // Bug fix: Stream.Read may return fewer bytes than requested; loop
    // until the whole message has been copied out of the stream.
    var totalRead = 0;
    while (totalRead < messageBytes.Length) {
        var read = message.Read(messageBytes, totalRead, messageBytes.Length - totalRead);
        if (read == 0) {
            break; // unexpected end of stream; send what was read
        }
        totalRead += read;
    }
    Send(messageBytes);
}
/// <summary>
/// Sends the message contained in the byte array to the broker.
/// </summary>
/// <param name="message">The message to send.</param>
private void Send(byte[] message) {
    Log.Info(m => m("Sending message of {0} bytes to connected broker", message.Length));
    // ensure only a single thread gets through to do wire ops at once.
    lock (sendPadlock) {
        var stream = tcpClient.GetStream();
        stream.Write(message, 0, message.Length);
        stream.Flush();
    }
}
/// <summary>
/// Callback for when data has been read from the underlying network stream.
/// Implements a two-state machine: in Header state it reads the fixed header
/// plus the remaining-length bytes; in Content state it accumulates payload
/// until the expected total is reached, raises DataAvailable, and re-arms a
/// fresh header read.
/// </summary>
/// <param name="asyncResult">The async result from the read.</param>
private void ReadHeaderComplete(IAsyncResult asyncResult) {
    var readWrapper = (ReadWrapper) asyncResult.AsyncState;
    try {
        var bytesRead = readWrapper.Stream.EndRead(asyncResult);
        if (bytesRead == 0) {
            // Nothing read, we will just try another read from the stream.
            Log.Debug("Async network stream read returned 0 bytes, continuing to search for header.");
            readWrapper.ReadState = ConnectionReadState.Header;
        } else if (tcpClient.Connected && readWrapper.Stream.CanRead) {
            if (readWrapper.ReadState == ConnectionReadState.Header && readWrapper.Stream.DataAvailable) {
                Log.Info("Reading message arriving on the wire.");
                // Buffer[0] holds the single fixed-header byte just read.
                readWrapper.MessageBytes.Add(readWrapper.Buffer[0]);
                var lengthBytes = MqttHeader.ReadLengthBytes(readWrapper.Stream);
                var remainingLength = MqttHeader.CalculateLength(lengthBytes);
                // update the read wrapper with the header bytes, and a resized read buffer
                // to capture the remaining length.
                readWrapper.MessageBytes.AddRange(lengthBytes);
                // no content, so yield the message early, else transition to reading the content.
                if (remainingLength == 0) {
                    Log.Debug("Message receipt complete. Has empty content length so handing off now.");
                    FireDataAvailableEvent(readWrapper.MessageBytes);
                } else {
                    // total bytes of content is the remaining length plus the header.
                    readWrapper.TotalBytes = remainingLength + readWrapper.MessageBytes.Count;
                    readWrapper.RecalculateNextReadSize();
                    readWrapper.ReadState = ConnectionReadState.Content;
                }
            } else if (readWrapper.ReadState == ConnectionReadState.Content) {
                // stash what we've read.
                readWrapper.MessageBytes.AddRange(readWrapper.Buffer.Take(bytesRead));
                Log.Debug(m => m("Message Content read {0:n0} of {1:n0} expected remaining bytes.", bytesRead, readWrapper.TotalBytes));
                // NOTE(review): despite its name, IsReadComplete is true while
                // bytes are still OUTSTANDING (MessageBytes.Count < TotalBytes),
                // so this branch continues reading and the else branch hands off
                // the finished message. The logic is self-consistent; only the
                // property's name is inverted.
                if (readWrapper.IsReadComplete) {
                    // reset the read buffer to accommodate the remaining length (last - what was read)
                    readWrapper.RecalculateNextReadSize();
                } else {
                    Log.Debug(m => m("Message receipt complete ({0:n0} total bytes including all headers), handing off to handlers.", readWrapper.MessageBytes.Count));
                    readWrapper.ReadState = ConnectionReadState.Header;
                    FireDataAvailableEvent(readWrapper.MessageBytes);
                }
            }
            // if we've switched to reading a header then recreate the read wrapper for the next message
            if (readWrapper.ReadState == ConnectionReadState.Header) {
                readWrapper = new ReadWrapper(readWrapper.Stream);
            }
            // initiate a read for the next set of bytes (header or remaining content)
            // so long as we're still connected to the underlying client.
            readWrapper.Stream.BeginRead(readWrapper.Buffer, 0, readWrapper.NextReadSize, ReadHeaderComplete, readWrapper);
        }
    } catch (IOException ex) {
        Log.Debug("Error occurred during async read from broker network stream. Initiating broker disconnect", ex);
        // close the underlying connection
        this.Disconnect();
        if (ConnectionDropped != null) {
            ConnectionDropped(this, new ConnectionDroppedEventArgs(ex));
        }
    }
}
/// <summary>
/// Raises the DataAvailable event, passing the raw message bytes to all subscribers.
/// </summary>
/// <param name="messageBytes">The raw content of the message received.</param>
private void FireDataAvailableEvent(List<byte> messageBytes) {
    Log.Debug("Dispatching completed message to handlers.");
    // Bug fix: copy the delegate before the null check so a subscriber
    // unsubscribing on another thread between the check and the invocation
    // cannot cause a NullReferenceException.
    var handler = DataAvailable;
    if (handler != null) {
        handler(this, new DataAvailableEventArgs(messageBytes));
    }
}
/// <summary>
/// Occurs when data is available for processing from the underlying network stream.
/// Raised from the async read callback thread, not the caller's thread.
/// </summary>
public event EventHandler<DataAvailableEventArgs> DataAvailable;
/// <summary>
/// Occurs when the connection to the remote server drops unexpectedly
/// (an IOException during an async read).
/// </summary>
public event EventHandler<ConnectionDroppedEventArgs> ConnectionDropped;
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// Closes the underlying TCP connection.
/// </summary>
public void Dispose() {
    Disconnect();
    GC.SuppressFinalize(this);
}
/// <summary>
/// Controls the read state used during async reads: the connection alternates
/// between reading a fixed header and reading the message content it announces.
/// </summary>
private enum ConnectionReadState {
    /// <summary>
    /// Reading a message header.
    /// </summary>
    Header,
    /// <summary>
    /// Reading message content.
    /// </summary>
    Content
}
/// <summary>
/// State and logic used to read from the underlying network stream. One
/// instance lives for the duration of a single message; a fresh instance is
/// created when the connection returns to header-reading.
/// </summary>
private struct ReadWrapper
{
    /// <summary>
    /// The read buffer size from the network.
    /// </summary>
    private const int BufferSize = 1 << 17; // 128KB read buffer
    /// <summary>
    /// The total bytes expected to be read, as announced by the header.
    /// </summary>
    public int TotalBytes;
    /// <summary>
    /// The bytes associated with the message being read.
    /// </summary>
    public readonly List<byte> MessageBytes;
    /// <summary>
    /// The network stream being read.
    /// </summary>
    public readonly NetworkStream Stream;
    /// <summary>
    /// The amount of content to request in the next read.
    /// </summary>
    public int NextReadSize;
    /// <summary>
    /// The buffer the last stream read wrote into.
    /// </summary>
    public readonly byte[] Buffer;
    /// <summary>
    /// What the connection is currently reading (header vs content).
    /// </summary>
    public ConnectionReadState ReadState;
    /// <summary>
    /// NOTE(review): name is inverted — this returns true while bytes are
    /// still OUTSTANDING (Count &lt; TotalBytes), i.e. the read is NOT yet
    /// complete. Callers in ReadHeaderComplete rely on this inverted
    /// meaning, so renaming requires changing both sites together.
    /// </summary>
    public bool IsReadComplete {
        get { return MessageBytes.Count < TotalBytes; }
    }
    /// <summary>
    /// Creates a new ReadWrapper that wraps the state used to read a message from a stream.
    /// Starts in Header state expecting a single fixed-header byte.
    /// </summary>
    /// <param name="stream">The stream being read.</param>
    public ReadWrapper(NetworkStream stream) {
        this.ReadState = ConnectionReadState.Header;
        this.Buffer = new byte[BufferSize];
        this.MessageBytes = new List<byte>();
        this.TotalBytes = 1; // default to header read size.
        this.NextReadSize = this.TotalBytes;
        this.Stream = stream;
    }
    /// <summary>
    /// Recalculates the number of bytes to request next, given the expected
    /// total size and the amount read so far (capped at the buffer size).
    /// </summary>
    public void RecalculateNextReadSize() {
        if (TotalBytes == 0) {
            throw new InvalidOperationException("Total ReadBytes is 0, cannot calculate next read size.");
        }
        // next read size is the buffersize if we have more than buffer size left to read,
        // otherwise it's the amount left to read in the message.
        var remainingBytes = TotalBytes - MessageBytes.Count;
        this.NextReadSize = remainingBytes > BufferSize
            ? BufferSize
            : remainingBytes;
    }
}
}
}
|
mit
|
mKeRix/home-assistant
|
homeassistant/scripts/check_config.py
|
9544
|
"""Script to check the configuration file."""
import argparse
import asyncio
from collections import OrderedDict
from collections.abc import Mapping, Sequence
from glob import glob
import logging
import os
from typing import Any, Callable, Dict, List, Tuple
from unittest.mock import patch
from homeassistant import bootstrap, core
from homeassistant.config import get_default_config_dir
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.check_config import async_check_ha_config_file
import homeassistant.util.yaml.loader as yaml_loader
# mypy: allow-untyped-calls, allow-untyped-defs
REQUIREMENTS = ("colorlog==4.1.0",)

_LOGGER = logging.getLogger(__name__)
# pylint: disable=protected-access
# Patch targets: key -> (dotted path to patch, original callable used as the
# side-effect base). "load*" patches load_yaml at a second import site with
# the same side effect (the '*' is stripped when resolving the mock function).
MOCKS: Dict[str, Tuple[str, Callable]] = {
    "load": ("homeassistant.util.yaml.loader.load_yaml", yaml_loader.load_yaml),
    "load*": ("homeassistant.config.load_yaml", yaml_loader.load_yaml),
    "secrets": ("homeassistant.util.yaml.loader.secret_yaml", yaml_loader.secret_yaml),
}
# Functions patched with no-op mocks for the duration of the check.
SILENCE = ("homeassistant.scripts.check_config.yaml_loader.clear_secret_cache",)
PATCHES: Dict[str, Any] = {}
C_HEAD = "bold"  # heading colour name used by color()
ERROR_STR = "General Errors"  # pseudo-domain used for errors without a domain
def color(the_color, *args, reset=None):
    """Color helper.

    With no positional args, return just the escape code for ``the_color``.
    Otherwise return the colored, space-joined ``args`` followed by the
    ``reset`` (or default "reset") escape code.

    Raises ValueError for an unknown color or reset name.
    """
    # pylint: disable=import-outside-toplevel
    from colorlog.escape_codes import escape_codes, parse_colors

    try:
        if not args:
            assert reset is None, "You cannot reset if nothing being printed"
            return parse_colors(the_color)
        return parse_colors(the_color) + " ".join(args) + escape_codes[reset or "reset"]
    except KeyError as k:
        # Chain the KeyError so the traceback shows which lookup failed.
        raise ValueError(f"Invalid color {k!s} in {the_color}") from k
def run(script_args: List) -> int:
    """Handle check config commandline script.

    Parses the CLI flags, runs check() against the chosen config directory,
    then prints the requested report sections (files, failures, per-domain
    config, secrets). Returns the number of failed domains (0 == success),
    which the caller uses as the exit code.
    """
    parser = argparse.ArgumentParser(description="Check Home Assistant configuration.")
    parser.add_argument("--script", choices=["check_config"])
    parser.add_argument(
        "-c",
        "--config",
        default=get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration",
    )
    parser.add_argument(
        "-i",
        "--info",
        nargs="?",
        default=None,
        const="all",
        help="Show a portion of the config",
    )
    parser.add_argument(
        "-f", "--files", action="store_true", help="Show used configuration files"
    )
    parser.add_argument(
        "-s", "--secrets", action="store_true", help="Show secret information"
    )
    # NOTE(review): parses sys.argv, not script_args — the parameter appears
    # unused here; confirm against the script entry point.
    args, unknown = parser.parse_known_args()
    if unknown:
        print(color("red", "Unknown arguments:", ", ".join(unknown)))
    config_dir = os.path.join(os.getcwd(), args.config)
    print(color("bold", "Testing configuration at", config_dir))
    res = check(config_dir, args.secrets)
    domain_info: List[str] = []
    if args.info:
        domain_info = args.info.split(",")
    # -f/--files: list every yaml file found, red when it was never loaded.
    if args.files:
        print(color(C_HEAD, "yaml files"), "(used /", color("red", "not used") + ")")
        deps = os.path.join(config_dir, "deps")
        yaml_files = [
            f
            for f in glob(os.path.join(config_dir, "**/*.yaml"), recursive=True)
            if not f.startswith(deps)
        ]
        for yfn in sorted(yaml_files):
            the_color = "" if yfn in res["yaml_files"] else "red"
            print(color(the_color, "-", yfn))
    # Failures: dump each failed domain's errors and offending config.
    if res["except"]:
        print(color("bold_white", "Failed config"))
        for domain, config in res["except"].items():
            domain_info.append(domain)
            print(" ", color("bold_red", domain + ":"), color("red", "", reset="red"))
            dump_dict(config, reset="red")
            print(color("reset"))
    # -i/--info: print successful config, either everything or chosen domains.
    if domain_info:
        if "all" in domain_info:
            print(color("bold_white", "Successful config (all)"))
            for domain, config in res["components"].items():
                print(" ", color(C_HEAD, domain + ":"))
                dump_dict(config)
        else:
            print(color("bold_white", "Successful config (partial)"))
            for domain in domain_info:
                if domain == ERROR_STR:
                    continue
                print(" ", color(C_HEAD, domain + ":"))
                dump_dict(res["components"].get(domain))
    # -s/--secrets: show where each secret came from and which were used.
    if args.secrets:
        flatsecret: Dict[str, str] = {}
        for sfn, sdict in res["secret_cache"].items():
            sss = []
            for skey in sdict:
                if skey in flatsecret:
                    _LOGGER.error(
                        "Duplicated secrets in files %s and %s", flatsecret[skey], sfn
                    )
                flatsecret[skey] = sfn
                sss.append(color("green", skey) if skey in res["secrets"] else skey)
            print(color(C_HEAD, "Secrets from", sfn + ":"), ", ".join(sss))
        print(color(C_HEAD, "Used Secrets:"))
        for skey, sval in res["secrets"].items():
            if sval is None:
                print(" -", skey + ":", color("red", "not found"))
                continue
            print(
                " -",
                skey + ":",
                sval,
                color("cyan", "[from:", flatsecret.get(skey, "keyring") + "]"),
            )
    # Exit status: one per failed domain.
    return len(res["except"])
def check(config_dir, secrets=False):
    """Perform a check by mocking hass load functions.

    Patches Home Assistant's yaml/secret loaders so every file and secret
    touched during validation is recorded, runs the real config check,
    then restores all patches.

    Args:
        config_dir: path of the configuration directory to validate.
        secrets (bool): if True, also patch and track !secret resolution.

    Returns:
        dict with keys "yaml_files", "secrets", "except", "secret_cache"
        and "components" (the validated configuration object).
    """
    logging.getLogger("homeassistant.loader").setLevel(logging.CRITICAL)
    res: Dict[str, Any] = {
        "yaml_files": OrderedDict(), # yaml_files loaded
        "secrets": OrderedDict(), # secret cache and secrets loaded
        "except": OrderedDict(), # exceptions raised (with config)
        #'components' is a HomeAssistantConfig # noqa: E265
        "secret_cache": None,
    }
    # pylint: disable=possibly-unused-variable
    def mock_load(filename):
        """Mock hass.util.load_yaml to save config file names."""
        res["yaml_files"][filename] = True
        return MOCKS["load"][1](filename)
    # pylint: disable=possibly-unused-variable
    def mock_secrets(ldr, node):
        """Mock _get_secrets."""
        try:
            val = MOCKS["secrets"][1](ldr, node)
        except HomeAssistantError:
            # Secret could not be resolved; record it as missing (None)
            val = None
        res["secrets"][node.value] = val
        return val
    # Patches to skip functions
    for sil in SILENCE:
        PATCHES[sil] = patch(sil)
    # Patches with local mock functions
    for key, val in MOCKS.items():
        if not secrets and key == "secrets":
            continue
        # The * in the key is removed to find the mock_function (side_effect)
        # This allows us to use one side_effect to patch multiple locations
        mock_function = locals()[f"mock_{key.replace('*', '')}"]
        PATCHES[key] = patch(val[0], side_effect=mock_function)
    # Start all patches
    for pat in PATCHES.values():
        pat.start()
    if secrets:
        # Ensure !secrets point to the patched function
        yaml_loader.yaml.SafeLoader.add_constructor("!secret", yaml_loader.secret_yaml)
    try:
        res["components"] = asyncio.run(async_check_config(config_dir))
        res["secret_cache"] = OrderedDict(yaml_loader.__SECRET_CACHE)
        # Group validation errors (and the offending config, if any) by domain
        for err in res["components"].errors:
            domain = err.domain or ERROR_STR
            res["except"].setdefault(domain, []).append(err.message)
            if err.config:
                res["except"].setdefault(domain, []).append(err.config)
    except Exception as err: # pylint: disable=broad-except
        print(color("red", "Fatal error while loading config:"), str(err))
        res["except"].setdefault(ERROR_STR, []).append(str(err))
    finally:
        # Stop all patches
        for pat in PATCHES.values():
            pat.stop()
        if secrets:
            # Ensure !secrets point to the original function
            yaml_loader.yaml.SafeLoader.add_constructor(
                "!secret", yaml_loader.secret_yaml
            )
        bootstrap.clear_secret_cache()
    return res
async def async_check_config(config_dir):
    """Validate the Home Assistant configuration found at *config_dir*.

    Spins up a HomeAssistant instance pointed at the directory, runs the
    config-file check, stops the instance, and returns the check result.
    """
    hass = core.HomeAssistant()
    hass.config.config_dir = config_dir
    result = await async_check_ha_config_file(hass)
    await hass.async_stop(force=True)
    return result
def line_info(obj, **kwargs):
    """Display line config source."""
    # Objects loaded by the patched yaml loader carry their origin file/line
    if not hasattr(obj, "__config_file__"):
        return "?"
    return color(
        "cyan", f"[source {obj.__config_file__}:{obj.__line__ or '?'}]", **kwargs
    )
def dump_dict(layer, indent_count=3, listi=False, **kwargs):
    """Display a dict.
    A friendly version of print yaml_loader.yaml.dump(config).

    Args:
        layer: mapping or sequence to pretty-print (recurses into both).
        indent_count (int): number of leading spaces at this nesting level.
        listi (bool): True when this layer is itself an item of a parent
            list, so the indent carries a '-' list marker.
        **kwargs: forwarded to line_info (e.g. color reset options).
    """
    def sort_dict_key(val):
        """Return the dict key for sorting."""
        # "0" sorts before any letter, so "platform" keys always print first
        key = str(val[0]).lower()
        return "0" if key == "platform" else key
    indent_str = indent_count * " "
    if listi or isinstance(layer, list):
        # Swap the last indent space for a '-' list marker
        indent_str = indent_str[:-1] + "-"
    if isinstance(layer, Mapping):
        for key, value in sorted(layer.items(), key=sort_dict_key):
            if isinstance(value, (dict, list)):
                print(indent_str, str(key) + ":", line_info(value, **kwargs))
                dump_dict(value, indent_count + 2)
            else:
                print(indent_str, str(key) + ":", value)
            # Only the first printed key keeps the '-' marker; reset afterwards
            indent_str = indent_count * " "
    if isinstance(layer, Sequence):
        for i in layer:
            if isinstance(i, dict):
                dump_dict(i, indent_count + 2, True)
            else:
                print(" ", indent_str, i)
|
mit
|
tschaume/pymatgen
|
pymatgen/analysis/pourbaix_diagram.py
|
39890
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module is intended to be used to compute Pourbaix diagrams
of arbitrary compositions and formation energies. If you use
this module in your work, please consider citing the following:
General formalism for solid-aqueous equilibria from DFT:
Persson et al., DOI: 10.1103/PhysRevB.85.235438
Decomposition maps, or Pourbaix hull diagrams
Singh et al., DOI: 10.1021/acs.chemmater.7b03980
Fast computation of many-element Pourbaix diagrams:
Patel et al., https://arxiv.org/abs/1909.00035 (submitted)
"""
import logging
import numpy as np
import itertools
import re
from copy import deepcopy
from functools import cmp_to_key, partial, lru_cache
from monty.json import MSONable, MontyDecoder
from multiprocessing import Pool
import warnings
from scipy.spatial import ConvexHull, HalfspaceIntersection
try:
from scipy.special import comb
except ImportError:
from scipy.misc import comb
from pymatgen.util.coord import Simplex
from pymatgen.util.string import latexify
from pymatgen.util.plotting import pretty_plot
from pymatgen.core.periodic_table import Element
from pymatgen.core.composition import Composition
from pymatgen.core.ion import Ion
from pymatgen.entries.computed_entries import ComputedEntry
from pymatgen.analysis.reaction_calculator import Reaction, ReactionError
from pymatgen.analysis.phase_diagram import PhaseDiagram, PDEntry
from tqdm import tqdm
__author__ = "Sai Jayaraman"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.4"
__maintainer__ = "Joseph Montoya"
__credits__ = "Arunima Singh, Joseph Montoya, Anjli Patel"
__email__ = "joseph.montoya@tri.global"
__status__ = "Production"
__date__ = "Nov 1, 2012"
logger = logging.getLogger(__name__)
# Water chemical-potential term subtracted per H2O in PourbaixEntry.energy
# (presumably eV per H2O at standard conditions — TODO confirm units)
MU_H2O = -2.4583
# Nernst-style prefactor; appears as "0.0591 log10(conc)" in the energy
# expression documented on PourbaixEntry
PREFAC = 0.0591
# TODO: Revise to more closely reflect PDEntry, invoke from energy/composition
# TODO: PourbaixEntries depend implicitly on having entry energies be
# formation energies, should be a better way to get from raw energies
# TODO: uncorrected_energy is a bit of a misnomer, but not sure what to rename
class PourbaixEntry(MSONable):
    """
    An object encompassing all data relevant to a solid or ion
    in a pourbaix diagram. Each bulk solid/ion has an energy
    g of the form: e = e0 + 0.0591 log10(conc) - nO mu_H2O
    + (nH - 2nO) pH + phi (-nH + 2nO + q)
    Note that the energies corresponding to the input entries
    should be formation energies with respect to hydrogen and
    oxygen gas in order for the pourbaix diagram formalism to
    work. This may be changed to be more flexible in the future.
    Args:
        entry (ComputedEntry/ComputedStructureEntry/PDEntry/IonEntry): An
            entry object
    """
    def __init__(self, entry, entry_id=None, concentration=1e-6):
        """Wrap an entry, classifying it as an "Ion" or "Solid" phase."""
        self.entry = entry
        if isinstance(entry, IonEntry):
            self.concentration = concentration
            self.phase_type = "Ion"
            self.charge = entry.ion.charge
        else:
            # Solids get unit activity and no charge
            self.concentration = 1.0
            self.phase_type = "Solid"
            self.charge = 0.0
        self.uncorrected_energy = entry.energy
        # Prefer an explicit entry_id, fall back to the wrapped entry's id
        if entry_id is not None:
            self.entry_id = entry_id
        elif hasattr(entry, "entry_id") and entry.entry_id:
            self.entry_id = entry.entry_id
        else:
            self.entry_id = None
    @property
    def npH(self):
        """pH coefficient: nH - 2*nO for the entry composition."""
        return self.entry.composition.get("H", 0.) \
            - 2 * self.entry.composition.get("O", 0.)
    @property
    def nH2O(self):
        """Number of H2O units, taken as the number of O in the composition."""
        return self.entry.composition.get("O", 0.)
    @property
    def nPhi(self):
        """Applied-potential coefficient: npH minus the ion charge."""
        return self.npH - self.charge
    @property
    def name(self):
        """Display name: reduced formula + "(s)" for solids, ion name for ions."""
        if self.phase_type == "Solid":
            return self.entry.composition.reduced_formula + "(s)"
        elif self.phase_type == "Ion":
            return self.entry.name
    @property
    def energy(self):
        """
        returns energy
        Returns (float): total energy of the pourbaix
            entry (at pH, V = 0 vs. SHE)
        """
        # Note: this implicitly depends on formation energies as input
        return self.uncorrected_energy + self.conc_term - (MU_H2O * self.nH2O)
    @property
    def energy_per_atom(self):
        """
        energy per atom of the pourbaix entry
        Returns (float): energy per atom
        """
        return self.energy / self.composition.num_atoms
    def energy_at_conditions(self, pH, V):
        """
        Get free energy for a given pH and V
        Args:
            pH (float): pH at which to evaluate free energy
            V (float): voltage at which to evaluate free energy
        Returns:
            free energy at conditions
        """
        return self.energy + self.npH * PREFAC * pH + self.nPhi * V
    def get_element_fraction(self, element):
        """
        Gets the elemental fraction of a given non-OH element
        Args:
            element (Element or str): string or element corresponding
                to element to get from composition
        Returns:
            fraction of element / sum(all non-OH elements)
        """
        return self.composition.get(element) * self.normalization_factor
    @property
    def normalized_energy(self):
        """
        Returns:
             energy normalized by number of non H or O atoms, e. g.
             for Zn2O6, energy / 2 or for AgTe3(OH)3, energy / 4
        """
        return self.energy * self.normalization_factor
    def normalized_energy_at_conditions(self, pH, V):
        """
        Energy at an electrochemical condition, compatible with
        numpy arrays for pH/V input
        Args:
            pH (float): pH at condition
            V (float): applied potential at condition
        Returns:
            energy normalized by number of non-O/H atoms at condition
        """
        return self.energy_at_conditions(pH, V) * self.normalization_factor
    @property
    def conc_term(self):
        """
        Returns the concentration contribution to the free energy,
        and should only be present when there are ions in the entry
        """
        return PREFAC * np.log10(self.concentration)
    # TODO: not sure if these are strictly necessary with refactor
    def as_dict(self):
        """
        Returns dict which contains Pourbaix Entry data.
        Note that the pH, voltage, H2O factors are always calculated when
        constructing a PourbaixEntry object.
        """
        d = {"@module": self.__class__.__module__,
             "@class": self.__class__.__name__}
        if isinstance(self.entry, IonEntry):
            d["entry_type"] = "Ion"
        else:
            d["entry_type"] = "Solid"
        d["entry"] = self.entry.as_dict()
        d["concentration"] = self.concentration
        d["entry_id"] = self.entry_id
        return d
    @classmethod
    def from_dict(cls, d):
        """
        Reconstruct a PourbaixEntry from its as_dict() representation,
        choosing IonEntry or PDEntry deserialization based on "entry_type".
        """
        entry_type = d["entry_type"]
        if entry_type == "Ion":
            entry = IonEntry.from_dict(d["entry"])
        else:
            entry = PDEntry.from_dict(d["entry"])
        entry_id = d["entry_id"]
        concentration = d["concentration"]
        return PourbaixEntry(entry, entry_id, concentration)
    @property
    def normalization_factor(self):
        """
        Sum of number of atoms minus the number of H and O in composition
        """
        # NOTE(review): raises ZeroDivisionError for compositions made
        # entirely of H and/or O
        return 1.0 / (self.num_atoms - self.composition.get('H', 0)
                      - self.composition.get('O', 0))
    @property
    def composition(self):
        """
        Returns composition
        """
        return self.entry.composition
    @property
    def num_atoms(self):
        """
        Return number of atoms in current formula. Useful for normalization
        """
        return self.composition.num_atoms
    def __repr__(self):
        return "Pourbaix Entry : {} with energy = {:.4f}, npH = {}, nPhi = {}, nH2O = {}, entry_id = {} ".format(
            self.entry.composition, self.energy, self.npH, self.nPhi, self.nH2O, self.entry_id)
    def __str__(self):
        return self.__repr__()
class MultiEntry(PourbaixEntry):
    """
    PourbaixEntry-like object for constructing multi-elemental Pourbaix
    diagrams.
    """
    def __init__(self, entry_list, weights=None):
        """
        Initializes a MultiEntry.
        Args:
            entry_list ([PourbaixEntry]): List of component PourbaixEntries
            weights ([float]): Weights associated with each entry. Default is None
        """
        if weights is None:
            self.weights = [1.0] * len(entry_list)
        else:
            self.weights = weights
        self.entry_list = entry_list
    # NOTE(review): lru_cache on an instance method caches on (self, item),
    # which keeps instances alive for the process lifetime — confirm intended
    @lru_cache()
    def __getattr__(self, item):
        """
        Because most of the attributes here are just weighted
        averages of the entry_list, we save some space by
        having a set of conditionals to define the attributes
        """
        # Attributes that are weighted averages of entry attributes
        if item in ["energy", "npH", "nH2O", "nPhi", "conc_term",
                    "composition", "uncorrected_energy"]:
            # TODO: Composition could be changed for compat with sum
            if item == "composition":
                start = Composition({})
            else:
                start = 0
            return sum([getattr(e, item) * w
                        for e, w in zip(self.entry_list, self.weights)], start)
        # Attributes that are just lists of entry attributes
        elif item in ["entry_id", "phase_type"]:
            return [getattr(e, item) for e in self.entry_list]
        # normalization_factor, num_atoms should work from superclass
        return self.__getattribute__(item)
    @property
    def name(self):
        """
        MultiEntry name, i. e. the name of each entry joined by ' + '
        """
        return " + ".join([e.name for e in self.entry_list])
    def __repr__(self):
        return "Multiple Pourbaix Entry: energy = {:.4f}, npH = {}, nPhi = {}, nH2O = {}, entry_id = {}, species: {}" \
            .format(self.energy, self.npH, self.nPhi, self.nH2O, self.entry_id, self.name)
    def __str__(self):
        return self.__repr__()
    def as_dict(self):
        """Returns an MSONable dict of component entry dicts and weights."""
        return {"@module": self.__class__.__module__,
                "@class": self.__class__.__name__,
                "entry_list": [e.as_dict() for e in self.entry_list],
                "weights": self.weights}
    @classmethod
    def from_dict(cls, d):
        """Reconstruct a MultiEntry from its as_dict() representation."""
        entry_list = [PourbaixEntry.from_dict(e) for e in d.get("entry_list")]
        return cls(entry_list, d.get("weights"))
# TODO: this class isn't particularly useful in its current form, could be
# refactored to include information about the reference solid
class IonEntry(PDEntry):
    """
    Object similar to PDEntry, but contains an Ion object instead of a
    Composition object.
    Args:
        ion: Ion object
        energy: Energy for composition.
        name: Optional parameter to name the entry. Defaults to the
            chemical formula.
    .. attribute:: name
        A name for the entry. This is the string shown in the phase diagrams.
        By default, this is the reduced formula for the composition, but can be
        set to some other string for display purposes.
    """
    def __init__(self, ion, energy, name=None, attribute=None):
        """Create an IonEntry from an Ion, its energy, and optional metadata."""
        self.ion = ion
        # Auto-assign name
        name = name if name else self.ion.reduced_formula
        super().__init__(
            composition=ion.composition, energy=energy, name=name,
            attribute=attribute)
    @classmethod
    def from_dict(cls, d):
        """
        Returns an IonEntry object from a dict.
        """
        # Use cls rather than a hard-coded IonEntry so that subclasses
        # deserialize to their own type
        return cls(Ion.from_dict(d["ion"]), d["energy"], d.get("name"),
                   d.get("attribute"))
    def as_dict(self):
        """
        Creates a dict of composition, energy, and ion name
        """
        d = {"ion": self.ion.as_dict(), "energy": self.energy,
             "name": self.name}
        return d
    def __repr__(self):
        return "IonEntry : {} with energy = {:.4f}".format(self.composition,
                                                           self.energy)
    def __str__(self):
        return self.__repr__()
def ion_or_solid_comp_object(formula):
    """
    Returns either an ion object or composition object given
    a formula.
    Args:
        formula: String formula. Eg. of ion: NaOH(aq), Na[+];
            Eg. of solid: Fe2O3(s), Fe(s), Na2O
    Returns:
        Composition/Ion object
    """
    # A bracketed charge (e.g. Na[+]) or an "(aq)" suffix marks an ion
    if re.search(r"\[([^\[\]]+)\]|\(aq\)", formula):
        return Ion.from_formula(formula)
    # An "(s)" suffix marks a solid; strip the 3-character suffix first
    if re.search(r"\(s\)", formula):
        return Composition(formula[:-3])
    # Anything else is treated as a plain solid composition
    return Composition(formula)
# H and O are excluded from the "pourbaix elements" throughout this module
ELEMENTS_HO = {Element('H'), Element('O')}
# TODO: the solids filter breaks some of the functionality of the
# heatmap plotter, because the reference states for decomposition
# don't include oxygen/hydrogen in the OER/HER regions
# TODO: create a from_phase_diagram class method for non-formation energy
# invocation
# TODO: invocation from a MultiEntry entry list could be a bit more robust
# TODO: serialization is still a bit rough around the edges
class PourbaixDiagram(MSONable):
    """
    Class to create a Pourbaix diagram from entries
    Args:
        entries ([PourbaixEntry] or [MultiEntry]): Entries list
            containing Solids and Ions or a list of MultiEntries
        comp_dict ({str: float}): Dictionary of compositions,
            defaults to equal parts of each elements
        conc_dict ({str: float}): Dictionary of ion concentrations,
            defaults to 1e-6 for each element
        filter_solids (bool): applying this filter to a pourbaix
            diagram ensures all included phases are filtered by
            stability on the compositional phase diagram. This
            breaks some of the functionality of the analysis,
            though, so use with caution.
        nproc (int): number of processes to generate multientries with
            in parallel. Defaults to None (serial processing)
    """
    def __init__(self, entries, comp_dict=None, conc_dict=None,
                 filter_solids=False, nproc=None):
        """See the class docstring for argument descriptions."""
        entries = deepcopy(entries)
        # Get non-OH elements
        self.pbx_elts = set(itertools.chain.from_iterable(
            [entry.composition.elements for entry in entries]))
        self.pbx_elts = list(self.pbx_elts - ELEMENTS_HO)
        self.dim = len(self.pbx_elts) - 1
        # Process multientry inputs
        if isinstance(entries[0], MultiEntry):
            self._processed_entries = entries
            # Extract individual entries
            single_entries = list(set(itertools.chain.from_iterable(
                [e.entry_list for e in entries])))
            self._unprocessed_entries = single_entries
            self._filtered_entries = single_entries
            self._conc_dict = None
            self._elt_comp = {k: v for k, v in entries[0].composition.items()
                              if k not in ELEMENTS_HO}
            self._multielement = True
        # Process single entry inputs
        else:
            # Set default conc/comp dicts
            if not comp_dict:
                comp_dict = {elt.symbol: 1. / len(self.pbx_elts) for elt in self.pbx_elts}
            if not conc_dict:
                conc_dict = {elt.symbol: 1e-6 for elt in self.pbx_elts}
            self._conc_dict = conc_dict
            self._elt_comp = comp_dict
            self.pourbaix_elements = self.pbx_elts
            solid_entries = [entry for entry in entries
                             if entry.phase_type == "Solid"]
            ion_entries = [entry for entry in entries
                           if entry.phase_type == "Ion"]
            # If a conc_dict is specified, override individual entry concentrations
            for entry in ion_entries:
                ion_elts = list(set(entry.composition.elements) - ELEMENTS_HO)
                # TODO: the logic here for ion concentration setting is in two
                # places, in PourbaixEntry and here, should be consolidated
                if len(ion_elts) == 1:
                    entry.concentration = conc_dict[ion_elts[0].symbol] \
                        * entry.normalization_factor
                elif len(ion_elts) > 1 and not entry.concentration:
                    raise ValueError("Elemental concentration not compatible "
                                     "with multi-element ions")
            self._unprocessed_entries = solid_entries + ion_entries
            if not len(solid_entries + ion_entries) == len(entries):
                raise ValueError("All supplied entries must have a phase type of "
                                 "either \"Solid\" or \"Ion\"")
            if filter_solids:
                # O is 2.46 b/c pbx entry finds energies referenced to H2O
                entries_HO = [ComputedEntry('H', 0), ComputedEntry('O', 2.46)]
                solid_pd = PhaseDiagram(solid_entries + entries_HO)
                solid_entries = list(set(solid_pd.stable_entries) - set(entries_HO))
            self._filtered_entries = solid_entries + ion_entries
            if len(comp_dict) > 1:
                self._multielement = True
                self._processed_entries = self._preprocess_pourbaix_entries(
                    self._filtered_entries, nproc=nproc)
            else:
                self._processed_entries = self._filtered_entries
                self._multielement = False
        # Precompute stable domains/vertices in pH-V space for later queries
        self._stable_domains, self._stable_domain_vertices = \
            self.get_pourbaix_domains(self._processed_entries)
    def _convert_entries_to_points(self, pourbaix_entries):
        """
        Args:
            pourbaix_entries ([PourbaixEntry]): list of pourbaix entries
                to process into vectors in nph-nphi-composition space
        Returns:
            list of vectors, [[nph, nphi, e0, x1, x2, ..., xn-1]]
            corresponding to each entry in nph-nphi-composition space
        """
        vecs = [[entry.npH, entry.nPhi, entry.energy] +
                [entry.composition.get(elt) for elt in self.pbx_elts[:-1]]
                for entry in pourbaix_entries]
        vecs = np.array(vecs)
        # Scale every row by its entry's normalization factor
        norms = np.transpose([[entry.normalization_factor
                               for entry in pourbaix_entries]])
        vecs *= norms
        return vecs
    def _get_hull_in_nph_nphi_space(self, entries):
        """
        Generates convex hull of pourbaix diagram entries in composition,
        npH, and nphi space.  This enables filtering of multi-entries
        such that only compositionally stable combinations of entries
        are included.
        Args:
            entries ([PourbaixEntry]): list of PourbaixEntries to construct
                the convex hull
        Returns: list of entries and stable facets corresponding to that
            list of entries
        """
        ion_entries = [entry for entry in entries
                       if entry.phase_type == "Ion"]
        solid_entries = [entry for entry in entries
                         if entry.phase_type == "Solid"]
        # Pre-filter solids based on min at each composition
        logger.debug("Pre-filtering solids by min energy at each composition")
        sorted_entries = sorted(
            solid_entries, key=lambda x: (x.composition.reduced_composition,
                                          x.entry.energy_per_atom))
        grouped_by_composition = itertools.groupby(
            sorted_entries, key=lambda x: x.composition.reduced_composition)
        # First entry of each group is the lowest-energy solid per composition
        min_entries = [list(grouped_entries)[0]
                       for comp, grouped_entries in grouped_by_composition]
        min_entries += ion_entries
        logger.debug("Constructing nph-nphi-composition points for qhull")
        vecs = self._convert_entries_to_points(min_entries)
        maxes = np.max(vecs[:, :3], axis=0)
        extra_point = np.concatenate(
            [maxes, np.ones(self.dim) / self.dim], axis=0)
        # Add padding for extra point
        pad = 1000
        extra_point[2] += pad
        points = np.concatenate([vecs, np.array([extra_point])], axis=0)
        logger.debug("Constructing convex hull in nph-nphi-composition space")
        hull = ConvexHull(points, qhull_options="QJ i")
        # Create facets and remove top
        facets = [facet for facet in hull.simplices
                  if not len(points) - 1 in facet]
        if self.dim > 1:
            logger.debug("Filtering facets by pourbaix composition")
            valid_facets = []
            for facet in facets:
                comps = vecs[facet][:, 3:]
                full_comps = np.concatenate([
                    comps, 1 - np.sum(comps, axis=1).reshape(len(comps), 1)], axis=1)
                # Ensure an compositional interior point exists in the simplex
                if np.linalg.matrix_rank(full_comps) > self.dim:
                    valid_facets.append(facet)
        else:
            valid_facets = facets
        return min_entries, valid_facets
    def _preprocess_pourbaix_entries(self, entries, nproc=None):
        """
        Generates multi-entries for pourbaix diagram
        Args:
            entries ([PourbaixEntry]): list of PourbaixEntries to preprocess
                into MultiEntries
            nproc (int): number of processes to be used in parallel
                treatment of entry combos
        Returns:
            ([MultiEntry]) list of stable MultiEntry candidates
        """
        # Get composition
        tot_comp = Composition(self._elt_comp)
        min_entries, valid_facets = self._get_hull_in_nph_nphi_space(entries)
        # Enumerate every sub-combination of each stable facet's entries
        combos = []
        for facet in valid_facets:
            for i in range(1, self.dim + 2):
                these_combos = list()
                for combo in itertools.combinations(facet, i):
                    these_entries = [min_entries[i] for i in combo]
                    these_combos.append(frozenset(these_entries))
                combos.append(these_combos)
        # frozensets deduplicate combos shared between facets
        all_combos = set(itertools.chain.from_iterable(combos))
        list_combos = []
        for i in all_combos:
            list_combos.append(list(i))
        all_combos = list_combos
        multi_entries = []
        # Parallel processing of multi-entry generation
        if nproc is not None:
            f = partial(self.process_multientry, prod_comp=tot_comp)
            with Pool(nproc) as p:
                multi_entries = list(tqdm(p.imap(f, all_combos),
                                          total=len(all_combos)))
            multi_entries = list(filter(bool, multi_entries))
        else:
            # Serial processing of multi-entry generation
            for combo in tqdm(all_combos):
                multi_entry = self.process_multientry(combo, prod_comp=tot_comp)
                if multi_entry:
                    multi_entries.append(multi_entry)
        return multi_entries
    def _generate_multielement_entries(self, entries, nproc=None):
        """
        Create entries for multi-element Pourbaix construction.
        This works by finding all possible linear combinations
        of entries that can result in the specified composition
        from the initialized comp_dict.
        Args:
            entries ([PourbaixEntries]): list of pourbaix entries
                to process into MultiEntries
            nproc (int): number of processes to be used in parallel
                treatment of entry combos
        """
        N = len(self._elt_comp)  # No. of elements
        total_comp = Composition(self._elt_comp)
        # generate all combinations of compounds that have all elements
        entry_combos = [itertools.combinations(
            entries, j + 1) for j in range(N)]
        entry_combos = itertools.chain.from_iterable(entry_combos)
        entry_combos = filter(lambda x: total_comp < MultiEntry(x).composition,
                              entry_combos)
        # Generate and filter entries
        processed_entries = []
        total = sum([comb(len(entries), j + 1)
                     for j in range(N)])
        if total > 1e6:
            warnings.warn("Your pourbaix diagram includes {} entries and may "
                          "take a long time to generate.".format(total))
        # Parallel processing of multi-entry generation
        if nproc is not None:
            f = partial(self.process_multientry, prod_comp=total_comp)
            with Pool(nproc) as p:
                processed_entries = list(tqdm(p.imap(f, entry_combos),
                                              total=total))
            processed_entries = list(filter(bool, processed_entries))
        # Serial processing of multi-entry generation
        else:
            for entry_combo in entry_combos:
                processed_entry = self.process_multientry(entry_combo, total_comp)
                if processed_entry is not None:
                    processed_entries.append(processed_entry)
        return processed_entries
    @staticmethod
    def process_multientry(entry_list, prod_comp, coeff_threshold=1e-4):
        """
        Static method for finding a multientry based on
        a list of entries and a product composition.
        Essentially checks to see if a valid aqueous
        reaction exists between the entries and the
        product composition and returns a MultiEntry
        with weights according to the coefficients if so.
        Args:
            entry_list ([Entry]): list of entries from which to
                create a MultiEntry
            prod_comp (Composition): composition constraint for setting
                weights of MultiEntry
            coeff_threshold (float): threshold of stoichiometric
                coefficients to filter, if weights are lower than
                this value, the entry is not returned
        """
        dummy_oh = [Composition("H"), Composition("O")]
        try:
            # Get balanced reaction coeffs, ensuring all < 0 or conc thresh
            # Note that we get reduced compositions for solids and non-reduced
            # compositions for ions because ions aren't normalized due to
            # their charge state.
            entry_comps = [e.composition for e in entry_list]
            rxn = Reaction(entry_comps + dummy_oh, [prod_comp])
            coeffs = -np.array([rxn.get_coeff(comp) for comp in entry_comps])
            # Return None if reaction coeff threshold is not met
            if (coeffs > coeff_threshold).all():
                return MultiEntry(entry_list, weights=coeffs.tolist())
            else:
                return None
        except ReactionError:
            return None
    @staticmethod
    def get_pourbaix_domains(pourbaix_entries, limits=None):
        """
        Returns a set of pourbaix stable domains (i. e. polygons) in
        pH-V space from a list of pourbaix_entries
        This function works by using scipy's HalfspaceIntersection
        function to construct all of the 2-D polygons that form the
        boundaries of the planes corresponding to individual entry
        gibbs free energies as a function of pH and V. Hyperplanes
        of the form a*pH + b*V + 1 - g(0, 0) are constructed and
        supplied to HalfspaceIntersection, which then finds the
        boundaries of each pourbaix region using the intersection
        points.
        Args:
            pourbaix_entries ([PourbaixEntry]): Pourbaix entries
                with which to construct stable pourbaix domains
            limits ([[float]]): limits in which to do the pourbaix
                analysis
        Returns:
            Returns a dict of the form {entry: [boundary_points]}.
            The list of boundary points are the sides of the N-1
            dim polytope bounding the allowable ph-V range of each entry.
        """
        if limits is None:
            limits = [[-2, 16], [-4, 4]]
        # Get hyperplanes
        hyperplanes = [np.array([-PREFAC * entry.npH, -entry.nPhi,
                                 0, -entry.energy]) * entry.normalization_factor
                       for entry in pourbaix_entries]
        hyperplanes = np.array(hyperplanes)
        hyperplanes[:, 2] = 1
        max_contribs = np.max(np.abs(hyperplanes), axis=0)
        g_max = np.dot(-max_contribs, [limits[0][1], limits[1][1], 0, 1])
        # Add border hyperplanes and generate HalfspaceIntersection
        border_hyperplanes = [[-1, 0, 0, limits[0][0]],
                              [1, 0, 0, -limits[0][1]],
                              [0, -1, 0, limits[1][0]],
                              [0, 1, 0, -limits[1][1]],
                              [0, 0, -1, 2 * g_max]]
        hs_hyperplanes = np.vstack([hyperplanes, border_hyperplanes])
        interior_point = np.average(limits, axis=1).tolist() + [g_max]
        hs_int = HalfspaceIntersection(hs_hyperplanes, np.array(interior_point))
        # organize the boundary points by entry
        pourbaix_domains = {entry: [] for entry in pourbaix_entries}
        for intersection, facet in zip(hs_int.intersections,
                                       hs_int.dual_facets):
            for v in facet:
                if v < len(pourbaix_entries):
                    this_entry = pourbaix_entries[v]
                    pourbaix_domains[this_entry].append(intersection)
        # Remove entries with no pourbaix region
        pourbaix_domains = {k: v for k, v in pourbaix_domains.items() if v}
        pourbaix_domain_vertices = {}
        for entry, points in pourbaix_domains.items():
            points = np.array(points)[:, :2]
            # Initial sort to ensure consistency
            points = points[np.lexsort(np.transpose(points))]
            center = np.average(points, axis=0)
            points_centered = points - center
            # Sort points by cross product of centered points,
            # isn't strictly necessary but useful for plotting tools
            points_centered = sorted(points_centered,
                                     key=cmp_to_key(lambda x, y: x[0] * y[1] - x[1] * y[0]))
            points = points_centered + center
            # Create simplices corresponding to pourbaix boundary
            simplices = [Simplex(points[indices])
                         for indices in ConvexHull(points).simplices]
            pourbaix_domains[entry] = simplices
            pourbaix_domain_vertices[entry] = points
        return pourbaix_domains, pourbaix_domain_vertices
    def find_stable_entry(self, pH, V):
        """
        Finds stable entry at a pH,V condition
        Args:
            pH (float): pH to find stable entry
            V (float): V to find stable entry
        Returns:
            the entry with the minimum normalized energy at (pH, V)
        """
        energies_at_conditions = [e.normalized_energy_at_conditions(pH, V)
                                  for e in self.stable_entries]
        return self.stable_entries[np.argmin(energies_at_conditions)]
    def get_decomposition_energy(self, entry, pH, V):
        """
        Finds decomposition to most stable entries in eV/atom,
        supports vectorized inputs for pH and V
        Args:
            entry (PourbaixEntry): PourbaixEntry corresponding to
                compound to find the decomposition for
            pH (float, [float]): pH at which to find the decomposition
            V (float, [float]): voltage at which to find the decomposition
        Returns:
            Decomposition energy for the entry, i. e. the energy above
                the "pourbaix hull" in eV/atom at the given conditions
        """
        # Check composition consistency between entry and Pourbaix diagram:
        pbx_comp = Composition(self._elt_comp).fractional_composition
        entry_pbx_comp = Composition(
            {elt: coeff for elt, coeff in entry.composition.items()
             if elt not in ELEMENTS_HO}).fractional_composition
        if entry_pbx_comp != pbx_comp:
            raise ValueError("Composition of stability entry does not match "
                             "Pourbaix Diagram")
        entry_normalized_energy = entry.normalized_energy_at_conditions(pH, V)
        hull_energy = self.get_hull_energy(pH, V)
        decomposition_energy = entry_normalized_energy - hull_energy
        # Convert to eV/atom instead of eV/normalized formula unit
        decomposition_energy /= entry.normalization_factor
        decomposition_energy /= entry.composition.num_atoms
        return decomposition_energy
    def get_hull_energy(self, pH, V):
        """
        Gets the minimum energy of the pourbaix "basin" that is formed
        from the stable pourbaix planes.  Vectorized.
        Args:
            pH (float or [float]): pH at which to find the hull energy
            V (float or [float]): V at which to find the hull energy
        Returns:
            (float or [float]) minimum pourbaix energy at conditions
        """
        all_gs = np.array([e.normalized_energy_at_conditions(
            pH, V) for e in self.stable_entries])
        base = np.min(all_gs, axis=0)
        return base
    def get_stable_entry(self, pH, V):
        """
        Gets the stable entry at a given pH, V condition
        Args:
            pH (float): pH at a given condition
            V (float): V at a given condition
        Returns:
            (PourbaixEntry or MultiEntry): pourbaix or multi-entry
                corresponding ot the minimum energy entry at a given
                pH, V condition
        """
        all_gs = np.array([e.normalized_energy_at_conditions(
            pH, V) for e in self.stable_entries])
        return self.stable_entries[np.argmin(all_gs)]
    @property
    def stable_entries(self):
        """
        Returns the stable entries in the Pourbaix diagram.
        """
        return list(self._stable_domains.keys())
    @property
    def unstable_entries(self):
        """
        Returns all unstable entries in the Pourbaix diagram
        """
        return [e for e in self.all_entries if e not in self.stable_entries]
    @property
    def all_entries(self):
        """
        Return all entries used to generate the pourbaix diagram
        """
        return self._processed_entries
    @property
    def unprocessed_entries(self):
        """
        Return unprocessed entries
        """
        return self._unprocessed_entries
    def as_dict(self, include_unprocessed_entries=False):
        """
        Returns an MSONable dict of the diagram; optionally serializes the
        unprocessed entries instead of the processed (multi-)entries.
        """
        if include_unprocessed_entries:
            entries = [e.as_dict() for e in self._unprocessed_entries]
        else:
            entries = [e.as_dict() for e in self._processed_entries]
        d = {"@module": self.__class__.__module__,
             "@class": self.__class__.__name__,
             "entries": entries,
             "comp_dict": self._elt_comp,
             "conc_dict": self._conc_dict}
        return d
    @classmethod
    def from_dict(cls, d):
        """Reconstruct a PourbaixDiagram from its as_dict() representation."""
        decoded_entries = MontyDecoder().process_decoded(d['entries'])
        return cls(decoded_entries, d.get('comp_dict'),
                   d.get('conc_dict'))
class PourbaixPlotter:
"""
A plotter class for phase diagrams.
Args:
pourbaix_diagram (PourbaixDiagram): A PourbaixDiagram object.
"""
    def __init__(self, pourbaix_diagram):
        """Store the PourbaixDiagram instance to be plotted."""
        self._pbx = pourbaix_diagram
def show(self, *args, **kwargs):
"""
Shows the pourbaix plot
Args:
*args: args to get_pourbaix_plot
**kwargs: kwargs to get_pourbaix_plot
Returns:
None
"""
plt = self.get_pourbaix_plot(*args, **kwargs)
plt.show()
def get_pourbaix_plot(self, limits=None, title="",
label_domains=True, plt=None):
"""
Plot Pourbaix diagram.
Args:
limits: 2D list containing limits of the Pourbaix diagram
of the form [[xlo, xhi], [ylo, yhi]]
title (str): Title to display on plot
label_domains (bool): whether to label pourbaix domains
plt (pyplot): Pyplot instance for plotting
Returns:
plt (pyplot) - matplotlib plot object with pourbaix diagram
"""
if limits is None:
limits = [[-2, 16], [-3, 3]]
plt = plt or pretty_plot(16)
xlim = limits[0]
ylim = limits[1]
h_line = np.transpose([[xlim[0], -xlim[0] * PREFAC],
[xlim[1], -xlim[1] * PREFAC]])
o_line = np.transpose([[xlim[0], -xlim[0] * PREFAC + 1.23],
[xlim[1], -xlim[1] * PREFAC + 1.23]])
neutral_line = np.transpose([[7, ylim[0]], [7, ylim[1]]])
V0_line = np.transpose([[xlim[0], 0], [xlim[1], 0]])
ax = plt.gca()
ax.set_xlim(xlim)
ax.set_ylim(ylim)
lw = 3
plt.plot(h_line[0], h_line[1], "r--", linewidth=lw)
plt.plot(o_line[0], o_line[1], "r--", linewidth=lw)
plt.plot(neutral_line[0], neutral_line[1], "k-.", linewidth=lw)
plt.plot(V0_line[0], V0_line[1], "k-.", linewidth=lw)
for entry, vertices in self._pbx._stable_domain_vertices.items():
center = np.average(vertices, axis=0)
x, y = np.transpose(np.vstack([vertices, vertices[0]]))
plt.plot(x, y, 'k-', linewidth=lw)
if label_domains:
plt.annotate(generate_entry_label(entry), center, ha='center',
va='center', fontsize=20, color="b").draggable()
plt.xlabel("pH")
plt.ylabel("E (V)")
plt.title(title, fontsize=20, fontweight='bold')
return plt
def plot_entry_stability(self, entry, pH_range=None, pH_resolution=100,
V_range=None, V_resolution=100, e_hull_max=1,
cmap='RdYlBu_r', **kwargs):
if pH_range is None:
pH_range = [-2, 16]
if V_range is None:
V_range = [-3, 3]
# plot the Pourbaix diagram
plt = self.get_pourbaix_plot(**kwargs)
pH, V = np.mgrid[pH_range[0]:pH_range[1]:pH_resolution * 1j, V_range[0]:V_range[1]:V_resolution * 1j]
stability = self._pbx.get_decomposition_energy(entry, pH, V)
# Plot stability map
plt.pcolor(pH, V, stability, cmap=cmap, vmin=0, vmax=e_hull_max)
cbar = plt.colorbar()
cbar.set_label("Stability of {} (eV/atom)".format(
generate_entry_label(entry)))
# Set ticklabels
# ticklabels = [t.get_text() for t in cbar.ax.get_yticklabels()]
# ticklabels[-1] = '>={}'.format(ticklabels[-1])
# cbar.ax.set_yticklabels(ticklabels)
return plt
def domain_vertices(self, entry):
"""
Returns the vertices of the Pourbaix domain.
Args:
entry: Entry for which domain vertices are desired
Returns:
list of vertices
"""
return self._pbx._stable_domain_vertices[entry]
def generate_entry_label(entry):
    """
    Generates a label for the pourbaix plotter

    Args:
        entry (PourbaixEntry or MultiEntry): entry to get a label for
    """
    # Multi-entries are labeled as the "+"-joined labels of their parts.
    if isinstance(entry, MultiEntry):
        parts = [latexify_ion(latexify(e.name)) for e in entry.entry_list]
        return " + ".join(parts)
    return latexify_ion(latexify(entry.name))
def latexify_ion(formula):
    """
    Convert bracketed ionic charges (e.g. "Fe[2+]") into LaTeX
    superscripts (e.g. "Fe$^{2+}$").
    """
    ion_charge = re.compile(r"()\[([^)]*)\]")
    return ion_charge.sub(r"\1$^{\2}$", formula)
|
mit
|
magullo/jmxtrans
|
jmxtrans-core/src/main/java/com/googlecode/jmxtrans/model/JmxResultProcessor.java
|
7249
|
/**
* The MIT License
* Copyright © 2010 JmxTrans team
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.googlecode.jmxtrans.model;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import javax.management.Attribute;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;
import java.lang.reflect.Array;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.google.common.collect.ImmutableList.Builder;
/**
 * Converts the JMX {@link Attribute}s collected for one MBean into the flat
 * list of {@link Result}s consumed by output writers, recursively unpacking
 * composite, tabular, array, map and iterable values into leaf results.
 */
public class JmxResultProcessor {

    private final Query query;
    private final ObjectInstance objectInstance;
    private final String className;
    private final String objDomain;
    private final List<Attribute> attributes;

    public JmxResultProcessor(Query query, ObjectInstance objectInstance, List<Attribute> attributes, String className, String objDomain) {
        this.query = query;
        this.objectInstance = objectInstance;
        this.className = className;
        this.objDomain = objDomain;
        this.attributes = attributes;
    }

    /**
     * Builds one Result per leaf value found in the attributes.
     * Null attributes are skipped.
     */
    public ImmutableList<Result> getResults() {
        ResultsBuilder builder = new ResultsBuilder();
        for (Attribute attribute : attributes) {
            if (attribute != null){
                builder.add(attribute.getName(), attribute.getValue());
            }
        }
        return builder.build();
    }

    /**
     * Result list builders.
     * Recursively walks in the value to add results.
     */
    private class ResultsBuilder {
        private final Builder<Result> accumulator = ImmutableList.builder();
        // Single timestamp so every result from one collection shares an epoch.
        private final long epoch = System.currentTimeMillis();

        // Entry point: start the recursive walk with an empty value path.
        private void add(String attributeName, Object value) {
            add(attributeName, ImmutableList.<String>builder(), value);
        }

        // ImmutableList.Builder cannot be copied, so extending the path for a
        // child requires building a fresh builder from the current elements.
        private ImmutableList.Builder<String> newValuePath(ImmutableList.Builder<String> valuePath, String name) {
            return ImmutableList.<String>builder()
                    .addAll(valuePath.build())
                    .add(name);
        }

        /**
         * Add one or more results from a value of any type.
         * This is a recursive function.
         */
        private void add(String attributeName, ImmutableList.Builder<String> valuePath, Object value) {
            if (value == null) {
                return;
            }
            if (value instanceof CompositeData) {
                add(attributeName, valuePath, (CompositeData) value);
            } else if (value instanceof CompositeData[]) {
                for (CompositeData cd : (CompositeData[]) value) {
                    // NOTE(review): the accumulated valuePath is dropped here
                    // (each element restarts from an empty path, with no index)
                    // — confirm this is the intended output shape.
                    add(attributeName, cd);
                }
            } else if (value instanceof ObjectName[]) {
                add(attributeName, valuePath, (ObjectName[]) value);
            } else if (value.getClass().isArray()) {
                // OMFG: this is nutty. some of the items in the array can be
                // primitive! great interview question!
                // (java.lang.reflect.Array handles primitive and object arrays alike.)
                for (int i = 0; i < Array.getLength(value); i++) {
                    Object val = Array.get(value, i);
                    add(attributeName, newValuePath(valuePath, Integer.toString(i)), val);
                }
            } else if (value instanceof TabularData) {
                add(attributeName, valuePath, (TabularData) value);
            } else if (value instanceof Map) {
                // Unchecked cast: JMX values are untyped; keys are stringified below.
                add(attributeName, valuePath, (Map<Object, Object>) value);
            } else if (value instanceof Iterable) {
                add(attributeName, valuePath, (Iterable) value);
            } else {
                // Leaf value: record it as a result.
                addNew(attributeName, valuePath, value);
            }
        }

        /**
         * Add results from a value of type map.
         */
        private void add(String attributeName, ImmutableList.Builder<String> valuePath, Map<Object, Object> map) {
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                add(attributeName, newValuePath(valuePath, entry.getKey().toString()), entry.getValue());
            }
        }

        /**
         * Add results from a value of type object name array.
         */
        private void add(String attributeName, ImmutableList.Builder<String> valuePath, ObjectName[] objs) {
            ImmutableMap.Builder<String, Object> values = ImmutableMap.builder();
            for (ObjectName obj : objs) {
                values.put(obj.getCanonicalName(), obj.getKeyPropertyListString());
            }
            addNew(attributeName, valuePath, values.build());
        }

        /**
         * Add results from a value of type composite data.
         * This is a recursive function.
         */
        private void add(String attributeName, ImmutableList.Builder<String> valuePath, CompositeData cds) {
            CompositeType t = cds.getCompositeType();
            Set<String> keys = t.keySet();
            for (String key : keys) {
                // An empty key list on the query means "all keys".
                if (!query.getKeys().isEmpty() && !query.getKeys().contains(key)) {
                    continue;
                }
                Object value = cds.get(key);
                add(attributeName, newValuePath(valuePath, key), value);
            }
        }

        /**
         * Add results from a value of type iterable.
         * This is a recursive function.
         */
        private void add(String attributeName, ImmutableList.Builder<String> valuePath, Iterable iterable) {
            int index = 0;
            for(Object value: iterable) {
                add(attributeName, newValuePath(valuePath, Integer.toString(index++)), value);
            }
        }

        /**
         * Add results from a value of type tabular data.
         * This is a recursive function.
         */
        private void add(String attributeName, ImmutableList.Builder<String> valuePath, TabularData tds) {
            // @see TabularData#keySet JavaDoc:
            // "Set<List<?>>" but is declared as a {@code Set<?>} for
            // compatibility reasons. The returned set can be used to iterate
            // over the keys."
            Set<List<?>> keys = (Set<List<?>>) tds.keySet();
            for (List<?> key : keys) {
                // ie: attributeName=LastGcInfo.Par Survivor Space
                // i haven't seen this be smaller or larger than List<1>, but
                // might as well loop it.
                CompositeData compositeData = tds.get(key.toArray());
                String attributeName2 = Joiner.on('.').join(key);
                add(attributeName, newValuePath(valuePath, attributeName2), compositeData);
            }
        }

        /**
         * Create and add a new result.
         */
        private void addNew(String attributeName, ImmutableList.Builder<String> valuePath, Object value) {
            accumulator.add(new Result(epoch, attributeName, className, objDomain, query.getResultAlias(), objectInstance.getObjectName().getKeyPropertyListString(), valuePath.build(), value));
        }

        /**
         * Return the built list
         */
        public ImmutableList<Result> build() {
            return accumulator.build();
        }
    }
}
|
mit
|
darrelljefferson/themcset.com
|
bin/test/a/f/m.php
|
37
|
<?php
namespace test\a\f;
// Empty placeholder class — presumably an autoloading/namespace test
// fixture; TODO confirm against the test harness that loads it.
class m { }
|
mit
|
p-try/CakePdf
|
src/Pdf/Engine/DomPdfEngine.php
|
1511
|
<?php
namespace CakePdf\Pdf\Engine;
use CakePdf\Pdf\CakePdf;
use Dompdf\Dompdf;
/**
 * PDF engine that renders the CakePdf document's HTML through Dompdf.
 */
class DomPdfEngine extends AbstractPdfEngine
{
    /**
     * Generates Pdf from html
     *
     * @return string raw pdf data
     */
    public function output()
    {
        // Dompdf needs writable font-cache/temp dirs; default both to TMP.
        $defaults = [
            'fontCache' => TMP,
            'tempDir' => TMP
        ];
        // Configured options take precedence over the defaults.
        $options = (array)$this->config('options') + $defaults;
        $DomPDF = $this->_createInstance($options);
        $DomPDF->setPaper($this->_Pdf->pageSize(), $this->_Pdf->orientation());
        $DomPDF = $this->_render($this->_Pdf, $DomPDF);
        return $this->_output($DomPDF);
    }

    /**
     * Creates the Dompdf instance.
     *
     * Split out so subclasses/tests can substitute their own instance.
     *
     * @param array $options The engine options.
     * @return Dompdf
     */
    protected function _createInstance($options)
    {
        return new Dompdf($options);
    }

    /**
     * Renders the Dompdf instance.
     *
     * @param CakePdf $Pdf The CakePdf instance that supplies the content to render.
     * @param Dompdf $DomPDF The Dompdf instance to render.
     * @return Dompdf
     */
    protected function _render($Pdf, $DomPDF)
    {
        $DomPDF->loadHtml($Pdf->html());
        $DomPDF->render();
        return $DomPDF;
    }

    /**
     * Generates the PDF output.
     *
     * @param Dompdf $DomPDF The Dompdf instance from which to generate the output from.
     * @return string
     */
    protected function _output($DomPDF)
    {
        return $DomPDF->output();
    }
}
|
mit
|
QuarnstromA/Tales-of-Kratos
|
lib/game/entities/menus/decreaseDemonFang.js
|
895
|
ig.module(
    'game.entities.menus.decreaseDemonFang'
)
.requires(
    'game.entities.abstractions.decreaseSkill'
)
.defines(function(){
    // Minus-button entity that refunds one invested point from the
    // Demon Fang arte back into the player's skill-point pool.
    ig.global.EntityDecreaseDemonFang = ig.global.EntityDecreaseSkill.extend({
        update: function(){
            var fang = ig.game.artesCatalog['demonFang'];
            // Fully opaque while a refund is possible; faded otherwise.
            if (fang.points > 0 && fang.active) {
                this.currentAnim = this.anims.idle;
            } else {
                this.currentAnim = this.anims.fade;
                this.currentAnim.alpha = 0.2;
            }
        },
        clicked: function() {
            /* Handle the click */
            console.log("Clicked minus");
            var fang = ig.game.artesCatalog['demonFang'];
            if (fang.active && fang.points > 0) {
                fang.level -= 1;
                ig.game.player.skillPoints += 1;
                fang.points -= 1;
            }
        },
    });
});
|
mit
|
noreiller/sfAssetsLibraryPlugin
|
modules/sfAsset/actions/actions.class.php
|
180
|
<?php
require_once sfConfig::get('sf_plugins_dir'). '/sfAssetsLibraryPlugin/modules/sfAsset/lib/BasesfAssetActions.class.php';
// Concrete asset-library actions class; all behavior is inherited from the
// plugin's BasesfAssetActions base class required above.
class sfAssetActions extends BasesfAssetActions
{
}
|
mit
|
bitpay/bitcore-wallet-service
|
lib/emailservice.js
|
12005
|
'use strict';
var _ = require('lodash');
var $ = require('preconditions').singleton();
var async = require('async');
var Mustache = require('mustache');
var nodemailer = require('nodemailer');
var log = require('npmlog');
log.debug = log.verbose;
var fs = require('fs');
var path = require('path');
var Utils = require('./common/utils');
var Defaults = require('./common/defaults');
var Storage = require('./storage');
var MessageBroker = require('./messagebroker');
var Lock = require('./lock');
var Model = require('./model');
// Per-notification-type email configuration:
//  - filename:     template basename looked up under templates/<language>/
//  - notifyDoer:   whether to email the copayer who triggered the event
//  - notifyOthers: whether to email the remaining copayers
var EMAIL_TYPES = {
  'NewCopayer': {
    filename: 'new_copayer',
    notifyDoer: false,
    notifyOthers: true,
  },
  'WalletComplete': {
    filename: 'wallet_complete',
    notifyDoer: true,
    notifyOthers: true,
  },
  'NewTxProposal': {
    filename: 'new_tx_proposal',
    notifyDoer: false,
    notifyOthers: true,
  },
  'NewOutgoingTx': {
    filename: 'new_outgoing_tx',
    notifyDoer: true,
    notifyOthers: true,
  },
  'NewIncomingTx': {
    filename: 'new_incoming_tx',
    notifyDoer: true,
    notifyOthers: true,
  },
  'TxProposalFinallyRejected': {
    filename: 'txp_finally_rejected',
    notifyDoer: false,
    notifyOthers: true,
  },
  'TxConfirmation': {
    filename: 'tx_confirmation',
    notifyDoer: true,
    notifyOthers: false,
  },
};
function EmailService() {};

/**
 * Initializes the service: discovers template languages, connects storage,
 * subscribes to the message broker, and prepares the lock and mailer.
 *
 * @param {Object} opts - emailOpts plus optional storage/messageBroker/
 *   lock/mailer overrides (mostly used by tests).
 * @param {Function} cb - invoked once every sub-system is ready.
 */
EmailService.prototype.start = function(opts, cb) {
  opts = opts || {};

  // Lists the subdirectories of basePath — one per available template language.
  function _readDirectories(basePath, cb) {
    fs.readdir(basePath, function(err, files) {
      if (err) return cb(err);
      async.filter(files, function(file, next) {
        fs.stat(path.join(basePath, file), function(err, stats) {
          return next(!err && stats.isDirectory());
        });
      }, function(dirs) {
        return cb(null, dirs);
      });
    });
  };

  var self = this;

  self.defaultLanguage = opts.emailOpts.defaultLanguage || 'en';
  self.defaultUnit = opts.emailOpts.defaultUnit || 'btc';
  self.templatePath = path.normalize((opts.emailOpts.templatePath || (__dirname + '/templates')) + '/');
  self.publicTxUrlTemplate = opts.emailOpts.publicTxUrlTemplate || {};
  self.subjectPrefix = opts.emailOpts.subjectPrefix || '[Wallet service]';
  self.from = opts.emailOpts.from;

  async.parallel([

    function(done) {
      _readDirectories(self.templatePath, function(err, res) {
        self.availableLanguages = res;
        done(err);
      });
    },
    function(done) {
      if (opts.storage) {
        self.storage = opts.storage;
        done();
      } else {
        self.storage = new Storage();
        self.storage.connect(opts.storageOpts, done);
      }
    },
    function(done) {
      self.messageBroker = opts.messageBroker || new MessageBroker(opts.messageBrokerOpts);
      // Every broker notification is routed straight into sendEmail.
      self.messageBroker.onMessage(_.bind(self.sendEmail, self));
      done();
    },
    function(done) {
      self.lock = opts.lock || new Lock(opts.lockOpts);
      done();
    },
    function(done) {
      // NOTE(review): `nodemailer` is referenced here but is not required at
      // the top of this file — confirm the import exists, otherwise this line
      // throws whenever opts.mailer is not supplied.
      self.mailer = opts.mailer || nodemailer.createTransport(opts.emailOpts);
      done();
    },
  ], function(err) {
    if (err) {
      log.error(err);
    }
    return cb(err);
  });
};
/**
 * Splits a raw template into subject (first line) and body (the rest).
 * HTML templates carry no subject line, so an empty one is prepended
 * to keep the line indices aligned.
 */
EmailService.prototype._compileTemplate = function(template, extension) {
  var lines = template.split('\n');
  if (extension == '.html') {
    lines.unshift('');
  }
  return {
    subject: lines[0],
    body: lines.slice(1).join('\n'),
  };
};
/**
 * Reads one template file from templatePath/<language>/<filename>.
 */
EmailService.prototype._readTemplateFile = function(language, filename, cb) {
  var self = this;
  var templateFile = path.join(self.templatePath, language, filename);
  fs.readFile(templateFile, 'utf8', function(err, template) {
    if (err) {
      return cb(new Error('Could not read template file ' + templateFile, err));
    }
    return cb(null, template);
  });
};
// TODO: cache for X minutes
/**
 * Loads and compiles the template for an email type in the recipient's
 * language, with the given extension ('.plain' or '.html').
 */
EmailService.prototype._loadTemplate = function(emailType, recipient, extension, cb) {
  var self = this;
  var filename = emailType.filename + extension;
  self._readTemplateFile(recipient.language, filename, function(err, raw) {
    if (err) return cb(err);
    return cb(null, self._compileTemplate(raw, extension));
  });
};
/**
 * Renders each field of a compiled template (subject/body) through
 * Mustache with the given data. Fails if any field fails to render.
 */
EmailService.prototype._applyTemplate = function(template, data, cb) {
  if (!data) return cb(new Error('Could not apply template to empty data'));
  var renderError;
  var result = {};
  Object.keys(template).forEach(function(field) {
    try {
      result[field] = Mustache.render(template[field], data);
    } catch (e) {
      log.error('Could not apply data to template', e);
      renderError = e;
      result[field] = undefined;
    }
  });
  if (renderError) return cb(renderError);
  return cb(null, result);
};
/**
 * Resolves which copayers should receive an email for this notification,
 * based on stored per-copayer preferences and the email type's
 * notifyDoer/notifyOthers routing flags.
 */
EmailService.prototype._getRecipientsList = function(notification, emailType, cb) {
  var self = this;

  self.storage.fetchWallet(notification.walletId, function(err, wallet) {
    if (err) return cb(err);
    self.storage.fetchPreferences(notification.walletId, null, function(err, preferences) {
      if (err) return cb(err);
      if (_.isEmpty(preferences)) return cb(null, []);

      // De-duplicate by email address across copayers.
      var usedEmails = {};
      var recipients = _.compact(_.map(preferences, function(p) {
        if (!p.email || usedEmails[p.email]) return;

        usedEmails[p.email] = true;

        // Honor the email type's doer/others routing flags.
        if (notification.creatorId == p.copayerId && !emailType.notifyDoer) return;
        if (notification.creatorId != p.copayerId && !emailType.notifyOthers) return;

        // Fall back to the default language when the preferred one has no templates.
        if (!_.includes(self.availableLanguages, p.language)) {
          if (p.language) {
            log.warn('Language for email "' + p.language + '" not available.');
          }
          p.language = self.defaultLanguage;
        }

        // Non-default coins are always displayed in their own unit.
        var unit;
        if (wallet.coin != Defaults.COIN) {
          unit = wallet.coin;
        } else {
          unit = p.unit || self.defaultUnit;
        }

        return {
          copayerId: p.copayerId,
          emailAddress: p.email,
          language: p.language,
          unit: unit,
        };
      }));

      return cb(null, recipients);
    });
  });
};
/**
 * Assembles the view-model handed to Mustache for one recipient:
 * the notification payload plus wallet/copayer metadata, a formatted
 * amount, and (for tx notifications) a public explorer URL.
 */
EmailService.prototype._getDataForTemplate = function(notification, recipient, cb) {
  var self = this;
  // TODO: Declare these in BWU
  var UNIT_LABELS = {
    btc: 'BTC',
    bit: 'bits',
    bch: 'BCH',
  };

  var data = _.cloneDeep(notification.data);
  data.subjectPrefix = _.trim(self.subjectPrefix) + ' ';
  if (data.amount) {
    try {
      // Render the amount in the recipient's preferred unit.
      var unit = recipient.unit.toLowerCase();
      data.amount = Utils.formatAmount(+data.amount, unit) + ' ' + UNIT_LABELS[unit];
    } catch (ex) {
      return cb(new Error('Could not format amount', ex));
    }
  }

  self.storage.fetchWallet(notification.walletId, function(err, wallet) {
    if (err) return cb(err);
    data.walletId = wallet.id;
    data.walletName = wallet.name;
    data.walletM = wallet.m;
    data.walletN = wallet.n;

    var copayer = _.find(wallet.copayers, {
      id: notification.creatorId
    });

    if (copayer) {
      data.copayerId = copayer.id;
      data.copayerName = copayer.name;
    }

    if (notification.type == 'TxProposalFinallyRejected' && data.rejectedBy) {
      var rejectors = _.map(data.rejectedBy, function(copayerId) {
        return _.find(wallet.copayers, {
          id: copayerId
        }).name
      });
      data.rejectorsNames = rejectors.join(', ');
    }

    if (_.includes(['NewIncomingTx', 'NewOutgoingTx'], notification.type) && data.txid) {
      // NOTE(review): assumes publicTxUrlTemplate always has an entry for
      // wallet.coin — if it doesn't, this indexing throws. Confirm the
      // configuration shape.
      var urlTemplate = self.publicTxUrlTemplate[wallet.coin][wallet.network];
      if (urlTemplate) {
        try {
          data.urlForTx = Mustache.render(urlTemplate, data);
        } catch (ex) {
          log.warn('Could not render public url for tx', ex);
        }
      }
    }

    return cb(null, data);
  });
};
/**
 * Hands a stored Email record to the mailer; the HTML body is attached
 * only when present.
 */
EmailService.prototype._send = function(email, cb) {
  var self = this;
  var mailOptions = {
    from: email.from,
    to: email.to,
    subject: email.subject,
    text: email.bodyPlain,
  };
  if (email.bodyHtml) {
    mailOptions.html = email.bodyHtml;
  }
  self.mailer.send(mailOptions)
    .then(function(result) {
      log.debug('Message sent: ', result || '');
      return cb(null, result);
    })
    .catch(function(err) {
      log.error('An error occurred when trying to send email to ' + email.to, err);
      return cb(err);
    });
};
/**
 * For every recipient, renders the plain-text (required) and HTML
 * (optional) templates with that recipient's data.
 * Yields a map of language -> { plain: {subject, body}, html?: {...} }.
 */
EmailService.prototype._readAndApplyTemplates = function(notification, emailType, recipientsList, cb) {
  var self = this;

  async.map(recipientsList, function(recipient, next) {
    async.waterfall([

      function(next) {
        self._getDataForTemplate(notification, recipient, next);
      },
      function(data, next) {
        async.map(['plain', 'html'], function(type, next) {
          self._loadTemplate(emailType, recipient, '.' + type, function(err, template) {
            // A missing HTML template is tolerated; a missing plain one is not.
            if (err && type == 'html') return next();
            if (err) return next(err);
            self._applyTemplate(template, data, function(err, res) {
              return next(err, [type, res]);
            });
          });
        }, function(err, res) {
          return next(err, _.fromPairs(res.filter(Boolean)));
        });
      },
      function(result, next) {
        next(null, result);
      },
    ], function(err, res) {
      next(err, [recipient.language, res]);
    });
  }, function(err, res) {
    // NOTE(review): if err is set, `res` may be sparse/undefined and the
    // filter below would throw — confirm the error path upstream.
    return cb(err, _.fromPairs(res.filter(Boolean)));
  });
};
/**
 * Decides whether a notification warrants an email at all.
 * New tx proposals only notify multisig (m > 1) wallets, since in a 1-of-N
 * wallet the proposal is immediately final.
 */
EmailService.prototype._checkShouldSendEmail = function(notification, cb) {
  var self = this;

  if (notification.type != 'NewTxProposal') return cb(null, true);
  self.storage.fetchWallet(notification.walletId, function(err, wallet) {
    // Fix: previously a fetch error fell through to `wallet.m`, throwing a
    // TypeError on the undefined wallet instead of reporting the error.
    if (err) return cb(err);
    return cb(null, wallet.m > 1);
  });
};
/**
 * Entry point (wired to the message broker): builds, persists and sends the
 * emails triggered by one notification. Work is locked per notification id
 * and deduplicated via fetchEmailByNotification so concurrent workers don't
 * double-send.
 *
 * @param {Object} notification - broker notification (type, walletId, data).
 * @param {Function} [cb] - optional callback; defaults to a no-op.
 */
EmailService.prototype.sendEmail = function(notification, cb) {
  var self = this;

  cb = cb || function() {};

  // Unknown notification types produce no email.
  var emailType = EMAIL_TYPES[notification.type];
  if (!emailType) return cb();

  self._checkShouldSendEmail(notification, function(err, should) {
    if (err) return cb(err);
    if (!should) return cb();

    self._getRecipientsList(notification, emailType, function(err, recipientsList) {
      // Fix: the lookup error was previously swallowed — an undefined
      // recipients list looked "empty" and the failure went unreported.
      if (err) return cb(err);
      if (_.isEmpty(recipientsList)) return cb();

      // TODO: Optimize so one process does not have to wait until all others are done
      // Instead set a flag somewhere in the db to indicate that this process is free
      // to serve another request.
      self.lock.runLocked('email-' + notification.id, cb, function(cb) {
        self.storage.fetchEmailByNotification(notification.id, function(err, email) {
          if (err) return cb(err);
          // Already handled by another worker.
          if (email) return cb();

          async.waterfall([

            function(next) {
              self._readAndApplyTemplates(notification, emailType, recipientsList, next);
            },
            function(contents, next) {
              // Persist one Email record per recipient before sending.
              async.map(recipientsList, function(recipient, next) {
                var content = contents[recipient.language];
                var email = Model.Email.create({
                  walletId: notification.walletId,
                  copayerId: recipient.copayerId,
                  from: self.from,
                  to: recipient.emailAddress,
                  subject: content.plain.subject,
                  bodyPlain: content.plain.body,
                  bodyHtml: content.html ? content.html.body : null,
                  notificationId: notification.id,
                });
                self.storage.storeEmail(email, function(err) {
                  return next(err, email);
                });
              }, next);
            },
            function(emails, next) {
              // Send each email and record sent/fail status; an individual
              // send failure is stored on the record but does not abort the
              // batch (hence the ignored err below).
              async.each(emails, function(email, next) {
                self._send(email, function(err) {
                  if (err) {
                    email.setFail();
                  } else {
                    email.setSent();
                  }
                  self.storage.storeEmail(email, next);
                });
              }, function(err) {
                return next();
              });
            },
          ], function(err) {
            if (err) {
              log.error('An error ocurred generating email notification', err);
            }
            return cb(err);
          });
        });
      });
    });
  });
};
module.exports = EmailService;
|
mit
|
v-iam/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_06_01/models/topology_resource.py
|
1509
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TopologyResource(Model):
    """The network resource topology information for the given resource group.

    :param name: Name of the resource.
    :type name: str
    :param id: ID of the resource.
    :type id: str
    :param location: Resource location.
    :type location: str
    :param associations: Holds the associations the resource has with other
     resources in the resource group.
    :type associations: list of :class:`TopologyAssociation
     <azure.mgmt.network.v2017_06_01.models.TopologyAssociation>`
    """

    # msrest (de)serialization map: attribute name -> JSON key and wire type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'associations': {'key': 'associations', 'type': '[TopologyAssociation]'},
    }

    def __init__(self, name=None, id=None, location=None, associations=None):
        # Autogenerated initializer; simply stores the optional fields.
        self.name = name
        self.id = id
        self.location = location
        self.associations = associations
|
mit
|
GenericHero/SSH.NET
|
src/Renci.SshNet.Tests/Classes/Messages/Authentication/BannerMessageTest.cs
|
777
|
using Renci.SshNet.Messages.Authentication;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using Renci.SshNet.Tests.Common;
namespace Renci.SshNet.Tests.Messages.Authentication
{
    /// <summary>
    ///This is a test class for BannerMessageTest and is intended
    ///to contain all BannerMessageTest Unit Tests
    ///</summary>
    [TestClass()]
    public class BannerMessageTest : TestBase
    {
        /// <summary>
        ///A test for BannerMessage Constructor
        ///</summary>
        [TestMethod()]
        public void BannerMessageConstructorTest()
        {
            BannerMessage target = new BannerMessage();
            // Placeholder: only verifies the constructor does not throw;
            // marked inconclusive until real assertions are implemented.
            Assert.Inconclusive("TODO: Implement code to verify target");
        }
    }
}
|
mit
|
andrewphan/node_todo_azure
|
node_modules/docker/src/docker.js
|
29115
|
// # docker.js
// ### _A simple documentation generator based on [docco](http://jashkenas.github.com/docco/)_
// **Docker** is a really simple documentation generator, which originally started out as a
// pure-javascript port of **docco**, but which eventually gained many extra little features
// which somewhat break docco's philosophy of being a quick-and-dirty thing.
//
// Docker source-code can be found on [GitHub](https://github.com/jbt/docker)
//
// Take a look at the [original docco project](http://jashkenas.github.com/docco/) to get a feel
// for the sort of functionality this provides. In short: **Markdown**-based displaying of code comments
// next to syntax-highlighted code. This page is the result of running docker against itself.
//
// The command-line usage of docker is somewhat more useful than that of docco. To use, simply run
//
// ```sh
// ./docker -i path/to/code -o path/to/docs [a_file.js a_dir]
// ```
//
// Docker will then recurse into the code root directory (or alternatively just the files
// and directories you specify) and document-ize all the files it can.
// The folder structure will be preserved in the document root.
//
// More detailed usage instructions and examples can be found in the [README](../README.md)
//
// ## Differences from docco
// The main differences from docco are:
//
// - **jsDoc support**: support for **jsDoc**-style code comments, via [Dox](https://github.com/visionmedia/dox). You can see some examples of
// the sort of output you get below.
//
// - **Folder Tree** and **Heading Navigation**: collapsible sidebar with folder tree and jump-to
// heading links for easy navigation between many files and within long files.
//
// - **Markdown File Support**: support for plain markdown files, like the [README](../README.md) for this project.
//
// - **Colour Schemes**: support for multiple output colour schemes
//
//
// So let's get started!
// ## Node Modules
// Include lots of node modules
var stripIndent = require('strip-indent');
var MarkdownIt = require('markdown-it');
var highlight = require('highlight.js');
var repeating = require('repeating');
var mkdirp = require('mkdirp');
var extend = require('extend');
var watchr = require('watchr');
var async = require('async');
var path = require('path');
var less = require('less');
var dox = require('dox');
var ejs = require('ejs');
var toc = require('toc');
var fs = require('fs');
// Language details exist in [languages.js](./languages.js)
var languages = require('./languages');
// Create an instance of markdown-it, which we'll use for prettyifying all the comments
var md = new MarkdownIt({
  html: true,
  langPrefix: '',
  // Syntax-highlight fenced code blocks via highlight.js when the language
  // is recognized; fall back to no highlighting on any error.
  highlight: function(str, lang) {
    if (lang && highlight.getLanguage(lang)) {
      try {
        return highlight.highlight(lang, str).value;
      } catch (__) {}
    }
    return '';
  }
});

// ## Markdown Link Overriding
//
// Relative links to files need to be remapped to their rendered file name,
// so that they can be written without `.html` everywhere else without breaking
md.renderer.rules.link_open = function(tokens, idx, options, env, self) {
  var hrefIndex = tokens[idx].attrIndex('href');
  // If the link a relative link, then put '.html' on the end.
  // (Links containing '//' are treated as absolute and left untouched.)
  if (hrefIndex >= 0 && !/\/\//.test(tokens[idx].attrs[hrefIndex][1])) {
    tokens[idx].attrs[hrefIndex][1] += '.html';
  }
  return self.renderToken.apply(self, arguments);
};
/**
 * ## Docker Constructor
 *
 * Creates a new docker instance. All methods are called on one instance of this object.
 *
 * Input is an `opts` containing all the options as specified below.
 */
var Docker = module.exports = function(opts) {
  // Initialise all opts with default values
  opts = this.options = extend({
    inDir: path.resolve('.'),
    outDir: path.resolve('doc'),
    onlyUpdated: false,
    colourScheme: 'default',
    ignoreHidden: false,
    sidebarState: true,
    exclude: false,
    lineNums: false,
    multiLineOnly: false,
    js: [],
    css: [],
    extras: []
  }, opts);

  // Generate an exclude regex for the given pattern
  // (simple glob translation: '.' is escaped, '*' becomes '.*', and commas
  // separate alternatives; matches are anchored to path prefixes).
  if (typeof opts.exclude === 'string') {
    this.excludePattern = new RegExp('^(' +
      opts.exclude.replace(/\./g, '\\.')
        .replace(/\*/g, '.*')
        .replace(/,/g, '|') +
      ')(/|$)');
  } else {
    this.excludePattern = false;
  }

  // Initialise an object which'll store all our directory structure
  this.tree = {};

  // Load bundled extras: each extra contributes a JS and a CSS file.
  var extrasRoot = path.resolve(__dirname, '..', 'extras');
  opts.extras.forEach(function(e) {
    opts.js.push(path.join(extrasRoot, e, e + '.js'));
    opts.css.push(path.join(extrasRoot, e, e + '.css'));
  });
};
/**
 * ## Docker.prototype.doc
 *
 * Generate documentation for a bunch of files
 *
 * @this Docker
 * @param {Array} files Array of file paths relative to the `inDir` to generate documentation for.
 */
Docker.prototype.doc = function(files) {
  // Take a defensive copy of the queue so callers can't mutate it later.
  this.files = files.slice();

  // Kick off processing unless a run is already in flight.
  if (!this.running) this.run();
};
/**
 * ## Docker.prototype.watch
 *
 * Watches the input directory for file changes and updates docs whenever a file is updated
 *
 * @param {Array} files Array of file paths relative to the `inDir` to generate documentation for.
 */
Docker.prototype.watch = function(files) {
  this.watching = true;
  this.watchFiles = files;

  // Function to call when a file is changed. We put this on a timeout to account
  // for several file changes happening in quick succession.
  // (250ms debounce; an in-progress run defers the update until it finishes.)
  var uto = false, self = this;
  function update() {
    if (self.running) return (uto = setTimeout(update, 250));
    self.doc(self.watchFiles);
    uto = false;
  }

  // Create a watchr instance to watch all changes in the input directory
  watchr.watch({
    path: this.options.inDir,
    listener: function() {
      if (!uto) uto = setTimeout(update, 250);
    }
  });

  // Aaaaand, go!
  this.doc(files);
};
/**
 * ## Docker.prototype.run
 *
 * Loops through all the queued file and processes them individually
 */
Docker.prototype.run = function() {
  var self = this;
  this.running = true;

  // While we stil have any files to process, take the first one and process it.
  // Note: `doc()` may swap in a new queue mid-run; the condition re-reads
  // self.files on every iteration, so new work is picked up automatically.
  async.whilst(
    function() {
      return self.files.length > 0;
    },
    function(cb) {
      self.process(self.files.shift(), cb);
    },
    function() {
      // Once we're done, say we're no longer running and copy over all the static stuff
      self.running = false;
      self.copySharedResources();
    }
  );
};
/**
 * ## Docker.prototype.addFileToTree
 *
 * Adds a file to the file tree to show in the sidebar.
 *
 * @param {string} filename Name of file to add to the tree
 */
Docker.prototype.addFileToTree = function(filename) {
  // Split the file's path into the individual directories
  // (first stripping any leading path separator).
  filename = filename.replace(new RegExp('^' + path.sep.replace(/([\/\\])/g, '\\$1')), '');
  var bits = filename.split(path.sep);

  // Loop through all the directories and process the folder structure into `this.tree`.
  //
  // `this.tree` takes the format:
  // ```js
  // {
  //   dirs: {
  //     'child_dir_name': { /* same format as tree */ },
  //     'other_child_name': // etc...
  //   },
  //   files: [
  //     'filename.js',
  //     'filename2.js',
  //     // etc...
  //   ]
  // }
  // ```
  var currDir = this.tree;
  var lastBit = bits.pop();
  bits.forEach(function(bit) {
    if (!currDir.dirs) currDir.dirs = {};
    if (!currDir.dirs[bit]) currDir.dirs[bit] = {};
    currDir = currDir.dirs[bit];
  });
  if (!currDir.files) currDir.files = [];
  // Avoid duplicate entries when the same file is processed twice (watch mode).
  if (currDir.files.indexOf(lastBit) === -1) currDir.files.push(lastBit);
};
/**
 * ## Docker.prototype.process
 *
 * Process the given file. If it's a directory, list all the children and queue those.
 * If it's a file, add it to the queue.
 *
 * @param {string} file Path to the file to process
 * @param {function} cb Callback to call when done
 */
Docker.prototype.process = function(file, cb) {
  // If we should be ignoring this file, do nothing and immediately callback.
  if (this.excludePattern && this.excludePattern.test(file)) {
    cb();
    return;
  }

  var self = this;
  var resolved = path.resolve(this.options.inDir, file);
  fs.lstat(resolved, function lstatCb(err, stat) {
    if (err) {
      // Something unexpected happened on the filesystem.
      // Nothing really that we can do about it, so throw it and be done with it
      return cb(err);
    }

    if (stat && stat.isSymbolicLink()) {
      // Follow the symlink once, then re-stat the target via lstatCb.
      // NOTE(review): fs.exists is deprecated in modern Node — consider
      // fs.access; behavior here is unchanged.
      fs.readlink(resolved, function(err, link) {
        if (err) {
          // Something unexpected happened on the filesystem.
          // Nothing really that we can do about it, so throw it and be done with it
          return cb(err);
        }
        resolved = path.resolve(path.dirname(resolved), link);
        fs.exists(resolved, function(exists) {
          if (!exists) {
            console.error('Unable to follow symlink to ' + resolved + ': file does not exist');
            cb(null);
          } else {
            fs.lstat(resolved, lstatCb);
          }
        });
      });
    } else if (stat && stat.isDirectory()) {
      // Find all children of the directory and queue those
      fs.readdir(resolved, function(err, list) {
        if (err) {
          // Something unexpected happened on the filesystem.
          // Nothing really that we can do about it, so throw it and be done with it
          return cb(err);
        }
        list.forEach(function(f) {
          // For everything in the directory, queue it unless it looks hiden and we've
          // been told to ignore hidden files.
          if (self.options.ignoreHidden && f.charAt(0).match(/[\._]/)) return;
          self.files.push(path.join(file, f));
        });
        cb();
      });
    } else {
      // Wahey, we have a normal file. Go ahead and process it then.
      self.processFile(file, cb);
    }
  });
};
/**
* ## Docker.prototype.processFile
*
* Processes a given file. At this point we know the file exists and
* isn't any kind of directory or symlink.
*
* @param {string} file Path to the file to process
* @param {function} cb Callback to call when done
*/
Docker.prototype.processFile = function(file, cb) {
  var resolved = path.resolve(this.options.inDir, file);
  var self = this;
  // First, check to see whether we actually should be processing this file and bail if not
  this.decideWhetherToProcess(resolved, function(shouldProcess) {
    if (!shouldProcess) return cb();
    fs.readFile(resolved, 'utf-8', function(err, data) {
      if (err) return cb(err);
      // Grab the language details for the file and bail if we don't understand it.
      var lang = self.detectLanguage(resolved, data);
      if (lang === false) return cb();
      self.addFileToTree(file);
      if (lang.type === 'markdown') {
        // Markdown files get rendered straight to HTML.
        self.renderMarkdownFile(data, resolved, cb);
      } else {
        // Everything else (including unknown types) is treated as code:
        // split into doc/code sections, highlight, then render.
        var sections = self.parseSections(data, lang);
        self.highlight(sections, lang);
        self.renderCodeFile(sections, lang, resolved, cb);
      }
    });
  });
};
/**
* ## Docker.prototype.decideWhetherToProcess
*
* Decide whether or not a file should be processed. If the `onlyUpdated`
* flag was set on initialization, only allow processing of files that
* are newer than their counterpart generated doc file.
*
* Fires a callback function with either true or false depending on whether
* or not the file should be processed
*
* @param {string} filename The name of the file to check
* @param {function} callback Callback function
*/
Docker.prototype.decideWhetherToProcess = function(filename, callback) {
  // Without the onlyUpdated flag, every file gets processed unconditionally.
  if (!this.options.onlyUpdated) {
    callback(true);
    return;
  }
  // Otherwise only process when the source is newer than its generated doc.
  this.fileIsNewer(filename, this.outFile(filename), callback);
};
/**
* ## Docker.prototype.fileIsNewer
*
* Sees whether one file is newer than another
*
* @param {string} file File to check
* @param {string} otherFile File to compare to
* @param {function} callback Callback to fire with true if file is newer than otherFile
*/
Docker.prototype.fileIsNewer = function(file, otherFile, callback) {
  fs.stat(otherFile, function(err, outStat) {
    // If the output file doesn't exist (or can't be statted for any other
    // reason), then definitely process this file. The original only handled
    // ENOENT and crashed on other stat errors with an undefined outStat.
    if (err) return callback(true);
    fs.stat(file, function(err, inStat) {
      // If the input file itself can't be statted we can't compare mtimes;
      // report "not newer" rather than crashing on an undefined inStat.
      if (err) return callback(false);
      // Process the file if the input is newer than the output
      callback(+inStat.mtime > +outStat.mtime);
    });
  });
};
/**
* ## Docker.prototype.parseSections
*
* Parse the content of a file into individual sections.
* A section is defined to be one block of code with an accompanying comment
*
* Returns an array of section objects, which take the form
* ```js
* {
* doc_text: 'foo', // String containing comment content
* code_text: 'bar' // Accompanying code
* }
* ```
* @param {string} data The contents of the script file
* @param {object} lang The language data for the script file
* @return {Array} array of section objects
*/
Docker.prototype.parseSections = function(data, lang) {
  var lines = data.split('\n');
  // The section currently being accumulated; flushed into `sections`
  // whenever a new comment begins after some code.
  var section = {
    docs: '',
    code: ''
  };
  var sections = [];
  var inMultiLineComment = false;
  var multiLine = '';
  var jsDocData;
  // Matches this language's single-line comment marker (plus one optional
  // trailing space) at the start of a line.
  var commentRegex = new RegExp('^\\s*' + lang.comment + '\\s?');
  var self = this;
  // Render a snippet of markdown; optionally strip the wrapping <p> tags
  // so the result can be used inline.
  function mark(a, stripParas) {
    var h = md.render(a.replace(/(^\s*|\s*$)/, ''));
    return stripParas ? h.replace(/<\/?p>/g, '') : h;
  }
  lines.forEach(function(line, i) {
    // Only match against parts of the line that don't appear in strings
    var matchable = line.replace(/(["'])((?:[^\\\1]|(?:\\\\)*?\\[^\\])*?)\1/g, '$1$1');
    // Apply any language-specific literal replacements before matching.
    if (lang.literals) {
      lang.literals.forEach(function(replace) {
        matchable = matchable.replace(replace[0], replace[1]);
      });
    }
    if (lang.multiLine) {
      // If we are currently in a multiline comment, behave differently
      if (inMultiLineComment) {
        // End-multiline comments should match regardless of whether they're 'quoted'
        if (line.match(lang.multiLine[1])) {
          // Once we have reached the end of the multiline, take the whole content
          // of the multiline comment, and parse it as jsDoc.
          inMultiLineComment = false;
          multiLine += line;
          // Replace block comment delimiters with whitespace of the same length
          // This way we can safely outdent without breaking too many things if the
          // comment has been deliberately indented. For example, the lines in the
          // following comment should all be outdented equally:
          //
          // ```c
          // /* A big long multiline
          //    comment that should get
          //    outdented properly */
          // ```
          multiLine = multiLine
            .replace(lang.multiLine[0], function(a) { return repeating(' ', a.length); })
            .replace(lang.multiLine[1], function(a) { return repeating(' ', a.length); });
          multiLine = stripIndent(multiLine);
          if (lang.jsDoc) {
            // Strip off leading * characters.
            multiLine = multiLine.replace(/^[ \t]*\*? ?/gm, '');
            jsDocData = dox.parseComment(multiLine, { raw: true });
            // Put markdown parser on the data so it can be accessed in the template
            jsDocData.md = mark;
            section.docs += self.renderTemplate('jsDoc', jsDocData);
          } else {
            section.docs += '\n' + multiLine + '\n';
          }
          multiLine = '';
        } else {
          // Still inside the comment: keep accumulating its lines.
          multiLine += line + '\n';
        }
        return;
      } else if (
        // We want to match the start of a multiline comment only if the line doesn't also match the
        // end of the same comment, or if a single-line comment is started before the multiline
        // So for example the following would not be treated as a multiline starter:
        // ```js
        // alert('foo'); // Alert some foo /* Random open comment thing
        // ```
        matchable.match(lang.multiLine[0]) &&
        !matchable.replace(lang.multiLine[0], '').match(lang.multiLine[1]) &&
        (!lang.comment || !matchable.split(lang.multiLine[0])[0].match(commentRegex))
      ) {
        // Here we start parsing a multiline comment. Store away the current section and start a new one
        if (section.code) {
          if (!section.code.match(/^\s*$/) || !section.docs.match(/^\s*$/)) sections.push(section);
          section = { docs: '', code: '' };
        }
        inMultiLineComment = true;
        multiLine = line + '\n';
        return;
      }
    }
    if (
      !self.options.multiLineOnly &&
      lang.comment &&
      matchable.match(commentRegex) &&
      (!lang.commentsIgnore || !matchable.match(lang.commentsIgnore)) &&
      !matchable.match(/#!/)
    ) {
      // This is for single-line comments. Again, store away the last section and start a new one
      if (section.code) {
        if (!section.code.match(/^\s*$/) || !section.docs.match(/^\s*$/)) sections.push(section);
        section = { docs: '', code: '' };
      }
      section.docs += line.replace(commentRegex, '') + '\n';
    } else if (!lang.commentsIgnore || !line.match(lang.commentsIgnore)) {
      // If this is the first line of active code, store it in the section
      // so we can grab it for line numbers later
      if (!section.firstCodeLine) {
        section.firstCodeLine = i + 1;
      }
      section.code += line + '\n';
    }
  });
  // Flush whatever was accumulated after the last comment boundary.
  sections.push(section);
  return sections;
};
/**
* ## Docker.prototype.detectLanguage
*
* Provides language-specific params for a given file name.
*
* @param {string} filename The name of the file to test
* @param {string} contents The contents of the file (to check for shebang)
* @return {object} Object containing all of the language-specific params
*/
Docker.prototype.detectLanguage = function(filename, contents) {
  // First try to detect the language from the file extension
  var ext = path.extname(filename);
  // Bit of a hacky way of incorporating .C for C++. This case-sensitive test
  // must happen BEFORE the dot is stripped and the extension lowercased —
  // in the original code the comparison ran after stripping, so `ext` could
  // never equal '.C' and the branch was unreachable.
  if (ext === '.C') return languages.cpp;
  ext = ext.replace(/^\./, '').toLowerCase();
  var base = path.basename(filename).toLowerCase();
  // Match either by extension or by well-known file name (e.g. Makefile).
  for (var i in languages) {
    if (!languages.hasOwnProperty(i)) continue;
    if (languages[i].extensions &&
      languages[i].extensions.indexOf(ext) !== -1) return languages[i];
    if (languages[i].names &&
      languages[i].names.indexOf(base) !== -1) return languages[i];
  }
  // If that doesn't work, see if we can grab a shebang
  var shebangRegex = /^#!\s*(?:\/usr\/bin\/env)?\s*(?:[^\n]*\/)*([^\/\n]+)(?:\n|$)/;
  var match = shebangRegex.exec(contents);
  if (match) {
    for (var j in languages) {
      if (!languages.hasOwnProperty(j)) continue;
      if (languages[j].executables && languages[j].executables.indexOf(match[1]) !== -1) return languages[j];
    }
  }
  // If we still can't figure it out, give up and return false.
  return false;
};
/**
* ## Docker.prototype.highlight
*
* Highlights all the sections of a file using **highlightjs**
* Given an array of section objects, loop through them, and for each
* section generate pretty html for the comments and the code, and put them in
* `docHtml` and `codeHtml` respectively
*
* @param {Array} sections Array of section objects
 * @param {string} language Language with which to highlight the file
*/
Docker.prototype.highlight = function(sections, lang) {
  // The highlighter may use a different name for this language than we do.
  var langName = lang.highlightLanguage || lang.language;
  sections.forEach(function(section) {
    section.codeHtml = highlight.highlight(langName, section.code).value;
    section.docHtml = md.render(section.docs);
  });
};
/**
* ## Docker.prototype.addAnchors
*
* Automatically assign an id to each section based on any headings using **toc** helpers
*
 * @param {string} docHtml The rendered documentation HTML for the section
* @param {number} idx The index of the section in the whole array.
* @param {Object} headings Object in which to keep track of headings for avoiding clashes
*/
Docker.prototype.addAnchors = function(docHtml, idx, headings) {
  var headingRegex = /<h(\d)(\s*[^>]*)>([\s\S]+?)<\/h\1>/gi; // toc.defaults.headers
  if (!docHtml.match(headingRegex)) {
    // No heading to hang an id on: fall back to an anchor named after the
    // section index instead.
    return [
      '<div class="pilwrap">',
      '  <a class="pilcrow" href="#section-' + (idx + 1) + '" id="section-' + (idx + 1) + '"></a>',
      '</div>',
      docHtml
    ].join('\n');
  }
  // Wrap each heading in an identified div with a pilcrow link, sanitizing the
  // heading text into an id and tracking used ids so they stay unique.
  return docHtml.replace(headingRegex, function(whole, level, attrs, content) {
    var id = toc.unique(headings.ids, toc.anchor(content));
    headings.list.push({ id: id, text: toc.untag(content), level: level });
    return [
      '<div class="pilwrap" id="' + id + '">',
      '  <h' + level + attrs + '>',
      '    <a href="#' + id + '" name="' + id + '" class="pilcrow"></a>',
      content,
      '  </h' + level + '>',
      '</div>'
    ].join('\n');
  });
};
/**
* ## Docker.prototype.addLineNumbers
*
* Adds line numbers to rendered code HTML
*
* @param {string} html The code HTML
* @param {number} first Line number of the first code line
*/
Docker.prototype.addLineNumbers = function(html, first) {
  // Prefix every line of the code HTML with a self-linking line-number anchor.
  return html
    .split('\n')
    .map(function(line, offset) {
      var num = first + offset;
      var anchor = '<a class="line-num" href="#line-' + num + '" id="line-' + num + '" data-line="' + num + '"></a> ';
      return anchor + line;
    })
    .join('\n');
};
/**
* ## Docker.prototype.renderCodeFile
*
* Given an array of sections, render them all out to a nice HTML file
*
* @param {Array} sections Array of sections containing parsed data
* @param {Object} language The language data for the file in question
* @param {string} filename Name of the file being processed
* @param {function} cb Callback function to fire when we're done
*/
Docker.prototype.renderCodeFile = function(sections, language, filename, cb) {
  var self = this;
  var headings = { ids: {}, list: [] };
  // Decorate every section: heading anchors always, line numbers only when
  // the lineNums option was set.
  sections.forEach(function(section, idx) {
    section.docHtml = self.addAnchors(section.docHtml, idx, headings);
    if (self.options.lineNums) {
      section.codeHtml = self.addLineNumbers(section.codeHtml, section.firstCodeLine);
    }
  });
  // Render the decorated sections through the code template, then write the
  // finished document out to disk.
  var rendered = this.renderTemplate('code', {
    title: path.basename(filename),
    sections: sections,
    language: language.language
  });
  this.makeOutputFile(filename, rendered, headings, cb);
};
/**
* ## Docker.prototype.renderMarkdownFile
*
* Renders the output for a Markdown file into HTML
*
* @param {string} data The markdown file content
* @param {string} filename Name of the file being processed
* @param {function} cb Callback function to fire when we're done
*/
Docker.prototype.renderMarkdownFile = function(data, filename, cb) {
  var headings = { ids: {}, list: [] };
  // Render the markdown, give its headings anchors, wrap it in the docs
  // container div, and hand the result off to the shared output writer.
  var html = this.addAnchors(md.render(data), 0, headings);
  this.makeOutputFile(filename, '<div class="docs markdown">' + html + '</div>', headings, cb);
};
/**
* ## Docker.prototype.makeOutputFile
*
* Shared code for generating an output file with the given content.
* Renders the given content in a template along with its headings and
* writes it to the output file.
*
* @param {string} filename Path to the input file
* @param {string} content The string content to render into the template
* @param {Object} headings List of headings + ids
* @param {function} cb Callback to call when done
*/
Docker.prototype.makeOutputFile = function(filename, content, headings, cb) {
  // Decide which path to store the output on.
  var outFile = this.outFile(filename);
  // Calculate the location of the input root relative to the output file.
  // This is necessary so we can link to the stylesheet in the output HTML using
  // a relative href rather than an absolute one
  var outDir = path.dirname(outFile);
  var relativeOut = path.resolve(outDir)
    .replace(path.resolve(this.options.outDir), '')
    .replace(/^[\/\\]/, '');
  // How many directory levels below the output root we are; '' means the
  // output file sits at the root itself.
  var levels = relativeOut == '' ? 0 : relativeOut.split(path.sep).length;
  var relDir = repeating('../', levels);
  // Render the html file using our template
  var html = this.renderTemplate('tmpl', {
    title: path.basename(filename),
    relativeDir: relDir,
    content: content,
    headings: headings,
    sidebar: this.options.sidebarState,
    filename: filename.replace(this.options.inDir, '').replace(/^[\\\/]/, ''),
    // Only basenames are needed: copySharedResources copies the assets
    // directly into the output root.
    js: this.options.js.map(function(f) { return path.basename(f); }),
    css: this.options.css.map(function(f) { return path.basename(f); })
  });
  // Recursively create the output directory, clean out any old version of the
  // output file, then save our new file.
  this.writeFile(outFile, html, 'Generated: ' + outFile.replace(this.options.outDir, ''), cb);
};
/**
* ## Docker.prototype.copySharedResources
*
* Copies the shared CSS and JS files to the output directories
*/
Docker.prototype.copySharedResources = function() {
  var self = this;
  // Dump the collected file tree where the sidebar script can load it.
  self.writeFile(
    path.join(self.options.outDir, 'doc-filelist.js'),
    'var tree=' + JSON.stringify(self.tree) + ';',
    'Saved file tree to doc-filelist.js'
  );
  // Generate the CSS file using LESS. First, load the less file.
  fs.readFile(path.join(__dirname, '..', 'res', 'style.less'), function(err, file) {
    // The original ignored read errors and crashed later on an undefined
    // buffer; report the failure explicitly instead.
    if (err) return console.error('Unable to read style.less: ' + err);
    // Now try to grab the colours out of whichever highlight theme was used
    var hlpath = require.resolve('highlight.js');
    var cspath = path.resolve(path.dirname(hlpath), '..', 'styles');
    var colours = require('./getColourScheme')(self.options.colourScheme);
    // Now compile the LESS to CSS
    less.render(file.toString().replace('COLOURSCHEME', self.options.colourScheme), {
      paths: [ cspath ],
      globalVars: colours
    }, function(err, out) {
      if (err) return console.error('Unable to compile doc-style.css: ' + err);
      // Now we've got the rendered CSS, write it out.
      self.writeFile(
        path.join(self.options.outDir, 'doc-style.css'),
        out.css,
        'Compiled CSS to doc-style.css'
      );
    });
  });
  // Copy the shared client-side script across verbatim.
  fs.readFile(path.join(__dirname, '..', 'res', 'script.js'), function(err, file) {
    if (err) return console.error('Unable to read script.js: ' + err);
    self.writeFile(
      path.join(self.options.outDir, 'doc-script.js'),
      file,
      'Copied JS to doc-script.js'
    );
  });
  // Finally copy any extra user-supplied JS/CSS files into the output root.
  this.options.js.concat(this.options.css).forEach(function(ext) {
    var fn = path.basename(ext);
    fs.readFile(path.resolve(ext), function(err, file) {
      if (err) return console.error('Unable to read ' + fn + ': ' + err);
      self.writeFile(path.join(self.options.outDir, fn), file, 'Copied ' + fn);
    });
  });
};
/**
* ## Docker.prototype.outFile
*
* Generates the output path for a given input file
*
* @param {string} filename Name of the input file
* @return {string} Name to use for the generated doc file
*/
Docker.prototype.outFile = function(filename) {
  // Map the input path onto the output directory and append '.html'.
  var inRoot = path.resolve(this.options.inDir);
  var mapped = filename.replace(inRoot, this.options.outDir) + '.html';
  return path.normalize(mapped);
};
/**
* ## Docker.prototype.renderTemplate
*
* Renders an EJS template with the given data
*
* @param {string} templateName The name of the template to use
* @param {object} obj Object containing parameters for the template
* @return {string} Rendered output
*/
Docker.prototype.renderTemplate = function(templateName, obj) {
  // Compile templates lazily and cache them on the instance. The synchronous
  // read is acceptable here: each template is tiny and read only once.
  this._templates = this._templates || {};
  var compiled = this._templates[templateName];
  if (!compiled) {
    var tmplFile = path.join(__dirname, '..', 'res', templateName + '.ejs');
    compiled = ejs.compile(fs.readFileSync(tmplFile).toString());
    this._templates[templateName] = compiled;
  }
  return compiled(obj);
};
/**
* ## Docker.prototype.writeFile
*
* Saves a file, making sure the directory already exists and overwriting any existing file
*
* @param {string} filename The name of the file to save
* @param {string} fileContent Content to save to the file
* @param {string} doneLog String to console.log when done
* @param {function} doneCallback Callback to fire when done
*/
Docker.prototype.writeFile = function(filename, fileContent, doneLog, doneCallback) {
  // NOTE(review): errors from mkdirp and fs.writeFile are silently ignored
  // here — a failed write still logs doneLog and fires doneCallback with no
  // error argument. Confirm this best-effort behavior is intended before
  // changing it, since callers (e.g. copySharedResources) rely on the
  // callback always firing.
  mkdirp(path.dirname(filename), function() {
    fs.writeFile(filename, fileContent, function() {
      if (doneLog) console.log(doneLog);
      if (doneCallback) doneCallback();
    });
  });
};
|
mit
|
Unity-Technologies/ScriptableRenderLoop
|
com.unity.render-pipelines.high-definition/Runtime/Core/Debugging/Prefabs/Scripts/DebugUIHandlerWidget.cs
|
2419
|
using System;
namespace UnityEngine.Experimental.Rendering.UI
{
public class DebugUIHandlerWidget : MonoBehaviour
{
[HideInInspector]
public Color colorDefault = new Color(0.8f, 0.8f, 0.8f, 1f);
[HideInInspector]
public Color colorSelected = new Color(0.25f, 0.65f, 0.8f, 1f);
public DebugUIHandlerWidget parentUIHandler { get; set; }
public DebugUIHandlerWidget previousUIHandler { get; set; }
public DebugUIHandlerWidget nextUIHandler { get; set; }
protected DebugUI.Widget m_Widget;
protected virtual void OnEnable() {}
internal virtual void SetWidget(DebugUI.Widget widget)
{
m_Widget = widget;
}
internal DebugUI.Widget GetWidget()
{
return m_Widget;
}
protected T CastWidget<T>()
where T : DebugUI.Widget
{
var casted = m_Widget as T;
string typeName = m_Widget == null ? "null" : m_Widget.GetType().ToString();
if (casted == null)
throw new InvalidOperationException("Can't cast " + typeName + " to " + typeof(T));
return casted;
}
// Returns `true` if selection is allowed, `false` to skip to the next/previous item
public virtual bool OnSelection(bool fromNext, DebugUIHandlerWidget previous)
{
return true;
}
public virtual void OnDeselection() {}
public virtual void OnAction() {}
public virtual void OnIncrement(bool fast) {}
public virtual void OnDecrement(bool fast) {}
public virtual DebugUIHandlerWidget Previous()
{
if (previousUIHandler != null)
return previousUIHandler;
if (parentUIHandler != null)
return parentUIHandler;
return null;
}
public virtual DebugUIHandlerWidget Next()
{
if (nextUIHandler != null)
return nextUIHandler;
if (parentUIHandler != null)
{
var p = parentUIHandler;
while (p != null)
{
var n = p.nextUIHandler;
if (n != null)
return n;
p = p.parentUIHandler;
}
}
return null;
}
}
}
|
mit
|
ptyz030529/skysport
|
src/main/java/com/skysport/core/action/LoginAction.java
|
822
|
package com.skysport.core.action;
import com.skysport.core.bean.permission.User;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@Scope("prototype")
@Controller
@RequestMapping("/")
public class LoginAction {
    // Login entry point: renders the "main" view unconditionally.
    // NOTE(review): the bound User and the request/response parameters are
    // never read here, and despite its name this method handles /login, not
    // products — presumably authentication happens elsewhere (e.g. a filter);
    // confirm before relying on this.
    @RequestMapping(value = "/login")
    @ResponseBody
    public ModelAndView getProducts(User user, HttpServletRequest request,
                                    HttpServletResponse response) {
        ModelAndView mav = new ModelAndView("main");
        return mav;
    }
}
|
mit
|
axilleas/gitlabhq
|
spec/features/users/login_spec.rb
|
22558
|
require 'spec_helper'
describe 'Login' do
include TermsHelper
include UserLoginHelper
before do
stub_authentication_activity_metrics(debug: true)
end
describe 'password reset token after successful sign in' do
it 'invalidates password reset token' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
user = create(:user)
expect(user.reset_password_token).to be_nil
visit new_user_password_path
fill_in 'user_email', with: user.email
click_button 'Reset password'
user.reload
expect(user.reset_password_token).not_to be_nil
find('a[href="#login-pane"]').click
gitlab_sign_in(user)
expect(current_path).to eq root_path
user.reload
expect(user.reset_password_token).to be_nil
end
end
describe 'initial login after setup' do
it 'allows the initial admin to create a password' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
# This behavior is dependent on there only being one user
User.delete_all
user = create(:admin, password_automatically_set: true)
visit root_path
expect(current_path).to eq edit_user_password_path
expect(page).to have_content('Please create a password for your new account.')
fill_in 'user_password', with: 'password'
fill_in 'user_password_confirmation', with: 'password'
click_button 'Change your password'
expect(current_path).to eq new_user_session_path
expect(page).to have_content(I18n.t('devise.passwords.updated_not_active'))
fill_in 'user_login', with: user.username
fill_in 'user_password', with: 'password'
click_button 'Sign in'
expect(current_path).to eq root_path
end
it 'does not show flash messages when login page' do
visit root_path
expect(page).not_to have_content('You need to sign in or sign up before continuing.')
end
end
describe 'with a blocked account' do
it 'prevents the user from logging in' do
expect(authentication_metrics)
.to increment(:user_blocked_counter)
.and increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
user = create(:user, :blocked)
gitlab_sign_in(user)
expect(page).to have_content('Your account has been blocked.')
end
it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do
expect(authentication_metrics)
.to increment(:user_blocked_counter)
.and increment(:user_unauthenticated_counter)
.and increment(:user_session_destroyed_counter).twice
user = create(:user, :blocked)
expect { gitlab_sign_in(user) }.not_to change { user.reload.sign_in_count }
end
end
describe 'with the ghost user' do
it 'disallows login' do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
gitlab_sign_in(User.ghost)
expect(page).to have_content('Invalid Login or password.')
end
it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
expect { gitlab_sign_in(User.ghost) }
.not_to change { User.ghost.reload.sign_in_count }
end
end
describe 'with two-factor authentication' do
def enter_code(code)
fill_in 'user_otp_attempt', with: code
click_button 'Verify code'
end
context 'with valid username/password' do
let(:user) { create(:user, :two_factor) }
before do
gitlab_sign_in(user, remember: true)
expect(page).to have_content('Two-Factor Authentication')
end
it 'does not show a "You are already signed in." error message' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code(user.current_otp)
expect(page).not_to have_content('You are already signed in.')
end
context 'using one-time code' do
it 'allows login with valid code' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code(user.current_otp)
expect(current_path).to eq root_path
end
it 'persists remember_me value via hidden field' do
field = first('input#user_remember_me', visible: false)
expect(field.value).to eq '1'
end
it 'blocks login with invalid code' do
# TODO invalid 2FA code does not generate any events
# See gitlab-org/gitlab-ce#49785
enter_code('foo')
expect(page).to have_content('Invalid two-factor code')
end
it 'allows login with invalid code, then valid code' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code('foo')
expect(page).to have_content('Invalid two-factor code')
enter_code(user.current_otp)
expect(current_path).to eq root_path
end
end
context 'using backup code' do
let(:codes) { user.generate_otp_backup_codes! }
before do
expect(codes.size).to eq 10
# Ensure the generated codes get saved
user.save
end
context 'with valid code' do
it 'allows login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_two_factor_authenticated_counter)
enter_code(codes.sample)
expect(current_path).to eq root_path
end
it 'invalidates the used code' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_two_factor_authenticated_counter)
expect { enter_code(codes.sample) }
.to change { user.reload.otp_backup_codes.size }.by(-1)
end
it 'invalidates backup codes twice in a row' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter).twice
.and increment(:user_session_override_counter).twice
.and increment(:user_two_factor_authenticated_counter).twice
.and increment(:user_session_destroyed_counter)
random_code = codes.delete(codes.sample)
expect { enter_code(random_code) }
.to change { user.reload.otp_backup_codes.size }.by(-1)
gitlab_sign_out
gitlab_sign_in(user)
expect { enter_code(codes.sample) }
.to change { user.reload.otp_backup_codes.size }.by(-1)
end
end
context 'with invalid code' do
it 'blocks login' do
# TODO, invalid two factor authentication does not increment
# metrics / counters, see gitlab-org/gitlab-ce#49785
code = codes.sample
expect(user.invalidate_otp_backup_code!(code)).to eq true
user.save!
expect(user.reload.otp_backup_codes.size).to eq 9
enter_code(code)
expect(page).to have_content('Invalid two-factor code.')
end
end
end
end
# Sign-in through an external SAML identity provider; exercises how the app
# combines the provider-supplied authn_context with local 2FA enforcement.
context 'when logging in via OAuth' do
let(:user) { create(:omniauth_user, :two_factor, extern_uid: 'my-uid', provider: 'saml')}
let(:mock_saml_response) do
File.read('spec/fixtures/authentication/saml_response.xml')
end
before do
stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'],
providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
end
# The IdP asserts it already performed a second factor (OTP over SMS), so
# the app should trust it and skip its own 2FA prompt.
context 'when authn_context is worth two factors' do
let(:mock_saml_response) do
File.read('spec/fixtures/authentication/saml_response.xml')
.gsub('urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS')
end
it 'signs user in without prompting for second factor' do
# TODO, OAuth authentication does not fire events,
# see gitlab-org/gitlab-ce#49786
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
sign_in_using_saml!
expect(page).not_to have_content('Two-Factor Authentication')
expect(current_path).to eq root_path
end
end
# The IdP only asserts password authentication (default fixture), so the
# app must still prompt for the user's one-time password.
context 'when two factor authentication is required' do
it 'shows 2FA prompt after OAuth login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_two_factor_authenticated_counter)
sign_in_using_saml!
expect(page).to have_content('Two-Factor Authentication')
enter_code(user.current_otp)
expect(current_path).to eq root_path
end
end
# Helper: signs in via the stubbed SAML provider with the canned response.
def sign_in_using_saml!
gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
end
end
end
# Baseline password-only sign-in flows (no second factor configured).
describe 'without two-factor authentication' do
context 'with correct username and password' do
let(:user) { create(:user) }
it 'allows basic login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq root_path
# A fresh login must not warn about an already-existing session.
expect(page).not_to have_content('You are already signed in.')
end
end
context 'with invalid username and password' do
let(:user) { create(:user, password: 'not-the-default') }
it 'blocks invalid login' do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
gitlab_sign_in(user)
expect(page).to have_content('Invalid Login or password.')
end
end
end
# Flows where 2FA is mandated by admin/group policy but the user has not set
# it up yet: they are redirected to the 2FA configuration page, and may or
# may not be allowed to postpone depending on the grace period.
describe 'with required two-factor authentication enabled' do
let(:user) { create(:user) }
# TODO: otp_grace_period_started_at
# 2FA required by the instance-wide application setting.
context 'global setting' do
before do
stub_application_setting(require_two_factor_authentication: true)
end
context 'with grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 48)
end
# Within the grace period configuration can still be skipped.
context 'within the grace period' do
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content('The global settings require you to enable Two-Factor Authentication for your account. You need to do this before ')
end
it 'allows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
click_link 'Configure it later'
expect(current_path).to eq root_path
end
end
# After the grace period the 'Configure it later' escape hatch disappears.
context 'after the grace period' do
let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The global settings require you to enable Two-Factor Authentication for your account.'
)
end
it 'disallows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).not_to have_link('Configure it later')
end
end
end
# A zero-hour grace period means configuration is immediately mandatory.
context 'without grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 0)
end
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The global settings require you to enable Two-Factor Authentication for your account.'
)
end
end
end
# 2FA required via group membership; the prompt lists every requiring group.
context 'group setting' do
before do
group1 = create :group, name: 'Group 1', require_two_factor_authentication: true
group1.add_user(user, GroupMember::DEVELOPER)
group2 = create :group, name: 'Group 2', require_two_factor_authentication: true
group2.add_user(user, GroupMember::DEVELOPER)
end
context 'with grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 48)
end
context 'within the grace period' do
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
'Two-Factor Authentication for your account. You need to do this ' \
'before ')
end
it 'allows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
click_link 'Configure it later'
expect(current_path).to eq root_path
end
end
context 'after the grace period' do
let(:user) { create(:user, otp_grace_period_started_at: 9999.hours.ago) }
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
'Two-Factor Authentication for your account.'
)
end
it 'disallows skipping two-factor configuration', :js do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).not_to have_link('Configure it later')
end
end
end
context 'without grace period defined' do
before do
stub_application_setting(two_factor_grace_period: 0)
end
it 'redirects to two-factor configuration page' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
gitlab_sign_in(user)
expect(current_path).to eq profile_two_factor_auth_path
expect(page).to have_content(
'The group settings for Group 1 and Group 2 require you to enable ' \
'Two-Factor Authentication for your account.'
)
end
end
end
end
# Rendering checks for the sign-in page's tab/pane widget under different
# provider configurations; ensure_tab_pane_correctness is a shared helper.
describe 'UI tabs and panes' do
context 'when no defaults are changed' do
it 'correctly renders tabs and panes' do
ensure_tab_pane_correctness
end
end
context 'when signup is disabled' do
before do
stub_application_setting(signup_enabled: false)
end
it 'correctly renders tabs and panes' do
ensure_tab_pane_correctness
end
end
# NOTE(review): the `false` argument presumably relaxes part of the shared
# check when extra provider tabs are present — confirm against the helper.
context 'when ldap is enabled' do
before do
visit new_user_session_path
allow(page).to receive(:form_based_providers).and_return([:ldapmain])
allow(page).to receive(:ldap_enabled).and_return(true)
end
it 'correctly renders tabs and panes' do
ensure_tab_pane_correctness(false)
end
end
context 'when crowd is enabled' do
before do
visit new_user_session_path
allow(page).to receive(:form_based_providers).and_return([:crowd])
allow(page).to receive(:crowd_enabled?).and_return(true)
end
it 'correctly renders tabs and panes' do
ensure_tab_pane_correctness(false)
end
end
end
# Terms-of-service enforcement: the terms page must interleave correctly
# with 2FA setup, password expiry and missing-email flows.
context 'when terms are enforced' do
let(:user) { create(:user) }
before do
enforce_terms
end
it 'asks to accept the terms on first login' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(root_path)
expect(page).not_to have_content('You are already signed in.')
end
it 'does not ask for terms when the user already accepted them' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
accept_terms(user)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect(current_path).to eq(root_path)
end
context 'when 2FA is required for the user' do
before do
group = create(:group, require_two_factor_authentication: true)
group.add_developer(user)
end
# Ordering contract: terms acceptance comes first, then 2FA enrolment.
context 'when the user did not enable 2FA' do
it 'asks to set 2FA before asking to accept the terms' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(profile_two_factor_auth_path)
fill_in 'pin_code', with: user.reload.current_otp
click_button 'Register with two-factor app'
click_link 'Proceed'
expect(current_path).to eq(profile_account_path)
end
end
context 'when the user already enabled 2FA' do
before do
user.update!(otp_required_for_login: true,
otp_secret: User.generate_otp_secret(32))
end
it 'asks the user to accept the terms' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
.and increment(:user_two_factor_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
fill_in 'user_otp_attempt', with: user.reload.current_otp
click_button 'Verify code'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(root_path)
end
end
end
context 'when the users password is expired' do
before do
user.update!(password_expires_at: Time.parse('2018-05-08 11:29:46 UTC'))
end
it 'asks the user to accept the terms before setting a new password' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
visit new_user_session_path
fill_in 'user_login', with: user.email
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(new_profile_password_path)
fill_in 'user_current_password', with: '12345678'
fill_in 'user_password', with: 'new password'
fill_in 'user_password_confirmation', with: 'new password'
click_button 'Set new password'
expect(page).to have_content('Password successfully changed')
end
end
# OAuth users created with a placeholder email must supply a real one,
# but only after accepting the terms.
context 'when the user does not have an email configured' do
let(:user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'saml', email: 'temp-email-for-oauth-user@gitlab.localhost') }
before do
stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config])
end
it 'asks the user to accept the terms before setting an email' do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
.and increment(:user_session_override_counter)
gitlab_sign_in_via('saml', user, 'my-uid')
expect_to_be_on_terms_page
click_button 'Accept terms'
expect(current_path).to eq(profile_path)
fill_in 'Email', with: 'hello@world.com'
click_button 'Update profile settings'
expect(page).to have_content('Profile was successfully updated')
end
end
end
end
|
mit
|
bukun/maplet
|
setup.py
|
1249
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
'''
For pypi
'''
from setuptools import setup, find_packages

# Short description shown on the PyPI project page.
desc = 'Maplet '

# Read the long description up-front with an explicit context manager so the
# file handle is closed deterministically (the original leaked it via a bare
# open() inside the setup() call).
with open('README.rst', encoding='utf-8') as readme:
    long_description_text = readme.read()

setup(
    name='torcms_maplet',
    version='0.1.1',
    keywords=('torcms', 'tornado'),
    description=desc,
    long_description=long_description_text,
    license='MIT License',
    url='',
    author='gislite',
    author_email='gislite@osgeo.cn',
    packages=find_packages(
        # include=('torcms',),
        exclude=("tester", "torcms_tester",)),
    include_package_data=True,
    platforms='any',
    zip_safe=True,
    # Fix: the original declared install_requires=[''] — an empty-string
    # requirement is not a valid PEP 508 specifier. This package declares
    # no runtime dependencies.
    install_requires=[],
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries :: Python Modules',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6'
    ],
)
|
mit
|
ssgrn/Rust-Matrix-Computations
|
target/doc/clippy/reexport/sidebar-items.js
|
343
|
// Auto-generated sidebar index (appears to be rustdoc output, given the
// target/doc path) — regenerated on every doc build; do not edit by hand.
initSidebarItems({"struct":[["Ident","An identifier contains a Name (index into the interner table) and a SyntaxContext to track renaming and macro expansion per Flatt et al., \"Macros That Work Together\""],["Name","A name is a part of an identifier, representing a string or gensym. It's the result of interning."]],"type":[["NodeId",""]]});
|
mit
|
danmcclain/ya-form
|
tests/unit/ya-form-test.js
|
761
|
import Ember from 'ember';
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';
const { Object: EmberObject, set } = Ember;
// Integration tests for the ya-form component: it should render a <form>
// element and yield a block param ('form') exposing the wrapped object.
moduleForComponent('ya-form', {
integration: true,
beforeEach() {
// Fresh fixture per test: an Ember object wrapping a nested user POJO.
set(this, 'wrapped', EmberObject.create({
user: { firstName: 'Derek', lastName: 'Zoolander' }
}));
}
});
test('should render the form', function(assert) {
this.render(hbs`<div>
{{#ya-form wrapped as |form|}}
<p>{{form.wrapped.user.firstName}}</p>
{{/ya-form}}
</div>`);
assert.equal(this.$('form').length, 1, 'there is a form');
assert.equal(this.$('form p:contains("Derek")').length, 1, 'yields contents');
});
|
mit
|
rasmus/EventFlow
|
Source/EventFlow/Aggregates/IApply.cs
|
1440
|
// The MIT License (MIT)
//
// Copyright (c) 2015-2018 Rasmus Mikkelsen
// Copyright (c) 2015-2018 eBay Software Foundation
// https://github.com/eventflow/EventFlow
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
namespace EventFlow.Aggregates
{
/// <summary>
/// Contract for types that can apply a specific aggregate event to
/// mutate their state. Contravariant in the event type.
/// </summary>
/// <typeparam name="TAggregateEvent">The aggregate event type this implementation handles.</typeparam>
public interface IApply<in TAggregateEvent>
where TAggregateEvent : IAggregateEvent
{
/// <summary>Applies the given event to the implementer's state.</summary>
void Apply(TAggregateEvent aggregateEvent);
}
}
|
mit
|
Temoto-kun/piano-keyboard
|
src/piano-keyboard.js
|
25463
|
/**
* Script description.
* @author TheoryOfNekomata
* @date 2017-02-10
*/
(function (root, name, dependencies, factory) {
var Component = function Oatmeal(deps) {
return (root[name] = factory.apply(null, deps));
};
if (typeof define === 'function' && define.amd) {
define(dependencies, function () {
return new Component(
Array.prototype.slice.call(arguments)
);
});
return;
}
if (typeof module === 'object' && module.exports) {
module.exports = new Component(
dependencies.map(function (depName) {
return require(depName);
})
);
return;
}
return new Component(
dependencies.map(function (depName) {
return root[depName];
})
);
})(this, 'PianoKeyboard', [], function pianoKeyboard() {
//noinspection MagicNumberJS
var
// enum for pitch classes
PitchClass = {
GSharp: 0,
A: 1,
ASharp: 2,
B: 3,
C: 4,
CSharp: 5,
D: 6,
DSharp: 7,
E: 8,
F: 9,
FSharp: 10,
G: 11
},
// all the pitch classes in a single octave
pitchClassCount = Object.keys(PitchClass).length,
// only the white keys' count in a single octave
whiteKeyPitchClassCount = Object.keys(PitchClass).filter(function (key) { return key.indexOf('Sharp') < 0; }).length,
// default start key
grandPianoStartKey = 1,
// default end key
grandPianoEndKey = 88,
// default velocity on playing in the keyboard
keyboardVelocity = 100,
// maximum velocity
maxVelocity = 127,
// adjustment for black key positioning
blackKeyRatioAdjustment = 0.015,
// key bindings
// Maps DOM keyCode (event.which) -> piano key number per layout name.
bindings = {
'standard': {
81: 40,
50: 41,
87: 42,
51: 43,
69: 44,
82: 45,
53: 46,
84: 47,
54: 48,
89: 49,
55: 50,
85: 51,
73: 52,
57: 53,
79: 54,
48: 55,
80: 56,
219: 57,
187: 58,
221: 59,
90: 28,
83: 29,
88: 30,
68: 31,
67: 32,
86: 33,
71: 34,
66: 35,
72: 36,
78: 37,
74: 38,
77: 39,
188: 40,
76: 41,
190: 42,
186: 43,
191: 44
},
// TODO: Janko layout bindings are not implemented yet (empty map).
'janko': {
}
};
return function PianoKeyboard(kbdEl) {
var
// sharp suffix
sharpSuffix = '#',
// keyboard class
kbdClass = 'piano-keyboard',
// normalized keyboard data
kbdData = {};
/**
 * Determines which octave a key belongs to.
 * @param keyNumber The key's number, where 0 = G#0.
 * @returns {number} The octave number.
 */
function getOctave(keyNumber) {
    var shifted = keyNumber + pitchClassCount - 4;
    return Math.floor(shifted / pitchClassCount);
}
/**
 * Reduces a key number to its pitch class.
 * @param keyNumber The key's number, where 0 = G#0 (may be negative).
 * @returns {number} The pitch class (0 = G#, 11 = G).
 */
function getPitchClass(keyNumber) {
    // JS '%' keeps the sign of the dividend, so shift negative
    // remainders back into the [0, pitchClassCount) range.
    var pc = keyNumber % pitchClassCount;
    return pc < 0 ? pc + pitchClassCount : pc;
}
/**
 * Gets the left position ratio of a key.
 * @param keyNumber The key's number, starting with 0 = G#0.
 * @returns {number} The left position ratio, with 1 = spanning a full octave.
 */
function getLeftPositionRatio(keyNumber) {
// Lookup table indexed by pitch class (0 = G#, ..., 4 = C at 0, ..., 11 = G).
// White keys are placed in sevenths of an octave (whiteKeyPitchClassCount),
// black keys in twelfths (pitchClassCount) plus a small visual adjustment.
var ratios = [
(8 / pitchClassCount) + blackKeyRatioAdjustment / 2,
(5 / whiteKeyPitchClassCount),
(10 / pitchClassCount),
(6 / whiteKeyPitchClassCount),
0,
(1 / pitchClassCount) + blackKeyRatioAdjustment / 2,
(1 / whiteKeyPitchClassCount),
(3 / pitchClassCount) + blackKeyRatioAdjustment / 2,
(2 / whiteKeyPitchClassCount),
(3 / whiteKeyPitchClassCount),
(6 / pitchClassCount) + blackKeyRatioAdjustment,
(4 / whiteKeyPitchClassCount)
];
return ratios[getPitchClass(keyNumber)];
}
/**
 * Resolves a key number to the display name of its pitch class.
 * @param keyNumber The key's number, where 0 = G#0.
 * @returns {string} The pitch class name (e.g. 'C' or 'F#').
 */
function getPitchClassName(keyNumber) {
    // Ordered to match the PitchClass enum: index 0 = G#, index 11 = G.
    var names = [
        'G' + sharpSuffix, 'A', 'A' + sharpSuffix, 'B',
        'C', 'C' + sharpSuffix, 'D', 'D' + sharpSuffix,
        'E', 'F', 'F' + sharpSuffix, 'G'
    ];
    return names[getPitchClass(keyNumber)];
}
/**
 * Decides which CSS unit key widths are expressed in.
 * @returns {string} '%' when the configured width is 'auto', else 'px'.
 */
function getWidthUnit() {
    var isNumericWidth = !isNaN(parseFloat(kbdData.whiteKeyWidth));
    if (!isNumericWidth && kbdData.whiteKeyWidth === 'auto') {
        return '%';
    }
    return 'px';
}
/**
 * Gets the unit-less black key width of the keyboard.
 * @returns {number} The unit-less width of the black keys.
 */
function getBlackKeyWidth() {
// Black keys are 7/12 of a white key's width, so twelve of them span the
// same octave width as seven white keys.
var blackKeyWidth = getWhiteKeyWidth() * whiteKeyPitchClassCount / pitchClassCount;
// In 'balanced' proportion mode every key gets the same width.
if (kbdData.keyProportion === 'balanced') {
return getWhiteKeyWidth();
}
// Pixel widths are rounded up (presumably to avoid sub-pixel gaps);
// percentage widths keep full precision.
return getWidthUnit() === 'px' ? Math.ceil(blackKeyWidth) : blackKeyWidth;
}
/**
 * Gets the unit-less white key width of the keyboard.
 * @returns {number} The unit-less width of the white keys.
 */
function getWhiteKeyWidth() {
var whiteKeyWidth = parseFloat(kbdData.whiteKeyWidth),
startKey = kbdData.startKey,
endKey = kbdData.endKey;
// 'auto' width: distribute 100% of the container across the keys.
if (isNaN(whiteKeyWidth)) {
switch (kbdData.whiteKeyWidth) {
case 'auto':
if (kbdData.keyProportion === 'balanced') {
// Balanced mode sizes ALL keys equally, so divide by the total key count.
return 100 / (kbdData.endKey - kbdData.startKey + 1);
}
// Otherwise only white keys contribute to the row width.
return 100 / getWhiteKeysInRange(startKey, endKey);
default:
break;
}
}
return whiteKeyWidth;
}
/**
 * Gets the unit-less horizontal offset of the keyboard.
 * @returns {number} The horizontal offset of the keyboard.
 */
function getHorizontalOffset() {
var whiteKeyWidth = getWhiteKeyWidth(),
ratio = getLeftPositionRatio(kbdData.startKey),
octave = getOctave(kbdData.startKey);
// The offset is the absolute position of startKey; subtracting it from
// each key's absolute position (see getLeftOffset) anchors startKey at 0.
if (kbdData.keyProportion === 'balanced') {
return (kbdData.startKey - 1) * whiteKeyWidth;
}
return (whiteKeyWidth * whiteKeyPitchClassCount * ratio) + (octave * whiteKeyWidth * whiteKeyPitchClassCount);
}
/**
 * Counts the white keys within an inclusive range of keys.
 * @param startKey The first key number of the range.
 * @param endKey The last key number of the range.
 * @returns {number} The number of white keys within the range.
 */
function getWhiteKeysInRange(startKey, endKey) {
    // The original wrapped the loop body in a per-iteration IIFE, which is
    // unnecessary here (no closures escape the loop); a plain loop body is
    // clearer and avoids a function allocation per key.
    var whiteKeys = 0,
        i;
    for (i = startKey; i <= endKey; i++) {
        switch (getPitchClass(i)) {
            // Skip the five black keys of each octave.
            case PitchClass.GSharp:
            case PitchClass.ASharp:
            case PitchClass.CSharp:
            case PitchClass.DSharp:
            case PitchClass.FSharp:
                break;
            default:
                ++whiteKeys;
        }
    }
    return whiteKeys;
}
/**
 * Gets the left offset of the key.
 * @param keyNumber The key's number, starting with 0 = G#0.
 * @returns {number} The unit-less left offset of the key, relative to startKey.
 */
function getLeftOffset(keyNumber) {
var whiteKeyWidth = getWhiteKeyWidth(),
ratio = getLeftPositionRatio(keyNumber),
octave = getOctave(keyNumber);
if (kbdData.keyProportion === 'balanced') {
switch (getPitchClass(keyNumber)) {
// NOTE(review): these five naturals are pulled back by half a key width
// in balanced mode — presumably to centre them around the adjacent
// black-key cuts; confirm against the rendered layout.
case PitchClass.A:
case PitchClass.B:
case PitchClass.D:
case PitchClass.E:
case PitchClass.G:
return ((keyNumber - 1) * whiteKeyWidth - whiteKeyWidth / 2) - getHorizontalOffset();
default:
break;
}
return ((keyNumber - 1) * whiteKeyWidth) - getHorizontalOffset();
}
// Proportional mode: octave base offset plus the in-octave ratio, shifted
// by getHorizontalOffset() so that startKey lands at 0.
return (whiteKeyWidth * whiteKeyPitchClassCount * ratio + (octave * whiteKeyWidth * whiteKeyPitchClassCount)) - getHorizontalOffset();
}
/**
 * Generates styles for white keys.
 * @returns {string} The CSS string for styling white keys.
 */
function generateStyleForWhiteKeys() {
var css = '',
whiteKeyWidth = getWhiteKeyWidth(),
widthUnit = getWidthUnit(),
i;
// Emit an absolute left-offset rule for EVERY key (black keys too; only
// their width is handled separately in generateStyleForBlackKeys).
for (i = kbdData.startKey; i <= kbdData.endKey; i++) {
(function (i) {
var left;
left = getLeftOffset(i);
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.key[data-key="' + i + '"]{left:' + left + widthUnit + '}';
})(i);
}
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key{width:' + whiteKeyWidth + widthUnit + '}';
// For fixed (non-percentage) widths the container needs an explicit width too.
if (widthUnit !== '%') {
if (kbdData.keyProportion === 'balanced') {
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]{width:' + ((kbdData.endKey - kbdData.startKey + 1) * whiteKeyWidth) + widthUnit + '}'
} else {
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]{width:' + (getWhiteKeysInRange(kbdData.startKey, kbdData.endKey) * whiteKeyWidth) + widthUnit + '}'
}
}
// Balanced mode: white keys get 1x/1.5x/2x widths per pitch, with the
// first/last key of the range special-cased via :first-child/:last-child.
if (kbdData.keyProportion === 'balanced') {
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="C"],.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="E"],.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="F"],.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="B"]{width:' + (whiteKeyWidth * 1.5) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="D"],.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="G"],.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="A"]{width:' + (whiteKeyWidth * 2) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="C"]:last-child{width:' + whiteKeyWidth + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="D"]:last-child{width:' + (whiteKeyWidth * 1.5) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="F"]:last-child{width:' + whiteKeyWidth + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="G"]:last-child{width:' + (whiteKeyWidth * 1.5) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="A"]:last-child{width:' + (whiteKeyWidth * 1.5) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key:first-child{left:0}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="D"]:first-child{width:' + (whiteKeyWidth * 1.5) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="E"]:first-child{width:' + whiteKeyWidth + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="G"]:first-child{width:' + (whiteKeyWidth * 1.5) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="A"]:first-child{width:' + (whiteKeyWidth * 1.5) + widthUnit + '}';
css += '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.white.key[data-pitch="B"]:first-child{width:' + whiteKeyWidth + widthUnit + '}';
}
return css;
}
/**
 * Builds the CSS rule that sizes the black keys of this keyboard instance.
 * @returns {string} CSS string for styling black keys.
 */
function generateStyleForBlackKeys() {
    var selector = '.piano-keyboard[data-kbd-id="' + kbdData.id + '"]>.black.key';
    return selector + '{width:' + getBlackKeyWidth() + getWidthUnit() + '}';
}
/**
 * Assembles the full per-instance stylesheet (white keys first, then black).
 * @returns {string} CSS string for styling the keyboard.
 */
function generateStyle() {
    return [
        generateStyleForWhiteKeys(),
        generateStyleForBlackKeys()
    ].join('');
}
/**
 * Creates one <button> element per key in the configured range and appends
 * it to the keyboard element, tagging it with its key number, octave,
 * pitch name, and black/white color class.
 */
function generateKeys() {
    // The original wrapped the body in a per-iteration IIFE; nothing here
    // closes over `i` beyond the iteration, so a plain loop body suffices.
    var i, key, pitchClass, octave;
    for (i = kbdData.startKey; i <= kbdData.endKey; i++) {
        key = document.createElement('button');
        pitchClass = getPitchClassName(i);
        octave = getOctave(i);
        key.dataset.key = i;
        key.dataset.octave = octave;
        // Names containing the sharp suffix denote black keys.
        key.classList.add(pitchClass.indexOf(sharpSuffix) > -1 ? 'black' : 'white');
        key.dataset.pitch = pitchClass;
        key.classList.add('key');
        // Individual keys are not Tab-reachable; the container handles focus.
        key.setAttribute('tabindex', -1);
        kbdEl.appendChild(key);
    }
}
/**
 * Paints the key being pressed.
 * @param {Element} keyEl The key element.
 */
function paintNoteOn(keyEl) {
// '-active' is the CSS modifier for a currently-sounding key.
keyEl.classList.add('-active');
}
/**
 * Paints the key being released.
 * @param {Element} keyEl The key element.
 */
function paintNoteOff(keyEl) {
// Remove the '-active' modifier set by paintNoteOn().
keyEl.classList.remove('-active');
}
/**
 * Dispatches a CustomEvent of the given type on the keyboard element,
 * decorated with note data for 'noteon'/'noteoff' events.
 * @param {string} type The event type ('noteon', 'noteoff', ...).
 * @param {Object} detail Additional data; for note events: { key: Element, velocity }.
 * @returns {CustomEvent} The dispatched event (inspect .defaultPrevented).
 */
function triggerKeyboardEvent(type, detail) {
    var event = new CustomEvent(type);
    event.eventName = type;
    switch (type) {
        case 'noteon':
        case 'noteoff':
            // dataset values are strings; parse with an explicit radix so a
            // leading zero can never trigger legacy octal interpretation.
            event.key = parseInt(detail.key.dataset.key, 10);
            event.velocity = parseInt(detail.velocity, 10);
            event.octave = parseInt(detail.key.dataset.octave, 10);
            event.pitch = detail.key.dataset.pitch;
            break;
        default:
            break;
    }
    kbdEl.dispatchEvent(event);
    return event;
}
/**
 * Wires mouse and computer-keyboard input on the keyboard element and
 * translates it into 'noteon'/'noteoff' custom events.
 */
function bindEvents() {
    // Velocity captured at the last mousedown; reused on up/enter/leave so
    // a drag across keys keeps the original press loudness.
    var mouseVel = 0;

    /**
     * Resolves a DOM keydown/keyup event to the piano key element bound to
     * it, or null when no binding applies.
     * @param e The DOM keyboard event.
     * @returns {Element|null} The bound key element, if any.
     */
    function resolveBoundKey(e) {
        var bindingsMap = kbdData.bindingsMap,
            key;
        // Ignore chorded shortcuts so browser/OS shortcuts keep working.
        if (e.shiftKey || e.ctrlKey || e.altKey || e.metaKey) {
            return null;
        }
        // BUG FIX: the original called kbdData.bindingsMap.toLowerCase()
        // BEFORE this undefined-check, so a missing map would throw a
        // TypeError and the guard was dead code. Check first, then lower-case.
        if (typeof bindingsMap === 'undefined') {
            return null;
        }
        key = bindings[bindingsMap.toLowerCase()][e.which];
        if (typeof key === 'undefined') {
            return null;
        }
        return kbdEl.querySelector('[data-key="' + key + '"]');
    }

    /**
     * Shared note-on path for mouse input.
     * @param e Mouse event augmented with a .velocity property.
     */
    function onNoteOn(e) {
        var kbdEvent;
        e.preventDefault();
        // Only primary-button presses on key elements that are not already sounding.
        if (e.buttons === 1 && !e.target.classList.contains('-active') && e.target.classList.contains('key')) {
            if (kbdEl.tabIndex === 0) {
                kbdEl.focus();
            }
            kbdEvent = triggerKeyboardEvent('noteon', { key: e.target, velocity: e.velocity });
            // Listeners may cancel the event to veto the visual state change.
            if (kbdEvent.defaultPrevented) {
                return;
            }
            paintNoteOn(e.target);
        }
    }

    /**
     * Shared note-off path for mouse input.
     * @param e Mouse event augmented with a .velocity property.
     */
    function onNoteOff(e) {
        var kbdEvent;
        e.preventDefault();
        if (e.target.classList.contains('-active') && e.target.classList.contains('key')) {
            kbdEvent = triggerKeyboardEvent('noteoff', { key: e.target, velocity: e.velocity });
            if (kbdEvent.defaultPrevented) {
                return;
            }
            paintNoteOff(e.target);
        }
    }

    /**
     * Computer-keyboard press -> note on.
     * @param e The DOM keydown event.
     */
    function onKeyboardKeydown(e) {
        var keyEl = resolveBoundKey(e),
            kbdEvent;
        // Skip unbound keys and keys already sounding (OS key repeat).
        if (!keyEl || keyEl.classList.contains('-active')) {
            return;
        }
        kbdEvent = triggerKeyboardEvent('noteon', { key: keyEl, velocity: kbdData.keyboardVelocity });
        if (kbdEvent.defaultPrevented) {
            return;
        }
        e.preventDefault();
        paintNoteOn(keyEl);
    }

    /**
     * Computer-keyboard release -> note off.
     * @param e The DOM keyup event.
     */
    function onKeyboardKeyup(e) {
        var keyEl = resolveBoundKey(e),
            kbdEvent;
        // Only keys that are currently sounding can be released.
        if (!keyEl || !keyEl.classList.contains('-active')) {
            return;
        }
        kbdEvent = triggerKeyboardEvent('noteoff', { key: keyEl, velocity: kbdData.keyboardVelocity });
        if (kbdEvent.defaultPrevented) {
            return;
        }
        e.preventDefault();
        paintNoteOff(keyEl);
    }

    /**
     * Derives velocity from the vertical click position (lower = louder).
     * @param e The DOM mousedown event.
     */
    function onMouseDown(e) {
        var maxY = parseFloat(window.getComputedStyle(e.target).height),
            offsetY = e.offsetY + parseFloat(getComputedStyle(e.target).borderTopWidth);
        mouseVel = Math.floor(maxVelocity * (offsetY / maxY));
        e.velocity = mouseVel;
        onNoteOn(e);
    }

    function onMouseUp(e) {
        e.velocity = mouseVel;
        onNoteOff(e);
    }

    // Entering a key with the button held glissandos onto it.
    function onMouseEnter(e) {
        e.velocity = mouseVel;
        onNoteOn(e);
    }

    function onMouseLeave(e) {
        e.velocity = mouseVel;
        onNoteOff(e);
    }

    kbdEl.addEventListener('keydown', onKeyboardKeydown);
    kbdEl.addEventListener('keyup', onKeyboardKeyup);
    // Mouse listeners use capture so per-key child elements are reached.
    kbdEl.addEventListener('mousedown', onMouseDown, true);
    kbdEl.addEventListener('mouseenter', onMouseEnter, true);
    kbdEl.addEventListener('mouseup', onMouseUp, true);
    kbdEl.addEventListener('mouseleave', onMouseLeave, true);
}
/**
 * Reads the keyboard configuration from the element's data-* attributes
 * into kbdData, applying defaults and sanitizing the key range.
 */
function normalizeKeyboardData() {
    var temp;
    // Persist a generated id on the element so the stylesheet can target it.
    kbdData.id = (kbdEl.dataset.kbdId = kbdEl.dataset.kbdId || Date.now());
    // dataset values are strings; parse with an explicit radix.
    kbdData.startKey = parseInt(kbdEl.dataset.startKey, 10) || grandPianoStartKey;
    kbdData.endKey = parseInt(kbdEl.dataset.endKey, 10) || grandPianoEndKey;
    kbdData.whiteKeyWidth = kbdEl.dataset.whiteKeyWidth || 'auto';
    kbdData.keyboardVelocity = parseInt(kbdEl.dataset.keyboardVelocity, 10) || keyboardVelocity;
    kbdData.keyProportion = kbdEl.dataset.keyProportion;
    // Defensive: NaN is already mapped to the defaults by the || above, but
    // keep the explicit checks should the defaults ever become falsy.
    if (isNaN(kbdData.startKey)) {
        kbdData.startKey = grandPianoStartKey;
    }
    if (isNaN(kbdData.endKey)) {
        kbdData.endKey = grandPianoEndKey;
    }
    // Swap a reversed range so all iteration is ascending.
    if (kbdData.startKey > kbdData.endKey) {
        temp = kbdData.startKey;
        kbdData.startKey = kbdData.endKey;
        kbdData.endKey = temp;
    }
    kbdData.bindingsMap = 'standard'; // TODO implement other bindings maps
}
/**
 * Makes the keyboard container focusable (tabindex=0) unless the markup
 * already specifies its own tabindex.
 */
function addKeyboardUiAttributes() {
    if (!kbdEl.hasAttribute('tabindex')) {
        kbdEl.setAttribute('tabindex', 0);
    }
}
/**
 * Creates (or reuses) a per-instance <style> element in <head> and fills it
 * with the generated stylesheet; also tags the element with the base class.
 */
function initializeKeyboardStyle() {
var styleEl = document.querySelector('style[data-kbd-id="' + kbdData.id + '"]'),
styleParent = document.getElementsByTagName('head')[0] || document.body;
if (!styleEl) {
styleEl = document.createElement('style');
styleEl.dataset.kbdId = kbdData.id;
styleParent.appendChild(styleEl);
}
// NOTE(review): generateStyle() is declared with no parameters, so the
// kbdData argument passed here is ignored.
styleEl.innerHTML = generateStyle(kbdData);
if (kbdEl.classList.contains(kbdClass)) {
return;
}
kbdEl.classList.add(kbdClass);
}
/**
 * Attaches the programmatic playNote/stopNote API to the keyboard element.
 * Both accept { key, velocity, pan } and return the element for chaining.
 */
function addMethods() {
    kbdEl.playNote = function playNote(noteInfo) {
        var keyEl = kbdEl.querySelector('[data-key="' + noteInfo.key + '"]'),
            detail;
        // Ignore unknown keys and keys that are already sounding.
        if (!keyEl || keyEl.classList.contains('-active')) {
            return kbdEl;
        }
        detail = { key: keyEl };
        detail.velocity = noteInfo.velocity;
        detail.pan = noteInfo.pan;
        triggerKeyboardEvent('noteon', detail);
        paintNoteOn(keyEl);
        return kbdEl;
    };
    kbdEl.stopNote = function stopNote(noteInfo) {
        var keyEl = kbdEl.querySelector('[data-key="' + noteInfo.key + '"]'),
            detail;
        // BUG FIX: the original bailed out when the key WAS active (the
        // playNote guard copy-pasted), so a sounding note could never be
        // stopped; only active keys should be stoppable.
        if (!keyEl || !keyEl.classList.contains('-active')) {
            return kbdEl;
        }
        detail = { key: keyEl };
        detail.velocity = noteInfo.velocity;
        detail.pan = noteInfo.pan;
        triggerKeyboardEvent('noteoff', detail);
        // BUG FIX: the original called paintNoteOn here, leaving the key
        // highlighted forever after a stop.
        paintNoteOff(keyEl);
        return kbdEl;
    };
}
normalizeKeyboardData();
initializeKeyboardStyle();
generateKeys();
bindEvents();
addKeyboardUiAttributes();
addMethods();
};
});
|
mit
|
J2/shade
|
src/com/shade/controls/InstructionImage.java
|
1893
|
package com.shade.controls;
import org.newdawn.slick.Color;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.Image;
import org.newdawn.slick.state.StateBasedGame;
import com.shade.base.Animatable;
/* Some image which hides, displays itself, pauses, then hides again. */
public class InstructionImage implements Animatable {

    /** Visibility states of the image. */
    private enum State {
        OFF, FADEIN, FADEOUT
    };

    // Screen position at which the sprite is drawn.
    private float x, y;
    // hideTimer: ms elapsed while waiting to appear.
    // showTimer: ms elapsed while visible (accumulated but otherwise unused here).
    // alphaTimer: ms since the last alpha step.
    private int hideTimer, showTimer, alphaTimer;
    // Configured delay (ms) to stay invisible after activation.
    private int hideTime;
    private State state;
    private Image sprite;
    // Color filter whose alpha channel drives the fade effect.
    private Color filter;

    public InstructionImage(float x, float y, Image sprite) {
        this.x = x;
        this.y = y;
        this.sprite = sprite;
        state = State.OFF;
        // Start fully transparent; activate() fades the image in.
        filter = new Color(Color.white);
        filter.a = 0;
    }

    /* How long to stay invisible for. */
    public void setTimer(int time) {
        hideTime = time;
    }

    /** Begins the fade-in sequence from fully transparent. */
    public void activate() {
        filter.a = 0;
        state = State.FADEIN;
    }

    /** Begins fading the image back out. */
    public void deactivate() {
        state = State.FADEOUT;
    }

    public void render(StateBasedGame game, Graphics g) {
        sprite.draw(x, y, filter);
    }

    public void update(StateBasedGame game, int delta) {
        if (state == State.FADEIN) {
            hideTimer += delta;
            // BUG FIX: the original compared hideTime < hideTime (always
            // false), so the configured invisibility delay never applied
            // and the image began fading in immediately.
            if (hideTimer < hideTime) {
                return;
            }
            showTimer += delta;
            alphaTimer += delta;
            // Step alpha up every 100 ms until fully opaque.
            if (alphaTimer > 100 && filter.a < 1) {
                alphaTimer = 0;
                filter.a += .05f;
            }
        }
        if (state == State.FADEOUT && filter.a > 0) {
            alphaTimer += delta;
            // Fade-out steps are larger than fade-in (-0.1 vs +0.05).
            if (alphaTimer > 100) {
                alphaTimer = 0;
                filter.a -= .1f;
            }
        }
    }

    /**
     * Returns to the hidden state.
     * NOTE(review): hideTimer is reset neither here nor in activate(), so
     * after one completed fade-in the delay is skipped on reuse — confirm
     * whether that is intended.
     */
    public void reset() {
        state = State.OFF;
        filter.a = 0;
    }
}
|
mit
|
joansmith/ontrack
|
ontrack-core/src/main/java/net/ontrack/core/model/ImportResponse.java
|
124
|
package net.ontrack.core.model;
import lombok.Data;
// Immutable response payload for an import operation. Lombok's @Data
// generates the getter, equals/hashCode and toString for the final field.
@Data
public class ImportResponse {
    // Unique identifier of the import that was performed.
    private final String uid;
}
|
mit
|
ditup/ditapp-ng
|
src/app/tag/tag-resolver.service.spec.ts
|
520
|
import { TestBed, inject } from '@angular/core/testing';
import { TagResolver } from './tag-resolver.service';
import { ModelService } from '../model.service';
// Inert stand-in for ModelService so the resolver can be constructed
// without pulling in real model/network dependencies.
class FakeModelService {
}

describe('TagResolver', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [
        TagResolver,
        // Swap the real ModelService for the fake declared above.
        { provide: ModelService, useClass: FakeModelService }
      ]
    });
  });

  // Smoke test: the resolver can be instantiated by Angular's injector.
  it('should ...', inject([TagResolver], (service: TagResolver) => {
    expect(service).toBeTruthy();
  }));
});
|
mit
|
yitzchak/dicy
|
packages/core/ext-types/command-join.d.ts
|
116
|
// Hand-written typings for the 'command-join' package.
declare module 'command-join' {
    /**
     * Escapes one argument or a list of arguments for safe use on a shell
     * command line and joins them into a single string.
     */
    function commandJoin(args: string | string[]): string;
    export = commandJoin;
}
|
mit
|
ceolter/ag-grid
|
community-modules/core/dist/cjs/utils/general.d.ts
|
1568
|
// Type definitions for @ag-grid-community/core v25.1.0
// Project: http://www.ag-grid.com/
// Definitions by: Niall Crosby <https://github.com/ag-grid/>
import { ICellRendererComp } from '../rendering/cellRenderers/iCellRenderer';
import { AgPromise } from './promise';
/** @deprecated */
export declare function getNameOfClass(theClass: any): string;
export declare function findLineByLeastSquares(values: number[]): number[];
/**
 * Converts a CSS object into string
 * @param {Object} stylesToUse an object eg: {color: 'black', top: '25px'}
 * @return {string} A string like "color: black; top: 25px;" for html
 */
export declare function cssStyleObjectToMarkup(stylesToUse: any): string;
/**
 * Displays a message to the browser. this is useful in iPad, where you can't easily see the console.
 * so the javascript code can use this to give feedback. this is NOT intended to be called in production.
 * it is intended the AG Grid developer calls this to troubleshoot, but then takes out the calls before
 * checking in.
 * @param {string} msg
 */
export declare function message(msg: string): void;
/**
 * cell renderers are used in a few places. they bind to dom slightly differently to other cell renderers as they
 * can return back strings (instead of an html element) in the getGui() method. common code placed here to handle that.
 * @param {AgPromise<ICellRendererComp>} cellRendererPromise
 * @param {HTMLElement} eTarget
 */
export declare function bindCellRendererToHtmlElement(cellRendererPromise: AgPromise<ICellRendererComp>, eTarget: HTMLElement): void;
|
mit
|
TextFileDataTools/text-file-data-tools
|
src/main/java/stexfires/core/record/PairRecord.java
|
4278
|
package stexfires.core.record;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import stexfires.core.Field;
import stexfires.core.Fields;
import stexfires.core.TextRecord;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Stream;
/**
* @author Mathias Kalb
* @since 0.1
*/
/**
 * A fixed-size {@link TextRecord} that always holds exactly two fields.
 * Instances are immutable; category, record id and the hash code are all
 * fixed at construction time.
 */
public class PairRecord implements TextRecord {

    public static final int FIRST_VALUE_INDEX = Fields.FIRST_FIELD_INDEX;
    public static final int SECOND_VALUE_INDEX = Fields.FIRST_FIELD_INDEX + 1;
    public static final int FIELD_SIZE = 2;

    private final String category;
    private final Long recordId;
    private final Field firstField;
    private final Field secondField;
    // Cached eagerly because all contributing state is final.
    private final int hashCode;

    public PairRecord(@Nullable String firstValue, @Nullable String secondValue) {
        this(null, null, firstValue, secondValue);
    }

    public PairRecord(@Nullable String category, @Nullable Long recordId,
                      @Nullable String firstValue, @Nullable String secondValue) {
        this.category = category;
        this.recordId = recordId;
        Field[] pair = Fields.newArray(firstValue, secondValue);
        this.firstField = pair[FIRST_VALUE_INDEX];
        this.secondField = pair[SECOND_VALUE_INDEX];
        this.hashCode = Objects.hash(category, recordId, firstField, secondField);
    }

    /** Returns a new record (same category and id) with the two values exchanged. */
    public PairRecord newRecordSwapped() {
        return new PairRecord(category, recordId, secondField.getValue(), firstField.getValue());
    }

    @Override
    public final Field[] arrayOfFields() {
        return new Field[]{firstField, secondField};
    }

    @Override
    public final List<Field> listOfFields() {
        // Callers may mutate the result, so always hand out a fresh ArrayList.
        List<Field> result = new ArrayList<>(FIELD_SIZE);
        result.add(firstField);
        result.add(secondField);
        return result;
    }

    @Override
    public final Stream<Field> streamOfFields() {
        return Stream.of(firstField, secondField);
    }

    @Override
    public final String getCategory() {
        return category;
    }

    @Override
    public final Long getRecordId() {
        return recordId;
    }

    @Override
    public final int size() {
        return FIELD_SIZE;
    }

    @Override
    public final boolean isEmpty() {
        // A pair record always carries two fields.
        return false;
    }

    @Override
    public final boolean isValidIndex(int index) {
        // The two valid indexes are contiguous by construction.
        return index >= FIRST_VALUE_INDEX && index <= SECOND_VALUE_INDEX;
    }

    @SuppressWarnings("ReturnOfNull")
    @Override
    public final Field getFieldAt(int index) {
        if (index == FIRST_VALUE_INDEX) {
            return firstField;
        }
        if (index == SECOND_VALUE_INDEX) {
            return secondField;
        }
        return null;
    }

    @Override
    public final @NotNull Field getFirstField() {
        return firstField;
    }

    @SuppressWarnings("SuspiciousGetterSetter")
    @Override
    public final @NotNull Field getLastField() {
        // For a pair, the last field is simply the second one.
        return secondField;
    }

    public final @NotNull Field getSecondField() {
        return secondField;
    }

    @Override
    public final String getValueOfFirstField() {
        return firstField.getValue();
    }

    @Override
    public final String getValueOfLastField() {
        return secondField.getValue();
    }

    public final String getValueOfSecondField() {
        return secondField.getValue();
    }

    @Override
    public boolean equals(@Nullable Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        PairRecord other = (PairRecord) obj;
        return Objects.equals(category, other.category)
                && Objects.equals(recordId, other.recordId)
                && Objects.equals(firstField, other.firstField)
                && Objects.equals(secondField, other.secondField);
    }

    @Override
    public int hashCode() {
        return hashCode;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("PairRecord{");
        text.append("category=").append(category);
        text.append(", recordId=").append(recordId);
        text.append(", firstValue=").append(firstField.getValue());
        text.append(", secondValue=").append(secondField.getValue());
        text.append('}');
        return text.toString();
    }
}
|
mit
|
rainliu/algorithms
|
sort/QuickSort.go
|
881
|
package sort
import (
"algorithms"
"math/rand"
"time"
)
// QuickSort sorts a slice of Comparables with randomized quicksort.
type QuickSort struct {
	sorter
}

// Sort sorts the whole slice in ascending order.
func (this *QuickSort) Sort(a []algorithms.Comparable) {
	this.sort(a, 0, len(a)-1)
}

// sort recursively sorts the sub-slice a[low..high] (inclusive bounds).
func (this *QuickSort) sort(a []algorithms.Comparable, low, high int) {
	if high <= low {
		return
	}
	j := this.partition(a, low, high)
	this.sort(a, low, j-1)
	this.sort(a, j+1, high)
}

// partition swaps a randomly chosen pivot into a[low], partitions
// a[low..high] around it, and returns the pivot's final index.
func (this *QuickSort) partition(a []algorithms.Comparable, low, high int) int {
	// Pick the pivot uniformly. Intn fixes the modulo bias of the previous
	// r.Int()%(high+1-low) expression.
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	x := low + r.Intn(high+1-low)
	this.Swap(a, low, x)

	i := low
	j := high + 1
	v := a[low]
	for {
		// Scan right for an element not less than the pivot.
		for i = i + 1; this.Less(a[i], v); i++ {
			if i == high {
				break
			}
		}
		// Scan left for an element not greater than the pivot.
		for j = j - 1; this.Less(v, a[j]); j-- {
			if j == low {
				break
			}
		}
		if i >= j {
			break
		}
		this.Swap(a, i, j)
	}
	// Place the pivot into its final position.
	this.Swap(a, low, j)
	return j
}
|
mit
|
webtown-php/KunstmaanBundlesCMS
|
src/Kunstmaan/AdminListBundle/Entity/LockableEntity.php
|
2694
|
<?php
namespace Kunstmaan\AdminListBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use Kunstmaan\AdminBundle\Entity\AbstractEntity;
use Symfony\Component\Validator\Constraints as Assert;
/**
* @ORM\Entity(repositoryClass="Kunstmaan\AdminListBundle\Repository\LockableEntityRepository")
* @ORM\Table(name="kuma_lockable_entity",
* uniqueConstraints={@ORM\UniqueConstraint(name="ix_kuma_lockable_entity_id_class", columns={"entityId", "entityClass"})},
* indexes={@ORM\Index(name="idx__lockable_entity_id_class", columns={"entityId", "entityClass"})}
* )
*/
class LockableEntity extends AbstractEntity
{
    /**
     * When this lock record was first created.
     *
     * @var \DateTime
     *
     * @ORM\Column(type="datetime")
     */
    protected $created;
    /**
     * When this lock record was last touched.
     *
     * @var \DateTime
     *
     * @ORM\Column(type="datetime")
     */
    protected $updated;
    /**
     * Fully qualified class name of the entity being locked.
     *
     * @var string
     *
     * @ORM\Column(type="string")
     */
    protected $entityClass;
    /**
     * Identifier of the entity being locked.
     *
     * @var integer
     *
     * @ORM\Column(type="bigint")
     */
    protected $entityId;
    /**
     * Constructor: stamps both timestamps with the current time.
     */
    public function __construct()
    {
        $this->setCreated(new \DateTime());
        $this->setUpdated(new \DateTime());
    }
    /**
     * Set created
     *
     * @param \DateTime $created
     *
     * @return LockableEntity
     */
    public function setCreated(\DateTime $created)
    {
        $this->created = $created;
        return $this;
    }
    /**
     * Get created
     *
     * @return \DateTime
     */
    public function getCreated()
    {
        return $this->created;
    }
    /**
     * Set updated
     *
     * @param \DateTime $updated
     *
     * @return LockableEntity
     */
    public function setUpdated(\DateTime $updated)
    {
        $this->updated = $updated;
        return $this;
    }
    /**
     * Get updated
     *
     * @return \DateTime
     */
    public function getUpdated()
    {
        return $this->updated;
    }
    /**
     * Get entityClass.
     *
     * @return string
     */
    public function getEntityClass()
    {
        return $this->entityClass;
    }
    /**
     * Set entityClass.
     *
     * @param string $entityClass
     *
     * @return LockableEntity
     */
    public function setEntityClass($entityClass)
    {
        $this->entityClass = $entityClass;
        return $this;
    }
    /**
     * Get entityId.
     *
     * @return integer
     */
    public function getEntityId()
    {
        return $this->entityId;
    }
    /**
     * Set entityId.
     *
     * @param integer $entityId
     *
     * @return LockableEntity
     */
    public function setEntityId($entityId)
    {
        $this->entityId = $entityId;
        return $this;
    }
}
|
mit
|
xrcat/x-guice
|
guice-property/src/test/java/com/maxifier/guice/property/converter/FileTypeConverterTest.java
|
1577
|
package com.maxifier.guice.property.converter;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.TypeLiteral;
import com.maxifier.guice.property.Property;
import com.maxifier.guice.property.PropertyModule;
import org.testng.annotations.Test;
import java.io.File;
import java.util.Properties;
import static org.testng.Assert.assertEquals;
/**
* @author Aleksey Didik (28.10.2009 19:28:24)
*/
public class FileTypeConverterTest {
    private static final String FILE_NAME = "foo.txt";
    private static final String PARENT_FILE_NAME = "." + File.separatorChar + "hello";
    private static final String FULL_NAME = PARENT_FILE_NAME + File.separatorChar + FILE_NAME;

    // Direct conversion: the converter turns a path string into a File.
    @Test
    public void testConvert() {
        FileTypeConverter converter = new FileTypeConverter();
        File file = (File) converter.convert(FULL_NAME, TypeLiteral.get(File.class));
        assertEquals(file.getName(), FILE_NAME);
        assertEquals(file.getParent(), PARENT_FILE_NAME);
    }

    // End-to-end: the converter is registered via PropertyModule and applied
    // to an @Property-annotated File field during injection.
    @Test
    public void testInContainer() {
        Properties props = new Properties();
        props.put("file.name", FULL_NAME);
        Injector inj = Guice.createInjector(PropertyModule.loadFrom(props).withConverters());
        Foo foo = inj.getInstance(Foo.class);
        File fooFile = foo.file;
        assertEquals(fooFile.getName(), FILE_NAME);
        assertEquals(fooFile.getParent(), PARENT_FILE_NAME);
    }

    // Field-injection target used by testInContainer.
    static class Foo {
        @Inject
        @Property("file.name")
        File file;
    }
}
|
mit
|
medeiros/medeiros.github.io
|
#jekyll-theme-hydejack/assets/js/LEGACY-shadydom-hydejack-9.0.4.js
|
791
|
/*!
* __ __ __ __
* /\ \/\ \ /\ \ __ /\ \
* \ \ \_\ \ __ __ \_\ \ __ /\_\ __ ___ \ \ \/'\
* \ \ _ \ /\ \/\ \ /'_` \ /'__`\ \/\ \ /'__`\ /'___\\ \ , <
* \ \ \ \ \\ \ \_\ \ /\ \L\ \ /\ __/ \ \ \ /\ \L\.\_ /\ \__/ \ \ \\`\
* \ \_\ \_\\/`____ \\ \___,_\\ \____\ _\ \ \\ \__/.\_\\ \____\ \ \_\ \_\
* \/_/\/_/ `/___/> \\/__,_ / \/____//\ \_\ \\/__/\/_/ \/____/ \/_/\/_/
* /\___/ \ \____/
* \/__/ \/___/
*
* Powered by Hydejack v9.0.4 <https://hydejack.com/>
*/
(window.webpackJsonp=window.webpackJsonp||[]).push([[9],{341:function(n,w,o){"use strict";o.r(w);o(342),o(346)}}]);
|
mit
|
rshaghoulian/HackerRank-solutions
|
Java/Advanced/Covariant Return Types/Solution.java
|
887
|
// github.com/RodneyShag
// Demonstrates covariant return types: each Region subclass narrows the
// return type of yourNationalFlower() from Flower to a specific subclass.
class Flower {
    String whatsYourName() {
        return "I have many names and types.";
    }
}

class Jasmine extends Flower {
    @Override
    String whatsYourName() {
        return "Jasmine";
    }
}

class Lily extends Flower {
    @Override
    String whatsYourName() {
        return "Lily";
    }
}

class Lotus extends Flower {
    @Override
    String whatsYourName() {
        return "Lotus";
    }
}

// Base region returns the widest type; subclasses covariantly narrow it.
class Region {
    Flower yourNationalFlower() {
        return new Flower();
    }
}

class WestBengal extends Region {
    @Override
    Jasmine yourNationalFlower() {
        return new Jasmine();
    }
}

class Karnataka extends Region {
    @Override
    Lotus yourNationalFlower() {
        return new Lotus();
    }
}

class AndhraPradesh extends Region {
    @Override
    Lily yourNationalFlower() {
        return new Lily();
    }
}
|
mit
|
cpojer/jest
|
e2e/browser-resolver/__tests__/test.js
|
366
|
/**
* Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
/* eslint-env browser */
'use strict';
// NOTE(review): '../fake-pkg' presumably resolves through the package's
// "browser" field in this e2e setup, exporting a DOM element — confirm
// against the fake-pkg package.json.
const div = require('../fake-pkg');
test('dummy test', () => {
  expect(div).toBeInstanceOf(HTMLDivElement);
});
|
mit
|
Trojka/AndroidGestureDSL
|
androidGestureDSL/src/main/java/be/trojkasoftware/android/gestures/CheckRelationMilliSecondsOfTimingIdCondition.java
|
927
|
package be.trojkasoftware.android.gestures;
/**
 * Gesture condition that compares the time elapsed between the current
 * motion event and a timestamp previously stored in the gesture context
 * under {@code dataKey}, using the configured relation (equal/within/exceed).
 */
public class CheckRelationMilliSecondsOfTimingIdCondition implements IGestureCondition {

    private RelationType relationType;
    private int range;
    private String dataKey;

    public CheckRelationMilliSecondsOfTimingIdCondition(RelationType relationType, int rangeValue, String key) {
        this.relationType = relationType;
        this.range = rangeValue;
        this.dataKey = key;
    }

    @Override
    public boolean checkCondition(GestureEvent motion, TouchGesture gesture) {
        Long storedTime = (Long) gesture.getContext(dataKey);
        if (storedTime == null) {
            // No reference timestamp recorded yet: condition is vacuously met.
            return true;
        }
        long elapsed = motion.getTime() - storedTime.longValue();
        switch (relationType) {
            case Equal:
                return elapsed == range;
            case Within:
                return elapsed <= range;
            case Exceed:
                return elapsed >= range;
        }
        return true;
    }
}
|
mit
|
theRocket/wsbaracing
|
vendor/plugins/backgroundrb/server/lib/log_worker.rb
|
562
|
class LogWorker < Packet::Worker
set_worker_name :log_worker
#set_no_auto_load(true)
attr_accessor :log_file
def worker_init
@log_file = Logger.new("#{RAILS_HOME}/log/backgroundrb_#{CONFIG_FILE[:backgroundrb][:port]}.log")
end
def receive_data p_data
case p_data[:type]
when :request: process_request(p_data)
when :response: process_response(p_data)
end
end
def process_request(p_data)
log_data = p_data[:data]
@log_file.info(log_data)
end
def process_response
puts "Not implemented and needed"
end
end
|
mit
|
joseadame/phpsandbox
|
vendor/andres-montanez/magallanes/Mage/Task/BuiltIn/Filesystem/ApplyFaclsTask.php
|
1280
|
<?php
namespace Mage\Task\BuiltIn\Filesystem;
use Mage\Task\AbstractTask;
use Mage\Task\SkipException;
use Mage\Task\Releases\IsReleaseAware;
class ApplyFaclsTask extends AbstractTask implements IsReleaseAware
{
    /**
     * Returns the Title of the Task
     * @return string
     */
    public function getName()
    {
        return 'Set file ACLs on remote system [built-in]';
    }
    /**
     * Runs the task
     *
     * @return boolean
     * @throws SkipException when the mandatory acl_param parameter is missing
     */
    public function run()
    {
        // Resolve the absolute path of the release currently being deployed.
        $releasesDirectory = $this->getConfig()->release('directory', 'releases');
        $releasesDirectory = rtrim($this->getConfig()->deployment('to'), '/') . '/' . $releasesDirectory;
        $currentCopy = $releasesDirectory . '/' . $this->getConfig()->getReleaseId();
        $aclParam = $this->getParameter('acl_param', '');
        if (empty($aclParam)) {
            throw new SkipException('Parameter acl_param not set.');
        }
        $folders = $this->getParameter('folders', []);
        // Note the surrounding spaces: the flag is spliced directly into the command.
        $recursive = $this->getParameter('recursive', false) ? ' -R ' : ' ';
        foreach ($folders as $folder) {
            // $output is collected but unused here — presumably populated by
            // reference in AbstractTask::runCommandRemote; verify its signature.
            $this->runCommandRemote("setfacl$recursive-m $aclParam $currentCopy/$folder", $output);
        }
        return true;
    }
}
|
mit
|
Haacked/Subtext
|
src/Subtext.Framework/Syndication/RssCategoryHandler.cs
|
3179
|
#region Disclaimer/Info
///////////////////////////////////////////////////////////////////////////////////////////////////
// Subtext WebLog
//
// Subtext is an open source weblog system that is a fork of the .TEXT
// weblog system.
//
// For updated news and information please visit http://subtextproject.com/
// Subtext is hosted at Google Code at http://code.google.com/p/subtext/
// The development mailing list is at subtext@googlegroups.com
//
// This project is licensed under the BSD license. See the License.txt file for more information.
///////////////////////////////////////////////////////////////////////////////////////////////////
#endregion
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Subtext.Framework.Components;
using Subtext.Framework.Data;
using Subtext.Framework.Routing;
namespace Subtext.Framework.Syndication
{
    public class RssCategoryHandler : EntryCollectionHandler<Entry>
    {
        protected LinkCategory Category;
        // Entries for the category; populated lazily by GetFeedEntries().
        ICollection<Entry> _posts;
        public RssCategoryHandler(ISubtextContext subtextContext)
            : base(subtextContext)
        {
        }
        // Writer used to serialize the category feed. NOTE(review): assumes
        // GetFeedEntries() has already populated Category and _posts — confirm
        // the base class always calls it first.
        protected override BaseSyndicationWriter SyndicationWriter
        {
            get
            {
                return new CategoryWriter(new StringWriter(), _posts, Category,
                                          Url.CategoryUrl(Category).ToFullyQualifiedUrl(Blog), SubtextContext);
            }
        }
        /// <summary>
        /// Returns true if the feed is the main feed.  False for category feeds and comment feeds.
        /// </summary>
        protected override bool IsMainfeed
        {
            get { return false; }
        }
        // Resolves the category from the current context and caches up to the
        // 10 most recent entries for it.
        protected override ICollection<Entry> GetFeedEntries()
        {
            if (Category == null)
            {
                Category = Cacher.SingleCategory(SubtextContext);
            }
            if (Category != null && _posts == null)
            {
                _posts = Cacher.GetEntriesByCategory(10, Category.Id, SubtextContext);
            }
            return _posts;
        }
        /// <summary>
        /// Builds the feed using delta encoding if it's true.
        /// </summary>
        /// <returns>The cached feed, or null when the category has no posts.</returns>
        protected override CachedFeed BuildFeed()
        {
            CachedFeed feed = null;
            _posts = GetFeedEntries();
            if (_posts != null && _posts.Count > 0)
            {
                feed = new CachedFeed();
                var cw = new CategoryWriter(new StringWriter(), _posts, Category,
                                            Url.CategoryUrl(Category).ToFullyQualifiedUrl(Blog), SubtextContext);
                feed.LastModifiedUtc = _posts.First().DateCreatedUtc;
                feed.Xml = cw.Xml;
            }
            return feed;
        }
        /// <summary>
        /// Gets the item created date.
        /// </summary>
        /// <param name="item">The item.</param>
        /// <returns>The UTC creation date of the entry.</returns>
        protected override DateTime GetItemCreatedDateUtc(Entry item)
        {
            return item.DateCreatedUtc;
        }
    }
}
|
mit
|
intridea/omniauth
|
spec/omniauth/failure_endpoint_spec.rb
|
2361
|
require 'helper'
describe OmniAuth::FailureEndpoint do
  subject { OmniAuth::FailureEndpoint }

  context 'raise-out environment' do
    before do
      # Force the current environment into the raise-out list, saving the
      # previous configuration so it can be restored after each example.
      @rack_env = ENV['RACK_ENV']
      ENV['RACK_ENV'] = 'test'
      @default = OmniAuth.config.failure_raise_out_environments
      OmniAuth.config.failure_raise_out_environments = ['test']
    end

    it 'raises out the error' do
      expect do
        subject.call('omniauth.error' => StandardError.new('Blah'))
      end.to raise_error(StandardError, 'Blah')
    end

    it 'raises out an OmniAuth::Error if no omniauth.error is set' do
      expect { subject.call('omniauth.error.type' => 'example') }.to raise_error(OmniAuth::Error, 'example')
    end

    after do
      ENV['RACK_ENV'] = @rack_env
      OmniAuth.config.failure_raise_out_environments = @default
    end
  end

  context 'non-raise-out environment' do
    # Minimal rack env carrying an error type and the strategy that failed.
    let(:env) do
      {'omniauth.error.type' => 'invalid_request', 'omniauth.error.strategy' => ExampleStrategy.new({})}
    end

    it 'is a redirect' do
      status, = *subject.call(env)
      expect(status).to eq(302)
    end

    it 'includes the SCRIPT_NAME' do
      _, head, = *subject.call(env.merge('SCRIPT_NAME' => '/random'))
      expect(head['Location']).to eq('/random/auth/failure?message=invalid_request&strategy=test')
    end

    it 'respects the globally configured path prefix' do
      allow(OmniAuth.config).to receive(:path_prefix).and_return('/boo')
      _, head, = *subject.call(env)
      expect(head['Location']).to eq('/boo/failure?message=invalid_request&strategy=test')
    end

    it 'respects the custom path prefix configured on the strategy' do
      env['omniauth.error.strategy'] = ExampleStrategy.new({}, path_prefix: "/some/custom/path")
      _, head, = *subject.call(env)
      expect(head['Location']).to eq('/some/custom/path/failure?message=invalid_request&strategy=test')
    end

    it 'includes the origin (escaped) if one is provided' do
      env['omniauth.origin'] = '/origin-example'
      _, head, = *subject.call(env)
      expect(head['Location']).to be_include('&origin=%2Forigin-example')
    end

    it 'escapes the message key' do
      _, head = *subject.call(env.merge('omniauth.error.type' => 'Connection refused!'))
      expect(head['Location']).to be_include('message=Connection+refused%21')
    end
  end
end
|
mit
|
wenjoy/homePage
|
node_modules/geetest/node_modules/request/node_modules/har-validator/node_modules/echint/node_modules/lintspaces/node_modules/editorconfig/editorconfig.js
|
3955
|
var fs = require('fs');
var path = require('path');
var minimatch = require('./lib/fnmatch');
var iniparser = require('./lib/ini');
var Version = require('./lib/version');
var pkg = require('./package.json');
var knownProps = ['end_of_line', 'indent_style', 'indent_size',
'insert_final_newline', 'trim_trailing_whitespace', 'charset'];
/**
 * Match a file path against an EditorConfig glob.
 * '**' is expanded so it also matches across directory separators.
 */
function fnmatch(filepath, glob) {
  var expanded = glob.replace(/\*\*/g, '{*,**/**/**}');
  return minimatch(filepath, expanded, {matchBase: true, dot: true, noext: true});
}
/**
 * Build the list of candidate config-file paths, starting in `filepath`
 * and walking up one directory at a time until the filesystem root.
 * @param {string} filepath directory to start from
 * @param {string} [configname] config file name; defaults to ".editorconfig"
 * @returns {string[]} candidate paths, closest directory first
 */
function getConfigFileNames(filepath, configname) {
  var paths = [];
  var current = filepath;
  var parent;
  do {
    paths.push(path.join(current, configname || ".editorconfig"));
    parent = current;
    current = path.dirname(parent);
  } while (current != parent);
  return paths;
}
/**
 * Derive implied properties after all matching sections are merged:
 * indent_style=tab implies indent_size="tab" (core >= 0.10), a numeric
 * indent_size doubles as tab_width, and indent_size="tab" resolves to the
 * explicit tab_width when one is present.
 */
function processMatches(matches, version) {
  // Rule 1: tab indentation defaults indent_size to "tab" (spec >= 0.10).
  if (matches.indent_style === "tab" && !("indent_size" in matches) &&
      version.gte(new Version(0, 10))) {
    matches.indent_size = "tab";
  }
  // Rules 2 & 3 relate indent_size and tab_width.
  if ("indent_size" in matches) {
    if (matches.indent_size === "tab") {
      // An explicit tab_width resolves a "tab" indent_size.
      if ("tab_width" in matches) {
        matches.indent_size = matches.tab_width;
      }
    } else if (!("tab_width" in matches)) {
      // A numeric indent_size doubles as tab_width when unset.
      matches.tab_width = matches.indent_size;
    }
  }
  return matches;
}
// Normalize the options bag: default to this package's own version when the
// caller does not pin an EditorConfig core version.
function processOptions(options) {
  options = options || {};
  options.version = new Version(options.version || pkg.version);
  return options;
}
// Merge properties from every matching section of every config file.
// `configs` arrives root-first; reversing makes configs closer to the target
// file override values from configs higher up the directory tree.
function parseFromFiles(filepath, configs, options) {
  var matches = {};
  configs.reverse().forEach(function (file) {
    var pathPrefix = path.dirname(file.name);
    var config = file.contents;
    config.forEach(function (section) {
      var fullGlob;
      var glob = section[0];
      // NOTE: this `options` (section properties) shadows the function
      // parameter inside the callback; the outer one is used on the last line.
      var options = section[1];
      // The preamble section (before any [glob]) has no glob: skip it.
      if (!glob) return;
      if (glob.indexOf('/') === -1) {
        // Bare globs match in any subdirectory of the config file.
        fullGlob = path.join(pathPrefix, "**/" + glob);
      } else if (glob.indexOf('/') === 0) {
        fullGlob = path.join(pathPrefix, glob.substring(1));
      } else {
        fullGlob = path.join(pathPrefix, glob);
      }
      if (fnmatch(filepath, fullGlob)) {
        for (var key in options) {
          var value = options[key];
          // Known property values are case-insensitive per the spec.
          if (knownProps.indexOf(key) !== -1) {
            value = value.toLowerCase();
          }
          // Best effort: turn "true"/"4"/etc. into real booleans/numbers.
          try {
            value = JSON.parse(value);
          } catch(e){}
          matches[key.toLowerCase()] = value;
        }
      }
    });
  });
  return processMatches(matches, options.version);
}
// Parse each file's INI contents in place; stop collecting once a file with
// "root = true" in its preamble is reached (configs above it are ignored).
function getConfigsForFiles(files) {
  var configs = [];
  for (var i = 0; i < files.length; i++) {
    files[i].contents = iniparser.parseString(files[i].contents);
    configs.push(files[i]);
    if (/^true$/i.test(files[i].contents[0][1].root)) break;
  }
  return configs;
}
/**
 * Read every candidate config file that exists on disk.
 * @param {string[]} filepaths candidate config-file paths
 * @returns {Array<{name: string, contents: string}>} existing files with raw text
 */
function readConfigFiles(filepaths) {
  return filepaths
    .filter(function (configFilePath) { return fs.existsSync(configFilePath); })
    .map(function (configFilePath) {
      return { name: configFilePath, contents: fs.readFileSync(configFilePath, 'utf-8') };
    });
}
// Resolve `filepath`'s properties from an already-read set of config files.
module.exports.parseFromFiles = function(filepath, files, options) {
  filepath = path.resolve(filepath);
  options = processOptions(options);
  return parseFromFiles(filepath, getConfigsForFiles(files), options);
};
// Resolve `filepath`'s properties by discovering and reading config files
// from its directory up to the filesystem root.
module.exports.parse = function(filepath, options) {
  filepath = path.resolve(filepath);
  options = processOptions(options);
  var filepaths = getConfigFileNames(path.dirname(filepath), options.config);
  var files = readConfigFiles(filepaths);
  return parseFromFiles(filepath, getConfigsForFiles(files), options);
};
|
mit
|
albertopeam/Android-Architecture-Components
|
app/src/main/java/es/albertopeam/apparchitecturelibs/domain/AddNote.java
|
181
|
package es.albertopeam.apparchitecturelibs.domain;
/**
 * Use case contract for adding a note.
 *
 * Created by Alberto Penas Amor on 27/05/2017.
 */
public interface AddNote {
    // Persist/register the given note text.
    void add(String note);
}
|
mit
|
navalev/nether
|
NetherLoadTest/Nether.Sdk/NetherClient.cs
|
4078
|
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using IdentityModel.Client;
using Newtonsoft.Json;
using System;
using System.Net.Http;
using System.Security.Authentication;
using System.Text;
using System.Threading.Tasks;
namespace Nether.Sdk
{
    public class NetherClient
    {
        private readonly string _baseUrl;
        private readonly string _clientId;
        private readonly string _clientSecret;
        private readonly HttpClient _httpClient;
        public NetherClient(string baseUrl, string clientId, string clientSecret)
        {
            _baseUrl = baseUrl;
            _clientId = clientId;
            _clientSecret = clientSecret;
            _httpClient = CreateClient(baseUrl);
        }
        private HttpClient CreateClient(string baseUrl)
        {
            return new HttpClient
            {
                BaseAddress = new Uri(baseUrl)
            };
        }
        private string _accessToken_Internal;
        // Setting the token also applies it as the Bearer token on the shared
        // HttpClient. NOTE(review): a failed login assigns null here, which
        // still calls SetBearerToken(null) — confirm IdentityModel accepts that.
        public string AccessToken
        {
            get { return _accessToken_Internal; }
            set
            {
                _accessToken_Internal = value;
                _httpClient.SetBearerToken(_accessToken_Internal);
            }
        }
        // Resource-owner password flow against the /identity endpoint; on
        // success the access token is stored, on failure it is cleared.
        public async Task<OperationResult> LoginUserNamePasswordAsync(string username, string password)
        {
            // TODO - create a type so that the caller can determine success/failure (with message)
            var disco = await DiscoveryClient.GetAsync(_baseUrl + "/identity");
            // request token
            var tokenClient = new TokenClient(disco.TokenEndpoint, _clientId, _clientSecret);
            var tokenResponse = await tokenClient.RequestResourceOwnerPasswordAsync(username, password, "nether-all");
            if (tokenResponse.IsError)
            {
                AccessToken = null;
                return new OperationResult
                {
                    IsSuccess = false,
                    Message = tokenResponse.Error
                };
            }
            else
            {
                AccessToken = tokenResponse.AccessToken;
                return new OperationResult { IsSuccess = true };
            }
        }
        // TODO - create result model rather than returning JSON string!
        public async Task<OperationResult<string>> GetScoresAsync(string leaderboardType = null)
        {
            string uri = "/api/leaderboards";
            if (leaderboardType != null)
            {
                uri += "/" + leaderboardType;
            }
            var response = await _httpClient.GetAsync(uri);
            if (response.IsSuccessStatusCode)
            {
                var content = await response.Content.ReadAsStringAsync();
                return new OperationResult<string> { IsSuccess = true, Result = content };
            }
            else
            {
                return new OperationResult<string> { IsSuccess = false, Message = response.StatusCode.ToString() }; // TODO - read message body for error?
            }
        }
        // Posts a score for the current user; country is a placeholder value.
        public async Task<OperationResult> PostScoreAsync(int score)
        {
            var response = await _httpClient.PostAsJsonAsync(
                "/api/scores",
                new
                {
                    country = "missing",
                    score = score
                }
                );
            if (response.IsSuccessStatusCode)
            {
                return new OperationResult { IsSuccess = true };
            }
            else
            {
                return new OperationResult { IsSuccess = false, Message = response.StatusCode.ToString() }; // TODO - read message body for error?
            }
        }
    }
    // Simple success/failure envelope for SDK operations.
    public class OperationResult
    {
        public bool IsSuccess { get; set; }
        public string Message { get; set; }
    }
    // Operation result that additionally carries a payload of type T.
    public class OperationResult<T> : OperationResult
    {
        public T Result { get; set; }
    }
}
|
mit
|
zephir-lang/zephir
|
Library/Variable/Globals.php
|
1959
|
<?php
/**
* This file is part of the Zephir.
*
* (c) Phalcon Team <team@zephir-lang.com>
*
* For the full copyright and license information, please view
* the LICENSE file that was distributed with this source code.
*/
namespace Zephir\Variable;
/**
* Zephir\Variable\Globals.
*
* Represents a central knowledge base on global variables.
*/
class Globals
{
    /**
     * The variables passed to the current script via the URL parameters.
     */
    const GET = '_GET';
    /**
     * The variables passed to the current script via the HTTP POST method.
     */
    const POST = '_POST';
    /**
     * An array in which the environment variable names are element keys,
     * and the environment variable value strings are element values.
     */
    const ENV = '_ENV';
    /**
     * The variables passed to the current script via HTTP Cookies.
     */
    const COOKIE = '_COOKIE';
    /**
     * Server and execution environment information, such as headers, paths, and script locations.
     */
    const SERVER = '_SERVER';
    /**
     * The session variables available to the current script.
     */
    const SESSION = '_SESSION';
    /**
     * By default contains the contents of $_COOKIE, $_GET, and $_POST.
     */
    const REQUEST = '_REQUEST';
    /**
     * The items uploaded to the current script via the HTTP POST method.
     */
    const FILES = '_FILES';
    /**
     * Names of superglobal variables.
     *
     * Used as a lookup set: only key presence matters, the value 1 is a
     * dummy flag.
     *
     * @var array
     */
    protected $superglobals = [
        self::GET => 1,
        self::POST => 1,
        self::ENV => 1,
        self::COOKIE => 1,
        self::SERVER => 1,
        self::SESSION => 1,
        self::REQUEST => 1,
        self::FILES => 1,
    ];
    /**
     * Checks if a variable is a superglobal.
     *
     * @param string $name variable name without the leading '$' (e.g. "_GET")
     *
     * @return bool
     */
    public function isSuperGlobal($name)
    {
        return isset($this->superglobals[$name]);
    }
}
|
mit
|
antonradev/newscms
|
application/views/categories/menu.php
|
110
|
<?php // Escape DB-sourced values before embedding them in the URL/HTML (XSS / URL-injection hardening). ?>
<li><a href="category.php?id=<?php echo urlencode($row['category_id']); ?>"><?php echo htmlspecialchars($row['category_name'], ENT_QUOTES, 'UTF-8'); ?></a></li>
|
mit
|
Catchouli-old/Fariss
|
Farseer Physics Engine 3.2 XNA/Factories/JointFactory.cs
|
10429
|
using FarseerPhysics.Dynamics;
using FarseerPhysics.Dynamics.Joints;
using Microsoft.Xna.Framework;
namespace FarseerPhysics.Factories
{
/// <summary>
/// An easy to use factory for using joints.
/// </summary>
public static class JointFactory
{
#region Revolute Joint
        /// <summary>
        /// Creates a revolute joint between two bodies. The joint is NOT
        /// added to any world; use the World overload for that.
        /// </summary>
        /// <param name="bodyA">First body; its local anchor is derived from bodyB's anchor.</param>
        /// <param name="bodyB">Second body.</param>
        /// <param name="localanchorB">Anchor point in bodyB's local coordinates.</param>
        /// <returns>The new revolute joint.</returns>
        public static RevoluteJoint CreateRevoluteJoint(Body bodyA, Body bodyB, Vector2 localanchorB)
        {
            Vector2 localanchorA = bodyA.GetLocalPoint(bodyB.GetWorldPoint(localanchorB));
            RevoluteJoint joint = new RevoluteJoint(bodyA, bodyB, localanchorA, localanchorB);
            return joint;
        }
        /// <summary>
        /// Creates a revolute joint and adds it to the world
        /// </summary>
        /// <param name="world">World the joint is added to.</param>
        /// <param name="bodyA">First body.</param>
        /// <param name="bodyB">Second body.</param>
        /// <param name="localanchorB">Anchor point in bodyB's local coordinates.</param>
        /// <returns>The new revolute joint, already registered with the world.</returns>
        public static RevoluteJoint CreateRevoluteJoint(World world, Body bodyA, Body bodyB, Vector2 localanchorB)
        {
            RevoluteJoint joint = CreateRevoluteJoint(bodyA, bodyB, localanchorB);
            world.AddJoint(joint);
            return joint;
        }
        /// <summary>
        /// Creates a fixed revolute joint (body pinned to a world point) and
        /// adds it to the world.
        /// </summary>
        /// <param name="world">The world.</param>
        /// <param name="body">The body.</param>
        /// <param name="bodyAnchor">The body anchor.</param>
        /// <param name="worldAnchor">The world anchor.</param>
        /// <returns>The new fixed revolute joint, already registered with the world.</returns>
        public static FixedRevoluteJoint CreateFixedRevoluteJoint(World world, Body body, Vector2 bodyAnchor,
                                                                  Vector2 worldAnchor)
        {
            FixedRevoluteJoint fixedRevoluteJoint = new FixedRevoluteJoint(body, bodyAnchor, worldAnchor);
            world.AddJoint(fixedRevoluteJoint);
            return fixedRevoluteJoint;
        }
#endregion
#region Weld Joint
        /// <summary>
        /// Creates a weld joint. The joint is NOT added to any world; use a
        /// World overload for that.
        /// </summary>
        /// <param name="bodyA">First body.</param>
        /// <param name="bodyB">Second body.</param>
        /// <param name="localAnchor">Shared anchor, converted into each body's local space.</param>
        /// <returns>The new weld joint.</returns>
        public static WeldJoint CreateWeldJoint(Body bodyA, Body bodyB, Vector2 localAnchor)
        {
            WeldJoint joint = new WeldJoint(bodyA, bodyB, bodyA.GetLocalPoint(localAnchor),
                                            bodyB.GetLocalPoint(localAnchor));
            return joint;
        }
        /// <summary>
        /// Creates a weld joint and adds it to the world
        /// </summary>
        /// <param name="world">World the joint is added to.</param>
        /// <param name="bodyA">First body.</param>
        /// <param name="bodyB">Second body.</param>
        /// <param name="localanchorB">Shared anchor passed through to the two-body overload.</param>
        /// <returns>The new weld joint, already registered with the world.</returns>
        public static WeldJoint CreateWeldJoint(World world, Body bodyA, Body bodyB, Vector2 localanchorB)
        {
            WeldJoint joint = CreateWeldJoint(bodyA, bodyB, localanchorB);
            world.AddJoint(joint);
            return joint;
        }
public static WeldJoint CreateWeldJoint(World world, Body bodyA, Body bodyB, Vector2 localAnchorA,
Vector2 localAnchorB)
{
WeldJoint weldJoint = new WeldJoint(bodyA, bodyB, localAnchorA, localAnchorB);
world.AddJoint(weldJoint);
return weldJoint;
}
#endregion
#region Prismatic Joint
/// <summary>
/// Creates a prsimatic joint
/// </summary>
/// <param name="bodyA"></param>
/// <param name="bodyB"></param>
/// <param name="localanchorB"></param>
/// <param name="axis"></param>
/// <returns></returns>
public static PrismaticJoint CreatePrismaticJoint(Body bodyA, Body bodyB, Vector2 localanchorB, Vector2 axis)
{
Vector2 localanchorA = bodyA.GetLocalPoint(bodyB.GetWorldPoint(localanchorB));
PrismaticJoint joint = new PrismaticJoint(bodyA, bodyB, localanchorA, localanchorB, axis);
return joint;
}
/// <summary>
/// Creates a prismatic joint and adds it to the world
/// </summary>
/// <param name="world"></param>
/// <param name="bodyA"></param>
/// <param name="bodyB"></param>
/// <param name="localanchorB"></param>
/// <param name="axis"></param>
/// <returns></returns>
public static PrismaticJoint CreatePrismaticJoint(World world, Body bodyA, Body bodyB, Vector2 localanchorB,
Vector2 axis)
{
PrismaticJoint joint = CreatePrismaticJoint(bodyA, bodyB, localanchorB, axis);
world.AddJoint(joint);
return joint;
}
#endregion
#region Line Joint
/// <summary>
/// Creates a line joint
/// </summary>
/// <param name="bodyA"></param>
/// <param name="bodyB"></param>
/// <param name="localanchorB"></param>
/// <param name="axis"></param>
/// <returns></returns>
public static LineJoint CreateLineJoint(Body bodyA, Body bodyB, Vector2 localanchorB, Vector2 axis)
{
Vector2 localanchorA = bodyA.GetLocalPoint(bodyB.GetWorldPoint(localanchorB));
LineJoint joint = new LineJoint(bodyA, bodyB, localanchorA, localanchorB, axis);
return joint;
}
/// <summary>
/// Creates a line joint and adds it to the world
/// </summary>
/// <param name="world"></param>
/// <param name="bodyA"></param>
/// <param name="bodyB"></param>
/// <param name="localanchorB"></param>
/// <param name="axis"></param>
/// <returns></returns>
public static LineJoint CreateLineJoint(World world, Body bodyA, Body bodyB, Vector2 localanchorB, Vector2 axis)
{
LineJoint joint = CreateLineJoint(bodyA, bodyB, localanchorB, axis);
world.AddJoint(joint);
return joint;
}
#endregion
#region Angle Joint
/// <summary>
/// Creates an angle joint.
/// </summary>
/// <param name="world">The world.</param>
/// <param name="bodyA">The first body.</param>
/// <param name="bodyB">The second body.</param>
/// <returns></returns>
public static AngleJoint CreateAngleJoint(World world, Body bodyA, Body bodyB)
{
AngleJoint angleJoint = new AngleJoint(bodyA, bodyB);
world.AddJoint(angleJoint);
return angleJoint;
}
/// <summary>
/// Creates a fixed angle joint.
/// </summary>
/// <param name="world">The world.</param>
/// <param name="body">The body.</param>
/// <returns></returns>
public static FixedAngleJoint CreateFixedAngleJoint(World world, Body body)
{
FixedAngleJoint angleJoint = new FixedAngleJoint(body);
world.AddJoint(angleJoint);
return angleJoint;
}
#endregion
#region Distance Joint
public static DistanceJoint CreateDistanceJoint(World world, Body bodyA, Body bodyB, Vector2 anchorA,
Vector2 anchorB)
{
DistanceJoint distanceJoint = new DistanceJoint(bodyA, bodyB, anchorA, anchorB);
world.AddJoint(distanceJoint);
return distanceJoint;
}
public static FixedDistanceJoint CreateFixedDistanceJoint(World world, Body body, Vector2 localAnchor,
Vector2 worldAnchor)
{
FixedDistanceJoint distanceJoint = new FixedDistanceJoint(body, localAnchor, worldAnchor);
world.AddJoint(distanceJoint);
return distanceJoint;
}
#endregion
#region Friction Joint
public static FrictionJoint CreateFrictionJoint(World world, Body bodyA, Body bodyB, Vector2 anchorA,
Vector2 anchorB)
{
FrictionJoint frictionJoint = new FrictionJoint(bodyA, bodyB, anchorA, anchorB);
world.AddJoint(frictionJoint);
return frictionJoint;
}
public static FixedFrictionJoint CreateFixedFrictionJoint(World world, Body body, Vector2 bodyAnchor)
{
FixedFrictionJoint frictionJoint = new FixedFrictionJoint(body, bodyAnchor);
world.AddJoint(frictionJoint);
return frictionJoint;
}
#endregion
#region Gear Joint
public static GearJoint CreateGearJoint(World world, Joint jointA, Joint jointB, float ratio)
{
GearJoint gearJoint = new GearJoint(jointA, jointB, ratio);
world.AddJoint(gearJoint);
return gearJoint;
}
#endregion
#region Pulley Joint
public static PulleyJoint CreatePulleyJoint(World world, Body bodyA, Body bodyB, Vector2 groundAnchorA,
Vector2 groundAnchorB, Vector2 anchorA, Vector2 anchorB, float ratio)
{
PulleyJoint pulleyJoint = new PulleyJoint(bodyA, bodyB, groundAnchorA, groundAnchorB, anchorA, anchorB,
ratio);
world.AddJoint(pulleyJoint);
return pulleyJoint;
}
#endregion
#region Slider Joint
public static SliderJoint CreateSliderJoint(World world, Body bodyA, Body bodyB, Vector2 anchorA,
Vector2 anchorB, float minLength, float maxLength)
{
SliderJoint sliderJoint = new SliderJoint(bodyA, bodyB, anchorA, anchorB, minLength, maxLength);
world.AddJoint(sliderJoint);
return sliderJoint;
}
#endregion
}
}
|
mit
|
soccermetrics/match-result-app
|
resultapp/resultapp/main.py
|
212
|
#
# main.py
#
# imports
from flask import Flask
import os

# configuration
DEBUG = int(os.environ["DEBUG"])  # raises KeyError if DEBUG is unset (fail fast)

app = Flask(__name__)
app.debug = bool(DEBUG)

# Import views AFTER `app` exists (views imports `app` back — the usual Flask
# circular-import pattern) but BEFORE app.run(): app.run() blocks, so in the
# original layout the import never executed when this file was run directly
# and no routes were registered.
import views  # noqa: E402,F401 — imported for its route-registration side effects

if __name__ == "__main__":
    app.run()
|
mit
|
stel-s/thedeb-symfony
|
app/cache/dev/annotations/df785bf2c76b8d46073dc8722d237e710985b1a6.cache.php
|
281
|
<?php return unserialize('a:2:{i:0;O:27:"Doctrine\\ORM\\Mapping\\Entity":2:{s:15:"repositoryClass";N;s:8:"readOnly";b:0;}i:1;O:26:"Doctrine\\ORM\\Mapping\\Table":5:{s:4:"name";s:14:"reason_against";s:6:"schema";N;s:7:"indexes";N;s:17:"uniqueConstraints";N;s:7:"options";a:0:{}}}');
|
mit
|
obi-two/Rebelion
|
data/scripts/templates/object/tangible/powerup/weapon/shared_thrown_explosive.py
|
468
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    # Template factory: builds the Tangible object for the thrown-explosive
    # weapon powerup. NOTE(review): the file header says it is autogenerated;
    # custom code belongs only between the BEGIN/END MODIFICATIONS markers.
    result = Tangible()

    result.template = "object/tangible/powerup/weapon/shared_thrown_explosive.iff"
    result.attribute_template_id = -1  # -1 presumably means "no attribute template" — TODO confirm
    result.stfName("powerup_n","weapon_thrown_explosive")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
mit
|
q2a-projects/CleanStrap
|
cleanstrap/qa-theme.php
|
5297
|
<?php
// CleanStrap theme bootstrap for Question2Answer: guards against direct
// access, configures error reporting, loads helpers, runs one-off theme
// migrations, and registers the theme's layers and widget modules.

$debug = false;

/* don't allow this page to be requested directly from browser */
if (!defined('QA_VERSION')) {
    header('Location: /');
    exit;
}

$cs_error ='';

// Show all errors when $debug is on; otherwise silence them entirely.
// NOTE(review): error_reporting(0) hides real problems in production logs too.
if($debug){
    error_reporting(E_ALL);
    ini_set('display_errors', '1');
}else{
    error_reporting(0);
    @ini_set('display_errors', 0);
}

// Base URL of the Q2A site as configured in the admin options.
function get_base_url()
{
    return(qa_opt('site_url'));
}

define('Q_THEME_DIR', dirname( __FILE__ ));
define('Q_THEME_URL', qa_opt('site_url').'qa-theme/'.qa_get_site_theme());

include_once Q_THEME_DIR.'/functions.php';
include_once Q_THEME_DIR.'/inc/blocks.php';

qa_register_phrases(Q_THEME_DIR . '/language/cs-lang-*.php', 'cleanstrap');

// AJAX entry point: dispatch to a cs_ajax_<action> handler if one exists.
// NOTE(review): the inner isset() duplicates the outer one, and
// $_REQUEST['action'] is read without its own isset() guard — confirm intent.
if(isset($_REQUEST['cs_ajax'])){
    if(isset($_REQUEST['cs_ajax'])){
        $action = 'cs_ajax_'.$_REQUEST['action'];
        if(function_exists($action))
            $action();
    }
}else{
    global $qa_request;

    // One-off migrations keyed off the stored theme version option.
    $version = qa_opt('cs_version');
    if(empty($version)) $version=0;

    if (version_compare($version, '2.4.3') < 0){
        if(!(bool)qa_opt('cs_init')){ // theme init
            // First-run setup: declare the widget positions the theme supports.
            cs_register_widget_position(
                array(
                    'Top' => 'Before navbar',
                    'Header' => 'After navbar',
                    'Header left' => 'Left side of header',
                    'Header Right' => 'Right side of header',
                    'Left' => 'Right side below menu',
                    'Content Top' => 'Before questions list',
                    'Content Bottom' => 'After questions lists',
                    'Right' => 'Right side of content',
                    'Bottom' => 'Below content and before footer',
                    'Home Slide' => 'Home Top',
                    'Home 1 Left' => 'Home position 1',
                    'Home 1 Center' => 'Home position 1',
                    'Home 2' => 'Home position 2',
                    'Home 3 Left' => 'Home position 3',
                    'Home 3 Center' => 'Home position 3',
                    'Home Right' => 'Home right side',
                    'Question Right' => 'Right side of question',
                    'User Content' => 'On user page'
                )
            );
            reset_theme_options();
            qa_opt('cs_init',true);
        }

        //create table for widgets
        qa_db_query_sub(
            'CREATE TABLE IF NOT EXISTS ^ra_widgets ('.
            'id INT(10) NOT NULL AUTO_INCREMENT,'.
            'name VARCHAR (64),'.
            'position VARCHAR (64),'.
            'widget_order INT(2) NOT NULL DEFAULT 0,'.
            'param LONGTEXT,'.
            'PRIMARY KEY (id),'.
            'UNIQUE KEY id (id)'.
            ') ENGINE=MyISAM DEFAULT CHARSET=utf8 AUTO_INCREMENT=1;'
        );

        $version = '2.4.3';
        qa_opt('cs_version', $version); // update version of theme
    }

    // 2.4.4 migration: the widgets table was renamed from ^ra_widgets to ^cs_widgets.
    if (version_compare($version, '2.4.4') < 0){
        qa_db_query_sub(
            'RENAME TABLE ^ra_widgets TO ^cs_widgets;'
        );
        $version = '2.4.4';
        qa_opt('cs_version', $version);
    }

    // Admin-only layers (theme options / widget management pages).
    if (qa_get_logged_in_level()>=QA_USER_LEVEL_ADMIN){
        qa_register_layer('/inc/options.php', 'Theme Options', Q_THEME_DIR , Q_THEME_URL );
        qa_register_layer('/inc/widgets.php', 'Theme Widgets', Q_THEME_DIR , Q_THEME_URL );
    }

    // Register the theme's widget modules with Q2A.
    qa_register_module('widget', '/inc/widget_ask.php', 'cs_ask_widget', 'CS Ajax Ask', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_tags.php', 'cs_tags_widget', 'CS Tags', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_ticker.php', 'cs_ticker_widget', 'CS Ticker', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_top_users.php', 'cs_top_users_widget', 'CS Top Contributors', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_activity.php', 'cs_activity_widget', 'CS Site Activity', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_question_activity.php', 'cs_question_activity_widget', 'CS Question Activity', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_featured_questions.php', 'cs_featured_questions_widget', 'CS Featured Questions', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_site_status.php', 'cs_site_status_widget', 'CS Site Status', Q_THEME_DIR, Q_THEME_URL);
    //qa_register_module('widget', '/inc/widget_twitter.php', 'cs_twitter_widget', 'CS Twitter Widget', Q_THEME_DIR, Q_THEME_URL);
    //qa_register_module('widget', '/inc/widget_feed.php', 'cs_feed_widget', 'CS Feed Widget', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_new_users.php', 'cs_new_users_widget', 'CS New Users', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_related_questions.php', 'cs_related_questions', 'CS Related Questions', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_text.php', 'cs_widget_text', 'CS Text Widget', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_posts.php', 'cs_widget_posts', 'CS Posts', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_user_activity.php', 'cs_user_activity_widget', 'CS User Activity', Q_THEME_DIR, Q_THEME_URL);
    qa_register_module('widget', '/inc/widget_user_posts.php', 'cs_user_posts_widget', 'CS User Posts', Q_THEME_DIR, Q_THEME_URL);

    //enable category widget only if category is active in q2a
    if ( qa_using_categories() ){
        qa_register_module('widget', '/inc/widget_categories.php', 'widget_categories', 'CS Categories', Q_THEME_DIR, Q_THEME_URL);
        qa_register_module('widget', '/inc/widget_current_category.php', 'cs_current_category_widget', 'CS Current Cat', Q_THEME_DIR, Q_THEME_URL);
    }
}
|
mit
|
ceolter/angular-grid
|
grid-packages/ag-grid-docs/src/example-runner/chart-vanilla-to-vue.ts
|
4245
|
import { getFunctionName, isInstanceMethod, removeFunctionKeyword } from './parser-utils';
import { templatePlaceholder } from './chart-vanilla-src-parser';
import { toInput, toConst, toMember, toAssignment, convertTemplate, getImport } from './vue-utils';
import { wrapOptionsUpdateCode } from './chart-utils';
/**
 * Strips the leading `function` keyword from a handler snippet and wraps it
 * so that the chart options are re-applied after the handler runs.
 */
function processFunction(code: string): string {
    const withoutKeyword = removeFunctionKeyword(code);
    return wrapOptionsUpdateCode(withoutKeyword);
}
/**
 * Builds the import statements for the generated Vue example: the fixed
 * framework imports plus one import per user-provided component file.
 */
function getImports(componentFileNames: string[]): string[] {
    const imports: string[] = [
        "import Vue from 'vue';",
        "import { cloneDeep } from 'lodash';",
        "import * as agCharts from 'ag-charts-community';",
        "import { AgChartsVue } from 'ag-charts-vue';",
    ];

    if (componentFileNames) {
        for (const fileName of componentFileNames) {
            imports.push(getImport(fileName));
        }
    }

    return imports;
}
/**
 * Splits the parsed property bindings into three groups:
 * - assignments executed in `beforeMount`,
 * - members declared in `data()`,
 * - attributes placed on the component tag in the template.
 */
function getPropertyBindings(bindings: any, componentFileNames: string[]): [string[], string[], string[]] {
    const propertyAssignments = [];
    const propertyVars = [];
    const propertyAttributes = [];

    bindings.properties
        .forEach(property => {
            // Guard componentFileNames the same way getImports() does: callers
            // may pass null/undefined when the example has no child components,
            // and the original `.length` access would throw in that case.
            if (componentFileNames && componentFileNames.length > 0 && property.name === 'components') {
                property.name = 'frameworkComponents';
            }

            if (property.value === 'true' || property.value === 'false') {
                propertyAttributes.push(toConst(property));
            } else if (property.value === null || property.value === 'null') {
                propertyAttributes.push(toInput(property));
            } else {
                // for when binding a method
                // see javascript-grid-keyboard-navigation for an example
                // tabToNextCell needs to be bound to the react component
                if (!isInstanceMethod(bindings.instanceMethods, property)) {
                    propertyAttributes.push(toInput(property));
                    propertyVars.push(toMember(property));
                }

                propertyAssignments.push(toAssignment(property));
            }
        });

    return [propertyAssignments, propertyVars, propertyAttributes];
}
/**
 * Substitutes an <ag-charts-vue> tag (carrying the collected attributes) into
 * the example's template placeholder — or uses the bare tag when the example
 * has no template — then converts the result to Vue template syntax.
 */
function getTemplate(bindings: any, attributes: string[]): string {
    const agChartTag = `<ag-charts-vue
    ${attributes.join('\n    ')}></ag-charts-vue>`;

    const template = bindings.template ? bindings.template.replace(templatePlaceholder, agChartTag) : agChartTag;

    return convertTemplate(template);
} // (removed the stray `;` after the function declaration — no-extra-semi)
/**
 * Collects the three method groups from the parsed bindings: external event
 * handlers, instance methods (both wrapped so options are re-applied), and
 * global snippets (functions get attached to `window`; plain vars pass through).
 */
function getAllMethods(bindings: any): [string[], string[], string[]] {
    const externalEventHandlers = bindings.externalEventHandlers.map(event => processFunction(event.body));
    const instanceMethods = bindings.instanceMethods.map(method => processFunction(method));

    const globalMethods = bindings.globals.map(body => {
        const funcName = getFunctionName(body);
        // Named functions must be reachable from the generated template code.
        return funcName ? `window.${funcName} = ${body}` : body;
    });

    return [externalEventHandlers, instanceMethods, globalMethods];
}
/**
 * Converts a parsed vanilla chart example into the source of a standalone
 * Vue example. Returns a thunk so generation is deferred until needed.
 * NOTE: the template literal below IS the generated file — its whitespace is
 * part of the output, so do not reformat it.
 */
export function vanillaToVue(bindings: any, componentFileNames: string[]): () => string {
    return () => {
        // Gather imports, property bindings and methods from the parsed example.
        const imports = getImports(componentFileNames);
        const [propertyAssignments, propertyVars, propertyAttributes] = getPropertyBindings(bindings, componentFileNames);
        const [externalEventHandlers, instanceMethods, globalMethods] = getAllMethods(bindings);
        const template = getTemplate(bindings, propertyAttributes);
        // Emit the complete Vue example file.
        return `${imports.join('\n')}
const ChartExample = {
template: \`
${template}
\`,
components: {
'ag-charts-vue': AgChartsVue
},
data: function() {
return {
${propertyVars.join(',\n ')}
}
},
beforeMount() {
${propertyAssignments.join(';\n ')}
},
mounted() {
${bindings.init.join(';\n ')}
},
methods: {
${instanceMethods.concat(externalEventHandlers).map(snippet => `${snippet.trim()},`).join('\n')}
}
}
${globalMethods.join('\n\n')}
new Vue({
el: '#app',
components: {
'my-component': ChartExample
}
});
`;
    };
}
// Expose the converter for consumption from plain <script> tags; guarded so
// the module also loads cleanly under Node where `window` does not exist.
if (typeof window !== 'undefined') {
    // `as`-style assertion instead of `<any>`: the angle-bracket form is
    // disallowed in .tsx files and discouraged by typescript-eslint.
    (window as any).vanillaToVue = vanillaToVue;
}
|
mit
|
odlp/antifreeze
|
vendor/code.cloudfoundry.org/cli/command/v6/restage_command.go
|
4392
|
package v6
import (
"github.com/cloudfoundry/noaa/consumer"
"code.cloudfoundry.org/cli/actor/sharedaction"
"code.cloudfoundry.org/cli/actor/v2action"
"code.cloudfoundry.org/cli/actor/v2v3action"
"code.cloudfoundry.org/cli/actor/v3action"
"code.cloudfoundry.org/cli/api/cloudcontroller/ccversion"
"code.cloudfoundry.org/cli/command"
"code.cloudfoundry.org/cli/command/flag"
"code.cloudfoundry.org/cli/command/v6/shared"
sharedV3 "code.cloudfoundry.org/cli/command/v6/shared"
log "github.com/sirupsen/logrus"
)
//go:generate counterfeiter . RestageActor

// RestageActor is the v2 actor surface the restage command depends on:
// application lookup (via the embedded AppActor) plus the restage operation
// itself, which streams logs, errors, state changes and warnings over the
// returned channels.
type RestageActor interface {
	AppActor
	RestageApplication(app v2action.Application, client v2action.NOAAClient) (<-chan *v2action.LogMessage, <-chan error, <-chan v2action.ApplicationStateChange, <-chan string, <-chan error)
}
// RestageCommand implements the `cf restage` command. The unexported fields
// exist only to carry go-flags/help metadata via struct tags; the exported
// collaborators are wired up in Setup.
type RestageCommand struct {
	RequiredArgs        flag.AppName `positional-args:"yes"`
	usage               interface{}  `usage:"CF_NAME restage APP_NAME"`
	relatedCommands     interface{}  `related_commands:"restart"`
	envCFStagingTimeout interface{}  `environmentName:"CF_STAGING_TIMEOUT" environmentDescription:"Max wait time for buildpack staging, in minutes" environmentDefault:"15"`
	envCFStartupTimeout interface{}  `environmentName:"CF_STARTUP_TIMEOUT" environmentDescription:"Max wait time for app instance startup, in minutes" environmentDefault:"5"`

	UI                      command.UI
	Config                  command.Config
	SharedActor             command.SharedActor
	Actor                   RestageActor
	ApplicationSummaryActor shared.ApplicationSummaryActor
	NOAAClient              *consumer.Consumer
}
// Setup wires the command's collaborators: UI, config, shared actor, the
// v2/v3 API clients and actors, the application summary actor, and the NOAA
// log-streaming client.
func (cmd *RestageCommand) Setup(config command.Config, ui command.UI) error {
	cmd.UI = ui
	cmd.Config = config

	// Build the shared actor once and use it both as the command's
	// SharedActor and as the v3 actor's dependency (the original constructed
	// two identical instances).
	sharedActor := sharedaction.NewActor(config)
	cmd.SharedActor = sharedActor

	ccClient, uaaClient, err := shared.NewClients(config, ui, true)
	if err != nil {
		return err
	}

	ccClientV3, _, err := sharedV3.NewV3BasedClients(config, ui, true, "")
	if err != nil {
		return err
	}

	v2Actor := v2action.NewActor(ccClient, uaaClient, config)
	v3Actor := v3action.NewActor(ccClientV3, config, sharedActor, nil)

	// Reuse the v2 actor instead of constructing a second identical one.
	cmd.Actor = v2Actor
	cmd.ApplicationSummaryActor = v2v3action.NewActor(v2Actor, v3Actor)
	cmd.NOAAClient = shared.NewNOAAClient(ccClient.DopplerEndpoint(), config, uaaClient, ui)

	return nil
}
// Execute restages the targeted application: it validates the target,
// resolves the app in the current space, streams staging logs while polling
// for the app to start, and finally displays the app summary — using the v3
// display when the CC API is new enough, falling back to v2 otherwise.
func (cmd RestageCommand) Execute(args []string) error {
	err := cmd.SharedActor.CheckTarget(true, true)
	if err != nil {
		return err
	}

	user, err := cmd.Config.CurrentUser()
	if err != nil {
		return err
	}

	cmd.UI.DisplayTextWithFlavor("Restaging app {{.AppName}} in org {{.OrgName}} / space {{.SpaceName}} as {{.CurrentUser}}...",
		map[string]interface{}{
			"AppName":     cmd.RequiredArgs.AppName,
			"OrgName":     cmd.Config.TargetedOrganization().Name,
			"SpaceName":   cmd.Config.TargetedSpace().Name,
			"CurrentUser": user.Name,
		})

	app, warnings, err := cmd.Actor.GetApplicationByNameAndSpace(cmd.RequiredArgs.AppName, cmd.Config.TargetedSpace().GUID)
	cmd.UI.DisplayWarnings(warnings)
	if err != nil {
		return err
	}

	// RestageApplication returns channels; PollStart blocks until the app is
	// started (or staging fails), relaying logs and warnings to the UI.
	messages, logErrs, appState, apiWarnings, errs := cmd.Actor.RestageApplication(app, cmd.NOAAClient)
	err = shared.PollStart(cmd.UI, cmd.Config, messages, logErrs, appState, apiWarnings, errs)
	if err != nil {
		return err
	}

	cmd.UI.DisplayNewline()

	// Version gate: older CC APIs lack the v3 application flow, so fall back
	// to the v2 summary display when the minimum-version check fails.
	if err := command.MinimumCCAPIVersionCheck(cmd.ApplicationSummaryActor.CloudControllerV3APIVersion(), ccversion.MinVersionApplicationFlowV3); err != nil {
		log.WithField("v3_api_version", cmd.ApplicationSummaryActor.CloudControllerV3APIVersion()).Debug("using v2 for app display")
		appSummary, warnings, err := cmd.Actor.GetApplicationSummaryByNameAndSpace(cmd.RequiredArgs.AppName, cmd.Config.TargetedSpace().GUID)
		cmd.UI.DisplayWarnings(warnings)
		if err != nil {
			return err
		}
		shared.DisplayAppSummary(cmd.UI, appSummary, true)
	} else {
		log.WithField("v3_api_version", cmd.ApplicationSummaryActor.CloudControllerV3APIVersion()).Debug("using v3 for app display")
		appSummary, warnings, err := cmd.ApplicationSummaryActor.GetApplicationSummaryByNameAndSpace(cmd.RequiredArgs.AppName, cmd.Config.TargetedSpace().GUID, true)
		cmd.UI.DisplayWarnings(warnings)
		if err != nil {
			return err
		}
		shared.NewAppSummaryDisplayer2(cmd.UI).AppDisplay(appSummary, true)
	}

	return nil
}
|
mit
|
wp-plugins/nokautwl
|
vendor/nokaut/api-kit/src/Converter/Metadata/Facet/PropertyFacet/ValueConverter.php
|
556
|
<?php
/**
* Created by PhpStorm.
* User: jjuszkiewicz
* Date: 11.07.2014
* Time: 15:13
*/
namespace Nokaut\ApiKit\Converter\Metadata\Facet\PropertyFacet;
use Nokaut\ApiKit\Converter\ConverterInterface;
use Nokaut\ApiKit\Entity\Metadata\Facet\PropertyFacet\Value;
/**
 * Converts a raw stdClass payload (decoded API response fragment) into a
 * property-facet Value entity by copying every field across.
 */
class ValueConverter implements ConverterInterface
{
    /**
     * @param \stdClass $object decoded API object to convert
     *
     * @return Value entity populated with every field of the input
     */
    public function convert(\stdClass $object)
    {
        $entity = new Value();

        foreach (get_object_vars($object) as $name => $fieldValue) {
            $entity->set($name, $fieldValue);
        }

        return $entity;
    }
}
|
mit
|
foretagsplatsen/widget-js
|
webpack.config.js
|
155
|
// Build configuration for the widgetjs bundle: reuse the shared base webpack
// config and override only the entry point and the output file name.
const config = require("./webpack.base.config.js");

config.entry = "./src/widgetjs.js";
config.output.filename = "widgetjs.js";

module.exports = config;
|
mit
|
mohsen-alizadeh/rails-pdate
|
test/time_shifting_test.rb
|
713
|
require 'test_helper'

# Exercises the calendar-shifting helpers (beginning_of_/end_of_ year, month
# and week) on Persian (Jalali) dates parsed via String#to_pdate.
class TimeShiftingTest < ActiveSupport::TestCase
  test "time shifting" do
    # beginning_of_* — note the Persian week starts on Shanbeh (Saturday),
    # and beginning_of_week can cross month/year boundaries.
    assert_equal "1368/11/09".to_pdate.beginning_of_year.to_a, [1368, 1, 1]
    assert_equal "1368/11/09".to_pdate.beginning_of_month.to_a, [1368, 11, 1]
    assert_equal "1394/05/11".to_pdate.beginning_of_week.to_a, [1394, 5, 10]
    assert_equal "1394/02/02".to_pdate.beginning_of_week.to_a, [1394, 1, 29]
    assert_equal "1393/01/01".to_pdate.beginning_of_week.to_a, [1392, 12, 24]

    # end_of_* — 1368 is a common year, so Esfand has 29 days.
    assert_equal "1368/11/09".to_pdate.end_of_year.to_a, [1368, 12, 29]
    assert_equal "1368/11/09".to_pdate.end_of_month.to_a, [1368, 11, 30]
    assert_equal "1394/05/11".to_pdate.end_of_week.to_a, [1394, 5, 16]
  end
end
|
mit
|
johnbabb/C--Toggl-Api-Client
|
Toggl/Interfaces/ITaskService.cs
|
946
|
using System.Collections.Generic;
namespace Toggl.Interfaces
{
    /// <summary>
    /// CRUD operations for Toggl tasks, mapped onto the public Toggl API.
    /// </summary>
    public interface ITaskService
    {
        /// <summary>
        /// Lists all tasks.
        /// https://www.toggl.com/public/api#get_tasks
        /// </summary>
        /// <returns>All tasks visible to the authenticated user.</returns>
        List<Task> List();

        /// <summary>
        /// Fetches a single task by its id.
        /// </summary>
        /// <param name="id">Id of the task to fetch.</param>
        /// <returns>The matching task.</returns>
        Task Get(int id);

        /// <summary>
        /// Creates a new task.
        /// https://www.toggl.com/public/api#post_tasks
        /// </summary>
        /// <param name="t">Task to create.</param>
        /// <returns>The created task as returned by the API.</returns>
        Task Add(Task t);

        /// <summary>
        /// Updates an existing task.
        /// https://www.toggl.com/public/api#put_tasks
        /// </summary>
        /// <param name="t">Task with updated values.</param>
        /// <returns>The updated task as returned by the API.</returns>
        Task Edit(Task t);

        /// <summary>
        /// Deletes a task.
        /// https://www.toggl.com/public/api#del_tasks
        /// </summary>
        /// <param name="id">Id of the task to delete.</param>
        /// <returns>True if the deletion succeeded.</returns>
        bool Delete(int id);
    }
}
|
mit
|
BenGorUser/UserBundle
|
src/BenGorUser/UserBundle/DependencyInjection/Compiler/Routing/InviteRoutesLoaderBuilder.php
|
1444
|
<?php
/*
* This file is part of the BenGorUser package.
*
* (c) Beñat Espiña <benatespina@gmail.com>
* (c) Gorka Laucirica <gorka.lauzirika@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace BenGorUser\UserBundle\DependencyInjection\Compiler\Routing;
/**
* Invite routes loader builder.
*
* @author Beñat Espiña <benatespina@gmail.com>
*/
class InviteRoutesLoaderBuilder extends RoutesLoaderBuilder
{
/**
* {@inheritdoc}
*/
protected function definitionName()
{
return 'bengor.user_bundle.routing.invite_routes_loader';
}
/**
* {@inheritdoc}
*/
protected function defaultRouteName($user)
{
return sprintf('bengor_user_%s_invite', $user);
}
/**
* {@inheritdoc}
*/
protected function defaultRoutePath($user)
{
return sprintf('/%s/invite', $user);
}
/**
* {@inheritdoc}
*/
protected function definitionApiName()
{
return 'bengor.user_bundle.routing.api_invite_routes_loader';
}
/**
* {@inheritdoc}
*/
protected function defaultApiRouteName($user)
{
return sprintf('bengor_user_%s_api_invite', $user);
}
/**
* {@inheritdoc}
*/
protected function defaultApiRoutePath($user)
{
return sprintf('/api/%s/invite', $user);
}
}
|
mit
|
KSemenenko/Google-Analytics-for-Xamarin-Forms
|
Plugin.GoogleAnalytics/Plugin.GoogleAnalytics.Abstractions/Model/Transactions.cs
|
820
|
using System.Collections.Generic;
namespace Plugin.GoogleAnalytics.Abstractions.Model
{
/// <summary>
/// A Google Analytics e-commerce transaction: totals, tax and shipping are
/// expressed in micro-units (presumably 1/1,000,000 of the currency unit,
/// per GA conventions — confirm against the GA measurement protocol docs).
/// </summary>
public sealed class Transaction
{
    /// <summary>Creates an empty transaction with no line items.</summary>
    public Transaction()
    {
        Items = new List<TransactionItem>();
    }

    /// <summary>Creates a transaction with its id and total cost set.</summary>
    public Transaction(string transactionId, long totalCostInMicros)
        : this()
    {
        TransactionId = transactionId;
        TotalCostInMicros = totalCostInMicros;
    }

    // Unique identifier of the transaction.
    public string TransactionId { get; set; }
    // Store or affiliation the transaction occurred in.
    public string Affiliation { get; set; }
    // Grand total in micro-units.
    public long TotalCostInMicros { get; set; }
    // Shipping cost in micro-units.
    public long ShippingCostInMicros { get; set; }
    // Total tax in micro-units.
    public long TotalTaxInMicros { get; set; }
    // ISO 4217 currency code — assumption; confirm against the GA tracker API.
    public string CurrencyCode { get; set; }
    // Line items; populated via Add on the list (the setter is private).
    public IList<TransactionItem> Items { get; private set; }
}
}
|
mit
|
rslnautic/TriangleNet-Unity
|
TriangleNet/Assets/Triangle/Meshing/Converter.cs
|
17404
|
// -----------------------------------------------------------------------
// <copyright file="Converter.cs" company="">
// Original Triangle code by Jonathan Richard Shewchuk, http://www.cs.cmu.edu/~quake/triangle.html
// Triangle.NET code by Christian Woltering, http://triangle.codeplex.com/
// </copyright>
// -----------------------------------------------------------------------
namespace TriangleNet.Meshing
{
using System;
using System.Collections.Generic;
using System.Linq;
using TriangleNet.Geometry;
using TriangleNet.Topology;
using TriangleNet.Topology.DCEL;
using HVertex = TriangleNet.Topology.DCEL.Vertex;
using TVertex = TriangleNet.Geometry.Vertex;
/// <summary>
/// The Converter class provides methods for mesh reconstruction and conversion.
/// </summary>
public static class Converter
{
#region Triangle mesh conversion
/// <summary>
/// Reconstruct a triangulation from its raw data representation.
/// </summary>
/// <param name="polygon">Input geometry (points, segments, holes, regions).</param>
/// <param name="triangles">Raw triangles to rebuild the mesh from.</param>
public static Mesh ToMesh(Polygon polygon, IList<ITriangle> triangles)
{
    // Materialize the list once and delegate to the array-based overload.
    var triangleArray = triangles.ToArray();
    return ToMesh(polygon, triangleArray);
}
/// <summary>
/// Reconstruct a triangulation from its raw data representation.
/// </summary>
/// <param name="polygon">Input geometry (points, segments, holes, regions).</param>
/// <param name="triangles">Raw triangles; may be null (treated as empty).</param>
/// <returns>A mesh with triangle/subsegment topology rebuilt.</returns>
public static Mesh ToMesh(Polygon polygon, ITriangle[] triangles)
{
    Otri tri = default(Otri);
    Osub subseg = default(Osub);
    int i = 0;

    int elements = triangles == null ? 0 : triangles.Length;
    int segments = polygon.Segments.Count;

    // TODO: Configuration should be a function argument.
    var mesh = new Mesh(new Configuration());

    mesh.TransferNodes(polygon.Points);

    mesh.regions.AddRange(polygon.Regions);
    mesh.behavior.useRegions = polygon.Regions.Count > 0;

    // Segments imply PSLG mode; holes only matter in that mode.
    if (polygon.Segments.Count > 0)
    {
        mesh.behavior.Poly = true;
        mesh.holes.AddRange(polygon.Holes);
    }

    // Create the triangles (allocated first, linked up in SetNeighbors).
    for (i = 0; i < elements; i++)
    {
        mesh.MakeTriangle(ref tri);
    }

    if (mesh.behavior.Poly)
    {
        mesh.insegments = segments;

        // Create the subsegments.
        for (i = 0; i < segments; i++)
        {
            mesh.MakeSegment(ref subseg);
        }
    }

    // Link triangle adjacencies, then attach subsegments to triangles.
    var vertexarray = SetNeighbors(mesh, triangles);

    SetSegments(mesh, polygon, vertexarray);

    return mesh;
}
/// <summary>
/// Finds the adjacencies between triangles by forming a stack of triangles for
/// each vertex. Each triangle is on three different stacks simultaneously.
/// </summary>
/// <returns>Per-vertex stacks of incident triangles, each seeded with a
/// dummytri sentinel at index 0 (so the index-walk below can never underflow
/// before hitting the DUMMY id).</returns>
private static List<Otri>[] SetNeighbors(Mesh mesh, ITriangle[] triangles)
{
    Otri tri = default(Otri);
    Otri triangleleft = default(Otri);
    Otri checktri = default(Otri);
    Otri checkleft = default(Otri);
    Otri nexttri;
    TVertex tdest, tapex;
    TVertex checkdest, checkapex;
    int[] corner = new int[3];
    int aroundvertex;
    int i;

    // Allocate a temporary array that maps each vertex to some adjacent triangle.
    var vertexarray = new List<Otri>[mesh.vertices.Count];

    // Each vertex is initially unrepresented.
    for (i = 0; i < mesh.vertices.Count; i++)
    {
        Otri tmp = default(Otri);
        tmp.tri = mesh.dummytri;
        vertexarray[i] = new List<Otri>(3);
        vertexarray[i].Add(tmp);
    }

    i = 0;

    // Read the triangles from the .ele file, and link
    // together those that share an edge.
    foreach (var item in mesh.triangles)
    {
        tri.tri = item;

        // Copy the triangle's three corners.
        for (int j = 0; j < 3; j++)
        {
            corner[j] = triangles[i].GetVertexID(j);

            if ((corner[j] < 0) || (corner[j] >= mesh.invertices))
            {
                Log.Instance.Error("Triangle has an invalid vertex index.", "MeshReader.Reconstruct()");
                throw new Exception("Triangle has an invalid vertex index.");
            }
        }

        // Read the triangle's attributes.
        tri.tri.label = triangles[i].Label;

        // TODO: VarArea
        if (mesh.behavior.VarArea)
        {
            tri.tri.area = triangles[i].Area;
        }

        // Set the triangle's vertices.
        tri.orient = 0;
        tri.SetOrg(mesh.vertices[corner[0]]);
        tri.SetDest(mesh.vertices[corner[1]]);
        tri.SetApex(mesh.vertices[corner[2]]);

        // Try linking the triangle to others that share these vertices.
        for (tri.orient = 0; tri.orient < 3; tri.orient++)
        {
            // Take the number for the origin of triangleloop.
            aroundvertex = corner[tri.orient];

            int index = vertexarray[aroundvertex].Count - 1;

            // Look for other triangles having this vertex.
            nexttri = vertexarray[aroundvertex][index];

            // Push the current triangle onto the stack.
            vertexarray[aroundvertex].Add(tri);

            checktri = nexttri;

            if (checktri.tri.id != Mesh.DUMMY)
            {
                tdest = tri.Dest();
                tapex = tri.Apex();

                // Look for other triangles that share an edge.
                do
                {
                    checkdest = checktri.Dest();
                    checkapex = checktri.Apex();

                    if (tapex == checkdest)
                    {
                        // The two triangles share an edge; bond them together.
                        tri.Lprev(ref triangleleft);
                        triangleleft.Bond(ref checktri);
                    }
                    if (tdest == checkapex)
                    {
                        // The two triangles share an edge; bond them together.
                        checktri.Lprev(ref checkleft);
                        tri.Bond(ref checkleft);
                    }

                    // Find the next triangle in the stack
                    // (walk stops at the dummytri sentinel).
                    index--;
                    nexttri = vertexarray[aroundvertex][index];

                    checktri = nexttri;
                } while (checktri.tri.id != Mesh.DUMMY);
            }
        }

        i++;
    }

    return vertexarray;
}
/// <summary>
/// Finds the adjacencies between triangles and subsegments.
/// </summary>
/// <param name="vertexarray">Per-vertex triangle stacks produced by
/// SetNeighbors; entries are consumed/overwritten here.</param>
private static void SetSegments(Mesh mesh, Polygon polygon, List<Otri>[] vertexarray)
{
    Otri checktri = default(Otri);
    Otri nexttri; // Triangle
    TVertex checkdest;
    Otri checkneighbor = default(Otri);
    Osub subseg = default(Osub);
    Otri prevlink; // Triangle
    TVertex tmp;
    TVertex sorg, sdest;

    bool notfound;

    //bool segmentmarkers = false;
    int boundmarker;
    int aroundvertex;
    int i;

    int hullsize = 0;

    // Prepare to count the boundary edges.
    if (mesh.behavior.Poly)
    {
        // Link the segments to their neighboring triangles.
        boundmarker = 0;
        i = 0;
        foreach (var item in mesh.subsegs.Values)
        {
            subseg.seg = item;

            // Segments are matched positionally: subseg i corresponds to
            // polygon.Segments[i] (both created in the same order in ToMesh).
            sorg = polygon.Segments[i].GetVertex(0);
            sdest = polygon.Segments[i].GetVertex(1);

            boundmarker = polygon.Segments[i].Label;

            if ((sorg.id < 0 || sorg.id >= mesh.invertices) || (sdest.id < 0 || sdest.id >= mesh.invertices))
            {
                Log.Instance.Error("Segment has an invalid vertex index.", "MeshReader.Reconstruct()");
                throw new Exception("Segment has an invalid vertex index.");
            }

            // set the subsegment's vertices.
            subseg.orient = 0;
            subseg.SetOrg(sorg);
            subseg.SetDest(sdest);
            subseg.SetSegOrg(sorg);
            subseg.SetSegDest(sdest);
            subseg.seg.boundary = boundmarker;

            // Try linking the subsegment to triangles that share these vertices.
            for (subseg.orient = 0; subseg.orient < 2; subseg.orient++)
            {
                // Take the number for the destination of subsegloop.
                aroundvertex = subseg.orient == 1 ? sorg.id : sdest.id;

                int index = vertexarray[aroundvertex].Count - 1;

                // Look for triangles having this vertex.
                prevlink = vertexarray[aroundvertex][index];
                nexttri = vertexarray[aroundvertex][index];

                checktri = nexttri;
                tmp = subseg.Org();
                notfound = true;

                // Look for triangles having this edge.  Note that I'm only
                // comparing each triangle's destination with the subsegment;
                // each triangle's apex is handled through a different vertex.
                // Because each triangle appears on three vertices' lists, each
                // occurrence of a triangle on a list can (and does) represent
                // an edge.  In this way, most edges are represented twice, and
                // every triangle-subsegment bond is represented once.
                while (notfound && (checktri.tri.id != Mesh.DUMMY))
                {
                    checkdest = checktri.Dest();

                    if (tmp == checkdest)
                    {
                        // We have a match. Remove this triangle from the list.
                        //prevlink = vertexarray[aroundvertex][index];
                        vertexarray[aroundvertex].Remove(prevlink);

                        // Bond the subsegment to the triangle.
                        checktri.SegBond(ref subseg);

                        // Check if this is a boundary edge.
                        checktri.Sym(ref checkneighbor);

                        if (checkneighbor.tri.id == Mesh.DUMMY)
                        {
                            // The next line doesn't insert a subsegment (because there's
                            // already one there), but it sets the boundary markers of
                            // the existing subsegment and its vertices.
                            mesh.InsertSubseg(ref checktri, 1);
                            hullsize++;
                        }

                        notfound = false;
                    }

                    index--;
                    // Find the next triangle in the stack.
                    prevlink = vertexarray[aroundvertex][index];
                    nexttri = vertexarray[aroundvertex][index];

                    checktri = nexttri;
                }
            }

            i++;
        }
    }

    // Mark the remaining edges as not being attached to any subsegment.
    // Also, count the (yet uncounted) boundary edges.
    for (i = 0; i < mesh.vertices.Count; i++)
    {
        // Search the stack of triangles adjacent to a vertex.
        int index = vertexarray[i].Count - 1;
        nexttri = vertexarray[i][index];
        checktri = nexttri;

        while (checktri.tri.id != Mesh.DUMMY)
        {
            // Find the next triangle in the stack before this
            // information gets overwritten.
            index--;
            nexttri = vertexarray[i][index];

            // No adjacent subsegment. (This overwrites the stack info.)
            checktri.SegDissolve(mesh.dummysub);

            checktri.Sym(ref checkneighbor);

            if (checkneighbor.tri.id == Mesh.DUMMY)
            {
                mesh.InsertSubseg(ref checktri, 1);
                hullsize++;
            }

            checktri = nexttri;
        }
    }

    mesh.hullsize = hullsize;
}
#endregion
#region DCEL conversion
/// <summary>
/// Converts a <see cref="Mesh"/> into a doubly-connected edge list (DCEL).
/// </summary>
/// <param name="mesh">The triangle mesh to convert. NOTE: renumbered in place.</param>
/// <returns>A <see cref="DcelMesh"/> containing vertices, faces and linked half-edges.</returns>
public static DcelMesh ToDCEL(Mesh mesh)
{
    var dcel = new DcelMesh();

    var vertices = new HVertex[mesh.vertices.Count];
    var faces = new Face[mesh.triangles.Count];

    // Each mesh edge contributes exactly two half-edges.
    dcel.HalfEdges.Capacity = 2 * mesh.NumberOfEdges;

    // Renumber mutates the mesh so ids form a dense 0..n-1 range and can
    // be used as array indices below.
    mesh.Renumber();

    HVertex vertex;

    // Copy vertices, preserving id and boundary label.
    foreach (var v in mesh.vertices.Values)
    {
        vertex = new HVertex(v.x, v.y);
        vertex.id = v.id;
        vertex.label = v.label;

        vertices[v.id] = vertex;
    }

    // Maps a triangle to its 3 edges (used to set next pointers).
    var map = new List<HalfEdge>[mesh.triangles.Count];

    Face face;

    foreach (var t in mesh.triangles)
    {
        face = new Face(null);
        face.id = t.id;

        faces[t.id] = face;

        map[t.id] = new List<HalfEdge>(3);
    }

    Otri tri = default(Otri), neighbor = default(Otri);
    TriangleNet.Geometry.Vertex org, dest;

    int id, nid, count = mesh.triangles.Count;

    HalfEdge edge, twin, next;

    var edges = dcel.HalfEdges;

    // Count half-edges (edge ids).
    int k = 0;

    // Maps a vertex to its leaving boundary edge.
    var boundary = new Dictionary<int, HalfEdge>();

    foreach (var t in mesh.triangles)
    {
        id = t.id;

        tri.tri = t;

        // Visit each interior edge once (from the triangle with the smaller
        // id); boundary edges (nid < 0) are always created from the inside.
        for (int i = 0; i < 3; i++)
        {
            tri.orient = i;
            tri.Sym(ref neighbor);

            nid = neighbor.tri.id;

            if (id < nid || nid < 0)
            {
                face = faces[id];

                // Get the endpoints of the current triangle edge.
                org = tri.Org();
                dest = tri.Dest();

                // Create half-edges.
                edge = new HalfEdge(vertices[org.id], face);
                twin = new HalfEdge(vertices[dest.id], nid < 0 ? Face.Empty : faces[nid]);

                map[id].Add(edge);

                if (nid >= 0)
                {
                    map[nid].Add(twin);
                }
                else
                {
                    // Boundary twin: index it by its origin vertex so the
                    // boundary loop can be stitched together afterwards.
                    boundary.Add(dest.id, twin);
                }

                // Set leaving edges.
                edge.origin.leaving = edge;
                twin.origin.leaving = twin;

                // Set twin edges.
                edge.twin = twin;
                twin.twin = edge;

                edge.id = k++;
                twin.id = k++;

                edges.Add(edge);
                edges.Add(twin);
            }
        }
    }

    // Set next pointers for each triangle face.
    foreach (var t in map)
    {
        edge = t[0];
        next = t[1];

        // Orient the 3-cycle so each edge's head vertex is the next
        // edge's origin; otherwise the remaining ordering applies.
        if (edge.twin.origin.id == next.origin.id)
        {
            edge.next = next;
            next.next = t[2];
            t[2].next = edge;
        }
        else
        {
            edge.next = t[2];
            next.next = edge;
            t[2].next = next;
        }
    }

    // Resolve boundary edges: each boundary twin's successor is the
    // boundary edge leaving its head vertex.
    foreach (var e in boundary.Values)
    {
        e.next = boundary[e.twin.origin.id];
    }

    dcel.Vertices.AddRange(vertices);
    dcel.Faces.AddRange(faces);

    return dcel;
}
#endregion
}
}
|
mit
|
F483/picopayments
|
picopayments_hub/version.py
|
270
|
# coding: utf-8
# Copyright (c) 2016 Fabian Barkhau <f483@storj.io>
# License: MIT (see LICENSE file)
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
__version__ = '1.0.5'
|
mit
|
Javier3131/ProyectoDAW
|
public/modules/tables/tables.module.js
|
71
|
'use strict';

// Register the `app.tables` module with the application.
// NOTE(review): ApplicationConfiguration is a project-level global
// (AngularJS-style module registration helper) — defined elsewhere.
ApplicationConfiguration.registerModule('app.tables');
|
mit
|
attilaolah/pinfeed
|
handler.go
|
2347
|
package main
import (
"io"
"io/ioutil"
"net/http"
"regexp"
"strings"
)
const (
	// origin is the upstream host all feed requests are proxied to.
	origin = "https://www.pinterest.com/"
	// repoURL is where requests for "/" get redirected.
	repoURL = "https://github.com/attilaolah/pinfeed"
)

var (
	// thumb matches pinimg.com thumbnail URLs; group 2 is the size segment
	// (e.g. "236x") that replacement upgrades to "1200x".
	// NOTE(review): the dots in the host/extension are unescaped, so they
	// match any character; stricter would be `\.` — confirm before changing,
	// as it narrows what gets rewritten.
	thumb       = regexp.MustCompile("\\b(https?://[0-9a-z-]+.pinimg.com/)(\\d+x)(/[/0-9a-f]+.jpg)\\b")
	replacement = []byte("${1}1200x${3}")

	// headers is the whitelist of upstream response headers copied through.
	headers = []string{
		// Cache control headers
		"Age",
		"Cache-Control",
		"Content-Type",
		"Date",
		"Etag",
		"Last-Modified",
		"Vary",
		// Pinterest-specific stuff
		"Pinterest-Breed",
		"Pinterest-Generated-By",
		"Pinterest-Version",
	}
)
// pinFeed proxies a Pinterest RSS feed, rewriting thumbnail URLs to their
// 1200x variants. Requests for "/" are redirected to the project repo.
func pinFeed(w http.ResponseWriter, r *http.Request) {
	// Home page:
	if r.URL.Path == "/" {
		http.Redirect(w, r, repoURL, http.StatusMovedPermanently)
		return
	}
	// Feed pages:
	req, err := http.NewRequest(r.Method, feedURL(r.URL.Path), nil)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	// Pass along HTTP headers to Pinterest:
	for key, vals := range r.Header {
		for _, val := range vals {
			req.Header.Add(key, val)
		}
	}
	// Don't pass along the request's Accept-Encoding, enforce gzip or deflate:
	req.Header.Set("Accept-Encoding", "gzip, deflate")
	// Make an HTTP request:
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	defer res.Body.Close()
	if decodeBody(res) != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	// Read and rewrite the body BEFORE committing the response header.
	// The original called w.WriteHeader(res.StatusCode) first, which made
	// the error path's WriteHeader(500) a superfluous no-op: clients would
	// receive the upstream status with an empty body instead of a 500.
	buf, err := replaceThumbs(res.Body)
	if err != nil {
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	// Copy white-listed headers to the response:
	for _, key := range headers {
		if val := res.Header.Get(key); val != "" {
			w.Header().Set(key, val)
		}
	}
	w.WriteHeader(res.StatusCode)
	// Write modified response:
	w.Write(buf)
}
// feedURL maps a request path like "/user/board.rss" to the upstream
// Pinterest RSS URL. An empty feed segment defaults to "feed".
func feedURL(path string) string {
	username, feed := userAndFeed(path)
	if feed == "" {
		feed = "feed"
	}
	// origin already ends with "/"; the original added another separator,
	// producing "https://www.pinterest.com//user/feed.rss".
	return origin + username + "/" + feed + ".rss"
}
// userAndFeed splits a request path of the form "/user[/feed][.rss]" into
// its username and feed components; missing segments come back empty.
func userAndFeed(path string) (username, feed string) {
	trimmed := strings.TrimSuffix(path, ".rss")
	segments := strings.SplitN(trimmed, "/", 4)
	switch {
	case len(segments) > 2:
		username, feed = segments[1], segments[2]
	case len(segments) > 1:
		username = segments[1]
	}
	return username, feed
}
// replaceThumbs reads r to the end and rewrites every thumbnail URL that
// matches the package-level `thumb` pattern to its 1200x variant.
func replaceThumbs(r io.Reader) ([]byte, error) {
	body, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	return thumb.ReplaceAll(body, replacement), nil
}
|
mit
|
arteezy/vagrant-postgresql
|
cookbooks/apt/spec/spec_helper.rb
|
92
|
require 'chefspec'
require 'chefspec/berkshelf'
# Emit the ChefSpec resource-coverage report once the suite finishes.
at_exit { ChefSpec::Coverage.report! }
|
mit
|
bitfluent/wheneva
|
vendor/plugins/formtastic/spec/label_spec.rb
|
1515
|
# coding: utf-8
require File.dirname(__FILE__) + '/test_helper'
# Specs for Formtastic's SemanticFormBuilder#label helper: humanized text,
# span rendering, required-field markers, and explicit :label overrides.
describe 'SemanticFormBuilder#label' do
  include FormtasticSpecHelper
  before do
    @output_buffer = ''
    mock_everything
  end
  it 'should humanize the given attribute' do
    semantic_form_for(@new_post) do |builder|
      builder.label(:login).should have_tag('label', :with => /Login/)
    end
  end
  it 'should be printed as span' do
    semantic_form_for(@new_post) do |builder|
      builder.label(:login, nil, { :required => true, :as_span => true }).should have_tag('span.label abbr')
    end
  end
  describe 'when required is given' do
    it 'should append a required note' do
      semantic_form_for(@new_post) do |builder|
        builder.label(:login, nil, :required => true).should have_tag('label abbr')
      end
    end
    it 'should allow require option to be given as second argument' do
      # :required may also be passed in the options hash directly.
      semantic_form_for(@new_post) do |builder|
        builder.label(:login, :required => true).should have_tag('label abbr')
      end
    end
  end
  describe 'when label is given' do
    it 'should allow the text to be given as label option' do
      semantic_form_for(@new_post) do |builder|
        builder.label(:login, :required => true, :label => 'My label').should have_tag('label', :with => /My label/)
      end
    end
    it 'should return nil if label is false' do
      semantic_form_for(@new_post) do |builder|
        builder.label(:login, :label => false).should be_blank
      end
    end
  end
end
|
mit
|
zedr0n/AngleSharp.Local
|
AngleSharp/Extensions/StyleExtensions.cs
|
6192
|
namespace AngleSharp.Extensions
{
using AngleSharp.Dom;
using AngleSharp.Dom.Collections;
using AngleSharp.Dom.Css;
using AngleSharp.Dom.Html;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
/// <summary>
/// A set of extension methods for style / related methods.
/// </summary>
[DebuggerStepThrough]
static class StyleExtensions
{
    /// <summary>
    /// Computes the declarations for the given element in the context of
    /// the specified styling rules.
    /// </summary>
    /// <param name="rules">The styles to use.</param>
    /// <param name="element">The element that is questioned.</param>
    /// <param name="pseudoSelector">The optional pseudo selector to use.</param>
    /// <returns>The style declaration containing all the declarations.</returns>
    public static CssStyleDeclaration ComputeDeclarations(this StyleCollection rules, IElement element, String pseudoSelector = null)
    {
        var computedStyle = new CssStyleDeclaration();
        // When a pseudo selector is given, compute the style for the
        // pseudo element rather than the element itself.
        var pseudoElement = PseudoElement.Create(element, pseudoSelector);

        if (pseudoElement != null)
            element = pseudoElement;

        computedStyle.SetDeclarations(rules.ComputeCascadedStyle(element).Declarations);

        // Inline style attribute wins over the cascaded sheet styles.
        var htmlElement = element as IHtmlElement;

        if (htmlElement != null)
            computedStyle.SetDeclarations(htmlElement.Style.OfType<CssProperty>());

        // Walk up the ancestor chain to fill in inherited declarations.
        var nodes = element.GetAncestors().OfType<IElement>();

        foreach (var node in nodes)
        {
            var style = rules.ComputeCascadedStyle(node);
            computedStyle.UpdateDeclarations(style.Declarations);
        }

        return computedStyle;
    }

    /// <summary>
    /// Gets all possible style sheet sets from the list of style sheets.
    /// </summary>
    /// <param name="sheets">The list of style sheets.</param>
    /// <returns>An enumeration over all sets.</returns>
    public static IEnumerable<String> GetAllStyleSheetSets(this IStyleSheetList sheets)
    {
        // Distinct non-empty titles, in first-seen order.
        var existing = new List<String>();

        foreach (var sheet in sheets)
        {
            var title = sheet.Title;

            if (String.IsNullOrEmpty(title) || existing.Contains(title))
                continue;

            existing.Add(title);

            yield return title;
        }
    }

    /// <summary>
    /// Gets the enabled style sheet sets from the list of style sheets.
    /// </summary>
    /// <param name="sheets">The list of style sheets.</param>
    /// <returns>An enumeration over the enabled sets.</returns>
    public static IEnumerable<String> GetEnabledStyleSheetSets(this IStyleSheetList sheets)
    {
        // A set counts as enabled only if NO sheet carrying its title is
        // disabled; collect disabled titles and subtract them.
        var excluded = new List<String>();

        foreach (var sheet in sheets)
        {
            var title = sheet.Title;

            if (String.IsNullOrEmpty(title) || excluded.Contains(title))
                continue;
            else if (sheet.IsDisabled)
                excluded.Add(title);
        }

        return sheets.GetAllStyleSheetSets().Except(excluded);
    }

    /// <summary>
    /// Sets the enabled style sheet sets in the list of style sheets.
    /// </summary>
    /// <param name="sheets">The list of style sheets.</param>
    /// <param name="name">The name of the set to enabled.</param>
    public static void EnableStyleSheetSet(this IStyleSheetList sheets, String name)
    {
        // Every titled sheet is enabled iff its title matches the set name.
        foreach (var sheet in sheets)
        {
            var title = sheet.Title;

            if (!String.IsNullOrEmpty(title))
                sheet.IsDisabled = title != name;
        }
    }

    /// <summary>
    /// Creates a new StyleSheetList instance for the given node.
    /// </summary>
    /// <param name="parent">The node to get the StyleSheets from.</param>
    /// <returns>The new StyleSheetList instance.</returns>
    public static IStyleSheetList CreateStyleSheets(this INode parent)
    {
        return new StyleSheetList(parent.GetStyleSheets());
    }

    /// <summary>
    /// Gets an enumeration over all the stylesheets from the given parent.
    /// </summary>
    /// <param name="parent">The parent to use.</param>
    /// <returns>The enumeration over all stylesheets.</returns>
    public static IEnumerable<IStyleSheet> GetStyleSheets(this INode parent)
    {
        // Depth-first walk: a link-style element contributes its sheet,
        // any other element is recursed into.
        foreach (var child in parent.ChildNodes)
        {
            if (child.NodeType == NodeType.Element)
            {
                var linkStyle = child as ILinkStyle;

                if (linkStyle != null)
                {
                    var sheet = linkStyle.Sheet;

                    if (sheet != null)
                        yield return sheet;
                }
                else
                {
                    foreach (var sheet in child.GetStyleSheets())
                        yield return sheet;
                }
            }
        }
    }

    /// <summary>
    /// Tries to find the matching namespace url for the given prefix.
    /// </summary>
    /// <param name="sheets">The list of style sheets.</param>
    /// <param name="prefix">The prefix of the namespace to find.</param>
    /// <returns>The namespace URI of the first enabled CSS sheet declaring
    /// the prefix, or null when no sheet declares it.</returns>
    public static String LocateNamespace(this IStyleSheetList sheets, String prefix)
    {
        foreach (var sheet in sheets)
        {
            var css = sheet as CssStyleSheet;

            // Only enabled CSS sheets can contribute namespace rules.
            if (sheet.IsDisabled || css == null)
                continue;

            foreach (var rule in css.Rules.OfType<CssNamespaceRule>())
            {
                if (rule.Prefix == prefix)
                    return rule.NamespaceUri;
            }
        }

        return null;
    }
}
}
|
mit
|
ferblape/query_memcached
|
test/query_memcached_test.rb
|
1699
|
require 'test/unit'
require File.expand_path(File.join(File.dirname(__FILE__), '/testing_app/config/environment.rb'))
# Verifies that ActiveRecord::Base.extract_table_names pulls every referenced
# table out of raw SQL: FROM lists, JOINs, aliases, and subqueries.
class QueryMemcachedTest < Test::Unit::TestCase
  def test_extract_table_names
    # Fixture queries cover: simple select, WHERE filters, multi-table FROM
    # with aliases, INNER JOIN, ORDER BY/LIMIT, a subquery, and a join that
    # should yield tables in declaration order.
    p = "select * from pets"
    q = "select * from author_favorites where (author_favorites.author_id = 36) AND (author_favorites.user_id = 11) LIMIT 1"
    r = "SELECT * from binary_fields where (binary_fields.place_id = 3)"
    s = "select distinct(p.id) from pets p, binary_fields u, binary_fields pu, where pu.user_id = u.id and pu.place_id = p.id and p.id != 3"
    t = "select count(*) as count_all from pets inner join binary_fields on pets_id = binary_fields.place_id where (place_id = 3) AND (binary_fields.user_id = 11)"
    u = "select * from categories order by created_at DESC limit 10"
    v = "SELECT * from pets where id IN (SELECT * from pets where place_id = pets.id)"
    w = "SELECT categories.* FROM categories INNER JOIN binary_fields ON binary_fields.id = categories.user_id WHERE ((binary_fields.contact_id = 1))"
    assert_equal ['pets'], ActiveRecord::Base.extract_table_names(p)
    assert_equal ['author_favorites'], ActiveRecord::Base.extract_table_names(q)
    assert_equal ['binary_fields'], ActiveRecord::Base.extract_table_names(r)
    assert_equal ['pets', 'binary_fields'], ActiveRecord::Base.extract_table_names(s)
    assert_equal ['pets', 'binary_fields'], ActiveRecord::Base.extract_table_names(t)
    assert_equal ['categories'], ActiveRecord::Base.extract_table_names(u)
    # Subquery tables dedupe to a single entry.
    assert_equal ['pets'], ActiveRecord::Base.extract_table_names(v)
    assert_equal ['categories', 'binary_fields'], ActiveRecord::Base.extract_table_names(w)
  end
end
|
mit
|
adiyoss/DeepWDM
|
lib/utils.py
|
840
|
# run system commands
from subprocess import call
def easy_call(command):
    """Run *command* through the shell; on failure, report and abort the process.

    NOTE(review): shell=True executes the raw string through the shell, so
    callers must never pass unsanitized external input.
    """
    try:
        call(command, shell=True)
    except Exception as exception:
        # Parenthesized single-argument prints work identically under both
        # Python 2 and 3 (the original py2 `print` statements broke on py3).
        print("Error: could not execute the following")
        print(">> " + command)
        print(type(exception))   # the exception instance
        print(exception.args)    # arguments stored in .args
        exit(-1)
def crop_wav(wav_path, start_trim, end_trim, output_path):
    # Extract the [start_trim, end_trim] window of wav_path into output_path
    # by shelling out to the bundled sox binary.
    trim_length = end_trim - start_trim
    sox_command = 'sbin/sox %s %s trim %s %s' % (wav_path, output_path, str(start_trim), str(trim_length))
    easy_call(sox_command)
def get_wav_file_length(wav_file):
    """Return the duration of *wav_file* in seconds (frames / sample rate)."""
    import contextlib
    import wave

    with contextlib.closing(wave.open(wav_file, 'r')) as handle:
        return handle.getnframes() / float(handle.getframerate())
|
mit
|
ror/ember-cli-cms
|
Brocfile.js
|
1738
|
/* global require, module */
// Broccoli build pipeline for the ember-cli app: assembles the app tree,
// copies Bootstrap fonts, and imports vendor scripts/styles.
var EmberApp = require('ember-cli/lib/broccoli/ember-app');
var app = new EmberApp();
var pickFiles = require('broccoli-static-compiler');
// Copy the Bootstrap glyph fonts into the built assets directory.
var bootstrapFonts = pickFiles('bower_components/bootstrap-sass-official/assets/fonts/bootstrap', {
srcDir: '/',
destDir: '/assets/bootstrap'
});
var mergeTrees = require('broccoli-merge-trees');
// Use `app.import` to add additional libraries to the generated
// output files.
//
// If you need to use different assets in different
// environments, specify an object as the first parameter. That
// object's keys should be the environment name and the values
// should be the asset to use in that environment.
//
// If the library that you are including contains AMD or ES6
// modules that you would like to import into your application
// please specify an object with the list of modules as keys
// along with the exports of each module as its value.
app.import({
development: 'bower_components/bootstrap/dist/css/bootstrap.css',
production: 'bower_components/bootstrap/dist/css/bootstrap.min.css'
});
//fixme change to addon
app.import('vendor/trunk8/trunk8.js');
app.import({
development: 'vendor/html2canvas/dist/html2canvas.js',
production: 'vendor/html2canvas/dist/html2canvas.min.js'
}
);
app.import({
development: 'bower_components/jquery.countdown/dist/jquery.countdown.js',
production: 'bower_components/jquery.countdown/dist/jquery.countdown.min.js'
}
);
// Flip-counter widget assets (script + modernizr shim + styles).
app.import('bower_components/css-flip-counter/js/flipcounter.js');
app.import('bower_components/css-flip-counter/js/modernizr.custom.21954.js');
app.import('bower_components/css-flip-counter/css/style.css');
// Final build output: the app tree merged with the copied fonts.
module.exports = mergeTrees([app.toTree(), bootstrapFonts]);
|
mit
|
christabor/jinja2_template_pack
|
tests/test_datetimes.py
|
760
|
"""Test munging filters."""
from dateutil.parser import parse as dtparse
from flask_extras.filters import datetimes
class TestStr2Dt:
    """All tests for str2dt function."""

    def test_title_returns_valid(self):
        """str2dt parses a valid datetime string the same way dateutil does."""
        timestr = '01-05-1900 00:00:00'
        res = datetimes.str2dt(timestr)
        # The original chained comparison (`res == dtparse(...) == res`)
        # evaluated the same equality twice; a single comparison suffices.
        assert res == dtparse(timestr)

    def test_title_returns_invalid(self):
        """str2dt returns None for a None input."""
        assert datetimes.str2dt(None) is None

    def test_title_returns_invalid_nonetype_str(self):
        """The literal string 'None' is treated as no value."""
        assert datetimes.str2dt('None') is None

    def test_title_returns_invalid_nonetype_str2(self):
        """The literal string 'null' is treated as no value."""
        assert datetimes.str2dt('null') is None
|
mit
|
bookman25/jest
|
e2e/each/__tests__/failure.test.js
|
1452
|
/**
* Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// NOTE: every case in this file is *expected* to fail — it is a fixture
// exercising jest's `each` failure reporting (array tables, tagged-template
// tables, test.each, and describe.each), not a real test suite.
it.each([[true, true], [true, false]])(
  'array table fails on one row: expected %s == %s',
  (left, right) => {
    expect(left).toBe(right);
  }
);
it.each([[1, 2], [3, 4]])(
  'array table fails on all rows expected %s == %s',
  (left, right) => {
    expect(left).toBe(right);
  }
);
it.each`
  left    | right
  ${true} | ${false}
  ${true} | ${true}
`(
  'template table fails on one row expected: $left == $right',
  ({left, right}) => {
    expect(left).toBe(right);
  }
);
it.each`
  left  | right
  ${1}  | ${2}
  ${3}  | ${4}
`(
  'template table fails on all rows expected: $left == $right',
  ({left, right}) => {
    expect(left).toBe(right);
  }
);
test.each(['red', 'green', 'bean'])(
  "The word %s contains the letter 'z'",
  word => {
    expect(/z/.test(word)).toBe(true);
  }
);
describe.each`
  left   | right
  ${'a'} | ${'b'}
  ${'c'} | ${'d'}
`(
  'template table describe fails on all rows expected "$left" == "$right"',
  ({left, right}) => {
    it('fails ', () => {
      expect(left).toBe(right);
    });
  }
);
describe.each([['a', 'b'], ['c', 'd']])(
  'array table describe fails on all rows expected %s == %s',
  (left, right) => {
    it('fails', () => {
      expect(left).toBe(right);
    });
  }
);
|
mit
|
jim-parry/CodeIgniter4
|
tests/system/Database/Live/ForgeTest.php
|
20593
|
<?php namespace CodeIgniter\Database\Live;
use CodeIgniter\Database\Exceptions\DatabaseException;
use CodeIgniter\Database\Forge;
use CodeIgniter\Test\CIDatabaseTestCase;
/**
* @group DatabaseLive
*/
class ForgeTest extends CIDatabaseTestCase
{
    // Refresh the database between tests.
    protected $refresh = true;

    // Seeder used to populate the test database.
    protected $seed = 'Tests\Support\Database\Seeds\CITestSeeder';

    /**
     * @var \CodeIgniter\Database\Forge
     */
    protected $forge;

    protected function setUp(): void
    {
        parent::setUp();

        // Forge instance bound to this test case's connection group.
        $this->forge = \Config\Database::forge($this->DBGroup);
    }
    /**
     * createDatabase() reports success for a fresh database name.
     * (The database is dropped later by testDropDatabase.)
     */
    public function testCreateDatabase()
    {
        $database_created = $this->forge->createDatabase('test_forge_database');

        $this->assertTrue($database_created);
    }
    /**
     * createDatabase(..., true) succeeds when the database does not exist yet.
     */
    public function testCreateDatabaseIfNotExists()
    {
        $dbName = 'test_forge_database_exist';

        $databaseCreateIfNotExists = $this->forge->createDatabase($dbName, true);
        // SQLite3 databases are plain files; dropDatabase needs a file path there.
        if ($this->db->DBDriver !== 'SQLite3')
        {
            $this->forge->dropDatabase($dbName);
        }

        $this->assertTrue($databaseCreateIfNotExists);
    }
    /**
     * createDatabase(..., true) also succeeds when the database already exists.
     */
    public function testCreateDatabaseIfNotExistsWithDb()
    {
        $dbName = 'test_forge_database_exist';

        $this->forge->createDatabase($dbName);
        $databaseExists = $this->forge->createDatabase($dbName, true);
        // SQLite3 databases are plain files; dropDatabase needs a file path there.
        if ($this->db->DBDriver !== 'SQLite3')
        {
            $this->forge->dropDatabase($dbName);
        }

        $this->assertTrue($databaseExists);
    }
    /**
     * dropDatabase() removes the database created by testCreateDatabase.
     */
    public function testDropDatabase()
    {
        if ($this->db->DBDriver === 'SQLite3')
        {
            $this->markTestSkipped('SQLite3 requires file path to drop database');
        }

        $database_dropped = $this->forge->dropDatabase('test_forge_database');

        $this->assertTrue($database_dropped);
    }
    /**
     * With no CREATE DATABASE template configured, drivers must throw —
     * except SQLite3, where "creating" is just opening the file.
     */
    public function testCreateDatabaseExceptionNoCreateStatement()
    {
        $this->setPrivateProperty($this->forge, 'createDatabaseStr', false);

        if ($this->db->DBDriver === 'SQLite3')
        {
            $database_created = $this->forge->createDatabase('test_forge_database');
            $this->assertTrue($database_created);
        }
        else
        {
            $this->expectException(DatabaseException::class);
            $this->expectExceptionMessage('This feature is not available for the database you are using.');

            $this->forge->createDatabase('test_forge_database');
        }
    }
    /**
     * With no DROP DATABASE template configured, drivers must throw
     * (SQLite3 is skipped — dropping means deleting the file).
     */
    public function testDropDatabaseExceptionNoDropStatement()
    {
        $this->setPrivateProperty($this->forge, 'dropDatabaseStr', false);

        if ($this->db->DBDriver === 'SQLite3')
        {
            $this->markTestSkipped('SQLite3 requires file path to drop database');
        }
        else
        {
            $this->expectException(DatabaseException::class);
            $this->expectExceptionMessage('This feature is not available for the database you are using.');

            $this->forge->dropDatabase('test_forge_database');
        }
    }
    /**
     * createTable() builds a table from addField()/addKey() definitions,
     * including unsigned integer columns.
     */
    public function testCreateTable()
    {
        $this->forge->dropTable('forge_test_table', true);

        $this->forge->addField([
            'id'     => [
                'type'           => 'INTEGER',
                'constraint'     => 11,
                'unsigned'       => true,
                'auto_increment' => true,
            ],
            'mobile' => [
                'type'       => 'INTEGER',
                'constraint' => 10,
                'unsigned'   => true,
            ],
        ]);

        // Restrict the driver's unsigned-capable types for this test.
        $unsignedAttributes = [
            'INTEGER',
        ];

        $this->setPrivateProperty($this->forge, 'unsigned', $unsignedAttributes);

        $this->forge->addKey('id', true);
        $this->forge->createTable('forge_test_table');

        $exist = $this->db->tableExists('forge_test_table');

        $this->forge->dropTable('forge_test_table', true);

        $this->assertTrue($exist);
    }
    /**
     * createTable() accepts driver attributes (here: a table COMMENT).
     */
    public function testCreateTableWithAttributes()
    {
        if ($this->db->DBDriver === 'SQLite3')
        {
            $this->markTestSkipped('SQLite3 does not support comments on tables or columns.');
        }

        $this->forge->dropTable('forge_test_attributes', true);

        $this->forge->addField('id');

        $attributes = [
            'comment' => "Forge's Test",
        ];

        $this->forge->createTable('forge_test_attributes', false, $attributes);

        $exist = $this->db->tableExists('forge_test_attributes');

        $this->forge->dropTable('forge_test_attributes', true, true);

        $this->assertTrue($exist);
    }
    /**
     * Array constraints (ENUM value lists) are honoured on MySQL and
     * converted to TEXT CHECK(...) on SQLite3; other drivers are skipped.
     */
    public function testCreateTableWithArrayFieldConstraints()
    {
        if (in_array($this->db->DBDriver, ['MySQLi', 'SQLite3']))
        {
            $this->forge->dropTable('forge_array_constraint', true);
            $this->forge->addField([
                'status' => [
                    'type'       => 'ENUM',
                    'constraint' => [
                        'sad',
                        'ok',
                        'happy',
                    ],
                ],
            ]);
            $this->forge->createTable('forge_array_constraint');

            $fields = $this->db->getFieldData('forge_array_constraint');

            $this->assertEquals('status', $fields[0]->name);
            if ($this->db->DBDriver === 'SQLite3')
            {
                // SQLite3 converts array constraints to TEXT CHECK(...)
                $this->assertEquals('TEXT', $fields[0]->type);
            }
            else
            {
                $this->assertEquals('enum', $fields[0]->type);
            }

            $this->forge->dropTable('forge_array_constraint', true);
        }
        else
        {
            $this->expectNotToPerformAssertions();
        }
    }
public function testCreateTableWithStringField()
{
$this->forge->dropTable('forge_test_table', true);
$this->forge->addField('id');
$this->forge->addField('name varchar(100) NULL');
$this->forge->createTable('forge_test_table');
$exist = $this->db->tableExists('forge_test_table');
$this->forge->dropTable('db_forge_test_table', true);
$this->assertTrue($exist);
}
    /**
     * An empty table name must raise InvalidArgumentException.
     */
    public function testCreateTableWithEmptyName()
    {
        $this->forge->dropTable('forge_test_table', true);

        $this->forge->addField('id');
        $this->forge->addField('name varchar(100) NULL');

        $this->expectException(\InvalidArgumentException::class);
        $this->expectExceptionMessage('A table name is required for that operation.');

        $this->forge->createTable('');
    }
    /**
     * Creating a table without any addField() calls must raise RuntimeException.
     */
    public function testCreateTableWithNoFields()
    {
        $this->forge->dropTable('forge_test_table', true);

        $this->expectException(\RuntimeException::class);
        $this->expectExceptionMessage('Field information is required.');

        $this->forge->createTable('forge_test_table');
    }
    /**
     * A string field definition without a type ('name' alone) must raise
     * InvalidArgumentException.
     */
    public function testCreateTableWithStringFieldException()
    {
        $this->expectException(\InvalidArgumentException::class);
        $this->expectExceptionMessage('Field information is required for that operation.');

        $this->forge->dropTable('forge_test_table', true);

        $this->forge->addField('id');
        $this->forge->addField('name');

        $this->forge->createTable('forge_test_table');
    }
    /**
     * renameTable() renames an existing table to a new name.
     */
    public function testRenameTable()
    {
        $this->forge->dropTable('forge_test_table_dummy', true);

        $this->forge->addField('id');
        $this->forge->addField('name varchar(100) NULL');

        $this->forge->createTable('forge_test_table');

        $this->forge->renameTable('forge_test_table', 'forge_test_table_dummy');

        $exist = $this->db->tableExists('forge_test_table_dummy');

        $this->assertTrue($exist);
    }
    /**
     * Renaming to an empty name must raise InvalidArgumentException.
     */
    public function testRenameTableEmptyNameException()
    {
        $this->forge->dropTable('forge_test_table_dummy', true);

        $this->forge->addField('id');
        $this->forge->addField('name varchar(100) NULL');

        $this->forge->createTable('forge_test_table');

        $this->expectException(\InvalidArgumentException::class);
        $this->expectExceptionMessage('A table name is required for that operation.');

        $this->forge->renameTable('forge_test_table', '');
    }
    /**
     * With no RENAME TABLE template configured, renameTable() must throw.
     */
    public function testRenameTableNoRenameStatementException()
    {
        $this->setPrivateProperty($this->forge, 'renameTableStr', false);

        $this->forge->dropTable('forge_test_table', true);

        $this->forge->addField('id');
        $this->forge->addField('name varchar(100) NULL');

        $this->forge->createTable('forge_test_table');

        $this->expectException(DatabaseException::class);
        $this->expectExceptionMessage('This feature is not available for the database you are using.');

        $this->forge->renameTable('forge_test_table', 'forge_test_table_dummy');
    }
    /**
     * Dropping a table with an empty name must raise DatabaseException.
     */
    public function testDropTableWithEmptyName()
    {
        $this->expectException(DatabaseException::class);
        $this->expectExceptionMessage('A table name is required for that operation.');

        $this->forge->dropTable('', true);
    }
    /**
     * addForeignKey() creates a FK constraint whose metadata is readable
     * back via getForeignKeyData() (naming differs per driver).
     */
    public function testForeignKey()
    {
        $attributes = [];

        // MySQL needs InnoDB for foreign key support.
        if ($this->db->DBDriver === 'MySQLi')
        {
            $attributes = ['ENGINE' => 'InnoDB'];
        }

        $this->forge->addField([
            'id'   => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'name' => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
            ],
        ]);
        $this->forge->addKey('id', true);
        $this->forge->createTable('forge_test_users', true, $attributes);

        $this->forge->addField([
            'id'       => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'users_id' => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'name'     => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
            ],
        ]);
        $this->forge->addKey('id', true);
        $this->forge->addForeignKey('users_id', 'forge_test_users', 'id', 'CASCADE', 'CASCADE');

        $this->forge->createTable('forge_test_invoices', true, $attributes);

        $foreignKeyData = $this->db->getForeignKeyData('forge_test_invoices');

        if ($this->db->DBDriver === 'SQLite3')
        {
            $this->assertEquals($foreignKeyData[0]->constraint_name, 'users_id to db_forge_test_users.id');
            $this->assertEquals($foreignKeyData[0]->sequence, 0);
        }
        else
        {
            $this->assertEquals($foreignKeyData[0]->constraint_name, $this->db->DBPrefix . 'forge_test_invoices_users_id_foreign');
            $this->assertEquals($foreignKeyData[0]->column_name, 'users_id');
            $this->assertEquals($foreignKeyData[0]->foreign_column_name, 'id');
        }
        $this->assertEquals($foreignKeyData[0]->table_name, $this->db->DBPrefix . 'forge_test_invoices');
        $this->assertEquals($foreignKeyData[0]->foreign_table_name, $this->db->DBPrefix . 'forge_test_users');

        $this->forge->dropTable('forge_test_invoices', true);
        $this->forge->dropTable('forge_test_users', true);
    }
    /**
     * addForeignKey() on a column the table does not define ('user_id'
     * vs the actual 'users_id') must raise DatabaseException.
     */
    public function testForeignKeyFieldNotExistException()
    {
        $this->expectException(DatabaseException::class);
        $this->expectExceptionMessage('Field `user_id` not found.');

        $attributes = [];

        // MySQL needs InnoDB for foreign key support.
        if ($this->db->DBDriver === 'MySQLi')
        {
            $attributes = ['ENGINE' => 'InnoDB'];
        }

        $this->forge->addField([
            'id'   => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'name' => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
            ],
        ]);
        $this->forge->addKey('id', true);
        $this->forge->createTable('forge_test_users', true, $attributes);

        $this->forge->addField([
            'id'       => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'users_id' => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'name'     => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
            ],
        ]);
        $this->forge->addKey('id', true);
        $this->forge->addForeignKey('user_id', 'forge_test_users', 'id', 'CASCADE', 'CASCADE');

        $this->forge->createTable('forge_test_invoices', true, $attributes);
    }
    /**
     * dropForeignKey() removes an existing FK constraint, leaving no
     * foreign key metadata behind.
     */
    public function testDropForeignKey()
    {
        $attributes = [];

        // MySQL needs InnoDB for foreign key support.
        if ($this->db->DBDriver === 'MySQLi')
        {
            $attributes = ['ENGINE' => 'InnoDB'];
        }

        $this->forge->addField([
            'id'   => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'name' => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
            ],
        ]);
        $this->forge->addKey('id', true);
        $this->forge->createTable('forge_test_users', true, $attributes);

        $this->forge->addField([
            'id'       => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'users_id' => [
                'type'       => 'INTEGER',
                'constraint' => 11,
            ],
            'name'     => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
            ],
        ]);
        $this->forge->addKey('id', true);
        $this->forge->addForeignKey('users_id', 'forge_test_users', 'id', 'CASCADE', 'CASCADE');

        $this->forge->createTable('forge_test_invoices', true, $attributes);

        $this->forge->dropForeignKey('forge_test_invoices', 'forge_test_invoices_users_id_foreign');

        $foreignKeyData = $this->db->getForeignKeyData('forge_test_invoices');

        $this->assertEmpty($foreignKeyData);

        $this->forge->dropTable('forge_test_invoices', true);
        $this->forge->dropTable('forge_test_users', true);
    }
    /**
     * addColumn() appends a new column after the existing ones.
     */
    public function testAddColumn()
    {
        $this->forge->dropTable('forge_test_table', true);

        $this->forge->addField([
            'id' => [
                'type'           => 'INTEGER',
                'constraint'     => 11,
                'unsigned'       => false,
                'auto_increment' => true,
            ],
        ]);

        $this->forge->addKey('id', true);
        $this->forge->createTable('forge_test_table');

        $newField = [
            'username' => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
                'unique'     => false,
            ],
        ];

        $this->forge->addColumn('forge_test_table', $newField);

        $fieldNames = $this->db->table('forge_test_table')
                               ->get()
                               ->getFieldNames();

        $this->forge->dropTable('forge_test_table', true);

        // The appended column comes after 'id'.
        $this->assertEquals('username', $fieldNames[1]);
    }
    /**
     * addField() definitions translate into correct column names and
     * driver-specific types/lengths/defaults.
     */
    public function testAddFields()
    {
        $this->forge->dropTable('forge_test_fields', true);

        $this->forge->addField([
            'id'       => [
                'type'           => 'INTEGER',
                'constraint'     => 11,
                'unsigned'       => false,
                'auto_increment' => true,
            ],
            'username' => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
                'unique'     => false,
            ],
            'name'     => [
                'type'       => 'VARCHAR',
                'constraint' => 255,
            ],
            'active'   => [
                'type'       => 'INTEGER',
                'constraint' => 11,
                'default'    => 0,
            ],
        ]);

        $this->forge->addKey('id', true);
        $this->forge->addUniqueKey(['username', 'active']);
        $create = $this->forge->createTable('forge_test_fields', true);

        //Check Field names
        $fieldsNames = $this->db->getFieldNames('forge_test_fields');
        $this->assertContains('id', $fieldsNames);
        $this->assertContains('username', $fieldsNames);
        $this->assertContains('name', $fieldsNames);
        $this->assertContains('active', $fieldsNames);

        $fieldsData = $this->db->getFieldData('forge_test_fields');

        // Column ordering is driver-dependent; only membership is asserted.
        $this->assertContains($fieldsData[0]->name, ['id', 'name', 'username', 'active']);
        $this->assertContains($fieldsData[1]->name, ['id', 'name', 'username', 'active']);

        if ($this->db->DBDriver === 'MySQLi')
        {
            //Check types
            $this->assertEquals($fieldsData[0]->type, 'int');
            $this->assertEquals($fieldsData[1]->type, 'varchar');

            $this->assertEquals($fieldsData[0]->max_length, 11);

            $this->assertNull($fieldsData[0]->default);
            $this->assertNull($fieldsData[1]->default);

            $this->assertEquals($fieldsData[0]->primary_key, 1);

            $this->assertEquals($fieldsData[1]->max_length, 255);
        }
        elseif ($this->db->DBDriver === 'Postgre')
        {
            //Check types
            $this->assertEquals($fieldsData[0]->type, 'integer');
            $this->assertEquals($fieldsData[1]->type, 'character varying');

            $this->assertEquals($fieldsData[0]->max_length, 32);
            $this->assertNull($fieldsData[1]->default);

            $this->assertEquals($fieldsData[1]->max_length, 255);
        }
        elseif ($this->db->DBDriver === 'SQLite3')
        {
            $this->assertEquals(strtolower($fieldsData[0]->type), 'integer');
            $this->assertEquals(strtolower($fieldsData[1]->type), 'varchar');

            $this->assertEquals($fieldsData[1]->default, null);
        }
        else
        {
            $this->assertTrue(false, 'DB Driver not supported');
        }

        $this->forge->dropTable('forge_test_fields', true);
    }
/**
 * Creating a table with a primary key, a composite index and a composite
 * unique key must surface all three through getIndexData(), with
 * driver-specific index names and type labels.
 */
public function testCompositeKey()
{
    // SQLite3 uses auto increment different
    $unique_or_auto = $this->db->DBDriver === 'SQLite3' ? 'unique' : 'auto_increment';
    $this->forge->addField([
        'id' => [
            'type' => 'INTEGER',
            'constraint' => 3,
            $unique_or_auto => true,
        ],
        'code' => [
            'type' => 'VARCHAR',
            'constraint' => 40,
        ],
        'company' => [
            'type' => 'VARCHAR',
            'constraint' => 40,
        ],
        'active' => [
            'type' => 'INTEGER',
            'constraint' => 1,
        ],
    ]);
    $this->forge->addPrimaryKey('id');
    $this->forge->addKey(['code', 'company']);
    $this->forge->addUniqueKey(['code', 'active']);
    $this->forge->createTable('forge_test_1', true);
    $keys = $this->db->getIndexData('forge_test_1');
    if ($this->db->DBDriver === 'MySQLi')
    {
        // MySQL reports the primary key under the fixed name 'PRIMARY' and
        // names composite indexes after the concatenated column names.
        $this->assertEquals($keys['PRIMARY']->name, 'PRIMARY');
        $this->assertEquals($keys['PRIMARY']->fields, ['id']);
        $this->assertEquals($keys['PRIMARY']->type, 'PRIMARY');
        $this->assertEquals($keys['code_company']->name, 'code_company');
        $this->assertEquals($keys['code_company']->fields, ['code', 'company']);
        $this->assertEquals($keys['code_company']->type, 'INDEX');
        $this->assertEquals($keys['code_active']->name, 'code_active');
        $this->assertEquals($keys['code_active']->fields, ['code', 'active']);
        $this->assertEquals($keys['code_active']->type, 'UNIQUE');
    }
    elseif ($this->db->DBDriver === 'Postgre')
    {
        // Postgres index names are prefixed with the (DBPrefix-ed) table name.
        $this->assertEquals($keys['pk_db_forge_test_1']->name, 'pk_db_forge_test_1');
        $this->assertEquals($keys['pk_db_forge_test_1']->fields, ['id']);
        $this->assertEquals($keys['pk_db_forge_test_1']->type, 'PRIMARY');
        $this->assertEquals($keys['db_forge_test_1_code_company']->name, 'db_forge_test_1_code_company');
        $this->assertEquals($keys['db_forge_test_1_code_company']->fields, ['code', 'company']);
        $this->assertEquals($keys['db_forge_test_1_code_company']->type, 'INDEX');
        $this->assertEquals($keys['db_forge_test_1_code_active']->name, 'db_forge_test_1_code_active');
        $this->assertEquals($keys['db_forge_test_1_code_active']->fields, ['code', 'active']);
        $this->assertEquals($keys['db_forge_test_1_code_active']->type, 'UNIQUE');
    }
    elseif ($this->db->DBDriver === 'SQLite3')
    {
        // SQLite auto-creates 'sqlite_autoindex_*' entries for the primary
        // key and exposes no index type, so only names/fields are checked.
        $this->assertEquals($keys['sqlite_autoindex_db_forge_test_1_1']->name, 'sqlite_autoindex_db_forge_test_1_1');
        $this->assertEquals($keys['sqlite_autoindex_db_forge_test_1_1']->fields, ['id']);
        $this->assertEquals($keys['db_forge_test_1_code_company']->name, 'db_forge_test_1_code_company');
        $this->assertEquals($keys['db_forge_test_1_code_company']->fields, ['code', 'company']);
        $this->assertEquals($keys['db_forge_test_1_code_active']->name, 'db_forge_test_1_code_active');
        $this->assertEquals($keys['db_forge_test_1_code_active']->fields, ['code', 'active']);
    }
    $this->forge->dropTable('forge_test_1', true);
}
/**
 * A column removed with Forge::dropColumn() must no longer be reported by
 * fieldExists() once the schema cache has been cleared.
 */
public function testDropColumn()
{
    $table = 'forge_test_two';

    // Clean up any leftovers from a previous run, then build a two-column table.
    $this->forge->dropTable($table, true);
    $this->forge->addField([
        'id' => [
            'type' => 'INTEGER',
            'constraint' => 11,
            'unsigned' => false,
            'auto_increment' => true,
        ],
        'name' => [
            'type' => 'varchar',
            'constraint' => 255,
            'null' => true,
        ],
    ]);
    $this->forge->addKey('id', true);
    $this->forge->createTable($table);

    $this->assertTrue($this->db->fieldExists('name', $table));

    $this->forge->dropColumn($table, 'name');

    // Field metadata is cached; reset it so fieldExists() re-reads the schema.
    $this->db->resetDataCache();
    $this->assertFalse($this->db->fieldExists('name', $table));

    $this->forge->dropTable($table, true);
}
/**
 * modifyColumn() with a new 'name' entry must rename the column: the old
 * name disappears and the new one appears in the table schema.
 */
public function testModifyColumnRename()
{
    $this->forge->dropTable('forge_test_three', true);
    $this->forge->addField([
        'id' => [
            'type' => 'INTEGER',
            'constraint' => 11,
            'unsigned' => false,
            'auto_increment' => true,
        ],
        'name' => [
            'type' => 'varchar',
            'constraint' => 255,
            'null' => true,
        ],
    ]);
    $this->forge->addKey('id', true);
    $this->forge->createTable('forge_test_three');
    $this->assertTrue($this->db->fieldExists('name', 'forge_test_three'));
    // The outer key is the existing column; the inner 'name' is the new name.
    $this->forge->modifyColumn('forge_test_three', [
        'name' => [
            'name' => 'altered',
            'type' => 'varchar',
            'constraint' => 255,
            'null' => true,
        ],
    ]);
    // Field metadata is cached; reset so fieldExists() re-reads the schema.
    $this->db->resetDataCache();
    $this->assertFalse($this->db->fieldExists('name', 'forge_test_three'));
    $this->assertTrue($this->db->fieldExists('altered', 'forge_test_three'));
    $this->forge->dropTable('forge_test_three', true);
}
/**
 * A connection-settings array (not just a group name string) can be handed
 * to \Config\Database::forge() and still yields a Forge instance.
 */
public function testConnectWithArrayGroup()
{
    $config = config('Database');

    $forge = \Config\Database::forge($config->tests);

    $this->assertInstanceOf(Forge::class, $forge);
}
/**
* @see https://github.com/codeigniter4/CodeIgniter4/issues/1983
*/
public function testDropTableSuccess()
{
    // Add an index to user table so we have
    // something to work with
    $this->forge->addField([
        'id' => [
            'type' => 'INTEGER',
            'constraint' => 3,
        ],
    ]);
    $this->forge->addKey('id');
    $this->forge->createTable('droptest');
    $this->assertCount(1, $this->db->getIndexData('droptest'));
    $this->forge->dropTable('droptest', true);
    // Check the exact table name that was created/dropped. The previous
    // check used 'dropTest', which on case-sensitive drivers (e.g. Postgres)
    // refers to a table that never existed and so passes trivially.
    $this->assertFalse($this->db->tableExists('droptest'));
    if ($this->db->DBDriver === 'SQLite3')
    {
        // SQLite should also have discarded the index along with the table.
        $this->assertCount(0, $this->db->getIndexData('droptest'));
    }
}
}
|
mit
|
lunastorm/wissbi
|
3rd_party/libcxx/test/containers/sequences/deque/deque.cons/move.pass.cpp
|
1774
|
//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// <deque>
// deque(deque&&);
#include <deque>
#include <cassert>
#include "../../../MoveOnly.h"
#include "../../../test_allocator.h"
int main()
{
#ifndef _LIBCPP_HAS_NO_RVALUE_REFERENCES
{
int ab[] = {3, 4, 2, 8, 0, 1, 44, 34, 45, 96, 80, 1, 13, 31, 45};
int* an = ab + sizeof(ab)/sizeof(ab[0]);
typedef test_allocator<MoveOnly> A;
std::deque<MoveOnly, A> c1(A(1));
for (int* p = ab; p < an; ++p)
c1.push_back(MoveOnly(*p));
std::deque<MoveOnly, A> c2(A(2));
for (int* p = ab; p < an; ++p)
c2.push_back(MoveOnly(*p));
std::deque<MoveOnly, A> c3 = std::move(c1);
assert(c2 == c3);
assert(c1.size() == 0);
assert(c3.get_allocator() == c1.get_allocator());
}
{
int ab[] = {3, 4, 2, 8, 0, 1, 44, 34, 45, 96, 80, 1, 13, 31, 45};
int* an = ab + sizeof(ab)/sizeof(ab[0]);
typedef other_allocator<MoveOnly> A;
std::deque<MoveOnly, A> c1(A(1));
for (int* p = ab; p < an; ++p)
c1.push_back(MoveOnly(*p));
std::deque<MoveOnly, A> c2(A(2));
for (int* p = ab; p < an; ++p)
c2.push_back(MoveOnly(*p));
std::deque<MoveOnly, A> c3 = std::move(c1);
assert(c2 == c3);
assert(c1.size() == 0);
assert(c3.get_allocator() == c1.get_allocator());
}
#endif // _LIBCPP_HAS_NO_RVALUE_REFERENCES
}
|
mit
|
freidcreations/QueryMule
|
src/Query/Common/Select/HasOrderBy.php
|
1447
|
<?php
declare(strict_types=1);
namespace Redstraw\Hooch\Query\Common\Select;
use Redstraw\Hooch\Query\Exception\InterfaceException;
use Redstraw\Hooch\Query\Field\FieldInterface;
use Redstraw\Hooch\Query\Sql;
use Redstraw\Hooch\Query\Statement\SelectInterface;
/**
* Trait HasOrderBy
* @package Redstraw\Hooch\Query\Common\Sql
*/
trait HasOrderBy
{
    /**
     * Append an ORDER BY clause (or an additional ordering column) to the query.
     *
     * @param FieldInterface $field column/field to order by
     * @param string $order sort direction, defaults to Sql::DESC
     * @return SelectInterface
     * @throws InterfaceException when the host class does not implement SelectInterface
     */
    public function orderBy(FieldInterface $field, string $order = Sql::DESC): SelectInterface
    {
        if($this instanceof SelectInterface){
            $field->setAccent($this->query()->accent());
            $query = $this->query();
            $this->query()->clause(Sql::ORDER, function (Sql $sql) use ($query, $field, $order) {
                return $sql
                    // First ordering column emits "ORDER BY"; later ones emit only ",".
                    ->ifThenAppend(!$query->hasClause(Sql::ORDER), Sql::ORDER)
                    ->ifThenAppend(!$query->hasClause(Sql::ORDER), Sql::BY)
                    ->ifThenAppend($query->hasClause(Sql::ORDER), ',' , [], false)
                    ->append($field->sql()->queryString())
                    ->append(strtoupper($order));
            });
            return $this;
        }else {
            throw new InterfaceException(sprintf("Must invoke SelectInterface in: %s.", get_class($this)));
        }
    }
}
|
mit
|
enettolima/magento-training
|
magento2ce/app/code/Magento/Cms/Test/Unit/Model/Template/FilterProviderTest.php
|
2400
|
<?php
/**
* Copyright © 2016 Magento. All rights reserved.
* See COPYING.txt for license details.
*/
namespace Magento\Cms\Test\Unit\Model\Template;
/**
 * Unit tests for \Magento\Cms\Model\Template\FilterProvider: filter type,
 * per-request caching of filter instances, and rejection of non-filter classes.
 */
class FilterProviderTest extends \PHPUnit_Framework_TestCase
{
    /**
     * @var \Magento\Cms\Model\Template\FilterProvider
     */
    protected $_model;
    /**
     * @var \PHPUnit_Framework_MockObject_MockObject
     */
    protected $_objectManagerMock;
    /**
     * @var \PHPUnit_Framework_MockObject_MockObject
     */
    protected $_filterMock;
    protected function setUp()
    {
        // The object manager always hands back the same filter mock, so the
        // provider's lazy instantiation can be observed via 'get' call counts.
        $this->_filterMock = $this->getMock('Magento\Cms\Model\Template\Filter', [], [], '', false);
        $this->_objectManagerMock = $this->getMock('Magento\Framework\ObjectManagerInterface');
        $this->_objectManagerMock->expects($this->any())->method('get')->will($this->returnValue($this->_filterMock));
        $this->_model = new \Magento\Cms\Model\Template\FilterProvider($this->_objectManagerMock);
    }
    /**
     * @covers \Magento\Cms\Model\Template\FilterProvider::getBlockFilter
     */
    public function testGetBlockFilter()
    {
        $this->assertInstanceOf('Magento\Cms\Model\Template\Filter', $this->_model->getBlockFilter());
    }
    /**
     * @covers \Magento\Cms\Model\Template\FilterProvider::getPageFilter
     */
    public function testGetPageFilter()
    {
        $this->assertInstanceOf('Magento\Cms\Model\Template\Filter', $this->_model->getPageFilter());
    }
    /**
     * @covers \Magento\Cms\Model\Template\FilterProvider::getPageFilter
     */
    public function testGetPageFilterInnerCache()
    {
        // The second call must hit the provider's internal cache: the object
        // manager's 'get' is expected exactly once.
        $this->_objectManagerMock->expects($this->once())->method('get')->will($this->returnValue($this->_filterMock));
        $this->_model->getPageFilter();
        $this->_model->getPageFilter();
    }
    /**
     * @covers \Magento\Cms\Model\Template\FilterProvider::getPageFilter
     * @expectedException \Exception
     */
    public function testGetPageWrongInstance()
    {
        // A provider configured with a class that is not a template filter
        // must throw when the filter is requested.
        $someClassMock = $this->getMock('SomeClass');
        $objectManagerMock = $this->getMock('Magento\Framework\ObjectManagerInterface');
        $objectManagerMock->expects($this->once())->method('get')->will($this->returnValue($someClassMock));
        $model = new \Magento\Cms\Model\Template\FilterProvider($objectManagerMock, 'SomeClass', 'SomeClass');
        $model->getPageFilter();
    }
}
|
mit
|
skynode/eShopOnContainers
|
src/Services/Webhooks/Webhooks.API/Infrastructure/HttpGlobalExceptionFilter.cs
|
2410
|
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Filters;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using System.Net;
using Webhooks.API.Exceptions;
using Webhooks.API.Infrastructure.ActionResult;
namespace Webhooks.API.Infrastructure
{
public class HttpGlobalExceptionFilter : IExceptionFilter
{
private readonly IWebHostEnvironment env;
private readonly ILogger<HttpGlobalExceptionFilter> logger;
public HttpGlobalExceptionFilter(IWebHostEnvironment env, ILogger<HttpGlobalExceptionFilter> logger)
{
this.env = env;
this.logger = logger;
}
public void OnException(ExceptionContext context)
{
logger.LogError(new EventId(context.Exception.HResult),
context.Exception,
context.Exception.Message);
if (context.Exception.GetType() == typeof(WebhooksDomainException))
{
var problemDetails = new ValidationProblemDetails()
{
Instance = context.HttpContext.Request.Path,
Status = StatusCodes.Status400BadRequest,
Detail = "Please refer to the errors property for additional details."
};
problemDetails.Errors.Add("DomainValidations", new string[] { context.Exception.Message.ToString() });
context.Result = new BadRequestObjectResult(problemDetails);
context.HttpContext.Response.StatusCode = (int)HttpStatusCode.BadRequest;
}
else
{
var json = new JsonErrorResponse
{
Messages = new[] { "An error ocurred." }
};
if (env.IsDevelopment())
{
json.DeveloperMeesage = context.Exception;
}
context.Result = new InternalServerErrorObjectResult(json);
context.HttpContext.Response.StatusCode = (int)HttpStatusCode.InternalServerError;
}
context.ExceptionHandled = true;
}
private class JsonErrorResponse
{
public string[] Messages { get; set; }
public object DeveloperMeesage { get; set; }
}
}
}
|
mit
|
davehorton/drachtio-server
|
deps/boost_1_77_0/tools/build/src/engine/modules/sequence.cpp
|
2973
|
/*
* Copyright Vladimir Prus 2003.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE.txt or copy at
* https://www.bfgroup.xyz/b2/LICENSE.txt)
*/
#include "../native.h"
#include "../object.h"
#include "../lists.h"
#include "../compile.h"
#include <stdlib.h>
#ifndef max
# define max(a,b) ((a)>(b)?(a):(b))
#endif
/* Returns every element of the first argument list whose corresponding entry
 * in the parallel 'rank' list equals the maximum rank present. */
LIST * sequence_select_highest_ranked( FRAME * frame, int flags )
{
    LIST * const elements = lol_get( frame->args, 0 );
    LIST * const rank = lol_get( frame->args, 1 );
    LIST * selected = L0;
    int best = -1;

    /* Pass 1: find the maximum rank. */
    {
        LISTITER rank_it = list_begin( rank );
        LISTITER const rank_end = list_end( rank );
        while ( rank_it != rank_end )
        {
            int const value = atoi( object_str( list_item( rank_it ) ) );
            if ( value > best )
                best = value;
            rank_it = list_next( rank_it );
        }
    }

    /* Pass 2: copy over the elements whose rank equals the maximum. */
    {
        LISTITER rank_it = list_begin( rank );
        LISTITER const rank_end = list_end( rank );
        LISTITER elem_it = list_begin( elements );
        while ( rank_it != rank_end )
        {
            if ( atoi( object_str( list_item( rank_it ) ) ) == best )
                selected = list_push_back( selected, object_copy( list_item(
                    elem_it ) ) );
            rank_it = list_next( rank_it );
            elem_it = list_next( elem_it );
        }
    }
    return selected;
}
/* Applies the rule named by the first element of 'function' to every element
 * of 'sequence' and returns the concatenated results. Any extra elements of
 * 'function' are passed as leading arguments on each invocation. */
LIST * sequence_transform( FRAME * frame, int flags )
{
    LIST * function = lol_get( frame->args, 0 );
    LIST * sequence = lol_get( frame->args, 1 );
    LIST * result = L0;
    OBJECT * function_name = list_front( function );
    /* Elements after the rule name act as bound arguments prepended to each call. */
    LISTITER args_begin = list_next( list_begin( function ) ), args_end = list_end( function );
    LISTITER iter = list_begin( sequence ), end = list_end( sequence );
    /* Bind the rule in the caller's module, not this native module. */
    RULE * rule = bindrule( function_name, frame->prev->module );
    for ( ; iter != end; iter = list_next( iter ) )
    {
        /* Build a fresh child frame per element so each invocation sees the
         * caller's module and its own argument list. */
        FRAME inner[ 1 ];
        frame_init( inner );
        inner->prev = frame;
        inner->prev_user = frame->prev_user;
        inner->module = frame->prev->module;
        lol_add( inner->args, list_push_back( list_copy_range( function, args_begin, args_end ), object_copy( list_item( iter ) ) ) );
        result = list_append( result, evaluate_rule( rule, function_name, inner ) );
        frame_free( inner );
    }
    return result;
}
/* Registers this module's native rules with the engine. Registration order
 * matches the original: select-highest-ranked first, then transform. */
void init_sequence()
{
    char const * select_args[] = { "elements", "*", ":", "rank", "*", 0 };
    char const * transform_args[] = { "function", "+", ":", "sequence", "*", 0 };

    declare_native_rule( "sequence", "select-highest-ranked", select_args,
                         sequence_select_highest_ranked, 1 );
    declare_native_rule( "sequence", "transform", transform_args,
                         sequence_transform, 1 );
}
|
mit
|
lindexi/lindexi_gd
|
WpfInk/WpfInk/PresentationCore/MS/Internal/Ink/EllipticalNodeOperations.cs
|
38057
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Windows;
using System.Windows.Media;
using System.Windows.Ink;
using System.Windows.Input;
using System.Diagnostics;
namespace MS.Internal.Ink
{
/// <summary>
/// StrokeNodeOperations implementation for elliptical nodes
/// </summary>
internal class EllipticalNodeOperations : StrokeNodeOperations
{
/// <summary>
/// Constructor
/// </summary>
/// <param name="nodeShape"></param>
        internal EllipticalNodeOperations(StylusShape nodeShape)
            : base(nodeShape)
        {
            System.Diagnostics.Debug.Assert((nodeShape != null) && nodeShape.IsEllipse);
            _radii = new Size(nodeShape.Width * 0.5, nodeShape.Height * 0.5);
            // All operations with ellipses become simple(r) if we transform ellipses into circles.
            // Use the max of the radii for the radius of the circle
            _radius = Math.Max(_radii.Width, _radii.Height);
            // Compute ellipse-to-circle and circle-to-ellipse transforms. The former is used
            // in hit-testing operations while the latter is used when computing vertices of
            // a quadrangle connecting two ellipses
            _transform = nodeShape.Transform;
            // Matrix is a value type, so this assignment copies _transform;
            // inverting the copy below leaves _transform itself untouched.
            _nodeShapeToCircle = _transform;
            Debug.Assert(_nodeShapeToCircle.HasInverse, "About to invert a non-invertible transform");
            _nodeShapeToCircle.Invert();
            if (DoubleUtil.AreClose(_radii.Width, _radii.Height))
            {
                // Equal radii: the shape is already a circle, so the two
                // transforms are simply the shape transform and its inverse.
                _circleToNodeShape = _transform;
            }
            else
            {
                // Reverse the rotation
                if (false == DoubleUtil.IsZero(nodeShape.Rotation))
                {
                    _nodeShapeToCircle.Rotate(-nodeShape.Rotation);
                    Debug.Assert(_nodeShapeToCircle.HasInverse, "Just rotated an invertible transform and produced a non-invertible one");
                }
                // Scale to enlarge the shorter axis so both radii match _radius.
                double sx, sy;
                if (_radii.Width > _radii.Height)
                {
                    sx = 1;
                    sy = _radii.Width / _radii.Height;
                }
                else
                {
                    sx = _radii.Height / _radii.Width;
                    sy = 1;
                }
                _nodeShapeToCircle.Scale(sx, sy);
                Debug.Assert(_nodeShapeToCircle.HasInverse, "Just scaled an invertible transform and produced a non-invertible one");
                _circleToNodeShape = _nodeShapeToCircle;
                _circleToNodeShape.Invert();
            }
        }
/// <summary>
/// This is probably not the best (design-wise) but the cheapest way to tell
/// EllipticalNodeOperations from all other implementations of node operations.
/// </summary>
internal override bool IsNodeShapeEllipse { get { return true; } }
/// <summary>
/// Finds connecting points for a pair of stroke nodes
/// </summary>
/// <param name="beginNode">a node to connect</param>
/// <param name="endNode">another node, next to beginNode</param>
/// <returns>connecting quadrangle</returns>
        internal override Quad GetConnectingQuad(in StrokeNodeData beginNode,in StrokeNodeData endNode)
        {
            if (beginNode.IsEmpty || endNode.IsEmpty || DoubleUtil.AreClose(beginNode.Position, endNode.Position))
            {
                return Quad.Empty;
            }
            // Get the vector between the node positions
            Vector spine = endNode.Position - beginNode.Position;
            // Work in circle space: common tangents of two circles are much
            // simpler to compute than those of two ellipses.
            if (_nodeShapeToCircle.IsIdentity == false)
            {
                spine = _nodeShapeToCircle.Transform(spine);
            }
            double beginRadius = _radius * beginNode.PressureFactor;
            double endRadius = _radius * endNode.PressureFactor;
            // Get the vector and the distance between the node positions
            double distanceSquared = spine.LengthSquared;
            double delta = endRadius - beginRadius;
            double deltaSquared = DoubleUtil.IsZero(delta) ? 0 : (delta * delta);
            if (DoubleUtil.LessThanOrClose(distanceSquared, deltaSquared))
            {
                // One circle is contained within the other
                return Quad.Empty;
            }
            // Thus, at this point, distance > 0, which avoids the DivideByZero error
            // Also, here, distanceSquared > deltaSquared
            // Thus, 0 <= rSin < 1
            // Get the components of the radius vectors
            double distance = Math.Sqrt(distanceSquared);
            spine /= distance;
            Vector rad = spine;
            // Turn left
            double temp = rad.Y;
            rad.Y = -rad.X;
            rad.X = temp;
            Vector vectorToLeftTangent, vectorToRightTangent;
            // rSin is the sine of the angle between the spine and the radius
            // to a tangent point; it is zero when both circles are equal-sized.
            double rSinSquared = deltaSquared / distanceSquared;
            if (DoubleUtil.IsZero(rSinSquared))
            {
                vectorToLeftTangent = rad;
                vectorToRightTangent = -rad;
            }
            else
            {
                rad *= Math.Sqrt(1 - rSinSquared);
                spine *= Math.Sqrt(rSinSquared);
                if (beginNode.PressureFactor < endNode.PressureFactor)
                {
                    spine = -spine;
                }
                vectorToLeftTangent = spine + rad;
                vectorToRightTangent = spine - rad;
            }
            // Get the common tangent points
            if (_circleToNodeShape.IsIdentity == false)
            {
                vectorToLeftTangent = _circleToNodeShape.Transform(vectorToLeftTangent);
                vectorToRightTangent = _circleToNodeShape.Transform(vectorToRightTangent);
            }
            // Unit tangent directions scaled by each node's radius give the
            // four quadrangle vertices in original (ellipse) space.
            return new Quad(beginNode.Position + (vectorToLeftTangent * beginRadius),
                            endNode.Position + (vectorToLeftTangent * endRadius),
                            endNode.Position + (vectorToRightTangent * endRadius),
                            beginNode.Position + (vectorToRightTangent * beginRadius));
        }
        /// <summary>
        /// Produces the contour segments outlining a single node (when quad is
        /// empty) or the node joined to its predecessor via the connecting quad.
        /// </summary>
        /// <param name="node">node to outline; must not be empty</param>
        /// <param name="quad">connecting quadrangle to the previous node; may be empty</param>
        /// <returns>the node's contour segments</returns>
        internal override IEnumerable<ContourSegment> GetContourSegments(StrokeNodeData node, Quad quad)
        {
            System.Diagnostics.Debug.Assert(node.IsEmpty == false);
            if (quad.IsEmpty)
            {
                // Lone node: one segment whose begin and end coincide at the
                // rightmost point of the node, with the node's center attached.
                Point point = node.Position;
                point.X += _radius;
                yield return new ContourSegment(point, point, node.Position);
            }
            else if (_nodeShapeToCircle.IsIdentity)
            {
                yield return new ContourSegment(quad.A, quad.B);
                yield return new ContourSegment(quad.B, quad.C, node.Position);
                yield return new ContourSegment(quad.C, quad.D);
                yield return new ContourSegment(quad.D, quad.A);
            }
            // NOTE(review): when the shape is a non-circular ellipse (transform
            // not identity) and quad is non-empty, nothing is yielded here —
            // presumably callers use GetNonBezierContourSegments in that case;
            // confirm against the call sites.
        }
/// <summary>
/// ISSUE-2004/06/15- temporary workaround to avoid hit-testing ellipses with ellipses
/// </summary>
/// <param name="beginNode"></param>
/// <param name="endNode"></param>
/// <returns></returns>
internal override IEnumerable<ContourSegment> GetNonBezierContourSegments(StrokeNodeData beginNode, StrokeNodeData endNode)
{
Quad quad = beginNode.IsEmpty ? Quad.Empty : base.GetConnectingQuad(beginNode, endNode);
return base.GetContourSegments(endNode, quad);
}
/// <summary>
/// Hit-tests a stroke segment defined by two nodes against a linear segment.
/// </summary>
/// <param name="beginNode">Begin node of the stroke segment to hit-test. Can be empty (none)</param>
/// <param name="endNode">End node of the stroke segment</param>
/// <param name="quad">Pre-computed quadrangle connecting the two nodes.
/// Can be empty if the begion node is empty or when one node is entirely inside the other</param>
/// <param name="hitBeginPoint">an end point of the hitting linear segment</param>
/// <param name="hitEndPoint">an end point of the hitting linear segment</param>
/// <returns>true if the hitting segment intersect the contour comprised of the two stroke nodes</returns>
        internal override bool HitTest(
            in StrokeNodeData beginNode, in StrokeNodeData endNode, Quad quad, Point hitBeginPoint, Point hitEndPoint)
        {
            StrokeNodeData bigNode, smallNode;
            if (beginNode.IsEmpty || (quad.IsEmpty && (endNode.PressureFactor > beginNode.PressureFactor)))
            {
                // Need to test one node only
                bigNode = endNode;
                smallNode = StrokeNodeData.Empty;
            }
            else
            {
                // In this case the size doesn't matter.
                bigNode = beginNode;
                smallNode = endNode;
            }
            // Compute the positions of the involved points relative to bigNode.
            Vector hitBegin = hitBeginPoint - bigNode.Position;
            Vector hitEnd = hitEndPoint - bigNode.Position;
            // If the node shape is an ellipse, transform the scene to turn the shape to a circle
            if (_nodeShapeToCircle.IsIdentity == false)
            {
                hitBegin = _nodeShapeToCircle.Transform(hitBegin);
                hitEnd = _nodeShapeToCircle.Transform(hitEnd);
            }
            bool isHit = false;
            // Hit-test the big node: the segment hits the node iff its nearest
            // point to the node center lies within the node's radius.
            double bigRadius = _radius * bigNode.PressureFactor;
            Vector nearest = GetNearest(hitBegin, hitEnd);
            if (nearest.LengthSquared <= (bigRadius * bigRadius))
            {
                isHit = true;
            }
            else if (quad.IsEmpty == false)
            {
                // Hit-test the other node
                Vector spineVector = smallNode.Position - bigNode.Position;
                if (_nodeShapeToCircle.IsIdentity == false)
                {
                    spineVector = _nodeShapeToCircle.Transform(spineVector);
                }
                double smallRadius = _radius * smallNode.PressureFactor;
                // Shift the segment so it is relative to smallNode, then repeat
                // the distance check; finally fall back to the connecting quad.
                nearest = GetNearest(hitBegin - spineVector, hitEnd - spineVector);
                if ((nearest.LengthSquared <= (smallRadius * smallRadius)) || HitTestQuadSegment(quad, hitBeginPoint, hitEndPoint))
                {
                    isHit = true;
                }
            }
            return isHit;
        }
/// <summary>
/// Hit-tests a stroke segment defined by two nodes against another stroke segment.
/// </summary>
/// <param name="beginNode">Begin node of the stroke segment to hit-test. Can be empty (none)</param>
/// <param name="endNode">End node of the stroke segment</param>
/// <param name="quad">Pre-computed quadrangle connecting the two nodes.
/// Can be empty if the begion node is empty or when one node is entirely inside the other</param>
/// <param name="hitContour">a collection of basic segments outlining the hitting contour</param>
/// <returns>true if the contours intersect or overlap</returns>
        internal override bool HitTest(
            in StrokeNodeData beginNode, in StrokeNodeData endNode, Quad quad, IEnumerable<ContourSegment> hitContour)
        {
            StrokeNodeData bigNode, smallNode;
            double bigRadiusSquared, smallRadiusSquared = 0;
            Vector spineVector;
            if (beginNode.IsEmpty || (quad.IsEmpty && (endNode.PressureFactor > beginNode.PressureFactor)))
            {
                // Need to test one node only
                bigNode = endNode;
                smallNode = StrokeNodeData.Empty;
                spineVector = new Vector();
            }
            else
            {
                // In this case the size doesn't matter.
                bigNode = beginNode;
                smallNode = endNode;
                smallRadiusSquared = _radius * smallNode.PressureFactor;
                smallRadiusSquared *= smallRadiusSquared;
                // Find position of smallNode relative to the bigNode.
                spineVector = smallNode.Position - bigNode.Position;
                // If the node shape is an ellipse, transform the scene to turn the shape to a circle
                if (_nodeShapeToCircle.IsIdentity == false)
                {
                    spineVector = _nodeShapeToCircle.Transform(spineVector);
                }
            }
            bigRadiusSquared = _radius * bigNode.PressureFactor;
            bigRadiusSquared *= bigRadiusSquared;
            bool isHit = false;
            // When hit-testing a contour against another contour, like in this case,
            // the default implementation checks whether any edge (segment) of the hitting
            // contour intersects with the contour of the ink segment. But this doesn't cover
            // the case when the ink segment is entirely inside of the hitting segment.
            // The bool variable isInside is used here to track that case. It answers the question
            // 'Is ink contour inside if the hitting contour?'. It's initialized to 'true"
            // and then verified for each edge of the hitting contour until there's a hit or
            // until it's false.
            bool isInside = true;
            foreach (ContourSegment hitSegment in hitContour)
            {
                if (hitSegment.IsArc)
                {
                    // ISSUE-2004/06/15-vsmirnov - ellipse vs arc hit-testing is not implemented
                    // and currently disabled in ErasingStroke
                }
                else
                {
                    // Find position of the hitting segment relative to bigNode transformed to circle.
                    Vector hitBegin = hitSegment.Begin - bigNode.Position;
                    Vector hitEnd = hitBegin + hitSegment.Vector;
                    if (_nodeShapeToCircle.IsIdentity == false)
                    {
                        hitBegin = _nodeShapeToCircle.Transform(hitBegin);
                        hitEnd = _nodeShapeToCircle.Transform(hitEnd);
                    }
                    // Hit-test the big node: nearest point of the segment to the
                    // node center must lie within the node's radius.
                    Vector nearest = GetNearest(hitBegin, hitEnd);
                    if (nearest.LengthSquared <= bigRadiusSquared)
                    {
                        isHit = true;
                        break;
                    }
                    // Hit-test the other node
                    if (quad.IsEmpty == false)
                    {
                        nearest = GetNearest(hitBegin - spineVector, hitEnd - spineVector);
                        if ((nearest.LengthSquared <= smallRadiusSquared) ||
                            HitTestQuadSegment(quad, hitSegment.Begin, hitSegment.End))
                        {
                            isHit = true;
                            break;
                        }
                    }
                    // While there's still a chance to find the both nodes inside the hitting contour,
                    // continue checking on position of the endNode relative to the edges of the hitting contour.
                    if (isInside &&
                        (WhereIsVectorAboutVector(endNode.Position - hitSegment.Begin, hitSegment.Vector) != HitResult.Right))
                    {
                        isInside = false;
                    }
                }
            }
            return (isHit || isInside);
        }
        /// <summary>
        /// Cut-test ink segment defined by two nodes and a connecting quad against a linear segment
        /// </summary>
        /// <param name="beginNode">Begin node of the ink segment</param>
        /// <param name="endNode">End node of the ink segment</param>
        /// <param name="quad">Pre-computed quadrangle connecting the two ink nodes</param>
        /// <param name="hitBeginPoint">Begin point of the hitting segment</param>
        /// <param name="hitEndPoint">End point of the hitting segment</param>
        /// <returns>Exact location to cut at represented by StrokeFIndices</returns>
        internal override StrokeFIndices CutTest(
            in StrokeNodeData beginNode, in StrokeNodeData endNode, Quad quad, Point hitBeginPoint, Point hitEndPoint)
        {
            // Compute the positions of the involved points relative to the endNode.
            Vector spineVector = beginNode.IsEmpty ? new Vector(0, 0) : (beginNode.Position - endNode.Position);
            Vector hitBegin = hitBeginPoint - endNode.Position;
            Vector hitEnd = hitEndPoint - endNode.Position;
            // If the node shape is an ellipse, transform the scene to turn the shape to a circle
            if (_nodeShapeToCircle.IsIdentity == false)
            {
                spineVector = _nodeShapeToCircle.Transform(spineVector);
                hitBegin = _nodeShapeToCircle.Transform(hitBegin);
                hitEnd = _nodeShapeToCircle.Transform(hitEnd);
            }
            StrokeFIndices result = StrokeFIndices.Empty;
            // Hit-test the end node
            double beginRadius = 0, endRadius = _radius * endNode.PressureFactor;
            Vector nearest = GetNearest(hitBegin, hitEnd);
            if (nearest.LengthSquared <= (endRadius * endRadius))
            {
                // The end node is hit: the cut extends past the segment's end.
                result.EndFIndex = StrokeFIndices.AfterLast;
                result.BeginFIndex = beginNode.IsEmpty ? StrokeFIndices.BeforeFirst : 1;
            }
            if (beginNode.IsEmpty == false)
            {
                // Hit-test the first node
                beginRadius = _radius * beginNode.PressureFactor;
                nearest = GetNearest(hitBegin - spineVector, hitEnd - spineVector);
                if (nearest.LengthSquared <= (beginRadius * beginRadius))
                {
                    result.BeginFIndex = StrokeFIndices.BeforeFirst;
                    if (!DoubleUtil.AreClose(result.EndFIndex, StrokeFIndices.AfterLast))
                    {
                        result.EndFIndex = 0;
                    }
                }
            }
            // If both nodes are hit or nothing is hit at all, return.
            if (result.IsFull || quad.IsEmpty
                || (result.IsEmpty && (HitTestQuadSegment(quad, hitBeginPoint, hitEndPoint) == false)))
            {
                return result;
            }
            // Find out whether the {begin, end} segment intersects with the contour
            // of the stroke segment {_lastNode, _thisNode}, and find the lower index
            // of the fragment to cut out.
            if (!DoubleUtil.AreClose(result.BeginFIndex, StrokeFIndices.BeforeFirst))
            {
                result.BeginFIndex = ClipTest(-spineVector, beginRadius, endRadius, hitBegin - spineVector, hitEnd - spineVector);
            }
            if (!DoubleUtil.AreClose(result.EndFIndex, StrokeFIndices.AfterLast))
            {
                // Mirror the clip test from the other end of the spine.
                result.EndFIndex = 1 - ClipTest(spineVector, endRadius, beginRadius, hitBegin, hitEnd);
            }
            if (IsInvalidCutTestResult(result))
            {
                return StrokeFIndices.Empty;
            }
            return result;
        }
        /// <summary>
        /// CutTest an inking StrokeNode segment (two nodes and a connecting quadrangle) against a hitting contour
        /// (represented by an enumerator of Contoursegments).
        /// </summary>
        /// <param name="beginNode">The begin StrokeNodeData</param>
        /// <param name="endNode">The end StrokeNodeData</param>
        /// <param name="quad">Connecting quadrangle between the begin and end inking node</param>
        /// <param name="hitContour">The hitting ContourSegments</param>
        /// <returns>StrokeFIndices representing the location for cutting</returns>
        internal override StrokeFIndices CutTest(
            in StrokeNodeData beginNode, in StrokeNodeData endNode, Quad quad, IEnumerable<ContourSegment> hitContour)
        {
            // Compute the positions of the beginNode relative to the endNode.
            Vector spineVector = beginNode.IsEmpty ? new Vector(0, 0) : (beginNode.Position - endNode.Position);
            // If the node shape is an ellipse, transform the scene to turn the shape to a circle
            if (_nodeShapeToCircle.IsIdentity == false)
            {
                spineVector = _nodeShapeToCircle.Transform(spineVector);
            }
            double beginRadius = 0, endRadius;
            double beginRadiusSquared = 0, endRadiusSquared;
            endRadius = _radius * endNode.PressureFactor;
            endRadiusSquared = endRadius * endRadius;
            if (beginNode.IsEmpty == false)
            {
                beginRadius = _radius * beginNode.PressureFactor;
                beginRadiusSquared = beginRadius * beginRadius;
            }
            // Tracks whether the whole ink segment may still lie inside the
            // hitting contour (see the contour HitTest overload for rationale).
            bool isInside = true;
            StrokeFIndices result = StrokeFIndices.Empty;
            foreach (ContourSegment hitSegment in hitContour)
            {
                if (hitSegment.IsArc)
                {
                    // ISSUE-2004/06/15-vsmirnov - ellipse vs arc hit-testing is not implemented
                    // and currently disabled in ErasingStroke
                }
                else
                {
                    Vector hitBegin = hitSegment.Begin - endNode.Position;
                    Vector hitEnd = hitBegin + hitSegment.Vector;
                    // If the node shape is an ellipse, transform the scene to turn
                    // the shape into circle.
                    if (_nodeShapeToCircle.IsIdentity == false)
                    {
                        hitBegin = _nodeShapeToCircle.Transform(hitBegin);
                        hitEnd = _nodeShapeToCircle.Transform(hitEnd);
                    }
                    bool isHit = false;
                    // Hit-test the end node
                    Vector nearest = GetNearest(hitBegin, hitEnd);
                    if (nearest.LengthSquared < endRadiusSquared)
                    {
                        isHit = true;
                        if (!DoubleUtil.AreClose(result.EndFIndex, StrokeFIndices.AfterLast))
                        {
                            result.EndFIndex = StrokeFIndices.AfterLast;
                            if (beginNode.IsEmpty)
                            {
                                result.BeginFIndex = StrokeFIndices.BeforeFirst;
                                break;
                            }
                            if (DoubleUtil.AreClose(result.BeginFIndex, StrokeFIndices.BeforeFirst))
                            {
                                break;
                            }
                        }
                    }
                    if ((beginNode.IsEmpty == false) && (!isHit || !DoubleUtil.AreClose(result.BeginFIndex, StrokeFIndices.BeforeFirst)))
                    {
                        // Hit-test the first node
                        nearest = GetNearest(hitBegin - spineVector, hitEnd - spineVector);
                        if (nearest.LengthSquared < beginRadiusSquared)
                        {
                            isHit = true;
                            if (!DoubleUtil.AreClose(result.BeginFIndex, StrokeFIndices.BeforeFirst))
                            {
                                result.BeginFIndex = StrokeFIndices.BeforeFirst;
                                if (DoubleUtil.AreClose(result.EndFIndex, StrokeFIndices.AfterLast))
                                {
                                    break;
                                }
                            }
                        }
                    }
                    // If both nodes are hit or nothing is hit at all, return.
                    if (beginNode.IsEmpty || (!isHit && (quad.IsEmpty ||
                        (HitTestQuadSegment(quad, hitSegment.Begin, hitSegment.End) == false))))
                    {
                        // No hit from this edge; keep updating the inside-ness flag.
                        if (isInside && (WhereIsVectorAboutVector(
                            endNode.Position - hitSegment.Begin, hitSegment.Vector) != HitResult.Right))
                        {
                            isInside = false;
                        }
                        continue;
                    }
                    isInside = false;
                    // Calculate the exact locations to cut.
                    CalculateCutLocations(spineVector, hitBegin, hitEnd, endRadius, beginRadius, ref result);
                    if (result.IsFull)
                    {
                        break;
                    }
                }
            }
            //
            if (!result.IsFull)
            {
                if (isInside == true)
                {
                    // Every edge kept the segment strictly inside the hitting
                    // contour: the whole segment is cut.
                    System.Diagnostics.Debug.Assert(result.IsEmpty);
                    result = StrokeFIndices.Full;
                }
                else if ((DoubleUtil.AreClose(result.EndFIndex, StrokeFIndices.BeforeFirst)) && (!DoubleUtil.AreClose(result.BeginFIndex, StrokeFIndices.AfterLast)))
                {
                    result.EndFIndex = StrokeFIndices.AfterLast;
                }
                else if ((DoubleUtil.AreClose(result.BeginFIndex,StrokeFIndices.AfterLast)) && (!DoubleUtil.AreClose(result.EndFIndex, StrokeFIndices.BeforeFirst)))
                {
                    result.BeginFIndex = StrokeFIndices.BeforeFirst;
                }
            }
            if (IsInvalidCutTestResult(result))
            {
                return StrokeFIndices.Empty;
            }
            return result;
        }
/// <summary>
/// Clip-Testing a circular inking segment against a linear segment.
/// See http://tabletpc/longhorn/Specs/Rendering%20and%20Hit-Testing%20Ink%20in%20Avalon%20M11.doc section
/// 5.4.4.14 Clip-Testing a Circular Inking Segment against a Linear Segment for details of the algorithm
/// </summary>
/// <param name="spineVector">Represent the spine of the inking segment pointing from the beginNode to endNode</param>
/// <param name="beginRadius">Radius of the beginNode</param>
/// <param name="endRadius">Radius of the endNode</param>
/// <param name="hitBegin">Hitting segment start point</param>
/// <param name="hitEnd">Hitting segment end point</param>
/// <returns>A double which represents the location for cutting</returns>
private static double ClipTest(Vector spineVector, double beginRadius, double endRadius, Vector hitBegin, Vector hitEnd)
{
    // First handle the special case when the spineVector is (0,0). In other words, this is the case
    // when the stylus stays at the same location but pressure changes.
    if (DoubleUtil.IsZero(spineVector.X) && DoubleUtil.IsZero(spineVector.Y))
    {
        System.Diagnostics.Debug.Assert(DoubleUtil.AreClose(beginRadius, endRadius) == false);
        Vector nearest = GetNearest(hitBegin, hitEnd);
        double radius;
        // Avoid the square root when the nearest point lies on an axis.
        if (nearest.X == 0)
        {
            radius = Math.Abs(nearest.Y);
        }
        else if (nearest.Y == 0)
        {
            radius = Math.Abs(nearest.X);
        }
        else
        {
            radius = nearest.Length;
        }
        // Interpolate the findex from where the nearest distance falls between the two radii.
        return AdjustFIndex((radius - beginRadius) / (endRadius - beginRadius));
    }
    // Degenerate hitting segment: fall back to clip-testing against a single point.
    if (DoubleUtil.AreClose(hitBegin, hitEnd))
    {
        return ClipTest(spineVector, beginRadius, endRadius, hitBegin);
    }
    double findex;
    Vector hitVector = hitEnd - hitBegin;
    if (DoubleUtil.IsZero(Vector.Determinant(spineVector, hitVector)))
    {
        // hitVector and spineVector are parallel
        findex = ClipTest(spineVector, beginRadius, endRadius, GetNearest(hitBegin, hitEnd));
        System.Diagnostics.Debug.Assert(!double.IsNaN(findex));
    }
    else
    {
        // On the line defined by the segment find point P1Xp, the nearest to the beginNode.Position
        double x = GetProjectionFIndex(hitBegin, hitEnd);
        Vector P1Xp = hitBegin + (hitVector * x);
        if (P1Xp.LengthSquared < (beginRadius * beginRadius))
        {
            // The projection falls inside the begin node; clip against the
            // nearer segment endpoint instead.
            System.Diagnostics.Debug.Assert(DoubleUtil.IsBetweenZeroAndOne(x) == false);
            findex = ClipTest(spineVector, beginRadius, endRadius, (0 > x) ? hitBegin : hitEnd);
            System.Diagnostics.Debug.Assert(!double.IsNaN(findex));
        }
        else
        {
            // Find the projection point P of endNode.Position to the line (beginNode.Position, B).
            Vector P1P2p = spineVector + GetProjection(-spineVector, P1Xp - spineVector);
            //System.Diagnostics.Debug.Assert(false == DoubleUtil.IsZero(P1P2p.LengthSquared));
            //System.Diagnostics.Debug.Assert(false == DoubleUtil.IsZero(endRadius - beginRadius + P1P2p.Length));
            // These checks are here since if either fails no real solution can be calculated,
            // and we may as well bail out now and save the calculations below.
            if (DoubleUtil.IsZero(P1P2p.LengthSquared) || DoubleUtil.IsZero(endRadius - beginRadius + P1P2p.Length))
                return 1d;
            // Calculate the findex of the point to split the ink segment at.
            findex = (P1Xp.Length - beginRadius) / (endRadius - beginRadius + P1P2p.Length);
            System.Diagnostics.Debug.Assert(!double.IsNaN(findex));
            // Find the projection of the split point to the line of this segment.
            Vector S = spineVector * findex;
            double r = GetProjectionFIndex(hitBegin - S, hitEnd - S);
            // If the nearest point misses the segment, then find the findex
            // of the node nearest to the segment.
            if (false == DoubleUtil.IsBetweenZeroAndOne(r))
            {
                findex = ClipTest(spineVector, beginRadius, endRadius, (0 > r) ? hitBegin : hitEnd);
                System.Diagnostics.Debug.Assert(!double.IsNaN(findex));
            }
        }
    }
    return AdjustFIndex(findex);
}
/// <summary>
/// Clip-Testing a circular inking segment against a hitting point.
///
/// We need to find a double value s, which is between 0 and 1, such that
/// DistanceOf(hit - s*spine) = beginRadius + s * (endRadius - beginRadius)
/// That is
/// (hit.X-s*spine.X)^2 + (hit.Y-s*spine.Y)^2 = [beginRadius + s*(endRadius-beginRadius)]^2
/// Rearrange
/// A*s^2 + B*s + C = 0
/// where the value of A, B and C are described in the source code.
/// Solving for s:
/// s = (-B + sqrt(B^2-4A*C))/(2A) or s = (-B - sqrt(B^2-4A*C))/(2A)
/// The smaller value between 0 and 1 is the one we want and discard the other one.
/// </summary>
/// <param name="spine">Represent the spine of the inking segment pointing from the beginNode to endNode</param>
/// <param name="beginRadius">Radius of the beginNode</param>
/// <param name="endRadius">Radius of the endNode</param>
/// <param name="hit">The hitting point</param>
/// <returns>A double which represents the location for cutting</returns>
private static double ClipTest(Vector spine, double beginRadius, double endRadius, Vector hit)
{
    double radDiff = endRadius - beginRadius;
    // Quadratic coefficients for A*s^2 + B*s + C = 0 (see the method summary).
    double A = spine.X*spine.X + spine.Y*spine.Y - radDiff*radDiff;
    double B = -2.0f*(hit.X*spine.X + hit.Y * spine.Y + beginRadius*radDiff);
    double C = hit.X * hit.X + hit.Y * hit.Y - beginRadius * beginRadius;
    // These checks are here since if either fails no real solution can be calculated,
    // and we may as well bail out now and save the calculations below.
    if (DoubleUtil.IsZero(A) || !DoubleUtil.GreaterThanOrClose(B*B, 4.0f*A*C))
        return 1d;
    double tmp = Math.Sqrt(B*B-4.0f * A * C);
    double s1 = (-B + tmp)/(2.0f * A);
    double s2 = (-B - tmp)/(2.0f * A);
    double findex;
    // BUGFIX: the second operand previously re-tested s1 ("s1 && s1"), so the
    // "both roots in [0,1]" branch could be taken when only s1 was valid and
    // Math.Min could then pick an out-of-range s2. Test s2 as intended:
    // per the summary, the smaller value between 0 and 1 is the one we want.
    if (DoubleUtil.IsBetweenZeroAndOne(s1) && DoubleUtil.IsBetweenZeroAndOne(s2))
    {
        findex = Math.Min(s1, s2);
    }
    else if (DoubleUtil.IsBetweenZeroAndOne(s1))
    {
        findex = s1;
    }
    else if (DoubleUtil.IsBetweenZeroAndOne(s2))
    {
        findex = s2;
    }
    else
    {
        // There is still a possibility that a value like 1.0000000000000402 is not
        // considered "IsOne" by the DoubleUtil class. We should be at either one of
        // the following two cases:
        // 1. s1/s2 around 0 but not close enough (say -0.0000000000001)
        // 2. s1/s2 around 1 but not close enough (say 1.0000000000000402)
        if (s1 > 1d && s2 > 1d)
        {
            findex = 1d;
        }
        else if (s1 < 0d && s2 < 0d)
        {
            findex = 0d;
        }
        else
        {
            // Snap to whichever boundary (0 or 1) is nearer to a root.
            findex = Math.Abs(Math.Min(s1, s2) - 0d) < Math.Abs(Math.Max(s1, s2) - 1d) ? 0d : 1d;
        }
    }
    return AdjustFIndex(findex);
}
/// <summary>
/// Helper function to find out the relative location of a segment {segBegin, segEnd} against
/// a strokeNode (spine).
/// </summary>
/// <param name="spine">the spineVector of the StrokeNode</param>
/// <param name="segBegin">Start position of the line segment</param>
/// <param name="segEnd">End position of the line segment</param>
/// <returns>HitResult</returns>
private static HitResult WhereIsNodeAboutSegment(Vector spine, Vector segBegin, Vector segEnd)
{
    Vector segVector = segEnd - segBegin;
    // The node counts as "Left" only when the origin lies to the left of the
    // segment AND the spine is not parallel to it; every other case is "Right".
    bool originOnLeft = WhereIsVectorAboutVector(-segBegin, segVector) == HitResult.Left;
    bool spineNotParallel = !DoubleUtil.IsZero(Vector.Determinant(spine, segVector));
    return (originOnLeft && spineNotParallel) ? HitResult.Left : HitResult.Right;
}
/// <summary>
/// Helper method to calculate the exact location to cut
/// </summary>
/// <param name="spineVector">Vector the relative location of the two inking nodes</param>
/// <param name="hitBegin">the begin point of the hitting segment</param>
/// <param name="hitEnd">the end point of the hitting segment</param>
/// <param name="endRadius">endNode radius</param>
/// <param name="beginRadius">beginNode radius</param>
/// <param name="result">StrokeFIndices representing the location for cutting</param>
private void CalculateCutLocations(
    Vector spineVector, Vector hitBegin, Vector hitEnd, double endRadius, double beginRadius, ref StrokeFIndices result)
{
    // Find out whether the {hitBegin, hitEnd} segment intersects with the contour
    // of the stroke segment, and find the lower index of the fragment to cut out.
    if (!DoubleUtil.AreClose(result.EndFIndex, StrokeFIndices.AfterLast))
    {
        if (WhereIsNodeAboutSegment(spineVector, hitBegin, hitEnd) == HitResult.Left)
        {
            // Clip against the end node; "1 - ..." mirrors the findex because the
            // clip is computed walking from the end node toward the begin node.
            double findex = 1 - ClipTest(spineVector, endRadius, beginRadius, hitBegin, hitEnd);
            if (findex > result.EndFIndex)
            {
                result.EndFIndex = findex;
            }
        }
    }
    // Find out whether the {hitBegin, hitEnd} segment intersects with the contour
    // of the stroke segment, and find the higher index of the fragment to cut out.
    if (!DoubleUtil.AreClose(result.BeginFIndex, StrokeFIndices.BeforeFirst))
    {
        // Shift the hitting segment into the begin node's frame. hitBegin/hitEnd
        // are by-value Vector copies, so this does not affect the caller.
        hitBegin -= spineVector;
        hitEnd -= spineVector;
        if (WhereIsNodeAboutSegment(-spineVector, hitBegin, hitEnd) == HitResult.Left)
        {
            double findex = ClipTest(-spineVector, beginRadius, endRadius, hitBegin, hitEnd);
            if (findex < result.BeginFIndex)
            {
                result.BeginFIndex = findex;
            }
        }
    }
}
// Base node radius; scaled by each node's PressureFactor during hit-testing.
private double _radius = 0;
// Semi-axes of the (elliptical) node shape — presumably; not used in the
// visible code, confirm against the constructor.
private Size _radii;
// NOTE(review): not referenced in the visible methods; likely the node-shape
// transform set up at construction — confirm.
private Matrix _transform;
// Maps the (possibly elliptical) node shape into a space where it is a circle.
private Matrix _nodeShapeToCircle;
// Inverse mapping of _nodeShapeToCircle — presumably; not used in the visible code.
private Matrix _circleToNodeShape;
}
}
|
mit
|
origami99/SoftUni
|
01. ProgBasics/C# Basics/Exercises/05. Simple Loops/Half-Sum-Element/HalfSumElement.cs
|
660
|
using System;
// Reads N integers and checks whether the largest equals the sum of the rest.
class HalfSumElement
{
    static void Main()
    {
        int count = int.Parse(Console.ReadLine());
        int total = 0;
        int largest = int.MinValue;
        // Accumulate the sum and track the maximum in a single pass.
        for (int read = 1; read <= count; read++)
        {
            int current = int.Parse(Console.ReadLine());
            total += current;
            largest = Math.Max(largest, current);
        }
        int sumOfRest = total - largest;
        if (largest == sumOfRest)
        {
            Console.WriteLine("Yes{0}Sum = {1}", Environment.NewLine, sumOfRest);
        }
        else
        {
            Console.WriteLine("No{0}Diff = {1}", Environment.NewLine, Math.Abs(sumOfRest - largest));
        }
    }
}
|
mit
|
saguijs/sagui
|
spec/fixtures/project-with-node-modules/node_modules/dependencyA/index.js
|
123
|
// Test fixture: a module that simply forwards to its own transient dependency,
// used to exercise node_modules resolution in the surrounding test suite.
var transientDependency = require('transient-dependency')
module.exports = function () {
  return transientDependency()
}
|
mit
|
MerlinTechnology/lmdo
|
lmdo/__init__.py
|
22
|
# Package version string.
__version__ = '2.3.0'
|
mit
|
110035/kissy
|
src/dom/sub-modules/base/src/base/class.js
|
5848
|
/**
 * batch class operation
 * @ignore
 * @author yiminghe@gmail.com
 */
KISSY.add('dom/base/class', function (S, Dom) {
    var slice = [].slice,
        NodeType = Dom.NodeType,
        // Splits on whitespace and/or dots (selector-style class lists).
        RE_SPLIT = /[\.\s]\s*\.?/;
    // Turn a space/dot separated class string into an array of non-empty names.
    function strToArray(str) {
        str = S.trim(str || '');
        var arr = str.split(RE_SPLIT),
            newArr = [], v,
            l = arr.length,
            i = 0;
        for (; i < l; i++) {
            // Intentional assignment-in-condition: skip empty fragments.
            if (v = arr[i]) {
                newArr.push(v);
            }
        }
        return newArr;
    }
    // Returns a per-element worker that applies the given classList method
    // ('add' | 'remove' | 'toggle') to every class name, forwarding any
    // extra arguments after (elem, classNames).
    function batchClassList(method) {
        return function (elem, classNames) {
            var i, l,
                className,
                classList = elem.classList,
                extraArgs = slice.call(arguments, 2);
            for (i = 0, l = classNames.length; i < l; i++) {
                if (className = classNames[i]) {
                    classList[method].apply(classList, [className].concat(extraArgs));
                }
            }
        }
    }
    // Returns a public API function that resolves the selector, filters to
    // element nodes, and delegates each element to the private Dom[method].
    function batchEls(method) {
        return function (selector, className) {
            var classNames = strToArray(className),
                extraArgs = slice.call(arguments, 2);
            Dom.query(selector).each(function (elem) {
                if (elem.nodeType == NodeType.ELEMENT_NODE) {
                    Dom[method].apply(Dom, [elem, classNames].concat(extraArgs));
                }
            });
        }
    }
    S.mix(Dom,
        /**
         * @override KISSY.DOM
         * @class
         * @singleton
         */
        {
            // True only when the element carries every class in classNames.
            _hasClass: function (elem, classNames) {
                var i, l, className, classList = elem.classList;
                if (classList.length) {
                    for (i = 0, l = classNames.length; i < l; i++) {
                        className = classNames[i];
                        if (className && !classList.contains(className)) {
                            return false;
                        }
                    }
                    return true;
                }
                return false;
            },
            _addClass: batchClassList('add'),
            _removeClass: batchClassList('remove'),
            _toggleClass: batchClassList('toggle'),
            /**
             * Determine whether any of the matched elements are assigned the given classes.
             * @param {HTMLElement|String|HTMLElement[]} selector matched elements
             * @method
             * @param {String} className One or more class names to search for.
             * multiple class names is separated by space
             * @return {Boolean}
             */
            hasClass: function (selector, className) {
                var elem = Dom.get(selector);
                return elem && elem.nodeType == NodeType.ELEMENT_NODE && Dom._hasClass(elem, strToArray(className));
            },
            /**
             * Replace a class with another class for matched elements.
             * If no oldClassName is present, the newClassName is simply added.
             * @param {HTMLElement|String|HTMLElement[]} selector matched elements
             * @method
             * @param {String} oldClassName One or more class names to be removed from the class attribute of each matched element.
             * multiple class names is separated by space
             * @param {String} newClassName One or more class names to be added to the class attribute of each matched element.
             * multiple class names is separated by space
             */
            replaceClass: function (selector, oldClassName, newClassName) {
                Dom.removeClass(selector, oldClassName);
                Dom.addClass(selector, newClassName);
            },
            /**
             * Adds the specified class(es) to each of the set of matched elements.
             * @method
             * @param {HTMLElement|String|HTMLElement[]} selector matched elements
             * @param {String} className One or more class names to be added to the class attribute of each matched element.
             * multiple class names is separated by space
             */
            addClass: batchEls('_addClass'),
            /**
             * Remove a single class, multiple classes, or all classes from each element in the set of matched elements.
             * @param {HTMLElement|String|HTMLElement[]} selector matched elements
             * @method
             * @param {String} className One or more class names to be removed from the class attribute of each matched element.
             * multiple class names is separated by space
             */
            removeClass: batchEls('_removeClass'),
            /**
             * Add or remove one or more classes from each element in the set of
             * matched elements, depending on either the class's presence or the
             * value of the switch argument.
             * @param {HTMLElement|String|HTMLElement[]} selector matched elements
             * @param {String} className One or more class names to be added to the class attribute of each matched element.
             * multiple class names is separated by space
             * @method
             */
            toggleClass: batchEls('_toggleClass')
            // @param [state] {Boolean} optional boolean to indicate whether class
            // should be added or removed regardless of current state.
            // latest firefox/ie10 does not support
        });
    return Dom;
}, {
    requires: ['./api']
});
/*
 http://jsperf.com/kissy-classlist-vs-classname 17157:14741
 http://jsperf.com/kissy-1-3-vs-jquery-on-dom-class 15721:15223
 NOTES:
  - hasClass/addClass/removeClass keep the same logic as jQuery
  - toggleClass does not support an undefined `value` argument (jQuery does)
 */
|
mit
|
mixcloud/django-speedbar
|
speedbar/media/js/speedbar.js
|
675
|
/*global _speedbar_panel_url:false */
// Toggles the speedbar panel open/closed from its tab, lazily fetching the
// panel contents from the server on the first click.
$(function() {
    var isOpen = false;       // panel body currently visible?
    var isLoaded = false;     // contents fetched yet?
    var $panel = $('#speedbar-panel');
    var $panelBody = $('.body', $panel);
    $('.tab', $panel).click(function() {
        // Fetch the contents once, asynchronously, on first use.
        if (!isLoaded) {
            $.get(_speedbar_panel_url, function(data) {
                $panelBody.text(data);
                isLoaded = true;
            });
        }
        // Cap the body at 90% of the viewport height before animating.
        $panelBody.css('height', ($(window).height() * 0.9) + 'px');
        if (isOpen) {
            $panelBody.slideUp();
        } else {
            $panelBody.slideDown();
        }
        isOpen = !isOpen;
    });
});
|
mit
|
Zazz-PlayerMe/upa-models
|
test/mocks/RawGameMetadataModel.js
|
569
|
// Mock fixture: a raw game-metadata payload as the tests expect to receive it.
var standard = {
    id: 234,
    title: "title",
    alias: null,
    description: "description",
    // Image fields carry original/cached URLs plus the upload's filename.
    cover: {
        original: "cover.original",
        cached: "cover.cached",
        original_filename: "cover.original_filename"
    },
    box: {
        original: "box.original",
        cached: "box.cached",
        original_filename: "box.original_filename"
    },
    slug: "slug",
    url: "url",
    check_in_type: "playing",
    likes_count: 0,
    has_liked: false,
    has_favourited: false,
    favourites_count: 0
};
// Re-export the fixture as the module's default export.
export {
    standard as default
};
|
mit
|
genlu/roslyn
|
src/Workspaces/CoreTest/WorkspaceServiceTests/TemporaryStorageServiceTests.cs
|
14876
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Test.Utilities;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.UnitTests
{
[UseExportProvider]
public class TemporaryStorageServiceTests
{
[Fact, Trait(Traits.Feature, Traits.Features.Workspace)]
public void TestTemporaryStorageText()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    // Round-trip a normal, an empty, and a large source text through storage.
    var samples = new[]
    {
        SourceText.From(new string(' ', 4096) + "public class A {}"),        // normal string
        SourceText.From(string.Empty),                                       // empty string
        SourceText.From(new string(' ', 1024 * 1024) + "public class A {}"), // large string
    };
    foreach (var sample in samples)
    {
        TestTemporaryStorage(service, sample);
    }
}
[WorkItem(531188, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/531188")]
[Fact, Trait(Traits.Feature, Traits.Features.Workspace)]
public void TestTemporaryStorageStream()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var temporaryStorage = service.CreateTemporaryStreamStorage(System.Threading.CancellationToken.None);
    // Fill a stream with an alternating 0/1 byte pattern, one buffer's worth.
    using var data = SerializableBytes.CreateWritableStream();
    for (var i = 0; i < SharedPools.ByteBufferSize; i++)
    {
        data.WriteByte((byte)(i % 2));
    }
    data.Position = 0;
    temporaryStorage.WriteStreamAsync(data).Wait();
    // Read it back and verify the length and every byte survived the round trip.
    using var result = temporaryStorage.ReadStreamAsync().Result;
    Assert.Equal(data.Length, result.Length);
    for (var i = 0; i < SharedPools.ByteBufferSize; i++)
    {
        Assert.Equal(i % 2, result.ReadByte());
    }
}
/// <summary>
/// Round-trips <paramref name="text"/> through a fresh temporary text storage
/// and verifies the copy is a distinct instance with identical content/encoding.
/// </summary>
private static void TestTemporaryStorage(ITemporaryStorageService temporaryStorageService, SourceText text)
{
    // Allocate a storage slot, write the text, then read it back.
    var storage = temporaryStorageService.CreateTemporaryTextStorage(System.Threading.CancellationToken.None);
    storage.WriteTextAsync(text).Wait();
    var roundTripped = storage.ReadTextAsync().Result;
    Assert.NotSame(text, roundTripped);
    Assert.Equal(text.ToString(), roundTripped.ToString());
    Assert.Equal(text.Encoding, roundTripped.Encoding);
    storage.Dispose();
}
[Fact, Trait(Traits.Feature, Traits.Features.Workspace)]
public void TestTemporaryTextStorageExceptions()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var storage = service.CreateTemporaryTextStorage(CancellationToken.None);
    // Nothing has been written yet, so reads must fail
    // (the async path surfaces the failure wrapped in AggregateException).
    Assert.Throws<InvalidOperationException>(() => storage.ReadText());
    Assert.Throws<AggregateException>(() => storage.ReadTextAsync().Result);
    // write a normal string
    var text = SourceText.From(new string(' ', 4096) + "public class A {}");
    storage.WriteTextAsync(text).Wait();
    // Writing multiple times is not allowed — storage is write-once.
    Assert.Throws<InvalidOperationException>(() => storage.WriteText(text));
    Assert.Throws<AggregateException>(() => storage.WriteTextAsync(text).Wait());
}
[Fact, Trait(Traits.Feature, Traits.Features.Workspace)]
public void TestTemporaryStreamStorageExceptions()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var storage = service.CreateTemporaryStreamStorage(CancellationToken.None);
    // Nothing has been written yet, so reads must fail
    // (the async path surfaces the failure wrapped in AggregateException).
    Assert.Throws<InvalidOperationException>(() => storage.ReadStream());
    Assert.Throws<AggregateException>(() => storage.ReadStreamAsync().Result);
    // write a normal stream
    var stream = new MemoryStream();
    stream.Write(new byte[] { 42 }, 0, 1);
    stream.Position = 0;
    storage.WriteStreamAsync(stream).Wait();
    // Writing multiple times is not allowed — storage is write-once
    // (null argument here is irrelevant; the write-once check fires first).
    Assert.Throws<InvalidOperationException>(() => storage.WriteStream(null));
    Assert.Throws<AggregateException>(() => storage.WriteStreamAsync(null).Wait());
}
[Fact]
public void TestZeroLengthStreams()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var storage = service.CreateTemporaryStreamStorage(CancellationToken.None);
    // 0 length streams are allowed: write an empty stream and verify it
    // reads back as empty rather than throwing.
    using (var stream1 = new MemoryStream())
    {
        storage.WriteStream(stream1);
    }
    using (var stream2 = storage.ReadStream())
    {
        Assert.Equal(0, stream2.Length);
    }
}
[Fact, Trait(Traits.Feature, Traits.Features.Workspace)]
public void TestTemporaryStorageMemoryMappedFileManagement()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var buffer = new MemoryStream(257 * 1024 + 1);
    // Fill the backing buffer with a repeating byte pattern. BUGFIX: iterate to
    // Capacity, not Length — a MemoryStream constructed with a capacity starts
    // with Length == 0, so the original `i < buffer.Length` loop never executed
    // and every stream written below was all zeros.
    for (var i = 0; i < buffer.Capacity; i++)
    {
        buffer.WriteByte((byte)i);
    }
    // Do a relatively cheap concurrent stress test of the backing MemoryMappedFile management
    var tasks = Enumerable.Range(1, 257).Select(async i =>
    {
        for (var j = 1; j < 5; j++)
        {
            using ITemporaryStreamStorage storage1 = service.CreateTemporaryStreamStorage(CancellationToken.None),
                storage2 = service.CreateTemporaryStreamStorage(CancellationToken.None);
            var storage3 = service.CreateTemporaryStreamStorage(CancellationToken.None); // let the finalizer run for this instance
            storage1.WriteStream(new MemoryStream(buffer.GetBuffer(), 0, 1024 * i - 1));
            storage2.WriteStream(new MemoryStream(buffer.GetBuffer(), 0, 1024 * i));
            storage3.WriteStream(new MemoryStream(buffer.GetBuffer(), 0, 1024 * i + 1));
            await Task.Yield();
            using Stream s1 = storage1.ReadStream(),
                s2 = storage2.ReadStream(),
                s3 = storage3.ReadStream();
            Assert.Equal(1024 * i - 1, s1.Length);
            Assert.Equal(1024 * i, s2.Length);
            Assert.Equal(1024 * i + 1, s3.Length);
        }
    });
    Task.WaitAll(tasks.ToArray());
    // Force finalization of the undisposed storage3 instances to exercise
    // the memory-mapped-file cleanup path.
    GC.Collect(2);
    GC.WaitForPendingFinalizers();
    GC.Collect(2);
}
[Fact(Skip = "This test exists so it can be locally executed for scale testing, when required. Do not remove this test or unskip it in CI.")]
[Trait(Traits.Feature, Traits.Features.Workspace)]
public void TestTemporaryStorageScaling()
{
    // This will churn through 4GB of memory. It validates that we don't
    // use up our address space in a 32 bit process.
    if (Environment.Is64BitOperatingSystem && !Environment.Is64BitProcess)
    {
        using var workspace = new AdhocWorkspace();
        var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
        var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
        // A 128KB payload of 1-bytes, rewound and rewritten for each storage.
        using var data = SerializableBytes.CreateWritableStream();
        for (var i = 0; i < 1024 * 128; i++)
        {
            data.WriteByte(1);
        }
        // Create 4GB of memory mapped files
        var fileCount = (int)((long)4 * 1024 * 1024 * 1024 / data.Length);
        var storageHandles = new List<ITemporaryStreamStorage>(fileCount);
        for (var i = 0; i < fileCount; i++)
        {
            var s = service.CreateTemporaryStreamStorage(CancellationToken.None);
            storageHandles.Add(s);
            data.Position = 0;
            s.WriteStreamAsync(data).Wait();
        }
        // Spot-check the first few thousand handles and release them.
        for (var i = 0; i < 1024 * 5; i++)
        {
            using var s = storageHandles[i].ReadStreamAsync().Result;
            Assert.Equal(1, s.ReadByte());
            storageHandles[i].Dispose();
        }
    }
}
[Fact]
public void StreamTest1()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var storage = service.CreateTemporaryStreamStorage(CancellationToken.None);
    // Write a 10000-byte repeating pattern, then verify a byte-by-byte read-back.
    using var expected = new MemoryStream();
    for (var i = 0; i < 10000; i++)
    {
        expected.WriteByte((byte)(i % byte.MaxValue));
    }
    expected.Position = 0;
    storage.WriteStream(expected);
    expected.Position = 0;
    using var stream = storage.ReadStream();
    Assert.Equal(expected.Length, stream.Length);
    for (var i = 0; i < expected.Length; i++)
    {
        Assert.Equal(expected.ReadByte(), stream.ReadByte());
    }
}
[Fact]
public void StreamTest2()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var storage = service.CreateTemporaryStreamStorage(CancellationToken.None);
    // Same pattern as StreamTest1, but read back with chunked Read() calls
    // rather than byte-by-byte, exercising the buffered read path.
    using var expected = new MemoryStream();
    for (var i = 0; i < 10000; i++)
    {
        expected.WriteByte((byte)(i % byte.MaxValue));
    }
    expected.Position = 0;
    storage.WriteStream(expected);
    expected.Position = 0;
    using var stream = storage.ReadStream();
    Assert.Equal(expected.Length, stream.Length);
    var index = 0;
    int count;
    var bytes = new byte[1000];
    while ((count = stream.Read(bytes, 0, bytes.Length)) > 0)
    {
        for (var i = 0; i < count; i++)
        {
            Assert.Equal((byte)(index % byte.MaxValue), bytes[i]);
            index++;
        }
    }
    // Every byte must have been consumed.
    Assert.Equal(index, stream.Length);
}
[Fact]
public void StreamTest3()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    var storage = service.CreateTemporaryStreamStorage(CancellationToken.None);
    // Write bytes at random positions (leaving zero gaps), then verify the
    // non-zero bytes via seeking reads.
    using var expected = new MemoryStream();
    var random = new Random(Environment.TickCount);
    for (var i = 0; i < 100; i++)
    {
        var position = random.Next(10000);
        expected.Position = position;
        var value = (byte)(i % byte.MaxValue);
        expected.WriteByte(value);
    }
    expected.Position = 0;
    storage.WriteStream(expected);
    expected.Position = 0;
    using var stream = storage.ReadStream();
    Assert.Equal(expected.Length, stream.Length);
    for (var i = 0; i < expected.Length; i++)
    {
        var value = expected.ReadByte();
        // Only assert non-zero bytes: zero could be a gap or an overwritten slot.
        if (value != 0)
        {
            stream.Position = i;
            Assert.Equal(value, stream.ReadByte());
        }
    }
}
[Fact, Trait(Traits.Feature, Traits.Features.Workspace)]
public void TestTemporaryStorageTextEncoding()
{
    using var workspace = new AdhocWorkspace();
    var textFactory = Assert.IsType<TextFactoryService>(workspace.Services.GetService<ITextFactoryService>());
    var service = new TemporaryStorageServiceFactory.TemporaryStorageService(textFactory);
    // test normal string — with an explicit (ASCII) encoding, which the
    // round-trip helper asserts is preserved.
    var text = SourceText.From(new string(' ', 4096) + "public class A {}", Encoding.ASCII);
    TestTemporaryStorage(service, text);
    // test empty string
    text = SourceText.From(string.Empty);
    TestTemporaryStorage(service, text);
    // test large string
    text = SourceText.From(new string(' ', 1024 * 1024) + "public class A {}");
    TestTemporaryStorage(service, text);
}
}
}
|
mit
|
BullisEntrepreneurship/LeanLaunchLab
|
db/schema.rb
|
26220
|
# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20121001163922) do
create_table "attachments", :force => true do |t|
t.string "data_file_name", :null => false
t.string "data_content_type", :null => false
t.integer "data_file_size", :null => false
t.datetime "data_updated_at", :null => false
t.integer "item_id", :null => false
t.string "item_type", :null => false
t.integer "member_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "attachments", ["item_type", "item_id"], :name => "index_attachments_on_item_type_and_item_id"
add_index "attachments", ["member_id"], :name => "index_attachments_on_member_id"
create_table "audits", :force => true do |t|
t.integer "auditable_id", :null => false
t.string "auditable_type", :null => false
t.integer "associated_id"
t.string "associated_type"
t.integer "user_id"
t.string "user_type"
t.string "username"
t.string "action", :null => false
t.text "audited_changes", :null => false
t.integer "version", :default => 0, :null => false
t.string "comment"
t.string "remote_address"
t.datetime "created_at"
end
add_index "audits", ["associated_id", "associated_type"], :name => "associated_index"
add_index "audits", ["auditable_id", "auditable_type"], :name => "auditable_index"
add_index "audits", ["created_at"], :name => "index_audits_on_created_at"
add_index "audits", ["user_id", "user_type"], :name => "user_index"
create_table "blog_posts", :force => true do |t|
t.integer "project_id", :null => false
t.integer "member_id", :null => false
t.string "subject", :null => false
t.text "body"
t.datetime "created_at"
t.datetime "updated_at"
t.string "the_ask"
t.datetime "published_at"
t.boolean "urgent", :default => false, :null => false
t.date "date"
t.string "post_type"
t.string "text1"
t.string "text2"
end
add_index "blog_posts", ["member_id"], :name => "index_blog_posts_on_member_id"
add_index "blog_posts", ["project_id", "published_at"], :name => "index_blog_posts_on_project_id_and_published_at"
create_table "boxes", :force => true do |t|
t.string "name", :null => false
t.string "label", :null => false
t.text "description"
t.string "startup_label", :null => false
t.text "startup_description"
end
add_index "boxes", ["name"], :name => "index_boxes_on_name", :unique => true
create_table "canvas_items", :force => true do |t|
t.integer "project_id", :null => false
t.integer "box_id", :null => false
t.string "text", :null => false
t.datetime "created_at"
t.datetime "updated_at"
t.integer "item_status_id", :default => 1
t.integer "original_id"
t.boolean "deleted", :default => false, :null => false
t.string "display_color", :default => "yellow", :null => false
t.integer "hypothesis_id"
t.text "description"
t.integer "x"
t.integer "y"
t.integer "z"
t.datetime "inactive_at"
t.boolean "added", :default => false, :null => false
t.boolean "updated", :default => false, :null => false
end
add_index "canvas_items", ["hypothesis_id"], :name => "index_canvas_items_on_hypothesis_id"
add_index "canvas_items", ["original_id"], :name => "index_canvas_items_on_original_id"
add_index "canvas_items", ["project_id"], :name => "index_canvas_items_on_project_id"
create_table "charges", :force => true do |t|
t.integer "organization_id"
t.decimal "amount", :precision => 10, :scale => 2
t.integer "num_members"
t.decimal "member_price", :precision => 10, :scale => 2
t.text "comments"
t.date "period_start", :null => false
t.date "period_end", :null => false
t.string "stripe_charge_id"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "charges", ["organization_id"], :name => "index_charges_on_organization_id"
add_index "charges", ["stripe_charge_id"], :name => "index_charges_on_stripe_charge_id"
create_table "ckeditor_assets", :force => true do |t|
t.string "data_file_name", :null => false
t.string "data_content_type", :null => false
t.integer "data_file_size", :null => false
t.integer "assetable_id"
t.string "assetable_type", :limit => 30
t.string "type", :limit => 30
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "ckeditor_assets", ["assetable_type", "assetable_id"], :name => "idx_ckeditor_assetable"
add_index "ckeditor_assets", ["assetable_type", "type", "assetable_id"], :name => "idx_ckeditor_assetable_type"
create_table "comments", :force => true do |t|
t.integer "blog_post_id"
t.integer "member_id", :null => false
t.text "body", :null => false
t.datetime "created_at"
t.datetime "updated_at"
t.integer "hypothesis_id"
end
add_index "comments", ["blog_post_id"], :name => "index_comments_on_blog_post_id"
add_index "comments", ["hypothesis_id"], :name => "index_comments_on_hypothesis_id"
add_index "comments", ["member_id"], :name => "index_comments_on_member_id"
create_table "experiments", :force => true do |t|
t.integer "project_id"
t.integer "hypothesis_id", :null => false
t.integer "position"
t.string "title", :null => false
t.string "success_criteria"
t.date "start_date"
t.date "end_date"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "item_status_id"
t.datetime "completed_at"
end
add_index "experiments", ["hypothesis_id"], :name => "index_experiments_on_hypothesis_id"
add_index "experiments", ["item_status_id"], :name => "index_experiments_on_item_status_id"
add_index "experiments", ["project_id"], :name => "index_experiments_on_project_id"
create_table "hypotheses", :force => true do |t|
t.integer "project_id"
t.integer "position"
t.string "title", :null => false
t.text "description"
t.integer "item_status_id"
t.datetime "created_at"
t.datetime "updated_at"
t.datetime "completed_at"
t.integer "hypothesis_id"
t.string "completed_reason"
end
add_index "hypotheses", ["hypothesis_id"], :name => "index_hypotheses_on_hypothesis_id"
add_index "hypotheses", ["item_status_id"], :name => "index_hypotheses_on_item_status_id"
add_index "hypotheses", ["project_id", "completed_at"], :name => "index_hypotheses_on_project_id_and_completed_at"
create_table "item_statuses", :force => true do |t|
t.string "status", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "item_statuses", ["status"], :name => "index_item_statuses_on_status", :unique => true
create_table "member_blog_post_views", :force => true do |t|
t.integer "member_id", :null => false
t.integer "blog_post_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "member_blog_post_views", ["blog_post_id", "member_id"], :name => "index_member_blog_post_views_on_blog_post_id_and_member_id"
add_index "member_blog_post_views", ["member_id"], :name => "index_member_blog_post_views_on_member_id"
create_table "members", :force => true do |t|
t.integer "user_id", :null => false
t.integer "project_id", :null => false
t.string "join_code"
t.datetime "created_at"
t.datetime "updated_at"
t.boolean "activated", :default => true
t.string "level", :null => false
t.boolean "is_owner", :default => false
t.string "role_name", :null => false
t.boolean "notify_hypotheses", :default => true, :null => false
t.boolean "notify_interviews", :default => true, :null => false
t.boolean "notify_updates", :default => true, :null => false
t.boolean "daily_summary", :default => true, :null => false
t.boolean "weekly_summary", :default => true, :null => false
t.datetime "accessed_at"
t.boolean "display_plan_todo", :default => true, :null => false
t.boolean "display_plan_in_progress", :default => true, :null => false
t.boolean "display_plan_done", :default => true, :null => false
t.boolean "notify_hypotheses_validated", :default => true, :null => false
t.string "plan_done_sort", :default => "newest", :null => false
t.boolean "notify_goals", :default => true, :null => false
end
add_index "members", ["join_code"], :name => "index_members_on_join_code", :unique => true
add_index "members", ["project_id"], :name => "index_members_on_project_id"
add_index "members", ["user_id", "project_id"], :name => "index_members_on_user_id_and_project_id", :unique => true
create_table "organization_members", :force => true do |t|
t.integer "user_id", :null => false
t.integer "organization_id", :null => false
t.string "level", :null => false
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.string "payment_code"
t.date "paid_at"
t.string "stripe_customer_id"
t.integer "cc_exp_month"
t.integer "cc_exp_year"
t.string "cc_last4"
t.string "cc_type"
t.string "stripe_charge_id"
end
add_index "organization_members", ["organization_id"], :name => "index_organization_members_on_organization_id"
add_index "organization_members", ["user_id", "organization_id"], :name => "index_organization_members_on_user_id_and_organization_id", :unique => true
create_table "organizations", :force => true do |t|
t.string "name", :null => false
t.string "organization_type"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.string "stripe_customer_id"
t.integer "cc_exp_month"
t.integer "cc_exp_year"
t.string "cc_last4"
t.string "cc_type"
t.integer "cc_user_id"
t.string "brightidea_api_key"
t.date "trial_end_date"
t.boolean "invoice_billing", :default => false, :null => false
t.boolean "auto_locked", :default => false, :null => false
t.boolean "admin_locked", :default => false, :null => false
t.text "admin_comments"
t.integer "promotion_id"
t.datetime "promotion_expires_at"
t.decimal "member_price", :precision => 10, :scale => 2
end
add_index "organizations", ["brightidea_api_key"], :name => "index_organizations_on_brightidea_api_key"
add_index "organizations", ["cc_user_id"], :name => "index_organizations_on_cc_user_id"
add_index "organizations", ["name"], :name => "index_organizations_on_name"
add_index "organizations", ["promotion_expires_at"], :name => "index_organizations_on_promotion_expires_at"
add_index "organizations", ["promotion_id"], :name => "index_organizations_on_promotion_id"
add_index "organizations", ["stripe_customer_id"], :name => "index_organizations_on_stripe_customer_id", :unique => true
add_index "organizations", ["trial_end_date"], :name => "index_organizations_on_trial_end_date"
create_table "projects", :force => true do |t|
t.string "name", :null => false
t.datetime "created_at"
t.datetime "updated_at"
t.string "pitch"
t.string "url"
t.boolean "canvas_startup_headers", :default => false, :null => false
t.boolean "canvas_include_plan_default", :default => true, :null => false
t.boolean "canvas_highlight_new", :default => true, :null => false
t.integer "organization_id", :null => false
t.string "brightidea_id"
t.boolean "goal_canvas_completed", :default => false, :null => false
t.boolean "goal_create_hypothesis_completed", :default => false, :null => false
t.boolean "goal_test_hypothesis_completed", :default => false, :null => false
t.boolean "goal_validate_hypothesis_completed", :default => false, :null => false
t.boolean "goal_interview_completed", :default => false, :null => false
t.boolean "goal_invite_completed", :default => false, :null => false
t.string "stripe_customer_id"
t.integer "cc_exp_month"
t.integer "cc_exp_year"
t.string "cc_last4"
t.string "cc_type"
t.integer "cc_user_id"
t.decimal "price", :precision => 10, :scale => 2
t.string "payment_code"
t.date "paid_at"
end
add_index "projects", ["brightidea_id"], :name => "index_projects_on_brightidea_id"
add_index "projects", ["organization_id", "name"], :name => "index_projects_on_organization_id_and_name", :unique => true
add_index "projects", ["payment_code"], :name => "index_projects_on_payment_code", :unique => true
create_table "promotions", :force => true do |t|
t.string "name", :null => false
t.string "code", :null => false
t.integer "monthly_discount_percent"
t.integer "months"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "promotions", ["code"], :name => "index_promotions_on_code", :unique => true
add_index "promotions", ["name"], :name => "index_promotions_on_name", :unique => true
create_table "questions", :force => true do |t|
t.integer "hypothesis_id", :null => false
t.string "title", :null => false
t.integer "position"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "project_id", :null => false
end
add_index "questions", ["hypothesis_id"], :name => "index_questions_on_hypothesis_id"
add_index "questions", ["project_id"], :name => "index_questions_on_project_id"
create_table "settings", :force => true do |t|
t.integer "user_id", :null => false
t.boolean "post_email", :default => true, :null => false
t.boolean "feedback_email", :default => true, :null => false
t.boolean "digest_email", :default => false, :null => false
t.datetime "created_at"
t.datetime "updated_at"
t.integer "ui_version", :default => 2, :null => false
t.integer "ui_available", :default => 2
t.string "time_zone", :default => "America/Los_Angeles", :null => false
t.string "home_page"
t.datetime "project_goal_canvas_notified_at"
t.datetime "project_goal_hypothesis_notified_at"
t.datetime "project_goal_test_notified_at"
t.datetime "project_goal_validate_notified_at"
t.datetime "project_goal_interview_notified_at"
t.datetime "project_goal_invite_notified_at"
end
add_index "settings", ["time_zone"], :name => "index_settings_on_time_zone"
add_index "settings", ["user_id"], :name => "index_settings_on_user_id"
create_table "signups", :force => true do |t|
t.string "email"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "signups", ["created_at"], :name => "index_signups_on_created_at"
create_table "subscription_levels", :force => true do |t|
t.string "name", :null => false
t.string "tagline"
t.text "description"
t.boolean "available", :default => true, :null => false
t.decimal "monthly_price", :precision => 10, :scale => 2
t.decimal "yearly_price", :precision => 10, :scale => 2
t.integer "max_projects"
t.integer "max_members"
t.integer "max_storage_mb"
t.boolean "support_email", :default => true, :null => false
t.boolean "support_chat", :default => false, :null => false
t.boolean "support_phone", :default => false, :null => false
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "subscriptions", :force => true do |t|
t.integer "organization_id", :null => false
t.integer "subscription_level_id", :null => false
t.boolean "yearly", :default => false, :null => false
t.decimal "price", :precision => 10, :scale => 2, :null => false
t.date "start_date", :null => false
t.date "end_date"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "subscriptions", ["organization_id"], :name => "index_subscriptions_on_organization_id"
add_index "subscriptions", ["subscription_level_id"], :name => "index_subscriptions_on_subscription_level_id"
create_table "tasks", :force => true do |t|
t.integer "project_id", :null => false
t.integer "position"
t.string "title", :null => false
t.datetime "created_at"
t.datetime "updated_at"
t.integer "assigned_to_member_id"
t.date "due_date"
t.integer "hypothesis_id"
t.datetime "completed_at"
t.text "description"
end
add_index "tasks", ["assigned_to_member_id"], :name => "index_tasks_on_assigned_to_member_id"
add_index "tasks", ["hypothesis_id"], :name => "index_tasks_on_hypothesis_id"
add_index "tasks", ["project_id"], :name => "index_goals_on_project_id"
create_table "user_activities", :force => true do |t|
t.integer "user_id", :null => false
t.integer "member_id"
t.string "name", :null => false
t.string "email", :null => false
t.string "action", :null => false
t.string "description", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "user_activities", ["member_id"], :name => "index_user_activities_on_member_id"
add_index "user_activities", ["user_id"], :name => "index_user_activities_on_user_id"
create_table "users", :force => true do |t|
t.string "email", :null => false
t.string "encrypted_password", :limit => 128, :null => false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", :default => 0
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.string "confirmation_token"
t.datetime "confirmed_at"
t.datetime "confirmation_sent_at"
t.datetime "created_at"
t.datetime "updated_at"
t.string "name", :null => false
t.boolean "has_changed_password", :default => true, :null => false
t.boolean "admin", :default => false, :null => false
t.string "tracking_code", :null => false
t.string "source"
end
add_index "users", ["confirmation_token"], :name => "index_users_on_confirmation_token", :unique => true
add_index "users", ["email"], :name => "index_users_on_email", :unique => true
add_index "users", ["reset_password_token"], :name => "index_users_on_reset_password_token", :unique => true
add_index "users", ["tracking_code"], :name => "index_users_on_tracking_code", :unique => true
add_foreign_key "attachments", "members", name: "attachments_member_id_fk"
add_foreign_key "blog_posts", "members", name: "blog_posts_member_id_fk"
add_foreign_key "blog_posts", "projects", name: "blog_posts_project_id_fk", dependent: :delete
add_foreign_key "canvas_items", "boxes", name: "canvas_items_box_id_fk"
add_foreign_key "canvas_items", "canvas_items", name: "canvas_items_original_id_fk", column: "original_id", dependent: :delete
add_foreign_key "canvas_items", "hypotheses", name: "canvas_items_hypothesis_id_fk", dependent: :nullify
add_foreign_key "canvas_items", "item_statuses", name: "canvas_items_item_status_id_fk", dependent: :nullify
add_foreign_key "canvas_items", "projects", name: "canvas_items_project_id_fk", dependent: :delete
add_foreign_key "charges", "organizations", name: "charges_organization_id_fk", dependent: :nullify
add_foreign_key "comments", "blog_posts", name: "comments_blog_post_id_fk", dependent: :delete
add_foreign_key "comments", "hypotheses", name: "comments_hypothesis_id_fk", dependent: :delete
add_foreign_key "comments", "members", name: "comments_member_id_fk"
add_foreign_key "experiments", "hypotheses", name: "experiments_hypothesis_id_fk", dependent: :nullify
add_foreign_key "experiments", "item_statuses", name: "experiments_item_status_id_fk", dependent: :nullify
add_foreign_key "experiments", "projects", name: "experiments_project_id_fk", dependent: :delete
add_foreign_key "hypotheses", "hypotheses", name: "hypotheses_hypothesis_id_fk", dependent: :nullify
add_foreign_key "hypotheses", "item_statuses", name: "hypotheses_item_status_id_fk", dependent: :nullify
add_foreign_key "hypotheses", "projects", name: "hypotheses_project_id_fk", dependent: :delete
add_foreign_key "member_blog_post_views", "blog_posts", name: "member_blog_post_views_blog_post_id_fk", dependent: :delete
add_foreign_key "member_blog_post_views", "members", name: "member_blog_post_views_member_id_fk", dependent: :delete
add_foreign_key "members", "projects", name: "members_project_id_fk", dependent: :delete
add_foreign_key "members", "users", name: "members_user_id_fk"
add_foreign_key "organization_members", "organizations", name: "organization_members_organization_id_fk", dependent: :delete
add_foreign_key "organization_members", "users", name: "organization_members_user_id_fk", dependent: :delete
add_foreign_key "organizations", "promotions", name: "organizations_promotion_id_fk"
add_foreign_key "organizations", "users", name: "organizations_cc_user_id_fk", column: "cc_user_id"
add_foreign_key "projects", "organizations", name: "projects_organization_id_fk"
add_foreign_key "questions", "hypotheses", name: "questions_hypothesis_id_fk"
add_foreign_key "questions", "projects", name: "questions_project_id_fk", dependent: :delete
add_foreign_key "settings", "users", name: "settings_user_id_fk", dependent: :delete
add_foreign_key "subscriptions", "organizations", name: "subscriptions_organization_id_fk", dependent: :delete
add_foreign_key "subscriptions", "subscription_levels", name: "subscriptions_subscription_level_id_fk"
add_foreign_key "tasks", "hypotheses", name: "tasks_hypothesis_id_fk", dependent: :nullify
add_foreign_key "tasks", "members", name: "tasks_assigned_to_member_id_fk", column: "assigned_to_member_id", dependent: :nullify
add_foreign_key "tasks", "projects", name: "goals_project_id_fk", dependent: :delete
add_foreign_key "user_activities", "members", name: "user_activities_member_id_fk", dependent: :nullify
add_foreign_key "user_activities", "users", name: "user_activities_user_id_fk", dependent: :delete
end
|
mit
|
mrbigmouth/acgn-stock
|
server/publications/ruleAgenda/ruleAgendaDetail.js
|
1012
|
import { Meteor } from 'meteor/meteor';
import { check } from 'meteor/check';
import { dbRuleAgendas } from '/db/dbRuleAgendas';
import { dbRuleIssues } from '/db/dbRuleIssues';
import { dbRuleIssueOptions } from '/db/dbRuleIssueOptions';
import { limitSubscription } from '/server/imports/utils/rateLimit';
import { debug } from '/server/imports/utils/debug';
Meteor.publish('ruleAgendaDetail', function(agendaId) {
debug.log('publish ruleAgendaDetail');
check(agendaId, String);
const agendaCursor = dbRuleAgendas.find(agendaId);
const agenda = agendaCursor.fetch()[0];
const issueCursor = dbRuleIssues.find({
_id: {
$in: agenda.issues
}
});
let optionIds = [];
issueCursor.forEach((issue) => {
optionIds = optionIds.concat(issue.options);
});
const optionCursor = dbRuleIssueOptions.find({
_id: {
$in: optionIds
}
});
return [agendaCursor, issueCursor, optionCursor];
});
// 一分鐘最多重複訂閱5次
limitSubscription('ruleAgendaDetail', 5);
|
mit
|
harryggg/coursemology2
|
db/migrate/20141205065248_create_instance_users.rb
|
361
|
# frozen_string_literal: true
class CreateInstanceUsers < ActiveRecord::Migration
def change
create_table :instance_users do |t|
t.references :instance, null: false
t.references :user,
null: false,
index: :unique
t.timestamps
t.index [:instance_id, :user_id], unique: true
end
end
end
|
mit
|
ee08b397/LeetCode-2
|
JAVA/RotateList.java
|
677
|
/**
* Definition for singly-linked list.
* public class ListNode {
* int val;
* ListNode next;
* ListNode(int x) {
* val = x;
* next = null;
* }
* }
*/
public class Solution {
public ListNode rotateRight(ListNode head, int n) {
if (head == null) return head;
ListNode tail = head;
int count = 1;
while (tail.next != null) {
count++;
tail = tail.next;
}
tail.next = head;
count = count - n % count;
while(count-- > 0) {
head = head.next;
tail = tail.next;
}
tail.next = null;
return head;
}
}
|
mit
|
eiriklv/hearsay-admin
|
helpers/index.js
|
138
|
exports = module.exports = function () {
return {
common: require('./common')(),
react: require('./react')()
};
};
|
mit
|
xiangsongtao/jikexueyuan_Homework
|
Lesson14-Web Security/1.开发手机版本的百度新闻/baidunewsThinkPHP/Public/BaiduNews/js/basic/findAll.js
|
1676
|
define(['jquery', 'createInnerBoxFragment'], function($, createInnerBoxFragment) {
var offsetback;
function findAll(table, offset, findNum) {
// console.log("findNow::" + table);
$.ajax({
type: "post",
url: "/baidunewsThinkPHP/BaiduNews/Index/findAll",
async: false,
beforeSend: function() {
$(".loadMore").text("正在加载……");
},
data: {
"table": table,
"findNum": findNum,
"offset": offset
},
success: function(data) {
// console.log(data);
if (data == "") {
$(".loadMore").text("已全部加载完毕!");
loadMoreFlag = 0;
return;
}
$(".loadMore").text("点击加载更多");
//查询成功后offset计数
offsetback = offset + findNum;
//console.log("当前offset:" + offsetback);
//插入返回的新闻html片段,插入在“更多”之前
createInnerBoxFragment(data, table);
$(".page-loading").css("display", "none");
},
// complete: function(){
//
// $(".loadMore").text("点击加载更多");
// },
error: function(data) {
// console.log(data);
console.log("error: " + data.responseText);
}
});
return offsetback;
}
return findAll;
});
|
mit
|
tkjone/django-starters
|
series_3/p_01/myproject/src/server/static/js/index.js
|
27
|
console.log('App Loaded');
|
mit
|
junwei-wang/com.hotfixs.books
|
thinking-in-java/enumerated/src/main/java/com/hotfixs/thinkinginjava/enumerated/EnumMaps.java
|
828
|
package com.hotfixs.thinkinginjava.enumerated;
import static com.hotfixs.thinkinginjava.common.Print.print;
import java.util.EnumMap;
import java.util.Map;
/**
* @author Junwei Wang (i.junwei.wang@gmail.com)
*/
interface Command {
void action();
}
public class EnumMaps {
public static void main(String[] args) {
EnumMap<AlarmPoints, Command> em = new EnumMap<>(AlarmPoints.class);
em.put(AlarmPoints.KITCHEN, () -> print("Kitchen fire!"));
em.put(AlarmPoints.BATHROOM, () -> print("Bathroom alarm!"));
for (Map.Entry<AlarmPoints, Command> e : em.entrySet()) {
print(e.getKey() + ": ");
e.getValue().action();
}
try {
em.get(AlarmPoints.UTILITY).action();
} catch (Exception e) {
print(e);
}
}
}
|
mit
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.