repo_name (string, 4-116 chars) | path (string, 4-379 chars) | size (string, 1-7 chars) | content (string, 3-1.05M chars) | license (15 classes) |
---|---|---|---|---|
kpsroka/splendid
|
src/ui/welcome/WelcomeScreen.test.js
|
2833
|
/*
* Copyright 2017 K. P. Sroka
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { shallow } from 'enzyme';
import React from 'react';
import sinon from 'sinon';
import WelcomeScreen from './WelcomeScreen';
import NewGameComponent from './NewGameComponent';
import JoinGameComponent from './JoinGameComponent';
describe('WelcomeScreen', () => {
const explodingCallback = () => { throw new Error('Should not be called.'); };
it('displays "Splendid!" on welcome screen', () => {
const welcomeScreen = shallow(
<WelcomeScreen mode="WELCOME" setUiMode={explodingCallback} />
);
expect(welcomeScreen.text()).toContain('Splendid!');
});
it('changes mode to JOIN on button click', () => {
const setUiModeSpy = sinon.spy();
const welcomeScreen = shallow(
<WelcomeScreen mode="WELCOME" setUiMode={setUiModeSpy} />
);
expect(setUiModeSpy.called).toBe(false);
welcomeScreen.find('[testId="join"]').simulate('click');
expect(setUiModeSpy.calledOnce).toBe(true);
expect(setUiModeSpy.calledWith('JOIN')).toBe(true);
});
it('changes mode to CREATE on button click', () => {
const setUiModeSpy = sinon.spy();
const welcomeScreen = shallow(
<WelcomeScreen mode="WELCOME" setUiMode={setUiModeSpy} />
);
expect(setUiModeSpy.called).toBe(false);
welcomeScreen.find('[testId="create"]').simulate('click');
expect(setUiModeSpy.calledOnce).toBe(true);
expect(setUiModeSpy.calledWith('CREATE')).toBe(true);
});
it('displays NewGameComponent on CREATE uiMode', () => {
const welcomeScreen = shallow(
<WelcomeScreen mode="CREATE" setUiMode={explodingCallback} />
);
expect(welcomeScreen.find(NewGameComponent).length).toBe(1);
});
it('displays JoinGameComponent on JOIN uiMode', () => {
const welcomeScreen = shallow(
<WelcomeScreen mode="JOIN" setUiMode={explodingCallback} />
);
expect(welcomeScreen.find(JoinGameComponent).length).toBe(1);
});
it('doesn\'t display Join-/NewGameComponent on welcome screen', () => {
const welcomeScreen = shallow(
<WelcomeScreen mode="WELCOME" setUiMode={explodingCallback} />
);
expect(welcomeScreen.find(JoinGameComponent).length).toBe(0);
expect(welcomeScreen.find(NewGameComponent).length).toBe(0);
});
});
|
apache-2.0
|
liam-kelly/port
|
includes/views/themes/cloudburst/errors/505.php
|
1373
|
<?php
/**
* Copyright 2014 William Caleb Kelly
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Name:
* Description:
* Date: 1/9/14
* Programmer: Liam Kelly
*/
//Send the error header (note the required space after the protocol string; 505 is "HTTP Version Not Supported")
header($_SERVER['SERVER_PROTOCOL'] . ' 505 HTTP Version Not Supported');
?>
<h3>505, HTTP Version Not Supported</h3>
<p>
We are having some issues with our service right now. Please come back later.<br />
<img src="/<?php echo $base_dir; ?>/includes/views/themes/<?php echo $theme->dir_name; ?>/images/derpy.png" alt="Derpy Hooves" title="Derpy Hooves"/>
<br/>If you see this pony, tell her a datacenter is no place for muffins.<br />
Please note I do not hold the copyright for this image, credit goes to <a href="http://www.deviantart.com/art/Derpy-404-326148742">aman692</a> on DeviantArt.
</p>
|
apache-2.0
|
muhbaasu/fx-sharp
|
src/FxSharp.Tests/Extensions/StackExtensionsTests.cs
|
1073
|
using System.Collections.Generic;
using FxSharp.Extensions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace FxSharp.Tests.Extensions
{
[TestClass]
public class StackExtensionsTests
{
private readonly Stack<int> _empty = new Stack<int>();
private readonly Stack<int> _stack = new Stack<int>();
public StackExtensionsTests()
{
_stack.Push(1);
_stack.Push(2);
_stack.Push(3);
}
[TestMethod]
public void PopShouldReturnNothingWhenStackIsEmpty()
{
Assert.IsTrue(_empty.PopOrNothing().IsNothing());
}
[TestMethod]
public void PopShouldReturnJustWhenStackNotEmpty()
{
Assert.IsTrue(_stack.PopOrNothing().IsJust());
}
[TestMethod]
public void PopShouldReturnCorrectValue()
{
_stack.PopOrNothing().Match_(
just: top => Assert.AreEqual(3, top),
nothing: () => Assert.Fail("Should not be nothing"));
}
}
}
|
apache-2.0
|
amirakhmedov/ignite
|
examples/src/main/java/org/apache/ignite/examples/ml/tutorial/Step_2_Imputing.java
|
4596
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.examples.ml.tutorial;
import java.io.FileNotFoundException;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;
import org.apache.ignite.ml.math.functions.IgniteBiFunction;
import org.apache.ignite.ml.math.primitives.vector.Vector;
import org.apache.ignite.ml.math.primitives.vector.VectorUtils;
import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer;
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.Accuracy;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
import org.apache.ignite.ml.tree.DecisionTreeNode;
import org.apache.ignite.thread.IgniteThread;
/**
* Usage of {@link ImputerTrainer} to fill in missing data values ({@code Double.NaN}) in the chosen columns.
* <p>
* Code in this example launches Ignite grid and fills the cache with test data (based on Titanic passengers data).</p>
* <p>
* After that it defines preprocessors that extract features from the upstream data and
* <a href="https://en.wikipedia.org/wiki/Imputation_(statistics)">impute</a> missing values.</p>
* <p>
* Then, it trains the model based on the processed data using decision tree classification.</p>
* <p>
* Finally, this example uses {@link Evaluator} functionality to compute metrics from predictions.</p>
*/
public class Step_2_Imputing {
/** Run example. */
public static void main(String[] args) throws InterruptedException {
System.out.println();
System.out.println(">>> Tutorial step 2 (imputing) example started.");
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
IgniteThread igniteThread = new IgniteThread(ignite.configuration().getIgniteInstanceName(),
Step_2_Imputing.class.getSimpleName(), () -> {
try {
IgniteCache<Integer, Object[]> dataCache = TitanicUtils.readPassengers(ignite);
IgniteBiFunction<Integer, Object[], Vector> featureExtractor
= (k, v) -> VectorUtils.of((double) v[0], (double) v[5], (double) v[6]);
IgniteBiFunction<Integer, Object[], Double> lbExtractor = (k, v) -> (double) v[1];
IgniteBiFunction<Integer, Object[], Vector> imputingPreprocessor = new ImputerTrainer<Integer, Object[]>()
.fit(ignite,
dataCache,
featureExtractor // "pclass", "sibsp", "parch"
);
DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer(5, 0);
// Train decision tree model.
DecisionTreeNode mdl = trainer.fit(
ignite,
dataCache,
imputingPreprocessor,
lbExtractor
);
System.out.println("\n>>> Trained model: " + mdl);
double accuracy = Evaluator.evaluate(
dataCache,
mdl,
imputingPreprocessor,
lbExtractor,
new Accuracy<>()
);
System.out.println("\n>>> Accuracy " + accuracy);
System.out.println("\n>>> Test Error " + (1 - accuracy));
System.out.println(">>> Tutorial step 2 (imputing) example completed.");
}
catch (FileNotFoundException e) {
e.printStackTrace();
}
});
igniteThread.start();
igniteThread.join();
}
}
}
|
apache-2.0
|
cmanlh/DDLGenerator
|
src/main/java/com/lifeonwalden/codeGenerator/javaClass/impl/HashBeanGeneratorImplDepracted.java
|
14009
|
package com.lifeonwalden.codeGenerator.javaClass.impl;
import com.lifeonwalden.codeGenerator.bean.Column;
import com.lifeonwalden.codeGenerator.bean.Table;
import com.lifeonwalden.codeGenerator.bean.config.Config;
import com.lifeonwalden.codeGenerator.constant.BeanTypeEnum;
import com.lifeonwalden.codeGenerator.constant.JdbcTypeEnum;
import com.lifeonwalden.codeGenerator.constant.SpecialInnerSuffix;
import com.lifeonwalden.codeGenerator.util.NameUtil;
import com.lifeonwalden.codeGenerator.util.StringUtil;
import com.lifeonwalden.codeGenerator.util.TableInfoUtil;
import com.lifeonwalden.forestbatis.biz.bean.AbstractMapBean;
import com.lifeonwalden.forestbatis.biz.bean.AbstractParamMapBean;
import com.lifeonwalden.forestbatis.biz.support.OrderBean;
import com.squareup.javapoet.*;
import com.squareup.javapoet.TypeSpec.Builder;
import javax.lang.model.element.Modifier;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.List;
/**
 * Works together with the mapping generator class below; both are deprecated.
 *
 * @see com.lifeonwalden.codeGenerator.mybatis.impl.update.SQLDirectUpdateElementGeneratorDeprecated
 * <p>
 * Please use the version below instead:
 * @see com.lifeonwalden.codeGenerator.javaClass.impl.HashBeanGeneratorImpl
 */
@Deprecated
public class HashBeanGeneratorImplDepracted extends BeanGeneratorImpl {
@Override
public String generate(Table table, Config config) {
generateResultBean(table, config);
generateParamBean(table, config);
if (table.getExtProps() != null && table.getExtProps().size() > 0) {
generateExtParamBean(table, config);
}
return null;
}
private void generateResultBean(Table table, Config config) {
String className = NameUtil.getResultBeanName(table, config);
ClassName beanClass = ClassName.get(config.getBeanInfo().getPackageName(), className);
Builder beanTypeBuilder = TypeSpec.classBuilder(className).addModifiers(Modifier.PUBLIC)
.superclass(AbstractMapBean.class);
beanTypeBuilder.addField(FieldSpec.builder(long.class, "serialVersionUID", Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.initializer("$L$L", TableInfoUtil.getSerialVersionUID(table, BeanTypeEnum.HASH), "L").build());
String orderByProperty = "orderBy", orderByParameter = "orderList";
ParameterizedTypeName orderByParamType = ParameterizedTypeName.get(ClassName.get(List.class), ClassName.get(OrderBean.class));
beanTypeBuilder.addMethod(MethodSpec.methodBuilder("set".concat(StringUtil.firstAlphToUpper(orderByProperty))).returns(beanClass)
.addModifiers(Modifier.PUBLIC).addParameter(orderByParamType, orderByParameter)
.addStatement("dataMap.put($S,$L)", orderByProperty, orderByParameter)
.addStatement("return this").build());
beanTypeBuilder.addMethod(MethodSpec.methodBuilder("get" + StringUtil.firstAlphToUpper(orderByProperty)).addModifiers(Modifier.PUBLIC)
.returns(orderByParamType)
.addCode(CodeBlock.builder().addStatement("Object val = dataMap.get($S)", orderByProperty)
.beginControlFlow("if (null == val)").addStatement("return null").endControlFlow().addStatement("return ($T)val", orderByParamType)
.build()).build());
for (Column column : table.getColumns()) {
String propertyName = StringUtil.removeUnderline(column.getName());
beanTypeBuilder.addField(FieldSpec.builder(String.class, StringUtil.firstAlphToUpper(propertyName), Modifier.PUBLIC, Modifier.FINAL, Modifier.STATIC)
.initializer("\"$L\"", propertyName).build());
methodBuild(beanTypeBuilder, beanClass, column, true, true);
}
try {
JavaFileTmp.builder(config.getBeanInfo().getPackageName(), beanTypeBuilder.build()).build().writeTo(
new File(new File(config.getOutputLocation()).getPath() + File.separator + config.getBeanInfo().getFolderName()), config.getEncoding());
} catch (IOException e) {
e.printStackTrace();
}
}
private void generateParamBean(Table table, Config config) {
CodeBlock.Builder codeBlockBuilder = CodeBlock.builder();
boolean staticBlock = false;
for (Column column : table.getColumns()) {
String javaType = column.getJavaType();
if (null == javaType) {
JdbcTypeEnum jdbcType = JdbcTypeEnum.nameOf(column.getType().toUpperCase());
if (null == jdbcType) {
throw new RuntimeException("unknow jdbc type : " + column.getType().toUpperCase());
}
javaType = jdbcType.getJavaType();
}
ClassName javaTypeClassName = ClassName.bestGuess(javaType);
String propertyName = StringUtil.removeUnderline(column.getName());
if (!javaTypeClassName.equals(ClassName.get(String.class))) {
staticBlock = true;
codeBlockBuilder.addStatement("typeMap.put($S, $T.class)", propertyName, javaTypeClassName);
if (javaTypeClassName.equals(ClassName.get(Date.class))) {
codeBlockBuilder.addStatement("typeMap.put($S, $T.class)", propertyName.concat(SpecialInnerSuffix.START), javaTypeClassName);
codeBlockBuilder.addStatement("typeMap.put($S, $T.class)", propertyName.concat(SpecialInnerSuffix.END), javaTypeClassName);
}
}
}
String className = NameUtil.getParamBeanName(table, config);
ClassName _className = ClassName.get(config.getBeanInfo().getPackageName(), className);
Builder beanTypeBuilder =
TypeSpec.classBuilder(className).addModifiers(Modifier.PUBLIC)
.superclass(AbstractParamMapBean.class);
beanTypeBuilder.addField(FieldSpec.builder(long.class, "serialVersionUID", Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.initializer("$L$L", TableInfoUtil.getSerialVersionUID(table, BeanTypeEnum.HASH_PARAM), "L").build());
if (staticBlock) {
beanTypeBuilder.addStaticBlock(codeBlockBuilder.build());
}
for (Column column : table.getColumns()) {
methodBuild(beanTypeBuilder, _className, column, true, false);
}
try {
JavaFileTmp.builder(config.getBeanInfo().getPackageName(), beanTypeBuilder.build()).build().writeTo(
new File(new File(config.getOutputLocation()).getPath() + File.separator + config.getBeanInfo().getFolderName()), config.getEncoding());
} catch (IOException e) {
e.printStackTrace();
}
}
private void generateExtParamBean(Table table, Config config) {
CodeBlock.Builder codeBlockBuilder = CodeBlock.builder();
boolean staticBlock = false;
for (Column column : table.getExtProps()) {
String javaType = column.getJavaType();
if (null == javaType) {
JdbcTypeEnum jdbcType = JdbcTypeEnum.nameOf(column.getType().toUpperCase());
if (null == jdbcType) {
throw new RuntimeException("unknow jdbc type : " + column.getType().toUpperCase());
}
javaType = jdbcType.getJavaType();
}
ClassName javaTypeClassName = ClassName.bestGuess(javaType);
if (!javaTypeClassName.equals(ClassName.get(String.class))) {
staticBlock = true;
codeBlockBuilder.addStatement("typeMap.put($S, $T.class)", StringUtil.removeUnderline(column.getName()), javaTypeClassName);
}
}
String parentClassName = NameUtil.getParamBeanName(table, config);
String className = NameUtil.getExtParamBeanName(table, config);
ClassName _parentClassName = ClassName.get(config.getBeanInfo().getPackageName(), parentClassName);
ClassName _className = ClassName.get(config.getBeanInfo().getPackageName(), className);
Builder beanTypeBuilder = TypeSpec.classBuilder(className).addModifiers(Modifier.PUBLIC).superclass(_parentClassName);
beanTypeBuilder.addField(FieldSpec.builder(long.class, "serialVersionUID", Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC)
.initializer("$L$L", TableInfoUtil.getSerialVersionUID(table, BeanTypeEnum.HASH_EXT_PARAM), "L").build());
if (staticBlock) {
beanTypeBuilder.addStaticBlock(codeBlockBuilder.build());
}
for (Column column : table.getExtProps()) {
methodBuild(beanTypeBuilder, _className, column, false, false);
}
try {
JavaFileTmp.builder(config.getBeanInfo().getPackageName(), beanTypeBuilder.build()).build().writeTo(
new File(new File(config.getOutputLocation()).getPath() + File.separator + config.getBeanInfo().getFolderName()), config.getEncoding());
} catch (IOException e) {
e.printStackTrace();
}
}
private void methodBuild(Builder beanBuilder, ClassName beanClass, Column column, boolean advanced, boolean complicated) {
String javaType = column.getJavaType();
if (null == javaType) {
JdbcTypeEnum jdbcType = JdbcTypeEnum.nameOf(column.getType().toUpperCase());
if (null == jdbcType) {
throw new RuntimeException("unknow jdbc type : " + column.getType().toUpperCase());
}
javaType = jdbcType.getJavaType();
}
ClassName javaTypeClassName = ClassName.bestGuess(javaType);
ParameterizedTypeName parameterizedTypeName = ParameterizedTypeName.get(ClassName.get(List.class), javaTypeClassName);
String propertyName = StringUtil.removeUnderline(column.getName());
buildSetMethod(beanBuilder, beanClass, javaTypeClassName, propertyName, column);
buildGetMethod(beanBuilder, javaTypeClassName, propertyName, column);
if (advanced) {
if (TableInfoUtil.allowedDateRange(column)) {
String dateStartName = propertyName.concat(SpecialInnerSuffix.START);
buildSetMethod(beanBuilder, beanClass, javaTypeClassName, dateStartName, column);
buildGetMethod(beanBuilder, javaTypeClassName, dateStartName, column);
String dateEndName = propertyName.concat(SpecialInnerSuffix.END);
buildSetMethod(beanBuilder, beanClass, javaTypeClassName, dateEndName, column);
buildGetMethod(beanBuilder, javaTypeClassName, dateEndName, column);
}
}
if (complicated) {
String pickedName = propertyName.concat(SpecialInnerSuffix.PICKED);
buildSetMethod(beanBuilder, beanClass, ClassName.get(Boolean.class), pickedName, column);
buildGetMethod(beanBuilder, ClassName.get(Boolean.class), pickedName, column);
if (column.isEnableIn()) {
String inName = propertyName.concat(SpecialInnerSuffix.IN);
buildSetMethod(beanBuilder, beanClass, parameterizedTypeName, inName, column);
buildGetMethod(beanBuilder, parameterizedTypeName, inName, column);
}
if (column.isEnableNotIn()) {
String notInName = propertyName.concat(SpecialInnerSuffix.NOT_IN);
buildSetMethod(beanBuilder, beanClass, parameterizedTypeName, notInName, column);
buildGetMethod(beanBuilder, parameterizedTypeName, notInName, column);
}
if (column.isEnableLike() && TableInfoUtil.allowedLike(column)) {
String likeName = propertyName.concat(SpecialInnerSuffix.LIKE);
buildSetMethod(beanBuilder, beanClass, javaTypeClassName, likeName, column);
buildGetMethod(beanBuilder, javaTypeClassName, likeName, column);
}
if (column.isEnableNotLike() && TableInfoUtil.allowedLike(column)) {
String notLikeName = propertyName.concat(SpecialInnerSuffix.NOT_LIKE);
buildSetMethod(beanBuilder, beanClass, javaTypeClassName, notLikeName, column);
buildGetMethod(beanBuilder, javaTypeClassName, notLikeName, column);
}
}
}
private void buildSetMethod(Builder beanBuilder, ClassName className, TypeName javaTypeClassName, String propertyName, Column column) {
MethodSpec.Builder setMethodBuilder =
MethodSpec.methodBuilder("set" + StringUtil.firstAlphToUpper(propertyName)).returns(className)
.addModifiers(Modifier.PUBLIC).addParameter(javaTypeClassName, propertyName)
.addStatement("dataMap.put($S,$L)", propertyName, propertyName)
.addStatement("return this");
if (column.getNote() != null && column.getNote().length() > 0) {
setMethodBuilder.addJavadoc("$L", column.getNote());
}
beanBuilder.addMethod(setMethodBuilder.build());
}
private void buildGetMethod(Builder beanBuilder, TypeName javaTypeClassName, String propertyName, Column column) {
MethodSpec.Builder getMethodBuilder =
MethodSpec.methodBuilder("get" + StringUtil.firstAlphToUpper(propertyName)).addModifiers(Modifier.PUBLIC)
.returns(javaTypeClassName)
.addCode(CodeBlock.builder().addStatement("Object val = dataMap.get($S)", propertyName)
.beginControlFlow("if (null == val)").addStatement("return null").endControlFlow().addStatement("return ($T)val", javaTypeClassName)
.build());
if (column.getNote() != null && column.getNote().length() > 0) {
getMethodBuilder.addJavadoc("$L", column.getNote());
}
beanBuilder.addMethod(getMethodBuilder.build());
}
}
|
apache-2.0
|
zuesgooogle/game-server
|
src/main/java/com/simplegame/server/stage/model/core/element/IHatredManager.java
|
936
|
package com.simplegame.server.stage.model.core.element;
import java.util.Collection;
import java.util.Map;
import com.simplegame.server.stage.model.core.element.impl.hatred.HatredStatistic;
import com.simplegame.server.stage.model.core.stage.ElementType;
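/**
 * Manages the hatred (aggro/threat) bookkeeping of a stage element: which
 * fighters it hates, by how much, and which target it should attack next.
 * (Descriptive summary inferred from the method names below.)
 */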
public interface IHatredManager {
public IHatred getHatredest();
public HatredStatistic addActiveHatred(IFighter fighter, int paramInt1, int paramInt2);
public HatredStatistic addPassiveHatred(IFighter fighter, int paramInt1, int paramInt2);
public void refreshHatred();
public void clear();
public void addInsideHatred(Integer paramInteger, IFighter fighter);
public IHatred getLastActiveAttackTarget();
public boolean containsSpecificElementTypeHatred(ElementType paramElementType);
public Map<String, Integer> getHarmMap();
public int getHatredValByRole(IFighter fighter);
public Collection<IHatred> getHatreds();
}
|
apache-2.0
|
fastly/omnibus-chef
|
config/software/openssl-customization.rb
|
2456
|
#
# Copyright:: Copyright (c) 2014 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This software makes sure that the SSL_CERT_FILE environment variable points
# to the bundled CA certificates that ship with omnibus. With this, Chef
# tools can be used with https URLs out of the box.
name "openssl-customization"
source path: "#{project.files_path}/#{name}"
if windows?
dependency "ruby-windows"
else
dependency "ruby"
dependency "rubygems"
end
build do
if windows?
block "Add OpenSSL customization file" do
# Gets a directory from RbConfig::CONFIG and sanitizes it.
def get_sanitized_rbconfig(config)
ruby = windows_safe_path("#{install_dir}/embedded/bin/ruby")
config_dir = Bundler.with_clean_env do
command_output = %x|#{ruby} -rrbconfig -e "puts RbConfig::CONFIG['#{config}']"|.strip
windows_safe_path(command_output)
end
if config_dir.nil? || config_dir.empty?
raise "could not determine embedded ruby's RbConfig::CONFIG['#{config}']"
end
config_dir
end
embedded_ruby_site_dir = get_sanitized_rbconfig('sitelibdir')
embedded_ruby_lib_dir = get_sanitized_rbconfig('rubylibdir')
source_ssl_env_hack = File.join(project_dir, "windows", "ssl_env_hack.rb")
destination_ssl_env_hack = File.join(embedded_ruby_site_dir, "ssl_env_hack.rb")
copy(source_ssl_env_hack, destination_ssl_env_hack)
# Unfortunately there is no patch(1) on Windows, but luckily we only need to append a line to openssl.rb
# to pick up our script, which finds the CA bundle in omnibus installations and points SSL_CERT_FILE to it
# if it's not already set
source_openssl_rb = File.join(embedded_ruby_lib_dir, "openssl.rb")
File.open(source_openssl_rb, "a") do |f|
f.write("\nrequire 'ssl_env_hack'\n")
end
end
end
end
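# For reference, a rough sketch of what the ssl_env_hack.rb script described
# above might contain (hypothetical; the actual file ships in the project's
# windows/ directory, and this build step only appends the require for it):
#
#   unless ENV['SSL_CERT_FILE'] && !ENV['SSL_CERT_FILE'].empty?
#     cacert = File.expand_path('../../../ssl/certs/cacert.pem', __FILE__)
#     ENV['SSL_CERT_FILE'] = cacert if File.exist?(cacert)
#   end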
|
apache-2.0
|
MyRobotLab/pyrobotlab
|
service/WatchDogTimer.py
|
819
|
# FIXME - "default" checkPoint() - e.g. watchdog.checkPoint() should work
# FIXME - re-running the script makes multiple timers & multiple corrective actions
# FIXME - make start() & stop() enable and disable all watchdog timers
# start services
Runtime.start("joy", "Joystick")
Runtime.start("gui","SwingGui")
Runtime.start("watchdog", "WatchDogTimer")
Runtime.start("python", "Python")
Runtime.start("m1", "Motor")
# adding and activating a checkpoint
watchdog.addTimer("joystickCheck")
watchdog.addAction("m1", "stop")
# python subscribes to joystick data
python.subscribe("joy","publishJoystickInput")
# new joystick data suppresses activation action
def onJoystickInput(data):
watchdog.checkPoint("watchdog", "joystickCheck")
# stop the watchdog
# watchdog.stop()
# start the watchdog
# watchdog.start()
|
apache-2.0
|
Alorel/dropbox-v2-php
|
src/Options/Mixins/MuteTrait.php
|
1816
|
<?php
/**
* Copyright (c) Arturas Molcanovas <a.molcanovas@gmail.com> 2016.
* https://github.com/Alorel/dropbox-v2-php
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Alorel\Dropbox\Options\Mixins;
use Alorel\Dropbox\Options\Option;
/**
* Normally, users are made aware of any file modifications in their Dropbox account via notifications in the
* client software. If true, this tells the clients that this modification shouldn't result in a user
* notification. The default for this field is False.
*
* @author Art <a.molcanovas@gmail.com>
*/
trait MuteTrait {
/**
* Normally, users are made aware of any file modifications in their Dropbox account via notifications in the
* client software. If true, this tells the clients that this modification shouldn't result in a user
* notification. The default for this field is False.
*
* @author Art <a.molcanovas@gmail.com>
*
* @param bool $set The setting
*
* @return self
*/
public function setMute($set) {
$this[Option::MUTE] = $set;
return $this;
}
}
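// A minimal usage sketch (hypothetical: UploadOptions stands in for any options
// class that composes MuteTrait and supports the ArrayAccess write used above):
//
//   $options = (new UploadOptions())->setMute(true); // upload without notifying clients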
|
apache-2.0
|
puyanLiu/LPYFramework
|
前端练习/10canvas/js/30PieChart.js
|
2447
|
/*
* @Author: liupuyan
* @Date: 2017-01-09 18:34:36
* @Last Modified by: liupuyan
* @Last Modified time: 2017-01-09 18:54:16
*/
'use strict';
function PieChart(option) {
this._init(option);
}
PieChart.prototype = {
_init: function(option) {
this.x = option.x || 0;
this.y = option.y || 0;
this.r = option.r || 0;
this.data = option.data || [];
// Group holding every element of the pie chart
this.group = new Konva.Group({
x: this.x,
y: this.y
});
// Group holding all of the pie chart's wedges
this.wedgeGroup = new Konva.Group({
x: 0,
y: 0
});
this.group.add(this.wedgeGroup);
// Add a group to hold the percentage labels
this.textGroup = new Konva.Group({
x: 0,
y: 0
});
this.group.add(this.textGroup);
var self = this;
var tempAngle = -90;
this.data.forEach(function(item, index) {
// Create one wedge (pie sector) per data item
var angle = 360 * item.value;
var wedge = new Konva.Wedge({
x: 0,
y: 0,
angle: angle,
radius: self.r,
fill: item.color,
rotation: tempAngle
});
self.wedgeGroup.add(wedge);
// Angle at which to draw the label text
var textAngle = tempAngle + 1 / 2 * angle;
// Draw the percentage label
var text = new Konva.Text({
x: (self.r + 20) * Math.cos(Math.PI / 180 * textAngle),
y: (self.r + 20) * Math.sin(Math.PI / 180 * textAngle),
text: item.value * 100 + '%',
fill: item.color
});
// Position the label according to its angle
if (textAngle > 90 && textAngle < 270) {
// Right-align the text
text.x(text.x() - text.getWidth());
}
self.textGroup.add(text);
tempAngle += angle;
});
// Draw the outer circle
var cir = new Konva.Circle({
x: 0,
y: 0,
radius: this.r + 10,
stroke: '#ccc',
strokeWidth: 2
});
this.group.add(cir);
this._animateIndex = 0;
},
addToGroupOrLayer: function(arg) {
arg.add(this.group);
},
playAnimate: function() {
var self = this;
if (this._animateIndex == 0) {
this.wedgeGroup.getChildren().each(function(item, index) {
item.angle(0);
});
}
var item = this.wedgeGroup.getChildren()[this._animateIndex]; // grab the wedge to animate next
item.to({
angle: this.data[this._animateIndex].value * 360,
duration: this.data[this._animateIndex].value * 2,
onFinish: function() {
self._animateIndex++;
if (self._animateIndex >= self.data.length) {
self._animateIndex = 0;
return;
}
self.playAnimate();
}
});
}
};
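// A minimal usage sketch (assumes Konva is loaded and a Konva.Stage named
// `stage` exists; the data values are fractions that should sum to 1):
//
//   var layer = new Konva.Layer();
//   var pie = new PieChart({
//     x: 200, y: 200, r: 100,
//     data: [
//       { value: 0.4, color: 'tomato' },
//       { value: 0.6, color: 'steelblue' }
//     ]
//   });
//   pie.addToGroupOrLayer(layer);
//   stage.add(layer);
//   pie.playAnimate();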
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-medialive/src/main/java/com/amazonaws/services/medialive/model/transform/BatchStartResultJsonUnmarshaller.java
|
3177
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.medialive.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.medialive.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* BatchStartResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BatchStartResultJsonUnmarshaller implements Unmarshaller<BatchStartResult, JsonUnmarshallerContext> {
public BatchStartResult unmarshall(JsonUnmarshallerContext context) throws Exception {
BatchStartResult batchStartResult = new BatchStartResult();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return batchStartResult;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("failed", targetDepth)) {
context.nextToken();
batchStartResult.setFailed(new ListUnmarshaller<BatchFailedResultModel>(BatchFailedResultModelJsonUnmarshaller.getInstance())
.unmarshall(context));
}
if (context.testExpression("successful", targetDepth)) {
context.nextToken();
batchStartResult.setSuccessful(new ListUnmarshaller<BatchSuccessfulResultModel>(BatchSuccessfulResultModelJsonUnmarshaller.getInstance())
.unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return batchStartResult;
}
private static BatchStartResultJsonUnmarshaller instance;
public static BatchStartResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new BatchStartResultJsonUnmarshaller();
return instance;
}
}
|
apache-2.0
|
google-research/neural-structural-optimization
|
setup.py
|
1236
|
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import setuptools
INSTALL_REQUIRES = [
'absl-py',
'apache-beam',
'autograd',
'nlopt',
'numpy',
'matplotlib',
'Pillow',
'scipy',
'scikit-image',
'seaborn',
'xarray',
]
if sys.version_info[:2] < (3, 7):
INSTALL_REQUIRES.append('dataclasses')
setuptools.setup(
name='neural-structural-optimization',
version='0.0.0',
license='Apache 2.0',
author='Google LLC',
author_email='noreply@google.com',
install_requires=INSTALL_REQUIRES,
url='https://github.com/google-research/neural-structural-optimization',
packages=setuptools.find_packages(),
python_requires='>=3.6')
|
apache-2.0
|
gergo13/JavaMainRepo
|
Students/Bologa M. Marius - Vasile/Zoowsome/src/javasmmr/zoowsome/models/animals/Mammal.java
|
1085
|
package javasmmr.zoowsome.models.animals;
/**
*
* @author Marius Bologa
*
*/
public abstract class Mammal extends Animal {
/**
*
* @param dangerPerc
* How dangerous an animal is.
* @param maintenanceCost
* The maintenance cost.
*/
public Mammal(double dangerPerc, double maintenanceCost) {
super(dangerPerc, maintenanceCost);
}
/**
 * Normal body temperature.
 */
private float normalBodyTemp;
/**
 * Percentage of the body covered in hair.
 */
private float percBodyHair;
/**
*
* @return Normal body temperature.
*/
public final float getNormalBodyTemp() {
return normalBodyTemp;
}
/**
*
* @param normalBodyTemp
* Normal body temperature.
*/
public final void setNormalBodyTemp(final float normalBodyTemp) {
this.normalBodyTemp = normalBodyTemp;
}
/**
*
* @return % of body covered in hair
*/
public final float getPercBodyHair() {
return percBodyHair;
}
/**
*
* @param percBodyHair
* % of body covered in hair
*/
public final void setPercBodyHair(final float percBodyHair) {
this.percBodyHair = percBodyHair;
}
}
|
apache-2.0
|
twitter-forks/presto
|
presto-main/src/main/java/com/facebook/presto/sql/planner/LocalExecutionPlanner.java
|
165070
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.airlift.json.JsonCodec;
import com.facebook.presto.Session;
import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.common.Page;
import com.facebook.presto.common.PageBuilder;
import com.facebook.presto.common.block.BlockEncodingSerde;
import com.facebook.presto.common.block.SortOrder;
import com.facebook.presto.common.function.OperatorType;
import com.facebook.presto.common.function.QualifiedFunctionName;
import com.facebook.presto.common.function.SqlFunctionProperties;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.common.type.TypeSignature;
import com.facebook.presto.execution.ExplainAnalyzeContext;
import com.facebook.presto.execution.StageExecutionId;
import com.facebook.presto.execution.TaskManagerConfig;
import com.facebook.presto.execution.buffer.OutputBuffer;
import com.facebook.presto.execution.buffer.PagesSerdeFactory;
import com.facebook.presto.execution.scheduler.ExecutionWriterTarget;
import com.facebook.presto.execution.scheduler.ExecutionWriterTarget.CreateHandle;
import com.facebook.presto.execution.scheduler.ExecutionWriterTarget.DeleteHandle;
import com.facebook.presto.execution.scheduler.ExecutionWriterTarget.InsertHandle;
import com.facebook.presto.execution.scheduler.TableWriteInfo;
import com.facebook.presto.execution.scheduler.TableWriteInfo.DeleteScanInfo;
import com.facebook.presto.index.IndexManager;
import com.facebook.presto.metadata.AnalyzeTableHandle;
import com.facebook.presto.metadata.FunctionManager;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.operator.AggregationOperator.AggregationOperatorFactory;
import com.facebook.presto.operator.AssignUniqueIdOperator;
import com.facebook.presto.operator.DeleteOperator.DeleteOperatorFactory;
import com.facebook.presto.operator.DevNullOperator.DevNullOperatorFactory;
import com.facebook.presto.operator.DriverFactory;
import com.facebook.presto.operator.EnforceSingleRowOperator;
import com.facebook.presto.operator.ExplainAnalyzeOperator.ExplainAnalyzeOperatorFactory;
import com.facebook.presto.operator.FilterAndProjectOperator;
import com.facebook.presto.operator.GroupIdOperator;
import com.facebook.presto.operator.HashAggregationOperator.HashAggregationOperatorFactory;
import com.facebook.presto.operator.HashBuilderOperator.HashBuilderOperatorFactory;
import com.facebook.presto.operator.HashSemiJoinOperator.HashSemiJoinOperatorFactory;
import com.facebook.presto.operator.JoinBridgeManager;
import com.facebook.presto.operator.JoinOperatorFactory;
import com.facebook.presto.operator.JoinOperatorFactory.OuterOperatorFactoryResult;
import com.facebook.presto.operator.LimitOperator.LimitOperatorFactory;
import com.facebook.presto.operator.LocalPlannerAware;
import com.facebook.presto.operator.LookupJoinOperators;
import com.facebook.presto.operator.LookupOuterOperator.LookupOuterOperatorFactory;
import com.facebook.presto.operator.LookupSourceFactory;
import com.facebook.presto.operator.MarkDistinctOperator.MarkDistinctOperatorFactory;
import com.facebook.presto.operator.MetadataDeleteOperator.MetadataDeleteOperatorFactory;
import com.facebook.presto.operator.NestedLoopJoinBridge;
import com.facebook.presto.operator.NestedLoopJoinPagesSupplier;
import com.facebook.presto.operator.OperatorFactory;
import com.facebook.presto.operator.OrderByOperator.OrderByOperatorFactory;
import com.facebook.presto.operator.OutputFactory;
import com.facebook.presto.operator.PageSinkCommitStrategy;
import com.facebook.presto.operator.PagesIndex;
import com.facebook.presto.operator.PagesSpatialIndexFactory;
import com.facebook.presto.operator.PartitionFunction;
import com.facebook.presto.operator.PartitionedLookupSourceFactory;
import com.facebook.presto.operator.PipelineExecutionStrategy;
import com.facebook.presto.operator.RowNumberOperator;
import com.facebook.presto.operator.ScanFilterAndProjectOperator.ScanFilterAndProjectOperatorFactory;
import com.facebook.presto.operator.SetBuilderOperator.SetBuilderOperatorFactory;
import com.facebook.presto.operator.SetBuilderOperator.SetSupplier;
import com.facebook.presto.operator.SourceOperatorFactory;
import com.facebook.presto.operator.SpatialIndexBuilderOperator.SpatialIndexBuilderOperatorFactory;
import com.facebook.presto.operator.SpatialIndexBuilderOperator.SpatialPredicate;
import com.facebook.presto.operator.SpatialJoinOperator.SpatialJoinOperatorFactory;
import com.facebook.presto.operator.StageExecutionDescriptor;
import com.facebook.presto.operator.StatisticsWriterOperator.StatisticsWriterOperatorFactory;
import com.facebook.presto.operator.StreamingAggregationOperator.StreamingAggregationOperatorFactory;
import com.facebook.presto.operator.TableCommitContext;
import com.facebook.presto.operator.TableFinishOperator.PageSinkCommitter;
import com.facebook.presto.operator.TableScanOperator.TableScanOperatorFactory;
import com.facebook.presto.operator.TableWriterMergeOperator.TableWriterMergeOperatorFactory;
import com.facebook.presto.operator.TaskContext;
import com.facebook.presto.operator.TaskOutputOperator.TaskOutputFactory;
import com.facebook.presto.operator.TopNOperator.TopNOperatorFactory;
import com.facebook.presto.operator.TopNRowNumberOperator;
import com.facebook.presto.operator.ValuesOperator.ValuesOperatorFactory;
import com.facebook.presto.operator.WindowFunctionDefinition;
import com.facebook.presto.operator.WindowOperator.WindowOperatorFactory;
import com.facebook.presto.operator.aggregation.AccumulatorFactory;
import com.facebook.presto.operator.aggregation.InternalAggregationFunction;
import com.facebook.presto.operator.aggregation.LambdaProvider;
import com.facebook.presto.operator.exchange.LocalExchange.LocalExchangeFactory;
import com.facebook.presto.operator.exchange.LocalExchangeSinkOperator.LocalExchangeSinkOperatorFactory;
import com.facebook.presto.operator.exchange.LocalExchangeSourceOperator.LocalExchangeSourceOperatorFactory;
import com.facebook.presto.operator.exchange.LocalMergeSourceOperator.LocalMergeSourceOperatorFactory;
import com.facebook.presto.operator.exchange.PageChannelSelector;
import com.facebook.presto.operator.index.DynamicTupleFilterFactory;
import com.facebook.presto.operator.index.FieldSetFilteringRecordSet;
import com.facebook.presto.operator.index.IndexBuildDriverFactoryProvider;
import com.facebook.presto.operator.index.IndexJoinLookupStats;
import com.facebook.presto.operator.index.IndexLookupSourceFactory;
import com.facebook.presto.operator.index.IndexSourceOperator;
import com.facebook.presto.operator.project.CursorProcessor;
import com.facebook.presto.operator.project.PageProcessor;
import com.facebook.presto.operator.repartition.OptimizedPartitionedOutputOperator.OptimizedPartitionedOutputFactory;
import com.facebook.presto.operator.repartition.PartitionedOutputOperator.PartitionedOutputFactory;
import com.facebook.presto.operator.window.FrameInfo;
import com.facebook.presto.operator.window.WindowFunctionSupplier;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorIndex;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.RecordSet;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.spi.function.FunctionHandle;
import com.facebook.presto.spi.function.FunctionMetadata;
import com.facebook.presto.spi.plan.AggregationNode;
import com.facebook.presto.spi.plan.AggregationNode.Aggregation;
import com.facebook.presto.spi.plan.AggregationNode.Step;
import com.facebook.presto.spi.plan.Assignments;
import com.facebook.presto.spi.plan.DistinctLimitNode;
import com.facebook.presto.spi.plan.FilterNode;
import com.facebook.presto.spi.plan.LimitNode;
import com.facebook.presto.spi.plan.MarkDistinctNode;
import com.facebook.presto.spi.plan.OrderingScheme;
import com.facebook.presto.spi.plan.PlanNode;
import com.facebook.presto.spi.plan.PlanNodeId;
import com.facebook.presto.spi.plan.ProjectNode;
import com.facebook.presto.spi.plan.TableScanNode;
import com.facebook.presto.spi.plan.TopNNode;
import com.facebook.presto.spi.plan.UnionNode;
import com.facebook.presto.spi.plan.ValuesNode;
import com.facebook.presto.spi.relation.CallExpression;
import com.facebook.presto.spi.relation.ConstantExpression;
import com.facebook.presto.spi.relation.InputReferenceExpression;
import com.facebook.presto.spi.relation.LambdaDefinitionExpression;
import com.facebook.presto.spi.relation.RowExpression;
import com.facebook.presto.spi.relation.VariableReferenceExpression;
import com.facebook.presto.spiller.PartitioningSpillerFactory;
import com.facebook.presto.spiller.SingleStreamSpillerFactory;
import com.facebook.presto.spiller.SpillerFactory;
import com.facebook.presto.split.MappedRecordSet;
import com.facebook.presto.split.PageSinkManager;
import com.facebook.presto.split.PageSourceProvider;
import com.facebook.presto.sql.gen.ExpressionCompiler;
import com.facebook.presto.sql.gen.JoinCompiler;
import com.facebook.presto.sql.gen.JoinFilterFunctionCompiler;
import com.facebook.presto.sql.gen.JoinFilterFunctionCompiler.JoinFilterFunctionFactory;
import com.facebook.presto.sql.gen.OrderingCompiler;
import com.facebook.presto.sql.gen.PageFunctionCompiler;
import com.facebook.presto.sql.planner.optimizations.IndexJoinOptimizer;
import com.facebook.presto.sql.planner.plan.AssignUniqueId;
import com.facebook.presto.sql.planner.plan.DeleteNode;
import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.GroupIdNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.IndexSourceNode;
import com.facebook.presto.sql.planner.plan.InternalPlanVisitor;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.MetadataDeleteNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.SpatialJoinNode;
import com.facebook.presto.sql.planner.plan.StatisticAggregationsDescriptor;
import com.facebook.presto.sql.planner.plan.StatisticsWriterNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableWriterMergeNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.planner.plan.WindowNode.Frame;
import com.facebook.presto.sql.relational.VariableToChannelTranslator;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.VerifyException;
import com.google.common.collect.ContiguousSet;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.SetMultimap;
import com.google.common.primitives.Ints;
import io.airlift.units.DataSize;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static com.facebook.presto.SystemSessionProperties.getAggregationOperatorUnspillMemoryLimit;
import static com.facebook.presto.SystemSessionProperties.getFilterAndProjectMinOutputPageRowCount;
import static com.facebook.presto.SystemSessionProperties.getFilterAndProjectMinOutputPageSize;
import static com.facebook.presto.SystemSessionProperties.getIndexLoaderTimeout;
import static com.facebook.presto.SystemSessionProperties.getTaskConcurrency;
import static com.facebook.presto.SystemSessionProperties.getTaskPartitionedWriterCount;
import static com.facebook.presto.SystemSessionProperties.getTaskWriterCount;
import static com.facebook.presto.SystemSessionProperties.isExchangeCompressionEnabled;
import static com.facebook.presto.SystemSessionProperties.isOptimizeCommonSubExpressions;
import static com.facebook.presto.SystemSessionProperties.isOptimizedRepartitioningEnabled;
import static com.facebook.presto.SystemSessionProperties.isSpillEnabled;
import static com.facebook.presto.common.type.BigintType.BIGINT;
import static com.facebook.presto.common.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.common.type.TypeUtils.writeNativeValue;
import static com.facebook.presto.expressions.LogicalRowExpressions.TRUE_CONSTANT;
import static com.facebook.presto.expressions.RowExpressionNodeInliner.replaceExpression;
import static com.facebook.presto.geospatial.SphericalGeographyUtils.sphericalDistance;
import static com.facebook.presto.operator.DistinctLimitOperator.DistinctLimitOperatorFactory;
import static com.facebook.presto.operator.NestedLoopBuildOperator.NestedLoopBuildOperatorFactory;
import static com.facebook.presto.operator.NestedLoopJoinOperator.NestedLoopJoinOperatorFactory;
import static com.facebook.presto.operator.PageSinkCommitStrategy.LIFESPAN_COMMIT;
import static com.facebook.presto.operator.PageSinkCommitStrategy.NO_COMMIT;
import static com.facebook.presto.operator.PageSinkCommitStrategy.TASK_COMMIT;
import static com.facebook.presto.operator.PipelineExecutionStrategy.GROUPED_EXECUTION;
import static com.facebook.presto.operator.PipelineExecutionStrategy.UNGROUPED_EXECUTION;
import static com.facebook.presto.operator.TableFinishOperator.TableFinishOperatorFactory;
import static com.facebook.presto.operator.TableFinishOperator.TableFinisher;
import static com.facebook.presto.operator.TableWriterOperator.TableWriterOperatorFactory;
import static com.facebook.presto.operator.TableWriterUtils.CONTEXT_CHANNEL;
import static com.facebook.presto.operator.TableWriterUtils.FRAGMENT_CHANNEL;
import static com.facebook.presto.operator.TableWriterUtils.ROW_COUNT_CHANNEL;
import static com.facebook.presto.operator.TableWriterUtils.STATS_START_CHANNEL;
import static com.facebook.presto.operator.WindowFunctionDefinition.window;
import static com.facebook.presto.operator.unnest.UnnestOperator.UnnestOperatorFactory;
import static com.facebook.presto.spi.StandardErrorCode.COMPILER_ERROR;
import static com.facebook.presto.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static com.facebook.presto.spi.plan.AggregationNode.Step.FINAL;
import static com.facebook.presto.spi.plan.AggregationNode.Step.INTERMEDIATE;
import static com.facebook.presto.spi.plan.AggregationNode.Step.PARTIAL;
import static com.facebook.presto.spi.relation.ExpressionOptimizer.Level.OPTIMIZED;
import static com.facebook.presto.sql.gen.LambdaBytecodeGenerator.compileLambdaProvider;
import static com.facebook.presto.sql.planner.RowExpressionInterpreter.rowExpressionInterpreter;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.COORDINATOR_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_ARBITRARY_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_BROADCAST_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SCALED_WRITER_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.plan.AssignmentUtils.identityAssignments;
import static com.facebook.presto.sql.planner.plan.JoinNode.DistributionType.REPLICATED;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.FULL;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.INNER;
import static com.facebook.presto.sql.planner.plan.JoinNode.Type.RIGHT;
import static com.facebook.presto.sql.relational.Expressions.constant;
import static com.facebook.presto.util.Reflection.constructorMethodHandle;
import static com.facebook.presto.util.SpatialJoinUtils.ST_CONTAINS;
import static com.facebook.presto.util.SpatialJoinUtils.ST_CROSSES;
import static com.facebook.presto.util.SpatialJoinUtils.ST_DISTANCE;
import static com.facebook.presto.util.SpatialJoinUtils.ST_EQUALS;
import static com.facebook.presto.util.SpatialJoinUtils.ST_INTERSECTS;
import static com.facebook.presto.util.SpatialJoinUtils.ST_OVERLAPS;
import static com.facebook.presto.util.SpatialJoinUtils.ST_TOUCHES;
import static com.facebook.presto.util.SpatialJoinUtils.ST_WITHIN;
import static com.facebook.presto.util.SpatialJoinUtils.extractSupportedSpatialComparisons;
import static com.facebook.presto.util.SpatialJoinUtils.extractSupportedSpatialFunctions;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.DiscreteDomain.integers;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Range.closedOpen;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.stream.IntStream.range;
public class LocalExecutionPlanner
{
private final Metadata metadata;
private final Optional<ExplainAnalyzeContext> explainAnalyzeContext;
private final PageSourceProvider pageSourceProvider;
private final IndexManager indexManager;
private final PartitioningProviderManager partitioningProviderManager;
private final NodePartitioningManager nodePartitioningManager;
private final PageSinkManager pageSinkManager;
private final ExpressionCompiler expressionCompiler;
private final PageFunctionCompiler pageFunctionCompiler;
private final JoinFilterFunctionCompiler joinFilterFunctionCompiler;
private final DataSize maxIndexMemorySize;
private final IndexJoinLookupStats indexJoinLookupStats;
private final DataSize maxPartialAggregationMemorySize;
private final DataSize maxPagePartitioningBufferSize;
private final DataSize maxLocalExchangeBufferSize;
private final SpillerFactory spillerFactory;
private final SingleStreamSpillerFactory singleStreamSpillerFactory;
private final PartitioningSpillerFactory partitioningSpillerFactory;
private final BlockEncodingSerde blockEncodingSerde;
private final PagesIndex.Factory pagesIndexFactory;
private final JoinCompiler joinCompiler;
private final LookupJoinOperators lookupJoinOperators;
private final OrderingCompiler orderingCompiler;
private final JsonCodec<TableCommitContext> tableCommitContextCodec;
private static final TypeSignature SPHERICAL_GEOGRAPHY_TYPE_SIGNATURE = parseTypeSignature("SphericalGeography");
@Inject
public LocalExecutionPlanner(
Metadata metadata,
Optional<ExplainAnalyzeContext> explainAnalyzeContext,
PageSourceProvider pageSourceProvider,
IndexManager indexManager,
PartitioningProviderManager partitioningProviderManager,
NodePartitioningManager nodePartitioningManager,
PageSinkManager pageSinkManager,
ExpressionCompiler expressionCompiler,
PageFunctionCompiler pageFunctionCompiler,
JoinFilterFunctionCompiler joinFilterFunctionCompiler,
IndexJoinLookupStats indexJoinLookupStats,
TaskManagerConfig taskManagerConfig,
SpillerFactory spillerFactory,
SingleStreamSpillerFactory singleStreamSpillerFactory,
PartitioningSpillerFactory partitioningSpillerFactory,
BlockEncodingSerde blockEncodingSerde,
PagesIndex.Factory pagesIndexFactory,
JoinCompiler joinCompiler,
LookupJoinOperators lookupJoinOperators,
OrderingCompiler orderingCompiler,
JsonCodec<TableCommitContext> tableCommitContextCodec)
{
this.explainAnalyzeContext = requireNonNull(explainAnalyzeContext, "explainAnalyzeContext is null");
this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null");
this.indexManager = requireNonNull(indexManager, "indexManager is null");
this.partitioningProviderManager = requireNonNull(partitioningProviderManager, "partitioningProviderManager is null");
this.nodePartitioningManager = requireNonNull(nodePartitioningManager, "nodePartitioningManager is null");
this.metadata = requireNonNull(metadata, "metadata is null");
this.pageSinkManager = requireNonNull(pageSinkManager, "pageSinkManager is null");
this.expressionCompiler = requireNonNull(expressionCompiler, "compiler is null");
this.pageFunctionCompiler = requireNonNull(pageFunctionCompiler, "pageFunctionCompiler is null");
this.joinFilterFunctionCompiler = requireNonNull(joinFilterFunctionCompiler, "compiler is null");
this.indexJoinLookupStats = requireNonNull(indexJoinLookupStats, "indexJoinLookupStats is null");
this.maxIndexMemorySize = requireNonNull(taskManagerConfig, "taskManagerConfig is null").getMaxIndexMemoryUsage();
this.spillerFactory = requireNonNull(spillerFactory, "spillerFactory is null");
this.singleStreamSpillerFactory = requireNonNull(singleStreamSpillerFactory, "singleStreamSpillerFactory is null");
this.partitioningSpillerFactory = requireNonNull(partitioningSpillerFactory, "partitioningSpillerFactory is null");
this.blockEncodingSerde = requireNonNull(blockEncodingSerde, "blockEncodingSerde is null");
this.maxPartialAggregationMemorySize = taskManagerConfig.getMaxPartialAggregationMemoryUsage();
this.maxPagePartitioningBufferSize = taskManagerConfig.getMaxPagePartitioningBufferSize();
this.maxLocalExchangeBufferSize = taskManagerConfig.getMaxLocalExchangeBufferSize();
this.pagesIndexFactory = requireNonNull(pagesIndexFactory, "pagesIndexFactory is null");
this.joinCompiler = requireNonNull(joinCompiler, "joinCompiler is null");
this.lookupJoinOperators = requireNonNull(lookupJoinOperators, "lookupJoinOperators is null");
this.orderingCompiler = requireNonNull(orderingCompiler, "orderingCompiler is null");
this.tableCommitContextCodec = requireNonNull(tableCommitContextCodec, "tableCommitContextCodec is null");
}
public LocalExecutionPlan plan(
TaskContext taskContext,
PlanNode plan,
PartitioningScheme partitioningScheme,
StageExecutionDescriptor stageExecutionDescriptor,
List<PlanNodeId> partitionedSourceOrder,
OutputBuffer outputBuffer,
RemoteSourceFactory remoteSourceFactory,
TableWriteInfo tableWriteInfo)
{
return plan(
taskContext,
plan,
partitioningScheme,
stageExecutionDescriptor,
partitionedSourceOrder,
createOutputFactory(taskContext, partitioningScheme, outputBuffer),
remoteSourceFactory,
tableWriteInfo,
false);
}
public LocalExecutionPlan plan(
TaskContext taskContext,
PlanNode plan,
PartitioningScheme partitioningScheme,
StageExecutionDescriptor stageExecutionDescriptor,
List<PlanNodeId> partitionedSourceOrder,
OutputFactory outputFactory,
RemoteSourceFactory remoteSourceFactory,
TableWriteInfo tableWriteInfo,
boolean pageSinkCommitRequired)
{
return plan(
taskContext,
stageExecutionDescriptor,
plan,
partitioningScheme.getOutputLayout(),
partitionedSourceOrder,
outputFactory,
createOutputPartitioning(taskContext, partitioningScheme),
remoteSourceFactory,
tableWriteInfo,
pageSinkCommitRequired);
}
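    // Broadcast, arbitrary, scaled-writer, single, and coordinator distributions write straight
    // to the task output buffer; every other partitioning handle needs a partitioned output,
    // using the optimized repartitioning path when the session enables it.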
private OutputFactory createOutputFactory(TaskContext taskContext, PartitioningScheme partitioningScheme, OutputBuffer outputBuffer)
{
if (partitioningScheme.getPartitioning().getHandle().equals(FIXED_BROADCAST_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(FIXED_ARBITRARY_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(SCALED_WRITER_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(SINGLE_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(COORDINATOR_DISTRIBUTION)) {
return new TaskOutputFactory(outputBuffer);
}
if (isOptimizedRepartitioningEnabled(taskContext.getSession())) {
return new OptimizedPartitionedOutputFactory(outputBuffer, maxPagePartitioningBufferSize);
}
else {
return new PartitionedOutputFactory(outputBuffer, maxPagePartitioningBufferSize);
}
}
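    // Derives the partition function inputs from the partitioning scheme: either the precomputed
    // hash column alone, or one channel (or bound constant) per partitioning argument, plus an
    // optional null channel when nulls-and-any replication is enabled.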
private Optional<OutputPartitioning> createOutputPartitioning(TaskContext taskContext, PartitioningScheme partitioningScheme)
{
if (partitioningScheme.getPartitioning().getHandle().equals(FIXED_BROADCAST_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(FIXED_ARBITRARY_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(SCALED_WRITER_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(SINGLE_DISTRIBUTION) ||
partitioningScheme.getPartitioning().getHandle().equals(COORDINATOR_DISTRIBUTION)) {
return Optional.empty();
}
List<VariableReferenceExpression> outputLayout = partitioningScheme.getOutputLayout();
// We can convert the variables directly into channels, because the root must be a sink and therefore the layout is fixed
List<Integer> partitionChannels;
List<Optional<ConstantExpression>> partitionConstants;
List<Type> partitionChannelTypes;
if (partitioningScheme.getHashColumn().isPresent()) {
partitionChannels = ImmutableList.of(outputLayout.indexOf(partitioningScheme.getHashColumn().get()));
partitionConstants = ImmutableList.of(Optional.empty());
partitionChannelTypes = ImmutableList.of(BIGINT);
}
else {
checkArgument(
partitioningScheme.getPartitioning().getArguments().stream().allMatch(argument -> argument instanceof ConstantExpression || argument instanceof VariableReferenceExpression),
format("Expect all partitioning arguments to be either ConstantExpression or VariableReferenceExpression, but get %s", partitioningScheme.getPartitioning().getArguments()));
partitionChannels = partitioningScheme.getPartitioning().getArguments().stream()
.map(argument -> {
if (argument instanceof ConstantExpression) {
return -1;
}
return outputLayout.indexOf(argument);
})
.collect(toImmutableList());
partitionConstants = partitioningScheme.getPartitioning().getArguments().stream()
.map(argument -> {
if (argument instanceof ConstantExpression) {
return Optional.of((ConstantExpression) argument);
}
return Optional.<ConstantExpression>empty();
})
.collect(toImmutableList());
partitionChannelTypes = partitioningScheme.getPartitioning().getArguments().stream()
.map(RowExpression::getType)
.collect(toImmutableList());
}
PartitionFunction partitionFunction = nodePartitioningManager.getPartitionFunction(taskContext.getSession(), partitioningScheme, partitionChannelTypes);
OptionalInt nullChannel = OptionalInt.empty();
Set<VariableReferenceExpression> partitioningColumns = partitioningScheme.getPartitioning().getVariableReferences();
// partitioningColumns expected to have one column in the normal case, and zero columns when partitioning on a constant
checkArgument(!partitioningScheme.isReplicateNullsAndAny() || partitioningColumns.size() <= 1);
if (partitioningScheme.isReplicateNullsAndAny() && partitioningColumns.size() == 1) {
nullChannel = OptionalInt.of(outputLayout.indexOf(getOnlyElement(partitioningColumns)));
}
return Optional.of(new OutputPartitioning(partitionFunction, partitionChannels, partitionConstants, partitioningScheme.isReplicateNullsAndAny(), nullChannel));
}
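    // Core entry point: walks the plan tree with the Visitor below, appends the output operator
    // to the root pipeline, adds lookup-outer drivers for RIGHT/FULL joins, and notifies
    // LocalPlannerAware operator factories once planning is complete.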
@VisibleForTesting
public LocalExecutionPlan plan(
TaskContext taskContext,
StageExecutionDescriptor stageExecutionDescriptor,
PlanNode plan,
List<VariableReferenceExpression> outputLayout,
List<PlanNodeId> partitionedSourceOrder,
OutputFactory outputOperatorFactory,
Optional<OutputPartitioning> outputPartitioning,
RemoteSourceFactory remoteSourceFactory,
TableWriteInfo tableWriteInfo,
boolean pageSinkCommitRequired)
{
Session session = taskContext.getSession();
LocalExecutionPlanContext context = new LocalExecutionPlanContext(taskContext, tableWriteInfo);
PhysicalOperation physicalOperation = plan.accept(new Visitor(session, stageExecutionDescriptor, remoteSourceFactory, pageSinkCommitRequired), context);
Function<Page, Page> pagePreprocessor = enforceLayoutProcessor(outputLayout, physicalOperation.getLayout());
List<Type> outputTypes = outputLayout.stream()
.map(VariableReferenceExpression::getType)
.collect(toImmutableList());
context.addDriverFactory(
context.isInputDriver(),
true,
ImmutableList.<OperatorFactory>builder()
.addAll(physicalOperation.getOperatorFactories())
.add(outputOperatorFactory.createOutputOperator(
context.getNextOperatorId(),
plan.getId(),
outputTypes,
pagePreprocessor,
outputPartitioning,
new PagesSerdeFactory(blockEncodingSerde, isExchangeCompressionEnabled(session))))
.build(),
context.getDriverInstanceCount(),
physicalOperation.getPipelineExecutionStrategy());
addLookupOuterDrivers(context);
// notify operator factories that planning has completed
context.getDriverFactories().stream()
.map(DriverFactory::getOperatorFactories)
.flatMap(List::stream)
.filter(LocalPlannerAware.class::isInstance)
.map(LocalPlannerAware.class::cast)
.forEach(LocalPlannerAware::localPlannerComplete);
return new LocalExecutionPlan(context.getDriverFactories(), partitionedSourceOrder, stageExecutionDescriptor);
}
private static void addLookupOuterDrivers(LocalExecutionPlanContext context)
{
        // For an outer join on the lookup side (RIGHT or FULL), add an additional
        // driver to output the unmatched rows from the lookup source
for (DriverFactory factory : context.getDriverFactories()) {
List<OperatorFactory> operatorFactories = factory.getOperatorFactories();
for (int i = 0; i < operatorFactories.size(); i++) {
OperatorFactory operatorFactory = operatorFactories.get(i);
if (!(operatorFactory instanceof JoinOperatorFactory)) {
continue;
}
JoinOperatorFactory lookupJoin = (JoinOperatorFactory) operatorFactory;
Optional<OuterOperatorFactoryResult> outerOperatorFactoryResult = lookupJoin.createOuterOperatorFactory();
if (outerOperatorFactoryResult.isPresent()) {
// Add a new driver to output the unmatched rows in an outer join.
                    // We duplicate all of the factories above the JoinOperator (the ones reading from the join),
// and replace the JoinOperator with the OuterOperator (the one that produces unmatched rows).
ImmutableList.Builder<OperatorFactory> newOperators = ImmutableList.builder();
newOperators.add(outerOperatorFactoryResult.get().getOuterOperatorFactory());
operatorFactories.subList(i + 1, operatorFactories.size()).stream()
.map(OperatorFactory::duplicate)
.forEach(newOperators::add);
context.addDriverFactory(false, factory.isOutputDriver(), newOperators.build(), OptionalInt.of(1), outerOperatorFactoryResult.get().getBuildExecutionStrategy());
}
}
}
}
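    // Mutable per-task planning state. Sub-contexts share the driver factory list and the
    // pipeline id counter with their parent, but keep an independent operator id counter
    // and input-driver flag.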
private static class LocalExecutionPlanContext
{
private final TaskContext taskContext;
private final List<DriverFactory> driverFactories;
private final Optional<IndexSourceContext> indexSourceContext;
// this is shared with all subContexts
private final AtomicInteger nextPipelineId;
private final TableWriteInfo tableWriteInfo;
private int nextOperatorId;
private boolean inputDriver = true;
private OptionalInt driverInstanceCount = OptionalInt.empty();
public LocalExecutionPlanContext(TaskContext taskContext, TableWriteInfo tableWriteInfo)
{
this(taskContext, new ArrayList<>(), Optional.empty(), new AtomicInteger(0), tableWriteInfo);
}
private LocalExecutionPlanContext(
TaskContext taskContext,
List<DriverFactory> driverFactories,
Optional<IndexSourceContext> indexSourceContext,
AtomicInteger nextPipelineId,
TableWriteInfo tableWriteInfo)
{
this.taskContext = taskContext;
this.driverFactories = driverFactories;
this.indexSourceContext = indexSourceContext;
this.nextPipelineId = nextPipelineId;
this.tableWriteInfo = tableWriteInfo;
}
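        // Grouped execution is only legal when the first operator can deliver grouped input:
        // a grouped-capable table scan for input drivers, or a local exchange source / lookup
        // outer operator for intermediate drivers. The checks below enforce that invariant.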
public void addDriverFactory(boolean inputDriver, boolean outputDriver, List<OperatorFactory> operatorFactories, OptionalInt driverInstances, PipelineExecutionStrategy pipelineExecutionStrategy)
{
if (pipelineExecutionStrategy == GROUPED_EXECUTION) {
OperatorFactory firstOperatorFactory = operatorFactories.get(0);
if (inputDriver) {
checkArgument(firstOperatorFactory instanceof ScanFilterAndProjectOperatorFactory || firstOperatorFactory instanceof TableScanOperatorFactory);
}
else {
checkArgument(firstOperatorFactory instanceof LocalExchangeSourceOperatorFactory || firstOperatorFactory instanceof LookupOuterOperatorFactory);
}
}
driverFactories.add(new DriverFactory(getNextPipelineId(), inputDriver, outputDriver, operatorFactories, driverInstances, pipelineExecutionStrategy));
}
private List<DriverFactory> getDriverFactories()
{
return ImmutableList.copyOf(driverFactories);
}
public Session getSession()
{
return taskContext.getSession();
}
public StageExecutionId getStageExecutionId()
{
return taskContext.getTaskId().getStageExecutionId();
}
public Optional<IndexSourceContext> getIndexSourceContext()
{
return indexSourceContext;
}
private int getNextPipelineId()
{
return nextPipelineId.getAndIncrement();
}
private int getNextOperatorId()
{
return nextOperatorId++;
}
private boolean isInputDriver()
{
return inputDriver;
}
private void setInputDriver(boolean inputDriver)
{
this.inputDriver = inputDriver;
}
public TableWriteInfo getTableWriteInfo()
{
return tableWriteInfo;
}
public LocalExecutionPlanContext createSubContext()
{
checkState(!indexSourceContext.isPresent(), "index build plan can not have sub-contexts");
return new LocalExecutionPlanContext(taskContext, driverFactories, indexSourceContext, nextPipelineId, tableWriteInfo);
}
public LocalExecutionPlanContext createIndexSourceSubContext(IndexSourceContext indexSourceContext)
{
return new LocalExecutionPlanContext(taskContext, driverFactories, Optional.of(indexSourceContext), nextPipelineId, tableWriteInfo);
}
public OptionalInt getDriverInstanceCount()
{
return driverInstanceCount;
}
public void setDriverInstanceCount(int driverInstanceCount)
{
checkArgument(driverInstanceCount > 0, "driverInstanceCount must be > 0");
if (this.driverInstanceCount.isPresent()) {
                checkState(this.driverInstanceCount.getAsInt() == driverInstanceCount, "driverInstanceCount already set to " + this.driverInstanceCount.getAsInt());
}
this.driverInstanceCount = OptionalInt.of(driverInstanceCount);
}
}
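    // Carries the mapping from index lookup variables to probe key input channels while the
    // index side of an index join is being planned (see createIndexSourceSubContext above).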
private static class IndexSourceContext
{
private final SetMultimap<VariableReferenceExpression, Integer> indexLookupToProbeInput;
public IndexSourceContext(SetMultimap<VariableReferenceExpression, Integer> indexLookupToProbeInput)
{
this.indexLookupToProbeInput = ImmutableSetMultimap.copyOf(requireNonNull(indexLookupToProbeInput, "indexLookupToProbeInput is null"));
}
private SetMultimap<VariableReferenceExpression, Integer> getIndexLookupToProbeInput()
{
return indexLookupToProbeInput;
}
}
public static class LocalExecutionPlan
{
private final List<DriverFactory> driverFactories;
private final List<PlanNodeId> tableScanSourceOrder;
private final StageExecutionDescriptor stageExecutionDescriptor;
public LocalExecutionPlan(List<DriverFactory> driverFactories, List<PlanNodeId> tableScanSourceOrder, StageExecutionDescriptor stageExecutionDescriptor)
{
this.driverFactories = ImmutableList.copyOf(requireNonNull(driverFactories, "driverFactories is null"));
this.tableScanSourceOrder = ImmutableList.copyOf(requireNonNull(tableScanSourceOrder, "tableScanSourceOrder is null"));
this.stageExecutionDescriptor = requireNonNull(stageExecutionDescriptor, "stageExecutionDescriptor is null");
}
public List<DriverFactory> getDriverFactories()
{
return driverFactories;
}
public List<PlanNodeId> getTableScanSourceOrder()
{
return tableScanSourceOrder;
}
public StageExecutionDescriptor getStageExecutionDescriptor()
{
return stageExecutionDescriptor;
}
}
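    // Translates each plan node into a PhysicalOperation: a chain of operator factories plus
    // the variable-to-channel layout of the pages it produces.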
private class Visitor
extends InternalPlanVisitor<PhysicalOperation, LocalExecutionPlanContext>
{
private final Session session;
private final StageExecutionDescriptor stageExecutionDescriptor;
private final RemoteSourceFactory remoteSourceFactory;
private final boolean pageSinkCommitRequired;
private Visitor(Session session, StageExecutionDescriptor stageExecutionDescriptor, RemoteSourceFactory remoteSourceFactory, boolean pageSinkCommitRequired)
{
this.session = requireNonNull(session, "session is null");
this.stageExecutionDescriptor = requireNonNull(stageExecutionDescriptor, "stageExecutionDescriptor is null");
this.remoteSourceFactory = requireNonNull(remoteSourceFactory, "remoteSourceFactory is null");
this.pageSinkCommitRequired = pageSinkCommitRequired;
}
@Override
public PhysicalOperation visitRemoteSource(RemoteSourceNode node, LocalExecutionPlanContext context)
{
if (node.getOrderingScheme().isPresent()) {
return createMergeSource(node, context);
}
return createRemoteSource(node, context);
}
private PhysicalOperation createMergeSource(RemoteSourceNode node, LocalExecutionPlanContext context)
{
checkArgument(node.getOrderingScheme().isPresent(), "orderingScheme is absent");
// merging remote source must have a single driver
context.setDriverInstanceCount(1);
OrderingScheme orderingScheme = node.getOrderingScheme().get();
ImmutableMap<VariableReferenceExpression, Integer> layout = makeLayout(node);
List<Integer> sortChannels = getChannelsForVariables(orderingScheme.getOrderByVariables(), layout);
List<SortOrder> sortOrder = getOrderingList(orderingScheme);
List<Type> types = getSourceOperatorTypes(node);
ImmutableList<Integer> outputChannels = IntStream.range(0, types.size())
.boxed()
.collect(toImmutableList());
OperatorFactory operatorFactory = remoteSourceFactory.createMergeRemoteSource(
session,
context.getNextOperatorId(),
node.getId(),
types,
outputChannels,
sortChannels,
sortOrder);
return new PhysicalOperation(operatorFactory, makeLayout(node), context, UNGROUPED_EXECUTION);
}
private PhysicalOperation createRemoteSource(RemoteSourceNode node, LocalExecutionPlanContext context)
{
if (node.isEnsureSourceOrdering()) {
context.setDriverInstanceCount(1);
}
else if (!context.getDriverInstanceCount().isPresent()) {
context.setDriverInstanceCount(getTaskConcurrency(session));
}
OperatorFactory operatorFactory = remoteSourceFactory.createRemoteSource(
session,
context.getNextOperatorId(),
node.getId(),
getSourceOperatorTypes(node));
return new PhysicalOperation(operatorFactory, makeLayout(node), context, UNGROUPED_EXECUTION);
}
@Override
public PhysicalOperation visitExplainAnalyze(ExplainAnalyzeNode node, LocalExecutionPlanContext context)
{
ExplainAnalyzeContext analyzeContext = explainAnalyzeContext
.orElseThrow(() -> new IllegalStateException("ExplainAnalyze can only run on coordinator"));
PhysicalOperation source = node.getSource().accept(this, context);
OperatorFactory operatorFactory = new ExplainAnalyzeOperatorFactory(
context.getNextOperatorId(),
node.getId(),
analyzeContext.getQueryPerformanceFetcher(),
metadata.getFunctionManager(),
node.isVerbose());
return new PhysicalOperation(operatorFactory, makeLayout(node), context, source);
}
@Override
public PhysicalOperation visitOutput(OutputNode node, LocalExecutionPlanContext context)
{
return node.getSource().accept(this, context);
}
@Override
public PhysicalOperation visitRowNumber(RowNumberNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
List<Integer> partitionChannels = getChannelsForVariables(node.getPartitionBy(), source.getLayout());
List<Type> partitionTypes = partitionChannels.stream()
.map(channel -> source.getTypes().get(channel))
.collect(toImmutableList());
ImmutableList.Builder<Integer> outputChannels = ImmutableList.builder();
for (int i = 0; i < source.getTypes().size(); i++) {
outputChannels.add(i);
}
// compute the layout of the output from the window operator
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
outputMappings.putAll(source.getLayout());
// row number function goes in the last channel
int channel = source.getTypes().size();
outputMappings.put(node.getRowNumberVariable(), channel);
Optional<Integer> hashChannel = node.getHashVariable().map(variableChannelGetter(source));
OperatorFactory operatorFactory = new RowNumberOperator.RowNumberOperatorFactory(
context.getNextOperatorId(),
node.getId(),
source.getTypes(),
outputChannels.build(),
partitionChannels,
partitionTypes,
node.getMaxRowCountPerPartition(),
hashChannel,
10_000,
joinCompiler);
return new PhysicalOperation(operatorFactory, outputMappings.build(), context, source);
}
@Override
public PhysicalOperation visitTopNRowNumber(TopNRowNumberNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
List<Integer> partitionChannels = getChannelsForVariables(node.getPartitionBy(), source.getLayout());
List<Type> partitionTypes = partitionChannels.stream()
.map(channel -> source.getTypes().get(channel))
.collect(toImmutableList());
List<VariableReferenceExpression> orderByVariables = node.getOrderingScheme().getOrderByVariables();
List<Integer> sortChannels = getChannelsForVariables(orderByVariables, source.getLayout());
List<SortOrder> sortOrder = orderByVariables.stream()
.map(variable -> node.getOrderingScheme().getOrdering(variable))
.collect(toImmutableList());
ImmutableList.Builder<Integer> outputChannels = ImmutableList.builder();
for (int i = 0; i < source.getTypes().size(); i++) {
outputChannels.add(i);
}
// compute the layout of the output from the window operator
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
outputMappings.putAll(source.getLayout());
if (!node.isPartial() || !partitionChannels.isEmpty()) {
// row number function goes in the last channel
int channel = source.getTypes().size();
outputMappings.put(node.getRowNumberVariable(), channel);
}
Optional<Integer> hashChannel = node.getHashVariable().map(variableChannelGetter(source));
OperatorFactory operatorFactory = new TopNRowNumberOperator.TopNRowNumberOperatorFactory(
context.getNextOperatorId(),
node.getId(),
source.getTypes(),
outputChannels.build(),
partitionChannels,
partitionTypes,
sortChannels,
sortOrder,
node.getMaxRowCountPerPartition(),
node.isPartial(),
hashChannel,
1000,
joinCompiler);
return new PhysicalOperation(operatorFactory, makeLayout(node), context, source);
}
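        // Builds one WindowFunctionDefinition per window function, resolving frame bound values
        // to input channels, and appends the function outputs after the source channels.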
@Override
public PhysicalOperation visitWindow(WindowNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
List<Integer> partitionChannels = ImmutableList.copyOf(getChannelsForVariables(node.getPartitionBy(), source.getLayout()));
List<Integer> preGroupedChannels = ImmutableList.copyOf(getChannelsForVariables(node.getPrePartitionedInputs(), source.getLayout()));
List<Integer> sortChannels = ImmutableList.of();
List<SortOrder> sortOrder = ImmutableList.of();
if (node.getOrderingScheme().isPresent()) {
OrderingScheme orderingScheme = node.getOrderingScheme().get();
sortChannels = getChannelsForVariables(orderingScheme.getOrderByVariables(), source.getLayout());
sortOrder = getOrderingList(orderingScheme);
}
ImmutableList.Builder<Integer> outputChannels = ImmutableList.builder();
for (int i = 0; i < source.getTypes().size(); i++) {
outputChannels.add(i);
}
ImmutableList.Builder<WindowFunctionDefinition> windowFunctionsBuilder = ImmutableList.builder();
ImmutableList.Builder<VariableReferenceExpression> windowFunctionOutputVariablesBuilder = ImmutableList.builder();
for (Map.Entry<VariableReferenceExpression, WindowNode.Function> entry : node.getWindowFunctions().entrySet()) {
Optional<Integer> frameStartChannel = Optional.empty();
Optional<Integer> frameEndChannel = Optional.empty();
Frame frame = entry.getValue().getFrame();
if (frame.getStartValue().isPresent()) {
frameStartChannel = Optional.of(source.getLayout().get(frame.getStartValue().get()));
}
if (frame.getEndValue().isPresent()) {
frameEndChannel = Optional.of(source.getLayout().get(frame.getEndValue().get()));
}
FrameInfo frameInfo = new FrameInfo(frame.getType(), frame.getStartType(), frameStartChannel, frame.getEndType(), frameEndChannel);
WindowNode.Function function = entry.getValue();
CallExpression call = function.getFunctionCall();
FunctionHandle functionHandle = function.getFunctionHandle();
ImmutableList.Builder<Integer> arguments = ImmutableList.builder();
for (RowExpression argument : call.getArguments()) {
checkState(argument instanceof VariableReferenceExpression);
arguments.add(source.getLayout().get(argument));
}
VariableReferenceExpression variable = entry.getKey();
FunctionManager functionManager = metadata.getFunctionManager();
WindowFunctionSupplier windowFunctionSupplier = functionManager.getWindowFunctionImplementation(functionHandle);
Type type = metadata.getType(functionManager.getFunctionMetadata(functionHandle).getReturnType());
windowFunctionsBuilder.add(window(windowFunctionSupplier, type, frameInfo, function.isIgnoreNulls(), arguments.build()));
windowFunctionOutputVariablesBuilder.add(variable);
}
List<VariableReferenceExpression> windowFunctionOutputVariables = windowFunctionOutputVariablesBuilder.build();
// compute the layout of the output from the window operator
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
for (VariableReferenceExpression variable : node.getSource().getOutputVariables()) {
outputMappings.put(variable, source.getLayout().get(variable));
}
// window functions go in remaining channels starting after the last channel from the source operator, one per channel
int channel = source.getTypes().size();
for (VariableReferenceExpression variable : windowFunctionOutputVariables) {
outputMappings.put(variable, channel);
channel++;
}
OperatorFactory operatorFactory = new WindowOperatorFactory(
context.getNextOperatorId(),
node.getId(),
source.getTypes(),
outputChannels.build(),
windowFunctionsBuilder.build(),
partitionChannels,
preGroupedChannels,
sortChannels,
sortOrder,
node.getPreSortedOrderPrefix(),
10_000,
pagesIndexFactory);
return new PhysicalOperation(operatorFactory, outputMappings.build(), context, source);
}
@Override
public PhysicalOperation visitTopN(TopNNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
List<VariableReferenceExpression> orderByVariables = node.getOrderingScheme().getOrderByVariables();
List<Integer> sortChannels = new ArrayList<>();
List<SortOrder> sortOrders = new ArrayList<>();
for (VariableReferenceExpression variable : orderByVariables) {
sortChannels.add(source.getLayout().get(variable));
sortOrders.add(node.getOrderingScheme().getOrdering(variable));
}
OperatorFactory operator = new TopNOperatorFactory(
context.getNextOperatorId(),
node.getId(),
source.getTypes(),
(int) node.getCount(),
sortChannels,
sortOrders);
return new PhysicalOperation(operator, source.getLayout(), context, source);
}
@Override
public PhysicalOperation visitSort(SortNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
List<VariableReferenceExpression> orderByVariables = node.getOrderingScheme().getOrderByVariables();
List<Integer> orderByChannels = getChannelsForVariables(orderByVariables, source.getLayout());
ImmutableList.Builder<SortOrder> sortOrder = ImmutableList.builder();
for (VariableReferenceExpression variable : orderByVariables) {
sortOrder.add(node.getOrderingScheme().getOrdering(variable));
}
ImmutableList.Builder<Integer> outputChannels = ImmutableList.builder();
for (int i = 0; i < source.getTypes().size(); i++) {
outputChannels.add(i);
}
boolean spillEnabled = isSpillEnabled(context.getSession());
OperatorFactory operator = new OrderByOperatorFactory(
context.getNextOperatorId(),
node.getId(),
source.getTypes(),
outputChannels.build(),
10_000,
orderByChannels,
sortOrder.build(),
pagesIndexFactory,
spillEnabled,
Optional.of(spillerFactory),
orderingCompiler);
return new PhysicalOperation(operator, source.getLayout(), context, source);
}
@Override
public PhysicalOperation visitLimit(LimitNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
OperatorFactory operatorFactory = new LimitOperatorFactory(context.getNextOperatorId(), node.getId(), node.getCount());
return new PhysicalOperation(operatorFactory, source.getLayout(), context, source);
}
@Override
public PhysicalOperation visitDistinctLimit(DistinctLimitNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
Optional<Integer> hashChannel = node.getHashVariable().map(variableChannelGetter(source));
List<Integer> distinctChannels = getChannelsForVariables(node.getDistinctVariables(), source.getLayout());
OperatorFactory operatorFactory = new DistinctLimitOperatorFactory(
context.getNextOperatorId(),
node.getId(),
source.getTypes(),
distinctChannels,
node.getLimit(),
hashChannel,
joinCompiler);
return new PhysicalOperation(operatorFactory, makeLayout(node), context, source);
}
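        // GroupId re-maps input channels per grouping set: grouping columns come first, then the
        // aggregation arguments, and the synthetic group id occupies the last channel.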
@Override
public PhysicalOperation visitGroupId(GroupIdNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
Map<VariableReferenceExpression, Integer> newLayout = new HashMap<>();
ImmutableList.Builder<Type> outputTypes = ImmutableList.builder();
int outputChannel = 0;
for (VariableReferenceExpression output : node.getGroupingSets().stream().flatMap(Collection::stream).collect(Collectors.toSet())) {
newLayout.put(output, outputChannel++);
outputTypes.add(source.getTypes().get(source.getLayout().get(node.getGroupingColumns().get(output))));
}
Map<VariableReferenceExpression, Integer> argumentMappings = new HashMap<>();
for (VariableReferenceExpression output : node.getAggregationArguments()) {
int inputChannel = source.getLayout().get(output);
newLayout.put(output, outputChannel++);
outputTypes.add(source.getTypes().get(inputChannel));
argumentMappings.put(output, inputChannel);
}
// for every grouping set, create a mapping of all output to input channels (including arguments)
ImmutableList.Builder<Map<Integer, Integer>> mappings = ImmutableList.builder();
for (List<VariableReferenceExpression> groupingSet : node.getGroupingSets()) {
ImmutableMap.Builder<Integer, Integer> setMapping = ImmutableMap.builder();
for (VariableReferenceExpression output : groupingSet) {
setMapping.put(newLayout.get(output), source.getLayout().get(node.getGroupingColumns().get(output)));
}
for (VariableReferenceExpression output : argumentMappings.keySet()) {
setMapping.put(newLayout.get(output), argumentMappings.get(output));
}
mappings.add(setMapping.build());
}
newLayout.put(node.getGroupIdVariable(), outputChannel);
outputTypes.add(BIGINT);
OperatorFactory groupIdOperatorFactory = new GroupIdOperator.GroupIdOperatorFactory(context.getNextOperatorId(),
node.getId(),
outputTypes.build(),
mappings.build());
return new PhysicalOperation(groupIdOperatorFactory, newLayout, context, source);
}
@Override
public PhysicalOperation visitAggregation(AggregationNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
if (node.getGroupingKeys().isEmpty()) {
return planGlobalAggregation(node, source, context);
}
boolean spillEnabled = isSpillEnabled(context.getSession());
DataSize unspillMemoryLimit = getAggregationOperatorUnspillMemoryLimit(context.getSession());
return planGroupByAggregation(node, source, spillEnabled, unspillMemoryLimit, context);
}
@Override
public PhysicalOperation visitMarkDistinct(MarkDistinctNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
List<Integer> channels = getChannelsForVariables(node.getDistinctVariables(), source.getLayout());
Optional<Integer> hashChannel = node.getHashVariable().map(variableChannelGetter(source));
MarkDistinctOperatorFactory operator = new MarkDistinctOperatorFactory(context.getNextOperatorId(), node.getId(), source.getTypes(), channels, hashChannel, joinCompiler);
return new PhysicalOperation(operator, makeLayout(node), context, source);
}
@Override
public PhysicalOperation visitSample(SampleNode node, LocalExecutionPlanContext context)
{
// For system sample, the splits are already filtered out, so no specific action needs to be taken here
if (node.getSampleType() == SampleNode.Type.SYSTEM) {
return node.getSource().accept(this, context);
}
throw new UnsupportedOperationException("not yet implemented: " + node);
}
@Override
public PhysicalOperation visitFilter(FilterNode node, LocalExecutionPlanContext context)
{
PlanNode sourceNode = node.getSource();
RowExpression filterExpression = node.getPredicate();
List<VariableReferenceExpression> outputVariables = node.getOutputVariables();
return visitScanFilterAndProject(context, node.getId(), sourceNode, Optional.of(filterExpression), identityAssignments(outputVariables), outputVariables);
}
@Override
public PhysicalOperation visitProject(ProjectNode node, LocalExecutionPlanContext context)
{
PlanNode sourceNode;
Optional<RowExpression> filterExpression = Optional.empty();
if (node.getSource() instanceof FilterNode) {
FilterNode filterNode = (FilterNode) node.getSource();
sourceNode = filterNode.getSource();
filterExpression = Optional.of(filterNode.getPredicate());
}
else {
sourceNode = node.getSource();
}
return visitScanFilterAndProject(context, node.getId(), sourceNode, filterExpression, node.getAssignments(), node.getOutputVariables());
}
// TODO: This should be refactored, so that there's an optimizer that merges scan-filter-project into a single PlanNode
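        // When the source is a TableScanNode, the scan is fused with filter and projection into
        // a single ScanFilterAndProjectOperator; otherwise the source is planned normally and a
        // FilterAndProjectOperator is appended on top of it.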
private PhysicalOperation visitScanFilterAndProject(
LocalExecutionPlanContext context,
PlanNodeId planNodeId,
PlanNode sourceNode,
Optional<RowExpression> filterExpression,
Assignments assignments,
List<VariableReferenceExpression> outputVariables)
{
// if source is a table scan we fold it directly into the filter and project
// otherwise we plan it as a normal operator
Map<VariableReferenceExpression, Integer> sourceLayout;
TableHandle table = null;
List<ColumnHandle> columns = null;
PhysicalOperation source = null;
if (sourceNode instanceof TableScanNode) {
TableScanNode tableScanNode = (TableScanNode) sourceNode;
Optional<DeleteScanInfo> deleteScanInfo = context.getTableWriteInfo().getDeleteScanInfo();
if (deleteScanInfo.isPresent() && deleteScanInfo.get().getId() == tableScanNode.getId()) {
table = deleteScanInfo.get().getTableHandle();
}
else {
table = tableScanNode.getTable();
}
// extract the column handles and channel to type mapping
sourceLayout = new LinkedHashMap<>();
columns = new ArrayList<>();
int channel = 0;
for (VariableReferenceExpression variable : tableScanNode.getOutputVariables()) {
columns.add(tableScanNode.getAssignments().get(variable));
Integer input = channel;
sourceLayout.put(variable, input);
channel++;
}
}
else {
// plan source
source = sourceNode.accept(this, context);
sourceLayout = source.getLayout();
}
// filterExpression may contain large function calls; evaluate them before compiling.
if (filterExpression.isPresent()) {
// TODO: theoretically, filterExpression could be a constant value (true or false) after optimization; we could possibly optimize the execution.
filterExpression = Optional.of(bindChannels(filterExpression.get(), sourceLayout));
}
// build output mapping
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappingsBuilder = ImmutableMap.builder();
for (int i = 0; i < outputVariables.size(); i++) {
VariableReferenceExpression variable = outputVariables.get(i);
outputMappingsBuilder.put(variable, i);
}
Map<VariableReferenceExpression, Integer> outputMappings = outputMappingsBuilder.build();
// compiler uses inputs instead of variables, so rewrite the expressions first
List<RowExpression> projections = outputVariables.stream()
.map(assignments::get)
.map(expression -> bindChannels(expression, sourceLayout))
.collect(toImmutableList());
try {
if (columns != null) {
Supplier<CursorProcessor> cursorProcessor = expressionCompiler.compileCursorProcessor(session.getSqlFunctionProperties(), filterExpression, projections, sourceNode.getId(), isOptimizeCommonSubExpressions(session));
Supplier<PageProcessor> pageProcessor = expressionCompiler.compilePageProcessor(session.getSqlFunctionProperties(), filterExpression, projections, isOptimizeCommonSubExpressions(session), Optional.of(context.getStageExecutionId() + "_" + planNodeId));
SourceOperatorFactory operatorFactory = new ScanFilterAndProjectOperatorFactory(
context.getNextOperatorId(),
planNodeId,
sourceNode.getId(),
pageSourceProvider,
cursorProcessor,
pageProcessor,
table,
columns,
projections.stream().map(RowExpression::getType).collect(toImmutableList()),
getFilterAndProjectMinOutputPageSize(session),
getFilterAndProjectMinOutputPageRowCount(session));
return new PhysicalOperation(operatorFactory, outputMappings, context, stageExecutionDescriptor.isScanGroupedExecution(sourceNode.getId()) ? GROUPED_EXECUTION : UNGROUPED_EXECUTION);
}
else {
Supplier<PageProcessor> pageProcessor = expressionCompiler.compilePageProcessor(session.getSqlFunctionProperties(), filterExpression, projections, isOptimizeCommonSubExpressions(session), Optional.of(context.getStageExecutionId() + "_" + planNodeId));
OperatorFactory operatorFactory = new FilterAndProjectOperator.FilterAndProjectOperatorFactory(
context.getNextOperatorId(),
planNodeId,
pageProcessor,
projections.stream().map(RowExpression::getType).collect(toImmutableList()),
getFilterAndProjectMinOutputPageSize(session),
getFilterAndProjectMinOutputPageRowCount(session));
return new PhysicalOperation(operatorFactory, outputMappings, context, source);
}
}
catch (PrestoException e) {
throw e;
}
catch (RuntimeException e) {
throw new PrestoException(COMPILER_ERROR, "Compiler failed", e);
}
}
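        // Constant-folds the expression with the row expression interpreter; if anything
        // non-constant remains, rewrites its variable references into input channel references
        // using the given source layout.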
private RowExpression bindChannels(RowExpression expression, Map<VariableReferenceExpression, Integer> sourceLayout)
{
Type type = expression.getType();
Object value = new RowExpressionInterpreter(expression, metadata, session.toConnectorSession(), OPTIMIZED).optimize();
if (value instanceof RowExpression) {
RowExpression optimized = (RowExpression) value;
                // translate variable references in the optimized expression into input channel references
expression = VariableToChannelTranslator.translate(optimized, sourceLayout);
}
else {
expression = constant(value, type);
}
return expression;
}
@Override
public PhysicalOperation visitTableScan(TableScanNode node, LocalExecutionPlanContext context)
{
List<ColumnHandle> columns = new ArrayList<>();
for (VariableReferenceExpression variable : node.getOutputVariables()) {
columns.add(node.getAssignments().get(variable));
}
TableHandle tableHandle;
Optional<DeleteScanInfo> deleteScanInfo = context.getTableWriteInfo().getDeleteScanInfo();
if (deleteScanInfo.isPresent() && deleteScanInfo.get().getId() == node.getId()) {
tableHandle = deleteScanInfo.get().getTableHandle();
}
else {
tableHandle = node.getTable();
}
OperatorFactory operatorFactory = new TableScanOperatorFactory(context.getNextOperatorId(), node.getId(), pageSourceProvider, tableHandle, columns);
return new PhysicalOperation(operatorFactory, makeLayout(node), context, stageExecutionDescriptor.isScanGroupedExecution(node.getId()) ? GROUPED_EXECUTION : UNGROUPED_EXECUTION);
}
@Override
public PhysicalOperation visitValues(ValuesNode node, LocalExecutionPlanContext context)
{
// a values node must have a single driver
context.setDriverInstanceCount(1);
if (node.getRows().isEmpty()) {
OperatorFactory operatorFactory = new ValuesOperatorFactory(context.getNextOperatorId(), node.getId(), ImmutableList.of());
return new PhysicalOperation(operatorFactory, makeLayout(node), context, UNGROUPED_EXECUTION);
}
List<Type> outputTypes = node.getOutputVariables().stream().map(VariableReferenceExpression::getType).collect(toImmutableList());
PageBuilder pageBuilder = new PageBuilder(node.getRows().size(), outputTypes);
for (List<RowExpression> row : node.getRows()) {
pageBuilder.declarePosition();
for (int i = 0; i < row.size(); i++) {
// evaluate the literal value
Object result = rowExpressionInterpreter(row.get(i), metadata, context.getSession().toConnectorSession()).evaluate();
writeNativeValue(outputTypes.get(i), pageBuilder.getBlockBuilder(i), result);
}
}
OperatorFactory operatorFactory = new ValuesOperatorFactory(context.getNextOperatorId(), node.getId(), ImmutableList.of(pageBuilder.build()));
return new PhysicalOperation(operatorFactory, makeLayout(node), context, UNGROUPED_EXECUTION);
}
@Override
public PhysicalOperation visitUnnest(UnnestNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
ImmutableList.Builder<Type> replicateTypes = ImmutableList.builder();
for (VariableReferenceExpression variable : node.getReplicateVariables()) {
replicateTypes.add(variable.getType());
}
List<VariableReferenceExpression> unnestVariables = ImmutableList.copyOf(node.getUnnestVariables().keySet());
ImmutableList.Builder<Type> unnestTypes = ImmutableList.builder();
for (VariableReferenceExpression variable : unnestVariables) {
unnestTypes.add(variable.getType());
}
Optional<VariableReferenceExpression> ordinalityVariable = node.getOrdinalityVariable();
Optional<Type> ordinalityType = ordinalityVariable.map(VariableReferenceExpression::getType);
ordinalityType.ifPresent(type -> checkState(type.equals(BIGINT), "Type of ordinalityVariable must always be BIGINT."));
List<Integer> replicateChannels = getChannelsForVariables(node.getReplicateVariables(), source.getLayout());
List<Integer> unnestChannels = getChannelsForVariables(unnestVariables, source.getLayout());
            // Replicated source channels are laid out first, followed by the unnested variables
            // and the optional ordinality channel
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
int channel = 0;
for (VariableReferenceExpression variable : node.getReplicateVariables()) {
outputMappings.put(variable, channel);
channel++;
}
for (VariableReferenceExpression variable : unnestVariables) {
for (VariableReferenceExpression unnestedVariable : node.getUnnestVariables().get(variable)) {
outputMappings.put(unnestedVariable, channel);
channel++;
}
}
if (ordinalityVariable.isPresent()) {
outputMappings.put(ordinalityVariable.get(), channel);
channel++;
}
OperatorFactory operatorFactory = new UnnestOperatorFactory(
context.getNextOperatorId(),
node.getId(),
replicateChannels,
replicateTypes.build(),
unnestChannels,
unnestTypes.build(),
ordinalityType.isPresent());
return new PhysicalOperation(operatorFactory, outputMappings.build(), context, source);
}
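        // Output layouts assign channels in output-variable order, e.g. outputs [a, b, c]
        // map to channels {a=0, b=1, c=2}.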
private ImmutableMap<VariableReferenceExpression, Integer> makeLayout(PlanNode node)
{
return makeLayoutFromOutputVariables(node.getOutputVariables());
}
private ImmutableMap<VariableReferenceExpression, Integer> makeLayoutFromOutputVariables(List<VariableReferenceExpression> outputVariables)
{
Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
int channel = 0;
for (VariableReferenceExpression variable : outputVariables) {
outputMappings.put(variable, channel);
channel++;
}
return outputMappings.build();
}
@Override
public PhysicalOperation visitIndexSource(IndexSourceNode node, LocalExecutionPlanContext context)
{
checkState(context.getIndexSourceContext().isPresent(), "Must be in an index source context");
IndexSourceContext indexSourceContext = context.getIndexSourceContext().get();
SetMultimap<VariableReferenceExpression, Integer> indexLookupToProbeInput = indexSourceContext.getIndexLookupToProbeInput();
checkState(indexLookupToProbeInput.keySet().equals(node.getLookupVariables()));
// Finalize the variable lookup layout for the index source
List<VariableReferenceExpression> lookupVariableSchema = ImmutableList.copyOf(node.getLookupVariables());
// Identify how to remap the probe key Input to match the source index lookup layout
ImmutableList.Builder<Integer> remappedProbeKeyChannelsBuilder = ImmutableList.builder();
// Identify overlapping fields that can produce the same lookup variable.
// We will filter incoming keys to ensure that overlapping fields will have the same value.
ImmutableList.Builder<Set<Integer>> overlappingFieldSetsBuilder = ImmutableList.builder();
for (VariableReferenceExpression lookupVariable : node.getLookupVariables()) {
Set<Integer> potentialProbeInputs = indexLookupToProbeInput.get(lookupVariable);
checkState(!potentialProbeInputs.isEmpty(), "Must have at least one source from the probe input");
if (potentialProbeInputs.size() > 1) {
                    overlappingFieldSetsBuilder.add(ImmutableSet.copyOf(potentialProbeInputs));
}
remappedProbeKeyChannelsBuilder.add(Iterables.getFirst(potentialProbeInputs, null));
}
List<Set<Integer>> overlappingFieldSets = overlappingFieldSetsBuilder.build();
List<Integer> remappedProbeKeyChannels = remappedProbeKeyChannelsBuilder.build();
Function<RecordSet, RecordSet> probeKeyNormalizer = recordSet -> {
if (!overlappingFieldSets.isEmpty()) {
recordSet = new FieldSetFilteringRecordSet(metadata.getFunctionManager(), recordSet, overlappingFieldSets);
}
return new MappedRecordSet(recordSet, remappedProbeKeyChannels);
};
// Declare the input and output schemas for the index and acquire the actual Index
List<ColumnHandle> lookupSchema = lookupVariableSchema.stream().map(node.getAssignments()::get).collect(toImmutableList());
List<ColumnHandle> outputSchema = node.getAssignments().entrySet().stream()
.filter(entry -> node.getOutputVariables().contains(entry.getKey()))
.map(Map.Entry::getValue)
.collect(toImmutableList());
ConnectorIndex index = indexManager.getIndex(session, node.getIndexHandle(), lookupSchema, outputSchema);
OperatorFactory operatorFactory = new IndexSourceOperator.IndexSourceOperatorFactory(context.getNextOperatorId(), node.getId(), index, probeKeyNormalizer);
return new PhysicalOperation(operatorFactory, makeLayout(node), context, UNGROUPED_EXECUTION);
}
/**
* This method creates a mapping from each index source lookup variable (directly applied to the index)
* to the corresponding probe key Input
*/
private SetMultimap<VariableReferenceExpression, Integer> mapIndexSourceLookupVariableToProbeKeyInput(IndexJoinNode node, Map<VariableReferenceExpression, Integer> probeKeyLayout)
{
Set<VariableReferenceExpression> indexJoinVariables = node.getCriteria().stream()
.map(IndexJoinNode.EquiJoinClause::getIndex)
.collect(toImmutableSet());
// Trace the index join variables to the index source lookup variables
// Map: Index join variable => Index source lookup variable
Map<VariableReferenceExpression, VariableReferenceExpression> indexKeyTrace = IndexJoinOptimizer.IndexKeyTracer.trace(node.getIndexSource(), indexJoinVariables);
// Map the index join variables to the probe key Input
Multimap<VariableReferenceExpression, Integer> indexToProbeKeyInput = HashMultimap.create();
for (IndexJoinNode.EquiJoinClause clause : node.getCriteria()) {
indexToProbeKeyInput.put(clause.getIndex(), probeKeyLayout.get(clause.getProbe()));
}
// Create the mapping from index source look up variable to probe key Input
ImmutableSetMultimap.Builder<VariableReferenceExpression, Integer> builder = ImmutableSetMultimap.builder();
for (Map.Entry<VariableReferenceExpression, VariableReferenceExpression> entry : indexKeyTrace.entrySet()) {
VariableReferenceExpression indexJoinVariable = entry.getKey();
VariableReferenceExpression indexLookupVariable = entry.getValue();
builder.putAll(indexJoinVariable, indexToProbeKeyInput.get(indexLookupVariable));
}
return builder.build();
}
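        // Plans the probe side normally, then plans the index side in a dedicated sub-context.
        // If the index lookup only needs a subset of the probe key channels, the remaining
        // channels are enforced with a DynamicTupleFilterFactory on the index output.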
@Override
public PhysicalOperation visitIndexJoin(IndexJoinNode node, LocalExecutionPlanContext context)
{
List<IndexJoinNode.EquiJoinClause> clauses = node.getCriteria();
List<VariableReferenceExpression> probeVariables = clauses.stream().map(IndexJoinNode.EquiJoinClause::getProbe).collect(toImmutableList());
List<VariableReferenceExpression> indexVariables = clauses.stream().map(IndexJoinNode.EquiJoinClause::getIndex).collect(toImmutableList());
// Plan probe side
PhysicalOperation probeSource = node.getProbeSource().accept(this, context);
List<Integer> probeChannels = getChannelsForVariables(probeVariables, probeSource.getLayout());
OptionalInt probeHashChannel = node.getProbeHashVariable().map(variableChannelGetter(probeSource))
.map(OptionalInt::of).orElse(OptionalInt.empty());
// The probe key channels will be handed to the index according to probeVariable order
Map<VariableReferenceExpression, Integer> probeKeyLayout = new HashMap<>();
for (int i = 0; i < probeVariables.size(); i++) {
                // Duplicate variables can appear, and we only need to take one of the Inputs
probeKeyLayout.put(probeVariables.get(i), i);
}
// Plan the index source side
SetMultimap<VariableReferenceExpression, Integer> indexLookupToProbeInput = mapIndexSourceLookupVariableToProbeKeyInput(node, probeKeyLayout);
LocalExecutionPlanContext indexContext = context.createIndexSourceSubContext(new IndexSourceContext(indexLookupToProbeInput));
PhysicalOperation indexSource = node.getIndexSource().accept(this, indexContext);
List<Integer> indexOutputChannels = getChannelsForVariables(indexVariables, indexSource.getLayout());
OptionalInt indexHashChannel = node.getIndexHashVariable().map(variableChannelGetter(indexSource))
.map(OptionalInt::of).orElse(OptionalInt.empty());
// Identify just the join keys/channels needed for lookup by the index source (does not have to use all of them).
Set<VariableReferenceExpression> indexVariablesNeededBySource = IndexJoinOptimizer.IndexKeyTracer.trace(node.getIndexSource(), ImmutableSet.copyOf(indexVariables)).keySet();
Set<Integer> lookupSourceInputChannels = node.getCriteria().stream()
.filter(equiJoinClause -> indexVariablesNeededBySource.contains(equiJoinClause.getIndex()))
.map(IndexJoinNode.EquiJoinClause::getProbe)
.map(probeKeyLayout::get)
.collect(toImmutableSet());
Optional<DynamicTupleFilterFactory> dynamicTupleFilterFactory = Optional.empty();
if (lookupSourceInputChannels.size() < probeKeyLayout.values().size()) {
int[] nonLookupInputChannels = Ints.toArray(node.getCriteria().stream()
.filter(equiJoinClause -> !indexVariablesNeededBySource.contains(equiJoinClause.getIndex()))
.map(IndexJoinNode.EquiJoinClause::getProbe)
.map(probeKeyLayout::get)
.collect(toImmutableList()));
int[] nonLookupOutputChannels = Ints.toArray(node.getCriteria().stream()
.filter(equiJoinClause -> !indexVariablesNeededBySource.contains(equiJoinClause.getIndex()))
.map(IndexJoinNode.EquiJoinClause::getIndex)
.map(variable -> indexSource.getLayout().get(variable))
.collect(toImmutableList()));
int filterOperatorId = indexContext.getNextOperatorId();
dynamicTupleFilterFactory = Optional.of(new DynamicTupleFilterFactory(
filterOperatorId,
node.getId(),
nonLookupInputChannels,
nonLookupOutputChannels,
indexSource.getTypes(),
session.getSqlFunctionProperties(),
pageFunctionCompiler));
}
IndexBuildDriverFactoryProvider indexBuildDriverFactoryProvider = new IndexBuildDriverFactoryProvider(
indexContext.getNextPipelineId(),
indexContext.getNextOperatorId(),
node.getId(),
indexContext.isInputDriver(),
indexSource.getTypes(),
indexSource.getOperatorFactories(),
dynamicTupleFilterFactory);
IndexLookupSourceFactory indexLookupSourceFactory = new IndexLookupSourceFactory(
lookupSourceInputChannels,
indexOutputChannels,
indexHashChannel,
indexSource.getTypes(),
indexSource.getLayout(),
indexBuildDriverFactoryProvider,
maxIndexMemorySize,
indexJoinLookupStats,
SystemSessionProperties.isShareIndexLoading(session),
pagesIndexFactory,
joinCompiler,
getIndexLoaderTimeout(session));
verify(probeSource.getPipelineExecutionStrategy() == UNGROUPED_EXECUTION);
verify(indexSource.getPipelineExecutionStrategy() == UNGROUPED_EXECUTION);
JoinBridgeManager<LookupSourceFactory> lookupSourceFactoryManager = new JoinBridgeManager<>(
false,
UNGROUPED_EXECUTION,
UNGROUPED_EXECUTION,
lifespan -> indexLookupSourceFactory,
indexLookupSourceFactory.getOutputTypes());
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
outputMappings.putAll(probeSource.getLayout());
// inputs from index side of the join are laid out following the input from the probe side,
// so adjust the channel ids but keep the field layouts intact
int offset = probeSource.getTypes().size();
for (Map.Entry<VariableReferenceExpression, Integer> entry : indexSource.getLayout().entrySet()) {
Integer input = entry.getValue();
outputMappings.put(entry.getKey(), offset + input);
}
OperatorFactory lookupJoinOperatorFactory;
OptionalInt totalOperatorsCount = getJoinOperatorsCountForSpill(context, session);
switch (node.getType()) {
case INNER:
lookupJoinOperatorFactory = lookupJoinOperators.innerJoin(context.getNextOperatorId(), node.getId(), lookupSourceFactoryManager, probeSource.getTypes(), probeChannels, probeHashChannel, Optional.empty(), totalOperatorsCount, partitioningSpillerFactory);
break;
case SOURCE_OUTER:
lookupJoinOperatorFactory = lookupJoinOperators.probeOuterJoin(context.getNextOperatorId(), node.getId(), lookupSourceFactoryManager, probeSource.getTypes(), probeChannels, probeHashChannel, Optional.empty(), totalOperatorsCount, partitioningSpillerFactory);
break;
default:
throw new AssertionError("Unknown type: " + node.getType());
}
return new PhysicalOperation(lookupJoinOperatorFactory, outputMappings.build(), context, probeSource);
}
@Override
public PhysicalOperation visitJoin(JoinNode node, LocalExecutionPlanContext context)
{
if (node.isCrossJoin()) {
return createNestedLoopJoin(node, context);
}
List<JoinNode.EquiJoinClause> clauses = node.getCriteria();
List<VariableReferenceExpression> leftVariables = Lists.transform(clauses, JoinNode.EquiJoinClause::getLeft);
List<VariableReferenceExpression> rightVariables = Lists.transform(clauses, JoinNode.EquiJoinClause::getRight);
switch (node.getType()) {
case INNER:
case LEFT:
case RIGHT:
case FULL:
return createLookupJoin(node, node.getLeft(), leftVariables, node.getLeftHashVariable(), node.getRight(), rightVariables, node.getRightHashVariable(), context);
default:
throw new UnsupportedOperationException("Unsupported join type: " + node.getType());
}
}
@Override
public PhysicalOperation visitSpatialJoin(SpatialJoinNode node, LocalExecutionPlanContext context)
{
RowExpression filterExpression = node.getFilter();
List<CallExpression> spatialFunctions = extractSupportedSpatialFunctions(filterExpression, metadata.getFunctionManager());
for (CallExpression spatialFunction : spatialFunctions) {
Optional<PhysicalOperation> operation = tryCreateSpatialJoin(context, node, removeExpressionFromFilter(filterExpression, spatialFunction), spatialFunction, Optional.empty(), Optional.empty());
if (operation.isPresent()) {
return operation.get();
}
}
List<CallExpression> spatialComparisons = extractSupportedSpatialComparisons(filterExpression, metadata.getFunctionManager());
for (CallExpression spatialComparison : spatialComparisons) {
FunctionMetadata functionMetadata = metadata.getFunctionManager().getFunctionMetadata(spatialComparison.getFunctionHandle());
                checkArgument(functionMetadata.getOperatorType().isPresent() && functionMetadata.getOperatorType().get().isComparisonOperator(), "Expected a comparison operator in spatial comparison");
if (functionMetadata.getOperatorType().get() == OperatorType.LESS_THAN || functionMetadata.getOperatorType().get() == OperatorType.LESS_THAN_OR_EQUAL) {
// ST_Distance(a, b) <= r
RowExpression radius = spatialComparison.getArguments().get(1);
if (radius instanceof VariableReferenceExpression && node.getRight().getOutputVariables().contains(radius)) {
CallExpression spatialFunction = (CallExpression) spatialComparison.getArguments().get(0);
Optional<PhysicalOperation> operation = tryCreateSpatialJoin(
context,
node,
removeExpressionFromFilter(filterExpression, spatialComparison),
spatialFunction,
Optional.of((VariableReferenceExpression) radius),
functionMetadata.getOperatorType());
if (operation.isPresent()) {
return operation.get();
}
}
}
}
throw new VerifyException("No valid spatial relationship found for spatial join");
}
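        // A spatial join is only planned when both arguments of the spatial function are plain
        // variable references, one produced by each side; which side supplies which argument
        // determines the probe/build assignment.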
private Optional<PhysicalOperation> tryCreateSpatialJoin(
LocalExecutionPlanContext context,
SpatialJoinNode node,
Optional<RowExpression> filterExpression,
CallExpression spatialFunction,
Optional<VariableReferenceExpression> radius,
Optional<OperatorType> comparisonOperator)
{
List<RowExpression> arguments = spatialFunction.getArguments();
verify(arguments.size() == 2);
if (!(arguments.get(0) instanceof VariableReferenceExpression) || !(arguments.get(1) instanceof VariableReferenceExpression)) {
return Optional.empty();
}
VariableReferenceExpression firstVariable = (VariableReferenceExpression) arguments.get(0);
VariableReferenceExpression secondVariable = (VariableReferenceExpression) arguments.get(1);
PlanNode probeNode = node.getLeft();
Set<SymbolReference> probeSymbols = getSymbolReferences(probeNode.getOutputVariables());
PlanNode buildNode = node.getRight();
Set<SymbolReference> buildSymbols = getSymbolReferences(buildNode.getOutputVariables());
if (probeSymbols.contains(new SymbolReference(firstVariable.getName())) && buildSymbols.contains(new SymbolReference(secondVariable.getName()))) {
return Optional.of(createSpatialLookupJoin(
node,
probeNode,
firstVariable,
buildNode,
secondVariable,
radius,
spatialTest(spatialFunction, true, comparisonOperator),
filterExpression,
context));
}
else if (probeSymbols.contains(new SymbolReference(secondVariable.getName())) && buildSymbols.contains(new SymbolReference(firstVariable.getName()))) {
return Optional.of(createSpatialLookupJoin(
node,
probeNode,
secondVariable,
buildNode,
firstVariable,
radius,
spatialTest(spatialFunction, false, comparisonOperator),
filterExpression,
context));
}
return Optional.empty();
}
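        // Replaces the handled conjunct with TRUE in the join filter; if the whole filter
        // reduces to TRUE, no residual filter is needed.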
private Optional<RowExpression> removeExpressionFromFilter(RowExpression filter, RowExpression expression)
{
RowExpression updatedJoinFilter = replaceExpression(filter, ImmutableMap.of(expression, TRUE_CONSTANT));
return updatedJoinFilter == TRUE_CONSTANT ? Optional.empty() : Optional.of(updatedJoinFilter);
}
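        // Selects the spatial predicate implementation based on the argument types: all
        // SphericalGeography arguments use the spherical predicates, all geometric arguments
        // use the euclidean predicates, and mixing the two is an internal error.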
private SpatialPredicate spatialTest(CallExpression functionCall, boolean probeFirst, Optional<OperatorType> comparisonOperator)
{
FunctionMetadata functionMetadata = metadata.getFunctionManager().getFunctionMetadata(functionCall.getFunctionHandle());
QualifiedFunctionName functionName = functionMetadata.getName();
List<TypeSignature> argumentTypes = functionMetadata.getArgumentTypes();
Predicate<TypeSignature> isSpherical = (typeSignature)
-> typeSignature.equals(SPHERICAL_GEOGRAPHY_TYPE_SIGNATURE);
if (argumentTypes.stream().allMatch(isSpherical)) {
return sphericalSpatialTest(functionName, comparisonOperator);
}
else if (argumentTypes.stream().noneMatch(isSpherical)) {
return euclideanSpatialTest(functionName, comparisonOperator, probeFirst);
}
else {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Mixing spherical and euclidean geometric types");
}
}
private SpatialPredicate euclideanSpatialTest(QualifiedFunctionName functionName, Optional<OperatorType> comparisonOperator, boolean probeFirst)
{
if (functionName.equals(ST_CONTAINS)) {
if (probeFirst) {
return (buildGeometry, probeGeometry, radius) -> probeGeometry.contains(buildGeometry);
}
else {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.contains(probeGeometry);
}
}
if (functionName.equals(ST_WITHIN)) {
if (probeFirst) {
return (buildGeometry, probeGeometry, radius) -> probeGeometry.within(buildGeometry);
}
else {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.within(probeGeometry);
}
}
if (functionName.equals(ST_CROSSES)) {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.crosses(probeGeometry);
}
if (functionName.equals(ST_EQUALS)) {
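// Note: Equals with a capital E is the Esri OGCGeometry geometry-equality method, not Object.equals.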
return (buildGeometry, probeGeometry, radius) -> buildGeometry.Equals(probeGeometry);
}
if (functionName.equals(ST_INTERSECTS)) {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.intersects(probeGeometry);
}
if (functionName.equals(ST_OVERLAPS)) {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.overlaps(probeGeometry);
}
if (functionName.equals(ST_TOUCHES)) {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.touches(probeGeometry);
}
if (functionName.equals(ST_DISTANCE)) {
if (comparisonOperator.get() == OperatorType.LESS_THAN) {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.distance(probeGeometry) < radius.getAsDouble();
}
else if (comparisonOperator.get() == OperatorType.LESS_THAN_OR_EQUAL) {
return (buildGeometry, probeGeometry, radius) -> buildGeometry.distance(probeGeometry) <= radius.getAsDouble();
}
else {
throw new UnsupportedOperationException("Unsupported comparison operator: " + comparisonOperator);
}
}
throw new UnsupportedOperationException("Unsupported spatial function: " + functionName);
}
private SpatialPredicate sphericalSpatialTest(QualifiedFunctionName functionName, Optional<OperatorType> comparisonOperator)
{
if (functionName.equals(ST_DISTANCE)) {
if (comparisonOperator.get() == OperatorType.LESS_THAN) {
return (buildGeometry, probeGeometry, radius) -> sphericalDistance(buildGeometry, probeGeometry) < radius.getAsDouble();
}
else if (comparisonOperator.get() == OperatorType.LESS_THAN_OR_EQUAL) {
return (buildGeometry, probeGeometry, radius) -> sphericalDistance(buildGeometry, probeGeometry) <= radius.getAsDouble();
}
else {
throw new UnsupportedOperationException("Unsupported spherical comparison operator: " + comparisonOperator);
}
}
throw new UnsupportedOperationException("Unsupported spherical spatial function: " + functionName);
}
private Set<SymbolReference> getSymbolReferences(Collection<VariableReferenceExpression> variables)
{
return variables.stream().map(VariableReferenceExpression::getName).map(SymbolReference::new).collect(toImmutableSet());
}
private PhysicalOperation createNestedLoopJoin(JoinNode node, LocalExecutionPlanContext context)
{
PhysicalOperation probeSource = node.getLeft().accept(this, context);
LocalExecutionPlanContext buildContext = context.createSubContext();
PhysicalOperation buildSource = node.getRight().accept(this, buildContext);
checkState(
buildSource.getPipelineExecutionStrategy() == UNGROUPED_EXECUTION,
"Build source of a nested loop join is expected to be GROUPED_EXECUTION.");
checkArgument(node.getType() == INNER, "NestedLoopJoin is only used for inner join");
JoinBridgeManager<NestedLoopJoinBridge> nestedLoopJoinBridgeManager = new JoinBridgeManager<>(
false,
probeSource.getPipelineExecutionStrategy(),
buildSource.getPipelineExecutionStrategy(),
lifespan -> new NestedLoopJoinPagesSupplier(),
buildSource.getTypes());
NestedLoopBuildOperatorFactory nestedLoopBuildOperatorFactory = new NestedLoopBuildOperatorFactory(
buildContext.getNextOperatorId(),
node.getId(),
nestedLoopJoinBridgeManager);
checkArgument(buildContext.getDriverInstanceCount().orElse(1) == 1, "Expected local execution to not be parallel");
context.addDriverFactory(
buildContext.isInputDriver(),
false,
ImmutableList.<OperatorFactory>builder()
.addAll(buildSource.getOperatorFactories())
.add(nestedLoopBuildOperatorFactory)
.build(),
buildContext.getDriverInstanceCount(),
buildSource.getPipelineExecutionStrategy());
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
outputMappings.putAll(probeSource.getLayout());
// inputs from build side of the join are laid out following the input from the probe side,
// so adjust the channel ids but keep the field layouts intact
int offset = probeSource.getTypes().size();
for (Map.Entry<VariableReferenceExpression, Integer> entry : buildSource.getLayout().entrySet()) {
outputMappings.put(entry.getKey(), offset + entry.getValue());
}
OperatorFactory operatorFactory = new NestedLoopJoinOperatorFactory(context.getNextOperatorId(), node.getId(), nestedLoopJoinBridgeManager);
return new PhysicalOperation(operatorFactory, outputMappings.build(), context, probeSource);
}
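// Worked example (illustrative): if the probe side produces 3 channels and the build side 2,
// the joined layout keeps probe channels 0..2 and remaps build channels 0 and 1 to 3 and 4,
// matching the offset adjustment above.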
private PhysicalOperation createSpatialLookupJoin(
SpatialJoinNode node,
PlanNode probeNode,
VariableReferenceExpression probeVariable,
PlanNode buildNode,
VariableReferenceExpression buildVariable,
Optional<VariableReferenceExpression> radiusVariable,
SpatialPredicate spatialRelationshipTest,
Optional<RowExpression> joinFilter,
LocalExecutionPlanContext context)
{
// Plan probe
PhysicalOperation probeSource = probeNode.accept(this, context);
// Plan build
PagesSpatialIndexFactory pagesSpatialIndexFactory = createPagesSpatialIndexFactory(node,
buildNode,
buildVariable,
radiusVariable,
probeSource.getLayout(),
spatialRelationshipTest,
joinFilter,
context);
OperatorFactory operator = createSpatialLookupJoin(node, probeNode, probeSource, probeVariable, pagesSpatialIndexFactory, context);
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
List<VariableReferenceExpression> outputVariables = node.getOutputVariables();
for (int i = 0; i < outputVariables.size(); i++) {
outputMappings.put(outputVariables.get(i), i);
}
return new PhysicalOperation(operator, outputMappings.build(), context, probeSource);
}
private OperatorFactory createSpatialLookupJoin(SpatialJoinNode node,
PlanNode probeNode,
PhysicalOperation probeSource,
VariableReferenceExpression probeVariable,
PagesSpatialIndexFactory pagesSpatialIndexFactory,
LocalExecutionPlanContext context)
{
List<Type> probeTypes = probeSource.getTypes();
List<VariableReferenceExpression> probeOutputVariables = node.getOutputVariables().stream()
.filter(probeNode.getOutputVariables()::contains)
.collect(toImmutableList());
List<Integer> probeOutputChannels = ImmutableList.copyOf(getChannelsForVariables(probeOutputVariables, probeSource.getLayout()));
Function<VariableReferenceExpression, Integer> probeChannelGetter = variableChannelGetter(probeSource);
int probeChannel = probeChannelGetter.apply(probeVariable);
Optional<Integer> partitionChannel = node.getLeftPartitionVariable().map(probeChannelGetter);
return new SpatialJoinOperatorFactory(
context.getNextOperatorId(),
node.getId(),
node.getType(),
probeTypes,
probeOutputChannels,
probeChannel,
partitionChannel,
pagesSpatialIndexFactory);
}
private PagesSpatialIndexFactory createPagesSpatialIndexFactory(
SpatialJoinNode node,
PlanNode buildNode,
VariableReferenceExpression buildVariable,
Optional<VariableReferenceExpression> radiusVariable,
Map<VariableReferenceExpression, Integer> probeLayout,
SpatialPredicate spatialRelationshipTest,
Optional<RowExpression> joinFilter,
LocalExecutionPlanContext context)
{
LocalExecutionPlanContext buildContext = context.createSubContext();
PhysicalOperation buildSource = buildNode.accept(this, buildContext);
List<VariableReferenceExpression> buildOutputVariables = node.getOutputVariables().stream()
.filter(buildNode.getOutputVariables()::contains)
.collect(toImmutableList());
Map<VariableReferenceExpression, Integer> buildLayout = buildSource.getLayout();
List<Integer> buildOutputChannels = ImmutableList.copyOf(getChannelsForVariables(buildOutputVariables, buildLayout));
Function<VariableReferenceExpression, Integer> buildChannelGetter = variableChannelGetter(buildSource);
Integer buildChannel = buildChannelGetter.apply(buildVariable);
Optional<Integer> radiusChannel = radiusVariable.map(buildChannelGetter::apply);
Optional<JoinFilterFunctionFactory> filterFunctionFactory = joinFilter
.map(filterExpression -> compileJoinFilterFunction(
session.getSqlFunctionProperties(),
filterExpression,
probeLayout,
buildLayout));
Optional<Integer> partitionChannel = node.getRightPartitionVariable().map(buildChannelGetter);
SpatialIndexBuilderOperatorFactory builderOperatorFactory = new SpatialIndexBuilderOperatorFactory(
buildContext.getNextOperatorId(),
node.getId(),
buildSource.getTypes(),
buildOutputChannels,
buildChannel,
radiusChannel,
partitionChannel,
spatialRelationshipTest,
node.getKdbTree(),
filterFunctionFactory,
10_000,
pagesIndexFactory);
context.addDriverFactory(
buildContext.isInputDriver(),
false,
ImmutableList.<OperatorFactory>builder()
.addAll(buildSource.getOperatorFactories())
.add(builderOperatorFactory)
.build(),
buildContext.getDriverInstanceCount(),
buildSource.getPipelineExecutionStrategy());
return builderOperatorFactory.getPagesSpatialIndexFactory();
}
private PhysicalOperation createLookupJoin(JoinNode node,
PlanNode probeNode,
List<VariableReferenceExpression> probeVariables,
Optional<VariableReferenceExpression> probeHashVariable,
PlanNode buildNode,
List<VariableReferenceExpression> buildVariables,
Optional<VariableReferenceExpression> buildHashVariable,
LocalExecutionPlanContext context)
{
// Plan probe
PhysicalOperation probeSource = probeNode.accept(this, context);
// Plan build
JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactory =
createLookupSourceFactory(node, buildNode, buildVariables, buildHashVariable, probeSource, context);
OperatorFactory operator = createLookupJoin(node, probeSource, probeVariables, probeHashVariable, lookupSourceFactory, context);
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
List<VariableReferenceExpression> outputVariables = node.getOutputVariables();
for (int i = 0; i < outputVariables.size(); i++) {
outputMappings.put(outputVariables.get(i), i);
}
return new PhysicalOperation(operator, outputMappings.build(), context, probeSource);
}
private JoinBridgeManager<PartitionedLookupSourceFactory> createLookupSourceFactory(
JoinNode node,
PlanNode buildNode,
List<VariableReferenceExpression> buildVariables,
Optional<VariableReferenceExpression> buildHashVariable,
PhysicalOperation probeSource,
LocalExecutionPlanContext context)
{
// Determine if planning broadcast join
Optional<JoinNode.DistributionType> distributionType = node.getDistributionType();
boolean isBroadcastJoin = distributionType.isPresent() && distributionType.get() == REPLICATED;
LocalExecutionPlanContext buildContext = context.createSubContext();
PhysicalOperation buildSource = buildNode.accept(this, buildContext);
if (buildSource.getPipelineExecutionStrategy() == GROUPED_EXECUTION) {
checkState(
probeSource.getPipelineExecutionStrategy() == GROUPED_EXECUTION,
"Build execution is GROUPED_EXECUTION. Probe execution is expected be GROUPED_EXECUTION, but is UNGROUPED_EXECUTION.");
}
List<VariableReferenceExpression> buildOutputVariables = node.getOutputVariables().stream()
.filter(node.getRight().getOutputVariables()::contains)
.collect(toImmutableList());
List<Integer> buildOutputChannels = ImmutableList.copyOf(getChannelsForVariables(buildOutputVariables, buildSource.getLayout()));
List<Integer> buildChannels = ImmutableList.copyOf(getChannelsForVariables(buildVariables, buildSource.getLayout()));
OptionalInt buildHashChannel = buildHashVariable.map(variableChannelGetter(buildSource))
.map(OptionalInt::of).orElse(OptionalInt.empty());
boolean spillEnabled = isSpillEnabled(context.getSession());
boolean buildOuter = node.getType() == RIGHT || node.getType() == FULL;
int partitionCount = buildContext.getDriverInstanceCount().orElse(1);
Optional<JoinFilterFunctionFactory> filterFunctionFactory = node.getFilter()
.map(filterExpression -> compileJoinFilterFunction(
session.getSqlFunctionProperties(),
filterExpression,
probeSource.getLayout(),
buildSource.getLayout()));
Optional<SortExpressionContext> sortExpressionContext = node.getSortExpressionContext(metadata.getFunctionManager());
Optional<Integer> sortChannel = sortExpressionContext
.map(SortExpressionContext::getSortExpression)
.map(sortExpression -> sortExpressionAsSortChannel(
sortExpression,
probeSource.getLayout(),
buildSource.getLayout()));
List<JoinFilterFunctionFactory> searchFunctionFactories = sortExpressionContext
.map(SortExpressionContext::getSearchExpressions)
.map(searchExpressions -> searchExpressions.stream()
.map(searchExpression -> compileJoinFilterFunction(
session.getSqlFunctionProperties(),
searchExpression,
probeSource.getLayout(),
buildSource.getLayout()))
.collect(toImmutableList()))
.orElse(ImmutableList.of());
ImmutableList<Type> buildOutputTypes = buildOutputChannels.stream()
.map(buildSource.getTypes()::get)
.collect(toImmutableList());
JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactoryManager = new JoinBridgeManager<>(
buildOuter,
probeSource.getPipelineExecutionStrategy(),
buildSource.getPipelineExecutionStrategy(),
lifespan -> new PartitionedLookupSourceFactory(
buildSource.getTypes(),
buildOutputTypes,
buildChannels.stream()
.map(buildSource.getTypes()::get)
.collect(toImmutableList()),
buildContext.getDriverInstanceCount().orElse(1),
buildSource.getLayout(),
buildOuter),
buildOutputTypes);
HashBuilderOperatorFactory hashBuilderOperatorFactory = new HashBuilderOperatorFactory(
buildContext.getNextOperatorId(),
node.getId(),
lookupSourceFactoryManager,
buildOutputChannels,
buildChannels,
buildHashChannel,
filterFunctionFactory,
sortChannel,
searchFunctionFactories,
10_000,
pagesIndexFactory,
spillEnabled && !buildOuter && partitionCount > 1,
singleStreamSpillerFactory,
isBroadcastJoin);
context.addDriverFactory(
buildContext.isInputDriver(),
false,
ImmutableList.<OperatorFactory>builder()
.addAll(buildSource.getOperatorFactories())
.add(hashBuilderOperatorFactory)
.build(),
buildContext.getDriverInstanceCount(),
buildSource.getPipelineExecutionStrategy());
return lookupSourceFactoryManager;
}
private JoinFilterFunctionFactory compileJoinFilterFunction(
SqlFunctionProperties sqlFunctionProperties,
RowExpression filterExpression,
Map<VariableReferenceExpression, Integer> probeLayout,
Map<VariableReferenceExpression, Integer> buildLayout)
{
Map<VariableReferenceExpression, Integer> joinSourcesLayout = createJoinSourcesLayout(buildLayout, probeLayout);
return joinFilterFunctionCompiler.compileJoinFilterFunction(sqlFunctionProperties, bindChannels(filterExpression, joinSourcesLayout), buildLayout.size());
}
private int sortExpressionAsSortChannel(
RowExpression sortExpression,
Map<VariableReferenceExpression, Integer> probeLayout,
Map<VariableReferenceExpression, Integer> buildLayout)
{
Map<VariableReferenceExpression, Integer> joinSourcesLayout = createJoinSourcesLayout(buildLayout, probeLayout);
RowExpression rewrittenSortExpression = bindChannels(sortExpression, joinSourcesLayout);
checkArgument(rewrittenSortExpression instanceof InputReferenceExpression, "Unsupported expression type [%s]", rewrittenSortExpression);
return ((InputReferenceExpression) rewrittenSortExpression).getField();
}
private OperatorFactory createLookupJoin(
JoinNode node,
PhysicalOperation probeSource,
List<VariableReferenceExpression> probeVariables,
Optional<VariableReferenceExpression> probeHashVariable,
JoinBridgeManager<? extends LookupSourceFactory> lookupSourceFactoryManager,
LocalExecutionPlanContext context)
{
List<Type> probeTypes = probeSource.getTypes();
List<VariableReferenceExpression> probeOutputVariables = node.getOutputVariables().stream()
.filter(node.getLeft().getOutputVariables()::contains)
.collect(toImmutableList());
List<Integer> probeOutputChannels = ImmutableList.copyOf(getChannelsForVariables(probeOutputVariables, probeSource.getLayout()));
List<Integer> probeJoinChannels = ImmutableList.copyOf(getChannelsForVariables(probeVariables, probeSource.getLayout()));
OptionalInt probeHashChannel = probeHashVariable.map(variableChannelGetter(probeSource))
.map(OptionalInt::of).orElse(OptionalInt.empty());
OptionalInt totalOperatorsCount = getJoinOperatorsCountForSpill(context, session);
switch (node.getType()) {
case INNER:
return lookupJoinOperators.innerJoin(context.getNextOperatorId(), node.getId(), lookupSourceFactoryManager, probeTypes, probeJoinChannels, probeHashChannel, Optional.of(probeOutputChannels), totalOperatorsCount, partitioningSpillerFactory);
case LEFT:
return lookupJoinOperators.probeOuterJoin(context.getNextOperatorId(), node.getId(), lookupSourceFactoryManager, probeTypes, probeJoinChannels, probeHashChannel, Optional.of(probeOutputChannels), totalOperatorsCount, partitioningSpillerFactory);
case RIGHT:
return lookupJoinOperators.lookupOuterJoin(context.getNextOperatorId(), node.getId(), lookupSourceFactoryManager, probeTypes, probeJoinChannels, probeHashChannel, Optional.of(probeOutputChannels), totalOperatorsCount, partitioningSpillerFactory);
case FULL:
return lookupJoinOperators.fullOuterJoin(context.getNextOperatorId(), node.getId(), lookupSourceFactoryManager, probeTypes, probeJoinChannels, probeHashChannel, Optional.of(probeOutputChannels), totalOperatorsCount, partitioningSpillerFactory);
default:
throw new UnsupportedOperationException("Unsupported join type: " + node.getType());
}
}
private OptionalInt getJoinOperatorsCountForSpill(LocalExecutionPlanContext context, Session session)
{
OptionalInt driverInstanceCount = context.getDriverInstanceCount();
if (isSpillEnabled(session)) {
checkState(driverInstanceCount.isPresent(), "A fixed distribution is required for JOIN when spilling is enabled");
}
return driverInstanceCount;
}
private Map<VariableReferenceExpression, Integer> createJoinSourcesLayout(Map<VariableReferenceExpression, Integer> lookupSourceLayout, Map<VariableReferenceExpression, Integer> probeSourceLayout)
{
Builder<VariableReferenceExpression, Integer> joinSourcesLayout = ImmutableMap.builder();
joinSourcesLayout.putAll(lookupSourceLayout);
for (Map.Entry<VariableReferenceExpression, Integer> probeLayoutEntry : probeSourceLayout.entrySet()) {
joinSourcesLayout.put(probeLayoutEntry.getKey(), probeLayoutEntry.getValue() + lookupSourceLayout.size());
}
return joinSourcesLayout.build();
}
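// Worked example (illustrative): for a build (lookup-source) layout {b0 -> 0, b1 -> 1} and a probe
// layout {p0 -> 0}, the combined join-sources layout is {b0 -> 0, b1 -> 1, p0 -> 2}: build channels
// come first and probe channels are shifted by the build layout size.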
@Override
public PhysicalOperation visitSemiJoin(SemiJoinNode node, LocalExecutionPlanContext context)
{
// Plan probe
PhysicalOperation probeSource = node.getSource().accept(this, context);
// Plan build
LocalExecutionPlanContext buildContext = context.createSubContext();
PhysicalOperation buildSource = node.getFilteringSource().accept(this, buildContext);
checkState(buildSource.getPipelineExecutionStrategy() == probeSource.getPipelineExecutionStrategy(), "build and probe have different pipelineExecutionStrategy");
checkArgument(buildContext.getDriverInstanceCount().orElse(1) == 1, "Expected local execution to not be parallel");
int probeChannel = probeSource.getLayout().get(node.getSourceJoinVariable());
int buildChannel = buildSource.getLayout().get(node.getFilteringSourceJoinVariable());
Optional<Integer> buildHashChannel = node.getFilteringSourceHashVariable().map(variableChannelGetter(buildSource));
SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory(
buildContext.getNextOperatorId(),
node.getId(),
buildSource.getTypes().get(buildChannel),
buildChannel,
buildHashChannel,
10_000,
joinCompiler);
SetSupplier setProvider = setBuilderOperatorFactory.getSetProvider();
context.addDriverFactory(
buildContext.isInputDriver(),
false,
ImmutableList.<OperatorFactory>builder()
.addAll(buildSource.getOperatorFactories())
.add(setBuilderOperatorFactory)
.build(),
buildContext.getDriverInstanceCount(),
buildSource.getPipelineExecutionStrategy());
// Source channels are always laid out first, followed by the boolean output variable
Map<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.<VariableReferenceExpression, Integer>builder()
.putAll(probeSource.getLayout())
.put(node.getSemiJoinOutput(), probeSource.getLayout().size())
.build();
HashSemiJoinOperatorFactory operator = new HashSemiJoinOperatorFactory(context.getNextOperatorId(), node.getId(), setProvider, probeSource.getTypes(), probeChannel);
return new PhysicalOperation(operator, outputMappings, context, probeSource);
}
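// Illustrative layout example: with a probe layout {a -> 0, b -> 1}, the semi-join output variable
// is appended as channel 2, matching the "source channels first, boolean output last" rule above.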
@Override
public PhysicalOperation visitTableWriter(TableWriterNode node, LocalExecutionPlanContext context)
{
// Set table writer count
if (node.getTablePartitioningScheme().isPresent()) {
context.setDriverInstanceCount(getTaskPartitionedWriterCount(session));
}
else {
context.setDriverInstanceCount(getTaskWriterCount(session));
}
// serialize writes by forcing data through a single writer
PhysicalOperation source = node.getSource().accept(this, context);
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMapping = ImmutableMap.builder();
outputMapping.put(node.getRowCountVariable(), ROW_COUNT_CHANNEL);
outputMapping.put(node.getFragmentVariable(), FRAGMENT_CHANNEL);
outputMapping.put(node.getTableCommitContextVariable(), CONTEXT_CHANNEL);
OperatorFactory statisticsAggregation = node.getStatisticsAggregation().map(aggregation -> {
List<VariableReferenceExpression> groupingVariables = aggregation.getGroupingVariables();
if (groupingVariables.isEmpty()) {
return createAggregationOperatorFactory(
node.getId(),
aggregation.getAggregations(),
PARTIAL,
STATS_START_CHANNEL,
outputMapping,
source,
context,
true);
}
return createHashAggregationOperatorFactory(
node.getId(),
aggregation.getAggregations(),
ImmutableSet.of(),
groupingVariables,
PARTIAL,
Optional.empty(),
Optional.empty(),
source,
false,
false,
false,
new DataSize(0, BYTE),
context,
STATS_START_CHANNEL,
outputMapping,
200,
// This aggregation must behave as INTERMEDIATE.
// Using INTERMEDIATE aggregation directly
// is not possible, as it doesn't accept raw input data.
// Disabling partial pre-aggregation memory limit effectively
// turns PARTIAL aggregation into INTERMEDIATE.
Optional.empty(),
true);
}).orElse(new DevNullOperatorFactory(context.getNextOperatorId(), node.getId()));
List<Integer> inputChannels = node.getColumns().stream()
.map(source::variableToChannel)
.collect(toImmutableList());
OperatorFactory operatorFactory = new TableWriterOperatorFactory(
context.getNextOperatorId(),
node.getId(),
pageSinkManager,
context.getTableWriteInfo().getWriterTarget().orElseThrow(() -> new VerifyException("writerTarget is absent")),
inputChannels,
session,
statisticsAggregation,
getVariableTypes(node.getOutputVariables()),
tableCommitContextCodec,
getPageSinkCommitStrategy());
return new PhysicalOperation(operatorFactory, outputMapping.build(), context, source);
}
private PageSinkCommitStrategy getPageSinkCommitStrategy()
{
if (stageExecutionDescriptor.isRecoverableGroupedExecution()) {
return LIFESPAN_COMMIT;
}
if (pageSinkCommitRequired) {
return TASK_COMMIT;
}
return NO_COMMIT;
}
@Override
public PhysicalOperation visitStatisticsWriterNode(StatisticsWriterNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
StatisticAggregationsDescriptor<Integer> descriptor = node.getDescriptor().map(source.getLayout()::get);
AnalyzeTableHandle analyzeTableHandle = context.getTableWriteInfo().getAnalyzeTableHandle().orElseThrow(() -> new VerifyException("analyzeTableHandle is absent"));
OperatorFactory operatorFactory = new StatisticsWriterOperatorFactory(
context.getNextOperatorId(),
node.getId(),
computedStatistics -> metadata.finishStatisticsCollection(session, analyzeTableHandle, computedStatistics),
node.isRowCountEnabled(),
descriptor);
return new PhysicalOperation(operatorFactory, makeLayout(node), context, source);
}
@Override
public PhysicalOperation visitTableWriteMerge(TableWriterMergeNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMapping = ImmutableMap.builder();
outputMapping.put(node.getRowCountVariable(), ROW_COUNT_CHANNEL);
outputMapping.put(node.getFragmentVariable(), FRAGMENT_CHANNEL);
outputMapping.put(node.getTableCommitContextVariable(), CONTEXT_CHANNEL);
OperatorFactory statisticsAggregation = node.getStatisticsAggregation().map(aggregation -> {
List<VariableReferenceExpression> groupingVariables = aggregation.getGroupingVariables();
if (groupingVariables.isEmpty()) {
return createAggregationOperatorFactory(
node.getId(),
aggregation.getAggregations(),
INTERMEDIATE,
STATS_START_CHANNEL,
outputMapping,
source,
context,
true);
}
return createHashAggregationOperatorFactory(
node.getId(),
aggregation.getAggregations(),
ImmutableSet.of(),
groupingVariables,
INTERMEDIATE,
Optional.empty(),
Optional.empty(),
source,
false,
false,
false,
new DataSize(0, BYTE),
context,
STATS_START_CHANNEL,
outputMapping,
200,
Optional.empty(),
true);
}).orElse(new DevNullOperatorFactory(context.getNextOperatorId(), node.getId()));
OperatorFactory operatorFactory = new TableWriterMergeOperatorFactory(
context.getNextOperatorId(),
node.getId(),
statisticsAggregation,
tableCommitContextCodec,
session,
getVariableTypes(node.getOutputVariables()));
return new PhysicalOperation(operatorFactory, outputMapping.build(), context, source);
}
@Override
public PhysicalOperation visitTableFinish(TableFinishNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMapping = ImmutableMap.builder();
OperatorFactory statisticsAggregation = node.getStatisticsAggregation().map(aggregation -> {
List<VariableReferenceExpression> groupingVariables = aggregation.getGroupingVariables();
if (groupingVariables.isEmpty()) {
return createAggregationOperatorFactory(
node.getId(),
aggregation.getAggregations(),
FINAL,
0,
outputMapping,
source,
context,
true);
}
return createHashAggregationOperatorFactory(
node.getId(),
aggregation.getAggregations(),
ImmutableSet.of(),
groupingVariables,
FINAL,
Optional.empty(),
Optional.empty(),
source,
false,
false,
false,
new DataSize(0, BYTE),
context,
0,
outputMapping,
200,
// final aggregation ignores partial pre-aggregation memory limit
Optional.empty(),
true);
}).orElse(new DevNullOperatorFactory(context.getNextOperatorId(), node.getId()));
Map<VariableReferenceExpression, Integer> aggregationOutput = outputMapping.build();
StatisticAggregationsDescriptor<Integer> descriptor = node.getStatisticsAggregationDescriptor()
.map(desc -> desc.map(aggregationOutput::get))
.orElse(StatisticAggregationsDescriptor.empty());
ExecutionWriterTarget writerTarget = context.getTableWriteInfo().getWriterTarget().orElseThrow(() -> new VerifyException("writerTarget is absent"));
OperatorFactory operatorFactory = new TableFinishOperatorFactory(
context.getNextOperatorId(),
node.getId(),
createTableFinisher(session, metadata, writerTarget),
createPageSinkCommitter(session, metadata, writerTarget),
statisticsAggregation,
descriptor,
session,
tableCommitContextCodec);
Map<VariableReferenceExpression, Integer> layout = ImmutableMap.of(node.getOutputVariables().get(0), 0);
return new PhysicalOperation(operatorFactory, layout, context, source);
}
@Override
public PhysicalOperation visitDelete(DeleteNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
OperatorFactory operatorFactory = new DeleteOperatorFactory(context.getNextOperatorId(), node.getId(), source.getLayout().get(node.getRowId()), tableCommitContextCodec);
Map<VariableReferenceExpression, Integer> layout = ImmutableMap.<VariableReferenceExpression, Integer>builder()
.put(node.getOutputVariables().get(0), 0)
.put(node.getOutputVariables().get(1), 1)
.build();
return new PhysicalOperation(operatorFactory, layout, context, source);
}
@Override
public PhysicalOperation visitMetadataDelete(MetadataDeleteNode node, LocalExecutionPlanContext context)
{
OperatorFactory operatorFactory = new MetadataDeleteOperatorFactory(context.getNextOperatorId(), node.getId(), metadata, session, node.getTableHandle());
return new PhysicalOperation(operatorFactory, makeLayout(node), context, UNGROUPED_EXECUTION);
}
@Override
public PhysicalOperation visitUnion(UnionNode node, LocalExecutionPlanContext context)
{
throw new UnsupportedOperationException("Union node should not be present in a local execution plan");
}
@Override
public PhysicalOperation visitEnforceSingleRow(EnforceSingleRowNode node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
OperatorFactory operatorFactory = new EnforceSingleRowOperator.EnforceSingleRowOperatorFactory(context.getNextOperatorId(), node.getId());
return new PhysicalOperation(operatorFactory, makeLayout(node), context, source);
}
@Override
public PhysicalOperation visitAssignUniqueId(AssignUniqueId node, LocalExecutionPlanContext context)
{
PhysicalOperation source = node.getSource().accept(this, context);
OperatorFactory operatorFactory = new AssignUniqueIdOperator.AssignUniqueIdOperatorFactory(
context.getNextOperatorId(),
node.getId());
return new PhysicalOperation(operatorFactory, makeLayout(node), context, source);
}
@Override
public PhysicalOperation visitExchange(ExchangeNode node, LocalExecutionPlanContext context)
{
checkArgument(node.getScope().isLocal(), "Only local exchanges are supported in the local planner");
if (node.getOrderingScheme().isPresent()) {
return createLocalMerge(node, context);
}
return createLocalExchange(node, context);
}
private PhysicalOperation createLocalMerge(ExchangeNode node, LocalExecutionPlanContext context)
{
checkArgument(node.getOrderingScheme().isPresent(), "orderingScheme is absent");
checkState(node.getSources().size() == 1, "single source is expected");
// local merge source must have a single driver
context.setDriverInstanceCount(1);
PlanNode sourceNode = getOnlyElement(node.getSources());
LocalExecutionPlanContext subContext = context.createSubContext();
PhysicalOperation source = sourceNode.accept(this, subContext);
int operatorsCount = subContext.getDriverInstanceCount().orElse(1);
List<Type> types = getSourceOperatorTypes(node);
LocalExchangeFactory exchangeFactory = new LocalExchangeFactory(
partitioningProviderManager,
session,
node.getPartitioningScheme().getPartitioning().getHandle(),
operatorsCount,
types,
ImmutableList.of(),
Optional.empty(),
source.getPipelineExecutionStrategy(),
maxLocalExchangeBufferSize);
List<OperatorFactory> operatorFactories = new ArrayList<>(source.getOperatorFactories());
List<VariableReferenceExpression> expectedLayout = node.getInputs().get(0);
Function<Page, Page> pagePreprocessor = enforceLayoutProcessor(expectedLayout, source.getLayout());
operatorFactories.add(new LocalExchangeSinkOperatorFactory(
exchangeFactory,
subContext.getNextOperatorId(),
node.getId(),
exchangeFactory.newSinkFactoryId(),
pagePreprocessor));
context.addDriverFactory(subContext.isInputDriver(), false, operatorFactories, subContext.getDriverInstanceCount(), source.getPipelineExecutionStrategy());
// the main driver is not an input... the exchange sources are the input for the plan
context.setInputDriver(false);
OrderingScheme orderingScheme = node.getOrderingScheme().get();
ImmutableMap<VariableReferenceExpression, Integer> layout = makeLayout(node);
List<Integer> sortChannels = getChannelsForVariables(orderingScheme.getOrderByVariables(), layout);
List<SortOrder> orderings = getOrderingList(orderingScheme);
OperatorFactory operatorFactory = new LocalMergeSourceOperatorFactory(
context.getNextOperatorId(),
node.getId(),
exchangeFactory,
types,
orderingCompiler,
sortChannels,
orderings);
return new PhysicalOperation(operatorFactory, layout, context, UNGROUPED_EXECUTION);
}
private PhysicalOperation createLocalExchange(ExchangeNode node, LocalExecutionPlanContext context)
{
int driverInstanceCount;
if (node.getType() == ExchangeNode.Type.GATHER) {
driverInstanceCount = 1;
context.setDriverInstanceCount(1);
}
else if (context.getDriverInstanceCount().isPresent()) {
driverInstanceCount = context.getDriverInstanceCount().getAsInt();
}
else {
driverInstanceCount = getTaskConcurrency(session);
context.setDriverInstanceCount(driverInstanceCount);
}
List<Type> types = getSourceOperatorTypes(node);
List<Integer> channels = node.getPartitioningScheme().getPartitioning().getArguments().stream()
.map(argument -> {
checkArgument(argument instanceof VariableReferenceExpression, format("Expected VariableReferenceExpression but got %s", argument));
return node.getOutputVariables().indexOf(argument);
})
.collect(toImmutableList());
Optional<Integer> hashChannel = node.getPartitioningScheme().getHashColumn()
.map(variable -> node.getOutputVariables().indexOf(variable));
PipelineExecutionStrategy exchangeSourcePipelineExecutionStrategy = GROUPED_EXECUTION;
List<DriverFactoryParameters> driverFactoryParametersList = new ArrayList<>();
for (int i = 0; i < node.getSources().size(); i++) {
PlanNode sourceNode = node.getSources().get(i);
LocalExecutionPlanContext subContext = context.createSubContext();
PhysicalOperation source = sourceNode.accept(this, subContext);
driverFactoryParametersList.add(new DriverFactoryParameters(subContext, source));
if (source.getPipelineExecutionStrategy() == UNGROUPED_EXECUTION) {
exchangeSourcePipelineExecutionStrategy = UNGROUPED_EXECUTION;
}
}
LocalExchangeFactory localExchangeFactory = new LocalExchangeFactory(
partitioningProviderManager,
session,
node.getPartitioningScheme().getPartitioning().getHandle(),
driverInstanceCount,
types,
channels,
hashChannel,
exchangeSourcePipelineExecutionStrategy,
maxLocalExchangeBufferSize);
for (int i = 0; i < node.getSources().size(); i++) {
DriverFactoryParameters driverFactoryParameters = driverFactoryParametersList.get(i);
PhysicalOperation source = driverFactoryParameters.getSource();
LocalExecutionPlanContext subContext = driverFactoryParameters.getSubContext();
List<VariableReferenceExpression> expectedLayout = node.getInputs().get(i);
Function<Page, Page> pagePreprocessor = enforceLayoutProcessor(expectedLayout, source.getLayout());
List<OperatorFactory> operatorFactories = new ArrayList<>(source.getOperatorFactories());
operatorFactories.add(new LocalExchangeSinkOperatorFactory(
localExchangeFactory,
subContext.getNextOperatorId(),
node.getId(),
localExchangeFactory.newSinkFactoryId(),
pagePreprocessor));
context.addDriverFactory(
subContext.isInputDriver(),
false,
operatorFactories,
subContext.getDriverInstanceCount(),
exchangeSourcePipelineExecutionStrategy);
}
// the main driver is not an input... the exchange sources are the input for the plan
context.setInputDriver(false);
// instance count must match the number of partitions in the exchange
verify(context.getDriverInstanceCount().getAsInt() == localExchangeFactory.getBufferCount(),
"driver instance count must match the number of exchange partitions");
return new PhysicalOperation(new LocalExchangeSourceOperatorFactory(context.getNextOperatorId(), node.getId(), localExchangeFactory), makeLayout(node), context, exchangeSourcePipelineExecutionStrategy);
}
@Override
public PhysicalOperation visitPlan(PlanNode node, LocalExecutionPlanContext context)
{
throw new UnsupportedOperationException("not yet implemented");
}
private List<Type> getSourceOperatorTypes(PlanNode node)
{
return getVariableTypes(node.getOutputVariables());
}
private List<Type> getVariableTypes(List<VariableReferenceExpression> variables)
{
return variables.stream()
.map(VariableReferenceExpression::getType)
.collect(toImmutableList());
}
private AccumulatorFactory buildAccumulatorFactory(
PhysicalOperation source,
Aggregation aggregation,
boolean spillEnabled)
{
FunctionManager functionManager = metadata.getFunctionManager();
InternalAggregationFunction internalAggregationFunction = functionManager.getAggregateFunctionImplementation(aggregation.getFunctionHandle());
List<Integer> valueChannels = new ArrayList<>();
for (RowExpression argument : aggregation.getArguments()) {
if (!(argument instanceof LambdaDefinitionExpression)) {
checkArgument(argument instanceof VariableReferenceExpression, "argument must be variable reference");
valueChannels.add(source.getLayout().get(argument));
}
}
List<LambdaProvider> lambdaProviders = new ArrayList<>();
List<LambdaDefinitionExpression> lambdas = aggregation.getArguments().stream()
.filter(LambdaDefinitionExpression.class::isInstance)
.map(LambdaDefinitionExpression.class::cast)
.collect(toImmutableList());
for (int i = 0; i < lambdas.size(); i++) {
List<Class> lambdaInterfaces = internalAggregationFunction.getLambdaInterfaces();
Class<? extends LambdaProvider> lambdaProviderClass = compileLambdaProvider(lambdas.get(i), metadata, session.getSqlFunctionProperties(), lambdaInterfaces.get(i));
try {
lambdaProviders.add((LambdaProvider) constructorMethodHandle(lambdaProviderClass, SqlFunctionProperties.class).invoke(session.getSqlFunctionProperties()));
}
catch (Throwable t) {
throw new RuntimeException(t);
}
}
Optional<Integer> maskChannel = aggregation.getMask().map(value -> source.getLayout().get(value));
List<SortOrder> sortOrders = ImmutableList.of();
List<VariableReferenceExpression> sortKeys = ImmutableList.of();
if (aggregation.getOrderBy().isPresent()) {
OrderingScheme orderBy = aggregation.getOrderBy().get();
sortKeys = orderBy.getOrderByVariables();
sortOrders = getOrderingList(orderBy);
}
return internalAggregationFunction.bind(
valueChannels,
maskChannel,
source.getTypes(),
getChannelsForVariables(sortKeys, source.getLayout()),
sortOrders,
pagesIndexFactory,
aggregation.isDistinct(),
joinCompiler,
lambdaProviders,
spillEnabled,
session);
}
private PhysicalOperation planGlobalAggregation(AggregationNode node, PhysicalOperation source, LocalExecutionPlanContext context)
{
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings = ImmutableMap.builder();
AggregationOperatorFactory operatorFactory = createAggregationOperatorFactory(
node.getId(),
node.getAggregations(),
node.getStep(),
0,
outputMappings,
source,
context,
node.getStep().isOutputPartial());
return new PhysicalOperation(operatorFactory, outputMappings.build(), context, source);
}
private AggregationOperatorFactory createAggregationOperatorFactory(
PlanNodeId planNodeId,
Map<VariableReferenceExpression, Aggregation> aggregations,
Step step,
int startOutputChannel,
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings,
PhysicalOperation source,
LocalExecutionPlanContext context,
boolean useSystemMemory)
{
int outputChannel = startOutputChannel;
ImmutableList.Builder<AccumulatorFactory> accumulatorFactories = ImmutableList.builder();
for (Map.Entry<VariableReferenceExpression, Aggregation> entry : aggregations.entrySet()) {
VariableReferenceExpression variable = entry.getKey();
Aggregation aggregation = entry.getValue();
accumulatorFactories.add(buildAccumulatorFactory(source, aggregation, false));
outputMappings.put(variable, outputChannel); // one aggregation per channel
outputChannel++;
}
return new AggregationOperatorFactory(context.getNextOperatorId(), planNodeId, step, accumulatorFactories.build(), useSystemMemory);
}
private PhysicalOperation planGroupByAggregation(
AggregationNode node,
PhysicalOperation source,
boolean spillEnabled,
DataSize unspillMemoryLimit,
LocalExecutionPlanContext context)
{
ImmutableMap.Builder<VariableReferenceExpression, Integer> mappings = ImmutableMap.builder();
OperatorFactory operatorFactory = createHashAggregationOperatorFactory(
node.getId(),
node.getAggregations(),
node.getGlobalGroupingSets(),
node.getGroupingKeys(),
node.getStep(),
node.getHashVariable(),
node.getGroupIdVariable(),
source,
node.hasDefaultOutput(),
spillEnabled,
node.isStreamable(),
unspillMemoryLimit,
context,
0,
mappings,
10_000,
Optional.of(maxPartialAggregationMemorySize),
node.getStep().isOutputPartial());
return new PhysicalOperation(operatorFactory, mappings.build(), context, source);
}
private OperatorFactory createHashAggregationOperatorFactory(
PlanNodeId planNodeId,
Map<VariableReferenceExpression, Aggregation> aggregations,
Set<Integer> globalGroupingSets,
List<VariableReferenceExpression> groupbyVariables,
Step step,
Optional<VariableReferenceExpression> hashVariable,
Optional<VariableReferenceExpression> groupIdVariable,
PhysicalOperation source,
boolean hasDefaultOutput,
boolean spillEnabled,
boolean isStreamable,
DataSize unspillMemoryLimit,
LocalExecutionPlanContext context,
int startOutputChannel,
ImmutableMap.Builder<VariableReferenceExpression, Integer> outputMappings,
int expectedGroups,
Optional<DataSize> maxPartialAggregationMemorySize,
boolean useSystemMemory)
{
List<VariableReferenceExpression> aggregationOutputVariables = new ArrayList<>();
List<AccumulatorFactory> accumulatorFactories = new ArrayList<>();
for (Map.Entry<VariableReferenceExpression, Aggregation> entry : aggregations.entrySet()) {
VariableReferenceExpression variable = entry.getKey();
Aggregation aggregation = entry.getValue();
accumulatorFactories.add(buildAccumulatorFactory(source, aggregation, !isStreamable && spillEnabled));
aggregationOutputVariables.add(variable);
}
// add group-by key fields each in a separate channel
int channel = startOutputChannel;
Optional<Integer> groupIdChannel = Optional.empty();
for (VariableReferenceExpression variable : groupbyVariables) {
outputMappings.put(variable, channel);
if (groupIdVariable.isPresent() && groupIdVariable.get().equals(variable)) {
groupIdChannel = Optional.of(channel);
}
channel++;
}
// hashChannel follows the group by channels
if (hashVariable.isPresent()) {
outputMappings.put(hashVariable.get(), channel++);
}
// aggregations go in following channels
for (VariableReferenceExpression variable : aggregationOutputVariables) {
outputMappings.put(variable, channel);
channel++;
}
List<Integer> groupByChannels = getChannelsForVariables(groupbyVariables, source.getLayout());
List<Type> groupByTypes = groupByChannels.stream()
.map(entry -> source.getTypes().get(entry))
.collect(toImmutableList());
if (isStreamable) {
return new StreamingAggregationOperatorFactory(
context.getNextOperatorId(),
planNodeId,
source.getTypes(),
groupByTypes,
groupByChannels,
step,
accumulatorFactories,
joinCompiler);
}
else {
Optional<Integer> hashChannel = hashVariable.map(variableChannelGetter(source));
return new HashAggregationOperatorFactory(
context.getNextOperatorId(),
planNodeId,
groupByTypes,
groupByChannels,
ImmutableList.copyOf(globalGroupingSets),
step,
hasDefaultOutput,
accumulatorFactories,
hashChannel,
groupIdChannel,
expectedGroups,
maxPartialAggregationMemorySize,
spillEnabled,
unspillMemoryLimit,
spillerFactory,
joinCompiler,
useSystemMemory);
}
}
}
private static TableFinisher createTableFinisher(Session session, Metadata metadata, ExecutionWriterTarget target)
{
return (fragments, statistics) -> {
if (target instanceof CreateHandle) {
return metadata.finishCreateTable(session, ((CreateHandle) target).getHandle(), fragments, statistics);
}
else if (target instanceof InsertHandle) {
return metadata.finishInsert(session, ((InsertHandle) target).getHandle(), fragments, statistics);
}
else if (target instanceof DeleteHandle) {
metadata.finishDelete(session, ((DeleteHandle) target).getHandle(), fragments);
return Optional.empty();
}
else {
throw new AssertionError("Unhandled target type: " + target.getClass().getName());
}
};
}
private static PageSinkCommitter createPageSinkCommitter(Session session, Metadata metadata, ExecutionWriterTarget target)
{
return fragments -> {
if (target instanceof CreateHandle) {
return metadata.commitPageSinkAsync(session, ((CreateHandle) target).getHandle(), fragments);
}
else if (target instanceof InsertHandle) {
return metadata.commitPageSinkAsync(session, ((InsertHandle) target).getHandle(), fragments);
}
else {
throw new AssertionError("Unhandled target type: " + target.getClass().getName());
}
};
}
private static Function<Page, Page> enforceLayoutProcessor(List<VariableReferenceExpression> expectedLayout, Map<VariableReferenceExpression, Integer> inputLayout)
{
int[] channels = expectedLayout.stream()
.peek(variable -> checkArgument(inputLayout.containsKey(variable), "channel not found for variable: %s", variable))
.mapToInt(inputLayout::get)
.toArray();
if (Arrays.equals(channels, range(0, inputLayout.size()).toArray())) {
// this is an identity mapping
return Function.identity();
}
return new PageChannelSelector(channels);
}
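// Worked example (illustrative): expected layout [b, a] with input layout {a -> 0, b -> 1} yields
// channels [1, 0], which is not the identity permutation, so a PageChannelSelector is returned;
// expected layout [a, b] would yield [0, 1] and the identity function instead.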
private static List<Integer> getChannelsForVariables(Collection<VariableReferenceExpression> variables, Map<VariableReferenceExpression, Integer> layout)
{
ImmutableList.Builder<Integer> builder = ImmutableList.builder();
for (VariableReferenceExpression variable : variables) {
checkArgument(layout.containsKey(variable));
builder.add(layout.get(variable));
}
return builder.build();
}
private static Function<VariableReferenceExpression, Integer> variableChannelGetter(PhysicalOperation source)
{
return input -> {
checkArgument(source.getLayout().containsKey(input));
return source.getLayout().get(input);
};
}
/**
* List of sort orders in the same order as the list of variables returned from `getOrderByVariables()`. This means for
* index i, variable `getOrderByVariables().get(i)` has order `getOrderingList().get(i)`.
*/
private static List<SortOrder> getOrderingList(OrderingScheme orderingScheme)
{
return orderingScheme.getOrderByVariables().stream().map(orderingScheme.getOrderingsMap()::get).collect(toImmutableList());
}
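// Worked example (illustrative): for orderByVariables [x, y] and an orderings map
// {x -> ASC_NULLS_FIRST, y -> DESC_NULLS_LAST}, the result is [ASC_NULLS_FIRST, DESC_NULLS_LAST].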
/**
* Encapsulates a physical operator plus the mapping of logical variables to channels/fields
*/
private static class PhysicalOperation
{
private final List<OperatorFactory> operatorFactories;
private final Map<VariableReferenceExpression, Integer> layout;
private final List<Type> types;
private final PipelineExecutionStrategy pipelineExecutionStrategy;
public PhysicalOperation(OperatorFactory operatorFactory, Map<VariableReferenceExpression, Integer> layout, LocalExecutionPlanContext context, PipelineExecutionStrategy pipelineExecutionStrategy)
{
this(operatorFactory, layout, context, Optional.empty(), pipelineExecutionStrategy);
}
public PhysicalOperation(OperatorFactory operatorFactory, Map<VariableReferenceExpression, Integer> layout, LocalExecutionPlanContext context, PhysicalOperation source)
{
this(operatorFactory, layout, context, Optional.of(requireNonNull(source, "source is null")), source.getPipelineExecutionStrategy());
}
private PhysicalOperation(
OperatorFactory operatorFactory,
Map<VariableReferenceExpression, Integer> layout,
LocalExecutionPlanContext context,
Optional<PhysicalOperation> source,
PipelineExecutionStrategy pipelineExecutionStrategy)
{
requireNonNull(operatorFactory, "operatorFactory is null");
requireNonNull(layout, "layout is null");
requireNonNull(context, "context is null");
requireNonNull(source, "source is null");
requireNonNull(pipelineExecutionStrategy, "pipelineExecutionStrategy is null");
this.operatorFactories = ImmutableList.<OperatorFactory>builder()
.addAll(source.map(PhysicalOperation::getOperatorFactories).orElse(ImmutableList.of()))
.add(operatorFactory)
.build();
this.layout = ImmutableMap.copyOf(layout);
this.types = toTypes(layout);
this.pipelineExecutionStrategy = pipelineExecutionStrategy;
}
private static List<Type> toTypes(Map<VariableReferenceExpression, Integer> layout)
{
// verify layout covers all values
int channelCount = layout.values().stream().mapToInt(Integer::intValue).max().orElse(-1) + 1;
checkArgument(
layout.size() == channelCount && ImmutableSet.copyOf(layout.values()).containsAll(ContiguousSet.create(closedOpen(0, channelCount), integers())),
"Layout does not have a variable for every output channel: %s", layout);
Map<Integer, VariableReferenceExpression> channelLayout = ImmutableBiMap.copyOf(layout).inverse();
return range(0, channelCount)
.mapToObj(channelLayout::get)
.map(VariableReferenceExpression::getType)
.collect(toImmutableList());
}
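// Worked example (illustrative): a layout {a -> 0, b -> 1} with BIGINT a and VARCHAR b yields
// [BIGINT, VARCHAR]; a layout {a -> 0, b -> 2} fails the check above because channel 1 has no variable.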
private int variableToChannel(VariableReferenceExpression input)
{
checkArgument(layout.containsKey(input));
return layout.get(input);
}
public List<Type> getTypes()
{
return types;
}
public Map<VariableReferenceExpression, Integer> getLayout()
{
return layout;
}
private List<OperatorFactory> getOperatorFactories()
{
return operatorFactories;
}
public PipelineExecutionStrategy getPipelineExecutionStrategy()
{
return pipelineExecutionStrategy;
}
}
private static class DriverFactoryParameters
{
private final LocalExecutionPlanContext subContext;
private final PhysicalOperation source;
public DriverFactoryParameters(LocalExecutionPlanContext subContext, PhysicalOperation source)
{
this.subContext = subContext;
this.source = source;
}
public LocalExecutionPlanContext getSubContext()
{
return subContext;
}
public PhysicalOperation getSource()
{
return source;
}
}
}
|
apache-2.0
|
EixoX/jetfuel
|
jetfuel-core/src/main/java/com/eixox/data/sql/Postgres.java
|
622
|
package com.eixox.data.sql;
import java.util.Properties;
public class Postgres extends Database {
public Postgres(String url, Properties properties) {
super(url, properties);
}
@Override
public DatabaseCommand createCommand() {
return new PostgresCommand(this);
}
@Override
public final char getNamePrefix() {
return '"';
}
@Override
public final char getNameSuffix() {
return '"';
}
@Override
public final boolean supportsTop() {
return false;
}
@Override
public final boolean supportsOffset() {
return true;
}
@Override
public final boolean supportsLimit() {
return true;
}
}
|
apache-2.0
|
parshimers/incubator-asterixdb
|
asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java
|
986
|
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.optimizer.handle;
/**
* A handle is a way of accessing an ADM instance or a collection of ADM
* instances nested within another ADM instance.
*
* @author Nicola
*/
public interface IHandle {
public enum HandleType {
FIELD_INDEX_AND_TYPE,
FIELD_NAME
}
public HandleType getHandleType();
}
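// Illustrative sketch (hypothetical, not part of the original interface): a minimal field-name
// handle could look like
//   public class FieldNameHandle implements IHandle {
//       public HandleType getHandleType() { return HandleType.FIELD_NAME; }
//   }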
|
apache-2.0
|
a-chervin/onlinedebug
|
test/xryusha/onlinedebug/testcases/breakpoints/methodrelated/ConstructorMethod.java
|
739
|
package xryusha.onlinedebug.testcases.breakpoints.methodrelated;
import xryusha.onlinedebug.testcases.Flow;
public class ConstructorMethod extends Flow
{
@Override
public void reset()
{
CtorTest.counter = 0;
}
@Override
public Object call() throws Exception
{
f();
return null;
}
void f()
{
String BP = "sdsfs";
CtorTest ct = new CtorTest();
BP = "sdsfs";
ct = new CtorTest("asdsad");
BP = "sdsfs";
}
static class CtorTest
{
static int counter = 0;
CtorTest()
{
String BP="sadfs";
}
CtorTest(String vvv)
{
String BP="sadfs";
}
}
}
|
apache-2.0
|
scouter-project/scouter
|
scouter.webapp/src/main/java/scouterx/webapp/layer/controller/UserController.java
|
3712
|
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouterx.webapp.layer.controller;
import io.swagger.annotations.Api;
import scouterx.webapp.framework.annotation.NoAuth;
import scouterx.webapp.framework.client.server.Server;
import scouterx.webapp.framework.client.server.ServerManager;
import scouterx.webapp.layer.service.UserService;
import scouterx.webapp.layer.service.UserTokenService;
import scouterx.webapp.request.LoginRequest;
import scouterx.webapp.view.BearerTokenView;
import scouterx.webapp.view.CommonResultView;
import javax.crypto.KeyGenerator;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
/**
* @author Gun Lee (gunlee01@gmail.com) on 2017. 8. 27.
*/
@Path("/v1/user")
@Api("User")
@Singleton
@Produces(MediaType.APPLICATION_JSON)
public class UserController {
@Context
HttpServletRequest servletRequest;
final UserService userService = new UserService();
final UserTokenService userTokenService = new UserTokenService();
/**
* traditional web application login for web client applications (success will respond with a "Set-Cookie: JSESSIONID" header)
*
* @param loginRequest @see {@link LoginRequest}
*/
@NoAuth
@POST @Path("/login")
@Consumes(MediaType.APPLICATION_JSON)
public CommonResultView<Boolean> login(@Valid final LoginRequest loginRequest) {
userService.login(ServerManager.getInstance().getServer(loginRequest.getServerId()), loginRequest.getUser());
servletRequest.getSession(true).setAttribute("userId", loginRequest.getUser().getId());
return CommonResultView.success();
}
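// Illustrative request shape (JSON field names assumed from LoginRequest, not verified):
//   POST /v1/user/login
//   {"serverId": 1, "user": {"id": "admin", "password": "..."}}
// On success, the servlet container issues a JSESSIONID cookie for the session created above.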
/**
* login for 3rd-party applications (on success, a Bearer token is returned and must be sent in the 'Authorization' header of each subsequent request.)
*
* @param loginRequest @see {@link LoginRequest}
*/
@NoAuth
@POST @Path("/loginGetToken")
@Consumes(MediaType.APPLICATION_JSON)
public CommonResultView<BearerTokenView> login3rdParty(@Valid final LoginRequest loginRequest) {
Server server = ServerManager.getInstance().getServer(loginRequest.getServerId());
userService.login(server, loginRequest.getUser());
String bearerToken = userTokenService.publishToken(server, loginRequest.getUser());
return CommonResultView.success(new BearerTokenView(true, bearerToken));
}
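// Illustrative follow-up usage (header scheme assumed from BearerTokenView): the returned token
// is sent on subsequent requests as
//   Authorization: Bearer <token>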
public static void main(String[] args) {
Key key;
SecureRandom rand = new SecureRandom();
KeyGenerator generator = null;
try {
generator = KeyGenerator.getInstance("AES");
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
return; // without a generator there is no key to initialize
}
generator.init(256, rand);
key = generator.generateKey();
}
}
|
apache-2.0
|
jmptrader/Strata
|
modules/pricer/src/main/java/com/opengamma/strata/pricer/bond/LegalEntityDiscountingProvider.java
|
28845
|
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.pricer.bond;
import java.io.Serializable;
import java.time.LocalDate;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutablePreBuild;
import org.joda.beans.ImmutableValidator;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.collect.ImmutableMap;
import com.opengamma.strata.basics.StandardId;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.collect.tuple.Pair;
import com.opengamma.strata.market.param.CurrencyParameterSensitivities;
import com.opengamma.strata.market.sensitivity.PointSensitivities;
import com.opengamma.strata.market.sensitivity.PointSensitivity;
import com.opengamma.strata.pricer.DiscountFactors;
import com.opengamma.strata.pricer.rate.RatesProvider;
import com.opengamma.strata.product.SecurityId;
/**
* The discounting factors provider, used to calculate analytic measures.
* <p>
* The primary usage of this provider is to price bonds issued by a legal entity.
* This includes discount factors of repo curves and issuer curves.
*/
@BeanDefinition
public final class LegalEntityDiscountingProvider
implements ImmutableBean, Serializable {
/**
* The valuation date.
* All curves and other data items in this provider are calibrated for this date.
*/
@PropertyDefinition(validate = "notNull")
private final LocalDate valuationDate;
/**
* The bond group map.
* <p>
* This map is used to convert the {@link StandardId} that identifies the bond to
* the associated bond group in order to lookup a repo curve.
* <p>
* See {@link LegalEntityDiscountingProvider#repoCurveDiscountFactors(SecurityId, StandardId, Currency)}.
*/
@PropertyDefinition(validate = "notNull", get = "private")
private final ImmutableMap<StandardId, BondGroup> bondMap;
/**
* The repo curves, defaulted to an empty map.
* The curve data, predicting the future, associated with each bond group and currency.
*/
@PropertyDefinition(validate = "notNull", get = "private")
private final ImmutableMap<Pair<BondGroup, Currency>, DiscountFactors> repoCurves;
/**
* The legal entity group map.
* <p>
* This map is used to convert the {@link StandardId} that identifies the legal entity to
* the associated legal entity group in order to lookup an issuer curve.
* <p>
* See {@link LegalEntityDiscountingProvider#issuerCurveDiscountFactors(StandardId, Currency)}.
*/
@PropertyDefinition(validate = "notEmpty", get = "private")
private final ImmutableMap<StandardId, LegalEntityGroup> legalEntityMap;
/**
* The issuer curves.
* The curve data, predicting the future, associated with each legal entity group and currency.
*/
@PropertyDefinition(validate = "notEmpty", get = "private")
private final ImmutableMap<Pair<LegalEntityGroup, Currency>, DiscountFactors> issuerCurves;
//-------------------------------------------------------------------------
@ImmutablePreBuild
private static void preBuild(Builder builder) {
if (builder.valuationDate == null && !builder.issuerCurves.isEmpty()) {
builder.valuationDate = builder.issuerCurves.values().iterator().next().getValuationDate();
}
}
@ImmutableValidator
private void validate() {
for (Entry<Pair<BondGroup, Currency>, DiscountFactors> entry : repoCurves.entrySet()) {
if (!entry.getValue().getValuationDate().isEqual(valuationDate)) {
throw new IllegalArgumentException("Invalid valuation date for the repo curve: " + entry.getValue());
}
if (!bondMap.containsValue(entry.getKey().getFirst())) {
throw new IllegalArgumentException("No map to the bond group from ID: " + entry.getKey().getFirst());
}
}
for (Entry<Pair<LegalEntityGroup, Currency>, DiscountFactors> entry : issuerCurves.entrySet()) {
if (!entry.getValue().getValuationDate().isEqual(valuationDate)) {
throw new IllegalArgumentException("Invalid valuation date for the issuer curve: " + entry.getValue());
}
if (!legalEntityMap.containsValue(entry.getKey().getFirst())) {
throw new IllegalArgumentException("No map to the legal entity group from ID: " + entry.getKey().getFirst());
}
}
}
//-------------------------------------------------------------------------
/**
* Gets the discount factors of a repo curve for standard IDs and a currency.
* <p>
   * If the bond standard ID is mapped to a bond group, the relevant DiscountFactors is returned;
   * otherwise the issuer standard ID is checked and the relevant DiscountFactors is returned.
   * If neither the bond ID nor the issuer ID is mapped to any BondGroup, an exception is thrown.
* <p>
* If the valuation date is on or after the specified date, the discount factor is 1.
*
* @param securityId the standard ID of security to get the discount factors for
* @param issuerId the standard ID of legal entity to get the discount factors for
* @param currency the currency to get the discount factors for
* @return the discount factors
* @throws IllegalArgumentException if the discount factors are not available
*/
public RepoCurveDiscountFactors repoCurveDiscountFactors(SecurityId securityId, StandardId issuerId, Currency currency) {
BondGroup bondGroup = bondMap.get(securityId.getStandardId());
if (bondGroup == null) {
bondGroup = bondMap.get(issuerId);
if (bondGroup == null) {
throw new IllegalArgumentException("Unable to find map for ID: " + securityId + ", " + issuerId);
}
}
return repoCurveDiscountFactors(bondGroup, currency);
}
// lookup the discount factors for the bond group
private RepoCurveDiscountFactors repoCurveDiscountFactors(BondGroup bondGroup, Currency currency) {
DiscountFactors discountFactors = repoCurves.get(Pair.of(bondGroup, currency));
if (discountFactors == null) {
throw new IllegalArgumentException("Unable to find repo curve: " + bondGroup + ", " + currency);
}
return RepoCurveDiscountFactors.of(discountFactors, bondGroup);
}
//-------------------------------------------------------------------------
/**
* Gets the discount factors of an issuer curve for a standard ID and a currency.
* <p>
* If the valuation date is on or after the specified date, the discount factor is 1.
*
* @param issuerId the standard ID to get the discount factors for
* @param currency the currency to get the discount factors for
* @return the discount factors
* @throws IllegalArgumentException if the discount factors are not available
*/
public IssuerCurveDiscountFactors issuerCurveDiscountFactors(StandardId issuerId, Currency currency) {
LegalEntityGroup legalEntityGroup = legalEntityMap.get(issuerId);
if (legalEntityGroup == null) {
throw new IllegalArgumentException("Unable to find map for ID: " + issuerId);
}
return issuerCurveDiscountFactors(legalEntityGroup, currency);
}
// lookup the discount factors for the legal entity group
private IssuerCurveDiscountFactors issuerCurveDiscountFactors(LegalEntityGroup legalEntityGroup, Currency currency) {
DiscountFactors discountFactors = issuerCurves.get(Pair.of(legalEntityGroup, currency));
if (discountFactors == null) {
throw new IllegalArgumentException("Unable to find issuer curve: " + legalEntityGroup + ", " + currency);
}
return IssuerCurveDiscountFactors.of(discountFactors, legalEntityGroup);
}
//-------------------------------------------------------------------------
/**
* Computes the parameter sensitivity.
* <p>
* This computes the {@link CurrencyParameterSensitivities} associated with the {@link PointSensitivities}.
* This corresponds to the projection of the point sensitivity to the curve internal parameters representation.
* <p>
* The sensitivities handled here are {@link RepoCurveZeroRateSensitivity} and {@link IssuerCurveZeroRateSensitivity}.
* For the other sensitivity objects, use {@link RatesProvider} instead.
*
* @param pointSensitivities the point sensitivity
* @return the sensitivity to the curve parameters
*/
public CurrencyParameterSensitivities parameterSensitivity(PointSensitivities pointSensitivities) {
CurrencyParameterSensitivities sens = CurrencyParameterSensitivities.empty();
for (PointSensitivity point : pointSensitivities.getSensitivities()) {
if (point instanceof RepoCurveZeroRateSensitivity) {
RepoCurveZeroRateSensitivity pt = (RepoCurveZeroRateSensitivity) point;
RepoCurveDiscountFactors factors = repoCurveDiscountFactors(pt.getBondGroup(), pt.getCurveCurrency());
sens = sens.combinedWith(factors.parameterSensitivity(pt));
} else if (point instanceof IssuerCurveZeroRateSensitivity) {
IssuerCurveZeroRateSensitivity pt = (IssuerCurveZeroRateSensitivity) point;
IssuerCurveDiscountFactors factors = issuerCurveDiscountFactors(pt.getLegalEntityGroup(), pt.getCurveCurrency());
sens = sens.combinedWith(factors.parameterSensitivity(pt));
}
}
return sens;
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code LegalEntityDiscountingProvider}.
* @return the meta-bean, not null
*/
public static LegalEntityDiscountingProvider.Meta meta() {
return LegalEntityDiscountingProvider.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(LegalEntityDiscountingProvider.Meta.INSTANCE);
}
/**
* The serialization version id.
*/
private static final long serialVersionUID = 1L;
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static LegalEntityDiscountingProvider.Builder builder() {
return new LegalEntityDiscountingProvider.Builder();
}
private LegalEntityDiscountingProvider(
LocalDate valuationDate,
Map<StandardId, BondGroup> bondMap,
Map<Pair<BondGroup, Currency>, DiscountFactors> repoCurves,
Map<StandardId, LegalEntityGroup> legalEntityMap,
Map<Pair<LegalEntityGroup, Currency>, DiscountFactors> issuerCurves) {
JodaBeanUtils.notNull(valuationDate, "valuationDate");
JodaBeanUtils.notNull(bondMap, "bondMap");
JodaBeanUtils.notNull(repoCurves, "repoCurves");
JodaBeanUtils.notEmpty(legalEntityMap, "legalEntityMap");
JodaBeanUtils.notEmpty(issuerCurves, "issuerCurves");
this.valuationDate = valuationDate;
this.bondMap = ImmutableMap.copyOf(bondMap);
this.repoCurves = ImmutableMap.copyOf(repoCurves);
this.legalEntityMap = ImmutableMap.copyOf(legalEntityMap);
this.issuerCurves = ImmutableMap.copyOf(issuerCurves);
validate();
}
@Override
public LegalEntityDiscountingProvider.Meta metaBean() {
return LegalEntityDiscountingProvider.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets the valuation date.
* All curves and other data items in this provider are calibrated for this date.
* @return the value of the property, not null
*/
public LocalDate getValuationDate() {
return valuationDate;
}
//-----------------------------------------------------------------------
/**
* Gets the bond group map.
* <p>
* This map is used to convert the {@link StandardId} that identifies the bond to
* the associated bond group in order to lookup a repo curve.
* <p>
* See {@link LegalEntityDiscountingProvider#repoCurveDiscountFactors(SecurityId, StandardId, Currency)}.
* @return the value of the property, not null
*/
private ImmutableMap<StandardId, BondGroup> getBondMap() {
return bondMap;
}
//-----------------------------------------------------------------------
/**
* Gets the repo curves, defaulted to an empty map.
* The curve data, predicting the future, associated with each bond group and currency.
* @return the value of the property, not null
*/
private ImmutableMap<Pair<BondGroup, Currency>, DiscountFactors> getRepoCurves() {
return repoCurves;
}
//-----------------------------------------------------------------------
/**
* Gets the legal entity group map.
* <p>
* This map is used to convert the {@link StandardId} that identifies the legal entity to
* the associated legal entity group in order to lookup an issuer curve.
* <p>
* See {@link LegalEntityDiscountingProvider#issuerCurveDiscountFactors(StandardId, Currency)}.
* @return the value of the property, not empty
*/
private ImmutableMap<StandardId, LegalEntityGroup> getLegalEntityMap() {
return legalEntityMap;
}
//-----------------------------------------------------------------------
/**
* Gets the issuer curves.
* The curve data, predicting the future, associated with each legal entity group and currency.
* @return the value of the property, not empty
*/
private ImmutableMap<Pair<LegalEntityGroup, Currency>, DiscountFactors> getIssuerCurves() {
return issuerCurves;
}
//-----------------------------------------------------------------------
/**
* Returns a builder that allows this bean to be mutated.
* @return the mutable builder, not null
*/
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
LegalEntityDiscountingProvider other = (LegalEntityDiscountingProvider) obj;
return JodaBeanUtils.equal(valuationDate, other.valuationDate) &&
JodaBeanUtils.equal(bondMap, other.bondMap) &&
JodaBeanUtils.equal(repoCurves, other.repoCurves) &&
JodaBeanUtils.equal(legalEntityMap, other.legalEntityMap) &&
JodaBeanUtils.equal(issuerCurves, other.issuerCurves);
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(valuationDate);
hash = hash * 31 + JodaBeanUtils.hashCode(bondMap);
hash = hash * 31 + JodaBeanUtils.hashCode(repoCurves);
hash = hash * 31 + JodaBeanUtils.hashCode(legalEntityMap);
hash = hash * 31 + JodaBeanUtils.hashCode(issuerCurves);
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(192);
buf.append("LegalEntityDiscountingProvider{");
buf.append("valuationDate").append('=').append(valuationDate).append(',').append(' ');
buf.append("bondMap").append('=').append(bondMap).append(',').append(' ');
buf.append("repoCurves").append('=').append(repoCurves).append(',').append(' ');
buf.append("legalEntityMap").append('=').append(legalEntityMap).append(',').append(' ');
buf.append("issuerCurves").append('=').append(JodaBeanUtils.toString(issuerCurves));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code LegalEntityDiscountingProvider}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code valuationDate} property.
*/
private final MetaProperty<LocalDate> valuationDate = DirectMetaProperty.ofImmutable(
this, "valuationDate", LegalEntityDiscountingProvider.class, LocalDate.class);
/**
* The meta-property for the {@code bondMap} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<ImmutableMap<StandardId, BondGroup>> bondMap = DirectMetaProperty.ofImmutable(
this, "bondMap", LegalEntityDiscountingProvider.class, (Class) ImmutableMap.class);
/**
* The meta-property for the {@code repoCurves} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<ImmutableMap<Pair<BondGroup, Currency>, DiscountFactors>> repoCurves = DirectMetaProperty.ofImmutable(
this, "repoCurves", LegalEntityDiscountingProvider.class, (Class) ImmutableMap.class);
/**
* The meta-property for the {@code legalEntityMap} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<ImmutableMap<StandardId, LegalEntityGroup>> legalEntityMap = DirectMetaProperty.ofImmutable(
this, "legalEntityMap", LegalEntityDiscountingProvider.class, (Class) ImmutableMap.class);
/**
* The meta-property for the {@code issuerCurves} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<ImmutableMap<Pair<LegalEntityGroup, Currency>, DiscountFactors>> issuerCurves = DirectMetaProperty.ofImmutable(
this, "issuerCurves", LegalEntityDiscountingProvider.class, (Class) ImmutableMap.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"valuationDate",
"bondMap",
"repoCurves",
"legalEntityMap",
"issuerCurves");
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 113107279: // valuationDate
return valuationDate;
case 63526809: // bondMap
return bondMap;
case 587630454: // repoCurves
return repoCurves;
case 1085102016: // legalEntityMap
return legalEntityMap;
case -1909076611: // issuerCurves
return issuerCurves;
}
return super.metaPropertyGet(propertyName);
}
@Override
public LegalEntityDiscountingProvider.Builder builder() {
return new LegalEntityDiscountingProvider.Builder();
}
@Override
public Class<? extends LegalEntityDiscountingProvider> beanType() {
return LegalEntityDiscountingProvider.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code valuationDate} property.
* @return the meta-property, not null
*/
public MetaProperty<LocalDate> valuationDate() {
return valuationDate;
}
/**
* The meta-property for the {@code bondMap} property.
* @return the meta-property, not null
*/
public MetaProperty<ImmutableMap<StandardId, BondGroup>> bondMap() {
return bondMap;
}
/**
* The meta-property for the {@code repoCurves} property.
* @return the meta-property, not null
*/
public MetaProperty<ImmutableMap<Pair<BondGroup, Currency>, DiscountFactors>> repoCurves() {
return repoCurves;
}
/**
* The meta-property for the {@code legalEntityMap} property.
* @return the meta-property, not null
*/
public MetaProperty<ImmutableMap<StandardId, LegalEntityGroup>> legalEntityMap() {
return legalEntityMap;
}
/**
* The meta-property for the {@code issuerCurves} property.
* @return the meta-property, not null
*/
public MetaProperty<ImmutableMap<Pair<LegalEntityGroup, Currency>, DiscountFactors>> issuerCurves() {
return issuerCurves;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 113107279: // valuationDate
return ((LegalEntityDiscountingProvider) bean).getValuationDate();
case 63526809: // bondMap
return ((LegalEntityDiscountingProvider) bean).getBondMap();
case 587630454: // repoCurves
return ((LegalEntityDiscountingProvider) bean).getRepoCurves();
case 1085102016: // legalEntityMap
return ((LegalEntityDiscountingProvider) bean).getLegalEntityMap();
case -1909076611: // issuerCurves
return ((LegalEntityDiscountingProvider) bean).getIssuerCurves();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code LegalEntityDiscountingProvider}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<LegalEntityDiscountingProvider> {
private LocalDate valuationDate;
private Map<StandardId, BondGroup> bondMap = ImmutableMap.of();
private Map<Pair<BondGroup, Currency>, DiscountFactors> repoCurves = ImmutableMap.of();
private Map<StandardId, LegalEntityGroup> legalEntityMap = ImmutableMap.of();
private Map<Pair<LegalEntityGroup, Currency>, DiscountFactors> issuerCurves = ImmutableMap.of();
/**
* Restricted constructor.
*/
private Builder() {
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(LegalEntityDiscountingProvider beanToCopy) {
this.valuationDate = beanToCopy.getValuationDate();
this.bondMap = beanToCopy.getBondMap();
this.repoCurves = beanToCopy.getRepoCurves();
this.legalEntityMap = beanToCopy.getLegalEntityMap();
this.issuerCurves = beanToCopy.getIssuerCurves();
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 113107279: // valuationDate
return valuationDate;
case 63526809: // bondMap
return bondMap;
case 587630454: // repoCurves
return repoCurves;
case 1085102016: // legalEntityMap
return legalEntityMap;
case -1909076611: // issuerCurves
return issuerCurves;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@SuppressWarnings("unchecked")
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 113107279: // valuationDate
this.valuationDate = (LocalDate) newValue;
break;
case 63526809: // bondMap
this.bondMap = (Map<StandardId, BondGroup>) newValue;
break;
case 587630454: // repoCurves
this.repoCurves = (Map<Pair<BondGroup, Currency>, DiscountFactors>) newValue;
break;
case 1085102016: // legalEntityMap
this.legalEntityMap = (Map<StandardId, LegalEntityGroup>) newValue;
break;
case -1909076611: // issuerCurves
this.issuerCurves = (Map<Pair<LegalEntityGroup, Currency>, DiscountFactors>) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public LegalEntityDiscountingProvider build() {
preBuild(this);
return new LegalEntityDiscountingProvider(
valuationDate,
bondMap,
repoCurves,
legalEntityMap,
issuerCurves);
}
//-----------------------------------------------------------------------
/**
* Sets the valuation date.
* All curves and other data items in this provider are calibrated for this date.
* @param valuationDate the new value, not null
* @return this, for chaining, not null
*/
public Builder valuationDate(LocalDate valuationDate) {
JodaBeanUtils.notNull(valuationDate, "valuationDate");
this.valuationDate = valuationDate;
return this;
}
/**
* Sets the bond group map.
* <p>
* This map is used to convert the {@link StandardId} that identifies the bond to
* the associated bond group in order to lookup a repo curve.
* <p>
* See {@link LegalEntityDiscountingProvider#repoCurveDiscountFactors(SecurityId, StandardId, Currency)}.
* @param bondMap the new value, not null
* @return this, for chaining, not null
*/
public Builder bondMap(Map<StandardId, BondGroup> bondMap) {
JodaBeanUtils.notNull(bondMap, "bondMap");
this.bondMap = bondMap;
return this;
}
/**
* Sets the repo curves, defaulted to an empty map.
* The curve data, predicting the future, associated with each bond group and currency.
* @param repoCurves the new value, not null
* @return this, for chaining, not null
*/
public Builder repoCurves(Map<Pair<BondGroup, Currency>, DiscountFactors> repoCurves) {
JodaBeanUtils.notNull(repoCurves, "repoCurves");
this.repoCurves = repoCurves;
return this;
}
/**
* Sets the legal entity group map.
* <p>
* This map is used to convert the {@link StandardId} that identifies the legal entity to
* the associated legal entity group in order to lookup an issuer curve.
* <p>
* See {@link LegalEntityDiscountingProvider#issuerCurveDiscountFactors(StandardId, Currency)}.
* @param legalEntityMap the new value, not empty
* @return this, for chaining, not null
*/
public Builder legalEntityMap(Map<StandardId, LegalEntityGroup> legalEntityMap) {
JodaBeanUtils.notEmpty(legalEntityMap, "legalEntityMap");
this.legalEntityMap = legalEntityMap;
return this;
}
/**
* Sets the issuer curves.
* The curve data, predicting the future, associated with each legal entity group and currency.
* @param issuerCurves the new value, not empty
* @return this, for chaining, not null
*/
public Builder issuerCurves(Map<Pair<LegalEntityGroup, Currency>, DiscountFactors> issuerCurves) {
JodaBeanUtils.notEmpty(issuerCurves, "issuerCurves");
this.issuerCurves = issuerCurves;
return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(192);
buf.append("LegalEntityDiscountingProvider.Builder{");
buf.append("valuationDate").append('=').append(JodaBeanUtils.toString(valuationDate)).append(',').append(' ');
buf.append("bondMap").append('=').append(JodaBeanUtils.toString(bondMap)).append(',').append(' ');
buf.append("repoCurves").append('=').append(JodaBeanUtils.toString(repoCurves)).append(',').append(' ');
buf.append("legalEntityMap").append('=').append(JodaBeanUtils.toString(legalEntityMap)).append(',').append(' ');
buf.append("issuerCurves").append('=').append(JodaBeanUtils.toString(issuerCurves));
buf.append('}');
return buf.toString();
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
|
apache-2.0
|
epsiinside/interne
|
src/net/wastl/webmail/server/SystemCheck.java
|
5580
|
/*
* @(#)$Id: SystemCheck.java 113 2008-10-29 23:41:26Z unsaved $
*
* Copyright 2008 by the JWebMail Development Team and Sebastian Schaffert.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.wastl.webmail.server;
import java.io.BufferedReader;
import java.io.EOFException;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.wastl.webmail.exceptions.WebMailException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class SystemCheck {
private static Log log = LogFactory.getLog(SystemCheck.class);
public SystemCheck(WebMailServer parent) throws WebMailException {
log.info("Checking Java Virtual Machine ... ");
log.info("Version: "+System.getProperty("java.version")+" ... ");
/* Test if the Java version might cause trouble */
if(System.getProperty("java.version").compareTo("1.5")>=0) {
log.info("JDK version ok.");
} else {
log.warn("At least Java 1.5 is required for WebMail.");
}
/* Test if the operating system is supported */
log.info("Operating System: "+System.getProperty("os.name")+"/"+System.getProperty("os.arch")+" "+System.getProperty("os.version")+" ... ");
if(System.getProperty("os.name").equals("SunOS") ||
System.getProperty("os.name").equals("Solaris") ||
System.getProperty("os.name").equals("Linux")) {
log.info("OS variant Ok");
} else {
log.warn("WebMail was only tested\n on Solaris, HP-UX and Linux and may cause problems on your platform.");
}
/* Check if we are running as root and issue a warning */
try {
log.info("User name: "+System.getProperty("user.name")+" ... ");
if(!System.getProperty("user.name").equals("root")) {
log.info("User ok.");
} else {
log.warn("warning. You are running WebMail as root. This may be a potential security problem.");
}
} catch(Exception ex) {
// Security restrictions prohibit reading the user name; in that case we
// do not need to check for root anyway.
}
/* Check whether all WebMail system properties are defined */
log.info("WebMail System Properties: ");
//checkPathProperty(parent,"webmail.plugin.path");
//checkPathProperty(parent,"webmail.auth.path");
checkPathProperty(parent,"webmail.lib.path");
checkPathProperty(parent,"webmail.template.path");
checkPathProperty(parent,"webmail.data.path");
checkPathProperty(parent,"webmail.xml.path");
log.info("WebMail System Properties ok!");
log.info("Setting DTD-path in webmail.xml ... ");
File f1=new File(parent.getProperty("webmail.data.path")+System.getProperty("file.separator")+"webmail.xml");
File f2=new File(parent.getProperty("webmail.data.path")+System.getProperty("file.separator")+"webmail.xml."+Long.toHexString(System.currentTimeMillis()));
try {
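// Illustrative rewrite performed below (paths are examples only):
//   <!DOCTYPE SYSDATA SYSTEM "sysdata.dtd">
// becomes
//   <!DOCTYPE SYSDATA SYSTEM "file:///opt/webmail/xml/sysdata.dtd">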
Pattern regexp=Pattern.compile("<!DOCTYPE SYSDATA SYSTEM \".*\">");
BufferedReader file1=new BufferedReader(new FileReader(f1));
PrintWriter file2=new PrintWriter(new FileWriter(f2));
try {
String line=file1.readLine();
while(line != null) {
Matcher m = regexp.matcher(line);
String s = m.replaceAll("<!DOCTYPE SYSDATA SYSTEM \"file://"+
parent.getProperty("webmail.xml.path")+
System.getProperty("file.separator")+
"sysdata.dtd"+"\">");
// String s=regexp.substituteAll(line,"<!DOCTYPE SYSDATA SYSTEM \"file://"+
// parent.getProperty("webmail.xml.path")+
// System.getProperty("file.separator")+
// "sysdata.dtd"+"\">");
//log.debug(s);
file2.println(s);
line=file1.readLine();
}
} catch(EOFException ex) {
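// BufferedReader.readLine() returns null at end of stream, so this
// catch is defensive only.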
}
file2.close();
file1.close();
} catch(Exception ex) {
throw new WebMailException(ex);
}
if (!f2.renameTo(f1)) {
    log.warn("Could not replace "+f1.getPath()+" with the updated copy "+f2.getPath());
}
log.info("Done checking system!");
}
protected static void checkPathProperty(WebMailServer parent,String property) throws WebMailException {
if(parent.getProperty(property) == null ||
parent.getProperty(property).equals("")) {
throw new WebMailException("fatal error. "+property+" not defined.");
} else {
File f=new File(parent.getProperty(property));
parent.setProperty(property,f.getAbsolutePath());
}
}
}
|
apache-2.0
|
googleads/google-ads-php
|
src/Google/Ads/GoogleAds/Lib/V9/GoogleAdsResponseMetadata.php
|
1751
|
<?php
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Google\Ads\GoogleAds\Lib\V9;
/**
* Holds the response metadata of Google Ads API for a successful request.
*/
class GoogleAdsResponseMetadata
{
use GoogleAdsMetadataTrait;
private $metadata;
/**
* Creates a `GoogleAdsResponseMetadata` instance with the specified parameters.
*
* @param array $metadata the metadata
*/
public function __construct(array $metadata)
{
$this->metadata = $metadata;
}
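    /*
     * Illustrative usage (not part of the original source; the variable name is hypothetical):
     *   $requestId = $responseMetadata->getRequestId();   // e.g. to quote in support requests
     *   $allHeaders = $responseMetadata->getMetadata();
     */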
/**
* Gets an associative array of metadata keys and values.
* Keys are strings and values are arrays of string values or binary message data.
*
* @return array an associative array of metadata keys and values.
*/
public function getMetadata()
{
return $this->metadata;
}
/**
* Gets the request ID returned in the RPC trailers.
* Returns null if no request ID has been received.
*
* @return string|null the request ID
*/
public function getRequestId()
{
return $this->getFirstHeaderValue(
self::$REQUEST_ID_HEADER_KEY,
$this->getMetadata() ?: []
);
}
}
|
apache-2.0
|
jraduget/kaleido-repository
|
kaleido-mailing/src/main/java/org/kaleidofoundry/mail/dispatcher/MailDispatcherProvider.java
|
4661
|
/*
* Copyright 2008-2021 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaleidofoundry.mail.dispatcher;
import static org.kaleidofoundry.mail.MailConstants.MailDispatcherPluginName;
import static org.kaleidofoundry.mail.dispatcher.MailDispatcherContextBuilder.*;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Set;
import org.kaleidofoundry.core.context.AbstractProviderService;
import org.kaleidofoundry.core.context.ProviderException;
import org.kaleidofoundry.core.context.RuntimeContext;
import org.kaleidofoundry.core.lang.annotation.NotNull;
import org.kaleidofoundry.core.plugin.Declare;
import org.kaleidofoundry.core.plugin.PluginFactory;
import org.kaleidofoundry.core.plugin.model.Plugin;
import org.kaleidofoundry.core.store.ResourceException;
import org.kaleidofoundry.core.util.Registry;
import org.kaleidofoundry.mail.session.MailSessionException;
/**
* @author Jerome RADUGET
*/
public class MailDispatcherProvider extends AbstractProviderService<MailDispatcher> {
public MailDispatcherProvider(Class<MailDispatcher> genericClassInterface) {
super(genericClassInterface);
}
@Override
protected Registry<String, MailDispatcher> getRegistry() {
return MailDispatcherFactory.REGISTRY;
}
@Override
protected MailDispatcher _provides(RuntimeContext<MailDispatcher> context) throws ProviderException {
return provides(context.getName(), context);
}
public MailDispatcher provides(String name) {
return provides(name, new RuntimeContext<MailDispatcher>(name, MailDispatcher.class));
}
public MailDispatcher provides(@NotNull final String name, RuntimeContext<MailDispatcher> context) throws ProviderException {
MailDispatcher mailDispatcher = getRegistry().get(name);
if (mailDispatcher == null) {
return create(name, context);
} else {
return mailDispatcher;
}
}
private MailDispatcher create(@NotNull final String name, @NotNull final RuntimeContext<MailDispatcher> context) throws ProviderException {
final String provider = context.getString(PROVIDER, MailDispatcherEnum.sync.name());
// optimization purposes
if (MailDispatcherEnum.sync.name().equals(provider)) { return new SynchronousMailDispatcher(context); }
if (MailDispatcherEnum.async.name().equals(provider)) { return new AsynchronousMailDispatcher(context); }
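      // Illustrative only (not part of the original source): the provider name comes from the
      // PROVIDER property of the RuntimeContext, e.g. a context configured with provider=async
      // selects the AsynchronousMailDispatcher above; the exact property key syntax depends on
      // the kaleido configuration in use.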
// plugin extension mechanism
final Set<Plugin<MailDispatcher>> pluginImpls = PluginFactory.getImplementationRegistry().findByInterface(MailDispatcher.class);
      // scan each @Declare mail dispatcher implementation to find the one matching the requested provider name
for (final Plugin<MailDispatcher> pi : pluginImpls) {
final Class<? extends MailDispatcher> impl = pi.getAnnotatedClass();
try {
final Declare declarePlugin = impl.getAnnotation(Declare.class);
final String pluginName = declarePlugin.value().replace(MailDispatcherPluginName, "").toLowerCase();
if (pluginName.endsWith(provider)) {
final Constructor<? extends MailDispatcher> constructor = impl.getConstructor(RuntimeContext.class);
return constructor.newInstance(context);
}
} catch (final NoSuchMethodException e) {
throw new ProviderException("context.provider.error.NoSuchConstructorException", impl.getName(), "RuntimeContext<MailDispatcher> context");
} catch (final InstantiationException e) {
throw new ProviderException("context.provider.error.InstantiationException", impl.getName(), e.getMessage());
} catch (final IllegalAccessException e) {
throw new ProviderException("context.provider.error.IllegalAccessException", impl.getName(), "RuntimeContext<MailDispatcher> context");
} catch (final InvocationTargetException e) {
if (e.getCause() instanceof ResourceException) {
throw new ProviderException(e.getCause());
} else {
throw new ProviderException("context.provider.error.InvocationTargetException", e.getCause(), impl.getName(),
"RuntimeContext<MailDispatcher> context");
}
}
}
throw new ProviderException(new MailSessionException("mail.session.provider.illegal", provider));
}
}
|
apache-2.0
|
BannukDE/Narrator
|
commands/admin/roleid.js
|
1188
|
exports.run = async (client, message, args, level) => { // eslint-disable-line no-unused-vars
if (args.length < 1) return message.reply("please type < all >, < rolename > or mention a role: < @rolename >");
if (args[0] === "all") {
const roles = message.guild.roles.sort((a, b) => a.name > b.name ? 1 : -1);
let msg = "";
roles.forEach(r => {
msg += r.name + " : " + r.id;
message.channel.send(client.embed().setDescription(msg));
});
}
else if (args[0].startsWith("<@&")) {
message.channel.send(client.embed().setDescription(`${args[0]} : ${args[0].slice(3, -1)}\n`));
}
	else {
		let counter = 0;
		message.guild.roles.forEach(r => {
			if (r.name.toLowerCase().includes(args[0].toLowerCase())) {
				counter++;
				message.channel.send(client.embed().setDescription(`<@&${r.id}> : ${r.id}\n`));
			}
		});
		if (counter === 0) message.reply(`${args[0]} not found in roles`);
	}
};
exports.conf = {
admin: true,
guildOnly: true,
aliases: [],
permLevel: 7
};
exports.help = {
name: "roleid",
category: "0-Admin",
description: "Get the ID of a specific or all role/s",
usage: "roleid < rolenname / @rolenname / all >"
};
|
apache-2.0
|
majetideepak/arrow
|
dev/merge_arrow_pr.py
|
19324
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Utility for creating well-formed pull request merges and pushing them to
# Apache.
# usage: ./merge_arrow_pr.py (see config env vars below)
#
# This utility assumes you already have a local Arrow git clone and that you
# have added remotes corresponding to both (i) the Github Apache Arrow mirror
# and (ii) the apache git repo.
#
# There are several pieces of authorization possibly needed via environment
# variables
#
# APACHE_JIRA_USERNAME: your Apache JIRA id
# APACHE_JIRA_PASSWORD: your Apache JIRA password
# ARROW_GITHUB_API_TOKEN: a GitHub API token to use for API requests (to avoid
# rate limiting)
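#
# Example invocation (illustrative values):
#   APACHE_JIRA_USERNAME=jdoe APACHE_JIRA_PASSWORD=... \
#   ARROW_GITHUB_API_TOKEN=... ./merge_arrow_pr.py 1234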
import configparser
import os
import pprint
import re
import subprocess
import sys
import requests
import getpass
from six.moves import input
import six
try:
import jira.client
except ImportError:
print("Could not find jira library. "
"Run 'sudo pip install jira' to install.")
print("Exiting without trying to close the associated JIRA.")
sys.exit(1)
# Remote name which points to the GitHub site
PR_REMOTE_NAME = os.environ.get("PR_REMOTE_NAME", "apache")
# For testing to avoid accidentally pushing to apache
DEBUG = bool(int(os.environ.get("DEBUG", 0)))
if DEBUG:
print("**************** DEBUGGING ****************")
# Prefix added to temporary branches
BRANCH_PREFIX = "PR_TOOL"
JIRA_API_BASE = "https://issues.apache.org/jira"
def get_json(url, headers=None):
req = requests.get(url, headers=headers)
return req.json()
def run_cmd(cmd):
if isinstance(cmd, six.string_types):
cmd = cmd.split(' ')
try:
output = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
# this avoids hiding the stdout / stderr of failed processes
print('Command failed: %s' % cmd)
print('With output:')
print('--------------')
print(e.output)
print('--------------')
raise e
if isinstance(output, six.binary_type):
output = output.decode('utf-8')
return output
original_head = run_cmd("git rev-parse HEAD")[:8]
def clean_up():
print("Restoring head pointer to %s" % original_head)
run_cmd("git checkout %s" % original_head)
branches = run_cmd("git branch").replace(" ", "").split("\n")
for branch in [x for x in branches
if x.startswith(BRANCH_PREFIX)]:
print("Deleting local branch %s" % branch)
run_cmd("git branch -D %s" % branch)
_REGEX_CI_DIRECTIVE = re.compile(r'\[[^\]]*\]')
def strip_ci_directives(commit_message):
# Remove things like '[force ci]', '[skip appveyor]' from the assembled
# commit message
return _REGEX_CI_DIRECTIVE.sub('', commit_message)
def fix_version_from_branch(branch, versions):
# Note: Assumes this is a sorted (newest->oldest) list of un-released
# versions
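    # e.g. (illustrative) with unreleased versions [0.10.0, 0.9.1] (newest->oldest):
    #   branch "master"      -> 0.9.1 (the oldest unreleased, i.e. the next release)
    #   branch "branch-0.10" -> 0.10.0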
if branch == "master":
return versions[-1]
else:
branch_ver = branch.replace("branch-", "")
return [x for x in versions if x.name.startswith(branch_ver)][-1]
# We can merge both ARROW and PARQUET patches
SUPPORTED_PROJECTS = ['ARROW', 'PARQUET']
PR_TITLE_REGEXEN = [(project, re.compile(r'^(' + project + r'-[0-9]+)\b.*$'))
for project in SUPPORTED_PROJECTS]
class JiraIssue(object):
def __init__(self, jira_con, jira_id, project, cmd):
self.jira_con = jira_con
self.jira_id = jira_id
self.project = project
self.cmd = cmd
try:
self.issue = jira_con.issue(jira_id)
except Exception as e:
self.cmd.fail("ASF JIRA could not find %s\n%s" % (jira_id, e))
def get_candidate_fix_versions(self, merge_branches=('master',)):
# Only suggest versions starting with a number, like 0.x but not JS-0.x
all_versions = self.jira_con.project_versions(self.project)
unreleased_versions = [x for x in all_versions
if not x.raw['released']]
unreleased_versions = sorted(unreleased_versions,
key=lambda x: x.name, reverse=True)
mainline_version_regex = re.compile(r'\d.*')
mainline_versions = [x for x in unreleased_versions
if mainline_version_regex.match(x.name)]
default_fix_versions = [
fix_version_from_branch(x, mainline_versions).name
for x in merge_branches]
for v in default_fix_versions:
# Handles the case where we have forked a release branch but not
# yet made the release. In this case, if the PR is committed to
# the master branch and the release branch, we only consider the
# release branch to be the fix version. E.g. it is not valid to
# have both 1.1.0 and 1.0.0 as fix versions.
(major, minor, patch) = v.split(".")
if patch == "0":
previous = "%s.%s.%s" % (major, int(minor) - 1, 0)
if previous in default_fix_versions:
default_fix_versions = [x for x in default_fix_versions
if x != v]
return all_versions, default_fix_versions
def resolve(self, fix_versions, comment):
fields = self.issue.fields
cur_status = fields.status.name
if cur_status == "Resolved" or cur_status == "Closed":
self.cmd.fail("JIRA issue %s already has status '%s'"
% (self.jira_id, cur_status))
console_output = format_resolved_issue_status(self.jira_id, cur_status,
fields.summary,
fields.assignee,
fields.components)
print(console_output)
resolve = [x for x in self.jira_con.transitions(self.jira_id)
if x['name'] == "Resolve Issue"][0]
self.jira_con.transition_issue(self.jira_id, resolve["id"],
comment=comment,
fixVersions=fix_versions)
print("Successfully resolved %s!" % (self.jira_id))
def format_resolved_issue_status(jira_id, status, summary, assignee,
components):
if assignee is None:
assignee = "NOT ASSIGNED!!!"
else:
assignee = assignee.displayName
if len(components) == 0:
components = 'NO COMPONENTS!!!'
else:
components = ', '.join((x.name for x in components))
return """=== JIRA {} ===
Summary\t\t{}
Assignee\t{}
Components\t{}
Status\t\t{}
URL\t\t{}/{}""".format(jira_id, summary, assignee, components, status,
'/'.join((JIRA_API_BASE, 'browse')),
jira_id)
class GitHubAPI(object):
def __init__(self, project_name):
self.github_api = ("https://api.github.com/repos/apache/{0}"
.format(project_name))
token = os.environ.get('ARROW_GITHUB_API_TOKEN', None)
if token:
self.headers = {'Authorization': 'token {0}'.format(token)}
else:
self.headers = None
def get_pr_data(self, number):
return get_json("%s/pulls/%s" % (self.github_api, number),
headers=self.headers)
class CommandInput(object):
"""
Interface to input(...) to enable unit test mocks to be created
"""
def fail(self, msg):
clean_up()
raise Exception(msg)
def prompt(self, prompt):
return input(prompt)
def getpass(self, prompt):
return getpass.getpass(prompt)
def continue_maybe(self, prompt):
while True:
result = input("\n%s (y/n): " % prompt)
if result.lower() == "y":
return
elif result.lower() == "n":
self.fail("Okay, exiting")
else:
prompt = "Please input 'y' or 'n'"
class PullRequest(object):
def __init__(self, cmd, github_api, git_remote, jira_con, number):
self.cmd = cmd
self.git_remote = git_remote
self.con = jira_con
self.number = number
self._pr_data = github_api.get_pr_data(number)
try:
self.url = self._pr_data["url"]
self.title = self._pr_data["title"]
self.body = self._pr_data["body"]
self.target_ref = self._pr_data["base"]["ref"]
self.user_login = self._pr_data["user"]["login"]
self.base_ref = self._pr_data["head"]["ref"]
except KeyError:
pprint.pprint(self._pr_data)
raise
self.description = "%s/%s" % (self.user_login, self.base_ref)
self.jira_issue = self._get_jira()
def show(self):
print("\n=== Pull Request #%s ===" % self.number)
print("title\t%s\nsource\t%s\ntarget\t%s\nurl\t%s"
% (self.title, self.description, self.target_ref, self.url))
@property
def is_merged(self):
return bool(self._pr_data["merged"])
@property
def is_mergeable(self):
return bool(self._pr_data["mergeable"])
def _get_jira(self):
jira_id = None
for project, regex in PR_TITLE_REGEXEN:
m = regex.search(self.title)
if m:
jira_id = m.group(1)
break
if jira_id is None:
options = ' or '.join('{0}-XXX'.format(project)
for project in SUPPORTED_PROJECTS)
self.cmd.fail("PR title should be prefixed by a jira id "
"{0}, but found {1}".format(options, self.title))
return JiraIssue(self.con, jira_id, project, self.cmd)
def merge(self, target_ref='master'):
"""
merge the requested PR and return the merge hash
"""
pr_branch_name = "%s_MERGE_PR_%s" % (BRANCH_PREFIX, self.number)
target_branch_name = "%s_MERGE_PR_%s_%s" % (BRANCH_PREFIX,
self.number,
target_ref.upper())
run_cmd("git fetch %s pull/%s/head:%s" % (self.git_remote,
self.number,
pr_branch_name))
run_cmd("git fetch %s %s:%s" % (self.git_remote, target_ref,
target_branch_name))
run_cmd("git checkout %s" % target_branch_name)
had_conflicts = False
try:
run_cmd(['git', 'merge', pr_branch_name, '--squash'])
except Exception as e:
msg = ("Error merging: %s\nWould you like to "
"manually fix-up this merge?" % e)
self.cmd.continue_maybe(msg)
msg = ("Okay, please fix any conflicts and 'git add' "
"conflicting files... Finished?")
self.cmd.continue_maybe(msg)
had_conflicts = True
commit_authors = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
'--pretty=format:%an <%ae>']).split("\n")
distinct_authors = sorted(set(commit_authors),
key=lambda x: commit_authors.count(x),
reverse=True)
for i, author in enumerate(distinct_authors):
print("Author {}: {}".format(i + 1, author))
if len(distinct_authors) > 1:
primary_author = self.cmd.prompt(
"Enter primary author in the format of "
"\"name <email>\" [%s]: " % distinct_authors[0])
if primary_author == "":
primary_author = distinct_authors[0]
else:
# When primary author is specified manually, de-dup it from
# author list and put it at the head of author list.
distinct_authors = [x for x in distinct_authors
if x != primary_author]
distinct_authors = [primary_author] + distinct_authors
else:
# If there is only one author, do not prompt for a lead author
primary_author = distinct_authors[0]
commits = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name,
'--pretty=format:%h <%an> %s']).split("\n\n")
merge_message_flags = []
merge_message_flags += ["-m", self.title]
if self.body is not None:
merge_message_flags += ["-m", self.body]
committer_name = run_cmd("git config --get user.name").strip()
committer_email = run_cmd("git config --get user.email").strip()
authors = ("Authored-by:" if len(distinct_authors) == 1
else "Lead-authored-by:")
authors += " %s" % (distinct_authors.pop(0))
if len(distinct_authors) > 0:
authors += "\n" + "\n".join(["Co-authored-by: %s" % a
for a in distinct_authors])
authors += "\n" + "Signed-off-by: %s <%s>" % (committer_name,
committer_email)
if had_conflicts:
committer_name = run_cmd("git config --get user.name").strip()
committer_email = run_cmd("git config --get user.email").strip()
message = ("This patch had conflicts when merged, "
"resolved by\nCommitter: %s <%s>" %
(committer_name, committer_email))
merge_message_flags += ["-m", message]
# The string "Closes #%s" string is required for GitHub to correctly
# close the PR
merge_message_flags += [
"-m",
"Closes #%s from %s and squashes the following commits:"
% (self.number, self.description)]
for c in commits:
stripped_message = strip_ci_directives(c).strip()
merge_message_flags += ["-m", stripped_message]
merge_message_flags += ["-m", authors]
if DEBUG:
print("\n".join(merge_message_flags))
run_cmd(['git', 'commit',
'--no-verify', # do not run commit hooks
'--author="%s"' % primary_author] +
merge_message_flags)
self.cmd.continue_maybe("Merge complete (local ref %s). Push to %s?"
% (target_branch_name, self.git_remote))
try:
push_cmd = ('git push %s %s:%s' % (self.git_remote,
target_branch_name,
target_ref))
if DEBUG:
print(push_cmd)
else:
run_cmd(push_cmd)
except Exception as e:
clean_up()
self.cmd.fail("Exception while pushing: %s" % e)
merge_hash = run_cmd("git rev-parse %s" % target_branch_name)[:8]
clean_up()
print("Pull request #%s merged!" % self.number)
print("Merge hash: %s" % merge_hash)
return merge_hash
def prompt_for_fix_version(cmd, jira_issue):
(all_versions,
default_fix_versions) = jira_issue.get_candidate_fix_versions()
default_fix_versions = ",".join(default_fix_versions)
issue_fix_versions = cmd.prompt("Enter comma-separated "
"fix version(s) [%s]: "
% default_fix_versions)
if issue_fix_versions == "":
issue_fix_versions = default_fix_versions
issue_fix_versions = issue_fix_versions.replace(" ", "").split(",")
def get_version_json(version_str):
return [x for x in all_versions if x.name == version_str][0].raw
return [get_version_json(v) for v in issue_fix_versions]
CONFIG_FILE = "~/.config/arrow/merge.conf"
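# Example contents (illustrative; read by get_credentials() below):
#
#   [jira]
#   username = jdoe
#   password = secret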
def load_configuration():
config = configparser.ConfigParser()
config.read(os.path.expanduser(CONFIG_FILE))
return config
def get_credentials(cmd):
username, password = None, None
config = load_configuration()
if "jira" in config.sections():
username = config["jira"].get("username")
password = config["jira"].get("password")
# Fallback to environment variables
if not username:
username = os.environ.get("APACHE_JIRA_USERNAME")
if not password:
password = os.environ.get("APACHE_JIRA_PASSWORD")
# Fallback to user tty prompt
if not username:
username = cmd.prompt("Env APACHE_JIRA_USERNAME not set, "
"please enter your JIRA username:")
if not password:
password = cmd.getpass("Env APACHE_JIRA_PASSWORD not set, "
"please enter your JIRA password:")
return (username, password)
def connect_jira(cmd):
return jira.client.JIRA({'server': JIRA_API_BASE},
basic_auth=get_credentials(cmd))
def get_pr_num():
if len(sys.argv) == 2:
return sys.argv[1]
return input("Which pull request would you like to merge? (e.g. 34): ")
def cli():
# Location of your Arrow git clone
ARROW_HOME = os.path.abspath(os.path.dirname(__file__))
PROJECT_NAME = os.environ.get('ARROW_PROJECT_NAME') or 'arrow'
print("ARROW_HOME = " + ARROW_HOME)
print("PROJECT_NAME = " + PROJECT_NAME)
cmd = CommandInput()
pr_num = get_pr_num()
os.chdir(ARROW_HOME)
github_api = GitHubAPI(PROJECT_NAME)
jira_con = connect_jira(cmd)
pr = PullRequest(cmd, github_api, PR_REMOTE_NAME, jira_con, pr_num)
if pr.is_merged:
print("Pull request %s has already been merged")
sys.exit(0)
if not pr.is_mergeable:
msg = ("Pull request %s is not mergeable in its current form.\n"
% pr_num + "Continue? (experts only!)")
cmd.continue_maybe(msg)
pr.show()
cmd.continue_maybe("Proceed with merging pull request #%s?" % pr_num)
# merged hash not used
pr.merge()
cmd.continue_maybe("Would you like to update the associated JIRA?")
jira_comment = (
"Issue resolved by pull request %s\n[%s/%s]"
% (pr_num,
"https://github.com/apache/" + PROJECT_NAME + "/pull",
pr_num))
fix_versions_json = prompt_for_fix_version(cmd, pr.jira_issue)
pr.jira_issue.resolve(fix_versions_json, jira_comment)
if __name__ == '__main__':
    cli()
|
apache-2.0
|
dCache/jglobus-1.8
|
src/org/globus/gsi/gssapi/net/GssOutputStream.java
|
2975
|
/*
* Copyright 1999-2006 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.globus.gsi.gssapi.net;
import java.io.OutputStream;
import java.io.IOException;
import org.globus.common.ChainedIOException;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
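/**
 * Buffers data written by the application and wraps it with the GSS security
 * context; subclasses implement {@link #flush()} to frame the wrapped token
 * and write it to the underlying stream.
 */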
public abstract class GssOutputStream extends OutputStream {
private static Log logger =
LogFactory.getLog(GssOutputStream.class.getName());
protected OutputStream out;
protected GSSContext context;
protected boolean autoFlush = false;
protected byte [] buff;
protected int index;
public GssOutputStream(OutputStream out, GSSContext context) {
this(out, context, 16384);
}
public GssOutputStream(OutputStream out, GSSContext context, int size) {
this.out = out;
this.context = context;
this.buff = new byte[size];
this.index = 0;
}
public void setAutoFlush(boolean autoFlush) {
this.autoFlush = autoFlush;
}
public boolean getAutoFlush() {
return this.autoFlush;
}
public void write(int b)
throws IOException {
if (this.index == this.buff.length) {
flushData();
}
buff[index++] = (byte)b;
if (this.autoFlush) {
flushData();
}
}
public void write(byte[] data)
throws IOException {
write(data, 0, data.length);
}
public void write(byte [] data, int off, int len)
throws IOException {
int max;
while (len > 0) {
if (this.index + len > this.buff.length) {
max = (this.buff.length - this.index);
System.arraycopy(data, off, this.buff, this.index, max);
this.index += max;
flushData();
len -= max;
off += max;
} else {
System.arraycopy(data, off, this.buff, this.index, len);
this.index += len;
if (this.autoFlush) {
flushData();
}
break;
}
}
}
protected byte[] wrap()
throws IOException {
try {
return context.wrap(this.buff, 0, this.index, null);
} catch (GSSException e) {
throw new ChainedIOException("wrap failed", e);
}
}
public abstract void flush()
throws IOException;
private void flushData()
throws IOException {
flush();
this.index = 0;
}
public void close()
throws IOException {
logger.debug("close");
flushData();
this.out.close();
}
}
|
apache-2.0
|
FirebasePrivate/firebase-js-sdk-1
|
packages/messaging/test/tokenDetailsModel-deleteToken.test.ts
|
4123
|
/**
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { assert } from 'chai';
import makeFakeSubscription from './make-fake-subscription';
import { deleteDatabase } from './testing-utils/db-helper';
import Errors from '../src/models/errors';
import TokenDetailsModel from '../src/models/token-details-model';
import arrayBufferToBase64 from '../src/helpers/array-buffer-to-base64';
describe('Firebase Messaging > TokenDetailsModel.deleteToken()', function() {
const EXAMPLE_INPUT = {
swScope: '/example-scope',
vapidKey:
'BNJxw7sCGkGLOUP2cawBaBXRuWZ3lw_PmQMgreLVVvX_b' +
'4emEWVURkCF8fUTHEFe2xrEgTt5ilh5xD94v0pFe_I',
subscription: makeFakeSubscription(),
fcmSenderId: '1234567',
fcmToken: 'qwerty',
fcmPushSet: '7654321'
};
let globalTokenModel;
const cleanUp = () => {
let promises = [];
if (globalTokenModel) {
promises.push(globalTokenModel.closeDatabase());
}
return Promise.all(promises)
.then(() => deleteDatabase(TokenDetailsModel.dbName))
.then(() => (globalTokenModel = null));
};
beforeEach(function() {
return cleanUp();
});
after(function() {
return cleanUp();
});
it('should handle no input', function() {
globalTokenModel = new TokenDetailsModel();
return globalTokenModel.deleteToken().then(
() => {
throw new Error('Expected this to throw an error due to no token');
},
err => {
assert.equal(
'messaging/' + Errors.codes.INVALID_DELETE_TOKEN,
err.code
);
}
);
});
it('should handle empty string', function() {
globalTokenModel = new TokenDetailsModel();
return globalTokenModel.deleteToken('').then(
() => {
throw new Error('Expected this to throw an error due to no token');
},
err => {
assert.equal(
'messaging/' + Errors.codes.INVALID_DELETE_TOKEN,
err.code
);
}
);
});
it('should delete current token', function() {
globalTokenModel = new TokenDetailsModel();
return globalTokenModel
.saveTokenDetails(EXAMPLE_INPUT)
.then(() => {
return globalTokenModel.deleteToken(EXAMPLE_INPUT.fcmToken);
})
.then(details => {
const subscriptionKeys = ['endpoint', 'auth', 'p256dh'];
const subscriptionValues = {
endpoint: EXAMPLE_INPUT.subscription.endpoint,
auth: arrayBufferToBase64(EXAMPLE_INPUT.subscription.getKey('auth')),
p256dh: arrayBufferToBase64(
EXAMPLE_INPUT.subscription.getKey('p256dh')
)
};
subscriptionKeys.forEach(keyName => {
assert.equal(details[keyName], subscriptionValues[keyName]);
});
Object.keys(details).forEach(keyName => {
if (subscriptionKeys.indexOf(keyName) !== -1) {
return;
}
assert.equal(details[keyName], EXAMPLE_INPUT[keyName]);
});
return globalTokenModel.getTokenDetailsFromToken(
EXAMPLE_INPUT.fcmToken
);
})
.then(tokenDetails => {
assert.equal(null, tokenDetails);
});
});
  it('should handle deleting a non-existent token', function() {
globalTokenModel = new TokenDetailsModel();
return globalTokenModel.deleteToken('bad-token').then(
() => {
        throw new Error('Expected this delete to throw an error.');
},
err => {
assert.equal(
'messaging/' + Errors.codes.DELETE_TOKEN_NOT_FOUND,
err.code
);
}
);
});
});
|
apache-2.0
|
AbraaoAlves/tsd
|
src/tsd/data/DefVersion.ts
|
2055
|
/// <reference path="../_ref.d.ts" />
'use strict';
import VError = require('verror');
import assertVar = require('../../xm/assertVar');
import Def = require('./Def');
import DefBlob = require('./DefBlob');
import DefCommit = require('./DefCommit');
import DefInfo = require('./DefInfo');
/*
DefVersion: version of a definition (the file content in the repo)
NOTE: for practical reasons linked to a commit (tree) instead of a blob
*/
// TODO rename DefVersion to DefRevision / DefRev
class DefVersion {
// TODO swap for non-writable properties?
private _def: Def = null;
private _commit: DefCommit = null;
// NOTE blobs are impractical to work with: api rate-limits and no access over raw.github
private _blob: DefBlob = null;
// parse from tags
// TODO shouldn't this be DefVersion? from same commit? (still could easily get the head)
dependencies: Def[] = [];
solved: boolean = false;
// parsed from header
info: DefInfo;
constructor(def: Def, commit: DefCommit) {
assertVar(def, Def, 'def');
assertVar(commit, DefCommit, 'commit');
this._def = def;
this._commit = commit;
}
setContent(blob: DefBlob): void {
assertVar(blob, DefBlob, 'blob');
if (this._blob) {
throw new VError('already got a blob %s != %s', this._blob.sha, blob.sha);
}
this._blob = blob;
}
hasContent(): boolean {
return (this._blob && this._blob.hasContent());
}
get key(): string {
if (!this._def || !this._commit) {
return null;
}
return this._def.path + '-' + this._commit.commitSha;
}
get def(): Def {
return this._def;
}
get commit(): DefCommit {
return this._commit;
}
get blob(): DefBlob {
return this._blob;
}
// human friendly
get blobShaShort(): string {
return this._blob ? this._blob.shaShort : '<no blob>';
}
toString(): string {
var str = '';
str += (this._def ? this._def.path : '<no def>');
str += ' : ' + (this._commit ? this._commit.commitShort : '<no commit>');
str += ' : ' + (this._blob ? this._blob.shaShort : '<no blob>');
return str;
}
}
export = DefVersion;
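// A minimal usage sketch (an assumption, not part of the original file): a
// revision is keyed by its definition path plus commit sha, and content
// arrives later as a blob via setContent().
//
//   var version = new DefVersion(def, commit);
//   version.setContent(blob);   // throws if a different blob was already set
//   console.log(version.key);   // e.g. '<def.path>-<commitSha>'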
|
apache-2.0
|
Jason-Gew/Java_Modules
|
KafkaLogger/KafkaLogger (Deprecated)/src/main/java/gew/kafka/logger/kafka/ProducerBuilder.java
|
2702
|
package gew.kafka.logger.kafka;
/**
* Builder class for instantiating the Kafka Producer...
* @author Jason/Ge Wu
*/
public class ProducerBuilder
{
private String server;
private String topic;
private String clientId;
private Integer retries;
private String acknowledge;
private String serializeClass;
private Boolean enableMessageQueue;
public ProducerBuilder() { }
public ProducerBuilder setServer(String server) {
this.server = server;
return this;
}
public ProducerBuilder setTopic(String topic) {
this.topic = topic;
return this;
}
public ProducerBuilder setClientId(String clientId) {
this.clientId = clientId;
return this;
}
public ProducerBuilder setRetries(Integer retries) {
this.retries = retries;
return this;
}
public ProducerBuilder setAcknowledge(String acknowledge) {
this.acknowledge = acknowledge;
return this;
}
public ProducerBuilder setSerializeClass(String serializeClass) {
this.serializeClass = serializeClass;
return this;
}
public ProducerBuilder setEnableMessageQueue(Boolean enableMessageQueue) {
this.enableMessageQueue = enableMessageQueue;
return this;
}
public Producer build()
{
Producer producer = new Producer();
if(this.server != null && !this.server.isEmpty()) {
producer.server = this.server;
} else {
throw new IllegalArgumentException("Invalid Kafka Bootstrap Server Address");
}
if(this.topic != null && !this.topic.isEmpty()) {
producer.topic = this.topic;
}
if(this.clientId != null && !this.clientId.isEmpty()) {
producer.clientId = this.clientId;
} else {
producer.clientId = "Kafka-Producer-" + System.currentTimeMillis()/1000000;
}
if(this.retries!= null && this.retries >= 0)
producer.retries = this.retries.toString();
else
producer.retries = "0";
if(this.acknowledge != null && !this.acknowledge.isEmpty())
producer.acknowledge = this.acknowledge;
else
producer.acknowledge = "0";
if(this.serializeClass != null && !this.serializeClass.isEmpty())
producer.serializeClass = this.serializeClass;
else
throw new IllegalArgumentException("Invalid Serialization Class");
if(this.enableMessageQueue != null)
producer.enableMessageQueue = this.enableMessageQueue;
else
producer.enableMessageQueue = false;
return producer;
}
}
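// A minimal usage sketch (not from the original source; the serializer class
// name shown is an assumption). build() requires a non-empty server and
// serialize class, so both are set here:
//
//   Producer producer = new ProducerBuilder()
//           .setServer("localhost:9092")
//           .setTopic("logs")
//           .setRetries(3)
//           .setAcknowledge("all")
//           .setSerializeClass("org.apache.kafka.common.serialization.StringSerializer")
//           .build();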
|
apache-2.0
|
dolotech/bullfight
|
src/code.google.com/p/go.tools/pointer/testdata/flow.go
|
997
|
// +build ignore
package main
// Demonstration of directionality of flow edges.
func f1() {}
func f2() {}
var somepred bool
// Tracking functions.
func flow1() {
s := f1
p := f2
q := p
r := q
if somepred {
r = s
}
print(s) // @pointsto main.f1
print(p) // @pointsto main.f2
print(q) // @pointsto main.f2
print(r) // @pointsto main.f1 | main.f2
}
// Tracking concrete types in interfaces.
func flow2() {
var s interface{} = 1
var p interface{} = "foo"
q := p
r := q
if somepred {
r = s
}
print(s) // @types int
print(p) // @types string
print(q) // @types string
print(r) // @types int | string
}
var g1, g2 int
// Tracking addresses of globals.
func flow3() {
s := &g1
p := &g2
q := p
r := q
if somepred {
r = s
}
print(s) // @pointsto main.g1
print(p) // @pointsto main.g2
print(q) // @pointsto main.g2
print(r) // @pointsto main.g2 | main.g1
}
func main() {
flow1()
flow2()
flow3()
}
|
apache-2.0
|
onyxbits/raccoon4
|
src/main/java/de/onyxbits/raccoon/gui/HyperTextPane.java
|
3159
|
/*
* Copyright 2015 Patrick Ahlbrecht
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.onyxbits.raccoon.gui;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.net.URISyntaxException;
import javax.swing.JEditorPane;
import javax.swing.event.HyperlinkEvent;
import javax.swing.event.HyperlinkEvent.EventType;
import de.onyxbits.weave.swing.BrowseAction;
/**
* A subclass of {@link JEditorPane} that supports smooth rendering of HTML text
* and clickable links.
*
* @author patrick
*
*/
public class HyperTextPane extends JEditorPane {
/**
*
*/
private static final long serialVersionUID = 1L;
private boolean tooltip;
public HyperTextPane(String txt) {
super("text/html", txt);
setEditable(false);
}
@Override
public void paintComponent(Graphics g) {
Graphics2D graphics2d = (Graphics2D) g;
Object tmp = graphics2d.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
graphics2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
RenderingHints.VALUE_ANTIALIAS_ON);
super.paintComponent(g);
graphics2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, tmp);
}
/**
* Only draw the content (behave like a JLabel).
*
* @return this reference for chaining.
*/
public HyperTextPane withTransparency() {
setBackground(new Color(0, 0, 0, 0));
setOpaque(false);
putClientProperty(JEditorPane.HONOR_DISPLAY_PROPERTIES, Boolean.TRUE);
setFont(new Font(Font.DIALOG,Font.PLAIN,14));
return this;
}
/**
* Force a fixed width (and a dynamic height).
*
* @param width
* width of the pane
* @return this reference for chaining.
*/
public HyperTextPane withWidth(int width) {
setSize(width, Short.MAX_VALUE);
revalidate();
setPreferredSize(new Dimension(width, getPreferredSize().height));
return this;
}
/**
* Show the target URL in a tooltip when hovering over a link
*
* @return this reference for chaining.
*/
public HyperTextPane withLinkToolTip() {
tooltip = true;
return this;
}
@Override
public void fireHyperlinkUpdate(HyperlinkEvent e) {
if (tooltip) {
if (e.getEventType() == EventType.ENTERED) {
try {
setToolTipText(e.getURL().toURI().toString());
}
catch (URISyntaxException e1) {
setToolTipText(null);
}
}
if (e.getEventType() == EventType.EXITED) {
setToolTipText(null);
}
}
if (e.getEventType() == EventType.ACTIVATED) {
try {
BrowseAction.open(e.getURL().toURI());
}
catch (Exception e1) {
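        // Ignored: the URI may be malformed or no desktop browser is
        // available; there is nothing sensible to do here.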
}
}
super.fireHyperlinkUpdate(e);
}
}
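// A hedged usage sketch (not in the original source): a transparent,
// fixed-width pane with link tooltips, chained via the with* methods above.
//
//   HyperTextPane pane = new HyperTextPane(
//           "<html>See <a href='https://example.com'>docs</a></html>")
//           .withTransparency()
//           .withWidth(300)
//           .withLinkToolTip();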
|
apache-2.0
|
kironuniversity/main-website
|
plugins/ryanchung/news/Plugin.php
|
1556
|
<?php namespace RyanChung\News;
use System\Classes\PluginBase;
use Backend;
/**
 * News Plugin Information File
*/
class Plugin extends PluginBase
{
/**
* Returns information about this plugin.
*
* @return array
*/
public function pluginDetails()
{
return [
'name' => 'News',
'description' => 'Plugin for news management',
'author' => 'Ryan Chung',
'icon' => 'icon-newspaper-o'
];
}
public function registerComponents()
{
return [
'RyanChung\News\Components\Newslist' => 'Newslist',
'RyanChung\News\Components\Start' => 'StartNewsList'
];
}
public function registerPermissions()
{
return [
'ryanchung.news.edit_news' => ['label' => 'Edit News']
];
}
public function registerNavigation()
{
return [
'News' => [
'label' => 'News',
'url' => Backend::url('ryanchung/news/articles'),
'icon' => 'icon-newspaper-o',
'permissions' => ['ryanchung.news.*'],
'order' => 600,
'sideMenu' => [
'articles' => [
'label' => 'Articles',
'icon' => 'icon-newspaper-o',
'url' => Backend::url('ryanchung/news/articles'),
'permissions' => ['ryanchung.news.*']
]
]
]
];
}
}
|
apache-2.0
|
coison/ebstores
|
ebstores-bean-service/ebstores-bean-model/src/main/java/com/coison/model/Members.java
|
5353
|
package com.coison.model;
import java.io.Serializable;
import java.util.Date;
public class Members implements Serializable {
private Long id;
private String username;
private String password;
private String payPwd;
private Short userType;
private Short regType;
private String email;
private String salt;
private String secques;
private String nickName;
private String sex;
private String address;
private Date birthday;
private String constellation;
private Integer age;
private Integer job;
private String sign;
private String imgUrl;
private Byte yn;
private String recordCode;
private Integer regGift;
private String createBy;
private Date createDate;
private String updateBy;
private Date updateDate;
private String remarks;
private String delFlag;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username == null ? null : username.trim();
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password == null ? null : password.trim();
}
public String getPayPwd() {
return payPwd;
}
public void setPayPwd(String payPwd) {
this.payPwd = payPwd == null ? null : payPwd.trim();
}
public Short getUserType() {
return userType;
}
public void setUserType(Short userType) {
this.userType = userType;
}
public Short getRegType() {
return regType;
}
public void setRegType(Short regType) {
this.regType = regType;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email == null ? null : email.trim();
}
public String getSalt() {
return salt;
}
public void setSalt(String salt) {
this.salt = salt == null ? null : salt.trim();
}
public String getSecques() {
return secques;
}
public void setSecques(String secques) {
this.secques = secques == null ? null : secques.trim();
}
public String getNickName() {
return nickName;
}
public void setNickName(String nickName) {
this.nickName = nickName == null ? null : nickName.trim();
}
public String getSex() {
return sex;
}
public void setSex(String sex) {
this.sex = sex == null ? null : sex.trim();
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address == null ? null : address.trim();
}
public Date getBirthday() {
return birthday;
}
public void setBirthday(Date birthday) {
this.birthday = birthday;
}
public String getConstellation() {
return constellation;
}
public void setConstellation(String constellation) {
this.constellation = constellation == null ? null : constellation.trim();
}
public Integer getAge() {
return age;
}
public void setAge(Integer age) {
this.age = age;
}
public Integer getJob() {
return job;
}
public void setJob(Integer job) {
this.job = job;
}
public String getSign() {
return sign;
}
public void setSign(String sign) {
this.sign = sign == null ? null : sign.trim();
}
public String getImgUrl() {
return imgUrl;
}
public void setImgUrl(String imgUrl) {
this.imgUrl = imgUrl == null ? null : imgUrl.trim();
}
public Byte getYn() {
return yn;
}
public void setYn(Byte yn) {
this.yn = yn;
}
public String getRecordCode() {
return recordCode;
}
public void setRecordCode(String recordCode) {
this.recordCode = recordCode == null ? null : recordCode.trim();
}
public Integer getRegGift() {
return regGift;
}
public void setRegGift(Integer regGift) {
this.regGift = regGift;
}
public String getCreateBy() {
return createBy;
}
public void setCreateBy(String createBy) {
this.createBy = createBy == null ? null : createBy.trim();
}
public Date getCreateDate() {
return createDate;
}
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
public String getUpdateBy() {
return updateBy;
}
public void setUpdateBy(String updateBy) {
this.updateBy = updateBy == null ? null : updateBy.trim();
}
public Date getUpdateDate() {
return updateDate;
}
public void setUpdateDate(Date updateDate) {
this.updateDate = updateDate;
}
public String getRemarks() {
return remarks;
}
public void setRemarks(String remarks) {
this.remarks = remarks == null ? null : remarks.trim();
}
public String getDelFlag() {
return delFlag;
}
public void setDelFlag(String delFlag) {
this.delFlag = delFlag == null ? null : delFlag.trim();
}
}
|
apache-2.0
|
googleads/google-ads-dotnet
|
src/V9/Types/AppPaymentModelType.g.cs
|
9072
|
// <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/enums/app_payment_model_type.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Ads.GoogleAds.V9.Enums {
/// <summary>Holder for reflection information generated from google/ads/googleads/v9/enums/app_payment_model_type.proto</summary>
public static partial class AppPaymentModelTypeReflection {
#region Descriptor
/// <summary>File descriptor for google/ads/googleads/v9/enums/app_payment_model_type.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static AppPaymentModelTypeReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"Cjpnb29nbGUvYWRzL2dvb2dsZWFkcy92OS9lbnVtcy9hcHBfcGF5bWVudF9t",
"b2RlbF90eXBlLnByb3RvEh1nb29nbGUuYWRzLmdvb2dsZWFkcy52OS5lbnVt",
"cxocZ29vZ2xlL2FwaS9hbm5vdGF0aW9ucy5wcm90byJYChdBcHBQYXltZW50",
"TW9kZWxUeXBlRW51bSI9ChNBcHBQYXltZW50TW9kZWxUeXBlEg8KC1VOU1BF",
"Q0lGSUVEEAASCwoHVU5LTk9XThABEggKBFBBSUQQHkLtAQohY29tLmdvb2ds",
"ZS5hZHMuZ29vZ2xlYWRzLnY5LmVudW1zQhhBcHBQYXltZW50TW9kZWxUeXBl",
"UHJvdG9QAVpCZ29vZ2xlLmdvbGFuZy5vcmcvZ2VucHJvdG8vZ29vZ2xlYXBp",
"cy9hZHMvZ29vZ2xlYWRzL3Y5L2VudW1zO2VudW1zogIDR0FBqgIdR29vZ2xl",
"LkFkcy5Hb29nbGVBZHMuVjkuRW51bXPKAh1Hb29nbGVcQWRzXEdvb2dsZUFk",
"c1xWOVxFbnVtc+oCIUdvb2dsZTo6QWRzOjpHb29nbGVBZHM6OlY5OjpFbnVt",
"c2IGcHJvdG8z"));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::Google.Api.AnnotationsReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Ads.GoogleAds.V9.Enums.AppPaymentModelTypeEnum), global::Google.Ads.GoogleAds.V9.Enums.AppPaymentModelTypeEnum.Parser, null, null, new[]{ typeof(global::Google.Ads.GoogleAds.V9.Enums.AppPaymentModelTypeEnum.Types.AppPaymentModelType) }, null, null)
}));
}
#endregion
}
#region Messages
/// <summary>
/// Represents a criterion for targeting paid apps.
/// </summary>
public sealed partial class AppPaymentModelTypeEnum : pb::IMessage<AppPaymentModelTypeEnum>
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
, pb::IBufferMessage
#endif
{
private static readonly pb::MessageParser<AppPaymentModelTypeEnum> _parser = new pb::MessageParser<AppPaymentModelTypeEnum>(() => new AppPaymentModelTypeEnum());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public static pb::MessageParser<AppPaymentModelTypeEnum> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Ads.GoogleAds.V9.Enums.AppPaymentModelTypeReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public AppPaymentModelTypeEnum() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public AppPaymentModelTypeEnum(AppPaymentModelTypeEnum other) : this() {
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public AppPaymentModelTypeEnum Clone() {
return new AppPaymentModelTypeEnum(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public override bool Equals(object other) {
return Equals(other as AppPaymentModelTypeEnum);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public bool Equals(AppPaymentModelTypeEnum other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public override int GetHashCode() {
int hash = 1;
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public void WriteTo(pb::CodedOutputStream output) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
output.WriteRawMessage(this);
#else
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) {
if (_unknownFields != null) {
_unknownFields.WriteTo(ref output);
}
}
#endif
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public int CalculateSize() {
int size = 0;
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public void MergeFrom(AppPaymentModelTypeEnum other) {
if (other == null) {
return;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public void MergeFrom(pb::CodedInputStream input) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
input.ReadRawMessage(this);
#else
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
}
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input);
break;
}
}
}
#endif
#region Nested types
/// <summary>Container for nested types declared in the AppPaymentModelTypeEnum message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
[global::System.CodeDom.Compiler.GeneratedCode("protoc", null)]
public static partial class Types {
/// <summary>
/// Enum describing possible app payment models.
/// </summary>
public enum AppPaymentModelType {
/// <summary>
/// Not specified.
/// </summary>
[pbr::OriginalName("UNSPECIFIED")] Unspecified = 0,
/// <summary>
/// Used for return value only. Represents value unknown in this version.
/// </summary>
[pbr::OriginalName("UNKNOWN")] Unknown = 1,
/// <summary>
/// Represents paid-for apps.
/// </summary>
[pbr::OriginalName("PAID")] Paid = 30,
}
}
#endregion
}
#endregion
}
#endregion Designer generated code
|
apache-2.0
|
jageall/IdentityServer.v3.AdminModule
|
Source/MongoDb.AdminModule/ReadScopes.cs
|
2073
|
/*
* Copyright 2014, 2015 James Geall
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using System.Management.Automation;
using IdentityServer3.Core.Models;
namespace IdentityServer3.Admin.MongoDb.Powershell
{
[Cmdlet(VerbsCommon.Get, "Scopes")]
public class ReadScopes : MongoCmdlet
{
[Parameter(HelpMessage = "Gets the predefined standard scopes from identity server. These need to be persisted into the database using Set-Scope if you want them available to the application at runtime")]
public SwitchParameter Predefined { get; set; }
protected override void BeginProcessing()
{
if(!Predefined)
base.BeginProcessing();
}
protected override void ProcessRecord()
{
IEnumerable<Scope> scopes;
if (Predefined)
{
var builtin = BuiltInScopes();
scopes = builtin;
}
else
{
scopes = ScopeStore.GetScopesAsync().Result;
}
foreach (var scope in scopes)
{
WriteObject(scope);
}
}
public static IEnumerable<Scope> BuiltInScopes()
{
foreach (var scope in StandardScopes.All)
{
yield return scope;
}
yield return StandardScopes.AllClaims;
yield return StandardScopes.OfflineAccess;
yield return StandardScopes.Roles;
}
}
}
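// A hedged usage sketch (PowerShell): per the HelpMessage above, the
// predefined scopes can be persisted into the database by piping them to
// Set-Scope:
//
//   Get-Scopes -Predefined | Set-Scope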
|
apache-2.0
|
jroper/netty
|
codec/src/main/java/io/netty/handler/codec/FixedLengthFrameDecoder.java
|
2949
|
/*
* Copyright 2011 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec;
import io.netty.buffer.ChannelBuffer;
import io.netty.buffer.ChannelBuffers;
import io.netty.channel.ChannelBufferHolder;
import io.netty.channel.ChannelBufferHolders;
import io.netty.channel.ChannelInboundHandlerContext;
/**
* A decoder that splits the received {@link ChannelBuffer}s by the fixed number
* of bytes. For example, if you received the following four fragmented packets:
* <pre>
* +---+----+------+----+
* | A | BC | DEFG | HI |
* +---+----+------+----+
* </pre>
* A {@link FixedLengthFrameDecoder}{@code (3)} will decode them into the
* following three packets with the fixed length:
* <pre>
* +-----+-----+-----+
* | ABC | DEF | GHI |
* +-----+-----+-----+
* </pre>
*/
public class FixedLengthFrameDecoder extends StreamToMessageDecoder<Object> {
private final int frameLength;
private final boolean allocateFullBuffer;
/**
* Calls {@link #FixedLengthFrameDecoder(int, boolean)} with <code>false</code>
*/
public FixedLengthFrameDecoder(int frameLength) {
this(frameLength, false);
}
/**
* Creates a new instance.
*
* @param frameLength the length of the frame
* @param allocateFullBuffer <code>true</code> if the cumulative {@link ChannelBuffer} should use the {@link #frameLength} as its initial size
*/
public FixedLengthFrameDecoder(int frameLength, boolean allocateFullBuffer) {
if (frameLength <= 0) {
throw new IllegalArgumentException(
"frameLength must be a positive integer: " + frameLength);
}
this.frameLength = frameLength;
this.allocateFullBuffer = allocateFullBuffer;
}
@Override
public ChannelBufferHolder<Byte> newInboundBuffer(
ChannelInboundHandlerContext<Byte> ctx) throws Exception {
if (allocateFullBuffer) {
return ChannelBufferHolders.byteBuffer(ChannelBuffers.dynamicBuffer(frameLength));
} else {
return super.newInboundBuffer(ctx);
}
}
@Override
public Object decode(ChannelInboundHandlerContext<Byte> ctx, ChannelBuffer in) throws Exception {
if (in.readableBytes() < frameLength) {
return null;
} else {
return in.readBytes(frameLength);
}
}
}
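// A minimal pipeline sketch (an assumption, not part of the original file):
// splitting the inbound byte stream into fixed 3-byte frames, as in the
// class javadoc above.
//
//   ChannelPipeline pipeline = ...;
//   pipeline.addLast("frameDecoder", new FixedLengthFrameDecoder(3));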
|
apache-2.0
|
dtroyer/golang-client
|
objectstorage/v1/objectstorage.go
|
6614
|
// Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package objectstorage
import (
"io/ioutil"
"net/http"
"net/url"
"strconv"
"git.openstack.org/openstack/golang-client/openstack"
"git.openstack.org/openstack/golang-client/util"
)
var zeroByte = &([]byte{}) //pointer to empty []byte
//ListContainers calls the OpenStack list containers API using
//previously obtained token.
//"limit" and "marker" corresponds to the API's "limit" and "marker".
//"url" can be regular storage or cdn-enabled storage URL.
//It returns []byte which then needs to be unmarshalled to decode the JSON.
func ListContainers(session *openstack.Session, limit int64, marker, url string) ([]byte, error) {
return ListObjects(session, limit, marker, "", "", "", url)
}
//GetAccountMeta calls the OpenStack retrieve account metadata API using
//previously obtained token.
func GetAccountMeta(session *openstack.Session, url string) (http.Header, error) {
return GetObjectMeta(session, url)
}
//DeleteContainer calls the OpenStack delete container API using
//previously obtained token.
func DeleteContainer(session *openstack.Session, url string) error {
return DeleteObject(session, url)
}
//GetContainerMeta calls the OpenStack retrieve object metadata API
//using previously obtained token.
//url can be regular storage or CDN-enabled storage URL.
func GetContainerMeta(session *openstack.Session, url string) (http.Header, error) {
return GetObjectMeta(session, url)
}
//SetContainerMeta calls the OpenStack API to create / update meta data
//for container using previously obtained token.
//url can be regular storage or CDN-enabled storage URL.
func SetContainerMeta(session *openstack.Session, url string, headers http.Header) (err error) {
return SetObjectMeta(session, url, headers)
}
//PutContainer calls the OpenStack API to create / update
//container using previously obtained token.
func PutContainer(session *openstack.Session, url string, headers http.Header) error {
return PutObject(session, zeroByte, url, headers)
}
//ListObjects calls the OpenStack list object API using previously
//obtained token. "Limit", "marker", "prefix", "path", "delim" corresponds
//to the API's "limit", "marker", "prefix", "path", and "delimiter".
func ListObjects(session *openstack.Session, limit int64,
marker, prefix, path, delim, conURL string) ([]byte, error) {
var query url.Values = url.Values{}
query.Add("format", "json")
if limit > 0 {
query.Add("limit", strconv.FormatInt(limit, 10))
}
if marker != "" {
query.Add("marker", url.QueryEscape(marker))
}
if prefix != "" {
query.Add("prefix", url.QueryEscape(prefix))
}
if path != "" {
query.Add("path", url.QueryEscape(path))
}
if delim != "" {
query.Add("delimiter", url.QueryEscape(delim))
}
resp, err := session.Get(conURL, &query, nil)
if err != nil {
return nil, err
}
if err = util.CheckHTTPResponseStatusCode(resp); err != nil {
return nil, err
}
body, err := ioutil.ReadAll(resp.Body)
defer resp.Body.Close()
if err != nil {
return []byte{}, err
}
return body, nil
}
//PutObject calls the OpenStack create object API using previously
//obtained token.
//url can be regular storage or CDN-enabled storage URL.
func PutObject(session *openstack.Session, fContent *[]byte, url string, headers http.Header) (err error) {
resp, err := session.Put(url, nil, &headers, fContent)
if err != nil {
return err
}
return util.CheckHTTPResponseStatusCode(resp)
}
//CopyObject calls the OpenStack copy object API using previously obtained
//token. Note from API doc: "The destination container must exist before
//attempting the copy."
func CopyObject(session *openstack.Session, srcURL, destURL string) (err error) {
var headers http.Header = http.Header{}
headers.Add("Destination", destURL)
resp, err := session.Request("COPY", srcURL, nil, &headers, zeroByte)
if err != nil {
return err
}
return util.CheckHTTPResponseStatusCode(resp)
}
//DeleteObject calls the OpenStack delete object API using
//previously obtained token.
//
//Note from API doc: "A DELETE to a versioned object removes the current version
//of the object and replaces it with the next-most current version, moving it
//from the non-current container to the current." .. "If you want to completely
//remove an object and you have five total versions of it, you must DELETE it
//five times."
func DeleteObject(session *openstack.Session, url string) (err error) {
resp, err := session.Delete(url, nil, nil)
if err != nil {
return err
}
return util.CheckHTTPResponseStatusCode(resp)
}
//SetObjectMeta calls the OpenStack API to create/update meta data for
//object using previously obtained token.
func SetObjectMeta(session *openstack.Session, url string, headers http.Header) (err error) {
// headers.Add("X-Auth-Token", token)
resp, err := session.Post(url, nil, &headers, zeroByte)
if err != nil {
return err
}
return util.CheckHTTPResponseStatusCode(resp)
}
//GetObjectMeta calls the OpenStack retrieve object metadata API using
//previously obtained token.
func GetObjectMeta(session *openstack.Session, url string) (http.Header, error) {
resp, err := session.Head(url, nil, nil)
if err != nil {
return nil, err
}
return resp.Header, util.CheckHTTPResponseStatusCode(resp)
}
//GetObject calls the OpenStack retrieve object API using previously
//obtained token. It returns http.Header, object / file content downloaded
//from the server, and err.
//
//Since this implementation of GetObject retrieves header info, it
//effectively executes GetObjectMeta also in addition to getting the
//object content.
func GetObject(session *openstack.Session, url string) (http.Header, []byte, error) {
resp, err := session.Get(url, nil, nil)
if err != nil {
return nil, nil, err
}
if err = util.CheckHTTPResponseStatusCode(resp); err != nil {
return nil, nil, err
}
var body []byte
if body, err = ioutil.ReadAll(resp.Body); err != nil {
return nil, nil, err
}
resp.Body.Close()
return resp.Header, body, nil
}
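// A hedged usage sketch (not in the original file): listing the first 100
// containers with a previously authenticated session. Obtaining the session
// and storage URL is assumed; see the openstack package.
//
//	body, err := objectstorage.ListContainers(session, 100, "", storageURL)
//	if err != nil {
//		// handle error
//	}
//	// body is JSON (format=json is always requested); unmarshal as needed.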
|
apache-2.0
|
google-research/simclr
|
lars_optimizer.py
|
6037
|
# coding=utf-8
# Copyright 2020 The SimCLR Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions and classes related to optimization (weight updates)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
import tensorflow.compat.v1 as tf
EETA_DEFAULT = 0.001
class LARSOptimizer(tf.train.Optimizer):
"""Layer-wise Adaptive Rate Scaling for large batch training.
Introduced by "Large Batch Training of Convolutional Networks" by Y. You,
I. Gitman, and B. Ginsburg. (https://arxiv.org/abs/1708.03888)
"""
def __init__(self,
learning_rate,
momentum=0.9,
use_nesterov=False,
weight_decay=0.0,
exclude_from_weight_decay=None,
exclude_from_layer_adaptation=None,
classic_momentum=True,
eeta=EETA_DEFAULT,
name="LARSOptimizer"):
"""Constructs a LARSOptimizer.
Args:
learning_rate: A `float` for learning rate.
momentum: A `float` for momentum.
use_nesterov: A 'Boolean' for whether to use nesterov momentum.
weight_decay: A `float` for weight decay.
exclude_from_weight_decay: A list of `string` for variable screening, if
any of the string appears in a variable's name, the variable will be
excluded for computing weight decay. For example, one could specify
the list like ['batch_normalization', 'bias'] to exclude BN and bias
from weight decay.
exclude_from_layer_adaptation: Similar to exclude_from_weight_decay, but
for layer adaptation. If it is None, it will be defaulted the same as
exclude_from_weight_decay.
classic_momentum: A `boolean` for whether to use classic (or popular)
momentum. The learning rate is applied during the momentum update in
classic momentum, and after the momentum update in popular momentum.
eeta: A `float` for scaling of learning rate when computing trust ratio.
name: The name for the scope.
"""
super(LARSOptimizer, self).__init__(False, name)
self.learning_rate = learning_rate
self.momentum = momentum
self.weight_decay = weight_decay
self.use_nesterov = use_nesterov
self.classic_momentum = classic_momentum
self.eeta = eeta
self.exclude_from_weight_decay = exclude_from_weight_decay
# exclude_from_layer_adaptation is set to exclude_from_weight_decay if the
# arg is None.
if exclude_from_layer_adaptation:
self.exclude_from_layer_adaptation = exclude_from_layer_adaptation
else:
self.exclude_from_layer_adaptation = exclude_from_weight_decay
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
assignments = []
for (grad, param) in grads_and_vars:
if grad is None or param is None:
continue
param_name = param.op.name
v = tf.get_variable(
name=param_name + "/Momentum",
shape=param.shape.as_list(),
dtype=tf.float32,
trainable=False,
initializer=tf.zeros_initializer())
if self._use_weight_decay(param_name):
grad += self.weight_decay * param
if self.classic_momentum:
trust_ratio = 1.0
if self._do_layer_adaptation(param_name):
w_norm = tf.norm(param, ord=2)
g_norm = tf.norm(grad, ord=2)
trust_ratio = tf.where(
tf.greater(w_norm, 0), tf.where(
tf.greater(g_norm, 0), (self.eeta * w_norm / g_norm),
1.0),
1.0)
scaled_lr = self.learning_rate * trust_ratio
next_v = tf.multiply(self.momentum, v) + scaled_lr * grad
if self.use_nesterov:
update = tf.multiply(self.momentum, next_v) + scaled_lr * grad
else:
update = next_v
next_param = param - update
else:
next_v = tf.multiply(self.momentum, v) + grad
if self.use_nesterov:
update = tf.multiply(self.momentum, next_v) + grad
else:
update = next_v
trust_ratio = 1.0
if self._do_layer_adaptation(param_name):
w_norm = tf.norm(param, ord=2)
v_norm = tf.norm(update, ord=2)
trust_ratio = tf.where(
tf.greater(w_norm, 0), tf.where(
tf.greater(v_norm, 0), (self.eeta * w_norm / v_norm),
1.0),
1.0)
scaled_lr = trust_ratio * self.learning_rate
next_param = param - scaled_lr * update
assignments.extend([param.assign(next_param), v.assign(next_v)])
if global_step is not None:
new_global_step = global_step + 1
assignments.append(global_step.assign(new_global_step))
return tf.group(*assignments, name=name)
def _use_weight_decay(self, param_name):
"""Whether to use L2 weight decay for `param_name`."""
if not self.weight_decay:
return False
if self.exclude_from_weight_decay:
for r in self.exclude_from_weight_decay:
if re.search(r, param_name) is not None:
return False
return True
def _do_layer_adaptation(self, param_name):
"""Whether to do layer-wise learning rate adaptation for `param_name`."""
if self.exclude_from_layer_adaptation:
for r in self.exclude_from_layer_adaptation:
if re.search(r, param_name) is not None:
return False
return True
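# A minimal usage sketch (an assumption, not part of the original file):
# wiring the optimizer into a TF1-style training loop, excluding batch norm
# and bias variables from weight decay and layer adaptation as the docstring
# suggests.
#
#   optimizer = LARSOptimizer(
#       learning_rate=0.3,
#       weight_decay=1e-6,
#       exclude_from_weight_decay=['batch_normalization', 'bias'])
#   train_op = optimizer.apply_gradients(
#       zip(grads, tf.trainable_variables()),
#       global_step=tf.train.get_or_create_global_step())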
|
apache-2.0
|
dubenju/javay
|
src/java/org/jaudiotagger/tag/id3/AbstractTagFrameBody.java
|
9970
|
/**
* @author : Paul Taylor
* @author : Eric Farng
*
* Version @version:$Id: AbstractTagFrameBody.java 895 2010-04-15 15:21:45Z paultaylor $
*
* MusicTag Copyright (C)2003,2004
*
* This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser
* General Public License as published by the Free Software Foundation; either version 2.1 of the License,
* or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with this library; if not,
* you can get a copy from http://www.opensource.org/licenses/lgpl-license.php or write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* FragmentBody contains the data for a fragment.
* ID3v2 tags have frame bodies, Lyrics3 tags have field bodies;
* ID3v1 tags do not have fragment bodies.
* Fragment bodies consist of a number of MP3Objects held in an objectList.
* Methods are additionally defined here to retrieve and set these objects.
* We also specify methods for getting/setting the text encoding of textual
* data.
* Fragment bodies should not be concerned about their parent fragment. For
* example, most ID3v2 frames can be applied to ID3v2 tags of different
* versions. The frame header will need modification based on the frame
* version, but this should have no effect on the frame body.
*/
package org.jaudiotagger.tag.id3;
import org.jaudiotagger.tag.datatype.AbstractDataType;
import org.jaudiotagger.tag.datatype.DataTypes;
import org.jaudiotagger.tag.id3.valuepair.TextEncoding;
import java.util.ArrayList;
import java.util.Iterator;
/**
* A frame body contains the data content for a frame
*/
public abstract class AbstractTagFrameBody extends AbstractTagItem {
public void createStructure() {
}
/**
* Reference to the header associated with this frame body. A framebody can be created without a header,
* but once it is associated with a header this should be set. It is principally useful for the framebody to know
* its header, because this will specify its tag version, and some framebodies behave slightly differently
* between tag versions.
*/
private AbstractTagFrame header;
/**
* List of data types that make up this particular frame body.
*/
protected ArrayList<AbstractDataType> objectList = new ArrayList<AbstractDataType>();
/**
* Return the Text Encoding
*
* @return the text encoding used by this framebody
*/
public final byte getTextEncoding() {
AbstractDataType o = getObject(DataTypes.OBJ_TEXT_ENCODING);
if (o != null) {
Long encoding = (Long) (o.getValue());
return encoding.byteValue();
} else {
return TextEncoding.ISO_8859_1;
}
}
/**
* Set the Text Encoding to use for this frame body
*
* @param textEncoding to use for this frame body
*/
public final void setTextEncoding(byte textEncoding) {
//Number HashMap actually converts this byte to a long
setObjectValue(DataTypes.OBJ_TEXT_ENCODING, textEncoding);
}
/**
* Creates a new framebody. At this point the body's
* ObjectList is set up, which defines what datatypes are expected in the body.
*/
protected AbstractTagFrameBody() {
setupObjectList();
}
/**
* Copy Constructor for fragment body. Copies all objects in the
* Object Iterator with data.
*
* @param copyObject
*/
protected AbstractTagFrameBody(AbstractTagFrameBody copyObject) {
AbstractDataType newObject;
for (int i = 0; i < copyObject.objectList.size(); i++) {
newObject = (AbstractDataType) ID3Tags.copyObject(copyObject.objectList.get(i));
newObject.setBody(this);
this.objectList.add(newObject);
}
}
/**
* @return the text value that the user would expect to see for this framebody type; this should be overridden
* for all framebodies
*/
public String getUserFriendlyValue() {
return toString();
}
/**
* This method calls <code>toString</code> for all its objects and appends
* them without any newline characters.
*
* @return brief description string
*/
public String getBriefDescription() {
String str = "";
for (AbstractDataType object : objectList) {
if ((object.toString() != null) && (object.toString().length() > 0)) {
str += (object.getIdentifier() + "=\"" + object.toString() + "\"; ");
}
}
return str;
}
/**
* This method calls <code>toString</code> for all its objects and appends
* them. It contains new line characters and is more suited for display
* purposes
*
* @return formatted description string
*/
public final String getLongDescription() {
String str = "";
for (AbstractDataType object : objectList) {
if ((object.toString() != null) && (object.toString().length() > 0)) {
str += (object.getIdentifier() + " = " + object.toString() + "\n");
}
}
return str;
}
/**
* Sets all objects of identifier type to value defined by <code>obj</code> argument.
*
* @param identifier <code>MP3Object</code> identifier
* @param value new datatype value
*/
public final void setObjectValue(String identifier, Object value) {
AbstractDataType object;
Iterator<AbstractDataType> iterator = objectList.listIterator();
while (iterator.hasNext()) {
object = iterator.next();
if (object.getIdentifier().equals(identifier)) {
object.setValue(value);
}
}
}
/**
* Returns the value of the datatype with the specified
* <code>identifier</code>
*
* @param identifier
* @return the value of the datatype with the specified
* <code>identifier</code>
*/
public final Object getObjectValue(String identifier) {
return getObject(identifier).getValue();
}
/**
* Returns the datatype with the specified
* <code>identifier</code>
*
* @param identifier
* @return the datatype with the specified
* <code>identifier</code>
*/
public final AbstractDataType getObject(String identifier) {
AbstractDataType object;
Iterator<AbstractDataType> iterator = objectList.listIterator();
while (iterator.hasNext()) {
object = iterator.next();
if (object.getIdentifier().equals(identifier)) {
return object;
}
}
return null;
}
/**
* Returns the size in bytes of this fragmentbody
*
* @return estimated size in bytes of this datatype
*/
public int getSize() {
int size = 0;
AbstractDataType object;
Iterator<AbstractDataType> iterator = objectList.listIterator();
while (iterator.hasNext()) {
object = iterator.next();
size += object.getSize();
}
return size;
}
/**
* Returns true if this instance and its entire DataType
* array list is a subset of the argument. This class is a subset if it is
* the same class as the argument.
*
* @param obj datatype to determine subset of
* @return true if this instance and its entire datatype array list is a
* subset of the argument.
*/
public boolean isSubsetOf(Object obj) {
if (!(obj instanceof AbstractTagFrameBody)) {
return false;
}
ArrayList<AbstractDataType> superset = ((AbstractTagFrameBody) obj).objectList;
for (AbstractDataType anObjectList : objectList) {
if (anObjectList.getValue() != null) {
if (!superset.contains(anObjectList)) {
return false;
}
}
}
return true;
}
/**
* Returns true if this datatype and its entire DataType array
* list equals the argument. This datatype is equal to the argument if they
* are the same class.
*
* @param obj datatype to determine equality of
* @return true if this datatype and its entire <code>MP3Object</code> array
* list equals the argument.
*/
public boolean equals(Object obj) {
if (!(obj instanceof AbstractTagFrameBody)) {
return false;
}
AbstractTagFrameBody object = (AbstractTagFrameBody) obj;
boolean check = this.objectList.equals(object.objectList) && super.equals(obj);
return check;
}
/**
* Returns an iterator of the DataType list.
*
* @return iterator of the DataType list.
*/
public Iterator iterator() {
return objectList.iterator();
}
/**
* Return brief description of FrameBody
*
* @return brief description of FrameBody
*/
public String toString() {
return getBriefDescription();
}
/**
* Create the list of Datatypes that this body
* expects in the correct order This method needs to be implemented by concrete subclasses
*/
protected abstract void setupObjectList();
/**
* Get Reference to header
*
* @return
*/
public AbstractTagFrame getHeader() {
return header;
}
/**
* Set header
*
* @param header
*/
public void setHeader(AbstractTagFrame header) {
this.header = header;
}
}
|
apache-2.0
|
daher-alfawares/xr.desktop
|
sdk/physx/2.8.3/TrainingPrograms/Programs/Chap3/Lesson306/source/Lesson306.cpp
|
12030
|
// ===============================================================================
// NVIDIA PHYSX SDK TRAINING PROGRAMS
// LESSON 306: INTERSECTION TESTS
//
// Written by Bob Schade, 5-1-06
// ===============================================================================
#include <GL/glut.h>
#include <stdio.h>
#include "NxPhysics.h"
#include "CommonCode.h"
#include "Actors.h"
#include "Lesson306.h"
#include "EntityReport.h"
#include "NxTriangle.h"
// Physics SDK globals
extern NxPhysicsSDK* gPhysicsSDK;
extern NxScene* gScene;
extern NxVec3 gDefaultGravity;
// User report globals
extern DebugRenderer gDebugRenderer;
extern UserAllocator* gAllocator;
extern NxVec3 gCameraPos;
extern NxVec3 gCameraForward;
// Force globals
extern NxVec3 gForceVec;
extern NxReal gForceStrength;
extern bool bForceMode;
// Simulation globals
extern bool bHardwareScene;
extern bool bPause;
extern bool bShadows;
extern bool bDebugWireframeMode;
// Actor globals
NxActor* groundPlane = 0;
NxActor* box = 0;
NxActor* sphere = 0;
NxActor* capsule = 0;
NxActor* pyramid = 0;
NxActor* heightfield = 0;
// Focus actor
extern NxActor* gSelectedActor;
// Intersection test globals
typedef enum
{
SPHERE_SHAPE_TEST,
AABB_SHAPE_TEST,
PLANES_SHAPE_TEST,
SPHERE_SHAPE_CHECK,
AABB_SHAPE_CHECK,
AABB_TRIANGLE_TEST,
NUM_INTERSECTION_TESTS
} kIntersectionTestType;
NxU32 gIntersectionTestType = SPHERE_SHAPE_TEST;
const NxU32 gNumIntersectionTestTypes = 6;
// HUD globals
extern HUD hud;
char* gIntersectionTestTypeString[gNumIntersectionTestTypes] =
{
"Sphere-Shape Test","AABB-Shape Test",
"Planes-Shape Test","Sphere-Shape Check",
"AABB-Shape Check", "AABB-Triangle Test"
};
void PrintControls()
{
printf("\n Flight Controls:\n ----------------\n w = forward, s = back\n a = strafe left, d = strafe right\n q = up, z = down\n");
printf("\n Force Controls:\n ---------------\n i = +z, k = -z\n j = +x, l = -x\n u = +y, m = -y\n");
printf("\n Miscellaneous:\n --------------\n p = Pause\n r = Select Next Actor\n f = Toggle Force Mode\n b = Toggle Debug Wireframe Mode\n x = Toggle Shadows\n t = Move Focus Actor to (0,5,0)\n");
printf("\n Special:\n --------\n v = Switch Intersection Test Type\n");
}
void DrawIntersectedActor(NxActor *actor, NxActor* selectedActor)
{
NxShape*const* shapes = actor->getShapes();
NxU32 nShapes = actor->getNbShapes();
while (nShapes--)
{
DrawShape(shapes[nShapes], true);
}
nShapes = actor->getNbShapes();
if (selectedActor && actor == selectedActor)
{
while (nShapes--)
{
if (shapes[nShapes]->getFlag(NX_TRIGGER_ENABLE))
{
DrawWireShape(shapes[nShapes], NxVec3(0,0,1), true);
}
else
{
if (actor->userData && ((ActorUserData *)(actor->userData))->flags & UD_PASSES_INTERSECTION_TEST)
DrawWireShape(shapes[nShapes], NxVec3(0.5,0.5,1), true);
else
DrawWireShape(shapes[nShapes], NxVec3(1,1,1), true);
}
}
if (actor->userData) ((ActorUserData *)(actor->userData))->flags &= ~UD_PASSES_INTERSECTION_TEST;
}
}
void RenderActors(bool shadows)
{
// Render all the actors in the scene
NxU32 nbActors = gScene->getNbActors();
NxActor** actors = gScene->getActors();
while (nbActors--)
{
NxActor* actor = *actors++;
DrawIntersectedActor(actor, gSelectedActor);
// Handle shadows
if (shadows)
{
DrawActorShadow(actor, true);
}
}
}
void ProcessInputs()
{
ProcessForceKeys();
// Show debug wireframes
if (bDebugWireframeMode)
{
if (gScene) gDebugRenderer.renderData(*gScene->getDebugRenderable());
}
}
void RenderIntersectedEntities()
{
switch (gIntersectionTestType)
{
case SPHERE_SHAPE_TEST:
{
NxSphere worldSphere(NxVec3(0,0,0), 5);
NxShapesType shapeType = NX_DYNAMIC_SHAPES; // NX_STATIC_SHAPES, NX_ALL_SHAPES
NxU32 nbShapes = gScene->getNbDynamicShapes(); // + gScene->getNbStaticShapes()
NxShape** shapes = (NxShape**)NxAlloca(nbShapes*sizeof(NxShape*));
for (NxU32 i = 0; i < nbShapes; i++) shapes[i] = NULL;
NxU32 activeGroups = 0xffffffff;
NxGroupsMask* groupsMask = NULL;
gScene->overlapSphereShapes(worldSphere, shapeType, nbShapes, shapes, &gShapeReport, activeGroups, groupsMask);
DrawWireSphere(&worldSphere, NxVec3(1,0,0));
while (nbShapes--)
{
if (shapes[nbShapes])
{
DrawWireShape(shapes[nbShapes], NxVec3(0,0,0), true);
}
}
}
break;
case AABB_SHAPE_TEST:
{
NxBounds3 worldBounds;
worldBounds.set(NxVec3(-5,-5,-5), NxVec3(5,5,5));
NxShapesType shapeType = NX_DYNAMIC_SHAPES; // NX_STATIC_SHAPES, NX_ALL_SHAPES
NxU32 nbShapes = gScene->getNbDynamicShapes(); // + gScene->getNbStaticShapes()
NxShape** shapes = (NxShape**)NxAlloca(nbShapes*sizeof(NxShape*));
for (NxU32 i = 0; i < nbShapes; i++) shapes[i] = NULL;
NxU32 activeGroups = 0xffffffff;
NxGroupsMask* groupsMask = NULL;
gScene->overlapAABBShapes(worldBounds, shapeType, nbShapes, shapes, &gShapeReport, activeGroups, groupsMask);
NxBox box;
NxMat34 mat;
NxCreateBox(box, worldBounds, mat);
DrawWireBox(box, NxVec3(1,0,0));
while (nbShapes--)
{
if (shapes[nbShapes])
{
DrawWireShape(shapes[nbShapes], NxVec3(0,0,0), true);
}
}
}
break;
case PLANES_SHAPE_TEST:
{
NxU32 nbPlanes = 2;
NxPlane worldPlanes[2];
worldPlanes[0].set(NxVec3(-2,0,2), NxVec3(0,0,1));
worldPlanes[1].set(NxVec3(-2,0,2), NxVec3(1,0,0));
NxShapesType shapeType = NX_DYNAMIC_SHAPES; // NX_STATIC_SHAPES, NX_ALL_SHAPES
NxU32 nbShapes = gScene->getNbDynamicShapes(); // + gScene->getNbStaticShapes()
NxShape** shapes = (NxShape**)NxAlloca(nbShapes*sizeof(NxShape*));
for (NxU32 i = 0; i < nbShapes; i++) shapes[i] = NULL;
NxU32 activeGroups = 0xffffffff;
NxGroupsMask* groupsMask = NULL;
gScene->cullShapes(nbPlanes, worldPlanes, shapeType, nbShapes, shapes, &gShapeReport, activeGroups, groupsMask);
// NxBox box;
// NxMat34 mat;
// NxCreateBox(box, worldBounds, mat);
// DrawWireBox(&box, NxVec3(1,0,0));
while (nbShapes--)
{
if (shapes[nbShapes])
{
DrawWireShape(shapes[nbShapes], NxVec3(0,0,0), true);
}
}
}
break;
case SPHERE_SHAPE_CHECK:
{
NxSphere worldSphere(NxVec3(0,0,0), 5);
NxShapesType shapeType = NX_DYNAMIC_SHAPES; // NX_STATIC_SHAPES, NX_ALL_SHAPES
NxU32 activeGroups = 0xffffffff;
NxGroupsMask* groupsMask = NULL;
bool intersection = gScene->checkOverlapSphere(worldSphere, shapeType, activeGroups, groupsMask);
if (intersection)
DrawWireSphere(&worldSphere, NxVec3(1,0,0));
else
DrawWireSphere(&worldSphere, NxVec3(0,1,0));
}
break;
case AABB_SHAPE_CHECK:
{
NxBounds3 worldBounds;
worldBounds.set(NxVec3(-5,-5,-5), NxVec3(5,5,5));
NxShapesType shapeType = NX_DYNAMIC_SHAPES; // NX_STATIC_SHAPES, NX_ALL_SHAPES
NxU32 activeGroups = 0xffffffff;
NxGroupsMask* groupsMask = NULL;
NxBox box;
NxMat34 mat;
NxCreateBox(box, worldBounds, mat);
bool intersection = gScene->checkOverlapAABB(worldBounds, shapeType, activeGroups, groupsMask);
if (intersection)
DrawWireBox(box, NxVec3(1,0,0));
else
DrawWireBox(box, NxVec3(0,1,0));
}
break;
case AABB_TRIANGLE_TEST:
{
NxBounds3 worldBounds;
worldBounds.set(NxVec3(-13,-2,-2), NxVec3(-17,2,2));
// worldBounds.set(NxVec3(7,-2,3), NxVec3(3,2,7));
NxShapesType shapeType = NX_ALL_SHAPES; // NX_DYNAMIC_SHAPES, NX_STATIC_SHAPES
NxU32 activeGroups = 0xffffffff;
NxGroupsMask* groupsMask = NULL;
NxShape*const* shapes = heightfield->getShapes();
NxTriangleMeshShape* tmShape = shapes[0]->isTriangleMesh();
if (!tmShape) break;
NxU32 nbTriangles;
const NxU32* triangles;
// Collide AABB against mesh shape
if (!tmShape->overlapAABBTriangles((const NxBounds3&)worldBounds, NX_QUERY_WORLD_SPACE, nbTriangles, triangles))
break;
// Loop through triangles
while (nbTriangles--)
{
NxU32 index = *triangles++;
NxTriangle currentTriangle;
NxTriangle edgeTri;
NxU32 edgeFlags;
tmShape->getTriangle(currentTriangle, &edgeTri, &edgeFlags, index);
NxMat34 shapePose = tmShape->getGlobalPose();
NxVec3 pos[3];
pos[0] = currentTriangle.verts[0];
pos[1] = currentTriangle.verts[1];
pos[2] = currentTriangle.verts[2];
DrawLine(pos[0], pos[1], NxVec3(0,1,0));
DrawLine(pos[1], pos[2], NxVec3(0,1,0));
DrawLine(pos[2], pos[0], NxVec3(0,1,0));
}
NxBox box;
NxMat34 mat;
NxCreateBox(box, worldBounds, mat);
DrawWireBox(box, NxVec3(1,0,0));
}
break;
}
}
void RenderCallback()
{
// Clear buffers
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
ProcessCameraKeys();
SetupCamera();
if (gScene && !bPause)
{
GetPhysicsResults();
ProcessInputs();
StartPhysics();
}
// Display scene
RenderActors(bShadows);
RenderIntersectedEntities();
if (bForceMode)
DrawForce(gSelectedActor, gForceVec, NxVec3(1,1,0));
else
DrawForce(gSelectedActor, gForceVec, NxVec3(0,1,1));
gForceVec = NxVec3(0,0,0);
// Render HUD
hud.Render();
glFlush();
glutSwapBuffers();
}
void SpecialKeys(unsigned char key, int x, int y)
{
switch (key)
{
case 'v': { gIntersectionTestType = (gIntersectionTestType+1)%gNumIntersectionTestTypes;
// Add intersection test type to HUD
char ds[512];
sprintf(ds, "INTERSECTION TEST: %s", gIntersectionTestTypeString[gIntersectionTestType]);
hud.SetDisplayString(2, ds, 0.015f, 0.92f);
break; }
}
}
void InitializeSpecialHUD()
{
char ds[512];
// Add intersection test type to HUD
sprintf(ds, "INTERSECTION TEST: %s", gIntersectionTestTypeString[gIntersectionTestType]);
hud.AddDisplayString(ds, 0.015f, 0.92f);
}
void InitNx()
{
// Set camera position and forward
gCameraPos.set(0, 5, -25);
gCameraForward.set(-0.25, 0, 1);
// Create a memory allocator
gAllocator = new UserAllocator;
// Create the physics SDK
gPhysicsSDK = NxCreatePhysicsSDK(NX_PHYSICS_SDK_VERSION, gAllocator);
if (!gPhysicsSDK) return;
// Set the physics parameters
gPhysicsSDK->setParameter(NX_SKIN_WIDTH, 0.01);
// Set the debug visualization parameters
gPhysicsSDK->setParameter(NX_VISUALIZATION_SCALE, 1);
gPhysicsSDK->setParameter(NX_VISUALIZE_COLLISION_SHAPES, 1);
gPhysicsSDK->setParameter(NX_VISUALIZE_ACTOR_AXES, 1);
gPhysicsSDK->setParameter(NX_VISUALIZE_COLLISION_FNORMALS, 1);
// Create the scene
NxSceneDesc sceneDesc;
sceneDesc.gravity = gDefaultGravity;
sceneDesc.simType = NX_SIMULATION_HW;
gScene = gPhysicsSDK->createScene(sceneDesc);
if(!gScene){
sceneDesc.simType = NX_SIMULATION_SW;
gScene = gPhysicsSDK->createScene(sceneDesc);
if(!gScene) return;
}
// Create the default material
NxMaterial* defaultMaterial = gScene->getMaterialFromIndex(0);
defaultMaterial->setRestitution(0.5);
defaultMaterial->setStaticFriction(0.5);
defaultMaterial->setDynamicFriction(0.5);
// Create the objects in the scene
groundPlane = CreateGroundPlane();
box = CreateBox(NxVec3(5,0,0), NxVec3(0.5,1,0.5), 20);
sphere = CreateSphere(NxVec3(0,0,5), 1, 10);
capsule = CreateCapsule(NxVec3(-5,0,0), 2, 0.6, 10);
pyramid = CreatePyramid(NxVec3(0,0,0), NxVec3(1,0.5,1.5), 10);
heightfield = CreateFlatHeightfield(NxVec3(-20,0,-5), 5, 5, 2);
AddUserDataToActors(gScene);
gSelectedActor = pyramid;
// Initialize HUD
InitializeHUD();
InitializeSpecialHUD();
// Get the current time
getElapsedTime();
// Start the first frame of the simulation
if (gScene) StartPhysics();
}
int main(int argc, char** argv)
{
PrintControls();
InitGlut(argc, argv, "Lesson 306: Entity Report");
InitNx();
glutMainLoop();
ReleaseNx();
return 0;
}
|
apache-2.0
|
chuckjaz/TypeScript
|
src/compiler/transformers/module/system.ts
|
80587
|
/// <reference path="../../factory.ts" />
/// <reference path="../../visitor.ts" />
/// <reference path="../destructuring.ts" />
/*@internal*/
namespace ts {
export function transformSystemModule(context: TransformationContext) {
interface DependencyGroup {
name: StringLiteral;
externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[];
}
const {
startLexicalEnvironment,
endLexicalEnvironment,
hoistVariableDeclaration
} = context;
const compilerOptions = context.getCompilerOptions();
const resolver = context.getEmitResolver();
const host = context.getEmitHost();
const previousOnSubstituteNode = context.onSubstituteNode;
const previousOnEmitNode = context.onEmitNode;
context.onSubstituteNode = onSubstituteNode;
context.onEmitNode = onEmitNode;
context.enableSubstitution(SyntaxKind.Identifier); // Substitutes expression identifiers for imported symbols.
context.enableSubstitution(SyntaxKind.BinaryExpression); // Substitutes assignments to exported symbols.
context.enableSubstitution(SyntaxKind.PrefixUnaryExpression); // Substitutes updates to exported symbols.
context.enableSubstitution(SyntaxKind.PostfixUnaryExpression); // Substitutes updates to exported symbols.
context.enableEmitNotification(SyntaxKind.SourceFile); // Restore state when substituting nodes in a file.
const moduleInfoMap: ExternalModuleInfo[] = []; // The ExternalModuleInfo for each file.
const deferredExports: Statement[][] = []; // Exports to defer until an EndOfDeclarationMarker is found.
const exportFunctionsMap: Identifier[] = []; // The export function associated with a source file.
const noSubstitutionMap: boolean[][] = []; // Set of nodes for which substitution rules should be ignored for each file.
let currentSourceFile: SourceFile; // The current file.
let moduleInfo: ExternalModuleInfo; // ExternalModuleInfo for the current file.
let exportFunction: Identifier; // The export function for the current file.
let contextObject: Identifier; // The context object for the current file.
let hoistedStatements: Statement[];
let enclosingBlockScopedContainer: Node;
let noSubstitution: boolean[]; // Set of nodes for which substitution rules should be ignored.
return transformSourceFile;
/**
* Transforms the module aspects of a SourceFile.
*
* @param node The SourceFile node.
*/
function transformSourceFile(node: SourceFile) {
if (node.isDeclarationFile || !(isEffectiveExternalModule(node, compilerOptions) || node.transformFlags & TransformFlags.ContainsDynamicImport)) {
return node;
}
const id = getOriginalNodeId(node);
currentSourceFile = node;
enclosingBlockScopedContainer = node;
// System modules have the following shape:
//
// System.register(['dep-1', ... 'dep-n'], function(exports) {/* module body function */})
//
// The parameter 'exports' here is a callback '<T>(name: string, value: T) => T' that
// is used to publish exported values. 'exports' returns its 'value' argument so in
// most cases expressions that mutate exported values can be rewritten as:
//
// expr -> exports('name', expr)
//
// The only exception to this rule is postfix unary operators,
// see the comment on 'substituteUnaryExpression' for more details
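//
// For example (illustrative), an assignment to an exported binding 'x':
//
//   x = 10   ->   exports('x', x = 10)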
// Collect information about the external module and dependency groups.
moduleInfo = moduleInfoMap[id] = collectExternalModuleInfo(node, resolver, compilerOptions);
// Make sure that the name of the 'exports' function does not conflict with
// existing identifiers.
exportFunction = createUniqueName("exports");
exportFunctionsMap[id] = exportFunction;
contextObject = createUniqueName("context");
// Add the body of the module.
const dependencyGroups = collectDependencyGroups(moduleInfo.externalImports);
const moduleBodyBlock = createSystemModuleBody(node, dependencyGroups);
const moduleBodyFunction = createFunctionExpression(
/*modifiers*/ undefined,
/*asteriskToken*/ undefined,
/*name*/ undefined,
/*typeParameters*/ undefined,
[
createParameter(/*decorators*/ undefined, /*modifiers*/ undefined, /*dotDotDotToken*/ undefined, exportFunction),
createParameter(/*decorators*/ undefined, /*modifiers*/ undefined, /*dotDotDotToken*/ undefined, contextObject)
],
/*type*/ undefined,
moduleBodyBlock
);
// Write the call to `System.register`
// Clear the emit-helpers flag for later passes since we'll have already used it in the module body
// so the helper will be emitted at the correct position instead of at the top of the source-file.
const moduleName = tryGetModuleNameFromFile(node, host, compilerOptions);
const dependencies = createArrayLiteral(map(dependencyGroups, dependencyGroup => dependencyGroup.name));
const updated = setEmitFlags(
updateSourceFileNode(
node,
setTextRange(
createNodeArray([
createStatement(
createCall(
createPropertyAccess(createIdentifier("System"), "register"),
/*typeArguments*/ undefined,
moduleName
? [moduleName, dependencies, moduleBodyFunction]
: [dependencies, moduleBodyFunction]
)
)
]),
node.statements
)
), EmitFlags.NoTrailingComments);
if (!(compilerOptions.outFile || compilerOptions.out)) {
moveEmitHelpers(updated, moduleBodyBlock, helper => !helper.scoped);
}
if (noSubstitution) {
noSubstitutionMap[id] = noSubstitution;
noSubstitution = undefined;
}
currentSourceFile = undefined;
moduleInfo = undefined;
exportFunction = undefined;
contextObject = undefined;
hoistedStatements = undefined;
enclosingBlockScopedContainer = undefined;
return aggregateTransformFlags(updated);
}
/**
* Collects the dependency groups for this file's imports.
*
* @param externalImports The imports for the file.
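*
* For example (illustrative), `import { a } from "mod"` and `export { b } from "mod"`
* both land in a single dependency group keyed by the module name "mod".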
*/
function collectDependencyGroups(externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[]) {
const groupIndices = createMap<number>();
const dependencyGroups: DependencyGroup[] = [];
for (let i = 0; i < externalImports.length; i++) {
const externalImport = externalImports[i];
const externalModuleName = getExternalModuleNameLiteral(externalImport, currentSourceFile, host, resolver, compilerOptions);
if (externalModuleName) {
const text = externalModuleName.text;
const groupIndex = groupIndices.get(text);
if (groupIndex !== undefined) {
// deduplicate/group entries in dependency list by the dependency name
dependencyGroups[groupIndex].externalImports.push(externalImport);
}
else {
groupIndices.set(text, dependencyGroups.length);
dependencyGroups.push({
name: externalModuleName,
externalImports: [externalImport]
});
}
}
}
return dependencyGroups;
}
/**
* Adds the statements for the module body function for the source file.
*
* @param node The source file for the module.
* @param dependencyGroups The grouped dependencies of the module.
*/
function createSystemModuleBody(node: SourceFile, dependencyGroups: DependencyGroup[]) {
// Shape of the body in system modules:
//
// function (exports) {
// <list of local aliases for imports>
// <hoisted variable declarations>
// <hoisted function declarations>
// return {
// setters: [
// <list of setter function for imports>
// ],
// execute: function() {
// <module statements>
// }
// }
// <temp declarations>
// }
//
// i.e.:
//
// import {x} from 'file1'
// var y = 1;
// export function foo() { return y + x(); }
// console.log(y);
//
// Will be transformed to:
//
// function(exports) {
// function foo() { return y + file_1.x(); }
// exports("foo", foo);
// var file_1, y;
// return {
// setters: [
// function(v) { file_1 = v }
// ],
// execute: function() {
// y = 1;
// console.log(y);
// }
// };
// }
const statements: Statement[] = [];
// We start a new lexical environment in this function body, but *not* in the
// body of the execute function. This allows us to emit temporary declarations
// only in the outer module body and not in the inner one.
startLexicalEnvironment();
// Add any prologue directives.
const ensureUseStrict = compilerOptions.alwaysStrict || (!compilerOptions.noImplicitUseStrict && isExternalModule(currentSourceFile));
const statementOffset = addPrologue(statements, node.statements, ensureUseStrict, sourceElementVisitor);
// var __moduleName = context_1 && context_1.id;
statements.push(
createVariableStatement(
/*modifiers*/ undefined,
createVariableDeclarationList([
createVariableDeclaration(
"__moduleName",
/*type*/ undefined,
createLogicalAnd(
contextObject,
createPropertyAccess(contextObject, "id")
)
)
])
)
);
// Visit the synthetic external helpers import declaration if present
visitNode(moduleInfo.externalHelpersImportDeclaration, sourceElementVisitor, isStatement);
// Visit the statements of the source file, emitting any transformations into
// the `executeStatements` array. We do this *before* we fill the `setters` array
// as we both emit transformations as well as aggregate some data used when creating
// setters. This allows us to reduce the number of times we need to loop through the
// statements of the source file.
const executeStatements = visitNodes(node.statements, sourceElementVisitor, isStatement, statementOffset);
// Emit early exports for function declarations.
addRange(statements, hoistedStatements);
// We emit hoisted variables early to align roughly with our previous emit output.
// A key difference in this approach is:
// - Temporary variables will appear at the top rather than at the bottom of the file
addRange(statements, endLexicalEnvironment());
const exportStarFunction = addExportStarIfNeeded(statements);
const moduleObject = createObjectLiteral([
createPropertyAssignment("setters",
createSettersArray(exportStarFunction, dependencyGroups)
),
createPropertyAssignment("execute",
createFunctionExpression(
/*modifiers*/ undefined,
/*asteriskToken*/ undefined,
/*name*/ undefined,
/*typeParameters*/ undefined,
/*parameters*/ [],
/*type*/ undefined,
createBlock(executeStatements, /*multiLine*/ true)
)
)
]);
moduleObject.multiLine = true;
statements.push(createReturn(moduleObject));
return createBlock(statements, /*multiLine*/ true);
}
/**
* Adds an exportStar function to a statement list if it is needed for the file.
*
* @param statements A statement list.
*/
function addExportStarIfNeeded(statements: Statement[]) {
if (!moduleInfo.hasExportStarsToExportValues) {
return;
}
// When resolving exports, local exported entries/indirect exported entries in the module
// should always win over entries with similar names that were added via star exports.
// To support this we store the names of local/indirect exported entries in a set;
// this set is used to filter the names brought in by star exports.
// The local names set should only be added if we have anything exported.
if (!moduleInfo.exportedNames && moduleInfo.exportSpecifiers.size === 0) {
// no exported declarations (export var ...) or export specifiers (export {x})
// check if we have any non star export declarations.
let hasExportDeclarationWithExportClause = false;
for (const externalImport of moduleInfo.externalImports) {
if (externalImport.kind === SyntaxKind.ExportDeclaration && externalImport.exportClause) {
hasExportDeclarationWithExportClause = true;
break;
}
}
if (!hasExportDeclarationWithExportClause) {
// we still need to emit exportStar helper
const exportStarFunction = createExportStarFunction(/*localNames*/ undefined);
statements.push(exportStarFunction);
return exportStarFunction.name;
}
}
const exportedNames: ObjectLiteralElementLike[] = [];
if (moduleInfo.exportedNames) {
for (const exportedLocalName of moduleInfo.exportedNames) {
if (exportedLocalName.text === "default") {
continue;
}
// write name of exported declaration, i.e. 'export var x...'
exportedNames.push(
createPropertyAssignment(
createLiteral(exportedLocalName),
createTrue()
)
);
}
}
for (const externalImport of moduleInfo.externalImports) {
if (externalImport.kind !== SyntaxKind.ExportDeclaration) {
continue;
}
const exportDecl = <ExportDeclaration>externalImport;
if (!exportDecl.exportClause) {
// export * from ...
continue;
}
for (const element of exportDecl.exportClause.elements) {
// write name of indirectly exported entry, i.e. 'export {x} from ...'
exportedNames.push(
createPropertyAssignment(
createLiteral(unescapeLeadingUnderscores((element.name || element.propertyName).text)),
createTrue()
)
);
}
}
const exportedNamesStorageRef = createUniqueName("exportedNames");
statements.push(
createVariableStatement(
/*modifiers*/ undefined,
createVariableDeclarationList([
createVariableDeclaration(
exportedNamesStorageRef,
/*type*/ undefined,
createObjectLiteral(exportedNames, /*multiline*/ true)
)
])
)
);
const exportStarFunction = createExportStarFunction(exportedNamesStorageRef);
statements.push(exportStarFunction);
return exportStarFunction.name;
}
/**
* Creates an exportStar function for the file, with an optional set of excluded local
* names.
*
* @param localNames An optional reference to an object containing a set of excluded local
* names.
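*
* The emitted function has roughly this shape (illustrative, using generated names):
*
*   function exportStar_1(m) {
*       var exports = {};
*       for (var n in m) {
*           if (n !== "default" && !exportedNames_1.hasOwnProperty(n)) exports[n] = m[n];
*       }
*       exports_1(exports);
*   }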
*/
function createExportStarFunction(localNames: Identifier | undefined) {
const exportStarFunction = createUniqueName("exportStar");
const m = createIdentifier("m");
const n = createIdentifier("n");
const exports = createIdentifier("exports");
let condition: Expression = createStrictInequality(n, createLiteral("default"));
if (localNames) {
condition = createLogicalAnd(
condition,
createLogicalNot(
createCall(
createPropertyAccess(localNames, "hasOwnProperty"),
/*typeArguments*/ undefined,
[n]
)
)
);
}
return createFunctionDeclaration(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*asteriskToken*/ undefined,
exportStarFunction,
/*typeParameters*/ undefined,
[createParameter(/*decorators*/ undefined, /*modifiers*/ undefined, /*dotDotDotToken*/ undefined, m)],
/*type*/ undefined,
createBlock([
createVariableStatement(
/*modifiers*/ undefined,
createVariableDeclarationList([
createVariableDeclaration(
exports,
/*type*/ undefined,
createObjectLiteral([])
)
])
),
createForIn(
createVariableDeclarationList([
createVariableDeclaration(n, /*type*/ undefined)
]),
m,
createBlock([
setEmitFlags(
createIf(
condition,
createStatement(
createAssignment(
createElementAccess(exports, n),
createElementAccess(m, n)
)
)
),
EmitFlags.SingleLine
)
])
),
createStatement(
createCall(
exportFunction,
/*typeArguments*/ undefined,
[exports]
)
)
], /*multiline*/ true)
);
}
/**
* Creates an array of setter callbacks, one for each dependency group.
*
* @param exportStarFunction A reference to an exportStarFunction for the file.
* @param dependencyGroups An array of grouped dependencies.
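*
* Each setter receives the resolved module object and copies it into the hoisted local,
* roughly (illustrative): function (foo_1_1) { foo_1 = foo_1_1; }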
*/
function createSettersArray(exportStarFunction: Identifier, dependencyGroups: DependencyGroup[]) {
const setters: Expression[] = [];
for (const group of dependencyGroups) {
// derive a unique name for parameter from the first named entry in the group
const localName = forEach(group.externalImports, i => getLocalNameForExternalImport(i, currentSourceFile));
const parameterName = localName ? getGeneratedNameForNode(localName) : createUniqueName("");
const statements: Statement[] = [];
for (const entry of group.externalImports) {
const importVariableName = getLocalNameForExternalImport(entry, currentSourceFile);
switch (entry.kind) {
case SyntaxKind.ImportDeclaration:
if (!(<ImportDeclaration>entry).importClause) {
// 'import "..."' case
// module is imported only for side-effects, no emit required
break;
}
// falls through
case SyntaxKind.ImportEqualsDeclaration:
Debug.assert(importVariableName !== undefined);
// save import into the local
statements.push(
createStatement(
createAssignment(importVariableName, parameterName)
)
);
break;
case SyntaxKind.ExportDeclaration:
Debug.assert(importVariableName !== undefined);
if ((<ExportDeclaration>entry).exportClause) {
// export {a, b as c} from 'foo'
//
// emit as:
//
// exports_({
// "a": _["a"],
// "c": _["b"]
// });
const properties: PropertyAssignment[] = [];
for (const e of (<ExportDeclaration>entry).exportClause.elements) {
properties.push(
createPropertyAssignment(
createLiteral(unescapeLeadingUnderscores(e.name.text)),
createElementAccess(
parameterName,
createLiteral(unescapeLeadingUnderscores((e.propertyName || e.name).text))
)
)
);
}
statements.push(
createStatement(
createCall(
exportFunction,
/*typeArguments*/ undefined,
[createObjectLiteral(properties, /*multiline*/ true)]
)
)
);
}
else {
// export * from 'foo'
//
// emit as:
//
// exportStar(foo_1_1);
statements.push(
createStatement(
createCall(
exportStarFunction,
/*typeArguments*/ undefined,
[parameterName]
)
)
);
}
break;
}
}
setters.push(
createFunctionExpression(
/*modifiers*/ undefined,
/*asteriskToken*/ undefined,
/*name*/ undefined,
/*typeParameters*/ undefined,
[createParameter(/*decorators*/ undefined, /*modifiers*/ undefined, /*dotDotDotToken*/ undefined, parameterName)],
/*type*/ undefined,
createBlock(statements, /*multiLine*/ true)
)
);
}
return createArrayLiteral(setters, /*multiLine*/ true);
}
//
// Top-level Source Element Visitors
//
/**
* Visit source elements at the top-level of a module.
*
* @param node The node to visit.
*/
function sourceElementVisitor(node: Node): VisitResult<Node> {
switch (node.kind) {
case SyntaxKind.ImportDeclaration:
return visitImportDeclaration(<ImportDeclaration>node);
case SyntaxKind.ImportEqualsDeclaration:
return visitImportEqualsDeclaration(<ImportEqualsDeclaration>node);
case SyntaxKind.ExportDeclaration:
// ExportDeclarations are elided as they are handled via
// `appendExportsOfDeclaration`.
return undefined;
case SyntaxKind.ExportAssignment:
return visitExportAssignment(<ExportAssignment>node);
default:
return nestedElementVisitor(node);
}
}
/**
* Visits an ImportDeclaration node.
*
* @param node The node to visit.
*/
function visitImportDeclaration(node: ImportDeclaration): VisitResult<Statement> {
let statements: Statement[];
if (node.importClause) {
hoistVariableDeclaration(getLocalNameForExternalImport(node, currentSourceFile));
}
if (hasAssociatedEndOfDeclarationMarker(node)) {
// Defer exports until we encounter an EndOfDeclarationMarker node
const id = getOriginalNodeId(node);
deferredExports[id] = appendExportsOfImportDeclaration(deferredExports[id], node);
}
else {
statements = appendExportsOfImportDeclaration(statements, node);
}
return singleOrMany(statements);
}
/**
* Visits an ImportEqualsDeclaration node.
*
* @param node The node to visit.
*/
function visitImportEqualsDeclaration(node: ImportEqualsDeclaration): VisitResult<Statement> {
Debug.assert(isExternalModuleImportEqualsDeclaration(node), "import= for internal module references should be handled in an earlier transformer.");
let statements: Statement[];
hoistVariableDeclaration(getLocalNameForExternalImport(node, currentSourceFile));
if (hasAssociatedEndOfDeclarationMarker(node)) {
// Defer exports until we encounter an EndOfDeclarationMarker node
const id = getOriginalNodeId(node);
deferredExports[id] = appendExportsOfImportEqualsDeclaration(deferredExports[id], node);
}
else {
statements = appendExportsOfImportEqualsDeclaration(statements, node);
}
return singleOrMany(statements);
}
/**
* Visits an ExportAssignment node.
*
* @param node The node to visit.
*/
function visitExportAssignment(node: ExportAssignment): VisitResult<Statement> {
if (node.isExportEquals) {
// Elide `export=` as it is illegal in a SystemJS module.
return undefined;
}
const expression = visitNode(node.expression, destructuringAndImportCallVisitor, isExpression);
const original = node.original;
if (original && hasAssociatedEndOfDeclarationMarker(original)) {
// Defer exports until we encounter an EndOfDeclarationMarker node
const id = getOriginalNodeId(node);
deferredExports[id] = appendExportStatement(deferredExports[id], createIdentifier("default"), expression, /*allowComments*/ true);
}
else {
return createExportStatement(createIdentifier("default"), expression, /*allowComments*/ true);
}
}
/**
* Visits a FunctionDeclaration, hoisting it to the outer module body function.
*
* @param node The node to visit.
*/
function visitFunctionDeclaration(node: FunctionDeclaration): VisitResult<Statement> {
if (hasModifier(node, ModifierFlags.Export)) {
hoistedStatements = append(hoistedStatements,
updateFunctionDeclaration(
node,
node.decorators,
visitNodes(node.modifiers, modifierVisitor, isModifier),
node.asteriskToken,
getDeclarationName(node, /*allowComments*/ true, /*allowSourceMaps*/ true),
/*typeParameters*/ undefined,
visitNodes(node.parameters, destructuringAndImportCallVisitor, isParameterDeclaration),
/*type*/ undefined,
visitNode(node.body, destructuringAndImportCallVisitor, isBlock)));
}
else {
hoistedStatements = append(hoistedStatements, visitEachChild(node, destructuringAndImportCallVisitor, context));
}
if (hasAssociatedEndOfDeclarationMarker(node)) {
// Defer exports until we encounter an EndOfDeclarationMarker node
const id = getOriginalNodeId(node);
deferredExports[id] = appendExportsOfHoistedDeclaration(deferredExports[id], node);
}
else {
hoistedStatements = appendExportsOfHoistedDeclaration(hoistedStatements, node);
}
return undefined;
}
/**
* Visits a ClassDeclaration, hoisting its name to the outer module body function.
*
* @param node The node to visit.
*/
function visitClassDeclaration(node: ClassDeclaration): VisitResult<Statement> {
let statements: Statement[];
// Hoist the name of the class declaration to the outer module body function.
const name = getLocalName(node);
hoistVariableDeclaration(name);
// Rewrite the class declaration into an assignment of a class expression.
statements = append(statements,
setTextRange(
createStatement(
createAssignment(
name,
setTextRange(
createClassExpression(
/*modifiers*/ undefined,
node.name,
/*typeParameters*/ undefined,
visitNodes(node.heritageClauses, destructuringAndImportCallVisitor, isHeritageClause),
visitNodes(node.members, destructuringAndImportCallVisitor, isClassElement)
),
node
)
)
),
node
)
);
if (hasAssociatedEndOfDeclarationMarker(node)) {
// Defer exports until we encounter an EndOfDeclarationMarker node
const id = getOriginalNodeId(node);
deferredExports[id] = appendExportsOfHoistedDeclaration(deferredExports[id], node);
}
else {
statements = appendExportsOfHoistedDeclaration(statements, node);
}
return singleOrMany(statements);
}
/**
* Visits a variable statement, hoisting declared names to the top-level module body.
* Each declaration is rewritten into an assignment expression.
*
* @param node The node to visit.
*/
function visitVariableStatement(node: VariableStatement): VisitResult<Statement> {
if (!shouldHoistVariableDeclarationList(node.declarationList)) {
return visitNode(node, destructuringAndImportCallVisitor, isStatement);
}
let expressions: Expression[];
const isExportedDeclaration = hasModifier(node, ModifierFlags.Export);
const isMarkedDeclaration = hasAssociatedEndOfDeclarationMarker(node);
for (const variable of node.declarationList.declarations) {
if (variable.initializer) {
expressions = append(expressions, transformInitializedVariable(variable, isExportedDeclaration && !isMarkedDeclaration));
}
else {
hoistBindingElement(variable);
}
}
let statements: Statement[];
if (expressions) {
statements = append(statements, setTextRange(createStatement(inlineExpressions(expressions)), node));
}
if (isMarkedDeclaration) {
// Defer exports until we encounter an EndOfDeclarationMarker node
const id = getOriginalNodeId(node);
deferredExports[id] = appendExportsOfVariableStatement(deferredExports[id], node, isExportedDeclaration);
}
else {
statements = appendExportsOfVariableStatement(statements, node, /*exportSelf*/ false);
}
return singleOrMany(statements);
}
/**
* Hoists the declared names of a VariableDeclaration or BindingElement.
*
* @param node The declaration to hoist.
*/
function hoistBindingElement(node: VariableDeclaration | BindingElement): void {
if (isBindingPattern(node.name)) {
for (const element of node.name.elements) {
if (!isOmittedExpression(element)) {
hoistBindingElement(element);
}
}
}
else {
hoistVariableDeclaration(getSynthesizedClone(node.name));
}
}
/**
* Determines whether a VariableDeclarationList should be hoisted.
*
* @param node The node to test.
*/
function shouldHoistVariableDeclarationList(node: VariableDeclarationList) {
// hoist only non-block scoped declarations or block scoped declarations parented by source file
return (getEmitFlags(node) & EmitFlags.NoHoisting) === 0
&& (enclosingBlockScopedContainer.kind === SyntaxKind.SourceFile
|| (getOriginalNode(node).flags & NodeFlags.BlockScoped) === 0);
}
/**
* Transform an initialized variable declaration into an expression.
*
* @param node The node to transform.
* @param isExportedDeclaration A value indicating whether the variable is exported.
*/
function transformInitializedVariable(node: VariableDeclaration, isExportedDeclaration: boolean): Expression {
const createAssignment = isExportedDeclaration ? createExportedVariableAssignment : createNonExportedVariableAssignment;
return isBindingPattern(node.name)
? flattenDestructuringAssignment(
node,
destructuringAndImportCallVisitor,
context,
FlattenLevel.All,
/*needsValue*/ false,
createAssignment
)
: createAssignment(node.name, visitNode(node.initializer, destructuringAndImportCallVisitor, isExpression));
}
/**
* Creates an assignment expression for an exported variable declaration.
*
* @param name The name of the variable.
* @param value The value of the variable's initializer.
* @param location The source map location for the assignment.
*/
function createExportedVariableAssignment(name: Identifier, value: Expression, location?: TextRange) {
return createVariableAssignment(name, value, location, /*isExportedDeclaration*/ true);
}
/**
* Creates an assignment expression for a non-exported variable declaration.
*
* @param name The name of the variable.
* @param value The value of the variable's initializer.
* @param location The source map location for the assignment.
*/
function createNonExportedVariableAssignment(name: Identifier, value: Expression, location?: TextRange) {
return createVariableAssignment(name, value, location, /*isExportedDeclaration*/ false);
}
/**
* Creates an assignment expression for a variable declaration.
*
* @param name The name of the variable.
* @param value The value of the variable's initializer.
* @param location The source map location for the assignment.
* @param isExportedDeclaration A value indicating whether the variable is exported.
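*
* For an exported declaration 'var x = f()' this produces, roughly (illustrative,
* using generated names), the expression exports_1("x", x = f()) while hoisting
* 'var x;' to the module body.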
*/
function createVariableAssignment(name: Identifier, value: Expression, location: TextRange, isExportedDeclaration: boolean) {
hoistVariableDeclaration(getSynthesizedClone(name));
return isExportedDeclaration
? createExportExpression(name, preventSubstitution(setTextRange(createAssignment(name, value), location)))
: preventSubstitution(setTextRange(createAssignment(name, value), location));
}
/**
* Visits a MergeDeclarationMarker used as a placeholder for the beginning of a merged
* and transformed declaration.
*
* @param node The node to visit.
*/
function visitMergeDeclarationMarker(node: MergeDeclarationMarker): VisitResult<Statement> {
// For an EnumDeclaration or ModuleDeclaration that merges with a preceding
// declaration we do not emit a leading variable declaration. To preserve the
// begin/end semantics of the declaration and to properly handle exports
// we wrapped the leading variable declaration in a `MergeDeclarationMarker`.
//
// To balance the declaration, we defer the exports of the elided variable
// statement until we visit this declaration's `EndOfDeclarationMarker`.
if (hasAssociatedEndOfDeclarationMarker(node) && node.original.kind === SyntaxKind.VariableStatement) {
const id = getOriginalNodeId(node);
const isExportedDeclaration = hasModifier(node.original, ModifierFlags.Export);
deferredExports[id] = appendExportsOfVariableStatement(deferredExports[id], <VariableStatement>node.original, isExportedDeclaration);
}
return node;
}
/**
* Determines whether a node has an associated EndOfDeclarationMarker.
*
* @param node The node to test.
*/
function hasAssociatedEndOfDeclarationMarker(node: Node) {
return (getEmitFlags(node) & EmitFlags.HasEndOfDeclarationMarker) !== 0;
}
/**
* Visits an EndOfDeclarationMarker used as a placeholder for the end of a transformed
* declaration.
*
* @param node The node to visit.
*/
function visitEndOfDeclarationMarker(node: EndOfDeclarationMarker): VisitResult<Statement> {
// For some transformations we emit an `EndOfDeclarationMarker` to mark the actual
// end of the transformed declaration. We use this marker to emit any deferred exports
// of the declaration.
const id = getOriginalNodeId(node);
const statements = deferredExports[id];
if (statements) {
delete deferredExports[id];
return append(statements, node);
}
return node;
}
/**
* Appends the exports of an ImportDeclaration to a statement list, returning the
* statement list.
*
* @param statements A statement list to which the down-level export statements are to be
* appended. If `statements` is `undefined`, a new array is allocated if statements are
* appended.
* @param decl The declaration whose exports are to be recorded.
*/
function appendExportsOfImportDeclaration(statements: Statement[], decl: ImportDeclaration) {
if (moduleInfo.exportEquals) {
return statements;
}
const importClause = decl.importClause;
if (!importClause) {
return statements;
}
if (importClause.name) {
statements = appendExportsOfDeclaration(statements, importClause);
}
const namedBindings = importClause.namedBindings;
if (namedBindings) {
switch (namedBindings.kind) {
case SyntaxKind.NamespaceImport:
statements = appendExportsOfDeclaration(statements, namedBindings);
break;
case SyntaxKind.NamedImports:
for (const importBinding of namedBindings.elements) {
statements = appendExportsOfDeclaration(statements, importBinding);
}
break;
}
}
return statements;
}
/**
* Appends the export of an ImportEqualsDeclaration to a statement list, returning the
* statement list.
*
* @param statements A statement list to which the down-level export statements are to be
* appended. If `statements` is `undefined`, a new array is allocated if statements are
* appended.
* @param decl The declaration whose exports are to be recorded.
*/
function appendExportsOfImportEqualsDeclaration(statements: Statement[], decl: ImportEqualsDeclaration): Statement[] | undefined {
if (moduleInfo.exportEquals) {
return statements;
}
return appendExportsOfDeclaration(statements, decl);
}
/**
* Appends the exports of a VariableStatement to a statement list, returning the statement
* list.
*
* @param statements A statement list to which the down-level export statements are to be
* appended. If `statements` is `undefined`, a new array is allocated if statements are
* appended.
* @param node The VariableStatement whose exports are to be recorded.
* @param exportSelf A value indicating whether to also export each VariableDeclaration of
* the node's declaration list.
*/
function appendExportsOfVariableStatement(statements: Statement[] | undefined, node: VariableStatement, exportSelf: boolean): Statement[] | undefined {
if (moduleInfo.exportEquals) {
return statements;
}
for (const decl of node.declarationList.declarations) {
if (decl.initializer || exportSelf) {
statements = appendExportsOfBindingElement(statements, decl, exportSelf);
}
}
return statements;
}
/**
* Appends the exports of a VariableDeclaration or BindingElement to a statement list,
* returning the statement list.
*
* @param statements A statement list to which the down-level export statements are to be
* appended. If `statements` is `undefined`, a new array is allocated if statements are
* appended.
* @param decl The declaration whose exports are to be recorded.
* @param exportSelf A value indicating whether to also export the declaration itself.
*/
function appendExportsOfBindingElement(statements: Statement[] | undefined, decl: VariableDeclaration | BindingElement, exportSelf: boolean): Statement[] | undefined {
if (moduleInfo.exportEquals) {
return statements;
}
if (isBindingPattern(decl.name)) {
for (const element of decl.name.elements) {
if (!isOmittedExpression(element)) {
statements = appendExportsOfBindingElement(statements, element, exportSelf);
}
}
}
else if (!isGeneratedIdentifier(decl.name)) {
let excludeName: string;
if (exportSelf) {
statements = appendExportStatement(statements, decl.name, getLocalName(decl));
excludeName = unescapeLeadingUnderscores(decl.name.text);
}
statements = appendExportsOfDeclaration(statements, decl, excludeName);
}
return statements;
}
/**
* Appends the exports of a ClassDeclaration or FunctionDeclaration to a statement list,
* returning the statement list.
*
* @param statements A statement list to which the down-level export statements are to be
* appended. If `statements` is `undefined`, a new array is allocated if statements are
* appended.
* @param decl The declaration whose exports are to be recorded.
*/
function appendExportsOfHoistedDeclaration(statements: Statement[] | undefined, decl: ClassDeclaration | FunctionDeclaration): Statement[] | undefined {
if (moduleInfo.exportEquals) {
return statements;
}
let excludeName: string;
if (hasModifier(decl, ModifierFlags.Export)) {
const exportName = hasModifier(decl, ModifierFlags.Default) ? createLiteral("default") : decl.name;
statements = appendExportStatement(statements, exportName, getLocalName(decl));
excludeName = getTextOfIdentifierOrLiteral(exportName);
}
if (decl.name) {
statements = appendExportsOfDeclaration(statements, decl, excludeName);
}
return statements;
}
/**
* Appends the exports of a declaration to a statement list, returning the statement list.
*
* @param statements A statement list to which the down-level export statements are to be
* appended. If `statements` is `undefined`, a new array is allocated if statements are
* appended.
* @param decl The declaration to export.
* @param excludeName An optional name to exclude from exports.
*/
function appendExportsOfDeclaration(statements: Statement[] | undefined, decl: Declaration, excludeName?: string): Statement[] | undefined {
if (moduleInfo.exportEquals) {
return statements;
}
const name = getDeclarationName(decl);
const exportSpecifiers = moduleInfo.exportSpecifiers.get(unescapeLeadingUnderscores(name.text));
if (exportSpecifiers) {
for (const exportSpecifier of exportSpecifiers) {
if (exportSpecifier.name.text !== excludeName) {
statements = appendExportStatement(statements, exportSpecifier.name, name);
}
}
}
return statements;
}
/**
* Appends the down-level representation of an export to a statement list, returning the
* statement list.
*
* @param statements A statement list to which the down-level export statements are to be
* appended. If `statements` is `undefined`, a new array is allocated if statements are
* appended.
* @param exportName The name of the export.
* @param expression The expression to export.
* @param allowComments Whether to allow comments on the export.
*/
function appendExportStatement(statements: Statement[] | undefined, exportName: Identifier | StringLiteral, expression: Expression, allowComments?: boolean): Statement[] | undefined {
statements = append(statements, createExportStatement(exportName, expression, allowComments));
return statements;
}
/**
* Creates a call to the current file's export function to export a value.
*
* @param name The bound name of the export.
* @param value The exported value.
* @param allowComments An optional value indicating whether to emit comments for the statement.
*/
function createExportStatement(name: Identifier | StringLiteral, value: Expression, allowComments?: boolean) {
const statement = createStatement(createExportExpression(name, value));
startOnNewLine(statement);
if (!allowComments) {
setEmitFlags(statement, EmitFlags.NoComments);
}
return statement;
}
/**
* Creates a call to the current file's export function to export a value.
*
* @param name The bound name of the export.
* @param value The exported value.
*/
function createExportExpression(name: Identifier | StringLiteral, value: Expression) {
const exportName = isIdentifier(name) ? createLiteral(name) : name;
return createCall(exportFunction, /*typeArguments*/ undefined, [exportName, value]);
}
//
// Top-Level or Nested Source Element Visitors
//
/**
* Visit elements that may appear at the top level of a module or nested within another statement.
*
* @param node The node to visit.
*/
function nestedElementVisitor(node: Node): VisitResult<Node> {
switch (node.kind) {
case SyntaxKind.VariableStatement:
return visitVariableStatement(<VariableStatement>node);
case SyntaxKind.FunctionDeclaration:
return visitFunctionDeclaration(<FunctionDeclaration>node);
case SyntaxKind.ClassDeclaration:
return visitClassDeclaration(<ClassDeclaration>node);
case SyntaxKind.ForStatement:
return visitForStatement(<ForStatement>node);
case SyntaxKind.ForInStatement:
return visitForInStatement(<ForInStatement>node);
case SyntaxKind.ForOfStatement:
return visitForOfStatement(<ForOfStatement>node);
case SyntaxKind.DoStatement:
return visitDoStatement(<DoStatement>node);
case SyntaxKind.WhileStatement:
return visitWhileStatement(<WhileStatement>node);
case SyntaxKind.LabeledStatement:
return visitLabeledStatement(<LabeledStatement>node);
case SyntaxKind.WithStatement:
return visitWithStatement(<WithStatement>node);
case SyntaxKind.SwitchStatement:
return visitSwitchStatement(<SwitchStatement>node);
case SyntaxKind.CaseBlock:
return visitCaseBlock(<CaseBlock>node);
case SyntaxKind.CaseClause:
return visitCaseClause(<CaseClause>node);
case SyntaxKind.DefaultClause:
return visitDefaultClause(<DefaultClause>node);
case SyntaxKind.TryStatement:
return visitTryStatement(<TryStatement>node);
case SyntaxKind.CatchClause:
return visitCatchClause(<CatchClause>node);
case SyntaxKind.Block:
return visitBlock(<Block>node);
case SyntaxKind.MergeDeclarationMarker:
return visitMergeDeclarationMarker(<MergeDeclarationMarker>node);
case SyntaxKind.EndOfDeclarationMarker:
return visitEndOfDeclarationMarker(<EndOfDeclarationMarker>node);
default:
return destructuringAndImportCallVisitor(node);
}
}
/**
* Visits the body of a ForStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitForStatement(node: ForStatement): VisitResult<Statement> {
const savedEnclosingBlockScopedContainer = enclosingBlockScopedContainer;
enclosingBlockScopedContainer = node;
node = updateFor(
node,
visitForInitializer(node.initializer),
visitNode(node.condition, destructuringAndImportCallVisitor, isExpression),
visitNode(node.incrementor, destructuringAndImportCallVisitor, isExpression),
visitNode(node.statement, nestedElementVisitor, isStatement)
);
enclosingBlockScopedContainer = savedEnclosingBlockScopedContainer;
return node;
}
/**
* Visits the body of a ForInStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitForInStatement(node: ForInStatement): VisitResult<Statement> {
const savedEnclosingBlockScopedContainer = enclosingBlockScopedContainer;
enclosingBlockScopedContainer = node;
node = updateForIn(
node,
visitForInitializer(node.initializer),
visitNode(node.expression, destructuringAndImportCallVisitor, isExpression),
visitNode(node.statement, nestedElementVisitor, isStatement, liftToBlock)
);
enclosingBlockScopedContainer = savedEnclosingBlockScopedContainer;
return node;
}
/**
* Visits the body of a ForOfStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitForOfStatement(node: ForOfStatement): VisitResult<Statement> {
const savedEnclosingBlockScopedContainer = enclosingBlockScopedContainer;
enclosingBlockScopedContainer = node;
node = updateForOf(
node,
node.awaitModifier,
visitForInitializer(node.initializer),
visitNode(node.expression, destructuringAndImportCallVisitor, isExpression),
visitNode(node.statement, nestedElementVisitor, isStatement, liftToBlock)
);
enclosingBlockScopedContainer = savedEnclosingBlockScopedContainer;
return node;
}
/**
* Determines whether to hoist the initializer of a ForStatement, ForInStatement, or
* ForOfStatement.
*
* @param node The node to test.
*/
function shouldHoistForInitializer(node: ForInitializer): node is VariableDeclarationList {
return isVariableDeclarationList(node)
&& shouldHoistVariableDeclarationList(node);
}
/**
* Visits the initializer of a ForStatement, ForInStatement, or ForOfStatement
*
* @param node The node to visit.
*/
function visitForInitializer(node: ForInitializer): ForInitializer {
if (!node) {
return node;
}
if (shouldHoistForInitializer(node)) {
let expressions: Expression[];
for (const variable of node.declarations) {
expressions = append(expressions, transformInitializedVariable(variable, /*isExportedDeclaration*/ false));
}
return expressions ? inlineExpressions(expressions) : createOmittedExpression();
}
else {
return visitEachChild(node, nestedElementVisitor, context);
}
}
/**
* Visits the body of a DoStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitDoStatement(node: DoStatement): VisitResult<Statement> {
return updateDo(
node,
visitNode(node.statement, nestedElementVisitor, isStatement, liftToBlock),
visitNode(node.expression, destructuringAndImportCallVisitor, isExpression)
);
}
/**
* Visits the body of a WhileStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitWhileStatement(node: WhileStatement): VisitResult<Statement> {
return updateWhile(
node,
visitNode(node.expression, destructuringAndImportCallVisitor, isExpression),
visitNode(node.statement, nestedElementVisitor, isStatement, liftToBlock)
);
}
/**
* Visits the body of a LabeledStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitLabeledStatement(node: LabeledStatement): VisitResult<Statement> {
return updateLabel(
node,
node.label,
visitNode(node.statement, nestedElementVisitor, isStatement, liftToBlock)
);
}
/**
* Visits the body of a WithStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitWithStatement(node: WithStatement): VisitResult<Statement> {
return updateWith(
node,
visitNode(node.expression, destructuringAndImportCallVisitor, isExpression),
visitNode(node.statement, nestedElementVisitor, isStatement, liftToBlock)
);
}
/**
* Visits the body of a SwitchStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitSwitchStatement(node: SwitchStatement): VisitResult<Statement> {
return updateSwitch(
node,
visitNode(node.expression, destructuringAndImportCallVisitor, isExpression),
visitNode(node.caseBlock, nestedElementVisitor, isCaseBlock)
);
}
/**
* Visits the body of a CaseBlock to hoist declarations.
*
* @param node The node to visit.
*/
function visitCaseBlock(node: CaseBlock): CaseBlock {
const savedEnclosingBlockScopedContainer = enclosingBlockScopedContainer;
enclosingBlockScopedContainer = node;
node = updateCaseBlock(
node,
visitNodes(node.clauses, nestedElementVisitor, isCaseOrDefaultClause)
);
enclosingBlockScopedContainer = savedEnclosingBlockScopedContainer;
return node;
}
/**
* Visits the body of a CaseClause to hoist declarations.
*
* @param node The node to visit.
*/
function visitCaseClause(node: CaseClause): VisitResult<CaseOrDefaultClause> {
return updateCaseClause(
node,
visitNode(node.expression, destructuringAndImportCallVisitor, isExpression),
visitNodes(node.statements, nestedElementVisitor, isStatement)
);
}
/**
* Visits the body of a DefaultClause to hoist declarations.
*
* @param node The node to visit.
*/
function visitDefaultClause(node: DefaultClause): VisitResult<CaseOrDefaultClause> {
return visitEachChild(node, nestedElementVisitor, context);
}
/**
* Visits the body of a TryStatement to hoist declarations.
*
* @param node The node to visit.
*/
function visitTryStatement(node: TryStatement): VisitResult<Statement> {
return visitEachChild(node, nestedElementVisitor, context);
}
/**
* Visits the body of a CatchClause to hoist declarations.
*
* @param node The node to visit.
*/
function visitCatchClause(node: CatchClause): CatchClause {
const savedEnclosingBlockScopedContainer = enclosingBlockScopedContainer;
enclosingBlockScopedContainer = node;
node = updateCatchClause(
node,
node.variableDeclaration,
visitNode(node.block, nestedElementVisitor, isBlock)
);
enclosingBlockScopedContainer = savedEnclosingBlockScopedContainer;
return node;
}
/**
* Visits the body of a Block to hoist declarations.
*
* @param node The node to visit.
*/
function visitBlock(node: Block): Block {
const savedEnclosingBlockScopedContainer = enclosingBlockScopedContainer;
enclosingBlockScopedContainer = node;
node = visitEachChild(node, nestedElementVisitor, context);
enclosingBlockScopedContainer = savedEnclosingBlockScopedContainer;
return node;
}
//
// Destructuring Assignment Visitors
//
/**
* Visit nodes to flatten destructuring assignments to exported symbols.
*
* @param node The node to visit.
*/
function destructuringAndImportCallVisitor(node: Node): VisitResult<Node> {
if (node.transformFlags & TransformFlags.DestructuringAssignment
&& node.kind === SyntaxKind.BinaryExpression) {
return visitDestructuringAssignment(<DestructuringAssignment>node);
}
else if (isImportCall(node)) {
return visitImportCallExpression(node);
}
else if ((node.transformFlags & TransformFlags.ContainsDestructuringAssignment) || (node.transformFlags & TransformFlags.ContainsDynamicImport)) {
return visitEachChild(node, destructuringAndImportCallVisitor, context);
}
else {
return node;
}
}
function visitImportCallExpression(node: ImportCall): Expression {
// import("./blah")
// emit as
// System.register([], function (_export, _context) {
// return {
// setters: [],
// execute: () => {
// _context.import('./blah');
// }
// };
// });
return createCall(
createPropertyAccess(
contextObject,
createIdentifier("import")
),
/*typeArguments*/ undefined,
node.arguments
);
}
/**
* Visits a DestructuringAssignment to flatten destructuring to exported symbols.
*
* @param node The node to visit.
*/
function visitDestructuringAssignment(node: DestructuringAssignment): VisitResult<Expression> {
if (hasExportedReferenceInDestructuringTarget(node.left)) {
return flattenDestructuringAssignment(
node,
destructuringAndImportCallVisitor,
context,
FlattenLevel.All,
/*needsValue*/ true
);
}
return visitEachChild(node, destructuringAndImportCallVisitor, context);
}
/**
* Determines whether the target of a destructuring assignment refers to an exported symbol.
*
* @param node The destructuring target.
*/
function hasExportedReferenceInDestructuringTarget(node: Expression | ObjectLiteralElementLike): boolean {
if (isAssignmentExpression(node, /*excludeCompoundAssignment*/ true)) {
return hasExportedReferenceInDestructuringTarget(node.left);
}
else if (isSpreadElement(node)) {
return hasExportedReferenceInDestructuringTarget(node.expression);
}
else if (isObjectLiteralExpression(node)) {
return some(node.properties, hasExportedReferenceInDestructuringTarget);
}
else if (isArrayLiteralExpression(node)) {
return some(node.elements, hasExportedReferenceInDestructuringTarget);
}
else if (isShorthandPropertyAssignment(node)) {
return hasExportedReferenceInDestructuringTarget(node.name);
}
else if (isPropertyAssignment(node)) {
return hasExportedReferenceInDestructuringTarget(node.initializer);
}
else if (isIdentifier(node)) {
const container = resolver.getReferencedExportContainer(node);
return container !== undefined && container.kind === SyntaxKind.SourceFile;
}
else {
return false;
}
}
//
// Modifier Visitors
//
/**
* Visit nodes to elide module-specific modifiers.
*
* @param node The node to visit.
*/
function modifierVisitor(node: Node): VisitResult<Node> {
switch (node.kind) {
case SyntaxKind.ExportKeyword:
case SyntaxKind.DefaultKeyword:
return undefined;
}
return node;
}
//
// Emit Notification
//
/**
* Hook for node emit notifications.
*
* @param hint A hint as to the intended usage of the node.
* @param node The node to emit.
* @param emitCallback A callback used to emit the node in the printer.
*/
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void {
if (node.kind === SyntaxKind.SourceFile) {
const id = getOriginalNodeId(node);
currentSourceFile = <SourceFile>node;
moduleInfo = moduleInfoMap[id];
exportFunction = exportFunctionsMap[id];
noSubstitution = noSubstitutionMap[id];
if (noSubstitution) {
delete noSubstitutionMap[id];
}
previousOnEmitNode(hint, node, emitCallback);
currentSourceFile = undefined;
moduleInfo = undefined;
exportFunction = undefined;
noSubstitution = undefined;
}
else {
previousOnEmitNode(hint, node, emitCallback);
}
}
//
// Substitutions
//
/**
* Hooks node substitutions.
*
* @param hint A hint as to the intended usage of the node.
* @param node The node to substitute.
*/
function onSubstituteNode(hint: EmitHint, node: Node) {
node = previousOnSubstituteNode(hint, node);
if (isSubstitutionPrevented(node)) {
return node;
}
if (hint === EmitHint.Expression) {
return substituteExpression(<Expression>node);
}
return node;
}
/**
* Substitute the expression, if necessary.
*
* @param node The node to substitute.
*/
function substituteExpression(node: Expression) {
switch (node.kind) {
case SyntaxKind.Identifier:
return substituteExpressionIdentifier(<Identifier>node);
case SyntaxKind.BinaryExpression:
return substituteBinaryExpression(<BinaryExpression>node);
case SyntaxKind.PrefixUnaryExpression:
case SyntaxKind.PostfixUnaryExpression:
return substituteUnaryExpression(<PrefixUnaryExpression | PostfixUnaryExpression>node);
}
return node;
}
/**
* Substitution for an Identifier expression that may contain an imported or exported symbol.
*
* @param node The node to substitute.
*/
function substituteExpressionIdentifier(node: Identifier): Expression {
if (getEmitFlags(node) & EmitFlags.HelperName) {
const externalHelpersModuleName = getExternalHelpersModuleName(currentSourceFile);
if (externalHelpersModuleName) {
return createPropertyAccess(externalHelpersModuleName, node);
}
return node;
}
// When we see an identifier in an expression position that
// points to an imported symbol, we should substitute a qualified
// reference to the imported symbol if one is needed.
//
// - We do not substitute generated identifiers for any reason.
// - We do not substitute identifiers tagged with the LocalName flag.
if (!isGeneratedIdentifier(node) && !isLocalName(node)) {
const importDeclaration = resolver.getReferencedImportDeclaration(node);
if (importDeclaration) {
if (isImportClause(importDeclaration)) {
return setTextRange(
createPropertyAccess(
getGeneratedNameForNode(importDeclaration.parent),
createIdentifier("default")
),
/*location*/ node
);
}
else if (isImportSpecifier(importDeclaration)) {
return setTextRange(
createPropertyAccess(
getGeneratedNameForNode(importDeclaration.parent.parent.parent),
getSynthesizedClone(importDeclaration.propertyName || importDeclaration.name)
),
/*location*/ node
);
}
}
}
return node;
}
/**
* Substitution for a BinaryExpression that may contain an imported or exported symbol.
*
* @param node The node to substitute.
*/
function substituteBinaryExpression(node: BinaryExpression): Expression {
// When we see an assignment expression whose left-hand side is an exported symbol,
// we should ensure all exports of that symbol are updated with the correct value.
//
// - We do not substitute generated identifiers for any reason.
// - We do not substitute identifiers tagged with the LocalName flag.
// - We do not substitute identifiers that were originally the name of an enum or
// namespace due to how they are transformed in TypeScript.
// - We only substitute identifiers that are exported at the top level.
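//
// For example (illustrative), if 'x' is exported under the names 'x' and 'y':
//
//   x = value   ->   exports("y", exports("x", x = value))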
if (isAssignmentOperator(node.operatorToken.kind)
&& isIdentifier(node.left)
&& !isGeneratedIdentifier(node.left)
&& !isLocalName(node.left)
&& !isDeclarationNameOfEnumOrNamespace(node.left)) {
const exportedNames = getExports(node.left);
if (exportedNames) {
// For each additional export of the declaration, apply an export assignment.
let expression: Expression = node;
for (const exportName of exportedNames) {
expression = createExportExpression(exportName, preventSubstitution(expression));
}
return expression;
}
}
return node;
}
/**
* Substitution for a UnaryExpression that may contain an imported or exported symbol.
*
* @param node The node to substitute.
*/
function substituteUnaryExpression(node: PrefixUnaryExpression | PostfixUnaryExpression): Expression {
// When we see a prefix or postfix increment expression whose operand is an exported
// symbol, we should ensure all exports of that symbol are updated with the correct
// value.
//
// - We do not substitute generated identifiers for any reason.
// - We do not substitute identifiers tagged with the LocalName flag.
// - We do not substitute identifiers that were originally the name of an enum or
// namespace due to how they are transformed in TypeScript.
// - We only substitute identifiers that are exported at the top level.
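//
// For example (illustrative), for an exported 'i':
//
//   ++i   ->   exports("i", ++i)
//   i++   ->   exports("i", ++i) - 1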
if ((node.operator === SyntaxKind.PlusPlusToken || node.operator === SyntaxKind.MinusMinusToken)
&& isIdentifier(node.operand)
&& !isGeneratedIdentifier(node.operand)
&& !isLocalName(node.operand)
&& !isDeclarationNameOfEnumOrNamespace(node.operand)) {
const exportedNames = getExports(node.operand);
if (exportedNames) {
let expression: Expression = node.kind === SyntaxKind.PostfixUnaryExpression
? setTextRange(
createPrefix(
node.operator,
node.operand
),
node
)
: node;
for (const exportName of exportedNames) {
expression = createExportExpression(exportName, preventSubstitution(expression));
}
if (node.kind === SyntaxKind.PostfixUnaryExpression) {
expression = node.operator === SyntaxKind.PlusPlusToken
? createSubtract(preventSubstitution(expression), createLiteral(1))
: createAdd(preventSubstitution(expression), createLiteral(1));
}
return expression;
}
}
return node;
}
/**
* Gets the exports of a name.
*
* @param name The name.
*/
function getExports(name: Identifier) {
let exportedNames: Identifier[];
if (!isGeneratedIdentifier(name)) {
const valueDeclaration = resolver.getReferencedImportDeclaration(name)
|| resolver.getReferencedValueDeclaration(name);
if (valueDeclaration) {
const exportContainer = resolver.getReferencedExportContainer(name, /*prefixLocals*/ false);
if (exportContainer && exportContainer.kind === SyntaxKind.SourceFile) {
exportedNames = append(exportedNames, getDeclarationName(valueDeclaration));
}
exportedNames = addRange(exportedNames, moduleInfo && moduleInfo.exportedBindings[getOriginalNodeId(valueDeclaration)]);
}
}
return exportedNames;
}
/**
* Prevent substitution of a node for this transformer.
*
* @param node The node which should not be substituted.
*/
function preventSubstitution<T extends Node>(node: T): T {
if (noSubstitution === undefined) noSubstitution = [];
noSubstitution[getNodeId(node)] = true;
return node;
}
/**
* Determines whether a node should not be substituted.
*
* @param node The node to test.
*/
function isSubstitutionPrevented(node: Node) {
return noSubstitution && node.id && noSubstitution[node.id];
}
}
}
|
apache-2.0
|
Buble1981/MyDroolsFork
|
drools-compiler/src/test/java/org/drools/compiler/phreak/RemoveRuleTest.java
|
29580
|
package org.drools.compiler.phreak;
import org.drools.core.FactHandle;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalRuleBase;
import org.drools.core.common.NodeMemories;
import org.drools.core.impl.KnowledgeBaseImpl;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.EvalConditionNode;
import org.drools.core.reteoo.JoinNode;
import org.drools.core.reteoo.LeftInputAdapterNode;
import org.drools.core.reteoo.LeftInputAdapterNode.LiaNodeMemory;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.reteoo.PathMemory;
import org.drools.core.reteoo.ReteooRuleBase;
import org.drools.core.reteoo.ReteooWorkingMemoryInterface;
import org.drools.core.reteoo.RuleTerminalNode;
import org.drools.core.reteoo.SegmentMemory;
import org.junit.Test;
import org.kie.api.KieBaseConfiguration;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.rule.Match;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.builder.conf.RuleEngineOption;
import org.kie.internal.definition.KnowledgePackage;
import org.kie.internal.io.ResourceFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.TestCase.assertNotSame;
import static junit.framework.TestCase.assertSame;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
public class RemoveRuleTest {
@Test
public void testPopulatedSingleRuleNoSharing() throws Exception {
KieBaseConfiguration kconf = ( KieBaseConfiguration ) KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
kconf.setOption( RuleEngineOption.PHREAK );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(kconf);
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase.newStatefulKnowledgeSession()).session;
wm.insert(new A(1));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new C(2));
wm.insert(new D(1));
wm.insert(new E(1));
wm.fireAllRules();
kbase.addKnowledgePackages( buildKnowledgePackage("r1", " A() B() C(object == 2) D() E()\n") );
List list = new ArrayList();
wm.setGlobal("list", list);
ObjectTypeNode aotn = getObjectTypeNode(kbase, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getSinkPropagator().getSinks()[0];
LiaNodeMemory lm = ( LiaNodeMemory ) wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
assertEquals(1, sm.getStagedLeftTuples().insertSize());
wm.fireAllRules();
BetaMemory bMem = ( BetaMemory ) sm.getNodeMemories().get(1);
assertEquals( 1, bMem.getLeftTupleMemory().size() );
assertEquals( 1, bMem.getRightTupleMemory().size() );
BetaMemory eMem = ( BetaMemory ) sm.getNodeMemories().get(4);
assertEquals( 1, eMem.getLeftTupleMemory().size() );
assertEquals( 1, eMem.getRightTupleMemory().size() );
NodeMemories nms = wm.getNodeMemories();
assertEquals( 13, countNodeMemories(nms));
assertEquals(0, sm.getStagedLeftTuples().insertSize());
assertEquals(1, list.size() );
assertEquals( "r1", ((Match)list.get(0)).getRule().getName() );
kbase.removeRule("org.kie", "r1");
assertEquals( 6, countNodeMemories(nms)); // still has OTN
assertEquals( 0, bMem.getLeftTupleMemory().size() );
assertEquals( 0, bMem.getRightTupleMemory().size() );
assertEquals( 0, eMem.getLeftTupleMemory().size() );
assertEquals( 0, eMem.getRightTupleMemory().size() );
}
@Test
public void testPopulatedSingleRuleNoSharingWithSubnetworkAtStart() throws Exception {
KieBaseConfiguration kconf = ( KieBaseConfiguration ) KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
kconf.setOption( RuleEngineOption.PHREAK );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(kconf);
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase.newStatefulKnowledgeSession()).session;
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new D(1));
wm.insert(new E(1));
wm.insert(new C(2));
wm.fireAllRules();
kbase.addKnowledgePackages( buildKnowledgePackage("r1", " A() not( B() and C() ) D() E()\n") );
List list = new ArrayList();
wm.setGlobal("list", list);
ObjectTypeNode aotn = getObjectTypeNode(kbase, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getSinkPropagator().getSinks()[0];
LiaNodeMemory lm = ( LiaNodeMemory ) wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
SegmentMemory subSm = sm.getFirst();
SegmentMemory mainSm = subSm.getNext();
wm.fireAllRules();
assertEquals(2, list.size() );
assertEquals( "r1", ((Match)list.get(0)).getRule().getName() );
assertEquals( "r1", ((Match)list.get(1)).getRule().getName() );
kbase.removeRule("org.kie", "r1");
wm.insert(new A(1));
wm.fireAllRules();
assertEquals(2, list.size() );
}
private int countNodeMemories(NodeMemories nms) {
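    // Counts only the node memories that have been initialised for this
    // session; peekNodeMemory returns null rather than lazily creating one.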
int count = 0;
for ( int i = 0; i < nms.length(); i++ ) {
if ( nms.peekNodeMemory(i) != null ) {
System.out.println(nms.peekNodeMemory(i) );
count++;
}
}
return count;
}
@Test
public void testPopulatedRuleMidwayShare() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A() B() C(1;) D() E()\n");
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new A(3));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new C(2));
wm.insert(new D(1));
wm.insert(new E(1));
wm.fireAllRules();
assertEquals( 13, countNodeMemories(wm.getNodeMemories()));
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " a : A() B() C(2;) D() E()\n") );
wm.fireAllRules();
ObjectTypeNode aotn = getObjectTypeNode(kbase1, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getSinkPropagator().getSinks()[0];
JoinNode bNode = (JoinNode) liaNode.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c1Node = (JoinNode) bNode.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c2Node = (JoinNode) bNode.getSinkPropagator().getLastLeftTupleSink();
LiaNodeMemory lm = ( LiaNodeMemory ) wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
BetaMemory c1Mem = ( BetaMemory ) wm.getNodeMemory(c1Node);
assertSame( sm.getFirst(), c1Mem.getSegmentMemory());
assertEquals( 3, c1Mem.getLeftTupleMemory().size() );
assertEquals( 1, c1Mem.getRightTupleMemory().size() );
BetaMemory c2Mem = ( BetaMemory ) wm.getNodeMemory(c2Node);
SegmentMemory c2Smem = sm.getFirst().getNext();
assertSame( c2Smem, c2Mem.getSegmentMemory());
assertEquals( 3, c2Mem.getLeftTupleMemory().size() );
assertEquals( 1, c2Mem.getRightTupleMemory().size() );
assertEquals(6, list.size() );
kbase1.removeRule("org.kie", "r2");
assertEquals( 13, countNodeMemories(wm.getNodeMemories()));
assertNull( sm.getFirst());
assertEquals( 0, c2Mem.getLeftTupleMemory().size() );
assertEquals( 0, c2Mem.getRightTupleMemory().size() );
assertSame( sm, c1Mem.getSegmentMemory()); // c1SMem repoints back to original Smem
wm.insert(new A(1));
wm.fireAllRules();
assertEquals( "r1", ((Match)list.get(6)).getRule().getName() );
assertEquals(7, list.size() ); // only one more added, as the second rule was removed
}
@Test
public void testPopulatedRuleWithEvals() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " a:A() B() eval(1==1) eval(1==1) C(1;) \n");
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new A(3));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new C(2));
wm.insert(new D(1));
wm.insert(new E(1));
wm.fireAllRules();
assertEquals( 13, countNodeMemories(wm.getNodeMemories()));
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " a:A() B() eval(1==1) eval(1==1) C(2;) \n") );
wm.fireAllRules();
ObjectTypeNode aotn = getObjectTypeNode(kbase1, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getSinkPropagator().getSinks()[0];
JoinNode bNode = (JoinNode) liaNode.getSinkPropagator().getFirstLeftTupleSink();
EvalConditionNode e1 = (EvalConditionNode) bNode.getSinkPropagator().getFirstLeftTupleSink();
EvalConditionNode e2 = (EvalConditionNode) e1.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c1Node = (JoinNode) e2.getSinkPropagator().getFirstLeftTupleSink();
JoinNode c2Node = (JoinNode) e2.getSinkPropagator().getLastLeftTupleSink();
LiaNodeMemory lm = ( LiaNodeMemory ) wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
BetaMemory c1Mem = ( BetaMemory ) wm.getNodeMemory(c1Node);
assertSame( sm.getFirst(), c1Mem.getSegmentMemory());
assertEquals( 3, c1Mem.getLeftTupleMemory().size() );
assertEquals( 1, c1Mem.getRightTupleMemory().size() );
BetaMemory c2Mem = ( BetaMemory ) wm.getNodeMemory(c2Node);
SegmentMemory c2Smem = sm.getFirst().getNext();
assertSame( c2Smem, c2Mem.getSegmentMemory());
assertEquals( 3, c2Mem.getLeftTupleMemory().size() );
assertEquals( 1, c2Mem.getRightTupleMemory().size() );
assertEquals(6, list.size() );
kbase1.removeRule("org.kie", "r2");
assertEquals( 13, countNodeMemories(wm.getNodeMemories()));
assertNull( sm.getFirst());
assertEquals( 0, c2Mem.getLeftTupleMemory().size() );
assertEquals( 0, c2Mem.getRightTupleMemory().size() );
assertSame( sm, c1Mem.getSegmentMemory()); // c1SMem repoints back to original Smem
wm.insert(new A(1));
wm.fireAllRules();
assertEquals( "r1", ((Match)list.get(6)).getRule().getName() );
assertEquals(7, list.size() ); // only one more added, as the second rule was removed
}
@Test
public void testPopulatedSharedLiaNode() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A() B(1;) C() D() E()\n");
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new A(3));
wm.insert(new B(1));
wm.insert(new B(2));
wm.insert(new C(1));
wm.insert(new D(1));
wm.insert(new E(1));
wm.fireAllRules();
assertEquals( 3, list.size() );
assertEquals( 13, countNodeMemories(wm.getNodeMemories()));
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " a : A() B(2;) C() D() E()\n") );
wm.fireAllRules();
assertEquals( 19, countNodeMemories(wm.getNodeMemories()));
ObjectTypeNode aotn = getObjectTypeNode(kbase1, A.class );
LeftInputAdapterNode liaNode = (LeftInputAdapterNode) aotn.getSinkPropagator().getSinks()[0];
JoinNode b1Node = (JoinNode) liaNode.getSinkPropagator().getFirstLeftTupleSink();
JoinNode b2Node = (JoinNode) liaNode.getSinkPropagator().getLastLeftTupleSink();
JoinNode c1Node = (JoinNode) b1Node.getSinkPropagator().getLastLeftTupleSink();
LiaNodeMemory lm = ( LiaNodeMemory ) wm.getNodeMemory(liaNode);
SegmentMemory sm = lm.getSegmentMemory();
BetaMemory b1Mem = ( BetaMemory ) wm.getNodeMemory(b1Node);
assertSame( sm.getFirst(), b1Mem.getSegmentMemory());
assertEquals( 3, b1Mem.getLeftTupleMemory().size() );
assertEquals( 1, b1Mem.getRightTupleMemory().size() );
BetaMemory b2Mem = ( BetaMemory ) wm.getNodeMemory(b2Node);
SegmentMemory b2Smem = sm.getFirst().getNext();
assertSame( b2Smem, b2Mem.getSegmentMemory());
assertEquals( 3, b2Mem.getLeftTupleMemory().size() );
assertEquals( 1, b2Mem.getRightTupleMemory().size() );
assertEquals(6, list.size() );
BetaMemory c1Mem = ( BetaMemory ) wm.getNodeMemory(c1Node);
assertSame( b1Mem.getSegmentMemory(), c1Mem.getSegmentMemory() );
assertNotSame(b1Mem.getSegmentMemory(), b2Mem.getSegmentMemory());
wm.fireAllRules();
assertEquals(6, list.size() );
assertEquals( 19, countNodeMemories(wm.getNodeMemories()));
kbase1.removeRule("org.kie", "r2");
assertEquals( 13, countNodeMemories(wm.getNodeMemories()));
assertSame( sm, b1Mem.getSegmentMemory());
assertSame( sm, c1Mem.getSegmentMemory());
assertNull(sm.getFirst());
assertEquals( 3, b1Mem.getLeftTupleMemory().size() );
assertEquals( 1, b1Mem.getRightTupleMemory().size() );
//SegmentMemory b2Smem = sm.getFirst().remove();
assertSame( b2Smem, b2Mem.getSegmentMemory());
assertEquals( 0, b2Mem.getLeftTupleMemory().size() );
assertEquals( 0, b2Mem.getRightTupleMemory().size() );
wm.insert(new A(1));
wm.fireAllRules();
assertEquals( "r1", ((Match)list.get(6)).getRule().getName() );
assertEquals(7, list.size() ); // only one more added, as the second rule was removed
}
@Test
public void testPopulatedSharedLiaNodeNoBeta() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A()\n");
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
A a1 = new A(1);
InternalFactHandle fh1 = (InternalFactHandle) wm.insert(a1);
A a2 = new A(2);
InternalFactHandle fh2 = (InternalFactHandle) wm.insert(a2);
wm.fireAllRules();
assertEquals( 2, list.size() );
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " a : A()\n") );
wm.fireAllRules();
assertEquals( 4, list.size() );
kbase1.removeRule("org.kie", "r1");
kbase1.removeRule("org.kie", "r2");
list.clear();
assertNull( fh1.getFirstLeftTuple() );
assertNull( fh1.getLastLeftTuple() );
assertNull( fh2.getFirstLeftTuple() );
assertNull( fh2.getLastLeftTuple() );
wm.update( fh1,a1 );
wm.update( fh2,a2 );
wm.fireAllRules();
assertEquals( 0, list.size() );
}
@Test
public void testPopulatedSharedToRtn() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A() B() C() D() E()\n");
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new A(1));
wm.insert(new A(2));
wm.insert(new B(1));
wm.insert(new C(1));
wm.insert(new D(1));
wm.insert(new E(1));
wm.fireAllRules();
assertEquals( 2, list.size() );
assertEquals( 12, countNodeMemories(wm.getNodeMemories()));
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " A() B() C() D() E()\n") );
wm.fireAllRules();
assertEquals( 13, countNodeMemories(wm.getNodeMemories()));
assertEquals(4, list.size() );
RuleTerminalNode rtn1 = getRtn("r1", kbase1);
RuleTerminalNode rtn2 = getRtn("r2", kbase1);
PathMemory pmem1 = ( PathMemory ) wm.getNodeMemory(rtn1);
PathMemory pmem2 = ( PathMemory ) wm.getNodeMemory(rtn2);
SegmentMemory[] smems1 = pmem1.getSegmentMemories();
SegmentMemory[] smems2 = pmem2.getSegmentMemories();
assertEquals(2, smems1.length );
assertEquals(2, smems2.length );
assertSame( smems1[0], smems2[0] );
assertNotSame( smems1[1], smems2[1] );
SegmentMemory sm = smems1[0];
assertEquals( smems1[1], sm.getFirst() );
JoinNode eNode1 = ( JoinNode ) rtn1.getLeftTupleSource();
JoinNode eNode2 = ( JoinNode ) rtn2.getLeftTupleSource();
assertSame( eNode1, eNode2 );
pmem1 = ( PathMemory ) wm.getNodeMemory(rtn1);
kbase1.removeRule("org.kie", "r2");
System.out.println( "---" );
assertEquals( 12, countNodeMemories(wm.getNodeMemories()));
assertNull( sm.getFirst() );
pmem1 = ( PathMemory ) wm.getNodeMemory(rtn1);
smems1 = pmem1.getSegmentMemories();
assertEquals(1, smems1.length );
assertSame( sm, smems1[0]);
wm.insert(new A(1));
wm.fireAllRules();
assertEquals( "r1", ((Match)list.get(4)).getRule().getName() );
assertEquals(5, list.size() ); // only one more added, as the second rule was removed
}
@Test
public void testPopulatedMultipleSharesRemoveFirst() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A(1;) A(2;) B(1;) B(2;) C(1;) D() E()\n" );
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
InternalFactHandle fh1 = ( InternalFactHandle ) wm.insert(new A(1));
InternalFactHandle fh2 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh3 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh4 = ( InternalFactHandle ) wm.insert(new A(3));
InternalFactHandle fh5 = (InternalFactHandle) wm.insert(new B(1));
InternalFactHandle fh6 = (InternalFactHandle) wm.insert(new B(2));
InternalFactHandle fh7 = (InternalFactHandle) wm.insert(new C(1));
InternalFactHandle fh8 = (InternalFactHandle) wm.insert(new C(2));
InternalFactHandle fh9 = (InternalFactHandle) wm.insert(new D(1));
InternalFactHandle fh10 = (InternalFactHandle) wm.insert(new E(1));
wm.fireAllRules();
assertEquals( 2, list.size() );
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " A(1;) A(2;) B(1;) B(2;) C(2;) D() E()\n") );
kbase1.addKnowledgePackages( buildKnowledgePackage("r3", " A(1;) A(3;) B(1;) B(2;) C(2;) D() E()\n") );
wm.fireAllRules();
assertEquals( 5, list.size() );
kbase1.removeRule("org.kie", "r1");
list.clear();
wm.update( fh1, fh1.getObject() );
wm.update( fh2, fh2.getObject() );
wm.update( fh3, fh3.getObject() );
wm.update( fh4, fh4.getObject() );
wm.update( fh5, fh5.getObject() );
wm.update( fh6, fh6.getObject() );
wm.update( fh7, fh7.getObject() );
wm.update( fh8, fh8.getObject() );
wm.update( fh9, fh9.getObject() );
wm.update( fh10, fh10.getObject() );
wm.fireAllRules();
assertEquals( 3, list.size() );
}
@Test
public void testPopulatedMultipleSharesRemoveMid() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A(1;) A(2;) B(1;) B(2;) C(1;) D() E()\n" );
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
InternalFactHandle fh1 = ( InternalFactHandle ) wm.insert(new A(1));
InternalFactHandle fh2 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh3 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh4 = ( InternalFactHandle ) wm.insert(new A(3));
InternalFactHandle fh5 = (InternalFactHandle) wm.insert(new B(1));
InternalFactHandle fh6 = (InternalFactHandle) wm.insert(new B(2));
InternalFactHandle fh7 = (InternalFactHandle) wm.insert(new C(1));
InternalFactHandle fh8 = (InternalFactHandle) wm.insert(new C(2));
InternalFactHandle fh9 = (InternalFactHandle) wm.insert(new D(1));
InternalFactHandle fh10 = (InternalFactHandle) wm.insert(new E(1));
wm.fireAllRules();
assertEquals( 2, list.size() );
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " A(1;) A(2;) B(1;) B(2;) C(2;) D() E()\n") );
kbase1.addKnowledgePackages( buildKnowledgePackage("r3", " A(1;) A(3;) B(1;) B(2;) C(2;) D() E()\n") );
wm.fireAllRules();
assertEquals( 5, list.size() );
kbase1.removeRule("org.kie", "r2");
list.clear();
wm.update( fh1, fh1.getObject() );
wm.update( fh2, fh2.getObject() );
wm.update( fh3, fh3.getObject() );
wm.update( fh4, fh4.getObject() );
wm.update( fh5, fh5.getObject() );
wm.update( fh6, fh6.getObject() );
wm.update( fh7, fh7.getObject() );
wm.update( fh8, fh8.getObject() );
wm.update( fh9, fh9.getObject() );
wm.update( fh10, fh10.getObject() );
wm.fireAllRules();
assertEquals( 3, list.size() );
}
@Test
public void testPopulatedMultipleSharesRemoveLast() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A(1;) A(2;) B(1;) B(2;) C(1;) D() E()\n" );
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
InternalFactHandle fh1 = ( InternalFactHandle ) wm.insert(new A(1));
InternalFactHandle fh2 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh3 = ( InternalFactHandle ) wm.insert(new A(2));
InternalFactHandle fh4 = ( InternalFactHandle ) wm.insert(new A(3));
InternalFactHandle fh5 = (InternalFactHandle) wm.insert(new B(1));
InternalFactHandle fh6 = (InternalFactHandle) wm.insert(new B(2));
InternalFactHandle fh7 = (InternalFactHandle) wm.insert(new C(1));
InternalFactHandle fh8 = (InternalFactHandle) wm.insert(new C(2));
InternalFactHandle fh9 = (InternalFactHandle) wm.insert(new D(1));
InternalFactHandle fh10 = (InternalFactHandle) wm.insert(new E(1));
wm.fireAllRules();
assertEquals( 2, list.size() );
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " A(1;) A(2;) B(1;) B(2;) C(2;) D() E()\n") );
kbase1.addKnowledgePackages( buildKnowledgePackage("r3", " A(1;) A(3;) B(1;) B(2;) C(2;) D() E()\n") );
wm.fireAllRules();
assertEquals( 5, list.size() );
kbase1.removeRule("org.kie", "r3");
list.clear();
wm.update( fh1, fh1.getObject() );
wm.update( fh2, fh2.getObject() );
wm.update( fh3, fh3.getObject() );
wm.update( fh4, fh4.getObject() );
wm.update( fh5, fh5.getObject() );
wm.update( fh6, fh6.getObject() );
wm.update( fh7, fh7.getObject() );
wm.update( fh8, fh8.getObject() );
wm.update( fh9, fh9.getObject() );
wm.update( fh10, fh10.getObject() );
wm.fireAllRules();
assertEquals( 4, list.size() );
}
@Test
public void testSplitTwoBeforeCreatedSegment() throws Exception {
KnowledgeBase kbase1 = buildKnowledgeBase("r1", " A(1;) A(2;) B(1;) B(2;) C(1;) C(2;) D(1;) D(2;) E(1;) E(2;)\n" );
kbase1.addKnowledgePackages( buildKnowledgePackage("r2", " A(1;) A(2;) B(1;) B(2;) C(1;) C(2;) D(1;) D(2;) E(1;) E(2;)\n") );
kbase1.addKnowledgePackages( buildKnowledgePackage("r3", " A(1;) A(2;) B(1;) B(2;) C(1;) C(2;) D(1;) D(2;)\n") );
kbase1.addKnowledgePackages( buildKnowledgePackage("r4", " A(1;) A(2;) B(1;) B(2;) C(1;) C(2;) \n") );
ReteooWorkingMemoryInterface wm = ((StatefulKnowledgeSessionImpl)kbase1.newStatefulKnowledgeSession()).session;
List list = new ArrayList();
wm.setGlobal("list", list);
wm.insert(new E(1));
wm.insert(new E(2));
RuleTerminalNode rtn1 = getRtn( "r1", kbase1 );
RuleTerminalNode rtn2 = getRtn( "r2", kbase1 );
RuleTerminalNode rtn3 = getRtn( "r3", kbase1 );
RuleTerminalNode rtn4 = getRtn( "r4", kbase1 );
PathMemory pm1 = (PathMemory) wm.getNodeMemory(rtn1);
SegmentMemory[] smems = pm1.getSegmentMemories();
assertEquals(4, smems.length);
assertNull( smems[0]);
assertNull( smems[1]);
assertNull( smems[3]);
SegmentMemory sm = smems[2];
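    // A segment at path position p is linked via mask bit 1 << p, so the
    // segment at position 2 contributes bit 4 to the path's linked-segment mask.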
assertEquals( 2, sm.getPos() );
assertEquals( 4, sm.getSegmentPosMaskBit() );
assertEquals( 4, pm1.getLinkedSegmentMask() );
kbase1.addKnowledgePackages( buildKnowledgePackage("r5", " A(1;) A(2;) B(1;) B(2;) \n") );
smems = pm1.getSegmentMemories();
assertEquals(5, smems.length);
assertNull( smems[0]);
assertNull( smems[1]);
assertNull( smems[2]);
sm = smems[3];
assertEquals( 3, sm.getPos() );
assertEquals( 8, sm.getSegmentPosMaskBit() );
assertEquals( 8, pm1.getLinkedSegmentMask() );
RuleTerminalNode rtn5 = getRtn( "r5", kbase1 );
PathMemory pm5 = (PathMemory) wm.getNodeMemory(rtn5);
smems = pm5.getSegmentMemories();
assertEquals(2, smems.length);
assertNull( smems[0]);
assertNull( smems[1]);
}
private RuleTerminalNode getRtn(String ruleName, KnowledgeBase kbase) {
return ( RuleTerminalNode ) ((ReteooRuleBase)((KnowledgeBaseImpl) kbase).ruleBase).getReteooBuilder().getTerminalNodes(ruleName)[0];
}
private KnowledgeBase buildKnowledgeBase(String ruleName, String rule) {
String str = "";
str += "package org.kie \n";
str += "import " + A.class.getCanonicalName() + "\n" ;
str += "import " + B.class.getCanonicalName() + "\n" ;
str += "import " + C.class.getCanonicalName() + "\n" ;
str += "import " + D.class.getCanonicalName() + "\n" ;
str += "import " + E.class.getCanonicalName() + "\n" ;
str += "global java.util.List list \n";
str += "rule " + ruleName + " when \n";
str += rule;
str += "then \n";
str += " list.add( kcontext.getMatch() );\n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(), kbuilder.hasErrors() );
KieBaseConfiguration kconf = KnowledgeBaseFactory.newKnowledgeBaseConfiguration();
kconf.setOption( RuleEngineOption.PHREAK );
KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(kconf);
kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );
return kbase;
}
private Collection<KnowledgePackage> buildKnowledgePackage(String ruleName, String rule) {
String str = "";
str += "package org.kie \n";
str += "import " + A.class.getCanonicalName() + "\n" ;
str += "import " + B.class.getCanonicalName() + "\n" ;
str += "import " + C.class.getCanonicalName() + "\n" ;
str += "import " + D.class.getCanonicalName() + "\n" ;
str += "import " + E.class.getCanonicalName() + "\n" ;
str += "global java.util.List list \n";
str += "rule " + ruleName + " when \n";
str += rule;
str += "then \n";
str += " list.add( kcontext.getMatch() );\n";
str += "end \n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()),
ResourceType.DRL );
assertFalse( kbuilder.getErrors().toString(), kbuilder.hasErrors() );
return kbuilder.getKnowledgePackages();
}
public ObjectTypeNode getObjectTypeNode(KnowledgeBase kbase, Class<?> nodeClass) {
List<ObjectTypeNode> nodes = ((InternalRuleBase)((KnowledgeBaseImpl)kbase).ruleBase).getRete().getObjectTypeNodes();
for ( ObjectTypeNode n : nodes ) {
if ( ((ClassObjectType)n.getObjectType()).getClassType() == nodeClass ) {
return n;
}
}
return null;
}
}
|
apache-2.0
|
vowovrz/thinkinj
|
thinkinj/src/main/java/concurrency/SimpleThread.java
|
867
|
package concurrency;
//: concurrency/SimpleThread.java
// Inheriting directly from the Thread class.
public class SimpleThread extends Thread {
private int countDown = 5;
private static int threadCount = 0;
public SimpleThread() {
// Store the thread name:
super(Integer.toString(++threadCount));
start();
}
public String toString() {
return "#" + getName() + "(" + countDown + "), ";
}
public void run() {
while(true) {
System.out.print(this);
if(--countDown == 0)
return;
}
}
public static void main(String[] args) {
for(int i = 0; i < 5; i++)
new SimpleThread();
}
} /* Output:
#1(5), #1(4), #1(3), #1(2), #1(1), #2(5), #2(4), #2(3), #2(2), #2(1), #3(5), #3(4), #3(3), #3(2), #3(1), #4(5), #4(4), #4(3), #4(2), #4(1), #5(5), #5(4), #5(3), #5(2), #5(1),
*///:~
|
apache-2.0
|
Sage-Bionetworks/Synapse-Repository-Services
|
services/repository/src/test/java/org/sagebionetworks/repo/web/controller/AbstractAutowiredControllerTestBase.java
|
2633
|
package org.sagebionetworks.repo.web.controller;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.sagebionetworks.repo.manager.oauth.OIDCTokenHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.mock.web.MockServletConfig;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.StaticWebApplicationContext;
import org.springframework.web.servlet.DispatcherServlet;
/**
* Base class for autowired controller tests
*
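 * <p>A typical subclass is autowired against the same context and exercises
 * controllers through the helpers set up here (illustrative sketch; the class
 * name is hypothetical):
 * <pre>
 * public class MyControllerAutowiredTest extends AbstractAutowiredControllerTestBase {
 *     // call servletTestHelper / entityServletHelper against dispatchServlet
 * }
 * </pre>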
*/
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public abstract class AbstractAutowiredControllerTestBase implements ApplicationContextAware {
@Autowired
protected ServletTestHelper servletTestHelper;
@Autowired
private OIDCTokenHelper oidcTokenHelper;
protected DispatcherServlet dispatchServlet;
protected EntityServletTestHelper entityServletHelper;
private ApplicationContext parentContext;
// injected by spring
public void setApplicationContext(ApplicationContext parentContext) {
this.parentContext = parentContext;
}
@BeforeEach
public void beforeBase() throws Exception {
MockServletConfig servletConfig = new MockServletConfig("repository");
servletConfig.addInitParameter("contextConfigLocation", "classpath:test-empty-context.xml");
StaticWebApplicationContext webApplicationContext = new StaticWebApplicationContext();
webApplicationContext.setParent(parentContext);
webApplicationContext.refresh();
servletConfig.getServletContext().setAttribute(WebApplicationContext.class.getName() + ".ROOT", webApplicationContext);
dispatchServlet = new DispatcherServlet();
dispatchServlet.init(servletConfig);
servletTestHelper.setUp(dispatchServlet);
entityServletHelper = new EntityServletTestHelper(dispatchServlet, oidcTokenHelper);
}
@AfterEach
public void afterBase() throws Exception {
servletTestHelper.tearDown();
}
public ServletTestHelper getServletTestHelper() {
return servletTestHelper;
}
public DispatcherServlet getDispatcherServlet() {
return dispatchServlet;
}
public EntityServletTestHelper getEntityServletTestHelper() {
return entityServletHelper;
}
}
|
apache-2.0
|
bhb27/KA27
|
app/src/main/java/com/grarak/kerneladiutor/fragments/kernel/VMFragment.java
|
25404
|
/*
* Copyright (C) 2015 Willi Ye
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.grarak.kerneladiutor.fragments.kernel;
import android.os.Bundle;
import android.text.InputType;
import com.grarak.kerneladiutor.R;
import com.grarak.kerneladiutor.elements.cards.CardViewItem;
import com.grarak.kerneladiutor.elements.cards.EditTextCardView;
import com.grarak.kerneladiutor.elements.cards.PopupCardView;
import com.grarak.kerneladiutor.elements.cards.SeekBarCardView;
import com.grarak.kerneladiutor.elements.cards.SwitchCardView;
import com.grarak.kerneladiutor.elements.DDivider;
import com.grarak.kerneladiutor.fragments.RecyclerViewFragment;
import com.grarak.kerneladiutor.utils.kernel.VM;
import com.grarak.kerneladiutor.utils.kernel.Ram;
import com.grarak.kerneladiutor.utils.Utils;
import java.lang.Math;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Timer;
import java.util.TimerTask;
/**
* Created by willi on 27.12.14.
*/
public class VMFragment extends RecyclerViewFragment implements PopupCardView.DPopupCard.OnDPopupCardListener, SeekBarCardView.DSeekBarCard.OnDSeekBarCardListener, SwitchCardView.DSwitchCard.OnDSwitchCardListener {
private CardViewItem.DCardView mPRPressureCard, mPRAcgEffCard;
private CardViewItem.DCardView mZramDiskCard, mZramSwapUsedCard, mZramRWCard, mZramDataSizeCard;
private SeekBarCardView.DSeekBarCard mPRPerSwapSizeCard, mPRSwapWinCard, mPRSwapOptEffCard, mPRPressureMaxCard, mPRPressureMinCard, mDirtyRatioCard, mDirtyBackgroundRatioCard, mDirtyExpireCard, mDirtyWritebackCard, mOverCommitRatioCard, mSwappinessCard, mVFSCachePressureCard, mZRAMDisksizeCard, mZRAMMaxCompStreamsCard, mExtraFreeKbytesCard, mMinFreeKbytesCard;
private SeekBarCardView.DSeekBarCard mDirty_Writeback_SuspendCard, mDirty_Writeback_ActiveCard;
private PopupCardView.DPopupCard mZRAMCompAlgosCard;
private SwitchCardView.DSwitchCard mProcessReclaimCard, mLaptopModeCard, mDynamic_Dirty_WritebackCard;
private Timer setZRAMTimer = new Timer();
private boolean CanUpdateZramStatus = true;
@Override
public void init(Bundle savedInstanceState) {
super.init(savedInstanceState);
if (VM.hasProcessReclaim()) processreclaimInit();
if (VM.hasDirtyRatio()) dirtyratioInit();
if (VM.hasDirtyBackgroundRatio()) dirtybackgroundratioInit();
if (VM.hasDirtyExpire()) dirtyexpireInit();
if (VM.hasDirtyWriteback() && !VM.isDynamicDirtyWritebackActive()) dirtywritebackInit();
if (VM.hasDynamicDirtyWriteback()) dynamicdirtywritebackInit();
if (VM.hasOverCommitRatio()) overcommitratioInit();
if (VM.hasSwappiness()) swappinessInit();
if (VM.hasVFSCachePressure()) vfscachepressureInit();
if (VM.hasLaptopMode()) laptopmodeInit();
if (VM.hasMinFreeKbytes()) minfreekbytesInit();
if (VM.hasExtraFreeKbytes()) extrafreekbytesInit();
if (VM.hasZRAM()) {
zramInit();
ExtraZramInit();
}
Update();
}
private void processreclaimInit() {
DDivider mProcessReclaimDividerCard = new DDivider();
mProcessReclaimDividerCard.setText(getString(R.string.process_reclaim));
addView(mProcessReclaimDividerCard);
mProcessReclaimCard = new SwitchCardView.DSwitchCard();
mProcessReclaimCard.setTitle(getString(R.string.process_reclaim_enable));
mProcessReclaimCard.setDescription(getString(R.string.process_reclaim_enable_summary));
mProcessReclaimCard.setChecked(VM.isProcessReclaimActive());
mProcessReclaimCard.setOnDSwitchCardListener(this);
addView(mProcessReclaimCard);
if (VM.isProcessReclaimActive()) {
// abbreviation used below: PR = Process Reclaim
if (VM.hasPRPressure()) {
mPRPressureCard = new CardViewItem.DCardView();
mPRPressureCard.setTitle(getString(R.string.process_reclaim_pressure));
mPRPressureCard.setDescription(String.valueOf(VM.getPRPressure()));
addView(mPRPressureCard);
}
if (VM.hasPRAvgEff()) {
mPRAcgEffCard = new CardViewItem.DCardView();
mPRAcgEffCard.setTitle(getString(R.string.process_reclaim_avg_eff));
mPRAcgEffCard.setDescription(String.valueOf(VM.getPRAvgEff()));
addView(mPRAcgEffCard);
}
if (VM.hasPRPerSwapSize()) {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 128; i++)
list.add(String.valueOf(i * 64));
mPRPerSwapSizeCard = new SeekBarCardView.DSeekBarCard(list);
mPRPerSwapSizeCard.setTitle(getString(R.string.process_reclaim_per_swap_size));
mPRPerSwapSizeCard.setDescription(getString(R.string.process_reclaim_per_swap_size_summary));
mPRPerSwapSizeCard.setProgress((VM.getPRPerSwapSize() / 64) - 1);
mPRPerSwapSizeCard.setOnDSeekBarCardListener(this);
addView(mPRPerSwapSizeCard);
}
if (VM.hasPRSwapWin()) {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 10; i++)
list.add(String.valueOf(i));
mPRSwapWinCard = new SeekBarCardView.DSeekBarCard(list);
mPRSwapWinCard.setTitle(getString(R.string.process_reclaim_swap_eff_win));
mPRSwapWinCard.setDescription(getString(R.string.process_reclaim_swap_eff_win_summary));
mPRSwapWinCard.setProgress((VM.getPRSwapWin()) - 1);
mPRSwapWinCard.setOnDSeekBarCardListener(this);
addView(mPRSwapWinCard);
}
if (VM.hasPRSwapOptEff()) {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 100; i++)
list.add(String.valueOf(i));
mPRSwapOptEffCard = new SeekBarCardView.DSeekBarCard(list);
mPRSwapOptEffCard.setTitle(getString(R.string.process_reclaim_swap_opt_eff));
mPRSwapOptEffCard.setDescription(getString(R.string.process_reclaim_swap_opt_eff_summary));
mPRSwapOptEffCard.setProgress((VM.getPRSwapOptEff()) - 1);
mPRSwapOptEffCard.setOnDSeekBarCardListener(this);
addView(mPRSwapOptEffCard);
}
if (VM.hasPRPressureMax()) {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 100; i++)
list.add(String.valueOf(i));
mPRPressureMaxCard = new SeekBarCardView.DSeekBarCard(list);
mPRPressureMaxCard.setTitle(getString(R.string.process_reclaim_pressure_max));
mPRPressureMaxCard.setDescription(getString(R.string.process_reclaim_pressure_max_summary));
mPRPressureMaxCard.setProgress((VM.getPRPressureMax()) - 1);
mPRPressureMaxCard.setOnDSeekBarCardListener(this);
addView(mPRPressureMaxCard);
}
if (VM.hasPRPressureMin()) {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 100; i++)
list.add(String.valueOf(i));
mPRPressureMinCard = new SeekBarCardView.DSeekBarCard(list);
mPRPressureMinCard.setTitle(getString(R.string.process_reclaim_pressure_min));
mPRPressureMinCard.setDescription(getString(R.string.process_reclaim_pressure_min_summary));
mPRPressureMinCard.setProgress((VM.getPRPressureMin()) - 1);
mPRPressureMinCard.setOnDSeekBarCardListener(this);
addView(mPRPressureMinCard);
}
}
}
private void dirtyratioInit() {
DDivider mVMDividerCard = new DDivider();
mVMDividerCard.setText(getString(R.string.virtual_memory));
addView(mVMDividerCard);
List < String > list = new ArrayList < > ();
list.add(getString(R.string.disabled));
for (int i = 1; i <= 100; i++)
list.add(i + getString(R.string.percent));
mDirtyRatioCard = new SeekBarCardView.DSeekBarCard(list);
mDirtyRatioCard.setTitle(getString(R.string.dirty_ratio));
mDirtyRatioCard.setDescription(getString(R.string.dirty_ratio_summary));
mDirtyRatioCard.setProgress(VM.getDirtyRatio());
mDirtyRatioCard.setOnDSeekBarCardListener(this);
addView(mDirtyRatioCard);
}
private void dirtybackgroundratioInit() {
List < String > list = new ArrayList < > ();
list.add(getString(R.string.disabled));
for (int i = 1; i <= 100; i++)
list.add(i + getString(R.string.percent));
mDirtyBackgroundRatioCard = new SeekBarCardView.DSeekBarCard(list);
mDirtyBackgroundRatioCard.setTitle(getString(R.string.dirty_background_ratio));
mDirtyBackgroundRatioCard.setDescription(getString(R.string.dirty_background_ratio_summary));
mDirtyBackgroundRatioCard.setProgress(VM.getDirtyBackgroundRatio());
mDirtyBackgroundRatioCard.setOnDSeekBarCardListener(this);
addView(mDirtyBackgroundRatioCard);
}
private void dirtyexpireInit() {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 500; i++)
list.add(i * 10 + getString(R.string.cs));
mDirtyExpireCard = new SeekBarCardView.DSeekBarCard(list);
mDirtyExpireCard.setTitle(getString(R.string.dirty_expire_centisecs));
mDirtyExpireCard.setDescription(getString(R.string.dirty_expire_centisecs_summary));
mDirtyExpireCard.setProgress((VM.getDirtyExpire() / 10) - 1);
mDirtyExpireCard.setOnDSeekBarCardListener(this);
addView(mDirtyExpireCard);
}
private void dirtywritebackInit() {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 900; i++)
list.add(i * 10 + getString(R.string.cs));
mDirtyWritebackCard = new SeekBarCardView.DSeekBarCard(list);
mDirtyWritebackCard.setTitle(getString(R.string.dirty_writeback_centisecs));
mDirtyWritebackCard.setDescription(getString(R.string.dirty_writeback_centisecs_summary));
mDirtyWritebackCard.setProgress((VM.getDirtyWriteback()) - 1);
mDirtyWritebackCard.setOnDSeekBarCardListener(this);
addView(mDirtyWritebackCard);
}
private void dynamicdirtywritebackInit() {
if (VM.hasDynamicDirtyWriteback()) {
mDynamic_Dirty_WritebackCard = new SwitchCardView.DSwitchCard();
mDynamic_Dirty_WritebackCard.setTitle(getString(R.string.dynamic_dirty_writeback_centisecs));
mDynamic_Dirty_WritebackCard.setDescription(getString(R.string.dynamic_dirty_writeback_centisecs_summary));
mDynamic_Dirty_WritebackCard.setChecked(VM.isDynamicDirtyWritebackActive());
mDynamic_Dirty_WritebackCard.setOnDSwitchCardListener(this);
addView(mDynamic_Dirty_WritebackCard);
}
if (VM.isDynamicDirtyWritebackActive()) {
List < String > list = new ArrayList < > ();
for (int i = 1; i <= 900; i++)
list.add(i * 10 + getString(R.string.cs));
if (VM.hasDirtySuspendWriteback()) {
mDirty_Writeback_SuspendCard = new SeekBarCardView.DSeekBarCard(list);
mDirty_Writeback_SuspendCard.setTitle(getString(R.string.dirty_writeback_suspend_centisecs));
mDirty_Writeback_SuspendCard.setDescription(getString(R.string.dirty_writeback_suspend_centisecs_summary));
mDirty_Writeback_SuspendCard.setProgress((VM.getDirtySuspendWriteback()) - 1);
mDirty_Writeback_SuspendCard.setOnDSeekBarCardListener(this);
addView(mDirty_Writeback_SuspendCard);
}
if (VM.hasDirtyActiveWriteback()) {
mDirty_Writeback_ActiveCard = new SeekBarCardView.DSeekBarCard(list);
mDirty_Writeback_ActiveCard.setTitle(getString(R.string.dirty_writeback_active_centisecs));
mDirty_Writeback_ActiveCard.setDescription(getString(R.string.dirty_writeback_active_centisecs_summary));
mDirty_Writeback_ActiveCard.setProgress((VM.getDirtyActiveWriteback()) - 1);
mDirty_Writeback_ActiveCard.setOnDSeekBarCardListener(this);
addView(mDirty_Writeback_ActiveCard);
}
}
}
private void overcommitratioInit() {
List < String > list = new ArrayList < > ();
list.add(getString(R.string.disabled));
for (int i = 1; i <= 100; i++)
list.add(i + getString(R.string.percent));
mOverCommitRatioCard = new SeekBarCardView.DSeekBarCard(list);
mOverCommitRatioCard.setTitle(getString(R.string.overcommit_ratio));
mOverCommitRatioCard.setDescription(getString(R.string.overcommit_ratio_summary));
mOverCommitRatioCard.setProgress(VM.getOverCommitRatio());
mOverCommitRatioCard.setOnDSeekBarCardListener(this);
addView(mOverCommitRatioCard);
}
private void swappinessInit() {
List < String > list = new ArrayList < > ();
list.add(getString(R.string.disabled));
for (int i = 1; i <= 100; i++)
list.add(i + getString(R.string.percent));
mSwappinessCard = new SeekBarCardView.DSeekBarCard(list);
mSwappinessCard.setTitle(getString(R.string.swappiness));
mSwappinessCard.setDescription(getString(R.string.swappiness_summary));
mSwappinessCard.setProgress(VM.getSwappiness());
mSwappinessCard.setOnDSeekBarCardListener(this);
addView(mSwappinessCard);
}
private void vfscachepressureInit() {
List < String > list = new ArrayList < > ();
list.add(getString(R.string.disabled));
for (int i = 1; i <= 150; i++)
list.add(String.valueOf(i));
mVFSCachePressureCard = new SeekBarCardView.DSeekBarCard(list);
mVFSCachePressureCard.setTitle(getString(R.string.vfs_cache_pressure));
mVFSCachePressureCard.setDescription(getString(R.string.vfs_cache_pressure_summary));
mVFSCachePressureCard.setProgress(VM.getVFSCachePressure() - 1);
mVFSCachePressureCard.setOnDSeekBarCardListener(this);
addView(mVFSCachePressureCard);
}
private void laptopmodeInit() {
mLaptopModeCard = new SwitchCardView.DSwitchCard();
mLaptopModeCard.setTitle(getString(R.string.laptop_mode));
mLaptopModeCard.setDescription(getString(R.string.laptop_mode_summary));
mLaptopModeCard.setChecked(VM.isLaptopModeActive());
mLaptopModeCard.setOnDSwitchCardListener(this);
addView(mLaptopModeCard);
}
private void minfreekbytesInit() {
List < String > list = new ArrayList < > ();
list.add(getString(R.string.disabled));
for (int i = 1; i <= 5000; i++)
list.add(i + getString(R.string.kb));
mMinFreeKbytesCard = new SeekBarCardView.DSeekBarCard(list);
mMinFreeKbytesCard.setTitle(getString(R.string.min_free_kbytes));
mMinFreeKbytesCard.setDescription(getString(R.string.min_free_kbytes_summary));
mMinFreeKbytesCard.setProgress(VM.getMinFreeKbytes());
mMinFreeKbytesCard.setOnDSeekBarCardListener(this);
addView(mMinFreeKbytesCard);
}
private void extrafreekbytesInit() {
List < String > list = new ArrayList < > ();
list.add(getString(R.string.disabled));
for (int i = 1; i <= 1000; i++)
list.add((i * 100) + getString(R.string.kb));
mExtraFreeKbytesCard = new SeekBarCardView.DSeekBarCard(list);
mExtraFreeKbytesCard.setTitle(getString(R.string.extra_free_kbytes));
mExtraFreeKbytesCard.setDescription(getString(R.string.extra_free_kbytes_summary));
mExtraFreeKbytesCard.setProgress(VM.getExtraFreeKbytes() / 100);
mExtraFreeKbytesCard.setOnDSeekBarCardListener(this);
addView(mExtraFreeKbytesCard);
}
private void zramInit() {
DDivider mZRAMDividerCard = new DDivider();
mZRAMDividerCard.setText(getString(R.string.zram));
mZRAMDividerCard.setDescription(getString(R.string.zram_summary));
addView(mZRAMDividerCard);
int mTotalRAM = (int)(Ram.GetRam(true, getActivity()) * 0.073); // 0.073 = 7.3 / 100; with 10 MB steps this caps the slider at roughly 60 to 65% of total RAM
List < String > list = new ArrayList < > ();
for (int i = 0; i < mTotalRAM; i++)
list.add((i * 10) + getString(R.string.mb));
mZRAMDisksizeCard = new SeekBarCardView.DSeekBarCard(list);
mZRAMDisksizeCard.setTitle(getString(R.string.disksize));
mZRAMDisksizeCard.setDescription(getString(R.string.disksize_summary));
mZRAMDisksizeCard.setProgress(VM.getZRAMDisksize() / 10);
mZRAMDisksizeCard.setOnDSeekBarCardListener(this);
addView(mZRAMDisksizeCard);
if (VM.hasZRAMCompAlgos()) {
mZRAMCompAlgosCard = new PopupCardView.DPopupCard(VM.getZRAMCompAlgos());
mZRAMCompAlgosCard.setTitle(getString(R.string.zram_comp_algo));
mZRAMCompAlgosCard.setDescription(getString(R.string.zram_comp_algo_summary));
mZRAMCompAlgosCard.setItem(VM.getZRAMCompAlgo());
mZRAMCompAlgosCard.setOnDPopupCardListener(this);
addView(mZRAMCompAlgosCard);
}
if (VM.hasZRAMMaxCompStreams()) {
List < String > listCS = new ArrayList < > ();
for (int i = 1; i <= 4; i++)
listCS.add(i + "");
mZRAMMaxCompStreamsCard = new SeekBarCardView.DSeekBarCard(listCS);
mZRAMMaxCompStreamsCard.setTitle(getString(R.string.zram_comp_streams));
mZRAMMaxCompStreamsCard.setProgress(VM.getZRAMMaxCompStreams() - 1);
mZRAMMaxCompStreamsCard.setOnDSeekBarCardListener(this);
addView(mZRAMMaxCompStreamsCard);
}
zramStatus();
}
private void zramStatus() {
String Swap = VM.getFreeSwap(getActivity());
if (Swap != null) {
mZramSwapUsedCard = new CardViewItem.DCardView();
mZramSwapUsedCard.setTitle(getString(R.string.disksize_used));
addView(mZramSwapUsedCard);
} else {
mZramDiskCard = new CardViewItem.DCardView();
mZramDiskCard.setTitle(getString(R.string.disksize));
addView(mZramDiskCard);
}
}
private void ExtraZramInit() {
if (VM.hasZRAMDataSize()) {
mZramDataSizeCard = new CardViewItem.DCardView();
mZramDataSizeCard.setTitle(getString(R.string.zram_data_size));
addView(mZramDataSizeCard);
}
if (VM.hasZRAMReadWrites() && VM.hasZRAMFailReadWrites()) {
mZramRWCard = new CardViewItem.DCardView();
mZramRWCard.setTitle(getString(R.string.read_write));
addView(mZramRWCard);
}
}
@Override
public void onChanged(SeekBarCardView.DSeekBarCard dSeekBarCard, int position) {}
@Override
public void onStop(SeekBarCardView.DSeekBarCard dSeekBarCard, int position) {
if (dSeekBarCard == mPRPerSwapSizeCard) VM.setPRPerSwapSize((position + 1) * 64, getActivity());
else if (dSeekBarCard == mPRSwapWinCard) VM.setPRSwapWin(position + 1, getActivity());
else if (dSeekBarCard == mPRSwapOptEffCard) VM.setPRSwapOptEff(position + 1, getActivity());
else if (dSeekBarCard == mPRPressureMaxCard) VM.setPRPressureMax(position + 1, getActivity());
else if (dSeekBarCard == mPRPressureMinCard) VM.setPRPressureMin(position + 1, getActivity());
else if (dSeekBarCard == mDirtyRatioCard) VM.setDirtyRatio(position, getActivity());
else if (dSeekBarCard == mDirtyBackgroundRatioCard) VM.setDirtyBackgroundRatio(position, getActivity());
else if (dSeekBarCard == mDirtyExpireCard) VM.setDirtyExpire((position + 1) * 10, getActivity());
else if (dSeekBarCard == mDirtyWritebackCard) VM.setDirtyWriteback(position + 1, getActivity());
else if (dSeekBarCard == mDirty_Writeback_SuspendCard) VM.setDirtySuspendWriteback(position + 1, getActivity());
else if (dSeekBarCard == mDirty_Writeback_ActiveCard) VM.setDirtyActiveWriteback(position + 1, getActivity());
else if (dSeekBarCard == mOverCommitRatioCard) VM.setOverCommitRatio(position, getActivity());
else if (dSeekBarCard == mSwappinessCard) VM.setSwappiness(position, getActivity());
else if (dSeekBarCard == mVFSCachePressureCard) VM.setVFSCachePressure(position + 1, getActivity());
else if (dSeekBarCard == mMinFreeKbytesCard) VM.setMinFreeKbytes(position, getActivity());
else if (dSeekBarCard == mExtraFreeKbytesCard) VM.setExtraFreeKbytes(position * 100, getActivity());
else if (dSeekBarCard == mZRAMDisksizeCard) setZRAM(null, String.valueOf(position * 10), null);
else if (dSeekBarCard == mZRAMMaxCompStreamsCard) setZRAM(null, null, String.valueOf(position + 1));
}
@Override
public void onItemSelected(PopupCardView.DPopupCard dPopupCard, int position) {
if (dPopupCard == mZRAMCompAlgosCard)
setZRAM(VM.getZRAMCompAlgos().get(position), null, null);
}
@Override
public void onChecked(SwitchCardView.DSwitchCard dSwitchCard, boolean checked) {
if (dSwitchCard == mProcessReclaimCard) {
VM.activateProcessReclaim(checked, getActivity());
RefreshFrag();
} else if (dSwitchCard == mLaptopModeCard)
VM.activateLaptopMode(checked, getActivity());
else if (dSwitchCard == mDynamic_Dirty_WritebackCard) {
VM.activateDynamicDirtyWriteback(checked, getActivity());
RefreshFrag();
}
}
@Override
public boolean onRefresh() {
Update();
return true;
}
public void Update() {
if (mPRPressureCard != null) mPRPressureCard.setDescription(String.valueOf(VM.getPRPressure()));
if (mPRAcgEffCard != null) mPRAcgEffCard.setDescription(String.valueOf(VM.getPRAvgEff()));
if (CanUpdateZramStatus) {
if (mZRAMDisksizeCard != null) mZRAMDisksizeCard.setProgress(VM.getZRAMDisksize() / 10);
if (mZRAMMaxCompStreamsCard != null) mZRAMMaxCompStreamsCard.setProgress(VM.getZRAMMaxCompStreams() - 1);
if (mZRAMCompAlgosCard != null) mZRAMCompAlgosCard.setItem(VM.getZRAMCompAlgo());
if (mZramDiskCard != null)
mZramDiskCard.setDescription(VM.getZRAMDisksize() + getString(R.string.mb));
if (mZramSwapUsedCard != null) {
String[] swap_split = VM.getFreeSwap(getActivity()).split("[ ]+");
int total = Utils.stringToInt(swap_split[1]);
int free = Utils.stringToInt(swap_split[3]);
int used = Utils.stringToInt(swap_split[2]);
mZramSwapUsedCard.setDescription(Utils.KbToMb(total, getActivity()) + " | " +
Utils.KbToMb(free, getActivity()) + " | " + Utils.KbToMb(used, getActivity()) + " | " +
Utils.percentage(total, used, getActivity()));
}
if (mZramDataSizeCard != null) {
int original = VM.getZramOrigDataSize();
int compressed = VM.getZramCompDataSize();
mZramDataSizeCard.setDescription(Utils.bToMb(original, getActivity()) + " | " +
Utils.bToMb(compressed, getActivity()) + " | " + Utils.percentage(original / 1024, compressed / 1024, getActivity()));
}
if (mZramRWCard != null)
mZramRWCard.setDescription(getString(R.string.total) + VM.getZramReadWrites() + "\n" +
getString(R.string.fail) + VM.getZramFailReadWrites());
}
}
private void RefreshFrag() {
view.invalidate();
try {
Thread.sleep(100);
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
getActivity().getSupportFragmentManager().beginTransaction().detach(this).attach(this).commit();
}
private void setZRAM(String algo, String disksize, String max_comp) {
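        // Debounce: cancel any pending task and reschedule 3 seconds out, so a
        // burst of seekbar changes results in a single ZRAM reconfiguration.
        // Status refreshes are suppressed until the new values are applied.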
CanUpdateZramStatus = false;
setZRAMTimer.cancel();
setZRAMTimer.purge();
setZRAMTimer = new Timer();
setZRAMTimer.schedule(new TimerTask() {
@Override
public void run() {
VM.setZRAM(algo, disksize, max_comp, getActivity());
CanUpdateZramStatus = true;
}
}, 3000);
}
}
|
apache-2.0
|
Razaltan/openstorefront
|
server/openstorefront/openstorefront-web/src/main/webapp/client/scripts/component/notificationPanel.js
|
5805
|
/*
* Copyright 2015 Space Dynamics Laboratory - Utah State University Research Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* global Ext */
Ext.define('OSF.component.NotificationPanel', {
extend: 'Ext.panel.Panel',
alias: 'osf.widget.NotificationPanel',
layout: 'fit',
initComponent: function () {
this.callParent();
var notPanel = this;
notPanel.loadAll = false;
var dataStore = Ext.create('Ext.data.Store', {
autoLoad: true,
fields: [
'eventId',
'eventType',
'eventTypeDescription',
'message',
'username',
'roleGroup',
'entityName',
'entityId',
'entityMetaDataStatus',
'createDts',
'updateDts',
'readMessage'
],
proxy: {
type: 'ajax',
url: '../api/v1/resource/notificationevent',
reader: {
type: 'json',
rootProperty: 'data',
totalProperty: 'totalNumber'
}
}
});
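        // Marks a single event as read: fetch the current user, PUT the username
        // against the event, then reload the grid with the current filter.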
var actionMarkAsRead = function (record) {
CoreService.usersevice.getCurrentUser().then(function (response) {
var usercontext = Ext.decode(response.responseText);
Ext.Ajax.request({
url: '../api/v1/resource/notificationevent/' + record.get('eventId'),
method: 'PUT',
jsonData: {
username: usercontext.username
},
success: function (response) {
notPanel.notificationGrid.getStore().load({
params: {
all: notPanel.loadAll
}
});
}
});
});
};
var actionRemove = function (record) {
Ext.Ajax.request({
url: '../api/v1/resource/notificationevent/' + record.get('eventId'),
method: 'DELETE',
success: function (response) {
notPanel.notificationGrid.getStore().load({
params: {
all: notPanel.loadAll
}
});
}
});
};
notPanel.notificationGrid = Ext.create('Ext.grid.Panel', {
store: dataStore,
columnLines: true,
bodyStyle: 'background-color: white',
columns: [
{
text: 'Unread',
width: 75,
align: 'center',
dataIndex: 'readMessage',
//xtype: 'widgetcolumn',
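                        // readMessage is true once read: render nothing; otherwise
                        // show a check icon, handled by the cellclick listener below.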
renderer: function (value) {
if (value) {
return '';
} else {
return '<i class="fa fa-check" title="Mark as read"></i>';
}
}
},
{text: 'Event Date', dataIndex: 'createDts', width: 150, xtype: 'datecolumn', format: 'm/d/y H:i:s'},
{text: 'Type', groupable: 'true', dataIndex: 'eventTypeDescription', width: 175},
{text: 'Message', dataIndex: 'message', flex: 1,
renderer: function (value, metadata, record) {
switch (record.get('eventType')) {
    case 'WATCH':
        return value + '<i>View the changes <a href="../single?id=' + record.get('entityId') + '"><strong>here</strong></a>.</i>';
    case 'REPORT':
        return value + '<i>View/Download the report <a href="usertools.jsp?dc=' + Math.random() + '#Reports"><strong>here</strong></a></i>.';
    case 'ADMIN':
        return '<i class="fa fa-warning"></i> ' + value;
    case 'TASK':
    case 'IMPORT':
    default:
        return value;
}
}
},
{
text: 'Action',
dataIndex: '',
sortable: false,
xtype: 'widgetcolumn',
align: 'center',
width: 75,
widget: {
xtype: 'button',
iconCls: 'fa fa-trash',
maxWidth: 25,
cls: 'button-danger',
handler: function () {
var record = this.getWidgetRecord();
actionRemove(record);
}
}
}
],
listeners: {
cellclick: function (grid, td, cellIndex, record, tr, rowIndex, e, eOpts) {
if (cellIndex === 0) {
actionMarkAsRead(record);
}
}
},
dockedItems: [
{
dock: 'top',
xtype: 'toolbar',
items: [
{
text: 'Refresh',
scale: 'medium',
iconCls: 'fa fa-2x fa-refresh',
handler: function () {
this.up('grid').getStore().load({
params: {
all: notPanel.loadAll
}
});
}
},
{
xtype: 'tbseparator'
},
{
text: 'Unread',
scale: 'medium',
pressed: true,
toggleGroup: 'filter',
handler: function () {
notPanel.loadAll = false;
this.up('grid').getStore().load({
params: {
all: false
}
});
}
},
{
text: 'All',
scale: 'medium',
toggleGroup: 'filter',
handler: function () {
notPanel.loadAll = true;
this.up('grid').getStore().load({
params: {
all: true
}
});
}
}
]
},
{
dock: 'bottom',
xtype: 'panel',
html: 'Loading...',
listeners: {
beforerender: function (panel) {
Ext.Ajax.request({
url: '../api/v1/service/application/configproperties/notification.max.days',
success: function (response) {
var keyData = Ext.decode(response.responseText);
panel.update('*Notifications time out after <b>' + keyData.description + '</b> day(s)');
}
});
}
}
}
]
});
notPanel.add(notPanel.notificationGrid);
},
refreshData: function () {
this.notificationGrid.getStore().load({
params: {
all: this.loadAll
}
});
}
});
|
apache-2.0
|
gawkermedia/googleads-java-lib
|
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201601/cm/LabelServiceInterfacemutate.java
|
2365
|
package com.google.api.ads.adwords.jaxws.v201601.cm;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
*
* Applies the list of mutate operations.
*
* @param operations The operations to apply. The same {@link Label} cannot be specified in
* more than one operation.
* @return The applied {@link Label}s.
* @throws ApiException when there is at least one error with the request
*
*
* <p>Java class for mutate element declaration.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <element name="mutate">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="operations" type="{https://adwords.google.com/api/adwords/cm/v201601}LabelOperation" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"operations"
})
@XmlRootElement(name = "mutate")
public class LabelServiceInterfacemutate {
protected List<LabelOperation> operations;
/**
* Gets the value of the operations property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the operations property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getOperations().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link LabelOperation }
*
*
*/
public List<LabelOperation> getOperations() {
if (operations == null) {
operations = new ArrayList<LabelOperation>();
}
return this.operations;
}
}
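// Hedged usage sketch (not part of the generated class): since the operations
// list is live and has no setter, callers populate it in place, e.g.
//   LabelServiceInterfacemutate mutate = new LabelServiceInterfacemutate();
//   mutate.getOperations().add(someLabelOperation);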
|
apache-2.0
|
Lesha-spr/greed
|
public/src/actions/auth/auth.actions.js
|
644
|
import alt from './../../alt';
class AuthActions {
constructor() {
this.generateActions(
'clear',
'fetchLoginForm',
'successFetchLoginForm',
'fetchRegistrationForm',
'successFetchRegistrationForm',
'showLoginForm',
'showRegistrationForm',
'login',
'logout',
'successLogout',
'registration',
'successRegistration',
'fetchUser',
'successFetchUser',
'errorFetchUser',
'error'
);
}
}
export default alt.createActions(AuthActions);
|
apache-2.0
|
pedrohidalgo/pleasure
|
src/main/java/com/qualixium/playnb/filetype/conf/ConfLanguageHelper.java
|
24805
|
package com.qualixium.playnb.filetype.conf;
import com.qualixium.playnb.filetype.conf.completion.ConfigurationItem;
import java.util.ArrayList;
import java.util.List;
public class ConfLanguageHelper {
public static List<ConfigurationItem> getKeys() {
List<ConfigurationItem> items = new ArrayList<>();
items.add(new ConfigurationItem("session.cookieName", "The default name for the cookie is PLAY_SESSION. This can be changed by configuring the key session.cookieName in application.conf", "https://www.playframework.com/documentation/2.4.x/ScalaSessionFlash#How-it-is-different-in-Play"));
items.add(new ConfigurationItem("session.maxAge", "You can also set the maximum age of the session cookie by configuring the key session.maxAge (in milliseconds) in application.conf", "https://www.playframework.com/documentation/2.4.x/ScalaSessionFlash#How-it-is-different-in-Play"));
items.add(new ConfigurationItem("play.http.parser.maxMemoryBuffer", "By default, the maximum content length that they will parse is 100KB. It can be overridden by specifying the play.http.parser.maxMemoryBuffer property in application.conf", "https://www.playframework.com/documentation/2.4.x/ScalaBodyParsers#Max-content-length"));
items.add(new ConfigurationItem("play.http.parser.maxDiskBuffer", "For parsers that buffer content on disk, such as the raw parser or multipart/form-data, the maximum content length is specified using the play.http.parser.maxDiskBuffer property, it defaults to 10MB", "https://www.playframework.com/documentation/2.4.x/ScalaBodyParsers#Max-content-length"));
items.add(new ConfigurationItem("play.http.errorHandler", "If you don’t want to place your error handler in the root package, or if you want to be able to configure different error handlers for different\n"
+ "environments, you can do this by configuring the play.http.errorHandler configuration property in application.conf", "https://www.playframework.com/documentation/2.4.x/ScalaErrorHandling#Supplying-a-custom-error-handler"));
items.add(new ConfigurationItem("play.http.filters", "The Filters class can either be in the root package, or if it has another name or is in another package, needs to be configured using\n"
+ "play.http.filters in application.conf: play.http.filters = \"filters.MyFilters\"", "https://www.playframework.com/documentation/2.4.x/ScalaHttpFilters#Using-filters"));
items.add(new ConfigurationItem("db.default.driver", "Database Driver class name: org.postgresql.Driver", "https://www.playframework.com/documentation/2.4.x/ScalaDatabase#Configuring-JDBC-connection-pools"));
items.add(new ConfigurationItem("db.default.url", "Database connection url: jdbc:postgresql://database.example.com/playdb\"", "https://www.playframework.com/documentation/2.4.x/ScalaDatabase#Configuring-JDBC-connection-pools"));
items.add(new ConfigurationItem("db.default.username", "dbuser", "https://www.playframework.com/documentation/2.4.x/ScalaDatabase#Configuring-JDBC-connection-pools"));
items.add(new ConfigurationItem("db.default.password", "a strong password", "https://www.playframework.com/documentation/2.4.x/ScalaDatabase#Configuring-JDBC-connection-pools"));
items.add(new ConfigurationItem("db.default.jndiName", "JPA requires the datasource to be accessible via JNDI. You can expose any Play-managed datasource via JNDI by adding this configuration in\n"
+ "conf/application.conf", "https://www.playframework.com/documentation/2.4.x/JavaDatabase#Exposing-the-datasource-through-JNDI"));
items.add(new ConfigurationItem("play.db.pool", "Out of the box, Play provides two database connection pool implementations, HikariCP\n"
+ "and BoneCP. The default is HikariCP, but this can\n"
+ "be changed by setting the play.db.pool property:\n"
+ "play.db.pool=bonecp", "https://www.playframework.com/documentation/2.4.x/JavaDatabase#Selecting-and-configuring-the-connection-pool"));
items.add(new ConfigurationItem("play.cache.bindCaches", "If you want to access multiple different ehcache caches, then you’ll need to tell Play to bind them in application.conf, like so:\n"
+ "play.cache.bindCaches = [\"db-cache\", \"user-cache\", \"session-cache\"]", "https://www.playframework.com/documentation/2.4.x/JavaCache#Accessing-different-caches"));
items.add(new ConfigurationItem("play.ws.followRedirects", "Configures the client to follow 301 and 302 redirects (default is true).", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-WS"));
items.add(new ConfigurationItem("play.ws.useProxyProperties", "To use the system http proxy settings(http.proxyHost, http.proxyPort) (default is true)", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-WS"));
items.add(new ConfigurationItem("play.ws.useragent", "To configure the User-Agent header field.", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-WS"));
items.add(new ConfigurationItem("play.ws.compressionEnabled", "Set it to true to use gzip/deflater encoding (default is false).", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-WS"));
items.add(new ConfigurationItem("play.ws.timeout.connection", "The maximum time to wait when connecting to the remote host (default is 120 seconds).", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-Timeouts"));
items.add(new ConfigurationItem("play.ws.timeout.idle", "The maximum time the request can stay idle (connection is established but waiting for more data) (default is 120 seconds).", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-Timeouts"));
items.add(new ConfigurationItem("play.ws.timeout.request", "The total time you accept a request to take (it will be interrupted even if the remote host is still sending data) (default is 120 seconds).", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-Timeouts"));
items.add(new ConfigurationItem("play.ws.ning.allowPoolingConnection", "Set true if connection can be pooled by a ConnectionsPool.", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.allowSslConnectionPool", "Return true is if connections pooling is enabled.", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.ioThreadMultiplier", "", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.maxConnectionsPerHost", " the maximum number of connections per host an AsyncHttpClient can handle.", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.maxConnectionsTotal", "Set the maximum number of connections an AsyncHttpClient can handle.", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.maxConnectionLifeTime", "", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.idleConnectionInPoolTimeout", "Set the maximum time an AsyncHttpClient will keep connection idle in pool.", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.webSocketIdleTimeout", "", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.maxNumberOfRedirects", "Set the maximum number of HTTP redirect", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.maxRequestRetry", "Set the number of time a request will be retried when an IOException occurs because of a Network exception.", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.ws.ning.disableUrlEncoding", "", "https://www.playframework.com/documentation/2.4.x/ScalaWS#Configuring-AsyncHttpClientConfig"));
items.add(new ConfigurationItem("play.akka.config", "In case you want to use the akka.* settings for another Akka actor system, you can tell Play to load its Akka settings from another location. play.akka.config = \"my-akka\"", "https://www.playframework.com/documentation/2.4.x/ScalaAkka#Changing-configuration-prefix"));
items.add(new ConfigurationItem("play.akka.actor-system", "By default the name of the Play actor system is application. You can change this via an entry in the conf/application.conf:\n"
+ "play.akka.actor-system = \"custom-name\" \n Note: This feature is useful if you want to put your play application ActorSystem in an Akka cluster.", "https://www.playframework.com/documentation/2.4.x/ScalaAkka#Built-in-actor-system-name"));
items.add(new ConfigurationItem("play.i18n.langs", "specify the languages supported by your application in the conf/application.conf file:\n"
+ "play.i18n.langs = [ \"en\", \"en-US\", \"fr\" ]", "https://www.playframework.com/documentation/2.4.x/ScalaI18N#Specifying-languages-supported-by-your-application"));
items.add(new ConfigurationItem("play.http.requestHandler", "If you don’t want to place your request handler in the root package, or if you want to be able to configure different request handlers for different\n"
+ "environments, you can do this by configuring the play.http.requestHandler configuration property in application.conf:\n"
+ "play.http.requestHandler = \"com.example.RequestHandler\"", "https://www.playframework.com/documentation/2.4.x/ScalaHttpRequestHandlers#Configuring-the-http-request-handler"));
items.add(new ConfigurationItem("play.modules.enabled", "To register this module with Play, append it’s fully qualified class name to the play.modules.enabled list in application.conf:\n"
+ "play.modules.enabled += \"modules.HelloModule\"", "https://www.playframework.com/documentation/2.4.x/ScalaDependencyInjection#Programmatic-bindings"));
items.add(new ConfigurationItem("play.modules.disabled", "If there is a module that you don’t want to be loaded, you can exclude it by appending it to the play.modules.disabled property in\n"
+ "application.conf:\n"
+ "play.modules.disabled += \"play.api.db.evolutions.EvolutionsModule\"", "https://www.playframework.com/documentation/2.4.x/ScalaDependencyInjection#Excluding-modules"));
items.add(new ConfigurationItem("play.application.loader", "When you override the ApplicationLoader\n"
+ "you need to tell Play. Add the following setting to your application.conf:\n"
+ "play.application.loader = \"modules.CustomApplicationLoader\"", "https://www.playframework.com/documentation/2.4.x/ScalaCompileTimeDependencyInjection#Application-entry-point"));
items.add(new ConfigurationItem("jpa.default", "Finally you have to tell Play, which persistent unit should be used by your JPA provider. This is done by the jpa.default property in your\n"
+ "application.conf.\n"
+ "jpa.default=defaultPersistenceUnit", "https://www.playframework.com/documentation/2.4.x/JavaJPA#Creating-a-persistence-unit"));
items.add(new ConfigurationItem("ebean.default", "The runtime library can be configured by putting the list of packages and/or classes that your Ebean models live in your application configuration\n"
+ "file. For example, if all your models are in the models package, add the following to conf/application.conf:\n"
+ "ebean.default = [\"models.*\"]", "https://www.playframework.com/documentation/2.4.x/JavaEbean#Configuring-the-runtime-library"));
items.add(new ConfigurationItem("application.global", "By default, this object is defined in the root package, but you can define it wherever you want and then configure it in your application.conf\n"
+ "using application.global property.", "https://www.playframework.com/documentation/2.4.x/JavaGlobal#The-Global-object"));
items.add(new ConfigurationItem("play.crypto.secret", "It is configured in application.conf, with the property name play.crypto.secret, and defaults to changeme. As the default suggests, it\n"
+ "should be changed for production. When started in prod mode, if Play finds that the secret is not set, or if it is set to changeme, Play will throw an error.", "https://www.playframework.com/documentation/2.4.x/ApplicationSecret#The-Application-Secret"));
items.add(new ConfigurationItem("include", "Includes another conf file. Example: include \"development.conf\"", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Using--Dconfig.file"));
items.add(new ConfigurationItem("play.filters.headers.frameOptions", "sets X-Frame-Options \"DENY\" by default. Any of the headers can be disabled by setting a configuration value of null, for example: play.filters.headers.frameOptions = null", "https://www.playframework.com/documentation/2.4.x/SecurityHeaders#Configuring-the-security-headers"));
items.add(new ConfigurationItem("play.filters.headers.xssProtection", "sets X-XSS-Protection, “1; mode=block” by default. Any of the headers can be disabled by setting a configuration value of null, for example: play.filters.headers.frameOptions = null", "https://www.playframework.com/documentation/2.4.x/SecurityHeaders#Configuring-the-security-headers"));
items.add(new ConfigurationItem("play.filters.headers.contentTypeOptions", "sets X-Content-Type-Options, “nosniff” by default. Any of the headers can be disabled by setting a configuration value of null, for example: play.filters.headers.frameOptions = null", "https://www.playframework.com/documentation/2.4.x/SecurityHeaders#Configuring-the-security-headers"));
items.add(new ConfigurationItem("play.filters.headers.permittedCrossDomainPolicies", "sets X-Permitted-Cross-Domain-Policies, “master-only” by default. Any of the headers can be disabled by setting a configuration value of null, for example: play.filters.headers.frameOptions = null", "https://www.playframework.com/documentation/2.4.x/SecurityHeaders#Configuring-the-security-headers"));
items.add(new ConfigurationItem("play.filters.headers.contentSecurityPolicy", "sets Content-Security-Policy, “default-src ‘self’” by default. Any of the headers can be disabled by setting a configuration value of null, for example: play.filters.headers.frameOptions = null", "https://www.playframework.com/documentation/2.4.x/SecurityHeaders#Configuring-the-security-headers"));
items.add(new ConfigurationItem("play.filters.cors.pathPrefixes", "filter paths by a whitelist of path prefixes", "https://www.playframework.com/documentation/2.4.x/CorsFilter#Configuring-the-CORS-filter"));
items.add(new ConfigurationItem("play.filters.cors.allowedOrigins", "allow only requests with origins from a whitelist (by default all origins are allowed)", "https://www.playframework.com/documentation/2.4.x/CorsFilter#Configuring-the-CORS-filter"));
items.add(new ConfigurationItem("play.filters.cors.allowedHttpMethods", "allow only HTTP methods from a whitelist for preflight requests (by default all methods are allowed)", "https://www.playframework.com/documentation/2.4.x/CorsFilter#Configuring-the-CORS-filter"));
items.add(new ConfigurationItem("play.filters.cors.allowedHttpHeaders", "allow only HTTP headers from a whitelist for preflight requests (by default all headers are allowed)", "https://www.playframework.com/documentation/2.4.x/CorsFilter#Configuring-the-CORS-filter"));
items.add(new ConfigurationItem("play.filters.cors.exposedHeaders", "set custom HTTP headers to be exposed in the response (by default no headers are exposed)", "https://www.playframework.com/documentation/2.4.x/CorsFilter#Configuring-the-CORS-filter"));
items.add(new ConfigurationItem("play.filters.cors.supportsCredentials", "disable/enable support for credentials (by default credentials support is enabled)", "https://www.playframework.com/documentation/2.4.x/CorsFilter#Configuring-the-CORS-filter"));
items.add(new ConfigurationItem("play.filters.cors.preflightMaxAge", "set how long the results of a preflight request can be cached in a preflight result cache (by default 1 hour)", "https://www.playframework.com/documentation/2.4.x/CorsFilter#Configuring-the-CORS-filter"));
items.add(new ConfigurationItem("play.ws.ssl.hostnameVerifierClass", "If you need to specify a different hostname verifier, you can configure application.conf to provide your own custom HostnameVerifier: play.ws.ssl.hostnameVerifierClass=org.example.MyHostnameVerifier", "https://www.playframework.com/documentation/2.4.x/HostnameVerification#Modifying-the-Hostname-Verifier"));
items.add(new ConfigurationItem("logger.play.api.libs.ws.ssl", "To see the behavior of WS, you can configuring the SLF4J logger for debug output:\n"
+ "logger.play.api.libs.ws.ssl=DEBUG", "https://www.playframework.com/documentation/2.4.x/DebuggingSSL#Verbose-Debugging"));
items.add(new ConfigurationItem("play.evolutions.enabled", "Evolutions are automatically activated if a database is configured in application.conf and evolution scripts are present. You can disable\n"
+ "them by setting play.evolutions.enabled=false.", "https://www.playframework.com/documentation/2.4.x/Evolutions#Evolutions-configuration"));
items.add(new ConfigurationItem("play.evolutions.autocommit", "Whether autocommit should be used. If false, evolutions will be applied in a single transaction. Defaults to true.", "https://www.playframework.com/documentation/2.4.x/Evolutions#Evolutions-configuration"));
items.add(new ConfigurationItem("play.evolutions.useLocks", "Whether a locks table should be used. This must be used if you have many Play nodes that may potentially run evolutions, but\n"
+ "you want to ensure that only one does. It will create a table called play_evolutions_lock, and use a SELECT FOR UPDATE NOWAIT or\n"
+ "SELECT FOR UPDATE to lock it. This will only work for Postgres, Oracle, and MySQL InnoDB. It will not work for other databases. Defaults to\n"
+ "false.", "https://www.playframework.com/documentation/2.4.x/Evolutions#Evolutions-configuration"));
items.add(new ConfigurationItem("play.evolutions.autoApply", "Whether evolutions should be automatically applied. In dev mode, this will cause both ups and downs evolutions to be\n"
+ "automatically applied. In prod mode, it will cause only ups evolutions to be automatically applied. Defaults to false.", "https://www.playframework.com/documentation/2.4.x/Evolutions#Evolutions-configuration"));
items.add(new ConfigurationItem("play.evolutions.autoApplyDowns", "Whether down evolutions should be automatically applied. In prod mode, this will cause down evolutions to be\n"
+ "automatically applied. Has no effect in dev mode. Defaults to false.", "https://www.playframework.com/documentation/2.4.x/Evolutions#Evolutions-configuration"));
//Play Server Settings
items.add(new ConfigurationItem("play.server.http.port", "# The HTTP port of the server. Use a value of \"disabled\" if the server shouldn't bind an HTTP port.", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.http.address", "The interface address to bind to. i.e: address = \"0.0.0.0\"", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.https.port", "THe HTTPS port of the server", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.https.engineProvider", "The SSL engine provider. i.e: engineProvider = \"play.core.server.ssl.DefaultSSLEngineProvider\"", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.https.keyStore.path", "The path to the keystore", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.https.keyStore.type", "The type of the keystore. i.e: type = \"JKS\"", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.https.keyStore.password", "The password for the keystore", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.https.keyStore.algorithm", "The algorithm to use. If not set, uses the platform default algorithm.", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.https.trustStore.noCaVerification", "If true, does not do CA verification on client side certificates", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.provider", "The type of ServerProvider that should be used to create the server. If not provided, the ServerStart class that instantiates the server will provide a default value.", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.pidfile.path", "The path to the process id file created by the server when it runs. If set to \"/dev/null\" then no pid file will be created.", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.netty.maxInitialLineLength", "The maximum length of the initial line. This effectively restricts the maximum length of a URL that the server will accept, the initial line consists of the method (3-7 characters), the URL, and the HTTP version (8 characters), including typical whitespace, the maximum URL length will be this number - 18.", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.netty.maxHeaderSize", "The maximum length of the HTTP headers. The most common effect of this is a restriction in cookie length, including number of cookies and size of cookie values.", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.netty.maxChunkSize", "# The maximum length of body bytes that Netty will read into memory at a time.\n"
+ " # This is used in many ways. Note that this setting has no relation to HTTP chunked transfer encoding - Netty will\n"
+ " # read \"chunks\", that is, byte buffers worth of content at a time and pass it to Play, regardless of whether the body\n"
+ " # is using HTTP chunked transfer encoding. A single HTTP chunk could span multiple Netty chunks if it exceeds this.\n"
+ " # A body that is not HTTP chunked will span multiple Netty chunks if it exceeds this or if no content length is\n"
+ " # specified. This only controls the maximum length of the Netty chunk byte buffers.", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
items.add(new ConfigurationItem("play.server.netty.log.wire", "Whether the Netty wire should be logged. Defaults to false", "https://www.playframework.com/documentation/2.4.x/ProductionConfiguration#Server-configuration-options"));
return items;
}
}
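// Hedged usage sketch (accessor names are hypothetical): a completion provider
// could filter this catalog by the prefix the user has typed, e.g.
//   ConfLanguageHelper.getKeys().stream()
//       .filter(item -> item.getKey().startsWith(typedPrefix))
//       .collect(java.util.stream.Collectors.toList());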
|
apache-2.0
|
tensorflow/agents
|
tf_agents/typing/types.py
|
5023
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common types used in TF-Agents."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import typing
from typing import Callable, Iterable, Mapping, Optional, Sequence, Text, TypeVar, Union
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
# pylint:disable=g-direct-tensorflow-import
from tensorflow.python.framework.ops import EagerTensor # TF internal
# pylint:enable=g-direct-tensorflow-import
if sys.version_info < (3, 7):
ForwardRef = typing._ForwardRef # pylint: disable=protected-access
else:
ForwardRef = typing.ForwardRef
ArraySpec = ForwardRef('tf_agents.specs.array_spec.ArraySpec')
Network = ForwardRef('tf_agents.networks.network.Network') # pylint: disable=invalid-name
BoundedTensorSpec = ForwardRef('tf_agents.specs.tensor_spec.BoundedTensorSpec') # pylint: disable=invalid-name
DistributionSpecV2 = ForwardRef(
'tf_agents.distributions.utils.DistributionSpecV2') # pylint: disable=invalid-name
Tensor = Union[tf.Tensor, tf.SparseTensor, tf.RaggedTensor, EagerTensor]
# Note: EagerTensor is array-like because it supports __array__. We
# can replace this with np.typing.ArrayLike once we/tf require numpy v1.21+.
Array = Union[np.ndarray, int, float, str, bool, EagerTensor] # pylint: disable=invalid-name
TensorOrArray = Union[Tensor, Array]
Distribution = tfp.distributions.Distribution
TensorSpec = Union[
tf.TypeSpec, tf.TensorSpec, tf.RaggedTensorSpec, tf.SparseTensorSpec,
DistributionSpecV2
]
Spec = Union[TensorSpec, ArraySpec]
SpecTensorOrArray = Union[Spec, Tensor, Array]
# Note that this is effectively treated as `Any`; see b/109648354.
Tnest = TypeVar('Tnest')
Trecursive = TypeVar('Trecursive')
Nested = Union[Tnest, Iterable[Trecursive], Mapping[Text, Trecursive]]
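# For example, both {'observation': <Tensor>} and [<Tensor>, <Tensor>]
# satisfy NestedTensor below.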
NestedTensor = Nested[Tensor, 'NestedTensor']
NestedVariable = Nested[tf.Variable, 'NestedVariable']
NestedArray = Nested[Array, 'NestedArray']
NestedDistribution = Nested[
tfp.distributions.Distribution, 'NestedDistribution']
NestedPlaceHolder = Nested[tf.compat.v1.placeholder, 'NestedPlaceholder']
NestedTensorSpec = Nested[TensorSpec, 'NestedTensorSpec']
NestedBoundedTensorSpec = Nested[BoundedTensorSpec, 'NestedBoundedTensorSpec']
NestedArraySpec = Nested[ArraySpec, 'NestedArraySpec']
NestedLayer = Nested[tf.keras.layers.Layer, 'NestedLayer']
NestedNetwork = Nested[Network, 'NestedNetwork']
NestedSpec = Union[NestedTensorSpec, NestedArraySpec]
NestedTensorOrArray = Union[NestedTensor, NestedArray]
NestedSpecTensorOrArray = Union[NestedSpec, NestedTensor, NestedArray]
Int = Union[int, np.int16, np.int32, np.int64, Tensor, Array]
Bool = Union[bool, np.bool, Tensor, Array]
Float = Union[float, np.float16, np.float32, np.float64, Tensor, Array]
FloatOrReturningFloat = Union[Float, Callable[[], Float]]
Shape = Union[TensorOrArray, Sequence[Optional[int]], tf.TensorShape]
ShapeSequence = Sequence[Optional[int]]
Splitter = Optional[Callable[
[NestedSpecTensorOrArray], Iterable[NestedSpecTensorOrArray]]]
Seed = Union[int, Sequence[int], Tensor, Array]
TimeStep = ForwardRef('tf_agents.trajectories.time_step.TimeStep') # pylint: disable=invalid-name
PolicyStep = ForwardRef('tf_agents.trajectories.policy_step.PolicyStep') # pylint: disable=invalid-name
Trajectory = ForwardRef('tf_agents.trajectories.trajectory.Trajectory') # pylint: disable=invalid-name
GymEnv = ForwardRef('gym.Env') # pylint: disable=invalid-name
GymEnvWrapper = Callable[[GymEnv], GymEnv]
PyEnv = ForwardRef('tf_agents.environments.py_environment.PyEnvironment') # pylint: disable=invalid-name
PyEnvWrapper = Callable[[PyEnv], PyEnv]
Observer = Callable[[Trajectory], None]
ComparatorFn = Callable[[Float, Float], Bool]
LossFn = Callable[..., Tensor]
Optimizer = Union[tf.keras.optimizers.Optimizer, tf.compat.v1.train.Optimizer]
# We use lazy loading of Reverb, so we predeclare common Reverb objects
ReverbServer = ForwardRef('reverb.Server')
ReverbTable = ForwardRef('reverb.Table')
ReverbClient = ForwardRef('reverb.Client')
ReverbTFClient = ForwardRef('reverb.TFClient')
ReverbSampleInfo = ForwardRef('reverb.replay_sample.SampleInfo')
ReverbReplaySample = ForwardRef('reverb.replay_sample.ReplaySample')
LookupLayer = Union[
tf.compat.v2.keras.layers.experimental.preprocessing.IntegerLookup,
tf.compat.v2.keras.layers.experimental.preprocessing.StringLookup]
|
apache-2.0
|
dscdtc/follow-novel
|
src/store/mutations.js
|
317
|
import * as types from './mutation-type'
const mutations = {
[types.SET_SEARCH_HISTORY] (state, history) {
state.searchHistory = history
},
[types.SET_BOOK_ID] (state, id) {
state.bookId = id
},
[types.SET_READ_RECORD] (state, record) {
state.readRecord = record
}
}
export default mutations
|
apache-2.0
|
gostor/gotgt
|
pkg/scsi/sbc.go
|
24241
|
/*
Copyright 2017 The GoStor Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// SCSI block command processing
package scsi
import (
"encoding/binary"
"fmt"
"unsafe"
"github.com/gostor/gotgt/pkg/api"
"github.com/gostor/gotgt/pkg/util"
"github.com/gostor/gotgt/pkg/version"
log "github.com/sirupsen/logrus"
)
const (
PR_SPECIAL = (1 << 5)
PR_WE_FA = (1 << 4)
PR_EA_FA = (1 << 3)
PR_RR_FR = (1 << 2)
PR_WE_FN = (1 << 1)
PR_EA_FN = (1 << 0)
)
var (
EnableORWrite16 = true
EnablePersistentReservation = true
)
type SBCSCSIDeviceProtocol struct {
BaseSCSIDeviceProtocol
}
func (sbc SBCSCSIDeviceProtocol) PerformCommand(opcode int) interface{} {
return sbc.SCSIDeviceOps[opcode]
}
func (sbc SBCSCSIDeviceProtocol) PerformServiceAction(opcode int, action uint8) interface{} {
var sa *SCSIServiceAction
for _, sa = range sbc.SCSIDeviceOps[opcode].ServiceAction {
if sa.ServiceAction == action {
return sa
}
}
return nil
}
func (sbc SBCSCSIDeviceProtocol) InitLu(lu *api.SCSILu) error {
// init LU's phy attribute
lu.Attrs.DeviceType = sbc.DeviceType
lu.Attrs.Qualifier = false
lu.Attrs.ThinProvisioning = false
lu.Attrs.Removable = false
lu.Attrs.Readonly = false
lu.Attrs.SWP = false
lu.Attrs.SenseFormat = false
lu.Attrs.VendorID = SCSIVendorID
lu.Attrs.ProductID = SCSIProductID
lu.Attrs.ProductRev = version.SCSIVersion
lu.Attrs.SCSIID = fmt.Sprintf("gotgt-scsi-%d%d", 0, lu.UUID)
lu.Attrs.SCSISN = fmt.Sprintf("gotgt-beaf-%d%d", 0, lu.UUID)
/*
SCSIID for PAGE83 T10 VENDOR IDENTIFICATION field
It is going to be the iSCSI target iqn name
leave it with a default target name
*/
lu.Attrs.SCSIID = SCSIID
/*
The PRODUCT SERIAL NUMBER field contains
right-aligned ASCII data (see 4.3.1)
that is a vendor specific serial number.
If the product serial number is not available,
the device server shall return ASCII spaces (20h) in this field.
leave it with 4 spaces (20h)
*/
lu.Attrs.SCSISN = " "
lu.Attrs.VersionDesction = [8]uint16{
0x0320, // SBC-2 no version claimed
0x0960, // iSCSI no version claimed
0x0300, // SPC-3 no version claimed
0x0060, // SAM-3 no version claimed
}
if lu.BlockShift == 0 {
lu.BlockShift = api.DefaultBlockShift
}
pages := []api.ModePage{}
// Vendor unique - however most apps seem to call for mode page 0
//pages = append(pages, api.ModePage{0, 0, []byte{}})
// Disconnect page
pages = append(pages, api.ModePage{2, 0, 14, []byte{0x80, 0x80, 0, 0xa, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}})
// Caching Page
pages = append(pages, api.ModePage{8, 0, 18, []byte{0x14, 0, 0xff, 0xff, 0, 0, 0xff, 0xff, 0xff, 0xff, 0x80, 0x14, 0, 0, 0, 0, 0, 0}})
// Control page
pages = append(pages, api.ModePage{0x0a, 0, 10, []byte{2, 0x10, 0, 0, 0, 0, 0, 0, 2, 0, 0x08, 0, 0, 0, 0, 0, 0, 0}})
// Control Extensions mode page: TCMOS:1
pages = append(pages, api.ModePage{0x0a, 1, 0x1c, []byte{0x04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}})
// Informational Exceptions Control page
pages = append(pages, api.ModePage{0x1c, 0, 10, []byte{8, 0, 0, 0, 0, 0, 0, 0, 0, 0}})
lu.ModePages = pages
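// Mode parameter block descriptor: 4-byte number of blocks (clamped to
// 0xffffffff when the count does not fit in 32 bits) followed by the
// 4-byte block length.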
mbd := util.MarshalUint32(uint32(0xffffffff))
if size := lu.Size >> lu.BlockShift; size>>32 == 0 {
mbd = util.MarshalUint32(uint32(size))
}
lu.ModeBlockDescriptor = append(mbd, util.MarshalUint32(uint32(1<<lu.BlockShift))...)
return nil
}
func (sbc SBCSCSIDeviceProtocol) ConfigLu(lu *api.SCSILu) error {
return nil
}
func (sbc SBCSCSIDeviceProtocol) OnlineLu(lu *api.SCSILu) error {
return nil
}
func (sbc SBCSCSIDeviceProtocol) OfflineLu(lu *api.SCSILu) error {
return nil
}
func (sbc SBCSCSIDeviceProtocol) ExitLu(lu *api.SCSILu) error {
return nil
}
func NewSBCDevice(deviceType api.SCSIDeviceType) api.SCSIDeviceProtocol {
var sbc = SBCSCSIDeviceProtocol{
BaseSCSIDeviceProtocol{
DeviceType: deviceType,
SCSIDeviceOps: []SCSIDeviceOperation{},
},
}
for i := 0; i < 256; i++ {
sbc.SCSIDeviceOps = append(sbc.SCSIDeviceOps, NewSCSIDeviceOperation(SPCIllegalOp, nil, 0))
}
sbc.SCSIDeviceOps[api.TEST_UNIT_READY] = NewSCSIDeviceOperation(SPCTestUnit, nil, 0)
sbc.SCSIDeviceOps[api.REQUEST_SENSE] = NewSCSIDeviceOperation(SPCRequestSense, nil, 0)
sbc.SCSIDeviceOps[api.FORMAT_UNIT] = NewSCSIDeviceOperation(SBCFormatUnit, nil, 0)
sbc.SCSIDeviceOps[api.READ_6] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.WRITE_6] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN|PR_WE_FA|PR_WE_FN)
sbc.SCSIDeviceOps[api.INQUIRY] = NewSCSIDeviceOperation(SPCInquiry, nil, 0)
sbc.SCSIDeviceOps[api.MODE_SELECT] = NewSCSIDeviceOperation(SBCModeSelect, nil, PR_WE_FA|PR_EA_FA|PR_WE_FN|PR_EA_FN)
sbc.SCSIDeviceOps[api.RESERVE] = NewSCSIDeviceOperation(SBCReserve, nil, 0)
sbc.SCSIDeviceOps[api.RELEASE] = NewSCSIDeviceOperation(SBCRelease, nil, 0)
sbc.SCSIDeviceOps[api.MODE_SENSE] = NewSCSIDeviceOperation(SBCModeSense, nil, PR_WE_FA|PR_EA_FA|PR_EA_FN|PR_WE_FN)
sbc.SCSIDeviceOps[api.START_STOP] = NewSCSIDeviceOperation(SPCStartStop, nil, PR_SPECIAL)
sbc.SCSIDeviceOps[api.SEND_DIAGNOSTIC] = NewSCSIDeviceOperation(SPCSendDiagnostics, nil, 0)
sbc.SCSIDeviceOps[api.ALLOW_MEDIUM_REMOVAL] = NewSCSIDeviceOperation(SPCPreventAllowMediaRemoval, nil, 0)
sbc.SCSIDeviceOps[api.READ_CAPACITY] = NewSCSIDeviceOperation(SBCReadCapacity, nil, 0)
sbc.SCSIDeviceOps[api.READ_10] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.WRITE_10] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_WE_FA|PR_EA_FA|PR_EA_FN|PR_WE_FN)
sbc.SCSIDeviceOps[api.WRITE_VERIFY] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.VERIFY_10] = NewSCSIDeviceOperation(SBCVerify, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.PRE_FETCH_10] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.SYNCHRONIZE_CACHE] = NewSCSIDeviceOperation(SBCSyncCache, nil, PR_WE_FA|PR_EA_FA|PR_WE_FN|PR_EA_FN)
sbc.SCSIDeviceOps[api.WRITE_SAME] = NewSCSIDeviceOperation(SBCReadWrite, nil, 0)
sbc.SCSIDeviceOps[api.UNMAP] = NewSCSIDeviceOperation(SBCUnmap, nil, 0)
sbc.SCSIDeviceOps[api.MODE_SELECT_10] = NewSCSIDeviceOperation(SBCModeSelect, nil, PR_WE_FA|PR_EA_FA|PR_EA_FN|PR_WE_FN)
sbc.SCSIDeviceOps[api.MODE_SENSE_10] = NewSCSIDeviceOperation(SBCModeSense, nil, PR_WE_FA|PR_WE_FN|PR_EA_FA|PR_EA_FN)
if EnablePersistentReservation {
sbc.SCSIDeviceOps[api.PERSISTENT_RESERVE_IN] = NewSCSIDeviceOperation(SPCServiceAction, []*SCSIServiceAction{
{ServiceAction: PR_IN_READ_KEYS, CommandPerformFunc: SPCPRReadKeys},
{ServiceAction: PR_IN_READ_RESERVATION, CommandPerformFunc: SPCPRReadReservation},
{ServiceAction: PR_IN_REPORT_CAPABILITIES, CommandPerformFunc: SPCPRReportCapabilities},
}, 0)
sbc.SCSIDeviceOps[api.PERSISTENT_RESERVE_OUT] = NewSCSIDeviceOperation(SPCServiceAction, []*SCSIServiceAction{
{ServiceAction: PR_OUT_REGISTER, CommandPerformFunc: SPCPRRegister},
{ServiceAction: PR_OUT_RESERVE, CommandPerformFunc: SPCPRReserve},
{ServiceAction: PR_OUT_RELEASE, CommandPerformFunc: SPCPRRelease},
{ServiceAction: PR_OUT_CLEAR, CommandPerformFunc: SPCPRClear},
{ServiceAction: PR_OUT_PREEMPT, CommandPerformFunc: SPCPRPreempt},
// {ServiceAction: PR_OUT_PREEMPT_AND_ABORT, CommandPerformFunc: SPCPRPreempt},
{ServiceAction: PR_OUT_REGISTER_AND_IGNORE_EXISTING_KEY, CommandPerformFunc: SPCPRRegister},
{ServiceAction: PR_OUT_REGISTER_AND_MOVE, CommandPerformFunc: SPCPRRegisterAndMove},
}, 0)
}
sbc.SCSIDeviceOps[api.READ_16] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.WRITE_16] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN|PR_WE_FA|PR_WE_FN)
if EnableORWrite16 {
sbc.SCSIDeviceOps[api.ORWRITE_16] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
}
sbc.SCSIDeviceOps[api.WRITE_VERIFY_16] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.VERIFY_16] = NewSCSIDeviceOperation(SBCVerify, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.PRE_FETCH_16] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.SYNCHRONIZE_CACHE_16] = NewSCSIDeviceOperation(SBCSyncCache, nil, PR_EA_FA|PR_EA_FN|PR_WE_FA|PR_WE_FN)
sbc.SCSIDeviceOps[api.WRITE_SAME_16] = NewSCSIDeviceOperation(SBCReadWrite, nil, 0)
sbc.SCSIDeviceOps[api.SERVICE_ACTION_IN] = NewSCSIDeviceOperation(SBCServiceAction, nil, 0)
sbc.SCSIDeviceOps[api.REPORT_LUNS] = NewSCSIDeviceOperation(SPCReportLuns, nil, 0)
sbc.SCSIDeviceOps[api.MAINT_PROTOCOL_IN] = NewSCSIDeviceOperation(SPCServiceAction, []*SCSIServiceAction{
{ServiceAction: 0x0C, CommandPerformFunc: SPCReportSupportedOperationCodes},
}, 0)
sbc.SCSIDeviceOps[api.EXCHANGE_MEDIUM] = NewSCSIDeviceOperation(SPCIllegalOp, nil, 0)
sbc.SCSIDeviceOps[api.READ_12] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.WRITE_12] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_WE_FA|PR_EA_FA|PR_EA_FN|PR_WE_FN)
sbc.SCSIDeviceOps[api.WRITE_VERIFY_12] = NewSCSIDeviceOperation(SBCReadWrite, nil, PR_EA_FA|PR_EA_FN)
sbc.SCSIDeviceOps[api.VERIFY_12] = NewSCSIDeviceOperation(SBCVerify, nil, PR_EA_FA|PR_EA_FN)
return sbc
}
func SBCModeSelect(host int, cmd *api.SCSICommand) api.SAMStat {
return api.SAMStatGood
}
func SBCModeSense(host int, cmd *api.SCSICommand) api.SAMStat {
// DPOFUA = 0x10
var deviceSpecific uint8 = 0x10
if err := SPCModeSense(host, cmd); err.Err != nil {
return err
}
// If this is a read-only lun, we must set the write protect bit
if cmd.Device.Attrs.Readonly || cmd.Device.Attrs.SWP {
deviceSpecific |= 0x80
}
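// 0x1a is MODE SENSE(6), whose header keeps the device-specific byte at
// offset 2; the MODE SENSE(10) header keeps it at offset 3.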
if cmd.SCB[0] == 0x1a {
cmd.InSDBBuffer.Buffer[2] = deviceSpecific
} else {
cmd.InSDBBuffer.Buffer[3] = deviceSpecific
}
return api.SAMStatGood
}
/*
* SBCFormatUnit Implements SCSI FORMAT UNIT command
* The FORMAT UNIT command requests that the device server format the medium into application client
* accessible logical blocks as specified in the number of blocks and block length values received
* in the last mode parameter block descriptor in a MODE SELECT command (see SPC-3). In addition,
* the device server may certify the medium and create control structures for the management of the medium and defects.
* The degree that the medium is altered by this command is vendor-specific.
*
* Reference : SBC2r16
* 5.2 - FORMAT UNIT
*/
func SBCFormatUnit(host int, cmd *api.SCSICommand) api.SAMStat {
var (
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
)
if err := deviceReserve(cmd); err != nil {
return api.SAMStatReservationConflict
}
if !cmd.Device.Attrs.Online {
key = NOT_READY
asc = ASC_MEDIUM_NOT_PRESENT
goto sense
}
if cmd.Device.Attrs.Readonly || cmd.Device.Attrs.SWP {
key = DATA_PROTECT
asc = ASC_WRITE_PROTECT
goto sense
}
if cmd.SCB[1]&0x80 != 0 {
// we don't support format protection information
goto sense
}
if cmd.SCB[1]&0x10 != 0 {
// we don't support format data
goto sense
}
if cmd.SCB[1]&0x07 != 0 {
// defect list format must be 0
goto sense
}
return api.SAMStatGood
sense:
BuildSenseData(cmd, key, asc)
return api.SAMStatCheckCondition
}
func SBCUnmap(host int, cmd *api.SCSICommand) api.SAMStat {
// check ANCHOR
if cmd.SCB[1]&0x01 != 0 {
BuildSenseData(cmd, ILLEGAL_REQUEST, ASC_INVALID_FIELD_IN_CDB)
return api.SAMStatCheckCondition
}
const blockDescLen = 16
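// UNMAP parameter list: an 8-byte header followed by 16-byte block
// descriptors (8-byte LBA, 4-byte block count, 4 reserved bytes).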
var blockDescs []api.UnmapBlockDescriptor
for off := 8; uint32(off+blockDescLen) <= cmd.OutSDBBuffer.Length; off += blockDescLen {
lba := binary.BigEndian.Uint64(cmd.OutSDBBuffer.Buffer[off : off+8])
num := binary.BigEndian.Uint32(cmd.OutSDBBuffer.Buffer[off+8 : off+12])
blockDescs = append(blockDescs, api.UnmapBlockDescriptor{
Offset: lba << cmd.Device.BlockShift,
TL: num << cmd.Device.BlockShift,
})
}
if len(blockDescs) == 0 {
return api.SAMStatGood
}
if err := cmd.Device.Storage.Unmap(blockDescs); err != nil {
BuildSenseData(cmd, MEDIUM_ERROR, NO_ADDITIONAL_SENSE)
return api.SAMStatCheckCondition
}
return api.SAMStatGood
}
/*
* SBCReadWrite Implements SCSI READ(10/12/16), WRITE(10/12/16), WRITE AND VERIFY(10/12/16), WRITE SAME(10/12/16)
* The READ command requests that the device server read the specified logical block(s) and transfer them to the data-in buffer.
* The WRITE command requests that the device server transfer the specified logical block(s) from the data-out buffer and write them.
* The WRITE AND VERIFY command requests that the device server transfer the specified logical block(s) from the data-out buffer,
* write them to the medium, and then verify that they are correctly written.
*
* Reference : SBC2r16
* 5.6 - READ (10)
* 5.7 - READ (12)
* 5.8 - READ (16)
* 5.25 - WRITE (10)
* 5.26 - WRITE (12)
* 5.27 - WRITE (16)
* 5.29 - WRITE AND VERIFY (10)
* 5.30 - WRITE AND VERIFY (12)
* 5.31 - WRITE AND VERIFY (16)
*/
func SBCReadWrite(host int, cmd *api.SCSICommand) api.SAMStat {
var (
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
dev = cmd.Device
scb = cmd.SCB
opcode = api.SCSICommandType(scb[0])
lba uint64
tl uint32
err error
)
if dev.Attrs.Removable && !dev.Attrs.Online {
key = NOT_READY
asc = ASC_MEDIUM_NOT_PRESENT
log.Warnf("sense")
goto sense
}
switch opcode {
case api.READ_10, api.READ_12, api.READ_16, api.WRITE_10, api.WRITE_12, api.WRITE_16, api.ORWRITE_16,
api.WRITE_VERIFY, api.WRITE_VERIFY_12, api.WRITE_VERIFY_16, api.COMPARE_AND_WRITE:
// We only support protection information type 0
if scb[1]&0xe0 != 0 {
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
log.Warnf("sense data(ILLEGAL_REQUEST,ASC_INVALID_FIELD_IN_CDB) encounter")
goto sense
}
case api.WRITE_SAME, api.WRITE_SAME_16:
// We don't support resource provisioning, so ANCHOR bit == 1 is an error.
if scb[1]&0x10 != 0 {
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
goto sense
}
// We only support unmap for thin provisioned LUNS
if (scb[1]&0x08 != 0) && !dev.Attrs.ThinProvisioning {
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
goto sense
}
// We only support protection information type 0
if scb[1]&0xe0 != 0 {
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
goto sense
}
// LBDATA and PBDATA can not both be set
if (scb[1] & 0x06) == 0x06 {
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
goto sense
}
}
if dev.Attrs.Readonly || dev.Attrs.SWP {
switch opcode {
case api.WRITE_6, api.WRITE_10, api.WRITE_12, api.WRITE_16, api.ORWRITE_16,
api.WRITE_VERIFY, api.WRITE_VERIFY_12, api.WRITE_VERIFY_16, api.WRITE_SAME, api.WRITE_SAME_16,
api.PRE_FETCH_10, api.PRE_FETCH_16, api.COMPARE_AND_WRITE:
key = DATA_PROTECT
asc = ASC_WRITE_PROTECT
log.Warnf("sense data(data protect) and asc(ASC_WRITE_PROTECT) encounter")
goto sense
}
}
lba = getSCSIReadWriteOffset(scb)
tl = getSCSIReadWriteCount(scb)
// Verify that we are not doing i/o beyond the end-of-lun
if tl != 0 {
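// lba+tl wrapping around below lba detects uint64 overflow; the second
// clause rejects i/o that extends past the last block of the device.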
if lba+uint64(tl) < lba || lba+uint64(tl) > dev.Size>>dev.BlockShift {
key = ILLEGAL_REQUEST
asc = ASC_LBA_OUT_OF_RANGE
log.Warnf("sense data(ILLEGAL_REQUEST,ASC_LBA_OUT_OF_RANGE) encounter: lba: %d, tl: %d, size: %d", lba, tl, dev.Size>>dev.BlockShift)
goto sense
}
} else {
if lba >= dev.Size>>dev.BlockShift {
key = ILLEGAL_REQUEST
asc = ASC_LBA_OUT_OF_RANGE
log.Warnf("sense data(ILLEGAL_REQUEST,ASC_LBA_OUT_OF_RANGE) encounter: lba: %d, size: %d", lba, dev.Size>>dev.BlockShift)
goto sense
}
}
cmd.Offset = lba << dev.BlockShift
cmd.TL = tl << dev.BlockShift
// Handle residuals
switch opcode {
case api.READ_6, api.READ_10, api.READ_12, api.READ_16:
/*
if (cmd->tl != scsi_get_in_length(cmd))
scsi_set_in_resid_by_actual(cmd, cmd->tl);
*/
case api.WRITE_6, api.WRITE_10, api.WRITE_12, api.WRITE_16, api.WRITE_VERIFY, api.WRITE_VERIFY_12, api.WRITE_VERIFY_16:
/*
if (cmd->tl != scsi_get_out_length(cmd)) {
scsi_set_out_resid_by_actual(cmd, cmd->tl);
/* We need to clamp the size of the in-buffer
* so that we dont try to write > cmd->tl in the
* backend store.
*
if (cmd->tl < scsi_get_out_length(cmd)) {
scsi_set_out_length(cmd, cmd->tl);
}
}
*/
}
err, key, asc = bsPerformCommand(dev.Storage, cmd)
if err != nil {
log.Errorf("Error from backend: %v", err)
BuildSenseData(cmd, key, asc)
return api.SAMStatBusy
} else {
return api.SAMStatGood
}
sense:
BuildSenseData(cmd, key, asc)
return api.SAMStatCheckCondition
}
func SBCReserve(host int, cmd *api.SCSICommand) api.SAMStat {
if err := deviceReserve(cmd); err != nil {
return api.SAMStatReservationConflict
}
return api.SAMStatGood
}
func SBCRelease(host int, cmd *api.SCSICommand) api.SAMStat {
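// Reinterpret the raw 8-byte LUN field as a uint64 identifier for deviceRelease.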
lun := *(*uint64)(unsafe.Pointer(&cmd.Lun))
if err := deviceRelease(cmd.Target.TID, cmd.ITNexusID, lun, false); err != nil {
return api.SAMStatReservationConflict
}
return api.SAMStatGood
}
/*
* SBCReadCapacity Implements SCSI READ CAPACITY(10) command
* The READ CAPACITY (10) command requests that the device server transfer 8 bytes of parameter data
* describing the capacity and medium format of the direct-access block device to the data-in buffer.
* This command may be processed as if it has a HEAD OF QUEUE task attribute. If the logical unit supports
* protection information, the application client should use the READ CAPACITY (16) command instead of
* the READ CAPACITY (10) command.
*
* Reference : SBC2r16
* 5.10 - READ CAPACITY(10)
*/
func SBCReadCapacity(host int, cmd *api.SCSICommand) api.SAMStat {
var (
scb = cmd.SCB
key = ILLEGAL_REQUEST
asc = ASC_LUN_NOT_SUPPORTED
bshift = cmd.Device.BlockShift
size = cmd.Device.Size >> bshift
)
if cmd.Device.Attrs.Removable && !cmd.Device.Attrs.Online {
key = NOT_READY
asc = ASC_MEDIUM_NOT_PRESENT
goto sense
}
if (scb[8]&0x1 == 0) && (scb[2]|scb[3]|scb[4]|scb[5]) != 0 {
asc = ASC_INVALID_FIELD_IN_CDB
goto sense
}
if cmd.InSDBBuffer.Length < 8 {
goto overflow
}
// data[0] = (size >> 32) ? __cpu_to_be32(0xffffffff) : __cpu_to_be32(size - 1);
if size>>32 != 0 {
copy(cmd.InSDBBuffer.Buffer, util.MarshalUint32(uint32(0xffffffff)))
} else {
copy(cmd.InSDBBuffer.Buffer, util.MarshalUint32(uint32(size-1)))
}
// data[1] = __cpu_to_be32(1U << bshift);
copy(cmd.InSDBBuffer.Buffer[4:], util.MarshalUint32(uint32(1<<bshift)))
overflow:
cmd.InSDBBuffer.Resid = 8
return api.SAMStatGood
sense:
if cmd.InSDBBuffer != nil {
cmd.InSDBBuffer.Resid = 0
}
BuildSenseData(cmd, key, asc)
return api.SAMStatCheckCondition
}
/* SBCVerify Implements SCSI VERIFY(10) command
* The VERIFY (10) command requests that the device server verify the specified logical block(s) on the medium.
*
* Reference : SBC2r16
* 5.20 - VERIFY(10)
*/
func SBCVerify(host int, cmd *api.SCSICommand) api.SAMStat {
var (
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
dev = cmd.Device
scb = cmd.SCB
lba uint64
tl uint32
err error
)
if dev.Attrs.Removable && !dev.Attrs.Online {
key = NOT_READY
asc = ASC_MEDIUM_NOT_PRESENT
goto sense
}
if scb[1]&0xe0 != 0 {
// We only support protection information type 0
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
goto sense
}
if scb[1]&0x02 == 0 {
// no data compare with the media
return api.SAMStatGood
}
lba = getSCSIReadWriteOffset(scb)
tl = getSCSIReadWriteCount(scb)
// Verify that we are not doing i/o beyond the end-of-lun
if tl != 0 {
if lba+uint64(tl) < lba || lba+uint64(tl) > dev.Size>>dev.BlockShift {
key = ILLEGAL_REQUEST
asc = ASC_LBA_OUT_OF_RANGE
log.Warnf("sense: lba: %d, tl: %d, size: %d", lba, tl, dev.Size>>dev.BlockShift)
goto sense
}
} else {
if lba >= dev.Size>>dev.BlockShift {
key = ILLEGAL_REQUEST
asc = ASC_LBA_OUT_OF_RANGE
log.Warnf("sense")
goto sense
}
}
cmd.Offset = lba << dev.BlockShift
cmd.TL = tl << dev.BlockShift
err, key, asc = bsPerformCommand(dev.Storage, cmd)
if err != nil {
goto sense
}
return api.SAMStatGood
sense:
BuildSenseData(cmd, key, asc)
return api.SAMStatCheckCondition
}
/*
* SBCReadCapacity16 Implements SCSI READ CAPACITY(16) command
* The READ CAPACITY (16) command requests that the device server transfer parameter data
* describing the capacity and medium format of the direct-access block device to the data-in buffer.
*
* Reference : SBC2r16
* 5.11 - READ CAPACITY(16)
*/
func SBCReadCapacity16(host int, cmd *api.SCSICommand) api.SAMStat {
var (
bshift = cmd.Device.BlockShift
size = cmd.Device.Size >> bshift
allocationLength uint32
)
allocationLength = util.GetUnalignedUint32(cmd.SCB[10:14])
copy(cmd.InSDBBuffer.Buffer, util.MarshalUint64(uint64(size-1)))
if allocationLength > 12 {
copy(cmd.InSDBBuffer.Buffer[8:], util.MarshalUint32(uint32(1<<bshift)))
if allocationLength > 16 {
var lbpme int
if cmd.Device.Attrs.ThinProvisioning {
lbpme = 1
}
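// Pack bytes 12-15 of the response: the logical-blocks-per-physical-block
// exponent, the LBPME (thin provisioning) flag, and the lowest aligned LBA.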
val := (cmd.Device.Attrs.Lbppbe << 16) | (lbpme << 15) | cmd.Device.Attrs.LowestAlignedLBA
copy(cmd.InSDBBuffer.Buffer[12:], util.MarshalUint32(uint32(val)))
}
}
return api.SAMStatGood
}
func SBCGetLbaStatus(host int, cmd *api.SCSICommand) api.SAMStat {
var (
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
dev = cmd.Device
scb = cmd.SCB
lba uint64
tl uint32
)
if dev.Attrs.Removable && !dev.Attrs.Online {
key = NOT_READY
asc = ASC_MEDIUM_NOT_PRESENT
goto sense
}
if scb[1]&0xe0 != 0 {
// We only support protection information type 0
key = ILLEGAL_REQUEST
asc = ASC_INVALID_FIELD_IN_CDB
goto sense
}
if scb[1]&0x02 == 0 {
// no data compare with the media
return api.SAMStatGood
}
lba = getSCSIReadWriteOffset(scb)
tl = getSCSIReadWriteCount(scb)
// Verify that we are not doing i/o beyond the end-of-lun
if tl != 0 {
if lba+uint64(tl) < lba || lba+uint64(tl) > dev.Size>>dev.BlockShift {
key = ILLEGAL_REQUEST
asc = ASC_LBA_OUT_OF_RANGE
log.Warnf("sense: lba: %d, tl: %d, size: %d", lba, tl, dev.Size>>dev.BlockShift)
goto sense
}
} else {
if lba >= dev.Size>>dev.BlockShift {
key = ILLEGAL_REQUEST
asc = ASC_LBA_OUT_OF_RANGE
log.Warnf("sense")
goto sense
}
}
return api.SAMStatGood
sense:
if cmd.InSDBBuffer != nil {
cmd.InSDBBuffer.Resid = 0
}
BuildSenseData(cmd, key, asc)
return api.SAMStatCheckCondition
}
func SBCServiceAction(host int, cmd *api.SCSICommand) api.SAMStat {
opcode := api.SCSICommandType(cmd.SCB[1] & 0x1f)
switch opcode {
case api.READ_CAPACITY:
return SBCReadCapacity(host, cmd)
case api.SAI_READ_CAPACITY_16:
return SBCReadCapacity16(host, cmd)
case api.SAI_GET_LBA_STATUS:
return SBCGetLbaStatus(host, cmd)
}
return api.SAMStatGood
}
/*
* SBCSyncCache Implements SCSI SYNCHRONIZE CACHE(10) and SYNCHRONIZE CACHE(16) command
* The SYNCHRONIZE CACHE command requests that the device server ensure that
* the specified logical blocks have their most recent data values recorded in
* non-volatile cache and/or on the medium, based on the SYNC_NV bit.
*
* Reference : SBC2r16
* 5.18 - SYNCHRONIZE CACHE (10)
* 5.19 - SYNCHRONIZE CACHE (16)
*/
func SBCSyncCache(host int, cmd *api.SCSICommand) api.SAMStat {
scb := cmd.SCB
lba := getSCSIReadWriteOffset(scb)
tl := getSCSIReadWriteCount(scb)
dev := cmd.Device
cmd.Offset = lba << dev.BlockShift
cmd.TL = tl << dev.BlockShift
err, key, asc := bsPerformCommand(dev.Storage, cmd)
if err != nil {
BuildSenseData(cmd, key, asc)
return api.SAMStatCheckCondition
}
return api.SAMStatGood
}
|
apache-2.0
|
clockworkorange/grails-core
|
grails-core/src/main/groovy/org/grails/core/exceptions/GrailsRuntimeException.java
|
1136
|
/*
* Copyright 2004-2005 Graeme Rocher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.grails.core.exceptions;
/**
* Generic global runtime exception.
*
* @author Graeme Rocher
* @since 0.4
*/
public class GrailsRuntimeException extends GrailsException {
private static final long serialVersionUID = -1335036736215845295L;
public GrailsRuntimeException(String message) {
super(message);
}
public GrailsRuntimeException(String message, Throwable cause) {
super(message, cause);
}
public GrailsRuntimeException(Throwable cause) {
super(cause);
}
}
|
apache-2.0
|
reportportal/service-ui
|
app/src/pages/common/modals/importModal/constants.js
|
1194
|
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { ZIP, JAR } from 'common/constants/fileTypes';
export const MODAL_TYPE_IMPORT_LAUNCH = 'import';
export const MODAL_TYPE_UPLOAD_PLUGIN = 'upload';
export const ACCEPT_FILE_MIME_TYPES = {
[MODAL_TYPE_IMPORT_LAUNCH]: [
'application/zip',
'application/x-zip-compressed',
'application/zip-compressed',
],
[MODAL_TYPE_UPLOAD_PLUGIN]: ['.jar'],
};
export const ACCEPT_FILE_TYPES_ABBR = {
[MODAL_TYPE_IMPORT_LAUNCH]: ZIP,
[MODAL_TYPE_UPLOAD_PLUGIN]: JAR,
};
export const MAX_FILE_SIZES = {
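// 33554432 bytes = 32 MiB (launch import); 134217728 bytes = 128 MiB (plugin upload)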
[MODAL_TYPE_IMPORT_LAUNCH]: 33554432,
[MODAL_TYPE_UPLOAD_PLUGIN]: 134217728,
};
|
apache-2.0
|
gavscode/gavscode-android
|
checkstyle/checkstyle-checks/src/test/java/com/srclib/android/checkstyle/LogSparinglyCheckTest.java
|
1510
|
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.srclib.android.checkstyle;
import java.io.File;
import java.io.IOException;
import org.junit.Test;
import com.puppycrawl.tools.checkstyle.Checker;
public class LogSparinglyCheckTest extends CheckerTest {
@Test
public void test() throws IOException {
Checker c = createChecker(LogSparinglyCheck.class);
verify(c, new File[] { getSrcFile("LogSparinglyCheckExample.java") },
new String[] {
"LogSparinglyCheckExample.java at line 25: required to be surrounded by an if (DEBUG) block with no active logic, use a static final boolean expression",
"LogSparinglyCheckExample.java at line 31: required to be surrounded by an if (DEBUG) block with no active logic, use a static final boolean expression",
"LogSparinglyCheckExample.java at line 39: required to be surrounded by an if (DEBUG) block with no active logic, use a static final boolean expression"});
}
}
|
apache-2.0
|
Buggaboo/j2objc
|
translator/src/test/java/com/google/devtools/j2objc/translate/UnsequencedExpressionRewriterTest.java
|
9129
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.translate;
import com.google.devtools.j2objc.GenerationTest;
import com.google.devtools.j2objc.Options;
import java.io.IOException;
/**
* Unit tests for {@link UnsequencedExpressionRewriter}.
*
* @author Keith Stanger
*/
public class UnsequencedExpressionRewriterTest extends GenerationTest {
@Override
protected void setUp() throws IOException {
super.setUp();
Options.enableExtractUnsequencedModifications();
}
public void testUnsequencedPrefixExpression() throws IOException {
String translation = translateSourceFile(
"class Test { void test(int i) { int j = ++i - ++i; } }", "Test", "Test.m");
assertTranslatedLines(translation,
"jint unseq$1 = ++i;",
"jint j = unseq$1 - ++i;");
}
public void testUnsequencedAssignmentExpression() throws IOException {
String translation = translateSourceFile(
"class Test { int test(int[] data, int i) { return data[i += 2] + i; } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"int unseq$1 = i += 2;",
"return IOSIntArray_Get(nil_chk(data), unseq$1) + i;");
}
public void testUnsequencedConditionalInfixExpression() throws IOException {
String translation = translateSourceFile(
"class Test { boolean test(int i) { "
+ "return i == 0 || i == 1 || ++i + i == 2 || i++ + i == 3 || i == 4; } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"jboolean unseq$1;",
"if (!(unseq$1 = (i == 0 || i == 1))) {",
" jint unseq$2 = ++i;",
" if (!(unseq$1 = (unseq$2 + i == 2))) {",
" jint unseq$3 = i++;",
" unseq$1 = (unseq$1 || unseq$3 + i == 3 || i == 4);",
" }",
"}",
"return unseq$1;");
}
public void testUnsequencedConditionalExpression() throws IOException {
String translation = translateSourceFile(
"class Test {"
+ " boolean test(int i) { return i == 0 ? i++ + i == 0 || i++ + i == 0 : ++i == 1; }"
+ " boolean test2(int i) { return i == 0 ? ++i == 1 : i++ + i == 0 || i++ + i == 0; } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"- (jboolean)testWithInt:(jint)i {",
" jboolean unseq$1;",
" if (i == 0) {",
" jint unseq$2 = i++;",
" jboolean unseq$3;",
" if (!(unseq$3 = (unseq$2 + i == 0))) {",
" jint unseq$4 = i++;",
" unseq$3 = (unseq$3 || unseq$4 + i == 0);",
" }",
" unseq$1 = unseq$3;",
" }",
" else {",
" unseq$1 = (++i == 1);",
" }",
" return unseq$1;",
"}");
assertTranslatedLines(translation,
"- (jboolean)test2WithInt:(jint)i {",
" jboolean unseq$1;",
" if (i == 0) {",
" unseq$1 = (++i == 1);",
" }",
" else {",
" jint unseq$2 = i++;",
" jboolean unseq$3;",
" if (!(unseq$3 = (unseq$2 + i == 0))) {",
" jint unseq$4 = i++;",
" unseq$3 = (unseq$3 || unseq$4 + i == 0);",
" }",
" unseq$1 = unseq$3;",
" }",
" return unseq$1;",
"}");
}
public void testWhileLoop() throws IOException {
String translation = translateSourceFile(
"class Test { void test(int i) { while (i + i++ < 10) {} } }", "Test", "Test.m");
assertTranslatedLines(translation,
"while (true) {",
" jint unseq$1 = i;",
" if (!(unseq$1 + i++ < 10)) break;");
}
public void testVariableDeclarationStatementIsSplit() throws IOException {
String translation = translateSourceFile(
"class Test { void test() { int i = 0, j = i++ + i, k = j, l = --k - k, m = 1; } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"jint i = 0;",
"jint unseq$1 = i++;",
"jint j = unseq$1 + i, k = j;",
"jint unseq$2 = --k;",
"jint l = unseq$2 - k, m = 1;");
}
public void testAssertStatement() throws IOException {
String translation = translateSourceFile(
"class Test { void test(int i) { assert i++ + i++ == 0 : \"foo\" + i++ + i++; } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"jint unseq$1 = i++;",
"jboolean unseq$2 = unseq$1 + i++ == 0;",
"jint unseq$3 = i++;",
"JreAssert((unseq$2), (JreStrcat(\"$II\", @\"foo\", unseq$3, i++)));");
}
public void testForInitStatements() throws IOException {
String translation = translateSourceFile(
"class Test { void test() { int i = 0, j = 0, k = 0; "
+ "for (i = i++ + i++, j = i++ + i++, k = i++ + i++;;) { } } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"jint i = 0, j = 0, k = 0;",
"jint unseq$1 = i++;",
"jint unseq$2 = i++;",
"i = unseq$1 + unseq$2;",
"jint unseq$3 = i++;",
"j = unseq$3 + i++;",
"jint unseq$4 = i++;",
"for (k = unseq$4 + i++; ; ) {",
"}");
}
public void testForInitWithDeclaration() throws IOException {
String translation = translateSourceFile(
"class Test { void test() { int k = 0; "
+ "for (int i = k++ + k++, j = i++ + i++;;) { } } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"jint k = 0;",
"jint unseq$1 = k++;",
"jint i = unseq$1 + k++;",
"jint unseq$2 = i++;",
"for (jint j = unseq$2 + i++; ; ) {",
"}");
}
public void testIfConditionAndUpdaters() throws IOException {
String translation = translateSourceFile(
"class Test { void test() { int k = 0; "
+ "for (int i = k++ + k++; i++ + i++ < 10; i++, k = i++ + i++) { "
+ " String s = \"foo\" + i; } } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"jint k = 0;",
"jint unseq$1 = k++;",
"for (jint i = unseq$1 + k++; ; ) {",
" jint unseq$2 = i++;",
" if (!(unseq$2 + i++ < 10)) break;",
" NSString *s = JreStrcat(\"$I\", @\"foo\", i);",
" i++;",
" jint unseq$3 = i++;",
" k = unseq$3 + i++;",
"}");
}
public void testIfStatement() throws IOException {
String translation = translateSourceFile(
"class Test { void test(int i) { "
+ "if (i++ + i++ == 0) {} else if (i++ + i++ == 1) {} else {} } }",
"Test", "Test.m");
assertTranslatedLines(translation,
"jint unseq$1 = i++;",
"if (unseq$1 + i++ == 0) {",
"}",
"else {",
" jint unseq$2 = i++;",
" if (unseq$2 + i++ == 1) {",
" }",
" else {",
" }",
"}");
}
public void testAssignToArray() throws IOException {
String translation = translateSourceFile(
"class Test { void test(int[] arr, int i) { arr[i] = i++; } }", "Test", "Test.m");
assertTranslatedLines(translation,
"jint unseq$1 = i;",
"*IOSIntArray_GetRef(nil_chk(arr), unseq$1) = i++;");
}
// Make sure that a conditional access remains conditional. Even if the access
// is not a modification, it might have been modified by the condition.
public void testConditionalAccess() throws IOException {
String translation = translateSourceFile(
"class Test { boolean foo(int i, int j) { return i < j; }"
+ " boolean test1(boolean b, int i) { return b || foo(i, i++); }"
+ " boolean test2(boolean b, int i) { return b ? foo(i, i++) : false; } }",
"Test", "Test.m");
// test1
assertTranslatedLines(translation,
"jboolean unseq$1;",
"if (!(unseq$1 = b)) {",
" jint unseq$2 = i;",
" unseq$1 = (unseq$1 || [self fooWithInt:unseq$2 withInt:i++]);",
"}",
"return unseq$1;");
// test2
assertTranslatedLines(translation,
"jboolean unseq$1;",
"if (b) {",
" jint unseq$2 = i;",
" unseq$1 = [self fooWithInt:unseq$2 withInt:i++];",
"}",
"else {",
" unseq$1 = false;",
"}",
"return unseq$1;");
}
// Instance variables do not appear to produce any unsequenced errors.
// Regression test for Issue #748.
public void testInstanceVarIsNotUnsequenced() throws IOException {
String translation = translateSourceFile(
"class Test { int i; void test() { this.i = this.i + this.i++; } }", "Test", "Test.m");
assertTranslation(translation, "self->i_ = self->i_ + self->i_++;");
}
}
|
apache-2.0
|
lefou/AsciidocFX
|
src/main/java/com/kodcu/logging/TableViewLogAppender.java
|
3244
|
package com.kodcu.logging;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.encoder.PatternLayoutEncoder;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.classic.spi.IThrowableProxy;
import ch.qos.logback.classic.spi.ThrowableProxyUtil;
import ch.qos.logback.core.UnsynchronizedAppenderBase;
import com.kodcu.service.ThreadService;
import javafx.application.Platform;
import javafx.collections.ObservableList;
import javafx.scene.control.Label;
import javafx.scene.control.TableView;
import javafx.scene.control.ToggleButton;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
/**
* Created by usta on 02.06.2015.
*/
public class TableViewLogAppender extends UnsynchronizedAppenderBase<ILoggingEvent> {
private static TableView<MyLog> logViewer;
private static ObservableList<MyLog> logList;
private static List<MyLog> buffer = Collections.synchronizedList(new LinkedList<MyLog>());
private static Label logShortMessage;
private static ThreadService threadService;
PatternLayoutEncoder encoder;
private static ToggleButton logShowHider;
public static void setLogViewer(TableView<MyLog> logViewer) {
TableViewLogAppender.logViewer = logViewer;
}
public static void setLogList(ObservableList<MyLog> logList) {
TableViewLogAppender.logList = logList;
}
public static void setStatusMessage(Label logShortMessage) {
TableViewLogAppender.logShortMessage = logShortMessage;
}
public static void setShowHideLogs(ToggleButton logShowHider) {
TableViewLogAppender.logShowHider = logShowHider;
}
public static ToggleButton getLogShowHider() {
return logShowHider;
}
@Override
protected void append(ILoggingEvent event) {
if (Objects.isNull(logViewer))
return;
String message = event.getFormattedMessage();
String level = event.getLevel().toString();
if (event.getLevel() == Level.ERROR) {
logShowHider.getStyleClass().add("red-label");
}
final String finalMessage = message;
threadService.runActionLater(() -> {
logShortMessage.setText(finalMessage);
});
IThrowableProxy tp = event.getThrowableProxy();
if (Objects.nonNull(tp) && event.getLevel() == Level.ERROR) {
String tpMessage = ThrowableProxyUtil.asString(tp);
message += "\n" + tpMessage;
}
MyLog myLog = new MyLog(level, message);
buffer.add(myLog);
threadService.buff("logAppender").schedule(() -> {
final List<MyLog> clone = new LinkedList<>(buffer);
buffer.clear();
threadService.runActionLater(() -> {
logList.addAll(clone);
});
}, 2, TimeUnit.SECONDS);
}
public PatternLayoutEncoder getEncoder() {
return encoder;
}
public void setEncoder(PatternLayoutEncoder encoder) {
this.encoder = encoder;
}
public static void setThreadService(ThreadService threadService) {
TableViewLogAppender.threadService = threadService;
}
}
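/*
 * A minimal sketch of how this appender might be registered in logback.xml; the appender
 * name and pattern below are assumptions for illustration, not taken from this repository:
 *
 *   <configuration>
 *     <appender name="TABLE_VIEW" class="com.kodcu.logging.TableViewLogAppender">
 *       <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
 *         <pattern>%d{HH:mm:ss.SSS} %-5level %logger{36} - %msg%n</pattern>
 *       </encoder>
 *     </appender>
 *     <root level="INFO">
 *       <appender-ref ref="TABLE_VIEW" />
 *     </root>
 *   </configuration>
 */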
|
apache-2.0
|
Activiti/Activiti
|
activiti-core/activiti-api-impl/activiti-api-process-model-impl/src/main/java/org/activiti/api/runtime/model/impl/StringToLocalDateTimeConverter.java
|
1067
|
/*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.api.runtime.model.impl;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import org.springframework.core.convert.converter.Converter;
@ProcessVariableTypeConverter
public class StringToLocalDateTimeConverter implements Converter<String, LocalDateTime> {
@Override
public LocalDateTime convert(String source) {
return LocalDateTime.parse(source, DateTimeFormatter.ISO_DATE_TIME);
}
}
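/*
 * A minimal usage sketch (the input value is illustrative only):
 *
 *   LocalDateTime value = new StringToLocalDateTimeConverter()
 *           .convert("2020-06-15T10:15:30"); // parses ISO-8601 date-time strings
 */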
|
apache-2.0
|
TouK/nussknacker
|
utils/avro-util/src/main/scala/pl/touk/nussknacker/engine/avro/schema/DefaultAvroSchemaEvolution.scala
|
4070
|
package pl.touk.nussknacker.engine.avro.schema
import org.apache.avro.Schema
import org.apache.avro.generic._
import org.apache.avro.io.{DatumReader, DecoderFactory, EncoderFactory}
import pl.touk.nussknacker.engine.avro.{AvroUtils, RuntimeSchemaData}
import java.io.{ByteArrayOutputStream, IOException}
import java.nio.ByteBuffer
import scala.util.{Try, Using}
/**
  * It's the base implementation of AvroSchemaEvolution. In this case the strategy to evolve a record to a schema is as follows:
  *
  * serialize record to record schema -> deserialize record to provided schema (final schema)
  *
  * This strategy is based on the Confluent implementation of serialization and deserialization. But we don't
  * allocate bytes for the MagicByte and Id, because we don't need them.
  *
  * For now it's the easiest way to convert a GenericContainer record to the wanted schema.
  */
class DefaultAvroSchemaEvolution extends AvroSchemaEvolution with DatumReaderWriterMixin with RecordDeserializer {
/**
* In future we can try to configure it
*/
protected final val useSchemaReflection = false
protected final val encoderFactory: EncoderFactory = EncoderFactory.get
override protected final val decoderFactory = DecoderFactory.get
override def alignRecordToSchema(record: GenericContainer, schema: Schema): Any = {
val writerSchema = record.getSchema
if (writerSchema.equals(schema)) {
record
} else {
val serializedObject = serializeRecord(record)
deserializePayloadToSchema(serializedObject, writerSchema, schema)
}
}
override def canBeEvolved(record: GenericContainer, schema: Schema): Boolean =
Try(alignRecordToSchema(record, schema)).isSuccess
  /**
    * It's a copy-paste from AbstractKafkaAvroDeserializer#DeserializationContext.read with some modifications.
    * We pass in the record buffer data and the schema which will be used to convert the record.
    */
protected def deserializePayloadToSchema(payload: Array[Byte], writerSchema: Schema, readerSchema: Schema): Any = {
try {
      // We always want to create a generic record at the end, because a specific record can have other fields than expected
val reader = StringForcingDatumReaderProvider.genericDatumReader[AnyRef](writerSchema, readerSchema, AvroUtils.genericData).asInstanceOf[DatumReader[AnyRef]]
val buffer = ByteBuffer.wrap(payload)
deserializeRecord(RuntimeSchemaData(readerSchema, None), reader, buffer, 0)
} catch {
case exc@(_: RuntimeException | _: IOException) =>
// avro deserialization may throw IOException, AvroRuntimeException, NullPointerException, etc
throw new AvroSchemaEvolutionException(s"Error at deserialization payload to record.", exc)
}
}
// Currently schema evolution doesn't support schema id serialization. We assume that it is used only on the end of process,
// when there won't be any subsequent serializations done
override protected def schemaIdSerializationEnabled: Boolean = false
  /**
    * Record serialization method, kind of a copy-paste from AbstractKafkaAvroSerializer#DeserializationContext.read.
    * We use the Confluent serialization mechanism without some specific features like:
    *
    * - fetching schema from registry
    * - fetching schema Id
    * - we don't serialize MagicByte and version
    *
    * For serialization we use the schema from the record.
    */
protected def serializeRecord(record: GenericContainer): Array[Byte] = {
Using.resource(new ByteArrayOutputStream) { out =>
try {
val encoder = encoderFactory.directBinaryEncoder(out, null)
val writer = createDatumWriter(record, record.getSchema, useSchemaReflection = useSchemaReflection)
writer.write(record, encoder)
encoder.flush()
out.toByteArray
} catch {
case exc@(_: RuntimeException | _: IOException) =>
// avro serialization may throw IOException, AvroRuntimeException, NullPointerException, etc
throw new AvroSchemaEvolutionException(s"Error at serialization record to payload.", exc)
}
}
}
}
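// A minimal usage sketch, assuming a GenericRecord `record` and a target `readerSchema`
// are already in scope (both names are illustrative):
//
//   val evolution = new DefaultAvroSchemaEvolution
//   if (evolution.canBeEvolved(record, readerSchema)) {
//     val aligned = evolution.alignRecordToSchema(record, readerSchema)
//   }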
|
apache-2.0
|
DelphiRace/File-management
|
public/include/js/config/config.js
|
272
|
var apurl="http://211.21.170.18:99";
//var apurl="http://127.0.0.1:88";
var originUrl = location.origin+'/';
var configObject = {
"LoginUrl": originUrl+"login",
"processLoginUrl": originUrl+"menter/setlogin",
//"langSet":"http://211.21.170.18:99/lang/page",
};
|
apache-2.0
|
erdi/grails-core
|
grails-core/src/main/groovy/org/codehaus/groovy/grails/commons/ComponentCapableDomainClass.java
|
1119
|
/* Copyright 2011 SpringSource
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.commons;
import java.util.List;
/**
* Interface for domains capable of supporting components.
*
* @author Graeme Rocher
* @since 2.0
*/
public interface ComponentCapableDomainClass {
/**
* Adds a component
*
* @param component The component
*/
void addComponent(GrailsDomainClass component);
/**
* Gets all the components for this domain class
*
* @return The list of components
*/
List<GrailsDomainClass> getComponents();
}
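/*
 * A minimal sketch of an implementing class (hypothetical, for illustration only;
 * it assumes java.util.ArrayList is imported):
 *
 *   public class ComponentHoldingDomainClass implements ComponentCapableDomainClass {
 *       private final List<GrailsDomainClass> components = new ArrayList<GrailsDomainClass>();
 *       public void addComponent(GrailsDomainClass component) { components.add(component); }
 *       public List<GrailsDomainClass> getComponents() { return components; }
 *   }
 */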
|
apache-2.0
|
trasa/aws-sdk-java
|
aws-java-sdk-kinesis/src/main/java/com/amazonaws/services/kinesis/model/transform/DescribeStreamResultJsonUnmarshaller.java
|
2898
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.kinesis.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import com.amazonaws.services.kinesis.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* DescribeStreamResult JSON Unmarshaller
*/
public class DescribeStreamResultJsonUnmarshaller implements
Unmarshaller<DescribeStreamResult, JsonUnmarshallerContext> {
public DescribeStreamResult unmarshall(JsonUnmarshallerContext context)
throws Exception {
DescribeStreamResult describeStreamResult = new DescribeStreamResult();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL)
return null;
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("StreamDescription", targetDepth)) {
context.nextToken();
describeStreamResult
.setStreamDescription(StreamDescriptionJsonUnmarshaller
.getInstance().unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null
|| context.getLastParsedParentElement().equals(
currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return describeStreamResult;
}
private static DescribeStreamResultJsonUnmarshaller instance;
public static DescribeStreamResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new DescribeStreamResultJsonUnmarshaller();
return instance;
}
}
|
apache-2.0
|
andreacastello/eclipse-hatom-plugin
|
src/it/pronetics/madstore/hatom/eclipse/validator/ValidatorEngine.java
|
8327
|
/**
* Copyright 2008 - 2009 Pro-Netics S.P.A.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.pronetics.madstore.hatom.eclipse.validator;
import static it.pronetics.madstore.hatom.eclipse.validator.BaseAnalyzer.HATOM_HENTRY_ATTRIBUTES;
import static it.pronetics.madstore.hatom.eclipse.validator.HentryChildAnalyzer.HENTRY_CHILDREN;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.swing.text.BadLocationException;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.traversal.NodeIterator;
/**
* Handler for rule validation tasks.<br>
*
* @author Andrea Castello
* @version 1.6
*/
public class ValidatorEngine {
// List of hAtom keywords: they're widely used in the whole validation process
public final static String[] ALL_KEYWORDS;// = Arrays.copyOf( HENTRY_CHILDREN, HATOM_HENTRY_ATTRIBUTES.size() + 3);
static {
ALL_KEYWORDS = new String[HATOM_HENTRY_ATTRIBUTES.size() + 3];
System.arraycopy(HENTRY_CHILDREN, 0, ALL_KEYWORDS, 0, HATOM_HENTRY_ATTRIBUTES.size());
ALL_KEYWORDS[ALL_KEYWORDS.length - 3] = HfeedAnalyzer.KEYWORD_HFEED;
ALL_KEYWORDS[ALL_KEYWORDS.length - 2] = HfeedAnalyzer.FEED_KEY;
ALL_KEYWORDS[ALL_KEYWORDS.length - 1] = HfeedAnalyzer.KEYWORD_HENTRY;
}
// attribute names usually associated with hAtom keywords
public static final String ATTR_CLASS = "class"; // many classes will use this
public static final String ATTR_REL = "rel";
public static final String MATCHING_LIST = "matching";
public static final String UNMATCHING_LIST = "unmatching";
// List of error reports found during the validation process
private List<Report> reports;
// Name of the document open in the IDE, that will be validated by this engine
private String documentName = "";
// DOM object that represents the document to be validated
private Document xhtmlDoc;
/**
     * Creates a new engine instance and initializes its internal report list.<br>
*/
public ValidatorEngine(){
reports = new ArrayList<Report>();
}
/**
* Checks and reports invalid hfeed attributes that are placed outside hfeed elements.
* @param node Node to be checked
* @param keyword hatom keyword to be checked
*/
private static void checkInvalidHatomAttributes(Node node, String keyword, String docName) {
if (node != null) {
NamedNodeMap nnmap = node.getAttributes();
if (nnmap != null) {
for (int i = 0; i < nnmap.getLength(); i++) {
Node child = nnmap.item(i);
checkInvalidAttribute(node, child, keyword, docName);
}
}
}
}
/**
* Convenience method used inside <code>checkInvalidHatomAttributes</code> method.
* It creates an error report if the hAtom <code>keyword</code> is found inside the
* given <code>node</code>.
* <br>
* Note that the <code>docName</code> is just used for reporting purposes.<br>
*
* @param parent Parent node of the node that we have to analyze
* @param child Node to be analyzed
* @param keyword hatom keyword that must be checked for the given node
* @param docName the document's name
*/
private static void checkInvalidAttribute(Node parent, Node child, String keyword, String docName ) {
Report report;
String nodeValue;
// We search just attribute values
if (child.getNodeType() == Node.ATTRIBUTE_NODE) {
nodeValue = child.getNodeValue();
if (nodeValue != null && XMLUtils.attributeValueMatches(nodeValue, keyword)) {
report = new Report();
report.setNode(parent);
StringBuffer message = new StringBuffer("Attribute ").append(child.getNodeName());
message.append(" with value ").append(nodeValue).append(" is in invalid position. \n");
message.append("Please check that node is inside his regular parent node \n");
report.setMessage(message.toString());
ValidatorEngine engine = ValidatorCache.getInstance().getEngine(docName);
engine.addReport(report);
}
}
}
/**
     * Returns the list of report objects that contains all the validation errors found during the process.<br>
     * @return the list of validation reports
*/
public List<Report> getReports(){
return reports;
}
/**
* Adds a new report object to the report list.<br>
* @param report A report object.
*/
public void addReport(Report report){
reports.add(report);
}
/**
     * Performs validation on a DOM object which is built from the given XHTML source string.
     *
     * @param stringDoc the XHTML document content that must be converted into a DOM object
     * @throws BadLocationException in case the styled document cannot return the document as string
     * @throws IOException in case Document object creation fails.
*/
public void validate(String stringDoc) throws BadLocationException, IOException {
this.xhtmlDoc = XMLUtils.getDocument(new ByteArrayInputStream( stringDoc.getBytes() ));
HfeedAnalyzer analyzer = new HfeedAnalyzer();
analyzer.init(xhtmlDoc);
analyzer.setDocumentName(this.documentName);
analyzer.analyze();
}
/**
* Check if there are hAtom microformats outside the validated html.<br>
*
* @param unmatchingNodes nodes that should not contain hAtom microformat
*/
public static void analyzeUnmatchingNodes(Document doc, List<Node> unmatchingNodes, String documentName, String[] targetKeywords){
for (int i=0; i<targetKeywords.length; i++){
for(Node node: unmatchingNodes){
checkInvalidHatomAttributes(node, targetKeywords[i], documentName);
}
}
}
/**
* Searches for hAtom keywords that can be found outside their valid position (ie: a hentry is found outside
* a hfeed element).<br>
*
* @param doc the whole DOM object representing the XHTML document; it is used to create a node iterator
* @param rootNode node that will be the root under which iterator will be built.
* @param docName the name of the document opened in the IDE
*/
public static void analyzeUnmatchingNodes(Document doc, Node rootNode, String docName, String[] targetKeywords){
NodeIterator iterator = XMLUtils.getNodeIterator(doc, rootNode);
for (int i=0; i<targetKeywords.length; i++){
Node node;
while((node = iterator.nextNode() ) != null){
checkInvalidHatomAttributes(node, targetKeywords[i], docName);
}
// Bring iterator back to first node
while (( node = iterator.previousNode())!=null){}
}
iterator.detach();
}
/**
* Returns the name of the document to be validated.<br>
     * @return the name of the document to be validated
*/
public String getDocumentName() {
return documentName;
}
/**
* Sets the name of the document to be validated.<br>
     * @param documentName the name of the document
*/
public void setDocumentName(String documentName) {
this.documentName = documentName;
}
}
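/*
 * A minimal usage sketch (the document content is illustrative, and it assumes the
 * Report class exposes a getMessage() accessor matching its setMessage() mutator):
 *
 *   ValidatorEngine engine = new ValidatorEngine();
 *   engine.setDocumentName("page.xhtml");
 *   engine.validate("<html>...</html>");
 *   for (Report report : engine.getReports()) {
 *       System.out.println(report.getMessage());
 *   }
 */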
|
apache-2.0
|
blobcity/db-java-adapter
|
src/main/java/com/blobcity/db/enums/IndexType.java
|
622
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.blobcity.db.enums;
/**
*
* @author Prikshit Kumar
* @author Sanket Sarang
*/
public enum IndexType {
NONE("none"),
UNIQUE("unique"),
BTREE("btree"),
HASHED("hashed"),
BITMAP("bitmap"),
TIMESERIES("timeseries"),
GEO_SPATIAL("geo-spatial");
private String type;
IndexType(final String type) {
this.type = type;
}
public String getType() {
return type;
}
}
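/*
 * A minimal usage sketch (illustrative only):
 *
 *   IndexType index = IndexType.BTREE;
 *   String wireName = index.getType(); // "btree", the lowercase name sent to the database
 */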
|
apache-2.0
|
aslucky/StockHelper
|
src/dataProvider.py
|
4949
|
# coding:utf8
import os
import datetime
import tushare as ts
import pandas as pd
from src.utils import get_legal_trade_date
class DataProvider:
    """
    Provides stock data to callers.
    app_path: local data path
    """
def __init__(self, app_path):
self.appPath = app_path
self.errString = ''
self.lastTradeDate = get_legal_trade_date()
def get_last_trade_data(self, code):
data = ts.get_hist_data(code, self.lastTradeDate)
data.rename(columns={'p_change': 'changepercent','close':'trade','turnover':'turnoverratio'}, inplace=True)
return data
def get_code_list(self, dataPath=None, dataType=None):
"""
获取当天的股票代码列表
:param tdxPath: not None 遍历目录获取代码, is None 使用tushare获取当前交易日的股票列表
:param dataType: 数据类型, 0 通达信数据
:return: dataframe 股票代码列表
code,代码
name,名称
industry,所属行业
area,地区
pe,市盈率
outstanding,流通股本
totals,总股本(万)
totalAssets,总资产(万)
liquidAssets,流动资产
fixedAssets,固定资产
reserved,公积金
reservedPerShare,每股公积金
eps,每股收益
bvps,每股净资
pb,市净率
timeToMarket,上市日期
"""
if dataPath is not None:
            if dataType == 0:
                # use TDX (tongdaxin) data files
codeList = []
for root, dirs, files in os.walk(dataPath):
for fn in files:
codeList.append(fn[0:-4])
return codeList
else:
                self.errString = 'Unsupported data type dataType: %d' % dataType
return []
else:
if not os.path.isfile(self.appPath + '/' + self.lastTradeDate + '_Datas.csv'):
codeList = ts.get_stock_basics()
if codeList is None:
                    print 'No data fetched'
return []
                # format differs from the daily gainers ranking, so do not save it
# codeList.to_csv(self.appPath + '/' + self.lastTradeDate + '_Datas.csv', encoding='utf8')
else:
codeList = pd.read_csv(self.appPath + '/' + self.lastTradeDate + '_Datas.csv', encoding='utf8',
index_col=0, dtype={'code': str})
return codeList
def get_day_rise(self):
if not os.path.isfile(self.appPath + '/datas/' + self.lastTradeDate + '_Datas.csv'):
try:
codeList = ts.get_today_all()
except Exception:
return []
codeList.to_csv(self.appPath + '/datas/' + self.lastTradeDate + '_Datas.csv', encoding='utf8')
else:
codeList = pd.read_csv(self.appPath + '/datas/' + self.lastTradeDate + '_Datas.csv', encoding='utf8',index_col=0,dtype={'code': str})
return codeList
def get_data_by_count(self, stock_code, trade_date, count, kline_type, dataPath=None, dataType=None):
"""
获取到指定日期的count根k线数据,通达信目前只支持日线数据
:param stock_code:
:param trade_date: 指定日期的数据
:param count:
:param kline_type: 数据类型,D=日k线 W=周 M=月 5=5分钟 15=15分钟 30=30分钟 60=60分钟,默认为D
:param dataPath: 数据路径
:param dataType: 数据类型, 0 通达信数据
:return: dataframe 从小到大日期排序
"""
        # fetch k-line data covering count trading days (extra days account for holidays)
holidays = (count / 5) * 3
startDate = trade_date + datetime.timedelta(days=-(count + holidays))
try:
spy = ts.get_hist_data(stock_code, start=startDate.strftime("%Y-%m-%d"),
end=trade_date.strftime("%Y-%m-%d"),
ktype=kline_type)
for i in range(4):
if len(spy) < count:
holidays *= 2
startDate = trade_date + datetime.timedelta(days=-(count + holidays))
spy = ts.get_hist_data(stock_code, start=startDate.strftime("%Y-%m-%d"),
end=trade_date.strftime("%Y-%m-%d"),
ktype=kline_type)
else:
break
except (RuntimeError, TypeError, NameError, IOError, ValueError):
return []
return spy[:count].sort_index()
def makeDataFrame(self, dataList, colName):
return pd.DataFrame(dataList, columns=colName)
if __name__ == '__main__':
    app_path = os.path.split(os.path.realpath(__file__))[0]
    dp = DataProvider(app_path)
    print dp.appPath
    dp1 = DataProvider('e:\\ss')
    print dp1.appPath
    print dp.get_code_list(app_path)
|
apache-2.0
|
florianpirchner/mobadsl
|
org.mobadsl.semantic.model/src/org/mobadsl/semantic/model/moba/MobaModelFeature.java
|
3207
|
/**
*/
package org.mobadsl.semantic.model.moba;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Model Feature</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* </p>
* <ul>
* <li>{@link org.mobadsl.semantic.model.moba.MobaModelFeature#getId <em>Id</em>}</li>
* <li>{@link org.mobadsl.semantic.model.moba.MobaModelFeature#getName <em>Name</em>}</li>
* <li>{@link org.mobadsl.semantic.model.moba.MobaModelFeature#getVersion <em>Version</em>}</li>
* </ul>
*
* @see org.mobadsl.semantic.model.moba.MobaPackage#getMobaModelFeature()
* @model abstract="true"
* @generated
*/
public interface MobaModelFeature extends MobaFriendsAble {
/**
* Returns the value of the '<em><b>Id</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Id</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Id</em>' attribute.
* @see #isSetId()
* @see org.mobadsl.semantic.model.moba.MobaPackage#getMobaModelFeature_Id()
* @model unsettable="true" transient="true" changeable="false" volatile="true" derived="true"
* @generated
*/
String getId();
/**
* Returns whether the value of the '{@link org.mobadsl.semantic.model.moba.MobaModelFeature#getId <em>Id</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Id</em>' attribute is set.
* @see #getId()
* @generated
*/
boolean isSetId();
/**
* Returns the value of the '<em><b>Name</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Name</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Name</em>' attribute.
* @see #setName(String)
* @see org.mobadsl.semantic.model.moba.MobaPackage#getMobaModelFeature_Name()
* @model
* @generated
*/
String getName();
/**
* Sets the value of the '{@link org.mobadsl.semantic.model.moba.MobaModelFeature#getName <em>Name</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Name</em>' attribute.
* @see #getName()
* @generated
*/
void setName(String value);
/**
* Returns the value of the '<em><b>Version</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Version</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Version</em>' attribute.
* @see #setVersion(String)
* @see org.mobadsl.semantic.model.moba.MobaPackage#getMobaModelFeature_Version()
* @model
* @generated
*/
String getVersion();
/**
* Sets the value of the '{@link org.mobadsl.semantic.model.moba.MobaModelFeature#getVersion <em>Version</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Version</em>' attribute.
* @see #getVersion()
* @generated
*/
void setVersion(String value);
} // MobaModelFeature
|
apache-2.0
|
xthgrey/intelligentassistant
|
app/src/main/java/com/xth/intelligentassistant/gson/News.java
|
698
|
package com.xth.intelligentassistant.gson;
import com.google.gson.annotations.SerializedName;
/**
* Created by XTH on 2017/6/7.
*/
public class News {
public int code;
public String text;
@SerializedName("list")
    public NewsList[] list;
public class NewsList {
public String article;
public String source;
public String icon;
public String detailurl;
public NewsList() {
article = "";
source = "";
icon = "";
detailurl = "";
}
}
    public News() {
        code = 0;
        text = "";
        list = new NewsList[3];
        for (int i = 0; i < 3; i++) {
            list[i] = new NewsList();
        }
    }
}
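/*
 * A minimal deserialization sketch with Gson (the payload is illustrative; note that
 * declaring NewsList as a static nested class would make Gson deserialization more
 * reliable, since non-static inner classes need an enclosing instance):
 *
 *   String json = "{\"code\":100,\"text\":\"news\",\"list\":[{\"article\":\"title\","
 *           + "\"source\":\"src\",\"icon\":\"\",\"detailurl\":\"http://example.com\"}]}";
 *   News news = new com.google.gson.Gson().fromJson(json, News.class);
 */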
|
apache-2.0
|
ankit-gaur/Coding
|
355C.cpp
|
1039
|
#include<bits/stdc++.h>
using namespace std;
#define V vector
typedef long long int LL;
typedef V<int> vi;
typedef V<LL> vl;
typedef V<pair<int ,int>> vpii;
#define rep(i,a) for(int i = 0; i<a ; i++)
#define fov(i,v) rep(i,v.size())
#define fs first
#define sc second
#define mp make_pair
#define pb push_back
#define el cout<<endl
const int inf = numeric_limits<int>::max();
const LL linf = numeric_limits<LL>::max();
LL n,cl,cr,ql,qr;
vl vw;
LL minCostUtil(int l,int r, int d){
if(l>=r){
if(d==0)return min(ql+vw[l]*l,vw[l]*r);
else return min(qr+vw[l]*r,vw[l]*l);
}
LL costL = vw[l];
LL costR = vw[r];
if(d==0)
return min(costL*cl+ql+minCostUtil(l+1,r,0),costR*cr+minCostUtil(l,r-1,1));
else return min(costL*cl+minCostUtil(l+1,r,0),costR*cr+qr+minCostUtil(l,r-1,1));
}
LL minCost(){
LL costL = vw[0];
LL costR = vw[n-1];
return min(costL*cl+minCostUtil(1,n-1,0),costR*cr+minCostUtil(0,n-2,1));
}
int main(){
cin>>n>>cl>>cr>>ql>>qr;
rep(i,n){
LL w; cin>>w;
vw.pb(w);
}
cout<<minCost();
return 0;
}
|
apache-2.0
|
trask/glowroot
|
agent/embedded/src/test/java/org/glowroot/agent/embedded/util/CappedDatabaseOutputStreamTest.java
|
11666
|
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.agent.embedded.util;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.RandomAccessFile;
import java.io.Writer;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import com.google.common.base.Ticker;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
public class CappedDatabaseOutputStreamTest {
private static final int BLOCK_HEADER_SIZE = 8;
private File tempFile;
private ScheduledExecutorService scheduledExecutor;
private CappedDatabaseOutputStream cappedOut;
private RandomAccessFile in;
@Before
public void onBefore() throws IOException {
tempFile = File.createTempFile("glowroot-test-", ".capped.txt");
scheduledExecutor = Executors.newSingleThreadScheduledExecutor();
cappedOut = CappedDatabaseOutputStream.create(tempFile, 10, scheduledExecutor,
Ticker.systemTicker());
in = new RandomAccessFile(tempFile, "r");
}
@After
public void onAfter() throws IOException {
scheduledExecutor.shutdownNow();
cappedOut.close();
in.close();
tempFile.delete();
}
@Test
public void shouldWrite() throws IOException {
// given
Writer out = new OutputStreamWriter(cappedOut);
String text = "0123456789";
// when
cappedOut.startBlock();
out.write(text);
out.flush();
long cappedId = cappedOut.endBlock();
// then
assertWrite(text, cappedId);
}
@Test
public void shouldWriteUsingByteArray() throws IOException {
// given
String text = "0123456789";
// when
cappedOut.startBlock();
cappedOut.write(text.getBytes());
cappedOut.flush();
long cappedId = cappedOut.endBlock();
// then
assertWrite(text, cappedId);
}
@Test
public void shouldWriteUsingSingleBytes() throws IOException {
// when
cappedOut.startBlock();
cappedOut.write('0');
cappedOut.write('1');
cappedOut.write('2');
cappedOut.write('3');
cappedOut.write('4');
cappedOut.write('5');
cappedOut.write('6');
cappedOut.write('7');
cappedOut.write('8');
cappedOut.write('9');
cappedOut.flush();
long cappedId = cappedOut.endBlock();
// then
assertWrite("0123456789", cappedId);
}
@Test
public void shouldWrap() throws IOException {
// given
Writer out = new OutputStreamWriter(cappedOut);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 600; i++) {
sb.append("0123456789");
}
String text = sb.toString();
cappedOut.startBlock();
out.write(text);
out.flush();
cappedOut.endBlock();
// when
out = new OutputStreamWriter(cappedOut);
cappedOut.startBlock();
out.write(text);
out.flush();
long cappedId = cappedOut.endBlock();
// then
assertThat(cappedId).isEqualTo(6000 + BLOCK_HEADER_SIZE);
long currIndex = in.readLong();
int cappedDatabaseSizeKb = in.readInt();
long lastCompactionBaseIndex = in.readLong();
assertThat(currIndex).isEqualTo(12000 + 2 * BLOCK_HEADER_SIZE);
assertThat(cappedDatabaseSizeKb).isEqualTo(10);
assertThat(lastCompactionBaseIndex).isEqualTo(0);
in.seek(CappedDatabaseOutputStream.HEADER_SKIP_BYTES + 6000 + BLOCK_HEADER_SIZE);
long blockSize = in.readLong();
assertThat(blockSize).isEqualTo(6000);
byte[] bytes = new byte[(int) blockSize];
int remaining = 10240 - 6000 - 2 * BLOCK_HEADER_SIZE;
in.readFully(bytes, 0, remaining);
in.seek(CappedDatabaseOutputStream.HEADER_SKIP_BYTES);
in.readFully(bytes, remaining, 6000 - remaining);
String content = new String(bytes);
assertThat(content).isEqualTo(text);
}
@Test
public void shouldWrapAndKeepGoing() throws IOException {
// given
Writer out = new OutputStreamWriter(cappedOut);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 600; i++) {
sb.append("0123456789");
}
String text = sb.toString();
cappedOut.startBlock();
out.write(text);
out.flush();
cappedOut.endBlock();
cappedOut.startBlock();
out.write(text);
out.flush();
cappedOut.endBlock();
// when
out = new OutputStreamWriter(cappedOut);
cappedOut.startBlock();
out.write(text);
out.flush();
long cappedId = cappedOut.endBlock();
// then
assertThat(cappedId).isEqualTo(12000 + 2 * BLOCK_HEADER_SIZE);
long currIndex = in.readLong();
int cappedDatabaseSizeKb = in.readInt();
long lastCompactionBaseIndex = in.readLong();
assertThat(currIndex).isEqualTo(18000 + 3 * BLOCK_HEADER_SIZE);
assertThat(cappedDatabaseSizeKb).isEqualTo(10);
assertThat(lastCompactionBaseIndex).isEqualTo(0);
int totalOfFirstTwoBlocks = 2 * (6000 + BLOCK_HEADER_SIZE);
in.seek(CappedDatabaseOutputStream.HEADER_SKIP_BYTES + totalOfFirstTwoBlocks - 10240);
long blockSize = in.readLong();
assertThat(blockSize).isEqualTo(6000);
byte[] bytes = new byte[(int) blockSize];
in.readFully(bytes, 0, bytes.length);
String content = new String(bytes);
assertThat(content).isEqualTo(text);
}
@Test
public void shouldWrapAndResize() throws IOException {
// given
Writer out = new OutputStreamWriter(cappedOut);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 600; i++) {
sb.append("0123456789");
}
String text = sb.toString();
cappedOut.startBlock();
out.write(text);
out.flush();
cappedOut.endBlock();
cappedOut.startBlock();
out.write(text);
out.flush();
long cappedId = cappedOut.endBlock();
// when
// have to close in before resizing
in.close();
cappedOut.resize(20);
in = new RandomAccessFile(tempFile, "r");
// then
assertThat(cappedId).isEqualTo(6000 + BLOCK_HEADER_SIZE);
long currIndex = in.readLong();
int cappedDatabaseSizeKb = in.readInt();
long lastCompactionBaseIndex = in.readLong();
assertThat(currIndex).isEqualTo(12000 + 2 * BLOCK_HEADER_SIZE);
assertThat(cappedDatabaseSizeKb).isEqualTo(20);
int total = 2 * (6000 + BLOCK_HEADER_SIZE);
assertThat(lastCompactionBaseIndex).isEqualTo(total - 10240);
int totalOfFirstBlock = 6000 + BLOCK_HEADER_SIZE;
in.seek(CappedDatabaseOutputStream.HEADER_SKIP_BYTES + 10240 - totalOfFirstBlock);
long blockSize = in.readLong();
assertThat(blockSize).isEqualTo(6000);
byte[] bytes = new byte[(int) blockSize];
in.readFully(bytes, 0, 6000);
String content = new String(bytes);
assertThat(content).isEqualTo(text);
}
@Test
public void shouldWrapAndResizeVerySmall() throws IOException {
// given
Writer out = new OutputStreamWriter(cappedOut);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 60; i++) {
sb.append("0123456789");
}
String text = sb.toString();
for (int i = 0; i < 9; i++) {
cappedOut.startBlock();
out.write(text);
out.flush();
cappedOut.endBlock();
}
cappedOut.startBlock();
out.write(text);
out.flush();
long cappedId = cappedOut.endBlock();
// when
// have to close in before resizing
in.close();
cappedOut.resize(1);
in = new RandomAccessFile(tempFile, "r");
// then
assertThat(cappedId).isEqualTo(9 * (600 + BLOCK_HEADER_SIZE));
long currIndex = in.readLong();
int cappedDatabaseSizeKb = in.readInt();
long lastCompactionBaseIndex = in.readLong();
assertThat(currIndex).isEqualTo(10 * (600 + BLOCK_HEADER_SIZE));
assertThat(cappedDatabaseSizeKb).isEqualTo(1);
int total = 10 * (600 + BLOCK_HEADER_SIZE);
assertThat(lastCompactionBaseIndex).isEqualTo(total - 1024);
in.seek(CappedDatabaseOutputStream.HEADER_SKIP_BYTES + 416);
long blockSize = in.readLong();
assertThat(blockSize).isEqualTo(600);
byte[] bytes = new byte[(int) blockSize];
in.readFully(bytes, 0, 600);
String content = new String(bytes);
assertThat(content).isEqualTo(text);
}
@Test
public void shouldWrapWithoutEnoughSpaceAtEndForContiguousBlockHeader() throws IOException {
// given
String text = "0123456789";
cappedOut.startBlock();
int numBytesToWrite = 10240 - BLOCK_HEADER_SIZE - 1;
for (int i = 0; i < numBytesToWrite; i++) {
cappedOut.write(0);
}
cappedOut.flush();
cappedOut.endBlock();
// when
        Writer out = new OutputStreamWriter(cappedOut);
cappedOut.startBlock();
out.write(text);
out.flush();
long cappedId = cappedOut.endBlock();
// then
assertThat(cappedId).isEqualTo(10240);
long currIndex = in.readLong();
int cappedDatabaseSizeKb = in.readInt();
long lastCompactionBaseIndex = in.readLong();
assertThat(currIndex).isEqualTo(10240 + BLOCK_HEADER_SIZE + text.length());
assertThat(cappedDatabaseSizeKb).isEqualTo(10);
assertThat(lastCompactionBaseIndex).isEqualTo(0);
in.seek(CappedDatabaseOutputStream.HEADER_SKIP_BYTES);
long blockSize = in.readLong();
assertThat(blockSize).isEqualTo(text.length());
byte[] bytes = new byte[(int) blockSize];
in.readFully(bytes, 0, text.length());
String content = new String(bytes);
assertThat(content).isEqualTo(text);
}
private void assertWrite(String text, long cappedId) throws IOException {
assertThat(cappedId).isEqualTo(0);
long currIndex = in.readLong();
int cappedDatabaseSizeKb = in.readInt();
long lastCompactionBaseIndex = in.readLong();
assertThat(currIndex).isEqualTo(10 + BLOCK_HEADER_SIZE);
assertThat(cappedDatabaseSizeKb).isEqualTo(10);
assertThat(lastCompactionBaseIndex).isEqualTo(0);
long blockSize = in.readLong();
assertThat(blockSize).isEqualTo(10);
byte[] bytes = new byte[(int) blockSize];
in.readFully(bytes, 0, bytes.length);
String content = new String(bytes);
assertThat(content).isEqualTo(text);
}
}
|
apache-2.0
|
alibaba/nacos
|
console-ui/src/components/RegionGroup/RegionGroup.js
|
9949
|
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from 'react';
import PropTypes from 'prop-types';
import $ from 'jquery';
import { Button } from '@alifd/next';
import NameSpaceList from '../NameSpaceList';
import { setParams, request } from '../../globalLib';
import './index.scss';
class RegionGroup extends React.Component {
static propTypes = {
url: PropTypes.string,
left: PropTypes.any,
right: PropTypes.any,
namespaceCallBack: PropTypes.func,
setNowNameSpace: PropTypes.func,
};
constructor(props) {
super(props);
this.state = {
instanceData: [],
currRegionId: '',
url: props.url || '/diamond-ops/env/domain',
left: props.left,
right: props.right,
regionWidth: 700,
hideRegionList: false,
};
this.currRegionId = '';
this.styles = {
title: {
// marginTop: '8px',
// marginBottom: '8px',
margin: 0,
lineHeight: '32px',
display: 'inline-block',
textIndent: '8px',
marginRight: '8px',
borderLeft: '2px solid #88b7E0',
fontSize: '16px',
},
};
this.nameSpaceList = React.createRef();
this.mainRef = null;
this.titleRef = null;
this.regionRef = null;
this.extraRef = null;
this.resizer = null;
this.timer = null;
this.handleResize = this.handleResize.bind(this);
this.handleAliyunNav = this.handleAliyunNav.bind(this);
!window.viewframeSetting && (window.viewframeSetting = {});
}
componentDidMount() {
// this.setRegionWidth();
// window.postMessage({ type: 'CONSOLE_HAS_REGION' }, window.location)
// $(".aliyun-console-regionbar").show();
// $(window).bind("resize", this.handleResize);
// window.addEventListener("message", this.handleAliyunNav);
// this.getRegionList();
// setTimeout(() => {
// this.setRegionWidth();
// this.handleRegionListStatus();
// });
const nameSpaceList = this.nameSpaceList.current;
if (nameSpaceList) {
nameSpaceList.getInstance().getNameSpaces();
}
}
componentWillUnmount() {
$(window).unbind('resize', this.handleResize);
window.postMessage({ type: 'CONSOLE_HIDE_REGION' }, window.location);
$('.aliyun-console-regionbar').hide();
}
UNSAFE_componentWillReceiveProps(nextProps) {
this.setState({
url: nextProps.url,
left: nextProps.left,
right: nextProps.right,
});
}
handleAliyunNav(event = {}) {
const { type, payload } = (event && event.data) || {};
switch (type) {
case 'TOPBAR_SIDEBAR_DID_MOUNT':
// this.getRegionList();
this.handleRegionListStatus();
this.changeRegionBarRegionId(this.currRegionId);
setTimeout(() => {
this.changeRegionBarRegionId(this.currRegionId);
}, 1000);
break;
case 'CONSOLE_REGION_CHANGE':
this.changeTableData(payload.toRegionId);
break;
default:
break;
}
}
handleRegionListStatus() {
    const isPrivateCloud = window.globalConfig && window.globalConfig.isParentEdas();
    this.setState(
      {
        hideRegionList: isPrivateCloud
          ? false
          : window.location.search.indexOf('hideTopbar=') === -1,
},
() => this.setRegionWidth()
);
}
handleResize() {
clearTimeout(this.timer);
this.timer = setTimeout(() => {
this.setRegionWidth();
}, 100);
}
setRegionWidth() {
try {
const mainWidth = $(this.mainRef).width();
const titleWidth = $(this.titleRef).width();
const extraWidth = $(this.extraRef).width();
const regionWidth = mainWidth - extraWidth - titleWidth - 50;
this.setState({
regionWidth: regionWidth > 100 ? regionWidth : 100,
});
} catch (error) {}
}
getRegionList() {
if (window._regionList) {
this.handleRegionList(window._regionList);
} else {
// TODO
const nameSpaceList = this.nameSpaceList.current;
if (nameSpaceList) {
nameSpaceList.getInstance().getNameSpaces();
}
request({
url: this.state.url,
data: {},
success: res => {
if (res && res.data) {
window._regionList = res.data;
this.handleRegionList(res.data);
}
},
});
}
}
handleRegionList(data = {}) {
let envcontent = '';
const { envGroups } = data;
let instanceData = [];
for (let i = 0; i < envGroups.length; i++) {
const obj = envGroups[i].envs || [];
instanceData = obj;
for (let j = 0; j < obj.length; j++) {
if (obj[j].active) {
envcontent = obj[j].serverId;
}
}
}
this.currRegionId = envcontent || (instanceData[0] && instanceData[0].serverId);
setParams('serverId', this.currRegionId);
this.setRegionBarRegionList(instanceData, this.currRegionId);
this.changeRegionBarRegionId(this.currRegionId);
setTimeout(() => {
this.changeRegionBarRegionId(this.currRegionId);
}, 1000);
const nameSpaceList = this.nameSpaceList.current;
if (nameSpaceList) {
nameSpaceList.getInstance().getNameSpaces();
}
this.setState({
currRegionId: envcontent,
instanceData,
});
}
changeTableData(serverId) {
setParams('serverId', serverId);
if (this.state.currRegionId === serverId) {
return;
}
this.currRegionId = serverId;
const { instanceData } = this.state;
let inEdas = false;
if (window.globalConfig.isParentEdas()) {
inEdas = true;
}
instanceData.forEach(obj => {
if (obj.serverId === serverId) {
const lastHash = window.location.hash.split('?')[0];
if (inEdas) {
setParams('serverId', obj.serverId);
const url = window.location.href;
window.location.href = url;
} else {
let url = obj.domain + window.location.search + lastHash;
if (lastHash.indexOf('serverId') === -1) {
if (lastHash.indexOf('?') === -1) {
url += `?serverId=${serverId}`;
} else {
url += `&serverId=${serverId}`;
}
}
window.location.href = `${window.location.protocol}//${url}`;
}
}
});
}
setRegionBarRegionList(regionList, regionId) {
if (window.viewframeSetting) {
window.viewframeSetting.regionList = regionList;
window.postMessage(
{ type: 'TOGGLE_REGIONBAR_STATUS', payload: { regionList, defaultRegionId: regionId } },
window.location
);
}
}
changeRegionBarRegionId(regionId) {
window.viewframeSetting && (window.viewframeSetting.defaultRegionId = regionId);
window.postMessage(
{ type: 'SET_ACTIVE_REGION_ID', payload: { defaultRegionId: regionId } },
window.location
);
}
render() {
return (
<div>
<div ref={ref => (this.mainRef = ref)} className="clearfix">
<div style={{ overflow: 'hidden' }}>
<div id="left" style={{ float: 'left', display: 'inline-block', marginRight: 20 }}>
<div
ref={ref => (this.titleRef = ref)}
style={{ display: 'inline-block', verticalAlign: 'top' }}
>
{typeof this.state.left === 'string' ? (
<h5 style={this.styles.title}>{this.state.left}</h5>
) : (
this.state.left
)}
</div>
{this.state.hideRegionList ? null : (
<div
ref={ref => (this.regionRef = ref)}
style={{
width: this.state.regionWidth,
display: 'inline-block',
lineHeight: '40px',
marginLeft: 20,
}}
>
{this.state.instanceData.map((val, key) => (
<Button
key={val.serverId}
type={this.state.currRegionId === val.serverId ? 'primary' : 'normal'}
style={{
fontSize: '12px',
marginRight: 10,
backgroundColor:
this.state.currRegionId === val.serverId ? '#546478' : '#D9DEE4',
}}
onClick={this.changeTableData.bind(this, val.serverId)}
>
{' '}
{val.name}{' '}
</Button>
))}
</div>
)}
</div>
<div
ref={ref => (this.extraRef = ref)}
style={{ float: 'right', display: 'inline-block', paddingTop: 6 }}
>
{Object.prototype.toString.call(this.state.right) === '[object Function]'
? this.state.right()
: this.state.right}
</div>
</div>
{this.props.namespaceCallBack && (
<div>
<NameSpaceList
ref={this.nameSpaceList}
namespaceCallBack={this.props.namespaceCallBack}
setNowNameSpace={this.props.setNowNameSpace}
/>
</div>
)}
</div>
</div>
);
}
}
export default RegionGroup;
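// A minimal usage sketch (all props below are illustrative):
//
//   <RegionGroup
//     left={'Configuration Management'}
//     namespaceCallBack={namespace => this.queryList(namespace)}
//     setNowNameSpace={(name, id) => this.setState({ nsName: name, nsId: id })}
//   />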
|
apache-2.0
|
LindaLawton/Google-APIs-PHP-Samples
|
Samples/DCM/DFA Reporting And Trafficking API/v2.8/OperatingSystemVersionsGetSample.php
|
3959
|
<?php
// Copyright 2017 DAIMTO ([Linda Lawton](https://twitter.com/LindaLawtonDK)) : [www.daimto.com](http://www.daimto.com/)
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by DAIMTO-Google-apis-Sample-generator 1.0.0
// Template File Name: methodTemplate.tt
// Build date: 2017-10-08
// PHP generator version: 1.0.0
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
// About
//
// Unofficial sample for the dfareporting v2.8 API for PHP.
// This sample is designed to be used with the Google PHP client library. (https://github.com/google/google-api-php-client)
//
// API Description: Manages your DoubleClick Campaign Manager ad campaigns and reports.
// API Documentation Link https://developers.google.com/doubleclick-advertisers/
//
// Discovery Doc https://www.googleapis.com/discovery/v1/apis/dfareporting/v2_8/rest
//
//------------------------------------------------------------------------------
// Installation
//
// The preferred method is via https://getcomposer.org. Follow the installation instructions https://getcomposer.org/doc/00-intro.md
// if you do not already have composer installed.
//
// Once composer is installed, execute the following command in your project root to install this library:
//
// composer require google/apiclient:^2.0
//
//------------------------------------------------------------------------------
// Load the Google API PHP Client Library.
require_once __DIR__ . '/vendor/autoload.php';
session_start();
/***************************************************
 * Include this line for service account authentication. Note: Not all APIs support service accounts.
 //require_once __DIR__ . '/ServiceAccount.php';
 * Include the following four lines for Oauth2 authentication.
* require_once __DIR__ . '/Oauth2Authentication.php';
* $_SESSION['mainScript'] = basename($_SERVER['PHP_SELF']); // Oauth2callback.php will return here.
* $client = getGoogleClient();
* $service = new Google_Service_Dfareporting($client);
****************************************************/
// Optional parameters can be set as needed.
$optParams = array(
'fields' => '*'
);
// Single Request.
$results = operatingSystemVersionsGetExample($service, $profileId, $id, $optParams);
/**
* Gets one operating system version by ID.
* @service Authenticated Dfareporting service.
 * @optParams Optional parameters are not required by a request.
* @id Operating system version ID.
* @profileId User profile ID associated with this request.
* @return OperatingSystemVersion
*/
function operatingSystemVersionsGetExample($service, $profileId, $id, $optParams)
{
try
{
// Parameter validation.
if ($service == null)
throw new Exception("service is required.");
if ($optParams == null)
throw new Exception("optParams is required.");
        if ($id == null)
            throw new Exception("id is required.");
        if ($profileId == null)
            throw new Exception("profileId is required.");
// Make the request and return the results.
return $service->operatingSystemVersions->GetOperatingSystemVersions($profileId, $id, $optParams);
}
catch (Exception $e)
{
print "An error occurred: " . $e->getMessage();
}
}
?>
|
apache-2.0
|
google/data-transfer-project
|
portability-types-common/src/main/java/org/datatransferproject/types/common/models/playlists/MusicRecording.java
|
1121
|
package org.datatransferproject.types.common.models.playlists;
import org.datatransferproject.types.common.models.CreativeWork;
/**
* POJO for https://schema.org/MusicRecording
*/
public class MusicRecording extends CreativeWork {
// Note: this is only a partial implementation covering the fields needed so
// far; feel free to add more from the spec as needed.
private String isrcCode;
private MusicAlbum musicAlbum;
private MusicGroup byArtist;
public MusicRecording(
String identifier,
String headline,
String isrcCode,
MusicAlbum musicAlbum,
MusicGroup byArtist) {
super(identifier);
setHeadline(headline);
this.isrcCode = isrcCode;
this.musicAlbum = musicAlbum;
this.byArtist = byArtist;
}
public String getIsrcCode() {
return isrcCode;
}
public MusicAlbum getMusicAlbum() {
return musicAlbum;
}
public MusicGroup getByArtist() {
return byArtist;
}
@Override
public String toString() {
return String.format("MusicRecording{id: %s, headline: %s, isrc: %s}",
getIdentifier(), getHeadline(), getIsrcCode());
}
}
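// A construction sketch (hypothetical values; the MusicAlbum and MusicGroup
// instances would be built elsewhere), illustrating the constructor above:
//
//   MusicRecording recording = new MusicRecording(
//       "track-123",      // identifier (hypothetical)
//       "Some Song",      // headline
//       "USRC17607839",   // ISRC code (hypothetical)
//       album, artist);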
|
apache-2.0
|
kubukoz/scala-99
|
src/test/scala/com/kubukoz/scala99/P10Tests.scala
|
529
|
package com.kubukoz.scala99
import com.kubukoz.scala99.P10.encode
import org.scalatest.{FlatSpec, Matchers}
class P10Tests extends FlatSpec with Matchers {
"encode" should "encode Nil" in {
encode(Nil) shouldBe Nil
}
it should "encode a list with a single element" in {
encode(List('a)) shouldBe List((1, 'a))
}
it should "encode the supplied case" in {
encode(List('a, 'a, 'a, 'a, 'b, 'c, 'c, 'a, 'a, 'd, 'e, 'e, 'e, 'e)) shouldBe
List((4, 'a), (1, 'b), (2, 'c), (2, 'a), (1, 'd), (4, 'e))
}
}
|
apache-2.0
|
b002368/chef-repo
|
lib/chef/chef_fs/config.rb
|
12102
|
#
# Author:: John Keiser (<jkeiser@chef.io>)
# Copyright:: Copyright 2012-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/log"
require "chef/chef_fs/path_utils"
class Chef
module ChefFS
#
# Helpers to take Chef::Config and create chef_fs and local_fs (ChefFS
# objects representing the server and local repository, respectively).
#
class Config
# Not all of our object types pluralize by adding an 's', so we map them
# out here:
INFLECTIONS = {
"acls" => "acl",
"client_keys" => "client_key",
"clients" => "client",
"cookbooks" => "cookbook",
"cookbook_artifacts" => "cookbook_artifact",
"containers" => "container",
"data_bags" => "data_bag",
"environments" => "environment",
"groups" => "group",
"nodes" => "node",
"roles" => "role",
"users" => "user",
"policies" => "policy",
"policy_groups" => "policy_group",
}
INFLECTIONS.each { |k, v| k.freeze; v.freeze }
INFLECTIONS.freeze
# ChefFS supports three modes of operation: "static", "everything", and
# "hosted_everything". These names are antiquated since Chef 12 moved
# multi-tenant and RBAC to the open source product. In practice, they
# mean:
#
# * static: just static objects that are included in a traditional
# chef-repo, with no support for anything introduced in Chef 12 or
# later.
# * everything: all of the objects supported by the open source Chef
# Server 11.x
# * hosted_everything: (the name comes from Hosted Chef) supports
# everything in Chef Server 12 and later, including RBAC objects and
# Policyfile objects.
#
# The "static" and "everything" modes are used for backup and
# upgrade/migration of older Chef Servers, so they should be considered
# frozen in time.
CHEF_11_OSS_STATIC_OBJECTS = %w{cookbooks cookbook_artifacts data_bags environments roles}.freeze
CHEF_11_OSS_DYNAMIC_OBJECTS = %w{clients nodes users}.freeze
RBAC_OBJECT_NAMES = %w{acls containers groups }.freeze
CHEF_12_OBJECTS = %w{ cookbook_artifacts policies policy_groups client_keys }.freeze
STATIC_MODE_OBJECT_NAMES = CHEF_11_OSS_STATIC_OBJECTS
EVERYTHING_MODE_OBJECT_NAMES = (CHEF_11_OSS_STATIC_OBJECTS + CHEF_11_OSS_DYNAMIC_OBJECTS).freeze
HOSTED_EVERYTHING_MODE_OBJECT_NAMES = (EVERYTHING_MODE_OBJECT_NAMES + RBAC_OBJECT_NAMES + CHEF_12_OBJECTS).freeze
#
# Create a new Config object which can produce a chef_fs and local_fs.
#
# ==== Arguments
#
# [chef_config]
# A hash that looks suspiciously like +Chef::Config+. These hash keys
# include:
#
# :chef_repo_path::
# The root where all local chef object data is stored. Mirrors
# +Chef::Config.chef_repo_path+
# :cookbook_path, node_path, ...::
# Paths to cookbooks/, nodes/, data_bags/, etc. Mirrors
# +Chef::Config.cookbook_path+, etc. Defaults to
# +<chef_repo_path>/cookbooks+, etc.
# :repo_mode::
# The directory format on disk. 'everything', 'hosted_everything' and
# 'static'. Default: autodetected based on whether the URL has
# "/organizations/NAME."
# :versioned_cookbooks::
# If true, the repository contains cookbooks with versions in their
# name (apache2-1.0.0). If false, the repository just has one version
# of each cookbook and the directory has the cookbook name (apache2).
# Default: +false+
# :chef_server_url::
# The URL to the Chef server, e.g. https://api.opscode.com/organizations/foo.
# Used as the server for the remote chef_fs, and to "guess" repo_mode
# if not specified.
# :node_name:: The username to authenticate to the Chef server with.
# :client_key:: The private key for the user for authentication
# :environment:: The environment in which you are presently working
# :repo_mode::
# The repository mode, :hosted_everything, :everything or :static.
# This determines the set of subdirectories the Chef server will offer
# up.
# :versioned_cookbooks:: Whether or not to include versions in cookbook names
#
# [cwd]
# The current working directory to base relative Chef paths from.
# Defaults to +Dir.pwd+.
#
# [options]
# A hash of other, not-suspiciously-like-chef-config options:
# :cookbook_version::
# When downloading cookbooks, download this cookbook version instead
# of the latest.
#
# [ui]
# The object to print output to, with "output", "warn" and "error"
# (looks a little like a Chef::Knife::UI object, obtainable from
# Chef::Knife.ui).
#
# ==== Example
#
# require 'chef/chef_fs/config'
# config = Chef::ChefFS::Config.new
# config.chef_fs.child('cookbooks').children.each do |cookbook|
# puts "Cookbook on server: #{cookbook.name}"
# end
# config.local_fs.child('cookbooks').children.each do |cookbook|
# puts "Local cookbook: #{cookbook.name}"
# end
#
def initialize(chef_config = Chef::Config, cwd = Dir.pwd, options = {}, ui = nil)
@chef_config = chef_config
@cwd = File.expand_path(cwd)
@cookbook_version = options[:cookbook_version]
if @chef_config[:repo_mode] == "everything" && is_hosted? && !ui.nil?
ui.warn %Q{You have repo_mode set to 'everything', but your chef_server_url
looks like it might be a hosted setup. If this is the case please use
hosted_everything or allow repo_mode to default}
end
# Default to getting *everything* from the server.
if !@chef_config[:repo_mode]
if is_hosted?
@chef_config[:repo_mode] = "hosted_everything"
else
@chef_config[:repo_mode] = "everything"
end
end
end
attr_reader :chef_config
attr_reader :cwd
attr_reader :cookbook_version
def is_hosted?
@chef_config[:chef_server_url] =~ /\/+organizations\/.+/
end
def chef_fs
@chef_fs ||= create_chef_fs
end
def create_chef_fs
require "chef/chef_fs/file_system/chef_server/chef_server_root_dir"
Chef::ChefFS::FileSystem::ChefServer::ChefServerRootDir.new("remote", @chef_config, :cookbook_version => @cookbook_version)
end
def local_fs
@local_fs ||= create_local_fs
end
def create_local_fs
require "chef/chef_fs/file_system/repository/chef_repository_file_system_root_dir"
Chef::ChefFS::FileSystem::Repository::ChefRepositoryFileSystemRootDir.new(object_paths, Array(chef_config[:chef_repo_path]).flatten, @chef_config)
end
# Returns the given real path's location relative to the server root.
#
# If chef_repo is /home/jkeiser/chef_repo,
# and pwd is /home/jkeiser/chef_repo/cookbooks,
# server_path('blah') == '/cookbooks/blah'
# server_path('../roles/blah.json') == '/roles/blah'
# server_path('../../readme.txt') == nil
# server_path('*/*ab*') == '/cookbooks/*/*ab*'
# server_path('/home/jkeiser/chef_repo/cookbooks/blah') == '/cookbooks/blah'
# server_path('/home/*/chef_repo/cookbooks/blah') == nil
#
# If there are multiple different, manually specified paths to object locations
# (cookbooks, roles, data bags, etc. can all have separate paths), and cwd+the
# path reaches into one of them, we will return a path relative to the first
# one to match it. Otherwise we expect the path provided to be to the chef
# repo path itself. Paths that are not available on the server are not supported.
#
# Globs are allowed as well, but globs outside server paths are NOT
# (presently) supported. See above examples. TODO support that.
#
# If the path does not reach into ANY specified directory, nil is returned.
def server_path(file_path)
target_path = Chef::ChefFS::PathUtils.realest_path(file_path, @cwd)
# Check all object paths (cookbooks_dir, data_bags_dir, etc.)
# These are either manually specified by the user or autogenerated relative
# to chef_repo_path.
object_paths.each_pair do |name, paths|
paths.each do |path|
object_abs_path = Chef::ChefFS::PathUtils.realest_path(path, @cwd)
if relative_path = PathUtils.descendant_path(target_path, object_abs_path)
return Chef::ChefFS::PathUtils.join("/#{name}", relative_path)
end
end
end
# Check chef_repo_path
Array(@chef_config[:chef_repo_path]).flatten.each do |chef_repo_path|
# We're using realest_path here but we really don't need to - we can just expand the
# path and use realpath because a repo_path if provided *must* exist.
realest_chef_repo_path = Chef::ChefFS::PathUtils.realest_path(chef_repo_path, @cwd)
if Chef::ChefFS::PathUtils.os_path_eq?(target_path, realest_chef_repo_path)
return "/"
end
end
nil
end
# The current directory, relative to server root. This is a case-sensitive server path.
# It only exists if the current directory is a child of one of the recognized object_paths below.
def base_path
@base_path ||= server_path(@cwd)
end
# Print the given server path, relative to the current directory
def format_path(entry)
server_path = entry.respond_to?(:display_path) ? entry.display_path : entry.path
if base_path && server_path[0, base_path.length] == base_path
if server_path == base_path
return "."
elsif server_path[base_path.length, 1] == "/"
return server_path[base_path.length + 1, server_path.length - base_path.length - 1]
elsif base_path == "/" && server_path[0, 1] == "/"
return server_path[1, server_path.length - 1]
end
end
server_path
end
private
def object_paths
@object_paths ||= begin
result = {}
case @chef_config[:repo_mode]
when "static"
object_names = STATIC_MODE_OBJECT_NAMES
when "hosted_everything"
object_names = HOSTED_EVERYTHING_MODE_OBJECT_NAMES
else
object_names = EVERYTHING_MODE_OBJECT_NAMES
end
object_names.each do |object_name|
# cookbooks -> cookbook_path
singular_name = INFLECTIONS[object_name]
raise "Unknown object name #{object_name}" unless singular_name
variable_name = "#{singular_name}_path"
paths = Array(@chef_config[variable_name]).flatten
result[object_name] = paths.map { |path| File.expand_path(path) }
end
result
end
end
end
end
end
|
apache-2.0
|
tudarmstadt-lt/topicrawler
|
lt.lm/src/main/java/de/tudarmstadt/lt/lm/app/Ngrams.java
|
10325
|
/*
* Copyright 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.lt.lm.app;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.Reader;
import java.util.List;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.io.LineIterator;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.tudarmstadt.lt.lm.AbstractLanguageModel;
import de.tudarmstadt.lt.lm.DummyLM;
import de.tudarmstadt.lt.lm.service.AbstractStringProvider;
import de.tudarmstadt.lt.lm.service.LMProviderUtils;
import de.tudarmstadt.lt.lm.service.LtSegProvider;
import de.tudarmstadt.lt.lm.util.Properties;
import de.tudarmstadt.lt.utilities.cli.CliUtils;
import de.tudarmstadt.lt.utilities.cli.ExtendedGnuParser;
import de.tudarmstadt.lt.utilities.collections.FixedSizeFifoLinkedList;
/**
* TODO: parallelize, merge GenerateNgrams into this
*
* @author Steffen Remus
*/
public class Ngrams implements Runnable{
private final static String USAGE_HEADER = "Options:";
private static Logger LOG = LoggerFactory.getLogger(Ngrams.class);
public static void main(String[] args) throws ClassNotFoundException {
new Ngrams(args).run();
}
public Ngrams() { /* default constructor, pass options via new Ngrams(){{ param=value; ... }} */ }
@SuppressWarnings("static-access")
public Ngrams(String[] args) {
Options opts = new Options();
opts.addOption(OptionBuilder.withLongOpt("help").withDescription("Display help message.").create("?"));
opts.addOption(OptionBuilder.withLongOpt("ptype").withArgName("class").hasArg().withDescription("specify the instance of the language model provider that you want to use: {LtSegProvider, BreakIteratorStringProvider, UimaStringProvider, PreTokenizedStringProvider} (default: LtSegProvider)").create("p"));
opts.addOption(OptionBuilder.withLongOpt("cardinality").withArgName("ngram-order").hasArg().withDescription("Specify the cardinality of the ngrams (min. 1). Specify a range using 'from-to'. (Examples: 5 = extract 5grams; 1-5 = extract 1grams, 2grams, ..., 5grams; default: 1-5).").create("n"));
opts.addOption(OptionBuilder.withLongOpt("file").withArgName("filename").hasArg().withDescription("specify the file to read from. Specify '-' to read from stdin. (default: '-')").create("f"));
opts.addOption(OptionBuilder.withLongOpt("out").withArgName("name").hasArg().withDescription("Specify the output file. Specify '-' to use stdout. (default: '-').").create("o"));
opts.addOption(OptionBuilder.withLongOpt("accross_sentences").hasOptionalArg().withArgName("{true|false}").withDescription("Generate Ngrams across sentence boundaries.").create("a"));
try {
CommandLine cmd = new ExtendedGnuParser(true).parse(opts, args);
if (cmd.hasOption("help"))
CliUtils.print_usage_quit(System.err, Ngrams.class.getSimpleName(), opts, USAGE_HEADER, null, 0);
_provider_type = cmd.getOptionValue("ptype", LtSegProvider.class.getSimpleName());
_file = cmd.getOptionValue("file", "-");
_out = cmd.getOptionValue("out", "-");
_accross_sentences = cmd.hasOption("accross_sentences");
String order = cmd.getOptionValue("cardinality","1-5");
if(_accross_sentences && cmd.getOptionValue("accross_sentences") != null)
_accross_sentences = Boolean.parseBoolean(cmd.getOptionValue("accross_sentences"));
int dash_index = order.indexOf('-');
_order_to = Integer.parseInt(order.substring(dash_index + 1, order.length()).trim());
_order_from = _order_to;
if(dash_index == 0)
_order_from = 1;
if(dash_index > 0)
_order_from = Math.max(1, Integer.parseInt(order.substring(0,dash_index).trim()));
} catch (Exception e) {
CliUtils.print_usage_quit(System.err, Ngrams.class.getSimpleName(), opts, USAGE_HEADER, String.format("%s: %s%n", e.getClass().getSimpleName(), e.getMessage()), 1);
}
}
String _provider_type;
String _file;
String _out;
int _order_to;
int _order_from;
PrintStream _pout;
boolean _accross_sentences;
boolean _insert_bos = Properties.insertSentenceTags() == 1 || Properties.insertSentenceTags() == 3;
boolean _insert_eos = Properties.insertSentenceTags() == 2 || Properties.insertSentenceTags() == 3;
String _lang = Properties.defaultLanguageCode();
AbstractStringProvider _prvdr;
List<String> _ngram;
long _num_ngrams;
/* (non-Javadoc)
* @see java.lang.Runnable#run()
*/
@Override
public void run() {
_num_ngrams = 0l;
_ngram = new FixedSizeFifoLinkedList<>(_order_to);
_pout = System.out;
if(!"-".equals(_out.trim())){
try {
if(_out.endsWith(".gz"))
_pout = new PrintStream(new GZIPOutputStream(new FileOutputStream(new File(_out))));
else
_pout = new PrintStream(new FileOutputStream(new File(_out), true));
} catch (IOException e) {
LOG.error("Could not open ouput file '{}' for writing.", _out, e);
System.exit(1);
}
}
try{
if(_prvdr == null){
_prvdr = StartLM.getStringProviderInstance(_provider_type);
_prvdr.setLanguageModel(new DummyLM<>(_order_to));
}
}catch(Exception e){
LOG.error("Could not initialize Ngram generator. {}: {}", e.getClass(), e.getMessage(), e);
}
if("-".equals(_file.trim())){
LOG.info("Processing text from stdin ('{}').", _file);
try{run(new InputStreamReader(System.in, "UTF-8"), _file);}catch(Exception e){LOG.error("Could not generate ngrams from file '{}'.", _file, e);}
}else{
File f_or_d = new File(_file);
if(!f_or_d.exists())
throw new Error(String.format("File or directory '%s' not found.", _file));
if(f_or_d.isFile()){
LOG.info("Processing file '{}'.", f_or_d.getAbsolutePath());
try{
run(new InputStreamReader(new FileInputStream(f_or_d), "UTF-8"), _file);
}catch(Exception e){
LOG.error("Could not generate ngrams from file '{}'.", f_or_d.getAbsolutePath(), e);
}
}
if(f_or_d.isDirectory()){
File[] txt_files = f_or_d.listFiles(new FileFilter(){
@Override
public boolean accept(File f) {
return f.isFile() && f.getName().endsWith(".txt");
}});
for(int i = 0; i < txt_files.length; i++){
File f = txt_files[i];
LOG.info("Processing file '{}' ({}/{}).", f.getAbsolutePath(), i + 1, txt_files.length);
try{
run(new InputStreamReader(new FileInputStream(f), "UTF-8"), f.getAbsolutePath());
}catch(Exception e){
LOG.error("Could not generate ngrams from file '{}'.", f.getAbsolutePath(), e);
}
}
}
}
LOG.info("Generated {} ngrams.", _num_ngrams);
if(!"-".equals(_out.trim()))
_pout.close();
}
public void run(Reader r, String f){
if(!_accross_sentences)
run_within_sentences(r, f);
else
run_across_sentences(r, f);
}
public void run_within_sentences(Reader r, String f) {
LineIterator liter = new LineIterator(r);
for(long lc = 0; liter.hasNext();){
if(++lc % 1000 == 0)
LOG.info("Processing line {}:{}", f, lc);
try{
String line = liter.next();
if(line.trim().isEmpty())
continue;
List<String> sentences = _prvdr.splitSentences(line);
if(sentences == null || sentences.isEmpty())
continue;
for(String sentence : sentences){
if(sentence == null || sentence.trim().isEmpty())
continue;
for(int n = _order_from; n <= _order_to; n++){
List<String>[] ngrams = null;
try{
List<String> tokens = _prvdr.tokenizeSentence(sentence);
if(tokens == null || tokens.isEmpty())
continue;
ngrams = _prvdr.getNgramSequence(tokens, n);
if(ngrams == null || ngrams.length < 1)
continue;
}
catch(Exception e){
LOG.warn("Could not get ngram of cardinality {} from String '{}' in line '{}' from file '{}'.", n, StringUtils.abbreviate(line, 100), lc, f);
continue;
}
for(List<String> ngram : ngrams){
if(ngram == null || ngram.isEmpty())
continue;
_pout.println(StringUtils.join(ngram, " "));
}
_pout.flush();
_num_ngrams += ngrams.length;
}
}
}catch(Exception e){
LOG.warn("Could not process line '{}' in file '{}'.", lc, f);
}
}
}
public void run_across_sentences(Reader r, String f) {
LineIterator liter = new LineIterator(r);
for(long lc = 0; liter.hasNext();){
if(++lc % 1000 == 0)
LOG.info("Processing line {}:{}", f, lc);
try{
String line = liter.next();
if(line.trim().isEmpty())
continue;
List<String> sentences = _prvdr.splitSentences(line);
if(sentences == null || sentences.isEmpty())
continue;
for(String sentence : sentences){
if(sentence == null || sentence.isEmpty())
continue;
List<String> tokens = null;
try{
tokens = _prvdr.tokenizeSentence(sentence);
if(tokens == null || tokens.isEmpty())
continue;
}
catch(Exception e){
LOG.warn("Could not get tokens from from String '{}' in line '{}' from file '{}'.", StringUtils.abbreviate(line, 100), lc, f);
continue;
}
for(String word : tokens){
if(word == null || word.trim().isEmpty())
continue;
_ngram.add(word);
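// emit every ngram of length _order_from.._order_to that ends at the word just added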
for(int n = Math.max(_ngram.size()-_order_to,0); n <= Math.min(_ngram.size() - _order_from, _ngram.size()-1); n++)
_pout.println(StringUtils.join(_ngram.subList(n, _ngram.size()), " "));
_num_ngrams++;
}
_pout.flush();
}
}catch(Exception e){
LOG.warn("Could not process line '{}' in file '{}'.", lc, f);
}
}
}
}
|
apache-2.0
|
fpompermaier/onvif
|
onvif-ws-client/src/main/java/org/onvif/ver10/deviceio/wsdl/Get.java
|
1132
|
package org.onvif.ver10.deviceio.wsdl;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.cxf.xjc.runtime.JAXBToStringStyle;
/**
* <p>Java class for Get complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Get">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Get")
public class Get {
/**
* Generates a String representation of the contents of this type.
* This is an extension method, produced by the 'ts' xjc plugin
*
*/
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this, JAXBToStringStyle.DEFAULT_STYLE);
}
}
|
apache-2.0
|
kosenkoandrey/mailiq-pult
|
protected/render/logs/admin/dashboard/js.php
|
17012
|
<?
APP::$insert['js_flot'] = ['js', 'file', 'before', '</body>', APP::Module('Routing')->root . 'public/ui/vendors/bower_components/flot/jquery.flot.js'];
APP::$insert['js_flot_resize'] = ['js', 'file', 'before', '</body>', APP::Module('Routing')->root . 'public/ui/vendors/bower_components/flot/jquery.flot.resize.js'];
APP::$insert['js_flot_time'] = ['js', 'file', 'before', '</body>', APP::Module('Routing')->root . 'public/ui/vendors/bower_components/flot/jquery.flot.time.js'];
APP::$insert['js_moment'] = ['js', 'file', 'before', '</body>', APP::Module('Routing')->root . 'public/ui/vendors/bower_components/moment/min/moment.min.js'];
APP::$insert['js_datetimepicker'] = ['js', 'file', 'before', '</body>', APP::Module('Routing')->root . 'public/ui/vendors/bower_components/eonasdan-bootstrap-datetimepicker/build/js/bootstrap-datetimepicker.min.js'];
ob_start();
?>
<script>
function strtotime(text, now) {
var parsed
var match
var today
var year
var date
var days
var ranges
var len
var times
var regex
var i
var fail = false
if (!text) {
return fail
}
// Unnecessary spaces
text = text.replace(/^\s+|\s+$/g, '')
.replace(/\s{2,}/g, ' ')
.replace(/[\t\r\n]/g, '')
.toLowerCase()
// in contrast to php, js Date.parse function interprets:
// dates given as yyyy-mm-dd as in timezone: UTC,
// dates with "." or "-" as MDY instead of DMY
// dates with two-digit years differently
// etc...etc...
// ...therefore we manually parse lots of common date formats
var pattern = new RegExp([
'^(\\d{1,4})',
'([\\-\\.\\/:])',
'(\\d{1,2})',
'([\\-\\.\\/:])',
'(\\d{1,4})',
'(?:\\s(\\d{1,2}):(\\d{2})?:?(\\d{2})?)?',
'(?:\\s([A-Z]+)?)?$'
].join(''))
match = text.match(pattern)
if (match && match[2] === match[4]) {
if (match[1] > 1901) {
switch (match[2]) {
case '-':
// YYYY-M-D
if (match[3] > 12 || match[5] > 31) {
return fail
}
return new Date(match[1], parseInt(match[3], 10) - 1, match[5],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
case '.':
// YYYY.M.D is not parsed by strtotime()
return fail
case '/':
// YYYY/M/D
if (match[3] > 12 || match[5] > 31) {
return fail
}
return new Date(match[1], parseInt(match[3], 10) - 1, match[5],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
}
} else if (match[5] > 1901) {
switch (match[2]) {
case '-':
// D-M-YYYY
if (match[3] > 12 || match[1] > 31) {
return fail
}
return new Date(match[5], parseInt(match[3], 10) - 1, match[1],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
case '.':
// D.M.YYYY
if (match[3] > 12 || match[1] > 31) {
return fail
}
return new Date(match[5], parseInt(match[3], 10) - 1, match[1],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
case '/':
// M/D/YYYY
if (match[1] > 12 || match[3] > 31) {
return fail
}
return new Date(match[5], parseInt(match[1], 10) - 1, match[3],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
}
} else {
switch (match[2]) {
case '-':
// YY-M-D
if (match[3] > 12 || match[5] > 31 || (match[1] < 70 && match[1] > 38)) {
return fail
}
year = match[1] >= 0 && match[1] <= 38 ? +match[1] + 2000 : match[1]
return new Date(year, parseInt(match[3], 10) - 1, match[5],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
case '.':
// D.M.YY or H.MM.SS
if (match[5] >= 70) {
// D.M.YY
if (match[3] > 12 || match[1] > 31) {
return fail
}
return new Date(match[5], parseInt(match[3], 10) - 1, match[1],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
}
if (match[5] < 60 && !match[6]) {
// H.MM.SS
if (match[1] > 23 || match[3] > 59) {
return fail
}
today = new Date()
return new Date(today.getFullYear(), today.getMonth(), today.getDate(),
match[1] || 0, match[3] || 0, match[5] || 0, match[9] || 0) / 1000
}
// invalid format, cannot be parsed
return fail
case '/':
// M/D/YY
if (match[1] > 12 || match[3] > 31 || (match[5] < 70 && match[5] > 38)) {
return fail
}
year = match[5] >= 0 && match[5] <= 38 ? +match[5] + 2000 : match[5]
return new Date(year, parseInt(match[1], 10) - 1, match[3],
match[6] || 0, match[7] || 0, match[8] || 0, match[9] || 0) / 1000
case ':':
// HH:MM:SS
if (match[1] > 23 || match[3] > 59 || match[5] > 59) {
return fail
}
today = new Date()
return new Date(today.getFullYear(), today.getMonth(), today.getDate(),
match[1] || 0, match[3] || 0, match[5] || 0) / 1000
}
}
}
// other formats and "now" should be parsed by Date.parse()
if (text === 'now') {
return now === null || isNaN(now)
? new Date().getTime() / 1000 | 0
: now | 0
}
if (!isNaN(parsed = Date.parse(text))) {
return parsed / 1000 | 0
}
// Browsers !== Chrome have problems parsing ISO 8601 date strings, as they do
// not accept lower case characters, space, or shortened time zones.
// Therefore, fix these problems and try again.
// Examples:
// 2015-04-15 20:33:59+02
// 2015-04-15 20:33:59z
// 2015-04-15t20:33:59+02:00
pattern = new RegExp([
'^([0-9]{4}-[0-9]{2}-[0-9]{2})',
'[ t]',
'([0-9]{2}:[0-9]{2}:[0-9]{2}(\\.[0-9]+)?)',
'([\\+-][0-9]{2}(:[0-9]{2})?|z)'
].join(''))
match = text.match(pattern)
if (match) {
// @todo: time zone information
if (match[4] === 'z') {
match[4] = 'Z'
} else if (match[4].match(/^([\+-][0-9]{2})$/)) {
match[4] = match[4] + ':00'
}
if (!isNaN(parsed = Date.parse(match[1] + 'T' + match[2] + match[4]))) {
return parsed / 1000 | 0
}
}
date = now ? new Date(now * 1000) : new Date()
days = {
'sun': 0,
'mon': 1,
'tue': 2,
'wed': 3,
'thu': 4,
'fri': 5,
'sat': 6
}
ranges = {
'yea': 'FullYear',
'mon': 'Month',
'day': 'Date',
'hou': 'Hours',
'min': 'Minutes',
'sec': 'Seconds'
}
function lastNext (type, range, modifier) {
var diff
var day = days[range]
if (typeof day !== 'undefined') {
diff = day - date.getDay()
if (diff === 0) {
diff = 7 * modifier
} else if (diff > 0 && type === 'last') {
diff -= 7
} else if (diff < 0 && type === 'next') {
diff += 7
}
date.setDate(date.getDate() + diff)
}
}
function process (val) {
// @todo: Reconcile this with regex using \s, taking into account
// browser issues with split and regexes
var splt = val.split(' ')
var type = splt[0]
var range = splt[1].substring(0, 3)
var typeIsNumber = /\d+/.test(type)
var ago = splt[2] === 'ago'
var num = (type === 'last' ? -1 : 1) * (ago ? -1 : 1)
if (typeIsNumber) {
num *= parseInt(type, 10)
}
if (ranges.hasOwnProperty(range) && !splt[1].match(/^mon(day|\.)?$/i)) {
return date['set' + ranges[range]](date['get' + ranges[range]]() + num)
}
if (range === 'wee') {
return date.setDate(date.getDate() + (num * 7))
}
if (type === 'next' || type === 'last') {
lastNext(type, range, num)
} else if (!typeIsNumber) {
return false
}
return true
}
times = '(years?|months?|weeks?|days?|hours?|minutes?|min|seconds?|sec' +
'|sunday|sun\\.?|monday|mon\\.?|tuesday|tue\\.?|wednesday|wed\\.?' +
'|thursday|thu\\.?|friday|fri\\.?|saturday|sat\\.?)'
regex = '([+-]?\\d+\\s' + times + '|' + '(last|next)\\s' + times + ')(\\sago)?'
match = text.match(new RegExp(regex, 'gi'))
if (!match) {
return fail
}
for (i = 0, len = match.length; i < len; i++) {
if (!process(match[i])) {
return fail
}
}
return (date.getTime() / 1000)
}
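// Usage sketch (hypothetical inputs): strtotime('15-04-2015') parses a
// D-M-YYYY date via the manual patterns above, while strtotime('-1 months', now)
// takes the relative-date branch, as used by the period buttons below.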
</script>
<?
APP::$insert['js_strtotime'] = ['js', 'code', 'before', '</body>', ob_get_contents()];
ob_end_clean();
?>
<script>
function GetLogErrors(nav) {
$('#logs-errors-period > button').removeAttr('disabled');
if (nav) $('#logs-errors-period > button[data-period="' + nav + '"]').attr('disabled', 'disabled');
$('#error-log-chart').html('<div class="text-center"><div class="preloader pl-xxl"><svg class="pl-circular" viewBox="25 25 50 50"><circle class="plc-path" cx="50" cy="50" r="20" /></svg></div></div>');
$.ajax({
url: '<?= APP::Module('Routing')->root ?>admin/logs/api/dashboard.json',
data: {
date: {
from: $('#error-log-date-from').val(),
to: $('#error-log-date-to').val()
}
},
type: 'POST',
dataType: 'json',
success: function(data) {
$.plot("#error-log-chart", [
{
label: "Errors",
data: data
}
], {
series: {
lines: {
show: true
},
points: {
show: true
}
},
grid : {
borderWidth: 1,
borderColor: '#eee',
show : true,
hoverable : true,
clickable : true
},
legend : {
show : true,
noColumns:0,
container: $('#error-log-legend')
},
yaxis: {
tickColor: '#eee',
tickDecimals: 0,
font :{
lineHeight: 13,
style: "normal",
color: "#9f9f9f",
},
shadowSize: 0
},
xaxis: {
mode: "time",
tickColor: '#fff',
tickDecimals: 0,
font :{
lineHeight: 13,
style: "normal",
color: "#9f9f9f"
},
shadowSize: 0
}
});
$('<div id="card-<?= $data['hash'] ?>-tooltip"></div>').css({
position: "absolute",
display: "none",
border: "1px solid #fdd",
padding: "2px",
"background-color": "#fee",
opacity: 0.80
}).appendTo("body");
$("#error-log-chart").bind("plothover", function (event, pos, item) {
if (item) {
var date = new Date(item.datapoint[0]);
$("#card-<?= $data['hash'] ?>-tooltip")
.html(item.datapoint[1] + ' ' + item.series.label + ' of ' + date.getDate() + '-' + (date.getMonth() + 1) + '-' + date.getFullYear())
.css({
top: item.pageY+5,
left: item.pageX+5
})
.fadeIn(200);
} else {
$("#card-<?= $data['hash'] ?>-tooltip").hide();
}
});
}
});
}
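// GetLogErrors() is driven by the period buttons and calendar pickers below;
// the initial load comes from the simulated click at the end of this script.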
$(document).on('click', "#logs-errors-period > button",function() {
var period = $(this).data('period');
var to = Math.round(new Date().getTime() / 1000);
var from = strtotime("-" + period, to);
var to_date = new Date(to * 1000);
var from_date = new Date(from * 1000);
$('#error-log-date-to').val(to);
$('#error-log-date-from').val(from);
$('#logs-errors-calendar-from').html(from_date.getDate() + '.' + (from_date.getMonth() + 1) + '.' + from_date.getFullYear());
$('#logs-errors-calendar-to').html(to_date.getDate() + '.' + (to_date.getMonth() + 1) + '.' + to_date.getFullYear());
GetLogErrors(period);
});
$('#logs-errors-calendar').popover({
html: true,
content: [
'<div class="form-group">',
'<div class="row">',
'<div class="col-md-6">',
'<div id="error-log-calendar-from-block"></div>',
'</div>',
'<div class="col-md-6">',
'<div id="error-log-calendar-to-block"></div>',
'</div>',
'</div>',
'</div>'
].join(''),
placement: 'bottom',
title: 'Выбор даты', // "Date selection"
trigger: 'click'
}).on('show.bs.popover', function() {
$(this).data('bs.popover').tip().css({
'max-width': '640px',
'width': '640px'
});
}).on('shown.bs.popover', function() {
var to_date = new Date(parseInt($('#error-log-date-to').val()) * 1000);
var from_date = new Date(parseInt($('#error-log-date-from').val()) * 1000);
$('#error-log-calendar-from-block').datetimepicker({
inline: true,
locale: 'ru',
sideBySide: true,
format: 'DD/MM/YYYY'
});
$('#error-log-calendar-to-block').datetimepicker({
useCurrent: false,
locale: 'ru',
inline: true,
sideBySide: true,
format: 'DD/MM/YYYY'
});
$('#error-log-calendar-from-block').on('dp.change', function(e) {
$('#error-log-date-from').val(Math.round(e.date._d.getTime() / 1000));
$('#logs-errors-period > button').removeAttr('disabled');
$('#error-log-calendar-to-block').data('DateTimePicker').minDate(e.date);
$('#logs-errors-calendar-from').html(e.date._d.getDate() + '.' + (e.date._d.getMonth() + 1) + '.' + e.date._d.getFullYear());
GetLogErrors(false);
});
$('#error-log-calendar-to-block').on('dp.change', function(e) {
$('#error-log-date-to').val(Math.round(e.date._d.getTime() / 1000));
$('#logs-errors-period > button').removeAttr('disabled');
$('#error-log-calendar-from-block').data('DateTimePicker').maxDate(e.date);
$('#logs-errors-calendar-to').html(e.date._d.getDate() + '.' + (e.date._d.getMonth() + 1) + '.' + e.date._d.getFullYear());
GetLogErrors(false);
});
$('#error-log-calendar-from-block').data('DateTimePicker').date(moment(from_date));
$('#error-log-calendar-to-block').data('DateTimePicker').date(moment(to_date));
});
$('#logs-errors-period > button[data-period="1 months"]').trigger('click');
</script>
|
apache-2.0
|
google/active-learning
|
sampling_methods/simulate_batch.py
|
10261
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Select a new batch based on results of simulated trajectories."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import math
import numpy as np
from sampling_methods.wrapper_sampler_def import AL_MAPPING
from sampling_methods.wrapper_sampler_def import WrapperSamplingMethod
class SimulateBatchSampler(WrapperSamplingMethod):
"""Creates batch based on trajectories simulated using smaller batch sizes.
Current support use case: simulate smaller batches than the batch size
actually indicated to emulate which points would be selected in a
smaller batch setting. This method can do better than just selecting
a batch straight out if smaller batches perform better and the simulations
are informative enough and are not hurt too much by labeling noise.
"""
def __init__(self,
X,
y,
seed,
samplers=[{'methods': ('margin', 'uniform'),'weight': (1, 0)}],
n_sims=10,
train_per_sim=10,
return_type='best_sim'):
""" Initialize sampler with options.
Args:
X: training data
y: labels may be used by base sampling methods
seed: seed for np.random
samplers: list of dicts with two fields
'samplers': list of named samplers
'weights': percentage of batch to allocate to each sampler
n_sims: number of total trajectories to simulate
train_per_sim: number of minibatches to split the batch into
return_type: two return types supported right now
best_sim: return points selected by the best trajectory
frequency: returns points selected the most over all trajectories
"""
self.name = 'simulate_batch'
self.X = X
self.y = y
self.seed = seed
self.n_sims = n_sims
self.train_per_sim = train_per_sim
self.return_type = return_type
self.samplers_list = samplers
self.initialize_samplers(self.samplers_list)
self.trace = []
self.selected = []
np.random.seed(seed)
def simulate_batch(self, sampler, N, already_selected, y, model, X_test,
y_test, **kwargs):
"""Simulates smaller batches by using hallucinated y to select next batch.
Assumes that select_batch is only dependent on already_selected and not on
any other states internal to the sampler. i.e. this would not work with
BanditDiscreteSampler but will work with margin, hierarchical, and uniform.
Args:
sampler: dict with two fields
'samplers': list of named samplers
'weights': percentage of batch to allocate to each sampler
N: batch size
already_selected: indices already labeled
y: y to use for training
model: model to use for margin calc
X_test: validation data
y_test: validation labels
Returns:
- mean accuracy
- indices selected by best hallucinated trajectory
- best accuracy achieved by one of the trajectories
"""
minibatch = max(int(math.ceil(N / self.train_per_sim)), 1)
results = []
best_acc = 0
best_inds = []
self.selected = []
n_minibatch = int(N/minibatch) + (N % minibatch > 0)
for _ in range(self.n_sims):
inds = []
hallucinated_y = []
# Copy these objects to make sure they are not modified while simulating
# trajectories as they are used later by the main run_experiment script.
kwargs['already_selected'] = copy.copy(already_selected)
kwargs['y'] = copy.copy(y)
# Assumes that the model has already been fit using all labeled data so
# the probabilities can be used immediately to hallucinate labels
kwargs['model'] = copy.deepcopy(model)
for _ in range(n_minibatch):
batch_size = min(minibatch, N-len(inds))
if batch_size > 0:
kwargs['N'] = batch_size
new_inds = sampler.select_batch(**kwargs)
inds.extend(new_inds)
# All models need to have predict_proba method
probs = kwargs['model'].predict_proba(self.X[new_inds])
# Hallucinate labels for the selected datapoints
# using class probabilities from the model
try:
classes = kwargs['model'].best_estimator_.classes_
except:
classes = kwargs['model'].classes_
new_y = ([
np.random.choice(classes, p=probs[i, :])
for i in range(batch_size)
])
hallucinated_y.extend(new_y)
# already_selected is not persisted back to the caller here; if it were,
# the sort should be applied only to the input of fit while preserving
# the original ordering of indices in already_selected
kwargs['already_selected'] = sorted(kwargs['already_selected']
+ new_inds)
kwargs['y'][new_inds] = new_y
kwargs['model'].fit(self.X[kwargs['already_selected']],
kwargs['y'][kwargs['already_selected']])
acc_hallucinated = kwargs['model'].score(X_test, y_test)
if acc_hallucinated > best_acc:
best_acc = acc_hallucinated
best_inds = inds
kwargs['model'].fit(self.X[kwargs['already_selected']],
y[kwargs['already_selected']])
# Useful to know how accuracy compares for model trained on hallucinated
# labels vs trained on true labels. But can remove this train to speed
# up simulations. Won't speed up significantly since many more models
# are being trained inside the loop above.
acc_true = kwargs['model'].score(X_test, y_test)
results.append([acc_hallucinated, acc_true])
print('Hallucinated acc: %.3f, Actual Acc: %.3f' % (acc_hallucinated,
acc_true))
# Save trajectory for reference
t = {}
t['arm'] = sampler
t['data_size'] = len(kwargs['already_selected'])
t['inds'] = inds
t['y_hal'] = hallucinated_y
t['acc_hal'] = acc_hallucinated
t['acc_true'] = acc_true
self.trace.append(t)
self.selected.extend(inds)
# Delete created copies
del kwargs['model']
del kwargs['already_selected']
results = np.array(results)
return np.mean(results, axis=0), best_inds, best_acc
def sampler_select_batch(self, sampler, N, already_selected, y, model, X_test, y_test, **kwargs):
"""Calculate the performance of the model if the batch had been selected using the base method without simulation.
Args:
sampler: dict with two fields
'samplers': list of named samplers
'weights': percentage of batch to allocate to each sampler
N: batch size
already_selected: indices already selected
y: labels to use for training
model: model to use for training
X_test, y_test: validation set
Returns:
- indices selected by base method
- validation accuracy of model trained on new batch
"""
m = copy.deepcopy(model)
kwargs['y'] = y
kwargs['model'] = m
kwargs['already_selected'] = copy.copy(already_selected)
inds = []
kwargs['N'] = N
inds.extend(sampler.select_batch(**kwargs))
kwargs['already_selected'] = sorted(kwargs['already_selected'] + inds)
m.fit(self.X[kwargs['already_selected']], y[kwargs['already_selected']])
acc = m.score(X_test, y_test)
del m
del kwargs['already_selected']
return inds, acc
def select_batch_(self, N, already_selected, y, model,
X_test, y_test, **kwargs):
""" Returns a batch of size N selected by using the best sampler in simulation
Args:
samplers: list of sampling methods represented by dict with two fields
'samplers': list of named samplers
'weights': percentage of batch to allocate to each sampler
N: batch size
already_selected: indices of datapoints already labeled
y: actual labels, used to compare simulation with actual
model: training model to use to evaluate different samplers. Model must
have a predict_proba method with same signature as that in sklearn
n_sims: the number of simulations to perform for each sampler
minibatch: batch size to use for simulation
"""
results = []
# THE INPUTS CANNOT BE MODIFIED SO WE MAKE COPIES FOR THE CHECK LATER
# Should check model but kernel_svm does not have coef_ so need better
# handling here
copy_selected = copy.copy(already_selected)
copy_y = copy.copy(y)
for s in self.samplers:
sim_results, sim_inds, sim_acc = self.simulate_batch(
s, N, already_selected, y, model, X_test, y_test, **kwargs)
real_inds, acc = self.sampler_select_batch(
s, N, already_selected, y, model, X_test, y_test, **kwargs)
print('Best simulated acc: %.3f, Actual acc: %.3f' % (sim_acc, acc))
results.append([sim_results, sim_inds, real_inds, acc])
best_s = np.argmax([r[0][0] for r in results])
# Make sure that model object fed in did not change during simulations
assert all(y == copy_y)
assert all([copy_selected[i] == already_selected[i]
for i in range(len(already_selected))])
# Return indices based on return type specified
if self.return_type == 'best_sim':
return results[best_s][1]
elif self.return_type == 'frequency':
unique, counts = np.unique(self.selected, return_counts=True)
argcount = np.argsort(-counts)
return list(unique[argcount[0:N]])
return results[best_s][2]
def to_dict(self):
output = {}
output['simulated_trajectories'] = self.trace
return output
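# A minimal usage sketch (hypothetical data and model; assumes the default
# sampler names are registered in AL_MAPPING and that the model exposes
# predict_proba, as required above):
#
#   sampler = SimulateBatchSampler(X, y, seed=1, n_sims=5)
#   batch = sampler.select_batch_(N=20, already_selected=seed_inds, y=y,
#                                 model=model, X_test=X_val, y_test=y_val)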
|
apache-2.0
|
sapia-oss/corus_iop
|
modules/client/src/main/java/org/sapia/corus/interop/helpers/RequestListener.java
|
3050
|
package org.sapia.corus.interop.helpers;
import java.util.List;
import org.sapia.corus.interop.api.message.ConfirmShutdownMessageCommand;
import org.sapia.corus.interop.api.message.MessageCommand;
import org.sapia.corus.interop.api.message.PollMessageCommand;
import org.sapia.corus.interop.api.message.ProcessMessageHeader;
import org.sapia.corus.interop.api.message.RestartMessageCommand;
import org.sapia.corus.interop.api.message.StatusMessageCommand;
import org.sapia.corus.interop.protobuf.CorusInteroperability.Status;
/**
* This interface can conveniently be implemented by servers that handle
* Corus's Interoperability Protocol. The interface specifies callbacks that
* are called for each possible request that can be sent by Corus clients.
*
* @see ServerStatelessStreamHelper
*
* @author yduchesne
*/
public interface RequestListener {
/**
* Called when a dynamic process confirms that it has proceeded to its own shutdown.
*
* @param proc a {@link ProcessMessageHeader} object, encapsulating the corus process ID of the request's originator, and
* a request identifier.
* @param confirm a {@link ConfirmShutdownMessageCommand} instance.
* @throws Exception if an error occurs when processing the given command.
*/
public void onConfirmShutdown(ProcessMessageHeader proc, ConfirmShutdownMessageCommand confirm) throws Exception;
/**
* Called when a dynamic process notifies its corus server about its status.
*
* @param proc a {@link ProcessMessageHeader} object, encapsulating the corus process ID of the request's originator, and
* a request identifier.
* @param stat a {@link Status} instance.
* @throws Exception if an error occurs when processing the given command.
* @return the {@link List} of commands that were pending in the process queue, within the
* Corus server.
*/
public List<MessageCommand> onStatus(ProcessMessageHeader proc, StatusMessageCommand stat) throws Exception;
/**
* Called when a dynamic process polls its corus server.
*
* @param proc a {@link ProcessMessageHeader} object, encapsulating the corus process ID of the request's originator, and
* a request identifier.
* @param poll a {@link PollMessageCommand} instance.
* @throws Exception if an error occurs when processing the given command.
* @return the {@link List} of commands that were pending in the process queue, within the
* Corus server.
*/
public List<MessageCommand> onPoll(ProcessMessageHeader proc, PollMessageCommand poll) throws Exception;
/**
* Called when a dynamic process notifies its corus server that it wishes to be restarted.
*
* @param proc a {@link ProcessMessageHeader} object, encapsulating the corus process ID of the request's originator, and
* a request identifier.
* @param res a {@link RestartMessageCommand} instance.
* @throws Exception if an error occurs when processing the given command.
*/
public void onRestart(ProcessMessageHeader proc, RestartMessageCommand res) throws Exception;
}
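// An implementation sketch (hypothetical class; java.util.Collections assumed
// imported, and a real listener would drain commands from the Corus server's
// per-process queue rather than return empty lists):
//
//   public class NoopRequestListener implements RequestListener {
//     public void onConfirmShutdown(ProcessMessageHeader proc,
//         ConfirmShutdownMessageCommand confirm) { }
//     public List<MessageCommand> onStatus(ProcessMessageHeader proc,
//         StatusMessageCommand stat) { return Collections.emptyList(); }
//     public List<MessageCommand> onPoll(ProcessMessageHeader proc,
//         PollMessageCommand poll) { return Collections.emptyList(); }
//     public void onRestart(ProcessMessageHeader proc,
//         RestartMessageCommand res) { }
//   }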
|
apache-2.0
|
OpenGamma/Strata
|
modules/basics/src/main/java/com/opengamma/strata/basics/date/GlobalHolidayCalendars.java
|
55262
|
/*
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.basics.date;
import static java.time.DayOfWeek.FRIDAY;
import static java.time.DayOfWeek.MONDAY;
import static java.time.DayOfWeek.SATURDAY;
import static java.time.DayOfWeek.SUNDAY;
import static java.time.DayOfWeek.THURSDAY;
import static java.time.DayOfWeek.TUESDAY;
import static java.time.DayOfWeek.WEDNESDAY;
import static java.time.temporal.TemporalAdjusters.dayOfWeekInMonth;
import static java.time.temporal.TemporalAdjusters.firstInMonth;
import static java.time.temporal.TemporalAdjusters.lastInMonth;
import static java.time.temporal.TemporalAdjusters.nextOrSame;
import static java.time.temporal.TemporalAdjusters.previous;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.time.DayOfWeek;
import java.time.LocalDate;
import java.time.temporal.TemporalAdjusters;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.google.common.io.Files;
/**
* Implementation of some common global holiday calendars.
* <p>
* The data provided here has been identified through direct research and is not
* derived from a vendor of holiday calendar data.
* This data may or may not be sufficient for your production needs.
*/
final class GlobalHolidayCalendars {
// WARNING!!
// If you change this file, you must run the main method to update the binary file
// which is used at runtime (for performance reasons)
/** Where to store the file. */
private static final File DATA_FILE =
new File("src/main/resources/com/opengamma/strata/basics/date/GlobalHolidayCalendars.bin");
//-------------------------------------------------------------------------
/**
* Used to generate a binary holiday data file.
*
* @param args ignored
* @throws IOException if an IO error occurs
*/
public static void main(String[] args) throws IOException {
Files.createParentDirs(DATA_FILE);
ImmutableHolidayCalendar[] calendars = {
generateLondon(),
generateParis(),
generateFrankfurt(),
generateZurich(),
generateEuropeanTarget(),
generateUsGovtSecurities(),
generateUsNewYork(),
generateNewYorkFed(),
generateNewYorkStockExchange(),
generateTokyo(),
generateSydney(),
generateBrazil(),
generateMontreal(),
generateToronto(),
generatePrague(),
generateCopenhagen(),
generateBudapest(),
generateMexicoCity(),
generateOslo(),
generateAuckland(),
generateWellington(),
generateNewZealand(),
generateWarsaw(),
generateStockholm(),
generateJohannesburg(),
};
try (FileOutputStream fos = new FileOutputStream(DATA_FILE)) {
try (DataOutputStream out = new DataOutputStream(fos)) {
out.writeByte('H');
out.writeByte('C');
out.writeByte('a');
out.writeByte('l');
out.writeShort(calendars.length);
for (ImmutableHolidayCalendar cal : calendars) {
cal.writeExternal(out);
}
}
}
}
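// The stream written above is, concretely: four magic bytes 'H','C','a','l',
// a short holding the number of calendars, then each calendar serialized via
// ImmutableHolidayCalendar.writeExternal.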
/**
* Restricted constructor.
*/
private GlobalHolidayCalendars() {
}
//-------------------------------------------------------------------------
// generate GBLO
// common law (including before 1871) good friday and christmas day (unadjusted for weekends)
// from 1871 easter monday, whit monday, first Mon in Aug and boxing day
// from 1965 to 1970, first in Aug moved to Mon after last Sat in Aug
// from 1971, whitsun moved to last Mon in May, last Mon in Aug
// from 1974, added new year
// from 1978, added first Mon in May
// see Hansard for specific details
// 1965, Whitsun, Last Mon Aug - http://hansard.millbanksystems.com/commons/1964/mar/04/staggered-holidays
// 1966, Whitsun May - http://hansard.millbanksystems.com/commons/1964/mar/04/staggered-holidays
// 1966, 29th Aug - http://hansard.millbanksystems.com/written_answers/1965/nov/25/august-bank-holiday
// 1967, 29th May, 28th Aug - http://hansard.millbanksystems.com/written_answers/1965/jun/03/bank-holidays-1967-and-1968
// 1968, 3rd Jun, 2nd Sep - http://hansard.millbanksystems.com/written_answers/1965/jun/03/bank-holidays-1967-and-1968
// 1969, 26th May, 1st Sep - http://hansard.millbanksystems.com/written_answers/1967/mar/21/bank-holidays-1969-dates
// 1970, 25th May, 31st Aug - http://hansard.millbanksystems.com/written_answers/1967/jul/28/bank-holidays
// 2022, 2nd and 3rd Jun - https://www.gov.uk/government/news/extra-bank-holiday-to-mark-the-queens-platinum-jubilee-in-2022
static ImmutableHolidayCalendar generateLondon() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
if (year >= 1974) {
holidays.add(bumpToMon(first(year, 1)));
}
// easter
holidays.add(easter(year).minusDays(2));
holidays.add(easter(year).plusDays(1));
// early May
if (year == 1995 || year == 2020) {
// ve day
holidays.add(date(year, 5, 8));
} else if (year >= 1978) {
holidays.add(first(year, 5).with(firstInMonth(MONDAY)));
}
// spring
if (year == 2002) {
// golden jubilee
holidays.add(date(2002, 6, 3));
holidays.add(date(2002, 6, 4));
} else if (year == 2012) {
// diamond jubilee
holidays.add(date(2012, 6, 4));
holidays.add(date(2012, 6, 5));
} else if (year == 2022) {
// platinum jubilee
holidays.add(date(2022, 6, 2));
holidays.add(date(2022, 6, 3));
} else if (year == 1967 || year == 1970) {
holidays.add(first(year, 5).with(lastInMonth(MONDAY)));
} else if (year < 1971) {
// whitsun
holidays.add(easter(year).plusDays(50));
} else {
holidays.add(first(year, 5).with(lastInMonth(MONDAY)));
}
// summer
if (year < 1965) {
holidays.add(first(year, 8).with(firstInMonth(MONDAY)));
} else if (year < 1971) {
holidays.add(first(year, 8).with(lastInMonth(SATURDAY)).plusDays(2));
} else {
holidays.add(first(year, 8).with(lastInMonth(MONDAY)));
}
// christmas
holidays.add(christmasBumpedSatSun(year));
holidays.add(boxingDayBumpedSatSun(year));
}
holidays.add(date(2011, 4, 29)); // royal wedding
holidays.add(date(1999, 12, 31)); // millennium
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.GBLO, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate FRPA
// data sources
// http://www.legifrance.gouv.fr/affichCodeArticle.do?idArticle=LEGIARTI000006902611&cidTexte=LEGITEXT000006072050
// http://jollyday.sourceforge.net/data/fr.html
// Euronext holidays only New Year, Good Friday, Easter Monday, Labour Day, Christmas Day, Boxing Day
// New Years Eve is holiday for cash markets and derivatives in 2015
// https://www.euronext.com/en/holidays-and-hours
// https://www.euronext.com/en/trading/nyse-euronext-trading-calendar/archives
// some sources have Monday as a holiday when Tuesday is, and Friday as a holiday when Thursday is (not applying this)
static ImmutableHolidayCalendar generateParis() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
holidays.add(date(year, 1, 1)); // new year
holidays.add(easter(year).minusDays(2)); // good friday
holidays.add(easter(year).plusDays(1)); // easter monday
holidays.add(date(year, 5, 1)); // labour day
holidays.add(date(year, 5, 8)); // victory in europe
holidays.add(easter(year).plusDays(39)); // ascension day
if (year <= 2004 || year >= 2008) {
holidays.add(easter(year).plusDays(50)); // whit monday
}
holidays.add(date(year, 7, 14)); // bastille
holidays.add(date(year, 8, 15)); // assumption of mary
holidays.add(date(year, 11, 1)); // all saints
holidays.add(date(year, 11, 11)); // armistice day
holidays.add(date(year, 12, 25)); // christmas day
holidays.add(date(year, 12, 26)); // saint stephen
}
holidays.add(date(1999, 12, 31)); // millennium
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.FRPA, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate DEFR
// data sources
// https://www.feiertagskalender.ch/index.php?geo=3122&klasse=3&jahr=2017&hl=en
// http://jollyday.sourceforge.net/data/de.html
// http://en.boerse-frankfurt.de/basics-marketplaces-tradingcalendar2019
static ImmutableHolidayCalendar generateFrankfurt() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
holidays.add(date(year, 1, 1)); // new year
holidays.add(easter(year).minusDays(2)); // good friday
holidays.add(easter(year).plusDays(1)); // easter monday
holidays.add(date(year, 5, 1)); // labour day
holidays.add(easter(year).plusDays(39)); // ascension day
holidays.add(easter(year).plusDays(50)); // whit monday
holidays.add(easter(year).plusDays(60)); // corpus christi
if (year >= 2000) {
holidays.add(date(year, 10, 3)); // german unity
}
if (year <= 1994) {
// Wed before the Sunday that is 2 weeks before first advent, which is 4th Sunday before Christmas
holidays.add(date(year, 12, 25).with(previous(SUNDAY)).minusWeeks(6).minusDays(4)); // repentance
}
holidays.add(date(year, 12, 24)); // christmas eve
holidays.add(date(year, 12, 25)); // christmas day
holidays.add(date(year, 12, 26)); // saint stephen
holidays.add(date(year, 12, 31)); // new year
}
holidays.add(date(2017, 10, 31)); // reformation day
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.DEFR, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate CHZU
// data sources
// http://jollyday.sourceforge.net/data/ch.html
// https://github.com/lballabio/quantlib/blob/master/QuantLib/ql/time/calendars/switzerland.cpp
// http://www.six-swiss-exchange.com/funds/trading/trading_and_settlement_calendar_en.html
// http://www.six-swiss-exchange.com/swx_messages/online/swx7299e.pdf
static ImmutableHolidayCalendar generateZurich() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
holidays.add(date(year, 1, 1)); // new year
holidays.add(date(year, 1, 2)); // saint berchtoldstag
holidays.add(easter(year).minusDays(2)); // good friday
holidays.add(easter(year).plusDays(1)); // easter monday
holidays.add(date(year, 5, 1)); // labour day
holidays.add(easter(year).plusDays(39)); // ascension day
holidays.add(easter(year).plusDays(50)); // whit monday
holidays.add(date(year, 8, 1)); // national day
holidays.add(date(year, 12, 25)); // christmas day
holidays.add(date(year, 12, 26)); // saint stephen
}
holidays.add(date(1999, 12, 31)); // millennium
holidays.add(date(2000, 1, 3)); // millennium
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.CHZU, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate EUTA
// 1997 - 1998 (testing phase), Jan 1, christmas day
// https://www.ecb.europa.eu/pub/pdf/other/tagien.pdf
// in 1999, Jan 1, christmas day, Dec 26, Dec 31
// http://www.ecb.europa.eu/press/pr/date/1999/html/pr990715_1.en.html
// http://www.ecb.europa.eu/press/pr/date/1999/html/pr990331.en.html
// in 2000, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26
// http://www.ecb.europa.eu/press/pr/date/1999/html/pr990715_1.en.html
// in 2001, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26, Dec 31
// http://www.ecb.europa.eu/press/pr/date/2000/html/pr000525_2.en.html
// from 2002, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26
// http://www.ecb.europa.eu/press/pr/date/2000/html/pr001214_4.en.html
static ImmutableHolidayCalendar generateEuropeanTarget() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1997; year <= 2099; year++) {
if (year >= 2000) {
holidays.add(date(year, 1, 1));
holidays.add(easter(year).minusDays(2));
holidays.add(easter(year).plusDays(1));
holidays.add(date(year, 5, 1));
holidays.add(date(year, 12, 25));
holidays.add(date(year, 12, 26));
} else { // 1997 to 1999
holidays.add(date(year, 1, 1));
holidays.add(date(year, 12, 25));
}
if (year == 1999 || year == 2001) {
holidays.add(date(year, 12, 31));
}
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.EUTA, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// common US holidays
private static void usCommon(
List<LocalDate> holidays, int year, boolean bumpBack, boolean columbusVeteran, int mlkStartYear) {
// new year, adjusted if Sunday
holidays.add(bumpSunToMon(date(year, 1, 1)));
// martin luther king
if (year >= mlkStartYear) {
holidays.add(date(year, 1, 1).with(dayOfWeekInMonth(3, MONDAY)));
}
// washington
if (year < 1971) {
holidays.add(bumpSunToMon(date(year, 2, 22)));
} else {
holidays.add(date(year, 2, 1).with(dayOfWeekInMonth(3, MONDAY)));
}
// memorial
if (year < 1971) {
holidays.add(bumpSunToMon(date(year, 5, 30)));
} else {
holidays.add(date(year, 5, 1).with(lastInMonth(MONDAY)));
}
// labor day
holidays.add(date(year, 9, 1).with(firstInMonth(MONDAY)));
// columbus day
if (columbusVeteran) {
if (year < 1971) {
holidays.add(bumpSunToMon(date(year, 10, 12)));
} else {
holidays.add(date(year, 10, 1).with(dayOfWeekInMonth(2, MONDAY)));
}
}
// veterans day
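// observed on the fourth Monday of October 1971-77 under the Uniform Monday Holiday Act, then back on 11 November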
if (columbusVeteran) {
if (year >= 1971 && year < 1978) {
holidays.add(date(year, 10, 1).with(dayOfWeekInMonth(4, MONDAY)));
} else {
holidays.add(bumpSunToMon(date(year, 11, 11)));
}
}
// thanksgiving
holidays.add(date(year, 11, 1).with(dayOfWeekInMonth(4, THURSDAY)));
// independence day & christmas day
if (bumpBack) {
holidays.add(bumpToFriOrMon(date(year, 7, 4)));
holidays.add(bumpToFriOrMon(date(year, 12, 25)));
} else {
holidays.add(bumpSunToMon(date(year, 7, 4)));
holidays.add(bumpSunToMon(date(year, 12, 25)));
}
}
// generate USGS
// http://www.sifma.org/services/holiday-schedule/
static ImmutableHolidayCalendar generateUsGovtSecurities() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
usCommon(holidays, year, true, true, 1986);
// good friday, in 1999/2007 only a partial holiday
holidays.add(easter(year).minusDays(2));
// hurricane sandy
if (year == 2012) {
holidays.add(date(year, 10, 30));
}
}
holidays.add(date(2018, 12, 5)); // Death of George H.W. Bush
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.USGS, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate USNY
// http://www.cs.ny.gov/attendance_leave/2012_legal_holidays.cfm
// http://www.cs.ny.gov/attendance_leave/2013_legal_holidays.cfm
// etc
// ignore election day and lincoln day
static ImmutableHolidayCalendar generateUsNewYork() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
usCommon(holidays, year, false, true, 1986);
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.USNY, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate NYFD
// http://www.ny.frb.org/aboutthefed/holiday_schedule.html
static ImmutableHolidayCalendar generateNewYorkFed() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
usCommon(holidays, year, false, true, 1986);
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.NYFD, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate NYSE
// https://www.nyse.com/markets/hours-calendars
// http://www1.nyse.com/pdfs/closings.pdf
static ImmutableHolidayCalendar generateNewYorkStockExchange() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
usCommon(holidays, year, true, false, 1998);
// good friday
holidays.add(easter(year).minusDays(2));
}
// Lincoln day 1896-1953
// Columbus day 1909-1953
// Veterans day 1934-1953
for (int i = 1950; i <= 1953; i++) {
holidays.add(date(i, 2, 12));
holidays.add(date(i, 10, 12));
holidays.add(date(i, 11, 11));
}
// election day, Tue after first Monday of November
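// e.g. 1 Nov 1950 was a Wednesday, so nextOrSame(MONDAY) yields 6 Nov and election day is Tue 7 Nov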
for (int i = 1950; i <= 1968; i++) {
holidays.add(date(i, 11, 1).with(TemporalAdjusters.nextOrSame(MONDAY)).plusDays(1));
}
holidays.add(date(1972, 11, 7));
holidays.add(date(1976, 11, 2));
holidays.add(date(1980, 11, 4));
// special days
holidays.add(date(1955, 12, 24)); // Christmas Eve
holidays.add(date(1956, 12, 24)); // Christmas Eve
holidays.add(date(1958, 12, 26)); // Day after Christmas
holidays.add(date(1961, 5, 29)); // Decoration day
holidays.add(date(1963, 11, 25)); // Death of John F Kennedy
holidays.add(date(1965, 12, 24)); // Christmas Eve
holidays.add(date(1968, 2, 12)); // Lincoln birthday
holidays.add(date(1968, 4, 9)); // Death of Martin Luther King
holidays.add(date(1968, 6, 12)); // Paperwork crisis
holidays.add(date(1968, 6, 19)); // Paperwork crisis
holidays.add(date(1968, 6, 26)); // Paperwork crisis
holidays.add(date(1968, 7, 3)); // Paperwork crisis
holidays.add(date(1968, 7, 5)); // Day after independence
holidays.add(date(1968, 7, 10)); // Paperwork crisis
holidays.add(date(1968, 7, 17)); // Paperwork crisis
holidays.add(date(1968, 7, 24)); // Paperwork crisis
holidays.add(date(1968, 7, 31)); // Paperwork crisis
holidays.add(date(1968, 8, 7)); // Paperwork crisis
holidays.add(date(1968, 8, 13)); // Paperwork crisis
holidays.add(date(1968, 8, 21)); // Paperwork crisis
holidays.add(date(1968, 8, 28)); // Paperwork crisis
holidays.add(date(1968, 9, 4)); // Paperwork crisis
holidays.add(date(1968, 9, 11)); // Paperwork crisis
holidays.add(date(1968, 9, 18)); // Paperwork crisis
holidays.add(date(1968, 9, 25)); // Paperwork crisis
holidays.add(date(1968, 10, 2)); // Paperwork crisis
holidays.add(date(1968, 10, 9)); // Paperwork crisis
holidays.add(date(1968, 10, 16)); // Paperwork crisis
holidays.add(date(1968, 10, 23)); // Paperwork crisis
holidays.add(date(1968, 10, 30)); // Paperwork crisis
holidays.add(date(1968, 11, 6)); // Paperwork crisis
holidays.add(date(1968, 11, 13)); // Paperwork crisis
holidays.add(date(1968, 11, 20)); // Paperwork crisis
holidays.add(date(1968, 11, 27)); // Paperwork crisis
holidays.add(date(1968, 12, 4)); // Paperwork crisis
holidays.add(date(1968, 12, 11)); // Paperwork crisis
holidays.add(date(1968, 12, 18)); // Paperwork crisis
holidays.add(date(1968, 12, 25)); // Paperwork crisis
holidays.add(date(1968, 12, 31)); // Paperwork crisis
holidays.add(date(1969, 2, 10)); // Snow
holidays.add(date(1969, 3, 31)); // Death of Dwight Eisenhower
holidays.add(date(1969, 7, 21)); // Lunar exploration
holidays.add(date(1972, 12, 28)); // Death of Harry Truman
holidays.add(date(1973, 1, 25)); // Death of Lyndon Johnson
holidays.add(date(1977, 7, 14)); // Blackout
holidays.add(date(1985, 9, 27)); // Hurricane Gloria
holidays.add(date(1994, 4, 27)); // Death of Richard Nixon
holidays.add(date(2001, 9, 11)); // 9/11 attack
holidays.add(date(2001, 9, 12)); // 9/11 attack
holidays.add(date(2001, 9, 13)); // 9/11 attack
holidays.add(date(2001, 9, 14)); // 9/11 attack
holidays.add(date(2004, 6, 11)); // Death of Ronald Reagan
holidays.add(date(2007, 1, 2)); // Death of Gerald Ford
holidays.add(date(2012, 10, 30)); // Hurricane Sandy
holidays.add(date(2018, 12, 5)); // Death of George H.W. Bush
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.NYSE, holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate JPTO
// data sources
// https://www.boj.or.jp/en/about/outline/holi.htm/
// http://web.archive.org/web/20110513190217/http://www.boj.or.jp/en/about/outline/holi.htm/
// http://web.archive.org/web/20130502031733/http://www.boj.or.jp/en/about/outline/holi.htm
// http://www8.cao.go.jp/chosei/shukujitsu/gaiyou.html (law)
// http://www.nao.ac.jp/faq/a0301.html (equinox)
// http://eco.mtk.nao.ac.jp/koyomi/faq/holiday.html.en
// https://www.jpx.co.jp/english/announce/market-holidays.html
// https://www.loc.gov/law/foreign-news/article/japan-three-holidays-to-be-moved-to-ease-2020-olympic-ceremony-traffic/
// https://www.nippon.com/en/japan-data/h00738/
static ImmutableHolidayCalendar generateTokyo() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
holidays.add(date(year, 1, 2));
holidays.add(date(year, 1, 3));
// coming of age
if (year >= 2000) {
holidays.add(date(year, 1, 1).with(dayOfWeekInMonth(2, MONDAY)));
} else {
holidays.add(bumpSunToMon(date(year, 1, 15)));
}
// national foundation
if (year >= 1967) {
holidays.add(bumpSunToMon(date(year, 2, 11)));
}
// vernal equinox (from 1948), 20th or 21st (predictions/facts 2000 to 2030)
if (year == 2000 || year == 2001 || year == 2004 || year == 2005 || year == 2008 || year == 2009 ||
year == 2012 || year == 2013 || year == 2016 || year == 2017 ||
year == 2020 || year == 2021 || year == 2024 || year == 2025 || year == 2026 || year == 2028 ||
year == 2029 || year == 2030) {
holidays.add(bumpSunToMon(date(year, 3, 20)));
} else {
holidays.add(bumpSunToMon(date(year, 3, 21)));
}
// showa (from 2007 onwards), greenery (from 1989 to 2006), emperor (before 1989)
// http://news.bbc.co.uk/1/hi/world/asia-pacific/4543461.stm
holidays.add(bumpSunToMon(date(year, 4, 29)));
// constitution (from 1948)
// greenery (from 2007 onwards); before that, a holiday between two other holidays (from 1985)
// children (from 1948)
if (year >= 1985) {
holidays.add(bumpSunToMon(date(year, 5, 3)));
holidays.add(bumpSunToMon(date(year, 5, 4)));
holidays.add(bumpSunToMon(date(year, 5, 5)));
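// when 3 or 4 May falls on a Sunday, bumping to Monday would clash with the next holiday, so 6 May is taken instead (e.g. Wed 6 May 2009)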
if (year >= 2007 && (date(year, 5, 3).getDayOfWeek() == SUNDAY || date(year, 5, 4).getDayOfWeek() == SUNDAY)) {
holidays.add(date(year, 5, 6));
}
} else {
holidays.add(bumpSunToMon(date(year, 5, 3)));
holidays.add(bumpSunToMon(date(year, 5, 5)));
}
// marine
if (year == 2021) {
// moved because of the Olympics
holidays.add(date(year, 7, 22));
} else if (year == 2020) {
// moved because of the Olympics (day prior to opening ceremony)
holidays.add(date(year, 7, 23));
} else if (year >= 2003) {
holidays.add(date(year, 7, 1).with(dayOfWeekInMonth(3, MONDAY)));
} else if (year >= 1996) {
holidays.add(bumpSunToMon(date(year, 7, 20)));
}
// mountain
if (year == 2021) {
// moved because of the Olympics
holidays.add(date(year, 8, 9));
} else if (year == 2020) {
// moved because of the Olympics (day after closing ceremony)
holidays.add(date(year, 8, 10));
} else if (year >= 2016) {
holidays.add(bumpSunToMon(date(year, 8, 11)));
}
// aged
if (year >= 2003) {
holidays.add(date(year, 9, 1).with(dayOfWeekInMonth(3, MONDAY)));
} else if (year >= 1966) {
holidays.add(bumpSunToMon(date(year, 9, 15)));
}
// autumn equinox (from 1948), 22nd or 23rd (predictions/facts 2000 to 2030)
if (year == 2012 || year == 2016 || year == 2020 || year == 2024 || year == 2028) {
holidays.add(bumpSunToMon(date(year, 9, 22)));
} else {
holidays.add(bumpSunToMon(date(year, 9, 23)));
}
citizensDay(holidays, date(year, 9, 20), date(year, 9, 22));
citizensDay(holidays, date(year, 9, 21), date(year, 9, 23));
// health-sports
if (year == 2021) {
// moved because of the Olympics
holidays.add(date(year, 7, 23));
} else if (year == 2020) {
// moved because of the Olympics (day of opening ceremony)
holidays.add(date(year, 7, 24));
} else if (year >= 2000) {
holidays.add(date(year, 10, 1).with(dayOfWeekInMonth(2, MONDAY)));
} else if (year >= 1966) {
holidays.add(bumpSunToMon(date(year, 10, 10)));
}
// culture (from 1948)
holidays.add(bumpSunToMon(date(year, 11, 3)));
// labor (from 1948)
holidays.add(bumpSunToMon(date(year, 11, 23)));
// emperor (current emperor's birthday)
if (year >= 1990 && year < 2019) {
holidays.add(bumpSunToMon(date(year, 12, 23)));
} else if (year >= 2020) {
holidays.add(bumpSunToMon(date(year, 2, 23)));
}
// new years eve - a Bank of Japan holiday, but not a national holiday
holidays.add(bumpSunToMon(date(year, 12, 31)));
}
holidays.add(date(1959, 4, 10)); // marriage akihito
holidays.add(date(1989, 2, 24)); // funeral showa
holidays.add(date(1990, 11, 12)); // enthrone akihito
holidays.add(date(1993, 6, 9)); // marriage naruhito
holidays.add(date(2019, 4, 30)); // abdication
holidays.add(date(2019, 5, 1)); // accession
holidays.add(date(2019, 5, 2)); // accession
holidays.add(date(2019, 10, 22)); // enthronement
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarIds.JPTO, holidays, SATURDAY, SUNDAY);
}
// extra day between two other holidays, appears to exclude weekends
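// e.g. in 2009, respect-for-the-aged day (Mon 21 Sep) and the autumnal equinox (Wed 23 Sep) made Tue 22 Sep a holiday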
private static void citizensDay(List<LocalDate> holidays, LocalDate date1, LocalDate date2) {
if (holidays.contains(date1) && holidays.contains(date2)) {
if (date1.getDayOfWeek() == MONDAY || date1.getDayOfWeek() == TUESDAY || date1.getDayOfWeek() == WEDNESDAY) {
holidays.add(date1.plusDays(1));
}
}
}
//-------------------------------------------------------------------------
// generate CAMO
// data sources
// https://www.cnt.gouv.qc.ca/en/leaves-and-absences/statutory-holidays/index.html
// https://www.canada.ca/en/revenue-agency/services/tax/public-holidays.html
// http://www.statutoryholidayscanada.com/
static ImmutableHolidayCalendar generateMontreal() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(bumpToMon(date(year, 1, 1)));
// good friday
holidays.add(easter(year).minusDays(2));
// patriots
holidays.add(date(year, 5, 25).with(TemporalAdjusters.previous(MONDAY)));
// fete nationale quebec
holidays.add(bumpToMon(date(year, 6, 24)));
// canada
holidays.add(bumpToMon(date(year, 7, 1)));
// labour
holidays.add(first(year, 9).with(dayOfWeekInMonth(1, MONDAY)));
// thanksgiving
holidays.add(first(year, 10).with(dayOfWeekInMonth(2, MONDAY)));
// christmas
holidays.add(christmasBumpedSatSun(year));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("CAMO"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate CATO
// data sources
// http://www.labour.gov.on.ca/english/es/pubs/guide/publicholidays.php
// http://www.cra-arc.gc.ca/tx/hldys/menu-eng.html
// http://www.tmxmoney.com/en/investor_tools/market_hours.html
// http://www.statutoryholidayscanada.com/
// http://www.osc.gov.on.ca/en/SecuritiesLaw_csa_20151209_13-315_sra-closed-dates.htm
static ImmutableHolidayCalendar generateToronto() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year (public)
holidays.add(bumpToMon(date(year, 1, 1)));
// family (public)
if (year >= 2008) {
holidays.add(first(year, 2).with(dayOfWeekInMonth(3, MONDAY)));
}
// good friday (public)
holidays.add(easter(year).minusDays(2));
// victoria (public)
holidays.add(date(year, 5, 25).with(TemporalAdjusters.previous(MONDAY)));
// canada (public)
holidays.add(bumpToMon(date(year, 7, 1)));
// civic
holidays.add(first(year, 8).with(dayOfWeekInMonth(1, MONDAY)));
// labour (public)
holidays.add(first(year, 9).with(dayOfWeekInMonth(1, MONDAY)));
// thanksgiving (public)
holidays.add(first(year, 10).with(dayOfWeekInMonth(2, MONDAY)));
// remembrance
holidays.add(bumpToMon(date(year, 11, 11)));
// christmas (public)
holidays.add(christmasBumpedSatSun(year));
// boxing (public)
holidays.add(boxingDayBumpedSatSun(year));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("CATO"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate DKCO
// data sources
// http://www.finansraadet.dk/Bankkunde/Pages/bankhelligdage.aspx
// web archive history of those pages
static ImmutableHolidayCalendar generateCopenhagen() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
// maundy thursday
holidays.add(easter(year).minusDays(3));
// good friday
holidays.add(easter(year).minusDays(2));
// easter monday
holidays.add(easter(year).plusDays(1));
// prayer day (Friday)
holidays.add(easter(year).plusDays(26));
// ascension (Thursday)
holidays.add(easter(year).plusDays(39));
// ascension + 1 (Friday)
holidays.add(easter(year).plusDays(40));
// whit monday
holidays.add(easter(year).plusDays(50));
// constitution
holidays.add(date(year, 6, 5));
// christmas eve
holidays.add(date(year, 12, 24));
// christmas
holidays.add(date(year, 12, 25));
// boxing
holidays.add(date(year, 12, 26));
// new years eve
holidays.add(date(year, 12, 31));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("DKCO"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate NOOS
// data sources
// http://www.oslobors.no/ob_eng/Oslo-Boers/About-Oslo-Boers/Opening-hours
// http://www.oslobors.no/Oslo-Boers/Om-Oslo-Boers/AApningstider
// web archive history of those pages
static ImmutableHolidayCalendar generateOslo() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
// maundy thursday
holidays.add(easter(year).minusDays(3));
// good friday
holidays.add(easter(year).minusDays(2));
// easter monday
holidays.add(easter(year).plusDays(1));
// labour
holidays.add(date(year, 5, 1));
// constitution
holidays.add(date(year, 5, 17));
// ascension
holidays.add(easter(year).plusDays(39));
// whit monday
holidays.add(easter(year).plusDays(50));
// christmas eve
holidays.add(date(year, 12, 24));
// christmas
holidays.add(date(year, 12, 25));
// boxing
holidays.add(date(year, 12, 26));
// new years eve
holidays.add(date(year, 12, 31));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("NOOS"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate NZAU
// https://www.nzfma.org/Site/practices_standards/market_conventions.aspx
static ImmutableHolidayCalendar generateAuckland() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
newZealand(holidays, year);
// auckland anniversary day
holidays.add(date(year, 1, 29).minusDays(3).with(nextOrSame(MONDAY)));
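// i.e. the Monday nearest 29 January, always falling between 26 Jan and 1 Feb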
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("NZAU"), holidays, SATURDAY, SUNDAY);
}
// generate NZWE
// https://www.nzfma.org/Site/practices_standards/market_conventions.aspx
static ImmutableHolidayCalendar generateWellington() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
newZealand(holidays, year);
// wellington anniversary day
holidays.add(date(year, 1, 22).minusDays(3).with(nextOrSame(MONDAY)));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("NZWE"), holidays, SATURDAY, SUNDAY);
}
// generate NZBD
// https://www.nzfma.org/Site/practices_standards/market_conventions.aspx
static ImmutableHolidayCalendar generateNewZealand() {
// artificial non-ISDA definition named after BRBD for Brazil
// this is needed as NZD-BBR index is published on both Wellington and Auckland anniversary days
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
newZealand(holidays, year);
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("NZBD"), holidays, SATURDAY, SUNDAY);
}
private static void newZealand(List<LocalDate> holidays, int year) {
// new year and day after
LocalDate newYear = bumpToMon(date(year, 1, 1));
holidays.add(newYear);
holidays.add(bumpToMon(newYear.plusDays(1)));
// waitangi day
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/
if (year >= 2014) {
holidays.add(bumpToMon(date(year, 2, 6)));
} else {
holidays.add(date(year, 2, 6));
}
// good friday
holidays.add(easter(year).minusDays(2));
// easter monday
holidays.add(easter(year).plusDays(1));
// anzac day
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/
if (year >= 2014) {
holidays.add(bumpToMon(date(year, 4, 25)));
} else {
holidays.add(date(year, 4, 25));
}
// queen's birthday
holidays.add(first(year, 6).with(firstInMonth(MONDAY)));
// labour day
holidays.add(first(year, 10).with(dayOfWeekInMonth(4, MONDAY)));
// christmas
holidays.add(christmasBumpedSatSun(year));
holidays.add(boxingDayBumpedSatSun(year));
}
//-------------------------------------------------------------------------
// generate PLWA
// data sources
// http://isap.sejm.gov.pl/DetailsServlet?id=WDU19510040028 and linked pages
// https://www.gpw.pl/dni_bez_sesji_en
// http://jollyday.sourceforge.net/data/pl.html
// https://www.gpw.pl/session-details
// https://www.gpw.pl/news?cmn_id=107609&title=No+exchange+trading+session+on+12+November+2018
// https://www.gpw.pl/news?cmn_id=107794&title=December+24%2C+2018+-+Closing+day
static ImmutableHolidayCalendar generateWarsaw() {
// holiday law dates from 1951, but the situation before then is unknown, so the 1951 date is ignored
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
// epiphany
if (year < 1961 || year >= 2011) {
holidays.add(date(year, 1, 6));
}
// easter monday
holidays.add(easter(year).plusDays(1));
// state
holidays.add(date(year, 5, 1));
// constitution
if (year >= 1990) {
holidays.add(date(year, 5, 3));
}
// rebirth/national
if (year < 1990) {
holidays.add(date(year, 7, 22));
}
// corpus christi
holidays.add(easter(year).plusDays(60));
// assumption
if (year < 1961 || year >= 1989) {
holidays.add(date(year, 8, 15));
}
// all saints
holidays.add(date(year, 11, 1));
// independence
if (year >= 1990) {
holidays.add(date(year, 11, 11));
}
// christmas (exchange)
holidays.add(date(year, 12, 24));
// christmas
holidays.add(date(year, 12, 25));
// boxing
holidays.add(date(year, 12, 26));
// new years eve (exchange, rule based on sample data)
LocalDate nyeve = date(year, 12, 31);
if (nyeve.getDayOfWeek() == MONDAY || nyeve.getDayOfWeek() == THURSDAY || nyeve.getDayOfWeek() == FRIDAY) {
holidays.add(nyeve);
}
}
// 100th independence day anniversary
holidays.add(date(2018, 11, 12));
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("PLWA"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate SEST
// data sources - history of dates that STIBOR fixing occurred
// http://www.riksbank.se/en/Interest-and-exchange-rates/search-interest-rates-exchange-rates/?g5-SEDP1MSTIBOR=on&from=2016-01-01&to=2016-10-05&f=Day&cAverage=Average&s=Comma#search
static ImmutableHolidayCalendar generateStockholm() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
// epiphany
holidays.add(date(year, 1, 6));
// good friday
holidays.add(easter(year).minusDays(2));
// easter monday
holidays.add(easter(year).plusDays(1));
// labour
holidays.add(date(year, 5, 1));
// ascension
holidays.add(easter(year).plusDays(39));
// midsummer friday
holidays.add(date(year, 6, 19).with(nextOrSame(FRIDAY)));
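// i.e. the Friday falling between 19 and 25 June (midsummer eve)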
// national
if (year > 2005) {
holidays.add(date(year, 6, 6));
}
// christmas
holidays.add(date(year, 12, 24));
// christmas
holidays.add(date(year, 12, 25));
// boxing
holidays.add(date(year, 12, 26));
// new years eve (fixings, rule based on sample data)
holidays.add(date(year, 12, 31));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("SEST"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate AUSY
// http://www.rba.gov.au/schedules-events/bank-holidays/bank-holidays-2016.html
// http://www.rba.gov.au/schedules-events/bank-holidays/bank-holidays-2017.html
// web archive history of those pages
static ImmutableHolidayCalendar generateSydney() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(bumpToMon(date(year, 1, 1)));
// australia day
holidays.add(bumpToMon(date(year, 1, 26)));
// good friday
holidays.add(easter(year).minusDays(2));
// easter monday
holidays.add(easter(year).plusDays(1));
// anzac day
holidays.add(date(year, 4, 25));
// queen's birthday
holidays.add(first(year, 6).with(dayOfWeekInMonth(2, MONDAY)));
// bank holiday
holidays.add(first(year, 8).with(dayOfWeekInMonth(1, MONDAY)));
// labour day
holidays.add(first(year, 10).with(dayOfWeekInMonth(1, MONDAY)));
// christmas
holidays.add(christmasBumpedSatSun(year));
// boxing
holidays.add(boxingDayBumpedSatSun(year));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("AUSY"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate ZAJO
// http://www.gov.za/about-sa/public-holidays
// http://www.gov.za/sites/www.gov.za/files/Act36of1994.pdf
// http://www.gov.za/sites/www.gov.za/files/Act48of1995.pdf
// 27th Dec when Tue http://www.gov.za/sites/www.gov.za/files/34881_proc72.pdf
static ImmutableHolidayCalendar generateJohannesburg() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// from 1995 (act of 7 Dec 1994)
// older act from 1952 not implemented here
// new year
holidays.add(bumpSunToMon(date(year, 1, 1)));
// human rights day
holidays.add(bumpSunToMon(date(year, 3, 21)));
// good friday
holidays.add(easter(year).minusDays(2));
// family day (easter monday)
holidays.add(easter(year).plusDays(1));
// freedom day
holidays.add(bumpSunToMon(date(year, 4, 27)));
// workers day
holidays.add(bumpSunToMon(date(year, 5, 1)));
// youth day
holidays.add(bumpSunToMon(date(year, 6, 16)));
// women's day
holidays.add(bumpSunToMon(date(year, 8, 9)));
// heritage day
holidays.add(bumpSunToMon(date(year, 9, 24)));
// reconciliation
holidays.add(bumpSunToMon(date(year, 12, 16)));
// christmas
holidays.add(christmasBumpedSun(year));
// goodwill
holidays.add(boxingDayBumpedSun(year));
}
// mostly election days
// http://www.gov.za/sites/www.gov.za/files/40125_proc%2045.pdf
holidays.add(date(2016, 8, 3));
// http://www.gov.za/sites/www.gov.za/files/37376_proc13.pdf
holidays.add(date(2014, 5, 7));
// http://www.gov.za/sites/www.gov.za/files/34127_proc27.pdf
holidays.add(date(2011, 5, 18));
// http://www.gov.za/sites/www.gov.za/files/32039_17.pdf
holidays.add(date(2009, 4, 22));
// http://www.gov.za/sites/www.gov.za/files/30900_7.pdf (moved human rights day)
holidays.add(date(2008, 5, 2));
// http://www.gov.za/sites/www.gov.za/files/28442_0.pdf
holidays.add(date(2006, 3, 1));
// http://www.gov.za/sites/www.gov.za/files/26075.pdf
holidays.add(date(2004, 4, 14));
// http://www.gov.za/sites/www.gov.za/files/20032_0.pdf
holidays.add(date(1999, 12, 31));
holidays.add(date(2000, 1, 1));
holidays.add(date(2000, 1, 2));
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("ZAJO"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// generate HUBU
// http://www.magyarkozlony.hu/dokumentumok/b0d596a3e6ce15a2350a9e138c058a78dd8622d0/megtekintes (article 148)
// http://www.mfa.gov.hu/NR/rdonlyres/18C1949E-D740-45E0-923A-BDFC81EC44C8/0/ListofHolidays2016.pdf
// http://jollyday.sourceforge.net/data/hu.html
// https://englishhungary.wordpress.com/2012/01/15/bridge-days/
// http://www.ucmsgroup.hu/newsletter/public-holiday-and-related-work-schedule-changes-in-2015/
// http://www.ucmsgroup.hu/newsletter/public-holiday-and-related-work-schedule-changes-in-2014/
// https://www.bse.hu/Products-and-Services/Trading-information/tranding-calendar-2019
// https://www.bse.hu/Products-and-Services/Trading-information/trading-calendar-2020
static ImmutableHolidayCalendar generateBudapest() {
List<LocalDate> holidays = new ArrayList<>(2000);
Set<LocalDate> workDays = new HashSet<>(500);
for (int year = 1950; year <= 2099; year++) {
// new year
addDateWithHungarianBridging(date(year, 1, 1), -1, 1, holidays, workDays);
// national day
addDateWithHungarianBridging(date(year, 3, 15), -2, 1, holidays, workDays);
if (year >= 2017) {
// good friday
holidays.add(easter(year).minusDays(2));
}
// easter monday
holidays.add(easter(year).plusDays(1));
// labour day
addDateWithHungarianBridging(date(year, 5, 1), 0, 1, holidays, workDays);
// pentecost monday
holidays.add(easter(year).plusDays(50));
// state foundation day
// in 2015 the working saturday was 2 weeks before, in 2020 it was 1 week after
// unclear what the logic behind this is
int foundationDayThuRelativeWeeks = year == 2020 ? 1 : -2;
addDateWithHungarianBridging(date(year, 8, 20), 0, foundationDayThuRelativeWeeks, holidays, workDays);
// national day
addDateWithHungarianBridging(date(year, 10, 23), 0, -1, holidays, workDays);
// all saints day
addDateWithHungarianBridging(date(year, 11, 1), -3, 1, holidays, workDays);
// christmas
holidays.add(date(year, 12, 24));
holidays.add(date(year, 12, 25));
holidays.add(date(year, 12, 26));
if (date(year, 12, 25).getDayOfWeek() == TUESDAY) {
holidays.add(date(year, 12, 24));
workDays.add(date(year, 12, 15));
} else if (date(year, 12, 25).getDayOfWeek() == WEDNESDAY) {
holidays.add(date(year, 12, 24));
holidays.add(date(year, 12, 27));
workDays.add(date(year, 12, 7));
workDays.add(date(year, 12, 21));
} else if (date(year, 12, 25).getDayOfWeek() == THURSDAY) {
holidays.add(date(year, 12, 24));
} else if (date(year, 12, 25).getDayOfWeek() == FRIDAY) {
holidays.add(date(year, 12, 24));
workDays.add(date(year, 12, 12));
}
}
// some Saturdays are work days
addHungarianSaturdays(holidays, workDays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("HUBU"), holidays, SUNDAY, SUNDAY);
}
// an attempt to divine the official rules from the data available
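// e.g. with relativeWeeksTue = -2, a Tuesday holiday such as 15 Mar 2016 makes Mon 14 Mar a holiday and Sat 5 Mar a workday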
private static void addDateWithHungarianBridging(
LocalDate date,
int relativeWeeksTue,
int relativeWeeksThu,
List<LocalDate> holidays,
Set<LocalDate> workDays) {
DayOfWeek dow = date.getDayOfWeek();
switch (dow) {
case MONDAY:
case WEDNESDAY:
case FRIDAY:
holidays.add(date);
return;
case TUESDAY:
holidays.add(date.minusDays(1));
holidays.add(date);
workDays.add(date.plusDays(4).plusWeeks(relativeWeeksTue)); // a Saturday is now a workday
return;
case THURSDAY:
holidays.add(date.plusDays(1));
holidays.add(date);
workDays.add(date.plusDays(2).plusWeeks(relativeWeeksThu)); // a Saturday is now a workday
return;
case SATURDAY:
case SUNDAY:
default:
return;
}
}
private static void addHungarianSaturdays(List<LocalDate> holidays, Set<LocalDate> workDays) {
// remove all saturdays and sundays
removeSatSun(holidays);
// add all saturdays
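// (7 Jan 1950 is the first Saturday of 1950)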
LocalDate endDate = LocalDate.of(2099, 12, 31);
LocalDate date = LocalDate.of(1950, 1, 7);
while (date.isBefore(endDate)) {
if (!workDays.contains(date)) {
holidays.add(date);
}
date = date.plusDays(7);
}
}
//-------------------------------------------------------------------------
// generate MXMC
// dates of published fixings - https://twitter.com/Banxico
// http://www.banxico.org.mx/SieInternet/consultarDirectorioInternetAction.do?accion=consultarCuadro&idCuadro=CF111&locale=en
// http://www.gob.mx/cms/uploads/attachment/file/161094/calendario_vacaciones2016.pdf
static ImmutableHolidayCalendar generateMexicoCity() {
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
// constitution
holidays.add(first(year, 2).with(firstInMonth(MONDAY)));
// president
holidays.add(first(year, 3).with(firstInMonth(MONDAY)).plusWeeks(2));
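// i.e. the third Monday of March; the same idiom gives the third Monday of November for revolution day below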
// maundy thursday
holidays.add(easter(year).minusDays(3));
// good friday
holidays.add(easter(year).minusDays(2));
// labour
holidays.add(date(year, 5, 1));
// independence
holidays.add(date(year, 9, 16));
// dead
holidays.add(date(year, 11, 2));
// revolution
holidays.add(first(year, 11).with(firstInMonth(MONDAY)).plusWeeks(2));
// guadalupe
holidays.add(date(year, 12, 12));
// christmas
holidays.add(date(year, 12, 25));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("MXMC"), holidays, SATURDAY, SUNDAY);
}
// generate BRBD
// a holiday in this calendar is only declared if there is a holiday in Sao Paulo, Rio de Janeiro and Brasilia
// http://www.planalto.gov.br/ccivil_03/leis/l0662.htm
// http://www.planalto.gov.br/ccivil_03/Leis/L6802.htm
// http://www.planalto.gov.br/ccivil_03/leis/2002/L10607.htm
static ImmutableHolidayCalendar generateBrazil() {
// base law is from 1949, reworded in 2002
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
// carnival
holidays.add(easter(year).minusDays(48));
holidays.add(easter(year).minusDays(47));
// tiradentes
holidays.add(date(year, 4, 21));
// good friday
holidays.add(easter(year).minusDays(2));
// labour
holidays.add(date(year, 5, 1));
// corpus christi
holidays.add(easter(year).plusDays(60));
// independence
holidays.add(date(year, 9, 7));
// aparecida
if (year >= 1980) {
holidays.add(date(year, 10, 12));
}
// dead
holidays.add(date(year, 11, 2));
// republic
holidays.add(date(year, 11, 15));
// christmas
holidays.add(date(year, 12, 25));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("BRBD"), holidays, SATURDAY, SUNDAY);
}
// generate CZPR
// https://www.cnb.cz/en/public/media_service/schedules/media_svatky.html
static ImmutableHolidayCalendar generatePrague() {
// dates are fixed - no moving Sunday to Monday or similar
List<LocalDate> holidays = new ArrayList<>(2000);
for (int year = 1950; year <= 2099; year++) {
// new year
holidays.add(date(year, 1, 1));
// good friday
if (year > 2015) {
holidays.add(easter(year).minusDays(2));
}
// easter monday
holidays.add(easter(year).plusDays(1));
// may day
holidays.add(date(year, 5, 1));
// liberation from fascism
holidays.add(date(year, 5, 8));
// cyril and methodius
holidays.add(date(year, 7, 5));
// jan hus
holidays.add(date(year, 7, 6));
// statehood
holidays.add(date(year, 9, 28));
// republic
holidays.add(date(year, 10, 28));
// freedom and democracy
holidays.add(date(year, 11, 17));
// christmas eve
holidays.add(date(year, 12, 24));
// christmas
holidays.add(date(year, 12, 25));
// boxing
holidays.add(date(year, 12, 26));
}
removeSatSun(holidays);
return ImmutableHolidayCalendar.of(HolidayCalendarId.of("CZPR"), holidays, SATURDAY, SUNDAY);
}
//-------------------------------------------------------------------------
// date
private static LocalDate date(int year, int month, int day) {
return LocalDate.of(year, month, day);
}
// bump to following Monday
private static LocalDate bumpToMon(LocalDate date) {
if (date.getDayOfWeek() == SATURDAY) {
return date.plusDays(2);
} else if (date.getDayOfWeek() == SUNDAY) {
return date.plusDays(1);
}
return date;
}
// bump Sunday to following Monday
private static LocalDate bumpSunToMon(LocalDate date) {
if (date.getDayOfWeek() == SUNDAY) {
return date.plusDays(1);
}
return date;
}
// bump Saturday to Friday and Sunday to Monday
private static LocalDate bumpToFriOrMon(LocalDate date) {
if (date.getDayOfWeek() == SATURDAY) {
return date.minusDays(1);
} else if (date.getDayOfWeek() == SUNDAY) {
return date.plusDays(1);
}
return date;
}
// christmas
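// observed on 27 Dec when 25 Dec falls on Sat/Sun, pairing with boxingDayBumpedSatSun so both holidays land on weekdays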
private static LocalDate christmasBumpedSatSun(int year) {
LocalDate base = LocalDate.of(year, 12, 25);
if (base.getDayOfWeek() == SATURDAY || base.getDayOfWeek() == SUNDAY) {
return LocalDate.of(year, 12, 27);
}
return base;
}
// christmas (if Christmas is Sunday, moved to Monday)
private static LocalDate christmasBumpedSun(int year) {
LocalDate base = LocalDate.of(year, 12, 25);
if (base.getDayOfWeek() == SUNDAY) {
return LocalDate.of(year, 12, 26);
}
return base;
}
// boxing day
private static LocalDate boxingDayBumpedSatSun(int year) {
LocalDate base = LocalDate.of(year, 12, 26);
if (base.getDayOfWeek() == SATURDAY || base.getDayOfWeek() == SUNDAY) {
return LocalDate.of(year, 12, 28);
}
return base;
}
// boxing day (if Christmas is Sunday, boxing day moved from Monday to Tuesday)
private static LocalDate boxingDayBumpedSun(int year) {
LocalDate base = LocalDate.of(year, 12, 26);
if (base.getDayOfWeek() == MONDAY) {
return LocalDate.of(year, 12, 27);
}
return base;
}
// first of a month
private static LocalDate first(int year, int month) {
return LocalDate.of(year, month, 1);
}
// remove any holidays covered by Sat/Sun
private static void removeSatSun(List<LocalDate> holidays) {
holidays.removeIf(date -> date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY);
}
// calculate easter day by Delambre
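// sanity checks: easter(2000) = 2000-04-23, easter(2017) = 2017-04-16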
static LocalDate easter(int year) {
int a = year % 19;
int b = year / 100;
int c = year % 100;
int d = b / 4;
int e = b % 4;
int f = (b + 8) / 25;
int g = (b - f + 1) / 3;
int h = (19 * a + b - d - g + 15) % 30;
int i = c / 4;
int k = c % 4;
int l = (32 + 2 * e + 2 * i - h - k) % 7;
int m = (a + 11 * h + 22 * l) / 451;
int month = (h + l - 7 * m + 114) / 31;
int day = ((h + l - 7 * m + 114) % 31) + 1;
return LocalDate.of(year, month, day);
}
}
|
apache-2.0
|
lapanen/stealth
|
stealth-core/src/main/java/org/lapanen/stealth/maven/event/ArtifactBuildEvent.java
|
544
|
package org.lapanen.stealth.maven.event;
import org.lapanen.stealth.event.AbstractStealthEvent;
import org.lapanen.stealth.maven.artifact.Artifact;
import org.lapanen.stealth.naming.EventIdentifier;
public class ArtifactBuildEvent extends AbstractStealthEvent {
private final Artifact artifact;
protected ArtifactBuildEvent(final EventIdentifier eventIdentifier, final Artifact artifact) {
super(eventIdentifier);
this.artifact = artifact;
}
public Artifact getArtifact() {
return artifact;
}
}
|
apache-2.0
|
nita22/OpenTheDoor
|
app/src/main/java/com/jiazi/openthedoor/Util/CustomBufferHead.java
|
140
|
package com.jiazi.openthedoor.Util;
/**
* Buffer header class
*/
public class CustomBufferHead {
public int startcode;
public int length;
}
|
apache-2.0
|
dvstate/minio
|
cmd/config-migrate_test.go
|
10360
|
/*
* Minio Cloud Storage, (C) 2016, 2017 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmd
import (
"fmt"
"io/ioutil"
"os"
"testing"
)
// Test if config v1 is purged
func TestServerConfigMigrateV1(t *testing.T) {
rootPath, err := newTestConfig(globalMinioDefaultRegion)
if err != nil {
t.Fatalf("Init Test config failed")
}
// remove the root directory after the test ends.
defer removeAll(rootPath)
setConfigDir(rootPath)
// Create a V1 config json file and store it
configJSON := "{ \"version\":\"1\", \"accessKeyId\":\"abcde\", \"secretAccessKey\":\"abcdefgh\"}"
configPath := rootPath + "/fsUsers.json"
if err := ioutil.WriteFile(configPath, []byte(configJSON), 0644); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Fire a migrateConfig()
if err := migrateConfig(); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Check if config v1 is removed from filesystem
if _, err := osStat(configPath); err == nil || !os.IsNotExist(err) {
t.Fatal("Config V1 file is not purged")
}
// Initialize server config and check again if everything is fine
if err := loadConfig(); err != nil {
t.Fatalf("Unable to initialize from updated config file %s", err)
}
}
// Test if all migrate code returns nil when config file does not
// exist
func TestServerConfigMigrateInexistentConfig(t *testing.T) {
rootPath, err := newTestConfig(globalMinioDefaultRegion)
if err != nil {
t.Fatalf("Init Test config failed")
}
// remove the root directory after the test ends.
defer removeAll(rootPath)
setConfigDir(rootPath)
configPath := rootPath + "/" + minioConfigFile
// Remove config file
if err := os.Remove(configPath); err != nil {
t.Fatal("Unexpected error: ", err)
}
if err := migrateV2ToV3(); err != nil {
t.Fatal("migrate v2 to v3 should succeed when no config file is found")
}
if err := migrateV3ToV4(); err != nil {
t.Fatal("migrate v3 to v4 should succeed when no config file is found")
}
if err := migrateV4ToV5(); err != nil {
t.Fatal("migrate v4 to v5 should succeed when no config file is found")
}
if err := migrateV5ToV6(); err != nil {
t.Fatal("migrate v5 to v6 should succeed when no config file is found")
}
if err := migrateV6ToV7(); err != nil {
t.Fatal("migrate v6 to v7 should succeed when no config file is found")
}
if err := migrateV7ToV8(); err != nil {
t.Fatal("migrate v7 to v8 should succeed when no config file is found")
}
if err := migrateV8ToV9(); err != nil {
t.Fatal("migrate v8 to v9 should succeed when no config file is found")
}
if err := migrateV9ToV10(); err != nil {
t.Fatal("migrate v9 to v10 should succeed when no config file is found")
}
if err := migrateV10ToV11(); err != nil {
t.Fatal("migrate v10 to v11 should succeed when no config file is found")
}
if err := migrateV11ToV12(); err != nil {
t.Fatal("migrate v11 to v12 should succeed when no config file is found")
}
if err := migrateV12ToV13(); err != nil {
t.Fatal("migrate v12 to v13 should succeed when no config file is found")
}
if err := migrateV13ToV14(); err != nil {
t.Fatal("migrate v13 to v14 should succeed when no config file is found")
}
if err := migrateV14ToV15(); err != nil {
t.Fatal("migrate v14 to v15 should succeed when no config file is found")
}
if err := migrateV15ToV16(); err != nil {
t.Fatal("migrate v15 to v16 should succeed when no config file is found")
}
if err := migrateV16ToV17(); err != nil {
t.Fatal("migrate v16 to v17 should succeed when no config file is found")
}
if err := migrateV17ToV18(); err != nil {
t.Fatal("migrate v17 to v18 should succeed when no config file is found")
}
if err := migrateV18ToV19(); err != nil {
t.Fatal("migrate v18 to v19 should succeed when no config file is found")
}
}
// Test if a config migration from v2 to v19 is successfully done
func TestServerConfigMigrateV2toV19(t *testing.T) {
rootPath, err := newTestConfig(globalMinioDefaultRegion)
if err != nil {
t.Fatalf("Init Test config failed")
}
// remove the root directory after the test ends.
defer removeAll(rootPath)
setConfigDir(rootPath)
configPath := rootPath + "/" + minioConfigFile
// Create a corrupted config file
if err := ioutil.WriteFile(configPath, []byte("{ \"version\":\"2\","), 0644); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Fire a migrateConfig()
if err := migrateConfig(); err == nil {
t.Fatal("migration should fail with corrupted config file")
}
accessKey := "accessfoo"
secretKey := "secretfoo"
// Create a V2 config json file and store it
configJSON := "{ \"version\":\"2\", \"credentials\": {\"accessKeyId\":\"" + accessKey + "\", \"secretAccessKey\":\"" + secretKey + "\", \"region\":\"us-east-1\"}, \"mongoLogger\":{\"addr\":\"127.0.0.1:3543\", \"db\":\"foodb\", \"collection\":\"foo\"}, \"syslogLogger\":{\"network\":\"127.0.0.1:543\", \"addr\":\"addr\"}, \"fileLogger\":{\"filename\":\"log.out\"}}"
if err := ioutil.WriteFile(configPath, []byte(configJSON), 0644); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Fire a migrateConfig()
if err := migrateConfig(); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Initialize server config and check again if everything is fine
if err := loadConfig(); err != nil {
t.Fatalf("Unable to initialize from updated config file %s", err)
}
// Check the version number in the upgraded config file
expectedVersion := v19
if serverConfig.Version != expectedVersion {
t.Fatalf("Expect version "+expectedVersion+", found: %v", serverConfig.Version)
}
// Check if accessKey and secretKey are not altered during migration
if serverConfig.Credential.AccessKey != accessKey {
t.Fatalf("Access key lost during migration, expected: %v, found:%v", accessKey, serverConfig.Credential.AccessKey)
}
if serverConfig.Credential.SecretKey != secretKey {
t.Fatalf("Secret key lost during migration, expected: %v, found: %v", secretKey, serverConfig.Credential.SecretKey)
}
}
// Test if all migrate code returns error with corrupted config files
func TestServerConfigMigrateFaultyConfig(t *testing.T) {
rootPath, err := newTestConfig(globalMinioDefaultRegion)
if err != nil {
t.Fatalf("Init Test config failed")
}
// remove the root directory after the test ends.
defer removeAll(rootPath)
setConfigDir(rootPath)
configPath := rootPath + "/" + minioConfigFile
// Create a corrupted config file
if err := ioutil.WriteFile(configPath, []byte("{ \"version\":\"2\", \"test\":"), 0644); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Test different migrate versions and be sure they are returning an error
if err := migrateV2ToV3(); err == nil {
t.Fatal("migrateConfigV2ToV3() should fail with a corrupted json")
}
if err := migrateV3ToV4(); err == nil {
t.Fatal("migrateConfigV3ToV4() should fail with a corrupted json")
}
if err := migrateV4ToV5(); err == nil {
t.Fatal("migrateConfigV4ToV5() should fail with a corrupted json")
}
if err := migrateV5ToV6(); err == nil {
t.Fatal("migrateConfigV5ToV6() should fail with a corrupted json")
}
if err := migrateV6ToV7(); err == nil {
t.Fatal("migrateConfigV6ToV7() should fail with a corrupted json")
}
if err := migrateV7ToV8(); err == nil {
t.Fatal("migrateConfigV7ToV8() should fail with a corrupted json")
}
if err := migrateV8ToV9(); err == nil {
t.Fatal("migrateConfigV8ToV9() should fail with a corrupted json")
}
if err := migrateV9ToV10(); err == nil {
t.Fatal("migrateConfigV9ToV10() should fail with a corrupted json")
}
if err := migrateV10ToV11(); err == nil {
t.Fatal("migrateConfigV10ToV11() should fail with a corrupted json")
}
if err := migrateV11ToV12(); err == nil {
t.Fatal("migrateConfigV11ToV12() should fail with a corrupted json")
}
if err := migrateV12ToV13(); err == nil {
t.Fatal("migrateConfigV12ToV13() should fail with a corrupted json")
}
if err := migrateV13ToV14(); err == nil {
t.Fatal("migrateConfigV13ToV14() should fail with a corrupted json")
}
if err := migrateV14ToV15(); err == nil {
t.Fatal("migrateConfigV14ToV15() should fail with a corrupted json")
}
if err := migrateV15ToV16(); err == nil {
t.Fatal("migrateConfigV15ToV16() should fail with a corrupted json")
}
if err := migrateV16ToV17(); err == nil {
t.Fatal("migrateConfigV16ToV17() should fail with a corrupted json")
}
if err := migrateV17ToV18(); err == nil {
t.Fatal("migrateConfigV17ToV18() should fail with a corrupted json")
}
if err := migrateV18ToV19(); err == nil {
t.Fatal("migrateConfigV18ToV19() should fail with a corrupted json")
}
}
// Test if migrateConfig returns an error for corrupted config files of each version
func TestServerConfigMigrateCorruptedConfig(t *testing.T) {
rootPath, err := newTestConfig(globalMinioDefaultRegion)
if err != nil {
t.Fatalf("Init Test config failed")
}
// remove the root directory after the test ends.
defer removeAll(rootPath)
setConfigDir(rootPath)
configPath := rootPath + "/" + minioConfigFile
for i := 3; i <= 17; i++ {
// Create a corrupted config file
if err = ioutil.WriteFile(configPath, []byte(fmt.Sprintf("{ \"version\":\"%d\", \"credential\": { \"accessKey\": 1 } }", i)),
0644); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Test different migrate versions and be sure they are returning an error
if err = migrateConfig(); err == nil {
t.Fatal("migrateConfig() should fail with a corrupted json")
}
}
// Create a corrupted config file for version '2'.
if err = ioutil.WriteFile(configPath, []byte("{ \"version\":\"2\", \"credentials\": { \"accessKeyId\": 1 } }"), 0644); err != nil {
t.Fatal("Unexpected error: ", err)
}
// Test different migrate versions and be sure they are returning an error
if err = migrateConfig(); err == nil {
t.Fatal("migrateConfig() should fail with a corrupted json")
}
}
|
apache-2.0
|
zeratel/gankmaku
|
app/src/main/java/me/pkliang/gankmaku/ganhuo/BaseGanhuoFragment.java
|
3219
|
package me.pkliang.gankmaku.ganhuo;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.view.View;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import com.lsjwzh.widget.recyclerviewpager.RecyclerViewPager;
import java.util.ArrayList;
import butterknife.Bind;
import hugo.weaving.DebugLog;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.ui.widget.DanmakuSurfaceView;
import me.pkliang.gankmaku.R;
import me.pkliang.gankmaku.base.presenter.PaginationRxPresenter;
import me.pkliang.gankmaku.base.view.BaseDanmakuFragment;
import me.pkliang.gankmaku.base.view.IDataAdapter;
import me.pkliang.gankmaku.domain.entity.Entry;
import me.pkliang.gankmaku.domain.entity.Response;
/**
* Created by Omistaja on 8/12/2015.
*/
public abstract class BaseGanhuoFragment<P extends PaginationRxPresenter<GanhuoView, Response>> extends BaseDanmakuFragment<SwipeRefreshLayout, Response, GanhuoView, P>
implements RecyclerViewPager.OnPageChangedListener {
@Bind(R.id.sv_danmaku)
DanmakuSurfaceView mDanmakuView;
protected GanhuoAdapter adapter;
private BaseDanmakuParser mParser;
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
Response response = new Response();
response.setResults(new ArrayList<>());
//adapter = new FragmentsAdapter(getChildFragmentManager(),response);
WebChromeClient chromeClient = new WebChromeClient() {
public void onProgressChanged(WebView view, int progress) {
getActivity().setProgress(progress * 1000);
}
};
adapter = new GanhuoAdapter(response, chromeClient);
emptyRecyclerView.setAdapter(adapter);
emptyRecyclerView.setPadding(0, 0, 0, 0);
//emptyRecyclerView.setFlingFactor(0);
emptyRecyclerView.setLayoutManager(new LinearLayoutManager(getActivity(), LinearLayoutManager.HORIZONTAL, false));
emptyView.setText(getText(R.string.no_data));
emptyRecyclerView.addOnPageChangedListener(this);
mParser = createParser(null);
initDanmaku(mDanmakuView, mParser);
}
@Override
public void onDestroyView() {
super.onDestroyView();
destroyDanmaku(mDanmakuView);
}
@Override
protected IDataAdapter<Response> getIDataAdapter() {
return adapter;
}
@Override
protected int getFirstPage() {
return 1;
}
@Override
protected int getPageSize() {
return 10;
}
@DebugLog
@Override
public void OnPageChanged(int oldPos, int newPos) {
mDanmakuView.clearDanmakusOnScreen();
Entry entry = getIDataAdapter().getData().getResults().get(newPos);
testDanMu(mDanmakuView, mParser, entry.getDesc(), Color.BLACK, Color.TRANSPARENT);
testDanMu(mDanmakuView, mParser, "by " + entry.getWho(), Color.BLACK, Color.TRANSPARENT);
}
@Override
protected int getLayoutRes() {
return R.layout.fragment_main;
}
}
|
apache-2.0
|
RandomLyrics/aaabbbcdefgh
|
PromosiMVC/Test/DataGenerator.cs
|
5037
|
using PromosiMVC.Helpers;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace PromosiMVC.Test
{
public class DataGenerator
{
private PromosiDBModelContainer DB;
private static int count = 6;
public DataGenerator(PromosiDBModelContainer db)
{
this.DB = db;
}
public void Generate()
{
//randoms
Random rnd = new Random();
var streets = new List<string>
{
"Lipowa",
"Warszawska",
"Magnata",
"Ulkowska",
"Jarzębska",
"Żadna"
};
var names = new List<string>
{
"Ryszard",
"Jadwiga",
"Mariusz",
"John",
"Helmut",
"Barbara"
};
var cnames = new List<string>
{
"Maltex",
"Kuraż",
"Dentix",
"Xanox",
"Iga",
"Świeżo"
};
var dom = new List<string>
{
"@gmail.com",
"@hotmail.com",
"@op.pl",
"@wp.pl",
"@onet.pl",
"@dragon.com"
};
//City
var cities = new List<City>()
{
{new City { Name = "Warszawa" } },
{new City { Name = "Wrocław" } },
{new City { Name = "Zakopane" } },
{new City { Name = "Gdańsk" } }
};
DB.CitySet.AddRange(cities);
DB.SaveChanges();
//Branch
var branches = new List<Branch>
{
{new Branch { Name = "Uroda", Description = "Salon fryzjerski, kosmetyczny"} },
{new Branch { Name = "Medycyna", Description = "Dentysta, gabinet lekarski" } },
{new Branch { Name = "Motoryzacja", Description = "Warsztat samochodowy, wulkanizator" } },
{new Branch { Name = "Gastronomia", Description = "Restauracje, fast-food" } }
};
DB.BranchSet.AddRange(branches);
DB.SaveChanges();
//Company
for (int i = 0; i < cnames.Count; i++)
{
var c = new Company();
c.Adress = streets.PickRandom() + " " + rnd.Next(1, 80);
c.Branch = DB.BranchSet.PickRandom();
c.Name = cnames[i];
c.NIP = rnd.Next(100, 999).ToString() + "-" + rnd.Next(100, 999).ToString() + "-" + rnd.Next(10, 99).ToString() + "-" + rnd.Next(10, 99).ToString();
c.ChannelName = c.Name;
c.Phonenumber = "+48 " + rnd.Next(100000000, 999999999).ToString();
c.City = DB.CitySet.PickRandom();
c.Email = c.Name + dom.PickRandom();
c.Password = TestHelpers.GenerateCode(6);
DB.CompanySet.Add(c);
}
DB.SaveChanges();
//User
for (int i = 0; i < count * 3; i++)
{
var c = new User();
c.Surname = streets.PickRandom();
c.Name = names.PickRandom();
c.DeviceToken = TestHelpers.GenerateCode(12);
c.Password = TestHelpers.GenerateCode(8);
c.RegistrationId = TestHelpers.GenerateCode(14);
c.Phonenumber = rnd.Next(100000000, 999999999).ToString();
c.Email = c.Name + dom.PickRandom();
DB.UserSet.Add(c);
}
DB.SaveChanges();
//Followings
foreach (var item in DB.UserSet)
{
if (rnd.Next(0, 100) >= 80)
{
var f = new Followings();
f.User = item;
f.Company = DB.CompanySet.PickRandom();
DB.FollowingsSet.Add(f);
}
}
DB.SaveChanges();
////Push
//for (int i = 0; i < count; i++)
//{
// var c = new Push();
// // if (rnd.Next(0,1) == 1)
// c.Company = DB.CompanySet.PickRandom();
// c.Available = true;
// c.DateBegin = DateTime.Today;
// c.DateEnd = c.DateBegin.AddHours(rnd.Next(4, 48));
// c.Description = TestHelpers.GenerateText(64);
// c.Text = TestHelpers.GenerateText(32);
// DB.PushSet.Add(c);
//}
//DB.SaveChanges();
}
}
}
|
apache-2.0
|
HybridF5/jacket
|
jacket/tests/compute/unit/test_instance_types_extra_specs.py
|
5838
|
# Copyright 2011 University of Southern California
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for instance types extra specs code
"""
from jacket.compute.cloud import arch
from jacket import context
from jacket.db import compute
from jacket.compute import exception
from jacket.compute import test
class InstanceTypeExtraSpecsTestCase(test.TestCase):
def setUp(self):
super(InstanceTypeExtraSpecsTestCase, self).setUp()
self.context = context.get_admin_context()
values = dict(name="cg1.4xlarge",
memory_mb=22000,
vcpus=8,
root_gb=1690,
ephemeral_gb=2000,
flavorid=105)
self.specs = dict(cpu_arch=arch.X86_64,
cpu_model="Nehalem",
xpu_arch="fermi",
xpus="2",
xpu_model="Tesla 2050")
values['extra_specs'] = self.specs
ref = compute.flavor_create(self.context,
values)
self.instance_type_id = ref["id"]
self.flavorid = ref["flavorid"]
def tearDown(self):
# Remove the instance type from the database
compute.flavor_destroy(self.context, "cg1.4xlarge")
super(InstanceTypeExtraSpecsTestCase, self).tearDown()
def test_instance_type_specs_get(self):
actual_specs = compute.flavor_extra_specs_get(
self.context,
self.flavorid)
self.assertEqual(self.specs, actual_specs)
def test_flavor_extra_specs_delete(self):
del self.specs["xpu_model"]
compute.flavor_extra_specs_delete(self.context,
self.flavorid,
"xpu_model")
actual_specs = compute.flavor_extra_specs_get(
self.context,
self.flavorid)
self.assertEqual(self.specs, actual_specs)
def test_instance_type_extra_specs_update(self):
self.specs["cpu_model"] = "Sandy Bridge"
compute.flavor_extra_specs_update_or_create(
self.context,
self.flavorid,
dict(cpu_model="Sandy Bridge"))
actual_specs = compute.flavor_extra_specs_get(
self.context,
self.flavorid)
self.assertEqual(self.specs, actual_specs)
def test_instance_type_extra_specs_update_with_nonexisting_flavor(self):
extra_specs = dict(cpu_arch=arch.X86_64)
nonexisting_flavorid = "some_flavor_that_does_not_exist"
self.assertRaises(exception.FlavorNotFound,
compute.flavor_extra_specs_update_or_create,
self.context, nonexisting_flavorid, extra_specs)
def test_instance_type_extra_specs_create(self):
net_attrs = {
"net_arch": "ethernet",
"net_mbps": "10000"
}
self.specs.update(net_attrs)
compute.flavor_extra_specs_update_or_create(
self.context,
self.flavorid,
net_attrs)
actual_specs = compute.flavor_extra_specs_get(
self.context,
self.flavorid)
self.assertEqual(self.specs, actual_specs)
def test_instance_type_get_with_extra_specs(self):
instance_type = compute.flavor_get(
self.context,
self.instance_type_id)
self.assertEqual(instance_type['extra_specs'],
self.specs)
instance_type = compute.flavor_get(
self.context,
5)
self.assertEqual(instance_type['extra_specs'], {})
def test_instance_type_get_by_name_with_extra_specs(self):
instance_type = compute.flavor_get_by_name(
self.context,
"cg1.4xlarge")
self.assertEqual(instance_type['extra_specs'],
self.specs)
instance_type = compute.flavor_get_by_name(
self.context,
"m1.small")
self.assertEqual(instance_type['extra_specs'], {})
def test_instance_type_get_by_flavor_id_with_extra_specs(self):
instance_type = compute.flavor_get_by_flavor_id(
self.context,
105)
self.assertEqual(instance_type['extra_specs'],
self.specs)
instance_type = compute.flavor_get_by_flavor_id(
self.context,
2)
self.assertEqual(instance_type['extra_specs'], {})
def test_instance_type_get_all(self):
types = compute.flavor_get_all(self.context)
name2specs = {}
for instance_type in types:
name = instance_type['name']
name2specs[name] = instance_type['extra_specs']
self.assertEqual(name2specs['cg1.4xlarge'], self.specs)
self.assertEqual(name2specs['m1.small'], {})
|
apache-2.0
|
troels/nz-presto
|
presto-base-jdbc/src/main/java/com/facebook/presto/plugin/jdbc/QueryBuilder.java
|
13553
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.jdbc;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.Range;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.BigintType;
import com.facebook.presto.spi.type.BooleanType;
import com.facebook.presto.spi.type.DateType;
import com.facebook.presto.spi.type.DoubleType;
import com.facebook.presto.spi.type.IntegerType;
import com.facebook.presto.spi.type.RealType;
import com.facebook.presto.spi.type.SmallintType;
import com.facebook.presto.spi.type.TimeType;
import com.facebook.presto.spi.type.TimeWithTimeZoneType;
import com.facebook.presto.spi.type.TimestampType;
import com.facebook.presto.spi.type.TimestampWithTimeZoneType;
import com.facebook.presto.spi.type.TinyintType;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.VarcharType;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
import org.joda.time.DateTimeZone;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.facebook.presto.spi.type.DateTimeEncoding.unpackMillisUtc;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.lang.Float.intBitsToFloat;
import static java.util.Collections.nCopies;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.DAYS;
import static java.util.stream.Collectors.joining;
import static org.joda.time.DateTimeZone.UTC;
public class QueryBuilder
{
private static final Logger log = Logger.get(QueryBuilder.class);
private final String quote;
private static class TypeAndValue
{
private final Type type;
private final Object value;
public TypeAndValue(Type type, Object value)
{
this.type = requireNonNull(type, "type is null");
this.value = requireNonNull(value, "value is null");
}
public Type getType()
{
return type;
}
public Object getValue()
{
return value;
}
}
public QueryBuilder(String quote)
{
this.quote = requireNonNull(quote, "quote is null");
}
public PreparedStatement buildSql(JdbcClient client, Connection connection, String catalog, String schema, String table, List<JdbcColumnHandle> columns, TupleDomain<ColumnHandle> tupleDomain)
throws SQLException
{
StringBuilder sql = new StringBuilder();
String columnNames = columns.stream()
.map(JdbcColumnHandle::getColumnName)
.map(this::quote)
.collect(joining(", "));
sql.append("SELECT ");
sql.append(columnNames);
if (columns.isEmpty()) {
sql.append("null");
}
sql.append(" FROM ");
if (!isNullOrEmpty(catalog)) {
sql.append(quote(catalog)).append('.');
}
if (!isNullOrEmpty(schema)) {
sql.append(quote(schema)).append('.');
}
sql.append(quote(table));
List<TypeAndValue> accumulator = new ArrayList<>();
List<String> clauses = toConjuncts(columns, tupleDomain, accumulator);
if (!clauses.isEmpty()) {
sql.append(" WHERE ")
.append(Joiner.on(" AND ").join(clauses));
}
PreparedStatement statement = client.getPreparedStatement(connection, sql.toString());
for (int i = 0; i < accumulator.size(); i++) {
TypeAndValue typeAndValue = accumulator.get(i);
if (typeAndValue.getType().equals(BigintType.BIGINT)) {
statement.setLong(i + 1, (long) typeAndValue.getValue());
}
else if (typeAndValue.getType().equals(IntegerType.INTEGER)) {
statement.setInt(i + 1, ((Number) typeAndValue.getValue()).intValue());
}
else if (typeAndValue.getType().equals(SmallintType.SMALLINT)) {
statement.setShort(i + 1, ((Number) typeAndValue.getValue()).shortValue());
}
else if (typeAndValue.getType().equals(TinyintType.TINYINT)) {
statement.setByte(i + 1, ((Number) typeAndValue.getValue()).byteValue());
}
else if (typeAndValue.getType().equals(DoubleType.DOUBLE)) {
statement.setDouble(i + 1, (double) typeAndValue.getValue());
}
else if (typeAndValue.getType().equals(RealType.REAL)) {
statement.setFloat(i + 1, intBitsToFloat(((Number) typeAndValue.getValue()).intValue()));
}
else if (typeAndValue.getType().equals(BooleanType.BOOLEAN)) {
statement.setBoolean(i + 1, (boolean) typeAndValue.getValue());
}
else if (typeAndValue.getType().equals(DateType.DATE)) {
long millis = DAYS.toMillis((long) typeAndValue.getValue());
statement.setDate(i + 1, new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), millis)));
}
else if (typeAndValue.getType().equals(TimeType.TIME)) {
statement.setTime(i + 1, new Time((long) typeAndValue.getValue()));
}
else if (typeAndValue.getType().equals(TimeWithTimeZoneType.TIME_WITH_TIME_ZONE)) {
statement.setTime(i + 1, new Time(unpackMillisUtc((long) typeAndValue.getValue())));
}
else if (typeAndValue.getType().equals(TimestampType.TIMESTAMP)) {
statement.setTimestamp(i + 1, new Timestamp((long) typeAndValue.getValue()));
}
else if (typeAndValue.getType().equals(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE)) {
statement.setTimestamp(i + 1, new Timestamp(unpackMillisUtc((long) typeAndValue.getValue())));
}
else if (typeAndValue.getType() instanceof VarcharType) {
statement.setString(i + 1, ((Slice) typeAndValue.getValue()).toStringUtf8());
}
else {
throw new UnsupportedOperationException("Can't handle type: " + typeAndValue.getType());
}
}
return statement;
}
private static boolean isAcceptedType(Type type)
{
Type validType = requireNonNull(type, "type is null");
return validType.equals(BigintType.BIGINT) ||
validType.equals(TinyintType.TINYINT) ||
validType.equals(SmallintType.SMALLINT) ||
validType.equals(IntegerType.INTEGER) ||
validType.equals(DoubleType.DOUBLE) ||
validType.equals(RealType.REAL) ||
validType.equals(BooleanType.BOOLEAN) ||
validType.equals(DateType.DATE) ||
validType.equals(TimeType.TIME) ||
validType.equals(TimeWithTimeZoneType.TIME_WITH_TIME_ZONE) ||
validType.equals(TimestampType.TIMESTAMP) ||
validType.equals(TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE) ||
validType instanceof VarcharType;
}
private List<String> toConjuncts(List<JdbcColumnHandle> columns, TupleDomain<ColumnHandle> tupleDomain, List<TypeAndValue> accumulator)
{
ImmutableList.Builder<String> builder = ImmutableList.builder();
ImmutableSet.Builder<JdbcColumnHandle> columnHandles = ImmutableSet.builder();
Map<ColumnHandle, Domain> domains = tupleDomain.getDomains().orElse(ImmutableMap.of());
for (ColumnHandle handle : domains.keySet()) {
columnHandles.add((JdbcColumnHandle) handle);
}
columnHandles.addAll(columns);
for (JdbcColumnHandle column : columnHandles.build()) {
Type type = column.getColumnType();
if (isAcceptedType(type)) {
Domain domain = domains.get(column);
if (domain != null) {
builder.add(toPredicate(column.getColumnName(), domain, type, accumulator));
}
}
}
return builder.build();
}
private String toPredicate(String columnName, Domain domain, Type type, List<TypeAndValue> accumulator)
{
checkArgument(domain.getType().isOrderable(), "Domain type must be orderable");
if (domain.getValues().isNone()) {
return domain.isNullAllowed() ? quote(columnName) + " IS NULL" : "FALSE";
}
if (domain.getValues().isAll()) {
return domain.isNullAllowed() ? "TRUE" : quote(columnName) + " IS NOT NULL";
}
List<String> disjuncts = new ArrayList<>();
List<Object> singleValues = new ArrayList<>();
for (Range range : domain.getValues().getRanges().getOrderedRanges()) {
checkState(!range.isAll()); // Already checked
if (range.isSingleValue()) {
singleValues.add(range.getLow().getValue());
}
else {
List<String> rangeConjuncts = new ArrayList<>();
if (!range.getLow().isLowerUnbounded()) {
switch (range.getLow().getBound()) {
case ABOVE:
rangeConjuncts.add(toPredicate(columnName, ">", range.getLow().getValue(), type, accumulator));
break;
case EXACTLY:
rangeConjuncts.add(toPredicate(columnName, ">=", range.getLow().getValue(), type, accumulator));
break;
case BELOW:
throw new IllegalArgumentException("Low marker should never use BELOW bound");
default:
throw new AssertionError("Unhandled bound: " + range.getLow().getBound());
}
}
if (!range.getHigh().isUpperUnbounded()) {
switch (range.getHigh().getBound()) {
case ABOVE:
throw new IllegalArgumentException("High marker should never use ABOVE bound");
case EXACTLY:
rangeConjuncts.add(toPredicate(columnName, "<=", range.getHigh().getValue(), type, accumulator));
break;
case BELOW:
rangeConjuncts.add(toPredicate(columnName, "<", range.getHigh().getValue(), type, accumulator));
break;
default:
throw new AssertionError("Unhandled bound: " + range.getHigh().getBound());
}
}
                // If rangeConjuncts is empty, the range was ALL, which should already have been checked for
checkState(!rangeConjuncts.isEmpty());
disjuncts.add("(" + Joiner.on(" AND ").join(rangeConjuncts) + ")");
}
}
// Add back all of the possible single values either as an equality or an IN predicate
if (singleValues.size() == 1) {
disjuncts.add(toPredicate(columnName, "=", getOnlyElement(singleValues), type, accumulator));
}
else if (singleValues.size() > 1) {
for (Object value : singleValues) {
bindValue(value, type, accumulator);
}
String values = Joiner.on(",").join(nCopies(singleValues.size(), "?"));
disjuncts.add(quote(columnName) + " IN (" + values + ")");
}
// Add nullability disjuncts
checkState(!disjuncts.isEmpty());
if (domain.isNullAllowed()) {
disjuncts.add(quote(columnName) + " IS NULL");
}
return "(" + Joiner.on(" OR ").join(disjuncts) + ")";
}
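    // Illustrative example (not part of the original source): assuming a
    // double-quote identifier quote, a domain on column "age" covering the
    // range (18, 65] with NULL allowed yields
    //   (("age" > ? AND "age" <= ?) OR "age" IS NULL)
    // with 18 and 65 bound later through the accumulator.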
private String toPredicate(String columnName, String operator, Object value, Type type, List<TypeAndValue> accumulator)
{
bindValue(value, type, accumulator);
return quote(columnName) + " " + operator + " ?";
}
private String quote(String name)
{
name = name.replace(quote, quote + quote);
return quote + name + quote;
}
private static void bindValue(Object value, Type type, List<TypeAndValue> accumulator)
{
checkArgument(isAcceptedType(type), "Can't handle type: %s", type);
accumulator.add(new TypeAndValue(type, value));
}
}
|
apache-2.0
|
chmulato/helianto-seed
|
src/main/java/com/iservport/report/controller/ReportPhaseController.java
|
3566
|
package com.iservport.report.controller;
import javax.inject.Inject;
import org.helianto.security.internal.UserAuthentication;
import org.helianto.task.repository.ReportPhaseAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.iservport.report.service.ReportCommandService;
import com.iservport.report.service.ReportQueryService;
/**
 * Report phase controller.
*
* @author mauriciofernandesdecastro
*/
@RestController
@RequestMapping("/api/report/phase")
@PreAuthorize("isAuthenticated()")
public class ReportPhaseController {
private static final Logger logger = LoggerFactory.getLogger(ReportPhaseController.class);
@Inject
private ReportCommandService reportCommandService;
@Inject
private ReportQueryService reportQueryService;
/**
     * Selects reportPhase by folder id.
*
* GET /api/report/phase?folderId
*/
@RequestMapping(method=RequestMethod.GET, params={"folderId"})
public ReportPhaseAdapter reportPhaseOne(UserAuthentication userAuthentication, @RequestParam Integer folderId) {
return reportQueryService.reportPhaseOpen(folderId);
}
/**
     * Creates a new reportPhase.
*
* POST /api/report/phase?folderId
*/
    @RequestMapping(method=RequestMethod.POST, params={"folderId"})
public ReportPhaseAdapter reportPhaseNew(UserAuthentication userAuthentication, @RequestParam Integer folderId) {
return reportCommandService.reportPhaseNew(folderId);
}
/**
     * Updates reportPhase.
     *
     * PUT /api/report/phase
*/
    @RequestMapping(method=RequestMethod.PUT, consumes="application/json")
public ReportPhaseAdapter reportPhase(UserAuthentication userAuthentication, @RequestBody ReportPhaseAdapter command) {
return reportCommandService.reportPhase(command);
}
}
|
apache-2.0
|
vivantech/kc_fixes
|
src/main/java/org/kuali/kra/irb/onlinereview/authorization/RejectProtocolOnlineReviewAuthorizer.java
|
2494
|
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.irb.onlinereview.authorization;
import org.kuali.kra.infrastructure.PermissionConstants;
import org.kuali.kra.irb.onlinereview.ProtocolOnlineReview;
/**
 * The Reject Protocol Online Review Authorizer checks to see if the user has
 * permission to reject a protocol online review.
 *
 * Authorization depends on the user's role. If the user is a member of the IRB Administrator
* role for the related protocol's unit, then that user is always authorized to alter the protocol online review.
* If the user is the online reviewer, then they are allowed to edit the document provided that
* they have an outstanding approve request on the document.
*
*/
public class RejectProtocolOnlineReviewAuthorizer extends ProtocolOnlineReviewAuthorizer {
/**
* @see org.kuali.kra.irb.auth.ProtocolAuthorizer#isAuthorized(java.lang.String, org.kuali.kra.irb.auth.ProtocolTask)
*/
public boolean isAuthorized(String userId, ProtocolOnlineReviewTask task) {
boolean hasPermission = false;
ProtocolOnlineReview protocolOnlineReview = task.getProtocolOnlineReview();
if ( protocolOnlineReview.getProtocolOnlineReviewId() == null ) {
//we never authorize edits on a review, the reviews are created
//by the system on behalf of the users.
} else {
hasPermission = getKraAuthorizationService().hasPermission(userId, protocolOnlineReview.getProtocol(), PermissionConstants.MAINTAIN_ONLINE_REVIEWS);
hasPermission &= !protocolOnlineReview.getProtocolOnlineReviewDocument().isViewOnly();
hasPermission &= kraWorkflowService.isEnRoute(task.getProtocolOnlineReviewDocument());
hasPermission &= kraWorkflowService.isUserApprovalRequested(task.getProtocolOnlineReviewDocument(), userId);
}
return hasPermission;
}
}
|
apache-2.0
|
syphr42/libmythtv-java
|
protocol/src/main/java/org/syphr/mythtv/protocol/impl/Command63QueryRecorderGetFramesWritten.java
|
2002
|
/*
* Copyright 2011-2012 Gregory P. Moyer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.syphr.mythtv.protocol.impl;
import java.util.List;
import org.syphr.mythtv.commons.exception.CommandException;
import org.syphr.mythtv.commons.exception.ProtocolException;
import org.syphr.mythtv.commons.exception.ProtocolException.Direction;
import org.syphr.mythtv.commons.translate.Translator;
/* default */class Command63QueryRecorderGetFramesWritten extends AbstractCommand63QueryRecorder<Long>
{
public Command63QueryRecorderGetFramesWritten(Translator translator, Parser parser, int recorderId)
{
super(translator, parser, recorderId);
}
@Override
protected String getSubCommand() throws ProtocolException
{
return "GET_FRAMES_WRITTEN";
}
@Override
public Long parseResponse(String response) throws ProtocolException, CommandException
{
List<String> args = getParser().splitArguments(response);
if (args.size() != 2)
{
throw new ProtocolException(response, Direction.RECEIVE);
}
if ("-1".equals(args.get(0)))
{
throw new CommandException("Unable to determine number of frames written");
}
try
{
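            // Descriptive note (not in the original source): the backend
            // presumably reports the 64-bit frame count as two 32-bit halves,
            // which combineInts reassembles into a single long.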
return ProtocolUtils.combineInts(args.get(0), args.get(1));
}
catch (NumberFormatException e)
{
throw new ProtocolException(response, Direction.RECEIVE, e);
}
}
}
|
apache-2.0
|
wlin12/JNN
|
src/jnn/functions/composite/lstm/LSTMDecoderState.java
|
2246
|
package jnn.functions.composite.lstm;
import jnn.decoder.state.DenseNeuronState;
import jnn.neuron.DenseNeuronArray;
import jnn.neuron.NeuronArray;
public class LSTMDecoderState extends DenseNeuronState {
public DenseNeuronArray input;
public DenseNeuronArray lstmState;
public DenseNeuronArray lstmCell;
public LSTMDecoderState(double score, boolean isFinal,
DenseNeuronArray output, DenseNeuronArray lstmState,
DenseNeuronArray lstmCell, DenseNeuronArray input) {
super(score, isFinal, output);
this.lstmState = lstmState;
this.lstmCell = lstmCell;
this.input = input;
}
public static DenseNeuronArray[] getInputs(LSTMDecoderState[] states){
DenseNeuronArray[] inputs = new DenseNeuronArray[states.length];
for(int i = 0; i < inputs.length; i++){
inputs[i] = states[i].input;
}
return inputs;
}
public static DenseNeuronArray[] getStates(LSTMDecoderState[] states){
DenseNeuronArray[] inputs = new DenseNeuronArray[states.length];
for(int i = 0; i < inputs.length; i++){
inputs[i] = states[i].lstmState;
}
return inputs;
}
public static DenseNeuronArray[] getCells(LSTMDecoderState[] states){
DenseNeuronArray[] inputs = new DenseNeuronArray[states.length];
for(int i = 0; i < inputs.length; i++){
inputs[i] = states[i].lstmCell;
}
return inputs;
}
public static DenseNeuronArray[] getOutputs(LSTMDecoderState[] states){
DenseNeuronArray[] inputs = new DenseNeuronArray[states.length*2];
for(int i = 0; i < states.length; i++){
inputs[i] = states[i].lstmState;
inputs[i+states.length] = states[i].lstmCell;
}
return inputs;
}
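    // Descriptive note (not in the original source): buildStateSequence below
    // reuses each lstmState array as the DenseNeuronState output, seeds every
    // state with score -1, and marks only the last state in the sequence final.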
public static LSTMDecoderState[] buildStateSequence(DenseNeuronArray[] inputs, int stateSize){
LSTMDecoderState[] states = new LSTMDecoderState[inputs.length];
for(int i = 0; i < states.length; i++){
boolean isFinal = i == states.length-1;
DenseNeuronArray lstmState = new DenseNeuronArray(stateSize);
lstmState.setName("lstm decoder state " + i);
DenseNeuronArray lstmCell = new DenseNeuronArray(stateSize);
lstmCell.setName("lstm decoder cell " + i);
LSTMDecoderState state = new LSTMDecoderState(-1, isFinal, lstmState, lstmState, lstmCell, inputs[i]);
states[i] = state;
}
return states;
}
}
|
apache-2.0
|
D3-LucaPiombino/MassTransit
|
src/MassTransit/Testing/TestDecorators/SagaRepositoryTestDecorator.cs
|
5818
|
// Copyright 2007-2015 Chris Patterson, Dru Sellers, Travis Smith, et. al.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.Testing.TestDecorators
{
using System;
using System.Threading.Tasks;
using Monitoring.Introspection;
using Pipeline;
using Saga;
public class SagaRepositoryTestDecorator<TSaga> :
ISagaRepository<TSaga>
where TSaga : class, ISaga
{
readonly SagaListImpl<TSaga> _created;
readonly ReceivedMessageList _received;
readonly ISagaRepository<TSaga> _sagaRepository;
readonly SagaListImpl<TSaga> _sagas;
public SagaRepositoryTestDecorator(ISagaRepository<TSaga> sagaRepository, ReceivedMessageList received, SagaListImpl<TSaga> created,
SagaListImpl<TSaga> sagas)
{
_sagaRepository = sagaRepository;
_received = received;
_created = created;
_sagas = sagas;
}
void IProbeSite.Probe(ProbeContext context)
{
_sagaRepository.Probe(context);
}
Task ISagaRepository<TSaga>.Send<T>(ConsumeContext<T> context, ISagaPolicy<TSaga, T> policy, IPipe<SagaConsumeContext<TSaga, T>> next)
{
var interceptPipe = new InterceptPipe<T>(_sagas, _received, next);
var interceptPolicy = new InterceptPolicy<T>(_created, policy);
return _sagaRepository.Send(context, interceptPolicy, interceptPipe);
}
Task ISagaRepository<TSaga>.SendQuery<T>(SagaQueryConsumeContext<TSaga, T> context, ISagaPolicy<TSaga, T> policy,
IPipe<SagaConsumeContext<TSaga, T>> next)
{
var interceptPipe = new InterceptPipe<T>(_sagas, _received, next);
var interceptPolicy = new InterceptPolicy<T>(_created, policy);
return _sagaRepository.SendQuery(context, interceptPolicy, interceptPipe);
}
class InterceptPipe<TMessage> :
IPipe<SagaConsumeContext<TSaga, TMessage>>
where TMessage : class
{
readonly IPipe<SagaConsumeContext<TSaga, TMessage>> _pipe;
readonly ReceivedMessageList _received;
readonly SagaListImpl<TSaga> _sagas;
public InterceptPipe(SagaListImpl<TSaga> sagas, ReceivedMessageList received, IPipe<SagaConsumeContext<TSaga, TMessage>> pipe)
{
_sagas = sagas;
_received = received;
_pipe = pipe;
}
void IProbeSite.Probe(ProbeContext context)
{
_pipe.Probe(context);
}
public async Task Send(SagaConsumeContext<TSaga, TMessage> context)
{
_sagas.Add(context);
try
{
await _pipe.Send(context).ConfigureAwait(false);
_received.Add(context);
}
catch (Exception ex)
{
_received.Add(context, ex);
throw;
}
}
}
class InterceptPolicy<TMessage> :
ISagaPolicy<TSaga, TMessage>
where TMessage : class
{
readonly SagaListImpl<TSaga> _created;
readonly ISagaPolicy<TSaga, TMessage> _policy;
public InterceptPolicy(SagaListImpl<TSaga> created, ISagaPolicy<TSaga, TMessage> policy)
{
_created = created;
_policy = policy;
}
public bool PreInsertInstance(ConsumeContext<TMessage> context, out TSaga instance)
{
return _policy.PreInsertInstance(context, out instance);
}
public Task Existing(SagaConsumeContext<TSaga, TMessage> context, IPipe<SagaConsumeContext<TSaga, TMessage>> next)
{
return _policy.Existing(context, next);
}
public async Task Missing(ConsumeContext<TMessage> context, IPipe<SagaConsumeContext<TSaga, TMessage>> next)
{
var interceptPipe = new InterceptPolicyPipe(_created, next);
await _policy.Missing(context, interceptPipe).ConfigureAwait(false);
}
class InterceptPolicyPipe :
IPipe<SagaConsumeContext<TSaga, TMessage>>
{
readonly SagaListImpl<TSaga> _created;
readonly IPipe<SagaConsumeContext<TSaga, TMessage>> _pipe;
public InterceptPolicyPipe(SagaListImpl<TSaga> created, IPipe<SagaConsumeContext<TSaga, TMessage>> pipe)
{
_created = created;
_pipe = pipe;
}
void IProbeSite.Probe(ProbeContext context)
{
_pipe.Probe(context);
}
public async Task Send(SagaConsumeContext<TSaga, TMessage> context)
{
_created.Add(context);
await _pipe.Send(context).ConfigureAwait(false);
}
}
}
}
}
|
apache-2.0
|
Bitergia/allura
|
Allura/allura/lib/utils.py
|
15569
|
import time
import string
import hashlib
import binascii
import logging.handlers
import codecs
import os.path
import datetime
import random
import mimetypes
import re
import magic
from itertools import groupby
import tg
import pylons
import webob.multidict
from formencode import Invalid
from tg.decorators import before_validate
from pylons import response
from pylons import tmpl_context as c
from paste.httpheaders import CACHE_CONTROL, EXPIRES
from webhelpers.html import literal
from webob import exc
from pygments.formatters import HtmlFormatter
from ew import jinja2_ew as ew
from ming.utils import LazyProperty
import pysvn
def permanent_redirect(url):
try:
tg.redirect(url)
except exc.HTTPFound, err:
raise exc.HTTPMovedPermanently(location=err.location)
def cache_forever():
headers = [
(k,v) for k,v in response.headers.items()
if k.lower() not in ('pragma', 'cache-control') ]
delta = CACHE_CONTROL.apply(
headers,
public=True,
max_age=60*60*24*365)
EXPIRES.update(headers, delta=delta)
response.headers.pop('cache-control', None)
response.headers.pop('pragma', None)
response.headers.update(headers)
class memoize_on_request(object):
def __init__(self, *key, **kwargs):
self.key = key
self.include_func_in_key = kwargs.pop(
'include_func_in_key', False)
assert not kwargs, 'Extra args'
def __call__(self, func):
def wrapper(*args, **kwargs):
cache = c.memoize_cache
if self.include_func_in_key:
key = (func, self.key, args, tuple(kwargs.iteritems()))
else:
key = (self.key, args, tuple(kwargs.iteritems()))
if key in cache:
result = cache[key]
else:
result = cache[key] = func(*args, **kwargs)
return result
wrapper.__name__ = 'wrap(%s)' % func.__name__
return wrapper
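# Illustrative usage (hypothetical function name, not in the original source):
#
#   @memoize_on_request('tool_lookup')
#   def tool_lookup(tool_id):
#       ...  # expensive work cached on c.memoize_cache for this request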
def guess_mime_type(filename):
'''Guess MIME type based on filename.
    Applies heuristics, tweaks, and defaults in a centralized manner.
'''
# Consider changing to strict=False
content_type = mimetypes.guess_type(filename, strict=True)
if content_type[0]:
content_type = content_type[0]
else:
content_type = 'application/octet-stream'
return content_type
class ConfigProxy(object):
'''Wrapper for loading config values at module-scope so we don't
have problems when a module is imported before tg.config is initialized
'''
def __init__(self, **kw):
self._kw = kw
def __getattr__(self, k):
return tg.config[self._kw[k]]
class lazy_logger(object):
    '''Lazy instantiation of a logger, to ensure that it does not get
created before logging is configured (which would make it disabled)'''
def __init__(self, name):
self._name = name
@LazyProperty
def _logger(self):
return logging.getLogger(self._name)
def __getattr__(self, name):
if name.startswith('_'): raise AttributeError, name
return getattr(self._logger, name)
class TimedRotatingHandler(logging.handlers.BaseRotatingHandler):
def __init__(self, strftime_pattern):
self.pattern = strftime_pattern
self.last_filename = self.current_filename()
logging.handlers.BaseRotatingHandler.__init__(self, self.last_filename, 'a')
def current_filename(self):
return os.path.abspath(datetime.datetime.utcnow().strftime(self.pattern))
def shouldRollover(self, record):
        'Inherited from BaseRotatingHandler'
return self.current_filename() != self.last_filename
def doRollover(self):
self.stream.close()
self.baseFilename = self.current_filename()
if self.encoding:
self.stream = codecs.open(self.baseFilename, 'w', self.encoding)
else:
self.stream = open(self.baseFilename, 'w')
class StatsHandler(TimedRotatingHandler):
fields=('action', 'action_type', 'tool_type', 'tool_mount', 'project', 'neighborhood',
'username', 'url', 'ip_address')
def __init__(self,
strftime_pattern,
module='allura',
page=1,
**kwargs):
self.page = page
self.module = module
TimedRotatingHandler.__init__(self, strftime_pattern)
def emit(self, record):
if not hasattr(record, 'action'):
return
kwpairs = dict(
module=self.module,
page=self.page)
for name in self.fields:
kwpairs[name] = getattr(record, name, None)
kwpairs.update(getattr(record, 'kwpairs', {}))
record.kwpairs = ','.join(
'%s=%s' % (k,v) for k,v in sorted(kwpairs.iteritems())
if v is not None)
record.exc_info = None # Never put tracebacks in the rtstats log
TimedRotatingHandler.emit(self, record)
def chunked_find(cls, query=None, pagesize=1024, sort_key=None, sort_dir=1):
if query is None: query = {}
page = 0
while True:
q = cls.query.find(query).skip(pagesize * page).limit(pagesize)
if sort_key:
q.sort(sort_key, sort_dir)
results = (q.all())
if not results: break
yield results
page += 1
def lsub_utf8(s, n):
'''Useful for returning n bytes of a UTF-8 string, rather than characters'''
while len(s) > n:
k = n
while (ord(s[k]) & 0xc0) == 0x80:
k -= 1
return s[:k]
return s
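# Illustrative example (not in the original source): for the two-byte UTF-8
# encoding of u'\xe9', lsub_utf8('\xc3\xa9', 1) returns '' rather than
# splitting the multi-byte sequence in half.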
def chunked_list(l, n):
""" Yield successive n-sized chunks from l.
"""
for i in xrange(0, len(l), n):
yield l[i:i+n]
def chunked_iter(iterable, max_size):
'''return iterable 'chunks' from the iterable of max size max_size'''
eiter = enumerate(iterable)
keyfunc = lambda (i,x): i//max_size
for _, chunk in groupby(eiter, keyfunc):
yield (x for i,x in chunk)
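# Illustrative example (not in the original source):
#   [list(c) for c in chunked_iter(range(5), 2)] == [[0, 1], [2, 3], [4]]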
class AntiSpam(object):
'''Helper class for bot-protecting forms'''
honey_field_template=string.Template('''<p class="$honey_class">
<label for="$fld_id">You seem to have CSS turned off.
Please don't fill out this field.</label><br>
<input id="$fld_id" name="$fld_name" type="text"><br></p>''')
def __init__(self, request=None, num_honey=2):
self.num_honey = num_honey
if request is None or request.method == 'GET':
self.request = pylons.request
self.timestamp = int(time.time())
self.spinner = self.make_spinner()
self.timestamp_text = str(self.timestamp)
self.spinner_text = self._wrap(self.spinner)
else:
self.request = request
self.timestamp_text = request.params['timestamp']
self.spinner_text = request.params['spinner']
self.timestamp = int(self.timestamp_text)
self.spinner = self._unwrap(self.spinner_text)
self.spinner_ord = map(ord, self.spinner)
self.random_padding = [ random.randint(0,255) for x in self.spinner ]
self.honey_class = self.enc(self.spinner_text, css_safe=True)
# The counter is to ensure that multiple forms in the same page
# don't end up with the same id. Instead of doing:
#
# honey0, honey1
        # which just relies on 0..num_honey, we include a counter
# which is incremented every time extra_fields is called:
#
        # honey00, honey01, honey10, honey11
self.counter = 0
@staticmethod
def _wrap(s):
'''Encode a string to make it HTML id-safe (starts with alpha, includes
only digits, hyphens, underscores, colons, and periods). Luckily, base64
encoding doesn't use hyphens, underscores, colons, nor periods, so we'll
use these characters to replace its plus, slash, equals, and newline.
'''
tx_tbl = string.maketrans('+/', '-_')
s = binascii.b2a_base64(s)
s = s.rstrip('=\n')
s = s.translate(tx_tbl)
s = 'X' + s
return s
@staticmethod
def _unwrap(s):
tx_tbl = string.maketrans('-_', '+/')
s = s[1:]
s = str(s).translate(tx_tbl)
i = len(s) % 4
if i > 0:
s += '=' * (4 - i)
s = binascii.a2b_base64(s + '\n')
return s
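    # Descriptive note (not in the original source): _unwrap reverses _wrap,
    # so _unwrap(_wrap(s)) == s for any byte string.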
def enc(self, plain, css_safe=False):
'''Stupid fieldname encryption. Not production-grade, but
hopefully "good enough" to stop spammers. Basically just an
XOR of the spinner with the unobfuscated field name
'''
# Plain starts with its length, includes the ordinals for its
# characters, and is padded with random data
plain = ([ len(plain) ]
+ map(ord, plain)
+ self.random_padding[:len(self.spinner_ord) - len(plain) - 1])
enc = ''.join(chr(p^s) for p, s in zip(plain, self.spinner_ord))
enc = self._wrap(enc)
if css_safe:
enc = ''.join(ch for ch in enc if ch.isalpha())
return enc
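    # Descriptive note (not in the original source): without css_safe the
    # encoding is reversible, i.e. dec(enc(name)) == name for names shorter
    # than the 20-byte SHA-1 spinner; css_safe output drops characters and
    # is therefore one-way.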
def dec(self, enc):
enc = self._unwrap(enc)
enc = list(map(ord, enc))
plain = [e^s for e,s in zip(enc, self.spinner_ord)]
plain = plain[1:1+plain[0]]
plain = ''.join(map(chr, plain))
return plain
def extra_fields(self):
yield ew.HiddenField(name='timestamp', value=self.timestamp_text).display()
yield ew.HiddenField(name='spinner', value=self.spinner_text).display()
for fldno in range(self.num_honey):
fld_name = self.enc('honey%d' % (fldno))
fld_id = self.enc('honey%d%d' % (self.counter, fldno))
yield literal(self.honey_field_template.substitute(
honey_class=self.honey_class,
fld_id=fld_id,
fld_name=fld_name))
self.counter += 1
def make_spinner(self, timestamp=None):
if timestamp is None: timestamp = self.timestamp
try:
client_ip = self.request.headers.get('X_FORWARDED_FOR', self.request.remote_addr)
client_ip = client_ip.split(',')[0].strip()
except (TypeError, AttributeError), err:
client_ip = '127.0.0.1'
plain = '%d:%s:%s' % (
timestamp, client_ip, pylons.config.get('spinner_secret', 'abcdef'))
return hashlib.sha1(plain).digest()
@classmethod
def validate_request(cls, request=None, now=None, params=None):
if request is None: request = pylons.request
if params is None: params = request.params
new_params = dict(params)
if not request.method == 'GET':
new_params.pop('timestamp', None)
new_params.pop('spinner', None)
obj = cls(request)
if now is None: now = time.time()
if obj.timestamp > now + 5:
raise ValueError, 'Post from the future'
if now - obj.timestamp > 60*60:
raise ValueError, 'Post from the 1hr+ past'
if obj.spinner != obj.make_spinner(obj.timestamp):
raise ValueError, 'Bad spinner value'
for k in new_params.keys():
new_params[obj.dec(k)] = new_params.pop(k)
for fldno in range(obj.num_honey):
value = new_params.pop('honey%s' % fldno)
if value:
raise ValueError, 'Value in honeypot field: %s' % value
return new_params
@classmethod
def validate(cls, error_msg):
'''Controller decorator to raise Invalid errors if bot protection is engaged'''
def antispam_hook(remainder, params):
'''Converts various errors in validate_request to a single Invalid message'''
try:
new_params = cls.validate_request(params=params)
params.update(new_params)
except (ValueError, TypeError, binascii.Error):
raise Invalid(error_msg, params, None)
return before_validate(antispam_hook)
class TruthyCallable(object):
'''
Wraps a callable to make it truthy in a boolean context.
Assumes the callable returns a truthy value and can be called with no args.
'''
def __init__(self, callable):
self.callable = callable
def __call__(self, *args, **kw):
return self.callable(*args, **kw)
def __nonzero__(self):
return self.callable()
class CaseInsensitiveDict(dict):
def __init__(self, *args, **kwargs):
super(CaseInsensitiveDict, self).__init__(*args, **kwargs)
self._reindex()
def _reindex(self):
items = self.items()
self.clear()
self._index = {}
for k,v in items:
self[k] = v
assert len(self) == len(items), 'Duplicate (case-insensitive) key'
def __getitem__(self, name):
return super(CaseInsensitiveDict, self).__getitem__(name.lower())
def __setitem__(self, name, value):
lname = name.lower()
super(CaseInsensitiveDict, self).__setitem__(lname, value)
self._index[lname] = name
def __delitem__(self, name):
super(CaseInsensitiveDict, self).__delitem__(name.lower())
def pop(self, k, *args):
return super(CaseInsensitiveDict, self).pop(k.lower(), *args)
def popitem(self):
k,v = super(CaseInsensitiveDict, self).popitem()
return self._index[k], v
def update(self, *args, **kwargs):
super(CaseInsensitiveDict, self).update(*args, **kwargs)
self._reindex()
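# Illustrative usage (not in the original source):
#   d = CaseInsensitiveDict(Foo=1)
#   d['FOO'] == 1 and d.popitem() == ('Foo', 1)  # original casing preserved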
def postmortem_hook(etype, value, tb): # pragma no cover
import sys, pdb, traceback
try:
from IPython.ipapi import make_session; make_session()
from IPython.Debugger import Pdb
sys.stderr.write('Entering post-mortem IPDB shell\n')
p = Pdb(color_scheme='Linux')
p.reset()
p.setup(None, tb)
p.print_stack_trace()
sys.stderr.write('%s: %s\n' % ( etype, value))
p.cmdloop()
p.forget()
# p.interaction(None, tb)
except ImportError:
sys.stderr.write('Entering post-mortem PDB shell\n')
traceback.print_exception(etype, value, tb)
pdb.post_mortem(tb)
class LineAnchorCodeHtmlFormatter(HtmlFormatter):
def _wrap_pre(self, inner):
style = []
if self.prestyles:
style.append(self.prestyles)
if self.noclasses:
style.append('line-height: 125%')
style = '; '.join(style)
num = self.linenostart
yield 0, ('<pre' + (style and ' style="%s"' % style) + '>')
for tup in inner:
yield (tup[0], '<div id="l%s" class="code_block">%s</div>' % (num, tup[1]))
num += 1
yield 0, '</pre>'
def generate_code_stats(blob):
stats = {'line_count': 0,
'code_size': 0,
'data_line_count': 0}
code = blob.text
lines = code.split('\n')
stats['code_size'] = blob.size
stats['line_count'] = len(lines)
spaces = re.compile(r'^\s*$')
stats['data_line_count'] = sum([1 for l in lines if not spaces.match(l)])
return stats
def svn_path_exists(path):
svn = pysvn.Client()
try:
svn.info2(path)
return True
except pysvn.ClientError, e:
return False
def is_text_file(file):
msg = magic.from_buffer(file[:1024])
if ("text" in msg) or ("empty" in msg):
return True
return False
def take_while_true(source):
x = source()
while x:
yield x
x = source()
|
apache-2.0
|
Keabot-Studios/DungeonRun2
|
Dungeon Run 2/src/net/keabotstudios/dr2/game/gui/GuiStatBar.java
|
1260
|
package net.keabotstudios.dr2.game.gui;
import net.keabotstudios.dr2.game.gui.GuiRenderer.GuiBarColor;
import net.keabotstudios.dr2.gfx.Bitmap;
public class GuiStatBar extends GuiHudLabel {
private GuiBarColor barColor;
private int value, maxValue;
public GuiStatBar(int x, int y, int size, String label, int value, int maxValue, GuiBarColor color, GuiBarColor labelColor, GuiBarColor barColor) {
super(x, y, size, label, color, labelColor);
this.value = value;
this.maxValue = maxValue;
this.barColor = barColor;
}
public GuiBarColor getBarColor() {
return barColor;
}
public void setBarColor(GuiBarColor barColor) {
this.barColor = barColor;
}
public int getValue() {
return value;
}
public void setValue(int value) {
if (value < 0)
value = 0;
if (value > maxValue)
value = maxValue;
this.value = value;
}
public int getMaxValue() {
return maxValue;
}
public void setMaxValue(int maxValue) {
if (maxValue < 0)
maxValue = 0;
if (value > maxValue)
value = maxValue;
this.maxValue = maxValue;
}
public void render(Bitmap bitmap) {
GuiRenderer.renderStatBar(bitmap, label, x, y, size, value, maxValue, color, barColor, labelColor);
}
}
|
apache-2.0
|
patrickvanamstel/SimplerInvoicing
|
si-sender/src/main/java/nl/kaninefatendreef/si/smp/SmpLookupService.java
|
13153
|
package nl.kaninefatendreef.si.smp;
import java.io.ByteArrayInputStream;
import java.net.URL;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Unmarshaller;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import nl.kaninefatendreef.si.constant.SIConfigurationProperties;
import nl.kaninefatendreef.si.document.SIProxy;
import org.busdox.smp.EndpointType;
import org.busdox.smp.ProcessIdentifierType;
import org.busdox.smp.SignedServiceMetadataType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import eu.peppol.start.model.ParticipantId;
import eu.peppol.start.model.PeppolDocumentTypeId;
import eu.peppol.start.model.PeppolProcessTypeId;
@Component
public class SmpLookupService implements InitializingBean{
private static Logger _logger = LoggerFactory.getLogger(SmpLookupService.class);
@Autowired
private DNSLookupService _dnsLookupService ;
@Autowired
private Environment _environment ;
@Autowired(required=false)
private SIProxy _siProxy = null;
private String _smlPeppolCentralDNS = "sml.peppolcentral.org";
private String _ipAddressSmlCentral = null;
/**
     * @param participant receiving participant
     * @param documentTypeIdentifier document type to be sent
* @return The endpoint address for the participant and DocumentId
* @throws SmpSignedServiceMetaDataException
* @throws SmpParticipantNotFoundException
* @throws RuntimeException If the end point address cannot be resolved for the participant. This is caused by a {@link java.net.UnknownHostException}
*/
public URL getEndpointAddress(ParticipantId participant, PeppolDocumentTypeId documentTypeIdentifier) throws SmpParticipantNotFoundException, SmpSignedServiceMetaDataException {
EndpointType endpointType = getEndpointType(participant, documentTypeIdentifier);
String address = endpointType.getEndpointReference().getAddress().getValue();
_logger.info("Found endpoint address for " + participant.stringValue() + " from SMP: " + address);
try {
return new URL(address);
} catch (Exception e) {
throw new RuntimeException("SMP returned invalid URL", e);
}
}
/**
* Retrieves the end point certificate for the given combination of receiving participant id and document type identifer.
*
* @param participant receiving participant
* @param documentTypeIdentifier document type to be sent
* @return The X509Certificate for the given ParticipantId and DocumentId
* @throws SmpSignedServiceMetaDataException
* @throws SmpParticipantNotFoundException
* @throws RuntimeException If the end point address cannot be resolved for the participant. This is caused by a {@link java.net.UnknownHostException}
*/
public X509Certificate getEndpointCertificate(ParticipantId participant, PeppolDocumentTypeId documentTypeIdentifier) throws SmpParticipantNotFoundException, SmpSignedServiceMetaDataException {
try {
String body = getEndpointType(participant, documentTypeIdentifier).getCertificate();
String endpointCertificate = "-----BEGIN CERTIFICATE-----\n" + body + "\n-----END CERTIFICATE-----";
CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
return (X509Certificate) certificateFactory.generateCertificate(new ByteArrayInputStream(endpointCertificate.getBytes()));
} catch (CertificateException e) {
throw new RuntimeException("Failed to get certificate from SMP for " + ParticipantId.getScheme() + ":" + participant.stringValue());
}
}
private EndpointType getEndpointType(ParticipantId participant, PeppolDocumentTypeId documentTypeIdentifier) throws SmpParticipantNotFoundException, SmpSignedServiceMetaDataException {
SignedServiceMetadataType serviceMetadata = getServiceMetaData(participant, documentTypeIdentifier);
return serviceMetadata
.getServiceMetadata()
.getServiceInformation()
.getProcessList()
.getProcess()
.get(0)
.getServiceEndpointList()
.getEndpoint()
.get(0);
}
private SignedServiceMetadataType getServiceMetaData(ParticipantId participant, PeppolDocumentTypeId documentTypeIdentifier) throws SmpSignedServiceMetaDataException, SmpParticipantNotFoundException {
URL smpUrl = null;
try {
smpUrl = getSmpUrl(participant, documentTypeIdentifier);
} catch (Exception e) {
throw new IllegalStateException("Unable to construct URL for " + participant + ", documentType" + documentTypeIdentifier + "; " + e.getMessage(), e);
}
InputSource smpContents = null;
try {
_logger.debug("Constructed SMP url: " + smpUrl.toExternalForm());
smpContents = SmpLookupServiceDelegate.getUrlContent(smpUrl , null);
}catch (SmpParticipantNotFoundException se){
se.setParticipantId(participant);
throw (se);
}catch (Exception e) {
throw new SmpSignedServiceMetaDataException(participant, documentTypeIdentifier, smpUrl, e);
}
try {
// Parses the XML response from the SMP
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
Document document = documentBuilder.parse(smpContents);
// Validates the signature
SmpResponseValidator smpResponseValidator = new SmpResponseValidator(document);
if (!smpResponseValidator.isSmpSignatureValid()) {
throw new IllegalStateException("SMP response contained invalid signature");
}
/**
* Uncomment code below if PEPPOL decides we need to follow the chain of trust for the SMP certificate.
*/
// Validates the certificate supplied with the signature
/*
if (!OxalisCertificateValidator.getInstance().validate(smpResponseValidator.getCertificate())) {
throw new IllegalStateException("SMP Certificate not valid for " + smpUrl);
}
*/
Unmarshaller unmarshaller = JAXBContext.newInstance(SignedServiceMetadataType.class).createUnmarshaller();
return unmarshaller.unmarshal(document, SignedServiceMetadataType.class).getValue();
} catch (Exception e) {
throw new SmpSignedServiceMetaDataException(participant, documentTypeIdentifier, smpUrl, e);
}
}
private URL getSmpUrl(ParticipantId participantId, PeppolDocumentTypeId documentTypeIdentifier) throws Exception {
String scheme = ParticipantId.getScheme();
String value = participantId.stringValue();
String hostname = "B-" + SmpLookupServiceDelegate.calculateMD5(value.toLowerCase()) + "." + scheme + "." + _smlPeppolCentralDNS;
String encodedParticipant = URLEncoder.encode(scheme + "::" + value, "UTF-8");
String encodedDocumentId = URLEncoder.encode(eu.peppol.start.model.PeppolDocumentTypeIdAcronym.getScheme() + "::" + documentTypeIdentifier.stringValue(), "UTF-8");
if (_ipAddressSmlCentral != null){
_logger.debug("Using ip address to fetch endpoints url.");
hostname = _ipAddressSmlCentral;
}
return new URL("http://" + hostname + "/" + encodedParticipant + "/services/" + encodedDocumentId);
}
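    // Illustrative example (not from the original source): for participant
    // value "9908:810017902" with scheme "iso6523-actorid-upis", the lookup
    // hostname becomes "B-<md5 of lowercased value>.iso6523-actorid-upis." +
    // _smlPeppolCentralDNS, and the URL-encoded participant and document id
    // are appended under "/services/".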
private URL getServiceGroupURL(ParticipantId participantId) throws SmpLookupException {
String scheme = ParticipantId.getScheme();
String value = participantId.stringValue();
try {
String hostname = "B-" + SmpLookupServiceDelegate.calculateMD5(value.toLowerCase()) + "." + scheme + "." + _smlPeppolCentralDNS;
// Example: iso6523-actorid-upis%3A%3A9908:810017902
String encodedParticipant = URLEncoder.encode(scheme + "::", "UTF-8") + value;
return new URL("http://" + hostname + "/" + encodedParticipant);
} catch (Exception e) {
throw new SmpLookupException(participantId, e);
}
}
/**
     * Retrieves the document types accepted by the given participant id.
     *
     * @param participantId participant id to look up
     * @return list of document type identifiers accepted
* @throws SmpParticipantNotFoundException
*/
public List<PeppolDocumentTypeId> getServiceGroups(ParticipantId participantId) throws SmpLookupException, ParticipantNotRegisteredException, SmpParticipantNotFoundException {
// Creates the URL for the service meta data for the supplied participant
URL serviceGroupURL = getServiceGroupURL(participantId);
if (!isParticipantRegistered(serviceGroupURL)) {
throw new ParticipantNotRegisteredException(participantId);
}
InputSource smpContents = SmpLookupServiceDelegate.getUrlContent(serviceGroupURL , null);
// Parses the XML response from the SMP
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
documentBuilderFactory.setNamespaceAware(true);
DocumentBuilder documentBuilder = null;
Document document;
try {
documentBuilder = documentBuilderFactory.newDocumentBuilder();
document = documentBuilder.parse(smpContents);
// Locates the namespace URI of the root element
String nameSpaceURI = document.getDocumentElement().getNamespaceURI();
NodeList nodes = document.getElementsByTagNameNS(nameSpaceURI, "ServiceMetadataReference");
List<PeppolDocumentTypeId> result = new ArrayList<PeppolDocumentTypeId>();
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
String hrefAsString = element.getAttribute("href");
// Gets rid of all the funny %3A's...
hrefAsString = URLDecoder.decode(hrefAsString, "UTF-8");
// Grabs the entire text string after "busdox-docid-qns::"
String docTypeAsString = hrefAsString.substring(hrefAsString.indexOf("busdox-docid-qns::") + "busdox-docid-qns::".length());
// Parses and creates the document type id
PeppolDocumentTypeId peppolDocumentTypeId = PeppolDocumentTypeId.valueOf(docTypeAsString);
result.add(peppolDocumentTypeId);
}
return result;
} catch (Exception e) {
throw new SmpLookupException(participantId, serviceGroupURL , e);
}
}
/**
     * Each participant has its own sub-domain in peppolcentral, so if that
     * sub-domain does not exist, the participant is not registered.
*/
private boolean isParticipantRegistered(URL serviceGroupURL) {
return _dnsLookupService.domainExists(serviceGroupURL);
}
public PeppolProcessTypeId getProcessIdentifierForDocumentType(ParticipantId participantId, PeppolDocumentTypeId documentTypeIdentifier) throws SmpSignedServiceMetaDataException, SmpParticipantNotFoundException {
SignedServiceMetadataType serviceMetaData = getServiceMetaData(participantId, documentTypeIdentifier);
// SOAP generated type...
ProcessIdentifierType processIdentifier = serviceMetaData.getServiceMetadata().getServiceInformation().getProcessList().getProcess().get(0).getProcessIdentifier();
// Converts SOAP generated type into something nicer
return PeppolProcessTypeId.valueOf(processIdentifier.getValue());
}
@Override
public void afterPropertiesSet() throws Exception {
if (_environment.containsProperty(SIConfigurationProperties.PEPPOL_SML_DNS_NAME.getValue())){
_smlPeppolCentralDNS = _environment.getProperty(SIConfigurationProperties.PEPPOL_SML_DNS_NAME.getValue());
}
if (_environment.containsProperty(SIConfigurationProperties.PEPPOL_SML_DNS_IP.getValue())){
_ipAddressSmlCentral = _environment.getProperty(SIConfigurationProperties.PEPPOL_SML_DNS_IP.getValue());
}
if (_siProxy != null){
_siProxy.configure();
}
if (_environment.containsProperty(SIConfigurationProperties.PEPPOL_SML_DNS_PROXY_NAME.getValue()))
{
// Need input
}
}
}
|
apache-2.0
|
votca/xtp
|
src/libxtp/gwbse/bse.cc
|
21470
|
/*
* Copyright 2009-2020 The VOTCA Development Team
* (http://www.votca.org)
*
* Licensed under the Apache License, Version 2.0 (the "License")
*
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Standard includes
#include <chrono>
#include <iostream>
// VOTCA includes
#include <votca/tools/linalg.h>
// Local VOTCA includes
#include "votca/xtp/bse.h"
#include "votca/xtp/bse_operator.h"
#include "votca/xtp/bseoperator_btda.h"
#include "votca/xtp/davidsonsolver.h"
#include "votca/xtp/populationanalysis.h"
#include "votca/xtp/qmfragment.h"
#include "votca/xtp/rpa.h"
#include "votca/xtp/vc2index.h"
using boost::format;
using std::flush;
namespace votca {
namespace xtp {
void BSE::configure(const options& opt, const Eigen::VectorXd& RPAInputEnergies,
const Eigen::MatrixXd& Hqp_in) {
opt_ = opt;
bse_vmax_ = opt_.homo;
bse_cmin_ = opt_.homo + 1;
bse_vtotal_ = bse_vmax_ - opt_.vmin + 1;
bse_ctotal_ = opt_.cmax - bse_cmin_ + 1;
bse_size_ = bse_vtotal_ * bse_ctotal_;
max_dyn_iter_ = opt_.max_dyn_iter;
dyn_tolerance_ = opt_.dyn_tolerance;
if (opt_.use_Hqp_offdiag) {
Hqp_ = AdjustHqpSize(Hqp_in, RPAInputEnergies);
} else {
Hqp_ = AdjustHqpSize(Hqp_in, RPAInputEnergies).diagonal().asDiagonal();
}
SetupDirectInteractionOperator(RPAInputEnergies, 0.0);
}
Eigen::MatrixXd BSE::AdjustHqpSize(const Eigen::MatrixXd& Hqp,
const Eigen::VectorXd& RPAInputEnergies) {
Index hqp_size = bse_vtotal_ + bse_ctotal_;
Index gwsize = opt_.qpmax - opt_.qpmin + 1;
Index RPAoffset = opt_.vmin - opt_.rpamin;
Eigen::MatrixXd Hqp_BSE = Eigen::MatrixXd::Zero(hqp_size, hqp_size);
if (opt_.vmin >= opt_.qpmin) {
Index start = opt_.vmin - opt_.qpmin;
if (opt_.cmax <= opt_.qpmax) {
Hqp_BSE = Hqp.block(start, start, hqp_size, hqp_size);
} else {
Index virtoffset = gwsize - start;
Hqp_BSE.topLeftCorner(virtoffset, virtoffset) =
Hqp.block(start, start, virtoffset, virtoffset);
Index virt_extra = opt_.cmax - opt_.qpmax;
Hqp_BSE.diagonal().tail(virt_extra) =
RPAInputEnergies.segment(RPAoffset + virtoffset, virt_extra);
}
}
if (opt_.vmin < opt_.qpmin) {
Index occ_extra = opt_.qpmin - opt_.vmin;
Hqp_BSE.diagonal().head(occ_extra) =
RPAInputEnergies.segment(RPAoffset, occ_extra);
Hqp_BSE.block(occ_extra, occ_extra, gwsize, gwsize) = Hqp;
if (opt_.cmax > opt_.qpmax) {
Index virtoffset = occ_extra + gwsize;
Index virt_extra = opt_.cmax - opt_.qpmax;
Hqp_BSE.diagonal().tail(virt_extra) =
RPAInputEnergies.segment(RPAoffset + virtoffset, virt_extra);
}
}
return Hqp_BSE;
}
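// Descriptive note (not in the original source): where the BSE window
// [vmin, cmax] extends beyond the GW quasiparticle window [qpmin, qpmax],
// the missing diagonal entries of Hqp are padded with the corresponding RPA
// input energies; the overlapping block is copied from Hqp directly.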
void BSE::SetupDirectInteractionOperator(
const Eigen::VectorXd& RPAInputEnergies, double energy) {
RPA rpa = RPA(log_, Mmn_);
rpa.configure(opt_.homo, opt_.rpamin, opt_.rpamax);
rpa.setRPAInputEnergies(RPAInputEnergies);
Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> es(
rpa.calculate_epsilon_r(energy));
Mmn_.MultiplyRightWithAuxMatrix(es.eigenvectors());
epsilon_0_inv_ = Eigen::VectorXd::Zero(es.eigenvalues().size());
for (Index i = 0; i < es.eigenvalues().size(); ++i) {
if (es.eigenvalues()(i) > 1e-8) {
epsilon_0_inv_(i) = 1 / es.eigenvalues()(i);
}
}
}
template <typename BSE_OPERATOR>
void BSE::configureBSEOperator(BSE_OPERATOR& H) const {
BSEOperator_Options opt;
opt.cmax = opt_.cmax;
opt.homo = opt_.homo;
opt.qpmin = opt_.qpmin;
opt.rpamin = opt_.rpamin;
opt.vmin = opt_.vmin;
H.configure(opt);
}
tools::EigenSystem BSE::Solve_triplets_TDA() const {
TripletOperator_TDA Ht(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Ht);
return solve_hermitian(Ht);
}
void BSE::Solve_singlets(Orbitals& orb) const {
orb.setTDAApprox(opt_.useTDA);
if (opt_.useTDA) {
orb.BSESinglets() = Solve_singlets_TDA();
} else {
orb.BSESinglets() = Solve_singlets_BTDA();
}
orb.CalcCoupledTransition_Dipoles();
}
void BSE::Solve_triplets(Orbitals& orb) const {
orb.setTDAApprox(opt_.useTDA);
if (opt_.useTDA) {
orb.BSETriplets() = Solve_triplets_TDA();
} else {
orb.BSETriplets() = Solve_triplets_BTDA();
}
}
tools::EigenSystem BSE::Solve_singlets_TDA() const {
SingletOperator_TDA Hs(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Hs);
XTP_LOG(Log::error, log_)
<< TimeStamp() << " Setup TDA singlet hamiltonian " << flush;
return solve_hermitian(Hs);
}
SingletOperator_TDA BSE::getSingletOperator_TDA() const {
SingletOperator_TDA Hs(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Hs);
return Hs;
}
TripletOperator_TDA BSE::getTripletOperator_TDA() const {
TripletOperator_TDA Ht(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Ht);
return Ht;
}
template <typename BSE_OPERATOR>
tools::EigenSystem BSE::solve_hermitian(BSE_OPERATOR& h) const {
std::chrono::time_point<std::chrono::system_clock> start =
std::chrono::system_clock::now();
tools::EigenSystem result;
DavidsonSolver DS(log_);
DS.set_correction(opt_.davidson_correction);
DS.set_tolerance(opt_.davidson_tolerance);
DS.set_size_update(opt_.davidson_update);
DS.set_iter_max(opt_.davidson_maxiter);
DS.set_max_search_space(10 * opt_.nmax);
DS.solve(h, opt_.nmax);
result.eigenvalues() = DS.eigenvalues();
result.eigenvectors() = DS.eigenvectors();
std::chrono::time_point<std::chrono::system_clock> end =
std::chrono::system_clock::now();
std::chrono::duration<double> elapsed_time = end - start;
XTP_LOG(Log::info, log_) << TimeStamp() << " Diagonalization done in "
<< elapsed_time.count() << " secs" << flush;
return result;
}
tools::EigenSystem BSE::Solve_singlets_BTDA() const {
SingletOperator_TDA A(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(A);
SingletOperator_BTDA_B B(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(B);
XTP_LOG(Log::error, log_)
<< TimeStamp() << " Setup Full singlet hamiltonian " << flush;
return Solve_nonhermitian_Davidson(A, B);
}
tools::EigenSystem BSE::Solve_triplets_BTDA() const {
TripletOperator_TDA A(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(A);
Hd2Operator B(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(B);
XTP_LOG(Log::error, log_)
<< TimeStamp() << " Setup Full triplet hamiltonian " << flush;
return Solve_nonhermitian_Davidson(A, B);
}
template <typename BSE_OPERATOR_A, typename BSE_OPERATOR_B>
tools::EigenSystem BSE::Solve_nonhermitian_Davidson(BSE_OPERATOR_A& Aop,
BSE_OPERATOR_B& Bop) const {
std::chrono::time_point<std::chrono::system_clock> start =
std::chrono::system_clock::now();
// operator
HamiltonianOperator<BSE_OPERATOR_A, BSE_OPERATOR_B> Hop(Aop, Bop);
// Davidson solver
DavidsonSolver DS(log_);
DS.set_correction(opt_.davidson_correction);
DS.set_tolerance(opt_.davidson_tolerance);
DS.set_size_update(opt_.davidson_update);
DS.set_iter_max(opt_.davidson_maxiter);
DS.set_max_search_space(10 * opt_.nmax);
DS.set_matrix_type("HAM");
DS.solve(Hop, opt_.nmax);
// results
tools::EigenSystem result;
result.eigenvalues() = DS.eigenvalues();
Eigen::MatrixXd tmpX = DS.eigenvectors().topRows(Aop.rows());
Eigen::MatrixXd tmpY = DS.eigenvectors().bottomRows(Bop.rows());
// Normalization so that eigenvector^2 - eigenvector2^2 = 1, i.e. |X|^2 - |Y|^2 = 1
Eigen::VectorXd normX = tmpX.colwise().squaredNorm();
Eigen::VectorXd normY = tmpY.colwise().squaredNorm();
Eigen::ArrayXd sqinvnorm = (normX - normY).array().inverse().cwiseSqrt();
result.eigenvectors() = tmpX * sqinvnorm.matrix().asDiagonal();
result.eigenvectors2() = tmpY * sqinvnorm.matrix().asDiagonal();
std::chrono::time_point<std::chrono::system_clock> end =
std::chrono::system_clock::now();
std::chrono::duration<double> elapsed_time = end - start;
XTP_LOG(Log::info, log_) << TimeStamp() << " Diagonalization done in "
<< elapsed_time.count() << " secs" << flush;
return result;
}
void BSE::printFragInfo(const std::vector<QMFragment<BSE_Population> >& frags,
Index state) const {
for (const QMFragment<BSE_Population>& frag : frags) {
double dq = frag.value().H[state] + frag.value().E[state];
double qeff = dq + frag.value().Gs;
XTP_LOG(Log::error, log_)
<< format(
" Fragment %1$4d -- hole: %2$5.1f%% electron: "
"%3$5.1f%% dQ: %4$+5.2f Qeff: %5$+5.2f") %
int(frag.getId()) % (100.0 * frag.value().H[state]) %
(-100.0 * frag.value().E[state]) % dq % qeff
<< flush;
}
return;
}
void BSE::PrintWeights(const Eigen::VectorXd& weights) const {
vc2index vc = vc2index(opt_.vmin, bse_cmin_, bse_ctotal_);
for (Index i_bse = 0; i_bse < bse_size_; ++i_bse) {
double weight = weights(i_bse);
if (weight > opt_.min_print_weight) {
XTP_LOG(Log::error, log_)
<< format(" HOMO-%1$-3d -> LUMO+%2$-3d : %3$3.1f%%") %
(opt_.homo - vc.v(i_bse)) % (vc.c(i_bse) - opt_.homo - 1) %
(100.0 * weight)
<< flush;
}
}
return;
}
void BSE::Analyze_singlets(std::vector<QMFragment<BSE_Population> > fragments,
const Orbitals& orb) const {
QMStateType singlet = QMStateType(QMStateType::Singlet);
Eigen::VectorXd oscs = orb.Oscillatorstrengths();
Interaction act = Analyze_eh_interaction(singlet, orb);
if (fragments.size() > 0) {
Lowdin low;
low.CalcChargeperFragment(fragments, orb, singlet);
}
const Eigen::VectorXd& energies = orb.BSESinglets().eigenvalues();
double hrt2ev = tools::conv::hrt2ev;
XTP_LOG(Log::error, log_)
<< " ====== singlet energies (eV) ====== " << flush;
for (Index i = 0; i < opt_.nmax; ++i) {
Eigen::VectorXd weights =
orb.BSESinglets().eigenvectors().col(i).cwiseAbs2();
if (!orb.getTDAApprox()) {
weights -= orb.BSESinglets().eigenvectors2().col(i).cwiseAbs2();
}
double osc = oscs[i];
XTP_LOG(Log::error, log_)
<< format(
" S = %1$4d Omega = %2$+1.12f eV lamdba = %3$+3.2f nm <FT> "
"= %4$+1.4f <K_x> = %5$+1.4f <K_d> = %6$+1.4f") %
(i + 1) % (hrt2ev * energies(i)) %
(1240.0 / (hrt2ev * energies(i))) %
(hrt2ev * act.qp_contrib(i)) %
(hrt2ev * act.exchange_contrib(i)) %
(hrt2ev * act.direct_contrib(i))
<< flush;
const Eigen::Vector3d& trdip = orb.TransitionDipoles()[i];
XTP_LOG(Log::error, log_)
<< format(
" TrDipole length gauge[e*bohr] dx = %1$+1.4f dy = "
"%2$+1.4f dz = %3$+1.4f |d|^2 = %4$+1.4f f = %5$+1.4f") %
trdip[0] % trdip[1] % trdip[2] % (trdip.squaredNorm()) % osc
<< flush;
PrintWeights(weights);
if (fragments.size() > 0) {
printFragInfo(fragments, i);
}
XTP_LOG(Log::error, log_) << flush;
}
return;
}
void BSE::Analyze_triplets(std::vector<QMFragment<BSE_Population> > fragments,
const Orbitals& orb) const {
QMStateType triplet = QMStateType(QMStateType::Triplet);
Interaction act = Analyze_eh_interaction(triplet, orb);
if (fragments.size() > 0) {
Lowdin low;
low.CalcChargeperFragment(fragments, orb, triplet);
}
const Eigen::VectorXd& energies = orb.BSETriplets().eigenvalues();
XTP_LOG(Log::error, log_)
<< " ====== triplet energies (eV) ====== " << flush;
for (Index i = 0; i < opt_.nmax; ++i) {
Eigen::VectorXd weights =
orb.BSETriplets().eigenvectors().col(i).cwiseAbs2();
if (!orb.getTDAApprox()) {
weights -= orb.BSETriplets().eigenvectors2().col(i).cwiseAbs2();
}
XTP_LOG(Log::error, log_)
<< format(
" T = %1$4d Omega = %2$+1.12f eV lamdba = %3$+3.2f nm <FT> "
"= %4$+1.4f <K_d> = %5$+1.4f") %
(i + 1) % (tools::conv::hrt2ev * energies(i)) %
(1240.0 / (tools::conv::hrt2ev * energies(i))) %
(tools::conv::hrt2ev * act.qp_contrib(i)) %
(tools::conv::hrt2ev * act.direct_contrib(i))
<< flush;
PrintWeights(weights);
if (fragments.size() > 0) {
printFragInfo(fragments, i);
}
XTP_LOG(Log::error, log_) << format(" ") << flush;
}
return;
}
template <class OP>
Eigen::VectorXd ExpValue(const Eigen::MatrixXd& state1, OP OPxstate2) {
return state1.cwiseProduct(OPxstate2.eval()).colwise().sum().transpose();
}
Eigen::VectorXd ExpValue(const Eigen::MatrixXd& state1,
const Eigen::MatrixXd& OPxstate2) {
return state1.cwiseProduct(OPxstate2).colwise().sum().transpose();
}
template <typename BSE_OPERATOR>
BSE::ExpectationValues BSE::ExpectationValue_Operator(
const QMStateType& type, const Orbitals& orb, const BSE_OPERATOR& H) const {
const tools::EigenSystem& BSECoefs =
(type == QMStateType::Singlet) ? orb.BSESinglets() : orb.BSETriplets();
ExpectationValues expectation_values;
const Eigen::MatrixXd temp = H * BSECoefs.eigenvectors();
expectation_values.direct_term = ExpValue(BSECoefs.eigenvectors(), temp);
if (!orb.getTDAApprox()) {
expectation_values.direct_term +=
ExpValue(BSECoefs.eigenvectors2(), H * BSECoefs.eigenvectors2());
expectation_values.cross_term =
2 * ExpValue(BSECoefs.eigenvectors2(), temp);
} else {
expectation_values.cross_term = Eigen::VectorXd::Zero(0);
}
return expectation_values;
}
template <typename BSE_OPERATOR>
BSE::ExpectationValues BSE::ExpectationValue_Operator_State(
const QMState& state, const Orbitals& orb, const BSE_OPERATOR& H) const {
const tools::EigenSystem& BSECoefs = (state.Type() == QMStateType::Singlet)
? orb.BSESinglets()
: orb.BSETriplets();
ExpectationValues expectation_values;
const Eigen::MatrixXd BSECoefs_state =
BSECoefs.eigenvectors().col(state.StateIdx());
const Eigen::MatrixXd temp = H * BSECoefs_state;
expectation_values.direct_term = ExpValue(BSECoefs_state, temp);
if (!orb.getTDAApprox()) {
const Eigen::MatrixXd BSECoefs2_state =
BSECoefs.eigenvectors2().col(state.StateIdx());
expectation_values.direct_term +=
ExpValue(BSECoefs2_state, H * BSECoefs2_state);
expectation_values.cross_term = 2 * ExpValue(BSECoefs2_state, temp);
} else {
expectation_values.cross_term = Eigen::VectorXd::Zero(0);
}
return expectation_values;
}
// Composition of the excitation energy in terms of QP, direct (screened),
// and exchange contributions in the BSE
// Full BSE:
//
// | A* | | H K | | A |
// | -B* | | -K -H | | B | = A*.H.A + B*.H.B + 2A*.K.B
//
// with: H = H_qp + H_d + eta.H_x
// K = H_d2 + eta.H_x
//
// reports composition for FULL BSE as
// <FT> = A*.H_qp.A + B*.H_qp.B
// <Kx> = eta.(A*.H_x.A + B*.H_x.B + 2A*.H_x.B)
// <Kd> = A*.H_d.A + B*.H_d.B + 2A*.H_d2.B
BSE::Interaction BSE::Analyze_eh_interaction(const QMStateType& type,
const Orbitals& orb) const {
Interaction analysis;
{
HqpOperator hqp(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(hqp);
ExpectationValues expectation_values =
ExpectationValue_Operator(type, orb, hqp);
analysis.qp_contrib = expectation_values.direct_term;
}
{
HdOperator hd(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(hd);
ExpectationValues expectation_values =
ExpectationValue_Operator(type, orb, hd);
analysis.direct_contrib = expectation_values.direct_term;
}
if (!orb.getTDAApprox()) {
Hd2Operator hd2(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(hd2);
ExpectationValues expectation_values =
ExpectationValue_Operator(type, orb, hd2);
analysis.direct_contrib += expectation_values.cross_term;
}
if (type == QMStateType::Singlet) {
HxOperator hx(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(hx);
ExpectationValues expectation_values =
ExpectationValue_Operator(type, orb, hx);
analysis.exchange_contrib = 2.0 * expectation_values.direct_term;
if (!orb.getTDAApprox()) {
analysis.exchange_contrib += 2.0 * expectation_values.cross_term;
}
} else {
analysis.exchange_contrib = Eigen::VectorXd::Zero(0);
}
return analysis;
}
// Dynamical Screening in BSE as perturbation to static excitation energies
// as in Phys. Rev. B 80, 241405 (2009) for the TDA case
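// Sketch of the loop below: for each excitation i, iterate
//   Omega_i^(n+1) = Omega_i^static + <i|H_d(0)|i> - <i|H_d(Omega_i^(n))|i>
// (plus the H_d2 cross term when TDA is off) until successive energies agree
// to within dyn_tolerance_ or max_dyn_iter_ iterations are exhausted.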
void BSE::Perturbative_DynamicalScreening(const QMStateType& type,
Orbitals& orb) {
const tools::EigenSystem& BSECoefs =
(type == QMStateType::Singlet) ? orb.BSESinglets() : orb.BSETriplets();
const Eigen::VectorXd& RPAInputEnergies = orb.RPAInputEnergies();
// static case as reference
SetupDirectInteractionOperator(RPAInputEnergies, 0.0);
HdOperator Hd_static(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Hd_static);
ExpectationValues expectation_values =
ExpectationValue_Operator(type, orb, Hd_static);
Eigen::VectorXd Hd_static_contribution = expectation_values.direct_term;
if (!orb.getTDAApprox()) {
Hd2Operator Hd2_static(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Hd2_static);
expectation_values = ExpectationValue_Operator(type, orb, Hd2_static);
Hd_static_contribution += expectation_values.cross_term;
}
const Eigen::VectorXd& BSEenergies = BSECoefs.eigenvalues();
// initial copy of static BSE energies to dynamic
Eigen::VectorXd BSEenergies_dynamic = BSEenergies;
// recalculate Hd at the various energies
for (Index i_exc = 0; i_exc < BSEenergies.size(); i_exc++) {
XTP_LOG(Log::info, log_) << "Dynamical Screening BSE, Excitation " << i_exc
<< " static " << BSEenergies(i_exc) << flush;
for (Index iter = 0; iter < max_dyn_iter_; iter++) {
// setup the direct operator with the last energy as screening frequency
double old_energy = BSEenergies_dynamic(i_exc);
SetupDirectInteractionOperator(RPAInputEnergies, old_energy);
HdOperator Hd_dyn(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Hd_dyn);
// get the contribution of Hd for the dynamic case
QMState state(type, i_exc, false);
expectation_values = ExpectationValue_Operator_State(state, orb, Hd_dyn);
Eigen::VectorXd Hd_dynamic_contribution = expectation_values.direct_term;
if (!orb.getTDAApprox()) {
Hd2Operator Hd2_dyn(epsilon_0_inv_, Mmn_, Hqp_);
configureBSEOperator(Hd2_dyn);
expectation_values =
ExpectationValue_Operator_State(state, orb, Hd2_dyn);
Hd_dynamic_contribution += expectation_values.cross_term;
}
// new energy perturbatively
BSEenergies_dynamic(i_exc) = BSEenergies(i_exc) +
Hd_static_contribution(i_exc) -
Hd_dynamic_contribution(0);
XTP_LOG(Log::info, log_)
<< "Dynamical Screening BSE, excitation " << i_exc << " iteration "
<< iter << " dynamic " << BSEenergies_dynamic(i_exc) << flush;
// check tolerance
if (std::abs(BSEenergies_dynamic(i_exc) - old_energy) < dyn_tolerance_) {
break;
}
}
}
double hrt2ev = tools::conv::hrt2ev;
if (type == QMStateType::Singlet) {
orb.BSESinglets_dynamic() = BSEenergies_dynamic;
XTP_LOG(Log::error, log_) << " ====== singlet energies with perturbative "
"dynamical screening (eV) ====== "
<< flush;
Eigen::VectorXd oscs = orb.Oscillatorstrengths();
for (Index i = 0; i < opt_.nmax; ++i) {
double osc = oscs[i];
XTP_LOG(Log::error, log_)
<< format(
" S(dynamic) = %1$4d Omega = %2$+1.12f eV lamdba = %3$+3.2f "
"nm f "
"= %4$+1.4f") %
(i + 1) % (hrt2ev * BSEenergies_dynamic(i)) %
(1240.0 / (hrt2ev * BSEenergies_dynamic(i))) %
(osc * BSEenergies_dynamic(i) / BSEenergies(i))
<< flush;
}
} else {
orb.BSETriplets_dynamic() = BSEenergies_dynamic;
XTP_LOG(Log::error, log_) << " ====== triplet energies with perturbative "
"dynamical screening (eV) ====== "
<< flush;
for (Index i = 0; i < opt_.nmax; ++i) {
XTP_LOG(Log::error, log_)
<< format(
" T(dynamic) = %1$4d Omega = %2$+1.12f eV lamdba = %3$+3.2f "
"nm ") %
(i + 1) % (hrt2ev * BSEenergies_dynamic(i)) %
(1240.0 / (hrt2ev * BSEenergies_dynamic(i)))
<< flush;
}
}
}
} // namespace xtp
} // namespace votca
|
apache-2.0
|
aajjbb/contest-files
|
LiveArchive/FunColoring.cpp
|
1896
|
#include <bits/stdc++.h>
using namespace std;
const int MAXN = 115;
int T, N, M;
string S;
bitset<MAXN> bi[MAXN];
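// Summary (added for clarity): each of the M constraints lists up to three
// vertices; a 2-coloring is acceptable when every constraint naming at least
// two vertices receives both colors. Since N is small, all 2^N colorings are
// enumerated: bit k of mask i is the color of vertex k, and a constraint
// fails when all of its vertices land on one side (a == 0 or b == 0).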
int main() {
cin >> T;
for (int t = 1; t <= T; t++) {
cin >> N >> M;
for (int i = 0; i < MAXN; i++) {
bi[i].reset();
}
getline(cin, S);
for (int i = 0; i < M; i++) {
getline(cin, S);
int a = -1, b = -1, c = -1;
sscanf(S.c_str(), "%d%d%d", &a, &b, &c);
a -= 1;
b -= 1;
c -= 1;
//cout << a << " " << b << " " << c << endl;
if (a >= 0) {
bi[i].set(a);
}
if (b >= 0) {
bi[i].set(b);
}
if (c >= 0) {
bi[i].set(c);
}
}
bool ans = false;
for (int i = 0; i < (1 << N); i++) {
bool fine = true;
for (int j = 0; j < M; j++) {
int a = 0;
int b = 0;
if (bi[j].count() >= 2) {
for (int k = 0; k < N; k++) {
if (bi[j].test(k)) {
if (i & (1 << k)) {
a += 1;
} else {
b += 1;
}
}
}
//cout << a << " " << b << endl;
if (a == 0 or b == 0) {
fine = false;
break;
}
}
}
if (fine) {
ans = true;
break;
}
}
if (ans) {
cout << "Y";
} else {
cout << "N";
}
}
//cout << "\n";
return 0;
}
|
apache-2.0
|
googleads/google-api-ads-ruby
|
adwords_api/examples/v201809/advanced_operations/add_dynamic_search_ads_campaign.rb
|
7384
|
#!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2017, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This code example adds a Dynamic Search Ads campaign. To get campaigns, run
# get_campaigns.rb.
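#
# Usage sketch (our note; assumes OAuth2 credentials were already stored in
# ENV['HOME']/adwords_api.yml, e.g. via misc/setup_oauth2.rb):
#   $ ruby add_dynamic_search_ads_campaign.rb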
require 'date'
require 'adwords_api'
def add_dynamic_search_ads_campaign()
# AdwordsApi::Api will read a config file from ENV['HOME']/adwords_api.yml
# when called without parameters.
adwords = AdwordsApi::Api.new
# To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
# the configuration file or provide your own logger:
# adwords.logger = Logger.new('adwords_xml.log')
budget = create_budget(adwords)
campaign = create_campaign(adwords, budget)
ad_group = create_ad_group(adwords, campaign)
create_expanded_dsa(adwords, ad_group)
add_web_page_criteria(adwords, ad_group)
end
def create_budget(adwords)
budget_srv = adwords.service(:BudgetService, API_VERSION)
shared_budget = {
:name => "Interplanetary Cruise #%d" % (Time.now.to_f * 1000).to_i,
:amount => {
:micro_amount => 50_000_000
},
:delivery_method => 'STANDARD'
}
budget_operation = {
:operand => shared_budget,
:operator => 'ADD'
}
budget = budget_srv.mutate([budget_operation])[:value].first
return budget
end
def create_campaign(adwords, budget)
campaign_srv = adwords.service(:CampaignService, API_VERSION)
campaign = {
:name => "Interplanetary Cruise #%d" % (Time.now.to_f * 1000).to_i,
:advertising_channel_type => 'SEARCH',
# Recommendation: Set the campaign to PAUSED when creating it to prevent the
# ads from immediately serving. Set to ENABLED once you've added targeting
# and the ads are ready to serve.
:status => 'PAUSED',
:bidding_strategy_configuration => {
:bidding_strategy_type => 'MANUAL_CPC'
},
# Only the budgetId should be sent; all other fields will be ignored by
# CampaignService.
:budget => {
:budget_id => budget[:budget_id]
},
:settings => [
:xsi_type => 'DynamicSearchAdsSetting',
:domain_name => 'example.com',
:language_code => 'en'
],
# Optional: Set the start and end dates.
:start_date => DateTime.parse((Date.today + 1).to_s).strftime('%Y%m%d'),
:end_date => DateTime.parse(Date.today.next_year.to_s).strftime('%Y%m%d')
}
operation = {
:operand => campaign,
:operator => 'ADD'
}
new_campaign = campaign_srv.mutate([operation])[:value].first
puts "Campaign with name '%s' and ID %d was added." %
[new_campaign[:name], new_campaign[:id]]
return new_campaign
end
def create_ad_group(adwords, campaign)
ad_group_srv = adwords.service(:AdGroupService, API_VERSION)
ad_group = {
# Required: Set the ad group's type to Dynamic Search Ads.
:ad_group_type => 'SEARCH_DYNAMIC_ADS',
:name => "Earth to Mars Cruises #%d" % (Time.now.to_f * 1000).to_i,
:campaign_id => campaign[:id],
:status => 'PAUSED',
# Recommended: Set a tracking URL template for your ad group if you want to
# use URL tracking software.
:tracking_url_template =>
'http://tracker.example.com/traveltracker/{escapedlpurl}',
:bidding_strategy_configuration => {
:bids => [{
:xsi_type => 'CpcBid',
:bid => {
:micro_amount => 3_000_000
}
}]
}
}
operation = {
:operand => ad_group,
:operator => 'ADD'
}
new_ad_group = ad_group_srv.mutate([operation])[:value].first
puts "Ad group with name '%s' and ID %d was added." %
[new_ad_group[:name], new_ad_group[:id]]
return new_ad_group
end
def create_expanded_dsa(adwords, ad_group)
ad_group_ad_srv = adwords.service(:AdGroupAdService, API_VERSION)
# Create the expanded Dynamic Search Ad. This ad will have its headline and
# final URL auto-generated at serving time according to domain name specific
# information provided by DynamicSearchAdsSetting at the campaign level.
expanded_dsa = {
:xsi_type => 'ExpandedDynamicSearchAd',
:description => 'Buy your tickets now!',
:description2 => 'Discount ends soon'
}
ad_group_ad = {
:ad_group_id => ad_group[:id],
:ad => expanded_dsa,
# Optional: Set the status.
:status => 'PAUSED'
}
operation = {
:operand => ad_group_ad,
:operator => 'ADD'
}
new_ad_group_ad = ad_group_ad_srv.mutate([operation])[:value].first
new_expanded_dsa = new_ad_group_ad[:ad]
puts ("Expanded Dynamic Search Ad with ID %d, description '%s', and " +
"description 2 '%s' was added.") % [new_expanded_dsa[:id],
new_expanded_dsa[:description], new_expanded_dsa[:description2]]
end
def add_web_page_criteria(adwords, ad_group)
ad_group_criterion_srv =
adwords.service(:AdGroupCriterionService, API_VERSION)
webpage = {
:xsi_type => 'Webpage',
:parameter => {
:criterion_name => 'Special offers',
:conditions => [
{
:operand => 'URL',
:argument => '/specialoffers'
},
{
:operand => 'PAGE_TITLE',
:argument => 'Special Offer'
}
]
}
}
biddable_ad_group_criterion = {
:xsi_type => 'BiddableAdGroupCriterion',
:ad_group_id => ad_group[:id],
:criterion => webpage,
:user_status => 'PAUSED',
# Optional: set a custom bid.
:bidding_strategy_configuration => {
:bids => [{
:xsi_type => 'CpcBid',
:bid => {
:micro_amount => 10_000_000
}
}]
}
}
operation = {
:operand => biddable_ad_group_criterion,
:operator => 'ADD'
}
new_ad_group_criterion =
ad_group_criterion_srv.mutate([operation])[:value].first
puts "Webpage criterion with ID %d was added to ad group ID %d." % [
new_ad_group_criterion[:criterion][:id],
new_ad_group_criterion[:ad_group_id]
]
end
if __FILE__ == $0
API_VERSION = :v201809
begin
add_dynamic_search_ads_campaign()
# Authorization error.
rescue AdsCommon::Errors::OAuth2VerificationRequired => e
puts "Authorization credentials are not valid. Edit adwords_api.yml for " +
"OAuth2 client ID and secret and run misc/setup_oauth2.rb example " +
"to retrieve and store OAuth2 tokens."
puts "See this wiki page for more details:\n\n " +
'https://github.com/googleads/google-api-ads-ruby/wiki/OAuth2'
# HTTP errors.
rescue AdsCommon::Errors::HttpError => e
puts "HTTP Error: %s" % e
# API errors.
rescue AdwordsApi::Errors::ApiException => e
puts "Message: %s" % e.message
puts 'Errors:'
e.errors.each_with_index do |error, index|
puts "\tError [%d]:" % (index + 1)
error.each do |field, value|
puts "\t\t%s: %s" % [field, value]
end
end
end
end
|
apache-2.0
|
adviti/melange
|
tests/app/soc/logic/test_validate.py
|
3570
|
#!/usr/bin/env python2.5
#
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from soc.logic import validate
class ValidateTest(unittest.TestCase):
"""Tests related to the validation helper functions.
"""
def testIsFeedURLValid(self):
"""Tests whether the urls are valid feed urls.
"""
#invalid: not a feed url
self.assertFalse(validate.isFeedURLValid('http://www.google.com'))
self.assertFalse(validate.isFeedURLValid(''))
#valid feed url
self.assertTrue(validate.isFeedURLValid(
'http://googlesummerofcode.blogspot.com/feeds/posts/default'))
#invalid: wrong protocol
self.assertFalse(validate.isFeedURLValid(
'htp://googlesummerofcode.blogspot.com/feeds/posts/default'))
def testIsLinkIdFormatValid(self):
"""Tests the validity of Link Ids.
"""
#valid: starts with lowercase, no double underscores, does not end
#with an underscore
self.assertTrue(validate.isLinkIdFormatValid('sfd32'))
#invalid: starts with a number
self.assertFalse(validate.isLinkIdFormatValid('9s8whhu'))
#invalid: starts with an underscore
self.assertFalse(validate.isLinkIdFormatValid('_jhja87'))
#valid: double underscore
self.assertTrue(validate.isLinkIdFormatValid('kjnask__j87'))
#valid: trailing underscore
self.assertTrue(validate.isLinkIdFormatValid('jhsdfj_'))
#invalid: starting and trailing underscores
self.assertFalse(validate.isLinkIdFormatValid('_jhsj38_'))
#invalid: starts with uppercase
self.assertFalse(validate.isLinkIdFormatValid('Ukkjs'))
#valid: underscore in the middle and rest in lowercase
self.assertTrue(validate.isLinkIdFormatValid('a_b'))
#invalid: a capital letter in the middle
self.assertFalse(validate.isLinkIdFormatValid('aBc'))
def testIsScopePathFormatValid(self):
"""Tests the validity of Scope Paths.
Scope paths are groups of Link Ids separated by '/'.
"""
#invalid: empty string
self.assertFalse(validate.isScopePathFormatValid(''))
#valid: single chunk
self.assertTrue(validate.isScopePathFormatValid('addvw'))
#invalid: starts with an underscore
self.assertFalse(validate.isScopePathFormatValid('_jhads/sdafsa'))
#valid: chunks separated by '/'
self.assertTrue(validate.isScopePathFormatValid('adhcd/dfds'))
#valid: has a double underscore
self.assertTrue(validate.isScopePathFormatValid('ndfnsj__nj'))
#invalid: starts with a capital letter
self.assertFalse(validate.isScopePathFormatValid('Usdn_/sdfa'))
#invalid: second chunk ends with '/'
self.assertFalse(validate.isScopePathFormatValid('adsf/sdfgr/'))
#invalid: first chunk should not start with a '/'
self.assertFalse(validate.isScopePathFormatValid('/abc'))
#invalid: has a capital letter
self.assertFalse(validate.isScopePathFormatValid('aBc/def'))
#valid: underscore in the middle and rest of the letters in lowercase
self.assertTrue(validate.isScopePathFormatValid('a_b/cde'))
|
apache-2.0
|
chuckjaz/TypeScript
|
src/compiler/transformers/generators.ts
|
135833
|
/// <reference path="../factory.ts" />
/// <reference path="../visitor.ts" />
// Transforms generator functions into a compatible ES5 representation with similar runtime
// semantics. This is accomplished by first transforming the body of each generator
// function into an intermediate representation that is then compiled into a JavaScript
// switch statement.
//
// Many functions in this transformer will contain comments indicating the expected
// intermediate representation. For illustrative purposes, the following intermediate
// language is used to define this intermediate representation:
//
// .nop - Performs no operation.
// .local NAME, ... - Define local variable declarations.
// .mark LABEL - Mark the location of a label.
// .br LABEL - Jump to a label. If jumping out of a protected
// region, all .finally blocks are executed.
// .brtrue LABEL, (x) - Jump to a label iff the expression `x` is truthy.
// If jumping out of a protected region, all .finally
// blocks are executed.
// .brfalse LABEL, (x) - Jump to a label iff the expression `x` is falsey.
// If jumping out of a protected region, all .finally
// blocks are executed.
// .yield (x) - Yield the value of the optional expression `x`.
// Resume at the next label.
// .yieldstar (x) - Delegate yield to the value of the optional
// expression `x`. Resume at the next label.
// NOTE: `x` must be an Iterator, not an Iterable.
// .loop CONTINUE, BREAK - Marks the beginning of a loop. Any "continue" or
// "break" abrupt completions jump to the CONTINUE or
// BREAK labels, respectively.
// .endloop - Marks the end of a loop.
// .with (x) - Marks the beginning of a WithStatement block, using
// the supplied expression.
// .endwith - Marks the end of a WithStatement.
// .switch - Marks the beginning of a SwitchStatement.
// .endswitch - Marks the end of a SwitchStatement.
// .labeled NAME - Marks the beginning of a LabeledStatement with the
// supplied name.
// .endlabeled - Marks the end of a LabeledStatement.
// .try TRY, CATCH, FINALLY, END - Marks the beginning of a protected region, and the
// labels for each block.
// .catch (x) - Marks the beginning of a catch block.
// .finally - Marks the beginning of a finally block.
// .endfinally - Marks the end of a finally block.
// .endtry - Marks the end of a protected region.
// .throw (x) - Throws the value of the expression `x`.
// .return (x) - Returns the value of the expression `x`.
//
// In addition, the illustrative intermediate representation introduces some special
// variables:
//
// %sent% - Either returns the next value sent to the generator,
// returns the result of a delegated yield, or throws
// the exception sent to the generator.
// %error% - Returns the value of the current exception in a
// catch block.
//
// This intermediate representation is then compiled into JavaScript syntax. The resulting
// compilation output looks something like the following:
//
// function f() {
// var /*locals*/;
// /*functions*/
// return __generator(function (state) {
// switch (state.label) {
// /*cases per label*/
// }
// });
// }
//
// Each of the above instructions corresponds to JavaScript emit similar to the following:
//
// .local NAME | var NAME;
// -------------------------------|----------------------------------------------
// .mark LABEL | case LABEL:
// -------------------------------|----------------------------------------------
// .br LABEL | return [3 /*break*/, LABEL];
// -------------------------------|----------------------------------------------
// .brtrue LABEL, (x) | if (x) return [3 /*break*/, LABEL];
// -------------------------------|----------------------------------------------
// .brfalse LABEL, (x) | if (!(x)) return [3 /*break*/, LABEL];
// -------------------------------|----------------------------------------------
// .yield (x) | return [4 /*yield*/, x];
// .mark RESUME | case RESUME:
// a = %sent%; | a = state.sent();
// -------------------------------|----------------------------------------------
// .yieldstar (x) | return [5 /*yield**/, x];
// .mark RESUME | case RESUME:
// a = %sent%; | a = state.sent();
// -------------------------------|----------------------------------------------
// .with (_a) | with (_a) {
// a(); | a();
// | }
// | state.label = LABEL;
// .mark LABEL | case LABEL:
// | with (_a) {
// b(); | b();
// | }
// .endwith |
// -------------------------------|----------------------------------------------
// | case 0:
// | state.trys = [];
// | ...
// .try TRY, CATCH, FINALLY, END |
// .mark TRY | case TRY:
// | state.trys.push([TRY, CATCH, FINALLY, END]);
// .nop |
// a(); | a();
// .br END | return [3 /*break*/, END];
// .catch (e) |
// .mark CATCH | case CATCH:
// | e = state.sent();
// b(); | b();
// .br END | return [3 /*break*/, END];
// .finally |
// .mark FINALLY | case FINALLY:
// c(); | c();
// .endfinally | return [7 /*endfinally*/];
// .endtry |
// .mark END | case END:
/*@internal*/
namespace ts {
type Label = number;
const enum OpCode {
Nop, // No operation, used to force a new case in the state machine
Statement, // A regular javascript statement
Assign, // An assignment
Break, // A break instruction used to jump to a label
BreakWhenTrue, // A break instruction used to jump to a label if a condition evaluates to true
BreakWhenFalse, // A break instruction used to jump to a label if a condition evaluates to false
Yield, // A completion instruction for the `yield` keyword
YieldStar, // A completion instruction for the `yield*` keyword (not implemented, but reserved for future use)
Return, // A completion instruction for the `return` keyword
Throw, // A completion instruction for the `throw` keyword
Endfinally // Marks the end of a `finally` block
}
type OperationArguments = [Label] | [Label, Expression] | [Statement] | [Expression] | [Expression, Expression];
// whether a generated code block is opening or closing at the current operation for a FunctionBuilder
const enum BlockAction {
Open,
Close,
}
// the kind for a generated code block in a FunctionBuilder
const enum CodeBlockKind {
Exception,
With,
Switch,
Loop,
Labeled
}
// the state for a generated code exception block
const enum ExceptionBlockState {
Try,
Catch,
Finally,
Done
}
// A generated code block
interface CodeBlock {
kind: CodeBlockKind;
}
// a generated exception block, used for 'try' statements
interface ExceptionBlock extends CodeBlock {
state: ExceptionBlockState;
startLabel: Label;
catchVariable?: Identifier;
catchLabel?: Label;
finallyLabel?: Label;
endLabel: Label;
}
// A generated code block that tracks the target for 'break' statements in a LabeledStatement.
interface LabeledBlock extends CodeBlock {
labelText: string;
isScript: boolean;
breakLabel: Label;
}
// a generated block that tracks the target for 'break' statements in a 'switch' statement
interface SwitchBlock extends CodeBlock {
isScript: boolean;
breakLabel: Label;
}
// a generated block that tracks the targets for 'break' and 'continue' statements, used for iteration statements
interface LoopBlock extends CodeBlock {
continueLabel: Label;
isScript: boolean;
breakLabel: Label;
}
// a generated block associated with a 'with' statement
interface WithBlock extends CodeBlock {
expression: Identifier;
startLabel: Label;
endLabel: Label;
}
// NOTE: changes to this enum should be reflected in the __generator helper.
const enum Instruction {
Next = 0,
Throw = 1,
Return = 2,
Break = 3,
Yield = 4,
YieldStar = 5,
Catch = 6,
Endfinally = 7,
}
function getInstructionName(instruction: Instruction): string {
switch (instruction) {
case Instruction.Return: return "return";
case Instruction.Break: return "break";
case Instruction.Yield: return "yield";
case Instruction.YieldStar: return "yield*";
case Instruction.Endfinally: return "endfinally";
}
}
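// For example (matching the table in the file header), a plain `yield x`
// lowers to `return [4 /*yield*/, x];` followed by a `.mark` case clause,
// and the resumed value is read back through `state.sent()`.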
export function transformGenerators(context: TransformationContext) {
const {
resumeLexicalEnvironment,
endLexicalEnvironment,
hoistFunctionDeclaration,
hoistVariableDeclaration
} = context;
const compilerOptions = context.getCompilerOptions();
const languageVersion = getEmitScriptTarget(compilerOptions);
const resolver = context.getEmitResolver();
const previousOnSubstituteNode = context.onSubstituteNode;
context.onSubstituteNode = onSubstituteNode;
let currentSourceFile: SourceFile;
let renamedCatchVariables: Map<boolean>;
let renamedCatchVariableDeclarations: Identifier[];
let inGeneratorFunctionBody: boolean;
let inStatementContainingYield: boolean;
// The following three arrays store information about generated code blocks.
// All three arrays are correlated by their index. This approach is used over allocating
// objects to store the same information to avoid GC overhead.
//
let blocks: CodeBlock[]; // Information about the code block
let blockOffsets: number[]; // The operation offset at which a code block begins or ends
let blockActions: BlockAction[]; // Whether the code block is opened or closed
let blockStack: CodeBlock[]; // A stack of currently open code blocks
// Labels are used to mark locations in the code that can be the target of a Break (jump)
// operation. These are translated into case clauses in a switch statement.
// The following two arrays are correlated by their index. This approach is used over
// allocating objects to store the same information to avoid GC overhead.
//
let labelOffsets: number[]; // The operation offset at which the label is defined.
let labelExpressions: LiteralExpression[][]; // The NumericLiteral nodes bound to each label.
let nextLabelId = 1; // The next label id to use.
// Operations store information about generated code for the function body. This
// includes things like statements, assignments, breaks (jumps), and yields.
// The following three arrays are correlated by their index. This approach is used over
// allocating objects to store the same information to avoid GC overhead.
//
let operations: OpCode[]; // The operation to perform.
let operationArguments: OperationArguments[]; // The arguments to the operation.
let operationLocations: TextRange[]; // The source map location for the operation.
let state: Identifier; // The name of the state object used by the generator at runtime.
// The following variables store information used by the `build` function:
//
let blockIndex = 0; // The index of the current block.
let labelNumber = 0; // The current label number.
let labelNumbers: number[][];
let lastOperationWasAbrupt: boolean; // Indicates whether the last operation was abrupt (break/continue).
let lastOperationWasCompletion: boolean; // Indicates whether the last operation was a completion (return/throw).
let clauses: CaseClause[]; // The case clauses generated for labels.
let statements: Statement[]; // The statements for the current label.
let exceptionBlockStack: ExceptionBlock[]; // A stack of containing exception blocks.
let currentExceptionBlock: ExceptionBlock; // The current exception block.
let withBlockStack: WithBlock[]; // A stack containing `with` blocks.
return transformSourceFile;
function transformSourceFile(node: SourceFile) {
if (node.isDeclarationFile || (node.transformFlags & TransformFlags.ContainsGenerator) === 0) {
return node;
}
currentSourceFile = node;
const visited = visitEachChild(node, visitor, context);
addEmitHelpers(visited, context.readEmitHelpers());
currentSourceFile = undefined;
return visited;
}
/**
* Visits a node.
*
* @param node The node to visit.
*/
function visitor(node: Node): VisitResult<Node> {
const transformFlags = node.transformFlags;
if (inStatementContainingYield) {
return visitJavaScriptInStatementContainingYield(node);
}
else if (inGeneratorFunctionBody) {
return visitJavaScriptInGeneratorFunctionBody(node);
}
else if (transformFlags & TransformFlags.Generator) {
return visitGenerator(node);
}
else if (transformFlags & TransformFlags.ContainsGenerator) {
return visitEachChild(node, visitor, context);
}
else {
return node;
}
}
/**
* Visits a node that is contained within a statement that contains yield.
*
* @param node The node to visit.
*/
function visitJavaScriptInStatementContainingYield(node: Node): VisitResult<Node> {
switch (node.kind) {
case SyntaxKind.DoStatement:
return visitDoStatement(<DoStatement>node);
case SyntaxKind.WhileStatement:
return visitWhileStatement(<WhileStatement>node);
case SyntaxKind.SwitchStatement:
return visitSwitchStatement(<SwitchStatement>node);
case SyntaxKind.LabeledStatement:
return visitLabeledStatement(<LabeledStatement>node);
default:
return visitJavaScriptInGeneratorFunctionBody(node);
}
}
/**
* Visits a node that is contained within a generator function.
*
* @param node The node to visit.
*/
function visitJavaScriptInGeneratorFunctionBody(node: Node): VisitResult<Node> {
switch (node.kind) {
case SyntaxKind.FunctionDeclaration:
return visitFunctionDeclaration(<FunctionDeclaration>node);
case SyntaxKind.FunctionExpression:
return visitFunctionExpression(<FunctionExpression>node);
case SyntaxKind.GetAccessor:
case SyntaxKind.SetAccessor:
return visitAccessorDeclaration(<AccessorDeclaration>node);
case SyntaxKind.VariableStatement:
return visitVariableStatement(<VariableStatement>node);
case SyntaxKind.ForStatement:
return visitForStatement(<ForStatement>node);
case SyntaxKind.ForInStatement:
return visitForInStatement(<ForInStatement>node);
case SyntaxKind.BreakStatement:
return visitBreakStatement(<BreakStatement>node);
case SyntaxKind.ContinueStatement:
return visitContinueStatement(<ContinueStatement>node);
case SyntaxKind.ReturnStatement:
return visitReturnStatement(<ReturnStatement>node);
default:
if (node.transformFlags & TransformFlags.ContainsYield) {
return visitJavaScriptContainingYield(node);
}
else if (node.transformFlags & (TransformFlags.ContainsGenerator | TransformFlags.ContainsHoistedDeclarationOrCompletion)) {
return visitEachChild(node, visitor, context);
}
else {
return node;
}
}
}
/**
* Visits a node that contains a YieldExpression.
*
* @param node The node to visit.
*/
function visitJavaScriptContainingYield(node: Node): VisitResult<Node> {
switch (node.kind) {
case SyntaxKind.BinaryExpression:
return visitBinaryExpression(<BinaryExpression>node);
case SyntaxKind.ConditionalExpression:
return visitConditionalExpression(<ConditionalExpression>node);
case SyntaxKind.YieldExpression:
return visitYieldExpression(<YieldExpression>node);
case SyntaxKind.ArrayLiteralExpression:
return visitArrayLiteralExpression(<ArrayLiteralExpression>node);
case SyntaxKind.ObjectLiteralExpression:
return visitObjectLiteralExpression(<ObjectLiteralExpression>node);
case SyntaxKind.ElementAccessExpression:
return visitElementAccessExpression(<ElementAccessExpression>node);
case SyntaxKind.CallExpression:
return visitCallExpression(<CallExpression>node);
case SyntaxKind.NewExpression:
return visitNewExpression(<NewExpression>node);
default:
return visitEachChild(node, visitor, context);
}
}
/**
* Visits a generator function.
*
* @param node The node to visit.
*/
function visitGenerator(node: Node): VisitResult<Node> {
switch (node.kind) {
case SyntaxKind.FunctionDeclaration:
return visitFunctionDeclaration(<FunctionDeclaration>node);
case SyntaxKind.FunctionExpression:
return visitFunctionExpression(<FunctionExpression>node);
default:
Debug.failBadSyntaxKind(node);
return visitEachChild(node, visitor, context);
}
}
/**
* Visits a function declaration.
*
* This will be called when one of the following conditions is met:
* - The function declaration is a generator function.
* - The function declaration is contained within the body of a generator function.
*
* @param node The node to visit.
*/
function visitFunctionDeclaration(node: FunctionDeclaration): Statement {
// Currently, we only support generators that were originally async functions.
if (node.asteriskToken) {
node = setOriginalNode(
setTextRange(
createFunctionDeclaration(
/*decorators*/ undefined,
node.modifiers,
/*asteriskToken*/ undefined,
node.name,
/*typeParameters*/ undefined,
visitParameterList(node.parameters, visitor, context),
/*type*/ undefined,
transformGeneratorFunctionBody(node.body)
),
/*location*/ node
),
node
);
}
else {
const savedInGeneratorFunctionBody = inGeneratorFunctionBody;
const savedInStatementContainingYield = inStatementContainingYield;
inGeneratorFunctionBody = false;
inStatementContainingYield = false;
node = visitEachChild(node, visitor, context);
inGeneratorFunctionBody = savedInGeneratorFunctionBody;
inStatementContainingYield = savedInStatementContainingYield;
}
if (inGeneratorFunctionBody) {
// Function declarations in a generator function body are hoisted
// to the top of the lexical scope and elided from the current statement.
hoistFunctionDeclaration(node);
return undefined;
}
else {
return node;
}
}
/**
* Visits a function expression.
*
* This will be called when one of the following conditions is met:
* - The function expression is a generator function.
* - The function expression is contained within the body of a generator function.
*
* @param node The node to visit.
*/
function visitFunctionExpression(node: FunctionExpression): Expression {
// Currently, we only support generators that were originally async functions.
if (node.asteriskToken) {
node = setOriginalNode(
setTextRange(
createFunctionExpression(
/*modifiers*/ undefined,
/*asteriskToken*/ undefined,
node.name,
/*typeParameters*/ undefined,
visitParameterList(node.parameters, visitor, context),
/*type*/ undefined,
transformGeneratorFunctionBody(node.body)
),
/*location*/ node
),
node
);
}
else {
const savedInGeneratorFunctionBody = inGeneratorFunctionBody;
const savedInStatementContainingYield = inStatementContainingYield;
inGeneratorFunctionBody = false;
inStatementContainingYield = false;
node = visitEachChild(node, visitor, context);
inGeneratorFunctionBody = savedInGeneratorFunctionBody;
inStatementContainingYield = savedInStatementContainingYield;
}
return node;
}
/**
* Visits a get or set accessor declaration.
*
* This will be called when one of the following conditions is met:
* - The accessor is contained within the body of a generator function.
*
* @param node The node to visit.
*/
function visitAccessorDeclaration(node: AccessorDeclaration) {
const savedInGeneratorFunctionBody = inGeneratorFunctionBody;
const savedInStatementContainingYield = inStatementContainingYield;
inGeneratorFunctionBody = false;
inStatementContainingYield = false;
node = visitEachChild(node, visitor, context);
inGeneratorFunctionBody = savedInGeneratorFunctionBody;
inStatementContainingYield = savedInStatementContainingYield;
return node;
}
/**
* Transforms the body of a generator function declaration.
*
* @param node The function body to transform.
*/
function transformGeneratorFunctionBody(body: Block) {
// Save existing generator state
const statements: Statement[] = [];
const savedInGeneratorFunctionBody = inGeneratorFunctionBody;
const savedInStatementContainingYield = inStatementContainingYield;
const savedBlocks = blocks;
const savedBlockOffsets = blockOffsets;
const savedBlockActions = blockActions;
const savedBlockStack = blockStack;
const savedLabelOffsets = labelOffsets;
const savedLabelExpressions = labelExpressions;
const savedNextLabelId = nextLabelId;
const savedOperations = operations;
const savedOperationArguments = operationArguments;
const savedOperationLocations = operationLocations;
const savedState = state;
// Initialize generator state
inGeneratorFunctionBody = true;
inStatementContainingYield = false;
blocks = undefined;
blockOffsets = undefined;
blockActions = undefined;
blockStack = undefined;
labelOffsets = undefined;
labelExpressions = undefined;
nextLabelId = 1;
operations = undefined;
operationArguments = undefined;
operationLocations = undefined;
state = createTempVariable(/*recordTempVariable*/ undefined);
// Build the generator
resumeLexicalEnvironment();
const statementOffset = addPrologue(statements, body.statements, /*ensureUseStrict*/ false, visitor);
transformAndEmitStatements(body.statements, statementOffset);
const buildResult = build();
addRange(statements, endLexicalEnvironment());
statements.push(createReturn(buildResult));
// Restore previous generator state
inGeneratorFunctionBody = savedInGeneratorFunctionBody;
inStatementContainingYield = savedInStatementContainingYield;
blocks = savedBlocks;
blockOffsets = savedBlockOffsets;
blockActions = savedBlockActions;
blockStack = savedBlockStack;
labelOffsets = savedLabelOffsets;
labelExpressions = savedLabelExpressions;
nextLabelId = savedNextLabelId;
operations = savedOperations;
operationArguments = savedOperationArguments;
operationLocations = savedOperationLocations;
state = savedState;
return setTextRange(createBlock(statements, body.multiLine), body);
}
/**
* Visits a variable statement.
*
* This will be called when one of the following conditions is met:
* - The variable statement is contained within the body of a generator function.
*
* @param node The node to visit.
*/
function visitVariableStatement(node: VariableStatement): Statement {
if (node.transformFlags & TransformFlags.ContainsYield) {
transformAndEmitVariableDeclarationList(node.declarationList);
return undefined;
}
else {
// Do not hoist custom prologues.
if (getEmitFlags(node) & EmitFlags.CustomPrologue) {
return node;
}
for (const variable of node.declarationList.declarations) {
hoistVariableDeclaration(<Identifier>variable.name);
}
const variables = getInitializedVariables(node.declarationList);
if (variables.length === 0) {
return undefined;
}
return setSourceMapRange(
createStatement(
inlineExpressions(
map(variables, transformInitializedVariable)
)
),
node
);
}
}
/**
* Visits a binary expression.
*
* This will be called when one of the following conditions is met:
* - The node contains a YieldExpression.
*
* @param node The node to visit.
*/
function visitBinaryExpression(node: BinaryExpression): Expression {
switch (getExpressionAssociativity(node)) {
case Associativity.Left:
return visitLeftAssociativeBinaryExpression(node);
case Associativity.Right:
return visitRightAssociativeBinaryExpression(node);
default:
Debug.fail("Unknown associativity.");
}
}
function isCompoundAssignment(kind: BinaryOperator): kind is CompoundAssignmentOperator {
return kind >= SyntaxKind.FirstCompoundAssignment
&& kind <= SyntaxKind.LastCompoundAssignment;
}
function getOperatorForCompoundAssignment(kind: CompoundAssignmentOperator): BitwiseOperatorOrHigher {
switch (kind) {
case SyntaxKind.PlusEqualsToken: return SyntaxKind.PlusToken;
case SyntaxKind.MinusEqualsToken: return SyntaxKind.MinusToken;
case SyntaxKind.AsteriskEqualsToken: return SyntaxKind.AsteriskToken;
case SyntaxKind.AsteriskAsteriskEqualsToken: return SyntaxKind.AsteriskAsteriskToken;
case SyntaxKind.SlashEqualsToken: return SyntaxKind.SlashToken;
case SyntaxKind.PercentEqualsToken: return SyntaxKind.PercentToken;
case SyntaxKind.LessThanLessThanEqualsToken: return SyntaxKind.LessThanLessThanToken;
case SyntaxKind.GreaterThanGreaterThanEqualsToken: return SyntaxKind.GreaterThanGreaterThanToken;
case SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken: return SyntaxKind.GreaterThanGreaterThanGreaterThanToken;
case SyntaxKind.AmpersandEqualsToken: return SyntaxKind.AmpersandToken;
case SyntaxKind.BarEqualsToken: return SyntaxKind.BarToken;
case SyntaxKind.CaretEqualsToken: return SyntaxKind.CaretToken;
}
}
/**
* Visits a right-associative binary expression containing `yield`.
*
* @param node The node to visit.
*/
function visitRightAssociativeBinaryExpression(node: BinaryExpression) {
const { left, right } = node;
if (containsYield(right)) {
let target: Expression;
switch (left.kind) {
case SyntaxKind.PropertyAccessExpression:
// [source]
// a.b = yield;
//
// [intermediate]
// .local _a
// _a = a;
// .yield resumeLabel
// .mark resumeLabel
// _a.b = %sent%;
target = updatePropertyAccess(
<PropertyAccessExpression>left,
cacheExpression(visitNode((<PropertyAccessExpression>left).expression, visitor, isLeftHandSideExpression)),
(<PropertyAccessExpression>left).name
);
break;
case SyntaxKind.ElementAccessExpression:
// [source]
// a[b] = yield;
//
// [intermediate]
// .local _a, _b
// _a = a;
// _b = b;
// .yield resumeLabel
// .mark resumeLabel
// _a[_b] = %sent%;
target = updateElementAccess(<ElementAccessExpression>left,
cacheExpression(visitNode((<ElementAccessExpression>left).expression, visitor, isLeftHandSideExpression)),
cacheExpression(visitNode((<ElementAccessExpression>left).argumentExpression, visitor, isExpression))
);
break;
default:
target = visitNode(left, visitor, isExpression);
break;
}
const operator = node.operatorToken.kind;
if (isCompoundAssignment(operator)) {
return setTextRange(
createAssignment(
target,
setTextRange(
createBinary(
cacheExpression(target),
getOperatorForCompoundAssignment(operator),
visitNode(right, visitor, isExpression)
),
node
)
),
node
);
}
else {
return updateBinary(node, target, visitNode(right, visitor, isExpression));
}
}
return visitEachChild(node, visitor, context);
}
function visitLeftAssociativeBinaryExpression(node: BinaryExpression) {
if (containsYield(node.right)) {
if (isLogicalOperator(node.operatorToken.kind)) {
return visitLogicalBinaryExpression(node);
}
else if (node.operatorToken.kind === SyntaxKind.CommaToken) {
return visitCommaExpression(node);
}
// [source]
// a() + (yield) + c()
//
// [intermediate]
// .local _a
// _a = a();
// .yield resumeLabel
// _a + %sent% + c()
const clone = getMutableClone(node);
clone.left = cacheExpression(visitNode(node.left, visitor, isExpression));
clone.right = visitNode(node.right, visitor, isExpression);
return clone;
}
return visitEachChild(node, visitor, context);
}
/**
* Visits a logical binary expression containing `yield`.
*
* @param node A node to visit.
*/
function visitLogicalBinaryExpression(node: BinaryExpression) {
// Logical binary expressions (`&&` and `||`) are shortcutting expressions and need
// to be transformed as such:
//
// [source]
// x = a() && yield;
//
// [intermediate]
// .local _a
// _a = a();
// .brfalse resultLabel, (_a)
// .yield resumeLabel
// .mark resumeLabel
// _a = %sent%;
// .mark resultLabel
// x = _a;
//
// [source]
// x = a() || yield;
//
// [intermediate]
// .local _a
// _a = a();
// .brtrue resultLabel, (_a)
// .yield resumeLabel
// .mark resumeLabel
// _a = %sent%;
// .mark resultLabel
// x = _a;
const resultLabel = defineLabel();
const resultLocal = declareLocal();
emitAssignment(resultLocal, visitNode(node.left, visitor, isExpression), /*location*/ node.left);
if (node.operatorToken.kind === SyntaxKind.AmpersandAmpersandToken) {
// Logical `&&` shortcuts when the left-hand operand is falsey.
emitBreakWhenFalse(resultLabel, resultLocal, /*location*/ node.left);
}
else {
// Logical `||` shortcuts when the left-hand operand is truthy.
emitBreakWhenTrue(resultLabel, resultLocal, /*location*/ node.left);
}
emitAssignment(resultLocal, visitNode(node.right, visitor, isExpression), /*location*/ node.right);
markLabel(resultLabel);
return resultLocal;
}
/**
* Visits a comma expression containing `yield`.
*
* @param node The node to visit.
*/
function visitCommaExpression(node: BinaryExpression) {
// [source]
// x = a(), yield, b();
//
// [intermediate]
// a();
// .yield resumeLabel
// .mark resumeLabel
// x = %sent%, b();
let pendingExpressions: Expression[] = [];
visit(node.left);
visit(node.right);
return inlineExpressions(pendingExpressions);
function visit(node: Expression) {
if (isBinaryExpression(node) && node.operatorToken.kind === SyntaxKind.CommaToken) {
visit(node.left);
visit(node.right);
}
else {
if (containsYield(node) && pendingExpressions.length > 0) {
emitWorker(OpCode.Statement, [createStatement(inlineExpressions(pendingExpressions))]);
pendingExpressions = [];
}
pendingExpressions.push(visitNode(node, visitor, isExpression));
}
}
}
/**
* Visits a conditional expression containing `yield`.
*
* @param node The node to visit.
*/
function visitConditionalExpression(node: ConditionalExpression): Expression {
// [source]
// x = a() ? yield : b();
//
// [intermediate]
// .local _a
// .brfalse whenFalseLabel, (a())
// .yield resumeLabel
// .mark resumeLabel
// _a = %sent%;
// .br resultLabel
// .mark whenFalseLabel
// _a = b();
// .mark resultLabel
// x = _a;
// We only need to perform a specific transformation if a `yield` expression exists
// in either the `whenTrue` or `whenFalse` branches.
// A `yield` in the condition will be handled by the normal visitor.
if (containsYield(node.whenTrue) || containsYield(node.whenFalse)) {
const whenFalseLabel = defineLabel();
const resultLabel = defineLabel();
const resultLocal = declareLocal();
emitBreakWhenFalse(whenFalseLabel, visitNode(node.condition, visitor, isExpression), /*location*/ node.condition);
emitAssignment(resultLocal, visitNode(node.whenTrue, visitor, isExpression), /*location*/ node.whenTrue);
emitBreak(resultLabel);
markLabel(whenFalseLabel);
emitAssignment(resultLocal, visitNode(node.whenFalse, visitor, isExpression), /*location*/ node.whenFalse);
markLabel(resultLabel);
return resultLocal;
}
return visitEachChild(node, visitor, context);
}
/**
* Visits a `yield` expression.
*
* @param node The node to visit.
*/
function visitYieldExpression(node: YieldExpression): LeftHandSideExpression {
// [source]
// x = yield a();
//
// [intermediate]
// .yield resumeLabel, (a())
// .mark resumeLabel
// x = %sent%;
const resumeLabel = defineLabel();
const expression = visitNode(node.expression, visitor, isExpression);
if (node.asteriskToken) {
const iterator = (getEmitFlags(node.expression) & EmitFlags.Iterator) === 0
? createValuesHelper(context, expression, /*location*/ node)
: expression;
emitYieldStar(iterator, /*location*/ node);
}
else {
emitYield(expression, /*location*/ node);
}
markLabel(resumeLabel);
return createGeneratorResume(/*location*/ node);
}
/**
* Visits an ArrayLiteralExpression that contains a YieldExpression.
*
* @param node The node to visit.
*/
function visitArrayLiteralExpression(node: ArrayLiteralExpression) {
return visitElements(node.elements, /*leadingElement*/ undefined, /*location*/ undefined, node.multiLine);
}
/**
* Visits an array of expressions containing one or more YieldExpression nodes
* and returns an expression for the resulting value.
*
* @param elements The elements to visit.
* @param multiLine Whether array literals created should be emitted on multiple lines.
*/
function visitElements(elements: NodeArray<Expression>, leadingElement?: Expression, location?: TextRange, multiLine?: boolean) {
// [source]
// ar = [1, yield, 2];
//
// [intermediate]
// .local _a
// _a = [1];
// .yield resumeLabel
// .mark resumeLabel
// ar = _a.concat([%sent%, 2]);
const numInitialElements = countInitialNodesWithoutYield(elements);
let temp: Identifier;
if (numInitialElements > 0) {
temp = declareLocal();
const initialElements = visitNodes(elements, visitor, isExpression, 0, numInitialElements);
emitAssignment(temp,
createArrayLiteral(
leadingElement
? [leadingElement, ...initialElements]
: initialElements
)
);
leadingElement = undefined;
}
const expressions = reduceLeft(elements, reduceElement, <Expression[]>[], numInitialElements);
return temp
? createArrayConcat(temp, [createArrayLiteral(expressions, multiLine)])
: setTextRange(
createArrayLiteral(leadingElement ? [leadingElement, ...expressions] : expressions, multiLine),
location
);
function reduceElement(expressions: Expression[], element: Expression) {
if (containsYield(element) && expressions.length > 0) {
const hasAssignedTemp = temp !== undefined;
if (!temp) {
temp = declareLocal();
}
emitAssignment(
temp,
hasAssignedTemp
? createArrayConcat(
temp,
[createArrayLiteral(expressions, multiLine)]
)
: createArrayLiteral(
leadingElement ? [leadingElement, ...expressions] : expressions,
multiLine
)
);
leadingElement = undefined;
expressions = [];
}
expressions.push(visitNode(element, visitor, isExpression));
return expressions;
}
}
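/**
 * Visits an ObjectLiteralExpression that contains a YieldExpression.
 *
 * @param node The node to visit.
 */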
function visitObjectLiteralExpression(node: ObjectLiteralExpression) {
// [source]
// o = {
// a: 1,
// b: yield,
// c: 2
// };
//
// [intermediate]
// .local _a
// _a = {
// a: 1
// };
// .yield resumeLabel
// .mark resumeLabel
// o = (_a.b = %sent%,
// _a.c = 2,
// _a);
const properties = node.properties;
const multiLine = node.multiLine;
const numInitialProperties = countInitialNodesWithoutYield(properties);
const temp = declareLocal();
emitAssignment(temp,
createObjectLiteral(
visitNodes(properties, visitor, isObjectLiteralElementLike, 0, numInitialProperties),
multiLine
)
);
const expressions = reduceLeft(properties, reduceProperty, <Expression[]>[], numInitialProperties);
expressions.push(multiLine ? startOnNewLine(getMutableClone(temp)) : temp);
return inlineExpressions(expressions);
function reduceProperty(expressions: Expression[], property: ObjectLiteralElementLike) {
if (containsYield(property) && expressions.length > 0) {
emitStatement(createStatement(inlineExpressions(expressions)));
expressions = [];
}
const expression = createExpressionForObjectLiteralElementLike(node, property, temp);
const visited = visitNode(expression, visitor, isExpression);
if (visited) {
if (multiLine) {
visited.startsOnNewLine = true;
}
expressions.push(visited);
}
return expressions;
}
}
/**
* Visits an ElementAccessExpression that contains a YieldExpression.
*
* @param node The node to visit.
*/
function visitElementAccessExpression(node: ElementAccessExpression) {
if (containsYield(node.argumentExpression)) {
// [source]
// a = x[yield];
//
// [intermediate]
// .local _a
// _a = x;
// .yield resumeLabel
// .mark resumeLabel
// a = _a[%sent%]
const clone = getMutableClone(node);
clone.expression = cacheExpression(visitNode(node.expression, visitor, isLeftHandSideExpression));
clone.argumentExpression = visitNode(node.argumentExpression, visitor, isExpression);
return clone;
}
return visitEachChild(node, visitor, context);
}
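/**
 * Visits a CallExpression whose arguments contain a YieldExpression.
 *
 * @param node The node to visit.
 */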
function visitCallExpression(node: CallExpression) {
if (forEach(node.arguments, containsYield)) {
// [source]
// a.b(1, yield, 2);
//
// [intermediate]
// .local _a, _b, _c
// _b = (_a = a).b;
// _c = [1];
// .yield resumeLabel
// .mark resumeLabel
// _b.apply(_a, _c.concat([%sent%, 2]));
const { target, thisArg } = createCallBinding(node.expression, hoistVariableDeclaration, languageVersion, /*cacheIdentifiers*/ true);
return setOriginalNode(
createFunctionApply(
cacheExpression(visitNode(target, visitor, isLeftHandSideExpression)),
thisArg,
visitElements(node.arguments),
/*location*/ node
),
node
);
}
return visitEachChild(node, visitor, context);
}
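/**
 * Visits a NewExpression whose arguments contain a YieldExpression.
 *
 * @param node The node to visit.
 */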
function visitNewExpression(node: NewExpression) {
if (forEach(node.arguments, containsYield)) {
// [source]
// new a.b(1, yield, 2);
//
// [intermediate]
// .local _a, _b, _c
// _b = (_a = a.b).bind;
// _c = [1];
// .yield resumeLabel
// .mark resumeLabel
// new (_b.apply(_a, _c.concat([%sent%, 2])));
const { target, thisArg } = createCallBinding(createPropertyAccess(node.expression, "bind"), hoistVariableDeclaration);
return setOriginalNode(
setTextRange(
createNew(
createFunctionApply(
cacheExpression(visitNode(target, visitor, isExpression)),
thisArg,
visitElements(
node.arguments,
/*leadingElement*/ createVoidZero()
)
),
/*typeArguments*/ undefined,
[]
),
node
),
node
);
}
return visitEachChild(node, visitor, context);
}
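/**
 * Transforms and emits each statement of an array of statements, starting at the
 * specified offset.
 *
 * @param statements The statements to transform and emit.
 * @param start The index of the first statement to process.
 */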
function transformAndEmitStatements(statements: ReadonlyArray<Statement>, start = 0) {
const numStatements = statements.length;
for (let i = start; i < numStatements; i++) {
transformAndEmitStatement(statements[i]);
}
}
function transformAndEmitEmbeddedStatement(node: Statement) {
if (isBlock(node)) {
transformAndEmitStatements(node.statements);
}
else {
transformAndEmitStatement(node);
}
}
function transformAndEmitStatement(node: Statement): void {
const savedInStatementContainingYield = inStatementContainingYield;
if (!inStatementContainingYield) {
inStatementContainingYield = containsYield(node);
}
transformAndEmitStatementWorker(node);
inStatementContainingYield = savedInStatementContainingYield;
}
function transformAndEmitStatementWorker(node: Statement): void {
switch (node.kind) {
case SyntaxKind.Block:
return transformAndEmitBlock(<Block>node);
case SyntaxKind.ExpressionStatement:
return transformAndEmitExpressionStatement(<ExpressionStatement>node);
case SyntaxKind.IfStatement:
return transformAndEmitIfStatement(<IfStatement>node);
case SyntaxKind.DoStatement:
return transformAndEmitDoStatement(<DoStatement>node);
case SyntaxKind.WhileStatement:
return transformAndEmitWhileStatement(<WhileStatement>node);
case SyntaxKind.ForStatement:
return transformAndEmitForStatement(<ForStatement>node);
case SyntaxKind.ForInStatement:
return transformAndEmitForInStatement(<ForInStatement>node);
case SyntaxKind.ContinueStatement:
return transformAndEmitContinueStatement(<ContinueStatement>node);
case SyntaxKind.BreakStatement:
return transformAndEmitBreakStatement(<BreakStatement>node);
case SyntaxKind.ReturnStatement:
return transformAndEmitReturnStatement(<ReturnStatement>node);
case SyntaxKind.WithStatement:
return transformAndEmitWithStatement(<WithStatement>node);
case SyntaxKind.SwitchStatement:
return transformAndEmitSwitchStatement(<SwitchStatement>node);
case SyntaxKind.LabeledStatement:
return transformAndEmitLabeledStatement(<LabeledStatement>node);
case SyntaxKind.ThrowStatement:
return transformAndEmitThrowStatement(<ThrowStatement>node);
case SyntaxKind.TryStatement:
return transformAndEmitTryStatement(<TryStatement>node);
default:
return emitStatement(visitNode(node, visitor, isStatement));
}
}
function transformAndEmitBlock(node: Block): void {
if (containsYield(node)) {
transformAndEmitStatements(node.statements);
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
function transformAndEmitExpressionStatement(node: ExpressionStatement) {
emitStatement(visitNode(node, visitor, isStatement));
}
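/**
 * Hoists the names declared by a variable declaration list and emits assignments for
 * its initializers, flushing pending assignments whenever an initializer contains a
 * `yield` expression.
 *
 * @param node The variable declaration list to transform.
 */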
function transformAndEmitVariableDeclarationList(node: VariableDeclarationList): VariableDeclarationList {
for (const variable of node.declarations) {
const name = getSynthesizedClone(<Identifier>variable.name);
setCommentRange(name, variable.name);
hoistVariableDeclaration(name);
}
const variables = getInitializedVariables(node);
const numVariables = variables.length;
let variablesWritten = 0;
let pendingExpressions: Expression[] = [];
while (variablesWritten < numVariables) {
for (let i = variablesWritten; i < numVariables; i++) {
const variable = variables[i];
if (containsYield(variable.initializer) && pendingExpressions.length > 0) {
break;
}
pendingExpressions.push(transformInitializedVariable(variable));
}
if (pendingExpressions.length) {
emitStatement(createStatement(inlineExpressions(pendingExpressions)));
variablesWritten += pendingExpressions.length;
pendingExpressions = [];
}
}
return undefined;
}
function transformInitializedVariable(node: VariableDeclaration) {
return setSourceMapRange(
createAssignment(
setSourceMapRange(<Identifier>getSynthesizedClone(node.name), node.name),
visitNode(node.initializer, visitor, isExpression)
),
node
);
}
function transformAndEmitIfStatement(node: IfStatement) {
if (containsYield(node)) {
// [source]
// if (x)
// /*thenStatement*/
// else
// /*elseStatement*/
//
// [intermediate]
// .brfalse elseLabel, (x)
// /*thenStatement*/
// .br endLabel
// .mark elseLabel
// /*elseStatement*/
// .mark endLabel
if (containsYield(node.thenStatement) || containsYield(node.elseStatement)) {
const endLabel = defineLabel();
const elseLabel = node.elseStatement ? defineLabel() : undefined;
emitBreakWhenFalse(node.elseStatement ? elseLabel : endLabel, visitNode(node.expression, visitor, isExpression), /*location*/ node.expression);
transformAndEmitEmbeddedStatement(node.thenStatement);
if (node.elseStatement) {
emitBreak(endLabel);
markLabel(elseLabel);
transformAndEmitEmbeddedStatement(node.elseStatement);
}
markLabel(endLabel);
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
function transformAndEmitDoStatement(node: DoStatement) {
if (containsYield(node)) {
// [source]
// do {
// /*body*/
// }
// while (i < 10);
//
// [intermediate]
// .loop conditionLabel, endLabel
// .mark loopLabel
// /*body*/
// .mark conditionLabel
// .brtrue loopLabel, (i < 10)
// .endloop
// .mark endLabel
const conditionLabel = defineLabel();
const loopLabel = defineLabel();
beginLoopBlock(/*continueLabel*/ conditionLabel);
markLabel(loopLabel);
transformAndEmitEmbeddedStatement(node.statement);
markLabel(conditionLabel);
emitBreakWhenTrue(loopLabel, visitNode(node.expression, visitor, isExpression));
endLoopBlock();
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
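/**
 * Visits a DoStatement, tracking it as a script-level loop block when it occurs
 * within a statement containing `yield` so that `break` and `continue` targets
 * can be resolved.
 *
 * @param node The node to visit.
 */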
function visitDoStatement(node: DoStatement) {
if (inStatementContainingYield) {
beginScriptLoopBlock();
node = visitEachChild(node, visitor, context);
endLoopBlock();
return node;
}
else {
return visitEachChild(node, visitor, context);
}
}
function transformAndEmitWhileStatement(node: WhileStatement) {
if (containsYield(node)) {
// [source]
// while (i < 10) {
// /*body*/
// }
//
// [intermediate]
// .loop loopLabel, endLabel
// .mark loopLabel
// .brfalse endLabel, (i < 10)
// /*body*/
// .br loopLabel
// .endloop
// .mark endLabel
const loopLabel = defineLabel();
const endLabel = beginLoopBlock(loopLabel);
markLabel(loopLabel);
emitBreakWhenFalse(endLabel, visitNode(node.expression, visitor, isExpression));
transformAndEmitEmbeddedStatement(node.statement);
emitBreak(loopLabel);
endLoopBlock();
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
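/**
 * Visits a WhileStatement, tracking it as a script-level loop block when it occurs
 * within a statement containing `yield`.
 *
 * @param node The node to visit.
 */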
function visitWhileStatement(node: WhileStatement) {
if (inStatementContainingYield) {
beginScriptLoopBlock();
node = visitEachChild(node, visitor, context);
endLoopBlock();
return node;
}
else {
return visitEachChild(node, visitor, context);
}
}
function transformAndEmitForStatement(node: ForStatement) {
if (containsYield(node)) {
// [source]
// for (var i = 0; i < 10; i++) {
// /*body*/
// }
//
// [intermediate]
// .local i
// i = 0;
// .loop incrementLabel, endLoopLabel
// .mark conditionLabel
// .brfalse endLoopLabel, (i < 10)
// /*body*/
// .mark incrementLabel
// i++;
// .br conditionLabel
// .endloop
// .mark endLoopLabel
const conditionLabel = defineLabel();
const incrementLabel = defineLabel();
const endLabel = beginLoopBlock(incrementLabel);
if (node.initializer) {
const initializer = node.initializer;
if (isVariableDeclarationList(initializer)) {
transformAndEmitVariableDeclarationList(initializer);
}
else {
emitStatement(
setTextRange(
createStatement(
visitNode(initializer, visitor, isExpression)
),
initializer
)
);
}
}
markLabel(conditionLabel);
if (node.condition) {
emitBreakWhenFalse(endLabel, visitNode(node.condition, visitor, isExpression));
}
transformAndEmitEmbeddedStatement(node.statement);
markLabel(incrementLabel);
if (node.incrementor) {
emitStatement(
setTextRange(
createStatement(
visitNode(node.incrementor, visitor, isExpression)
),
node.incrementor
)
);
}
emitBreak(conditionLabel);
endLoopBlock();
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
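/**
 * Visits a ForStatement, hoisting any declared loop variables and tracking the loop
 * as a script-level loop block when it occurs within a statement containing `yield`.
 *
 * @param node The node to visit.
 */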
function visitForStatement(node: ForStatement) {
if (inStatementContainingYield) {
beginScriptLoopBlock();
}
const initializer = node.initializer;
if (initializer && isVariableDeclarationList(initializer)) {
for (const variable of initializer.declarations) {
hoistVariableDeclaration(<Identifier>variable.name);
}
const variables = getInitializedVariables(initializer);
node = updateFor(node,
variables.length > 0
? inlineExpressions(map(variables, transformInitializedVariable))
: undefined,
visitNode(node.condition, visitor, isExpression),
visitNode(node.incrementor, visitor, isExpression),
visitNode(node.statement, visitor, isStatement, liftToBlock)
);
}
else {
node = visitEachChild(node, visitor, context);
}
if (inStatementContainingYield) {
endLoopBlock();
}
return node;
}
function transformAndEmitForInStatement(node: ForInStatement) {
// TODO(rbuckton): Source map locations
if (containsYield(node)) {
// [source]
// for (var p in o) {
// /*body*/
// }
//
// [intermediate]
// .local _a, _b, _i
// _a = [];
// for (_b in o) _a.push(_b);
// _i = 0;
// .loop incrementLabel, endLoopLabel
// .mark conditionLabel
// .brfalse endLoopLabel, (_i < _a.length)
// p = _a[_i];
// /*body*/
// .mark incrementLabel
// _i++;
// .br conditionLabel
// .endloop
// .mark endLoopLabel
const keysArray = declareLocal(); // _a
const key = declareLocal(); // _b
const keysIndex = createLoopVariable(); // _i
const initializer = node.initializer;
hoistVariableDeclaration(keysIndex);
emitAssignment(keysArray, createArrayLiteral());
emitStatement(
createForIn(
key,
visitNode(node.expression, visitor, isExpression),
createStatement(
createCall(
createPropertyAccess(keysArray, "push"),
/*typeArguments*/ undefined,
[key]
)
)
)
);
emitAssignment(keysIndex, createLiteral(0));
const conditionLabel = defineLabel();
const incrementLabel = defineLabel();
const endLabel = beginLoopBlock(incrementLabel);
markLabel(conditionLabel);
emitBreakWhenFalse(endLabel, createLessThan(keysIndex, createPropertyAccess(keysArray, "length")));
let variable: Expression;
if (isVariableDeclarationList(initializer)) {
for (const variable of initializer.declarations) {
hoistVariableDeclaration(<Identifier>variable.name);
}
variable = <Identifier>getSynthesizedClone(initializer.declarations[0].name);
}
else {
variable = visitNode(initializer, visitor, isExpression);
Debug.assert(isLeftHandSideExpression(variable));
}
emitAssignment(variable, createElementAccess(keysArray, keysIndex));
transformAndEmitEmbeddedStatement(node.statement);
markLabel(incrementLabel);
emitStatement(createStatement(createPostfixIncrement(keysIndex)));
emitBreak(conditionLabel);
endLoopBlock();
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
function visitForInStatement(node: ForInStatement) {
// [source]
// for (var x in a) {
// /*body*/
// }
//
// [intermediate]
// .local x
// .loop
// for (x in a) {
// /*body*/
// }
// .endloop
if (inStatementContainingYield) {
beginScriptLoopBlock();
}
const initializer = node.initializer;
if (isVariableDeclarationList(initializer)) {
for (const variable of initializer.declarations) {
hoistVariableDeclaration(<Identifier>variable.name);
}
node = updateForIn(node,
<Identifier>initializer.declarations[0].name,
visitNode(node.expression, visitor, isExpression),
visitNode(node.statement, visitor, isStatement, liftToBlock)
);
}
else {
node = visitEachChild(node, visitor, context);
}
if (inStatementContainingYield) {
endLoopBlock();
}
return node;
}
function transformAndEmitContinueStatement(node: ContinueStatement): void {
const label = findContinueTarget(node.label ? unescapeLeadingUnderscores(node.label.text) : undefined);
Debug.assert(label > 0, "Expected continue statment to point to a valid Label.");
emitBreak(label, /*location*/ node);
}
function visitContinueStatement(node: ContinueStatement): Statement {
if (inStatementContainingYield) {
const label = findContinueTarget(node.label && unescapeLeadingUnderscores(node.label.text));
if (label > 0) {
return createInlineBreak(label, /*location*/ node);
}
}
return visitEachChild(node, visitor, context);
}
function transformAndEmitBreakStatement(node: BreakStatement): void {
const label = findBreakTarget(node.label ? unescapeLeadingUnderscores(node.label.text) : undefined);
Debug.assert(label > 0, "Expected break statment to point to a valid Label.");
emitBreak(label, /*location*/ node);
}
function visitBreakStatement(node: BreakStatement): Statement {
if (inStatementContainingYield) {
const label = findBreakTarget(node.label && unescapeLeadingUnderscores(node.label.text));
if (label > 0) {
return createInlineBreak(label, /*location*/ node);
}
}
return visitEachChild(node, visitor, context);
}
function transformAndEmitReturnStatement(node: ReturnStatement): void {
emitReturn(
visitNode(node.expression, visitor, isExpression),
/*location*/ node
);
}
function visitReturnStatement(node: ReturnStatement) {
return createInlineReturn(
visitNode(node.expression, visitor, isExpression),
/*location*/ node
);
}
function transformAndEmitWithStatement(node: WithStatement) {
if (containsYield(node)) {
// [source]
// with (x) {
// /*body*/
// }
//
// [intermediate]
// .with (x)
// /*body*/
// .endwith
beginWithBlock(cacheExpression(visitNode(node.expression, visitor, isExpression)));
transformAndEmitEmbeddedStatement(node.statement);
endWithBlock();
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
function transformAndEmitSwitchStatement(node: SwitchStatement) {
if (containsYield(node.caseBlock)) {
// [source]
// switch (x) {
// case a:
// /*caseStatements*/
// case b:
// /*caseStatements*/
// default:
// /*defaultStatements*/
// }
//
// [intermediate]
// .local _a
// .switch endLabel
// _a = x;
// switch (_a) {
// case a:
// .br clauseLabels[0]
// }
// switch (_a) {
// case b:
// .br clauseLabels[1]
// }
// .br clauseLabels[2]
// .mark clauseLabels[0]
// /*caseStatements*/
// .mark clauseLabels[1]
// /*caseStatements*/
// .mark clauseLabels[2]
// /*caseStatements*/
// .endswitch
// .mark endLabel
const caseBlock = node.caseBlock;
const numClauses = caseBlock.clauses.length;
const endLabel = beginSwitchBlock();
const expression = cacheExpression(visitNode(node.expression, visitor, isExpression));
// Create labels for each clause and find the index of the first default clause.
const clauseLabels: Label[] = [];
let defaultClauseIndex = -1;
for (let i = 0; i < numClauses; i++) {
const clause = caseBlock.clauses[i];
clauseLabels.push(defineLabel());
if (clause.kind === SyntaxKind.DefaultClause && defaultClauseIndex === -1) {
defaultClauseIndex = i;
}
}
// Emit switch statements for each run of case clauses either from the first case
// clause or the next case clause with a `yield` in its expression, up to the next
// case clause with a `yield` in its expression.
let clausesWritten = 0;
let pendingClauses: CaseClause[] = [];
while (clausesWritten < numClauses) {
let defaultClausesSkipped = 0;
for (let i = clausesWritten; i < numClauses; i++) {
const clause = caseBlock.clauses[i];
if (clause.kind === SyntaxKind.CaseClause) {
const caseClause = <CaseClause>clause;
if (containsYield(caseClause.expression) && pendingClauses.length > 0) {
break;
}
pendingClauses.push(
createCaseClause(
visitNode(caseClause.expression, visitor, isExpression),
[
createInlineBreak(clauseLabels[i], /*location*/ caseClause.expression)
]
)
);
}
else {
defaultClausesSkipped++;
}
}
if (pendingClauses.length) {
emitStatement(createSwitch(expression, createCaseBlock(pendingClauses)));
clausesWritten += pendingClauses.length;
pendingClauses = [];
}
if (defaultClausesSkipped > 0) {
clausesWritten += defaultClausesSkipped;
defaultClausesSkipped = 0;
}
}
if (defaultClauseIndex >= 0) {
emitBreak(clauseLabels[defaultClauseIndex]);
}
else {
emitBreak(endLabel);
}
for (let i = 0; i < numClauses; i++) {
markLabel(clauseLabels[i]);
transformAndEmitStatements(caseBlock.clauses[i].statements);
}
endSwitchBlock();
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
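/**
 * Visits a SwitchStatement, tracking it as a script-level switch block when it occurs
 * within a statement containing `yield`.
 *
 * @param node The node to visit.
 */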
function visitSwitchStatement(node: SwitchStatement) {
if (inStatementContainingYield) {
beginScriptSwitchBlock();
}
node = visitEachChild(node, visitor, context);
if (inStatementContainingYield) {
endSwitchBlock();
}
return node;
}
function transformAndEmitLabeledStatement(node: LabeledStatement) {
if (containsYield(node)) {
// [source]
// x: {
// /*body*/
// }
//
// [intermediate]
// .labeled "x", endLabel
// /*body*/
// .endlabeled
// .mark endLabel
beginLabeledBlock(unescapeLeadingUnderscores(node.label.text));
transformAndEmitEmbeddedStatement(node.statement);
endLabeledBlock();
}
else {
emitStatement(visitNode(node, visitor, isStatement));
}
}
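/**
 * Visits a LabeledStatement, tracking it as a script-level labeled block when it
 * occurs within a statement containing `yield`.
 *
 * @param node The node to visit.
 */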
function visitLabeledStatement(node: LabeledStatement) {
if (inStatementContainingYield) {
beginScriptLabeledBlock(unescapeLeadingUnderscores(node.label.text));
}
node = visitEachChild(node, visitor, context);
if (inStatementContainingYield) {
endLabeledBlock();
}
return node;
}
function transformAndEmitThrowStatement(node: ThrowStatement): void {
emitThrow(
visitNode(node.expression, visitor, isExpression),
/*location*/ node
);
}
function transformAndEmitTryStatement(node: TryStatement) {
if (containsYield(node)) {
// [source]
// try {
// /*tryBlock*/
// }
// catch (e) {
// /*catchBlock*/
// }
// finally {
// /*finallyBlock*/
// }
//
// [intermediate]
// .local _a
// .try tryLabel, catchLabel, finallyLabel, endLabel
// .mark tryLabel
// .nop
// /*tryBlock*/
// .br endLabel
// .catch
// .mark catchLabel
// _a = %error%;
// /*catchBlock*/
// .br endLabel
// .finally
// .mark finallyLabel
// /*finallyBlock*/
// .endfinally
// .endtry
// .mark endLabel
beginExceptionBlock();
transformAndEmitEmbeddedStatement(node.tryBlock);
if (node.catchClause) {
beginCatchBlock(node.catchClause.variableDeclaration);
transformAndEmitEmbeddedStatement(node.catchClause.block);
}
if (node.finallyBlock) {
beginFinallyBlock();
transformAndEmitEmbeddedStatement(node.finallyBlock);
}
endExceptionBlock();
}
else {
emitStatement(visitEachChild(node, visitor, context));
}
}
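/**
 * Indicates whether a node (or any of its descendants) contains a `yield` expression,
 * based on the node's transform flags.
 */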
function containsYield(node: Node) {
return node && (node.transformFlags & TransformFlags.ContainsYield) !== 0;
}
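/**
 * Counts the leading nodes of an array that do not contain a `yield` expression,
 * returning -1 when no node in the array contains one.
 */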
function countInitialNodesWithoutYield(nodes: NodeArray<Node>) {
const numNodes = nodes.length;
for (let i = 0; i < numNodes; i++) {
if (containsYield(nodes[i])) {
return i;
}
}
return -1;
}
function onSubstituteNode(hint: EmitHint, node: Node): Node {
node = previousOnSubstituteNode(hint, node);
if (hint === EmitHint.Expression) {
return substituteExpression(<Expression>node);
}
return node;
}
function substituteExpression(node: Expression): Expression {
if (isIdentifier(node)) {
return substituteExpressionIdentifier(node);
}
return node;
}
function substituteExpressionIdentifier(node: Identifier) {
if (!isGeneratedIdentifier(node) && renamedCatchVariables && renamedCatchVariables.has(unescapeLeadingUnderscores(node.text))) {
const original = getOriginalNode(node);
if (isIdentifier(original) && original.parent) {
const declaration = resolver.getReferencedValueDeclaration(original);
if (declaration) {
const name = renamedCatchVariableDeclarations[getOriginalNodeId(declaration)];
if (name) {
const clone = getMutableClone(name);
setSourceMapRange(clone, node);
setCommentRange(clone, node);
return clone;
}
}
}
}
return node;
}
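/**
 * Caches the value of an expression in a temporary variable, unless the expression
 * is already a generated identifier or a helper name.
 */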
function cacheExpression(node: Expression): Identifier {
let temp: Identifier;
if (isGeneratedIdentifier(node) || getEmitFlags(node) & EmitFlags.HelperName) {
return <Identifier>node;
}
temp = createTempVariable(hoistVariableDeclaration);
emitAssignment(temp, node, /*location*/ node);
return temp;
}
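/**
 * Declares a hoisted local variable, using the provided name as a unique-name hint
 * or creating a fresh temporary.
 */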
function declareLocal(name?: string): Identifier {
const temp = name
? createUniqueName(name)
: createTempVariable(/*recordTempVariable*/ undefined);
hoistVariableDeclaration(temp);
return temp;
}
/**
* Defines a label, used as the target of a Break operation.
*/
function defineLabel(): Label {
if (!labelOffsets) {
labelOffsets = [];
}
const label = nextLabelId;
nextLabelId++;
labelOffsets[label] = -1;
return label;
}
/**
* Marks the current operation with the specified label.
*/
function markLabel(label: Label): void {
Debug.assert(labelOffsets !== undefined, "No labels were defined.");
labelOffsets[label] = operations ? operations.length : 0;
}
/**
* Begins a block operation (With, Break/Continue, Try/Catch/Finally)
*
* @param block Information about the block.
*/
function beginBlock(block: CodeBlock): number {
if (!blocks) {
blocks = [];
blockActions = [];
blockOffsets = [];
blockStack = [];
}
const index = blockActions.length;
blockActions[index] = BlockAction.Open;
blockOffsets[index] = operations ? operations.length : 0;
blocks[index] = block;
blockStack.push(block);
return index;
}
/**
* Ends the current block operation.
*/
function endBlock(): CodeBlock {
const block = peekBlock();
Debug.assert(block !== undefined, "beginBlock was never called.");
const index = blockActions.length;
blockActions[index] = BlockAction.Close;
blockOffsets[index] = operations ? operations.length : 0;
blocks[index] = block;
blockStack.pop();
return block;
}
/**
* Gets the current open block.
*/
function peekBlock() {
return lastOrUndefined(blockStack);
}
/**
* Gets the kind of the current open block.
*/
function peekBlockKind(): CodeBlockKind {
const block = peekBlock();
return block && block.kind;
}
/**
* Begins a code block for a generated `with` statement.
*
* @param expression An identifier representing the expression for the `with` block.
*/
function beginWithBlock(expression: Identifier): void {
const startLabel = defineLabel();
const endLabel = defineLabel();
markLabel(startLabel);
beginBlock(<WithBlock>{
kind: CodeBlockKind.With,
expression,
startLabel,
endLabel
});
}
/**
* Ends a code block for a generated `with` statement.
*/
function endWithBlock(): void {
Debug.assert(peekBlockKind() === CodeBlockKind.With);
const block = <WithBlock>endBlock();
markLabel(block.endLabel);
}
function isWithBlock(block: CodeBlock): block is WithBlock {
return block.kind === CodeBlockKind.With;
}
/**
* Begins a code block for a generated `try` statement.
*/
function beginExceptionBlock(): Label {
const startLabel = defineLabel();
const endLabel = defineLabel();
markLabel(startLabel);
beginBlock(<ExceptionBlock>{
kind: CodeBlockKind.Exception,
state: ExceptionBlockState.Try,
startLabel,
endLabel
});
emitNop();
return endLabel;
}
/**
* Enters the `catch` clause of a generated `try` statement.
*
* @param variable The catch variable.
*/
function beginCatchBlock(variable: VariableDeclaration): void {
Debug.assert(peekBlockKind() === CodeBlockKind.Exception);
// generated identifiers should already be unique within a file
let name: Identifier;
if (isGeneratedIdentifier(variable.name)) {
name = variable.name;
hoistVariableDeclaration(variable.name);
}
else {
const text = unescapeLeadingUnderscores((<Identifier>variable.name).text);
name = declareLocal(text);
if (!renamedCatchVariables) {
renamedCatchVariables = createMap<boolean>();
renamedCatchVariableDeclarations = [];
context.enableSubstitution(SyntaxKind.Identifier);
}
renamedCatchVariables.set(text, true);
renamedCatchVariableDeclarations[getOriginalNodeId(variable)] = name;
}
const exception = <ExceptionBlock>peekBlock();
Debug.assert(exception.state < ExceptionBlockState.Catch);
const endLabel = exception.endLabel;
emitBreak(endLabel);
const catchLabel = defineLabel();
markLabel(catchLabel);
exception.state = ExceptionBlockState.Catch;
exception.catchVariable = name;
exception.catchLabel = catchLabel;
emitAssignment(name, createCall(createPropertyAccess(state, "sent"), /*typeArguments*/ undefined, []));
emitNop();
}
/**
* Enters the `finally` block of a generated `try` statement.
*/
function beginFinallyBlock(): void {
Debug.assert(peekBlockKind() === CodeBlockKind.Exception);
const exception = <ExceptionBlock>peekBlock();
Debug.assert(exception.state < ExceptionBlockState.Finally);
const endLabel = exception.endLabel;
emitBreak(endLabel);
const finallyLabel = defineLabel();
markLabel(finallyLabel);
exception.state = ExceptionBlockState.Finally;
exception.finallyLabel = finallyLabel;
}
/**
* Ends the code block for a generated `try` statement.
*/
function endExceptionBlock(): void {
Debug.assert(peekBlockKind() === CodeBlockKind.Exception);
const exception = <ExceptionBlock>endBlock();
const state = exception.state;
if (state < ExceptionBlockState.Finally) {
emitBreak(exception.endLabel);
}
else {
emitEndfinally();
}
markLabel(exception.endLabel);
emitNop();
exception.state = ExceptionBlockState.Done;
}
function isExceptionBlock(block: CodeBlock): block is ExceptionBlock {
return block.kind === CodeBlockKind.Exception;
}
/**
* Begins a code block that supports `break` or `continue` statements that are defined in
* the source tree and not from generated code.
*/
function beginScriptLoopBlock(): void {
beginBlock(<LoopBlock>{
kind: CodeBlockKind.Loop,
isScript: true,
breakLabel: -1,
continueLabel: -1
});
}
/**
* Begins a code block that supports `break` or `continue` statements that are defined in
* generated code. Returns a label used to mark the operation to which to jump when a
* `break` statement targets this block.
*
* @param continueLabel A Label used to mark the operation to which to jump when a
* `continue` statement targets this block.
*/
function beginLoopBlock(continueLabel: Label): Label {
const breakLabel = defineLabel();
beginBlock(<LoopBlock>{
kind: CodeBlockKind.Loop,
isScript: false,
breakLabel,
continueLabel,
});
return breakLabel;
}
/**
* Ends a code block that supports `break` or `continue` statements that are defined in
* generated code or in the source tree.
*/
function endLoopBlock(): void {
Debug.assert(peekBlockKind() === CodeBlockKind.Loop);
const block = <LoopBlock>endBlock();
const breakLabel = block.breakLabel;
if (!block.isScript) {
markLabel(breakLabel);
}
}
/**
* Begins a code block that supports `break` statements that are defined in the source
* tree and not from generated code.
*/
function beginScriptSwitchBlock(): void {
beginBlock(<SwitchBlock>{
kind: CodeBlockKind.Switch,
isScript: true,
breakLabel: -1
});
}
/**
* Begins a code block that supports `break` statements that are defined in generated code.
* Returns a label used to mark the operation to which to jump when a `break` statement
* targets this block.
*/
function beginSwitchBlock(): Label {
const breakLabel = defineLabel();
beginBlock(<SwitchBlock>{
kind: CodeBlockKind.Switch,
isScript: false,
breakLabel,
});
return breakLabel;
}
/**
* Ends a code block that supports `break` statements that are defined in generated code.
*/
function endSwitchBlock(): void {
Debug.assert(peekBlockKind() === CodeBlockKind.Switch);
const block = <SwitchBlock>endBlock();
const breakLabel = block.breakLabel;
if (!block.isScript) {
markLabel(breakLabel);
}
}
function beginScriptLabeledBlock(labelText: string) {
beginBlock(<LabeledBlock>{
kind: CodeBlockKind.Labeled,
isScript: true,
labelText,
breakLabel: -1
});
}
function beginLabeledBlock(labelText: string) {
const breakLabel = defineLabel();
beginBlock(<LabeledBlock>{
kind: CodeBlockKind.Labeled,
isScript: false,
labelText,
breakLabel
});
}
function endLabeledBlock() {
Debug.assert(peekBlockKind() === CodeBlockKind.Labeled);
const block = <LabeledBlock>endBlock();
if (!block.isScript) {
markLabel(block.breakLabel);
}
}
/**
* Indicates whether the provided block supports `break` statements.
*
* @param block A code block.
*/
function supportsUnlabeledBreak(block: CodeBlock): block is SwitchBlock | LoopBlock {
return block.kind === CodeBlockKind.Switch
|| block.kind === CodeBlockKind.Loop;
}
/**
* Indicates whether the provided block supports `break` statements with labels.
*
* @param block A code block.
*/
function supportsLabeledBreakOrContinue(block: CodeBlock): block is LabeledBlock {
return block.kind === CodeBlockKind.Labeled;
}
/**
* Indicates whether the provided block supports `continue` statements.
*
* @param block A code block.
*/
function supportsUnlabeledContinue(block: CodeBlock): block is LoopBlock {
return block.kind === CodeBlockKind.Loop;
}
function hasImmediateContainingLabeledBlock(labelText: string, start: number) {
for (let j = start; j >= 0; j--) {
const containingBlock = blockStack[j];
if (supportsLabeledBreakOrContinue(containingBlock)) {
if (containingBlock.labelText === labelText) {
return true;
}
}
else {
break;
}
}
return false;
}
/**
* Finds the label that is the target for a `break` statement.
*
* @param labelText An optional name of a containing labeled statement.
*/
function findBreakTarget(labelText?: string): Label {
Debug.assert(blocks !== undefined);
if (labelText) {
for (let i = blockStack.length - 1; i >= 0; i--) {
const block = blockStack[i];
if (supportsLabeledBreakOrContinue(block) && block.labelText === labelText) {
return block.breakLabel;
}
else if (supportsUnlabeledBreak(block) && hasImmediateContainingLabeledBlock(labelText, i - 1)) {
return block.breakLabel;
}
}
}
else {
for (let i = blockStack.length - 1; i >= 0; i--) {
const block = blockStack[i];
if (supportsUnlabeledBreak(block)) {
return block.breakLabel;
}
}
}
return 0;
}
/**
* Finds the label that is the target for a `continue` statement.
*
* @param labelText An optional name of a containing labeled statement.
*/
function findContinueTarget(labelText?: string): Label {
Debug.assert(blocks !== undefined);
if (labelText) {
for (let i = blockStack.length - 1; i >= 0; i--) {
const block = blockStack[i];
if (supportsUnlabeledContinue(block) && hasImmediateContainingLabeledBlock(labelText, i - 1)) {
return block.continueLabel;
}
}
}
else {
for (let i = blockStack.length - 1; i >= 0; i--) {
const block = blockStack[i];
if (supportsUnlabeledContinue(block)) {
return block.continueLabel;
}
}
}
return 0;
}
/**
* Creates an expression that can be used to indicate the value for a label.
*
* @param label A label.
*/
function createLabel(label: Label): Expression {
if (label > 0) {
if (labelExpressions === undefined) {
labelExpressions = [];
}
const expression = createLiteral(-1);
if (labelExpressions[label] === undefined) {
labelExpressions[label] = [expression];
}
else {
labelExpressions[label].push(expression);
}
return expression;
}
return createOmittedExpression();
}
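// NOTE: Label operands are initially emitted as placeholder literals (-1) because a
// label's final case number is not known until the function body is built;
// `updateLabelExpressions` later rewrites each placeholder in place, e.g. turning
// `return [3 /*break*/, -1]` into `return [3 /*break*/, 2]`.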
/**
* Creates a numeric literal for the provided instruction.
*/
function createInstruction(instruction: Instruction): NumericLiteral {
const literal = createLiteral(instruction);
addSyntheticTrailingComment(literal, SyntaxKind.MultiLineCommentTrivia, getInstructionName(instruction));
return literal;
}
/**
* Creates a statement that can be used to indicate a Break operation to the provided label.
*
* @param label A label.
* @param location An optional source map location for the statement.
*/
function createInlineBreak(label: Label, location?: TextRange): ReturnStatement {
Debug.assert(label > 0, `Invalid label: ${label}`);
return setTextRange(
createReturn(
createArrayLiteral([
createInstruction(Instruction.Break),
createLabel(label)
])
),
location
);
}
/**
* Creates a statement that can be used to indicate a Return operation.
*
* @param expression The expression for the return statement.
* @param location An optional source map location for the statement.
*/
function createInlineReturn(expression?: Expression, location?: TextRange): ReturnStatement {
return setTextRange(
createReturn(
createArrayLiteral(expression
? [createInstruction(Instruction.Return), expression]
: [createInstruction(Instruction.Return)]
)
),
location
);
}
/**
* Creates an expression that can be used to resume from a Yield operation.
*/
function createGeneratorResume(location?: TextRange): LeftHandSideExpression {
return setTextRange(
createCall(
createPropertyAccess(state, "sent"),
/*typeArguments*/ undefined,
[]
),
location
);
}
/**
* Emits an empty instruction.
*/
function emitNop() {
emitWorker(OpCode.Nop);
}
/**
* Emits a Statement.
*
* @param node A statement.
*/
function emitStatement(node: Statement): void {
if (node) {
emitWorker(OpCode.Statement, [node]);
}
else {
emitNop();
}
}
/**
* Emits an Assignment operation.
*
* @param left The left-hand side of the assignment.
* @param right The right-hand side of the assignment.
* @param location An optional source map location for the assignment.
*/
function emitAssignment(left: Expression, right: Expression, location?: TextRange): void {
emitWorker(OpCode.Assign, [left, right], location);
}
/**
* Emits a Break operation to the specified label.
*
* @param label A label.
* @param location An optional source map location for the operation.
*/
function emitBreak(label: Label, location?: TextRange): void {
emitWorker(OpCode.Break, [label], location);
}
/**
* Emits a Break operation to the specified label when a condition evaluates to a truthy
* value at runtime.
*
* @param label A label.
* @param condition The condition.
* @param location An optional source map location for the operation.
*/
function emitBreakWhenTrue(label: Label, condition: Expression, location?: TextRange): void {
emitWorker(OpCode.BreakWhenTrue, [label, condition], location);
}
/**
* Emits a Break to the specified label when a condition evaluates to a falsey value at
* runtime.
*
* @param label A label.
* @param condition The condition.
* @param location An optional source map location for the operation.
*/
function emitBreakWhenFalse(label: Label, condition: Expression, location?: TextRange): void {
emitWorker(OpCode.BreakWhenFalse, [label, condition], location);
}
/**
* Emits a YieldStar operation for the provided expression.
*
* @param expression An optional value for the yield operation.
* @param location An optional source map location for the operation.
*/
function emitYieldStar(expression?: Expression, location?: TextRange): void {
emitWorker(OpCode.YieldStar, [expression], location);
}
/**
* Emits a Yield operation for the provided expression.
*
* @param expression An optional value for the yield operation.
* @param location An optional source map location for the operation.
*/
function emitYield(expression?: Expression, location?: TextRange): void {
emitWorker(OpCode.Yield, [expression], location);
}
/**
* Emits a Return operation for the provided expression.
*
* @param expression An optional value for the operation.
* @param location An optional source map location for the operation.
*/
function emitReturn(expression?: Expression, location?: TextRange): void {
emitWorker(OpCode.Return, [expression], location);
}
/**
* Emits a Throw operation for the provided expression.
*
* @param expression A value for the operation.
* @param location An optional source map location for the operation.
*/
function emitThrow(expression: Expression, location?: TextRange): void {
emitWorker(OpCode.Throw, [expression], location);
}
/**
* Emits an Endfinally operation. This is used to handle `finally` block semantics.
*/
function emitEndfinally(): void {
emitWorker(OpCode.Endfinally);
}
/**
* Emits an operation.
*
* @param code The OpCode for the operation.
* @param args The optional arguments for the operation.
*/
function emitWorker(code: OpCode, args?: OperationArguments, location?: TextRange): void {
if (operations === undefined) {
operations = [];
operationArguments = [];
operationLocations = [];
}
if (labelOffsets === undefined) {
// mark entry point
markLabel(defineLabel());
}
const operationIndex = operations.length;
operations[operationIndex] = code;
operationArguments[operationIndex] = args;
operationLocations[operationIndex] = location;
}
/**
* Builds the generator function body.
*/
function build() {
blockIndex = 0;
labelNumber = 0;
labelNumbers = undefined;
lastOperationWasAbrupt = false;
lastOperationWasCompletion = false;
clauses = undefined;
statements = undefined;
exceptionBlockStack = undefined;
currentExceptionBlock = undefined;
withBlockStack = undefined;
const buildResult = buildStatements();
return createGeneratorHelper(
context,
setEmitFlags(
createFunctionExpression(
/*modifiers*/ undefined,
/*asteriskToken*/ undefined,
/*name*/ undefined,
/*typeParameters*/ undefined,
[createParameter(/*decorators*/ undefined, /*modifiers*/ undefined, /*dotDotDotToken*/ undefined, state)],
/*type*/ undefined,
createBlock(
buildResult,
/*multiLine*/ buildResult.length > 0
)
),
EmitFlags.ReuseTempVariableScope
)
);
}
/**
* Builds the statements for the generator function body.
*/
function buildStatements(): Statement[] {
if (operations) {
for (let operationIndex = 0; operationIndex < operations.length; operationIndex++) {
writeOperation(operationIndex);
}
flushFinalLabel(operations.length);
}
else {
flushFinalLabel(0);
}
if (clauses) {
const labelExpression = createPropertyAccess(state, "label");
const switchStatement = createSwitch(labelExpression, createCaseBlock(clauses));
switchStatement.startsOnNewLine = true;
return [switchStatement];
}
if (statements) {
return statements;
}
return [];
}
/**
* Flush the current label and advance to a new label.
*/
function flushLabel(): void {
if (!statements) {
return;
}
appendLabel(/*markLabelEnd*/ !lastOperationWasAbrupt);
lastOperationWasAbrupt = false;
lastOperationWasCompletion = false;
labelNumber++;
}
/**
* Flush the final label of the generator function body.
*/
function flushFinalLabel(operationIndex: number): void {
if (isFinalLabelReachable(operationIndex)) {
tryEnterLabel(operationIndex);
withBlockStack = undefined;
writeReturn(/*expression*/ undefined, /*operationLocation*/ undefined);
}
if (statements && clauses) {
appendLabel(/*markLabelEnd*/ false);
}
updateLabelExpressions();
}
/**
* Tests whether the final label of the generator function body
* is reachable by user code.
*/
function isFinalLabelReachable(operationIndex: number) {
// if the last operation was *not* a completion (return/throw) then
// the final label is reachable.
if (!lastOperationWasCompletion) {
return true;
}
// if there are no labels defined or referenced, then the final label is
// not reachable.
if (!labelOffsets || !labelExpressions) {
return false;
}
// if the label for this offset is referenced, then the final label
// is reachable.
for (let label = 0; label < labelOffsets.length; label++) {
if (labelOffsets[label] === operationIndex && labelExpressions[label]) {
return true;
}
}
return false;
}
/**
* Appends a case clause for the last label and sets the new label.
*
* @param markLabelEnd Indicates that the transition between labels was a fall-through
* from a previous case clause and the change in labels should be
* reflected on the `state` object.
*/
function appendLabel(markLabelEnd: boolean): void {
if (!clauses) {
clauses = [];
}
if (statements) {
if (withBlockStack) {
// The previous label was nested inside one or more `with` blocks, so we
// surround the statements in generated `with` blocks to create the same environment.
for (let i = withBlockStack.length - 1; i >= 0; i--) {
const withBlock = withBlockStack[i];
statements = [createWith(withBlock.expression, createBlock(statements))];
}
}
if (currentExceptionBlock) {
// The previous label was nested inside of an exception block, so we must
// indicate entry into a protected region by pushing the label numbers
// for each block in the protected region.
const { startLabel, catchLabel, finallyLabel, endLabel } = currentExceptionBlock;
statements.unshift(
createStatement(
createCall(
createPropertyAccess(createPropertyAccess(state, "trys"), "push"),
/*typeArguments*/ undefined,
[
createArrayLiteral([
createLabel(startLabel),
createLabel(catchLabel),
createLabel(finallyLabel),
createLabel(endLabel)
])
]
)
)
);
currentExceptionBlock = undefined;
}
if (markLabelEnd) {
// The case clause for the last label falls through to this label, so we
// add an assignment statement to reflect the change in labels.
statements.push(
createStatement(
createAssignment(
createPropertyAccess(state, "label"),
createLiteral(labelNumber + 1)
)
)
);
}
}
clauses.push(
createCaseClause(
createLiteral(labelNumber),
statements || []
)
);
statements = undefined;
}
/**
* Tries to enter into a new label at the current operation index.
*/
function tryEnterLabel(operationIndex: number): void {
if (!labelOffsets) {
return;
}
for (let label = 0; label < labelOffsets.length; label++) {
if (labelOffsets[label] === operationIndex) {
flushLabel();
if (labelNumbers === undefined) {
labelNumbers = [];
}
if (labelNumbers[labelNumber] === undefined) {
labelNumbers[labelNumber] = [label];
}
else {
labelNumbers[labelNumber].push(label);
}
}
}
}
/**
* Updates literal expressions for labels with actual label numbers.
*/
function updateLabelExpressions() {
if (labelExpressions !== undefined && labelNumbers !== undefined) {
for (let labelNumber = 0; labelNumber < labelNumbers.length; labelNumber++) {
const labels = labelNumbers[labelNumber];
if (labels !== undefined) {
for (const label of labels) {
const expressions = labelExpressions[label];
if (expressions !== undefined) {
for (const expression of expressions) {
expression.text = String(labelNumber);
}
}
}
}
}
}
}
/**
* Tries to enter or leave a code block.
*/
function tryEnterOrLeaveBlock(operationIndex: number): void {
if (blocks) {
for (; blockIndex < blockActions.length && blockOffsets[blockIndex] <= operationIndex; blockIndex++) {
const block = blocks[blockIndex];
const blockAction = blockActions[blockIndex];
if (isExceptionBlock(block)) {
if (blockAction === BlockAction.Open) {
if (!exceptionBlockStack) {
exceptionBlockStack = [];
}
if (!statements) {
statements = [];
}
exceptionBlockStack.push(currentExceptionBlock);
currentExceptionBlock = block;
}
else if (blockAction === BlockAction.Close) {
currentExceptionBlock = exceptionBlockStack.pop();
}
}
else if (isWithBlock(block)) {
if (blockAction === BlockAction.Open) {
if (!withBlockStack) {
withBlockStack = [];
}
withBlockStack.push(block);
}
else if (blockAction === BlockAction.Close) {
withBlockStack.pop();
}
}
}
}
}
/**
* Writes an operation as a statement to the current label's statement list.
*
* @param operationIndex The index of the operation to write.
*/
function writeOperation(operationIndex: number): void {
tryEnterLabel(operationIndex);
tryEnterOrLeaveBlock(operationIndex);
// early termination, nothing else to process in this label
if (lastOperationWasAbrupt) {
return;
}
lastOperationWasAbrupt = false;
lastOperationWasCompletion = false;
const opcode = operations[operationIndex];
if (opcode === OpCode.Nop) {
return;
}
else if (opcode === OpCode.Endfinally) {
return writeEndfinally();
}
const args = operationArguments[operationIndex];
if (opcode === OpCode.Statement) {
return writeStatement(<Statement>args[0]);
}
const location = operationLocations[operationIndex];
switch (opcode) {
case OpCode.Assign:
return writeAssign(<Expression>args[0], <Expression>args[1], location);
case OpCode.Break:
return writeBreak(<Label>args[0], location);
case OpCode.BreakWhenTrue:
return writeBreakWhenTrue(<Label>args[0], <Expression>args[1], location);
case OpCode.BreakWhenFalse:
return writeBreakWhenFalse(<Label>args[0], <Expression>args[1], location);
case OpCode.Yield:
return writeYield(<Expression>args[0], location);
case OpCode.YieldStar:
return writeYieldStar(<Expression>args[0], location);
case OpCode.Return:
return writeReturn(<Expression>args[0], location);
case OpCode.Throw:
return writeThrow(<Expression>args[0], location);
}
}
/**
* Writes a statement to the current label's statement list.
*
* @param statement A statement to write.
*/
function writeStatement(statement: Statement): void {
if (statement) {
if (!statements) {
statements = [statement];
}
else {
statements.push(statement);
}
}
}
/**
* Writes an Assign operation to the current label's statement list.
*
* @param left The left-hand side of the assignment.
* @param right The right-hand side of the assignment.
* @param operationLocation The source map location for the operation.
*/
function writeAssign(left: Expression, right: Expression, operationLocation: TextRange): void {
writeStatement(setTextRange(createStatement(createAssignment(left, right)), operationLocation));
}
/**
* Writes a Throw operation to the current label's statement list.
*
* @param expression The value to throw.
* @param operationLocation The source map location for the operation.
*/
function writeThrow(expression: Expression, operationLocation: TextRange): void {
lastOperationWasAbrupt = true;
lastOperationWasCompletion = true;
writeStatement(setTextRange(createThrow(expression), operationLocation));
}
/**
* Writes a Return operation to the current label's statement list.
*
* @param expression The value to return.
* @param operationLocation The source map location for the operation.
*/
function writeReturn(expression: Expression, operationLocation: TextRange): void {
lastOperationWasAbrupt = true;
lastOperationWasCompletion = true;
writeStatement(
setEmitFlags(
setTextRange(
createReturn(
createArrayLiteral(expression
? [createInstruction(Instruction.Return), expression]
: [createInstruction(Instruction.Return)]
)
),
operationLocation
),
EmitFlags.NoTokenSourceMaps
)
);
}
/**
* Writes a Break operation to the current label's statement list.
*
* @param label The label for the Break.
* @param operationLocation The source map location for the operation.
*/
function writeBreak(label: Label, operationLocation: TextRange): void {
lastOperationWasAbrupt = true;
writeStatement(
setEmitFlags(
setTextRange(
createReturn(
createArrayLiteral([
createInstruction(Instruction.Break),
createLabel(label)
])
),
operationLocation
),
EmitFlags.NoTokenSourceMaps
)
);
}
/**
* Writes a BreakWhenTrue operation to the current label's statement list.
*
* @param label The label for the Break.
* @param condition The condition for the Break.
* @param operationLocation The source map location for the operation.
*/
function writeBreakWhenTrue(label: Label, condition: Expression, operationLocation: TextRange): void {
writeStatement(
setEmitFlags(
createIf(
condition,
setEmitFlags(
setTextRange(
createReturn(
createArrayLiteral([
createInstruction(Instruction.Break),
createLabel(label)
])
),
operationLocation
),
EmitFlags.NoTokenSourceMaps
)
),
EmitFlags.SingleLine
)
);
}
/**
* Writes a BreakWhenFalse operation to the current label's statement list.
*
* @param label The label for the Break.
* @param condition The condition for the Break.
* @param operationLocation The source map location for the operation.
*/
function writeBreakWhenFalse(label: Label, condition: Expression, operationLocation: TextRange): void {
writeStatement(
setEmitFlags(
createIf(
createLogicalNot(condition),
setEmitFlags(
setTextRange(
createReturn(
createArrayLiteral([
createInstruction(Instruction.Break),
createLabel(label)
])
),
operationLocation
),
EmitFlags.NoTokenSourceMaps
)
),
EmitFlags.SingleLine
)
);
}
/**
* Writes a Yield operation to the current label's statement list.
*
* @param expression The expression to yield.
* @param operationLocation The source map location for the operation.
*/
function writeYield(expression: Expression, operationLocation: TextRange): void {
lastOperationWasAbrupt = true;
writeStatement(
setEmitFlags(
setTextRange(
createReturn(
createArrayLiteral(
expression
? [createInstruction(Instruction.Yield), expression]
: [createInstruction(Instruction.Yield)]
)
),
operationLocation
),
EmitFlags.NoTokenSourceMaps
)
);
}
/**
* Writes a YieldStar operation to the current label's statement list.
*
* @param expression The expression to yield.
* @param operationLocation The source map location for the operation.
*/
function writeYieldStar(expression: Expression, operationLocation: TextRange): void {
lastOperationWasAbrupt = true;
writeStatement(
setEmitFlags(
setTextRange(
createReturn(
createArrayLiteral([
createInstruction(Instruction.YieldStar),
expression
])
),
operationLocation
),
EmitFlags.NoTokenSourceMaps
)
);
}
/**
* Writes an Endfinally operation to the current label's statement list.
*/
function writeEndfinally(): void {
lastOperationWasAbrupt = true;
writeStatement(
createReturn(
createArrayLiteral([
createInstruction(Instruction.Endfinally)
])
)
);
}
}
function createGeneratorHelper(context: TransformationContext, body: FunctionExpression) {
context.requestEmitHelper(generatorHelper);
return createCall(
getHelperName("__generator"),
/*typeArguments*/ undefined,
[createThis(), body]);
}
// The __generator helper is used by down-level transformations to emulate the runtime
// semantics of an ES2015 generator function. When called, this helper returns an
// object that implements the Iterator protocol, in that it has `next`, `return`, and
// `throw` methods that step through the generator when invoked.
//
// parameters:
// thisArg The value to use as the `this` binding for the transformed generator body.
// body A function that acts as the transformed generator body.
//
// variables:
// _ Persistent state for the generator that is shared between the helper and the
// generator body. The state object has the following members:
// sent() - A method that returns or throws the current completion value.
// label - The next point at which to resume evaluation of the generator body.
// trys - A stack of protected regions (try/catch/finally blocks).
// ops - A stack of pending instructions when inside of a finally block.
// f A value indicating whether the generator is executing.
// y An iterator to delegate for a yield*.
// t A temporary variable that holds one of the following values (note that these
// cases do not overlap):
// - The completion value when resuming from a `yield` or `yield*`.
// - The error value for a catch block.
// - The current protected region (array of try/catch/finally/end labels).
// - The verb (`next`, `throw`, or `return` method) to delegate to the expression
// of a `yield*`.
// - The result of evaluating the verb delegated to the expression of a `yield*`.
//
// functions:
// verb(n) Creates a bound callback to the `step` function for opcode `n`.
// step(op) Evaluates opcodes in a generator body until execution is suspended or
// completed.
//
// The __generator helper understands a limited set of instructions:
// 0: next(value?) - Start or resume the generator with the specified value.
// 1: throw(error) - Resume the generator with an exception. If the generator is
// suspended inside of one or more protected regions, evaluates
// any intervening finally blocks between the current label and
// the nearest catch block or function boundary. If uncaught, the
// exception is thrown to the caller.
// 2: return(value?) - Resume the generator as if with a return. If the generator is
// suspended inside of one or more protected regions, evaluates any
// intervening finally blocks.
// 3: break(label) - Jump to the specified label. If the label is outside of the
// current protected region, evaluates any intervening finally
// blocks.
// 4: yield(value?) - Yield execution to the caller with an optional value. When
// resumed, the generator will continue at the next label.
// 5: yield*(value) - Delegates evaluation to the supplied iterator. When
// delegation completes, the generator will continue at the next
// label.
// 6: catch(error) - Handles an exception thrown from within the generator body. If
// the current label is inside of one or more protected regions,
// evaluates any intervening finally blocks between the current
// label and the nearest catch block or function boundary. If
// uncaught, the exception is thrown to the caller.
// 7: endfinally - Ends a finally block, resuming the last instruction prior to
// entering a finally block.
//
// For examples of how these are used, see the comments in ./transformers/generators.ts
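//
// As a rough sketch (simplified; the actual emit also carries source maps and
// instruction comments), a generator such as:
//
//  function* g() {
//      yield 1;
//  }
//
// is lowered to something like:
//
//  function g() {
//      return __generator(this, function (_a) {
//          switch (_a.label) {
//              case 0: return [4 /*yield*/, 1];
//              case 1:
//                  _a.sent();
//                  return [2 /*return*/];
//          }
//      });
//  }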
const generatorHelper: EmitHelper = {
name: "typescript:generator",
scoped: false,
priority: 6,
text: `
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = y[op[0] & 2 ? "return" : op[0] ? "throw" : "next"]) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [0, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};`
};
}
|
apache-2.0
|
olw/cdn
|
milton-client-app/src/main/java/bradswebdavclient/Configuration.java
|
7586
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package bradswebdavclient;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Map;
import java.util.Properties;
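/**
 * Persistent application configuration backed by a properties file
 * (~/.webdav.properties) in the user's home directory. Settings are exposed
 * as typed ConfigElement wrappers; secret values are stored encrypted via
 * StringEncrypter.
 */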
public class Configuration {
private final File configFile;
private final Properties props;
public final ToolbarElements toolbars = new ToolbarElements();
public final Property[] allProperties = new Property[]{};
public Configuration() {
    configFile = new File(System.getProperty("user.home"), ".webdav.properties");
    props = new Properties();
    try {
        if (configFile.exists()) {
            try (InputStream in = new FileInputStream(configFile)) {
                props.load(in);
            }
        } else {
            // No user config yet: load the bundled defaults and persist them.
            try (InputStream in = this.getClass().getResourceAsStream("default.properties")) {
                props.load(in);
            }
            save();
        }
    } catch (IOException ex) {
        // FileNotFoundException is a subclass of IOException, so one catch suffices.
        throw new RuntimeException(ex);
    }
}
public void save() {
    try (FileOutputStream out = new FileOutputStream(configFile)) {
        props.store(out, null);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
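/** Base class for a named, typed configuration entry. */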
public abstract class ConfigElement<T> {
public final String name;
public abstract T get();
ConfigElement(String name) {
this.name = name;
}
public String name() {
return name;
}
@Override
public final String toString() {
Object o = get();
if (o == null) {
return "";
}
return o.toString();
}
}
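/**
 * Persists the toolbar layout as a Base64-encoded serialized Map. Note that
 * this only works if the stored Runnable values are themselves Serializable.
 */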
public class ToolbarElements extends ConfigElement<Map<String, Runnable>> {
ToolbarElements() {
super("toolbars");
}
public Map<String, Runnable> get() {
    String s = props.getProperty(name);
    if (s == null || s.length() == 0) {
        return null;
    }
    byte[] arr = StringEncrypter.decodeBase64(s);
    try (ObjectInputStream oin = new ObjectInputStream(new ByteArrayInputStream(arr))) {
        @SuppressWarnings("unchecked")
        Map<String, Runnable> map = (Map<String, Runnable>) oin.readObject();
        return map;
    } catch (ClassNotFoundException | IOException ex) {
        throw new RuntimeException(ex);
    }
}
public void set(Map<String, Runnable> map) {
    try {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(bout)) {
            oos.writeObject(map);
        }
        // The stream must be closed before reading the buffer, otherwise
        // buffered object data may be missing from the byte array.
        String s = StringEncrypter.encodeBase64(bout.toByteArray());
        props.setProperty(name, s);
    } catch (IOException ex) {
        throw new RuntimeException(ex);
    }
}
}
public abstract class Property extends ConfigElement<String> {
    Property(String name) {
        super(name);
    }
    // Shared helpers for the encrypted subclasses (SecretProperty, SuperSecretProperty).
    protected String encrypt(String x) {
        try {
            return StringEncrypter.getInstance().encrypt(x);
        } catch (StringEncrypter.EncryptionException ex) {
            throw new RuntimeException(ex);
        }
    }
    protected String decrypt(String s) {
        try {
            return StringEncrypter.getInstance().decrypt(s);
        } catch (StringEncrypter.EncryptionException ex) {
            throw new RuntimeException(ex);
        }
    }
}
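/** A plain-text string property with an optional default value. */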
public class EditableProperty extends Property {
private final String defValue;
EditableProperty(String name, String defValue) {
super(name);
this.defValue = defValue;
}
EditableProperty(String name) {
super(name);
defValue = null;
}
public String get() {
String s = props.getProperty(name);
if( s == null ) s = defValue;
return s;
}
public void set(String value) {
props.setProperty(name, value);
}
}
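/** An editable property whose value is stored encrypted; blank values are stored as "". */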
public class SecretProperty extends EditableProperty {
SecretProperty(String name) {
super(name);
}
@Override
public String get() {
String s = props.getProperty(name);
if (s == null || s.length() == 0) {
return null;
}
return decrypt(s);
}
@Override
public void set(String value) {
if( value != null && value.length() > 0 ) {
String s = encrypt(value);
props.setProperty(name, s);
} else {
props.setProperty(name, "");
}
}
}
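/**
 * An encrypted property that extends Property directly rather than
 * EditableProperty, so it carries no default value and is always stored
 * encrypted.
 */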
public class SuperSecretProperty extends Property {
SuperSecretProperty(String name) {
super(name);
}
public String get() {
String s = props.getProperty(name);
if (s == null || s.length() == 0) {
return null;
}
return decrypt(s);
}
public void set(String value) {
String s = encrypt(value);
props.setProperty(name, s);
}
}
}
|
apache-2.0
|