repo_name
stringlengths 4
116
| path
stringlengths 4
379
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
rndsolutions/hawkcd
|
Server/ui/src/app/pipelines/services/pipelineConfig.service.js
|
24557
|
/* Copyright (C) 2016 R&D Solutions Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
angular
    .module('hawk.pipelinesManagement')
    .factory('pipeConfigService', ['jsonHandlerService', 'websocketSenderService', function (jsonHandlerService, websocketSenderService) {
        // Service container. A plain object is used instead of aliasing `this`:
        // Angular invokes factory functions without a receiver, so under the
        // file-level 'use strict' directive `this` is undefined inside this
        // function and assigning properties to it would throw a TypeError.
        var pipeConfigService = {};

        // Java package hosting every backend service class addressed below.
        var PACKAGE_NAME = "io.hawkcd.services";

        // Placeholder argument expected by some backend getAll endpoints.
        var EMPTY_ARG = "{\"packageName\": \"\", \"object\": \"\"}";

        /**
         * Builds the RPC envelope (empty result/error) for one backend call and
         * pushes it to the server over the websocket.
         * @param {string}   className  backend service class name
         * @param {string}   methodName backend method to invoke
         * @param {string[]} args       pre-serialized argument descriptors
         */
        function sendRequest(className, methodName, args) {
            var json = jsonHandlerService.createJson(className, PACKAGE_NAME, methodName, "", "", args);
            websocketSenderService.call(json);
        }

        // Serializes one java.lang.String argument descriptor.
        function stringArg(value) {
            return "{\"packageName\": \"java.lang.String\", \"object\": \"" + value + "\"}";
        }

        // Serializes one typed-object argument descriptor; javaType is the
        // fully-qualified Java model class of the payload.
        function objectArg(javaType, object) {
            return "{\"packageName\": \"" + javaType + "\", \"object\": " + JSON.stringify(object) + "}";
        }

        //region Senders
        //region /pipeline
        pipeConfigService.getAllPipelineDefinitions = function () {
            sendRequest("PipelineDefinitionService", "getAll", []);
        };
        pipeConfigService.getPipelineDefinitionById = function (id) {
            sendRequest("PipelineDefinitionService", "getById", [stringArg(id)]);
        };
        pipeConfigService.getAllPipelineGroupDTOs = function () {
            sendRequest("PipelineGroupService", "getAllPipelineGroupDTOs", []);
        };
        pipeConfigService.addPipelineDefinition = function (pipelineDefinition) {
            sendRequest("PipelineDefinitionService", "add",
                [objectArg("io.hawkcd.model.PipelineDefinition", pipelineDefinition)]);
        };
        // Both definitions travel as ONE argument string (comma-joined), matching
        // the wire format the original client produced for this overload.
        pipeConfigService.addPipelineDefinitionWithMaterial = function (pipelineDefinition, materialDefinition) {
            sendRequest("PipelineDefinitionService", "addWithMaterialDefinition",
                [objectArg("io.hawkcd.model.PipelineDefinition", pipelineDefinition) + "," +
                 objectArg("io.hawkcd.model.MaterialDefinition", materialDefinition)]);
        };
        // Variant that references an already-existing material by its id (String overload).
        pipeConfigService.addPipelineDefinitionWithExistingMaterial = function (pipelineDefinition, materialDefinition) {
            sendRequest("PipelineDefinitionService", "addWithMaterialDefinition",
                [objectArg("io.hawkcd.model.PipelineDefinition", pipelineDefinition) + "," +
                 stringArg(materialDefinition)]);
        };
        pipeConfigService.updatePipelineDefinition = function (pipelineDefinition) {
            sendRequest("PipelineDefinitionService", "update",
                [objectArg("io.hawkcd.model.PipelineDefinition", pipelineDefinition)]);
        };
        //TODO: Send Pipeline Definition and Pipeline Group to assign to (and possibly Pipeline Group that is assigned from)
        pipeConfigService.assignPipelineDefinition = function (pipelineDefinitionId, pipelineGroupId, pipelineGroupName) {
            sendRequest("PipelineDefinitionService", "assignPipelineToGroup",
                [stringArg(pipelineDefinitionId) + ", " + stringArg(pipelineGroupId) + ", " + stringArg(pipelineGroupName)]);
        };
        //TODO: Send Pipeline Definition to be unassigned
        pipeConfigService.unassignPipelineDefinition = function (pipelineDefinitionId) {
            sendRequest("PipelineDefinitionService", "unassignPipelineFromGroup", [stringArg(pipelineDefinitionId)]);
        };
        //TODO: Send Pipeline Definition to be deleted
        pipeConfigService.deletePipelineDefinition = function (pipelineDefinition) {
            sendRequest("PipelineDefinitionService", "delete",
                [objectArg("io.hawkcd.model.PipelineDefinition", pipelineDefinition)]);
        };
        // Unimplemented stubs kept for interface compatibility.
        pipeConfigService.createPipeline = function (pipeline, token) {
        };
        pipeConfigService.deletePipeline = function (pipeName, token) {
        };
        pipeConfigService.getPipelineDef = function (pipeName, token) {
        };
        pipeConfigService.updatePipeline = function (pipeName, pipeline, token) {
        };
        //endregion
        //region /pipelines_groups
        pipeConfigService.getAllGroups = function (token) {
        };
        pipeConfigService.createGroup = function (pipeGroup, token) {
        };
        pipeConfigService.deleteGroup = function (pipeGroupName, token) {
        };
        pipeConfigService.getGroup = function (pipeGroupName, token) {
        };
        pipeConfigService.updateGroup = function (pipeGroupName, pipeGroup, token) {
        };
        //endregion
        //region /stages
        pipeConfigService.getAllStageDefinitions = function () {
            // NOTE: unlike the pipeline getAll, this endpoint expects a single
            // empty argument descriptor — preserved from the original client.
            sendRequest("StageDefinitionService", "getAll", [EMPTY_ARG]);
        };
        pipeConfigService.getStageDefinitionById = function (id) {
            sendRequest("StageDefinitionService", "getById", [stringArg(id)]);
        };
        pipeConfigService.addStageDefinition = function (stageDefinition) {
            sendRequest("StageDefinitionService", "add",
                [objectArg("io.hawkcd.model.StageDefinition", stageDefinition)]);
        };
        pipeConfigService.updateStageDefinition = function (stageDefinition) {
            sendRequest("StageDefinitionService", "update",
                [objectArg("io.hawkcd.model.StageDefinition", stageDefinition)]);
        };
        //TODO: Send Stage Definition to be deleted
        pipeConfigService.deleteStageDefinition = function (stageDefinition) {
            sendRequest("StageDefinitionService", "delete",
                [objectArg("io.hawkcd.model.StageDefinition", stageDefinition)]);
        };
        //endregion
        //region /jobs
        pipeConfigService.getAllJobDefinitions = function () {
            sendRequest("JobDefinitionService", "getAll", [EMPTY_ARG]);
        };
        pipeConfigService.getJobDefinitionById = function (id) {
            sendRequest("JobDefinitionService", "getById", [stringArg(id)]);
        };
        pipeConfigService.addJobDefinition = function (jobDefinition) {
            sendRequest("JobDefinitionService", "add",
                [objectArg("io.hawkcd.model.JobDefinition", jobDefinition)]);
        };
        pipeConfigService.updateJobDefinition = function (jobDefinition) {
            sendRequest("JobDefinitionService", "update",
                [objectArg("io.hawkcd.model.JobDefinition", jobDefinition)]);
        };
        //TODO: Send Job Definition to be deleted
        pipeConfigService.deleteJobDefinition = function (jobDefinition) {
            sendRequest("JobDefinitionService", "delete",
                [objectArg("io.hawkcd.model.JobDefinition", jobDefinition)]);
        };
        //endregion
        //region /tasks
        pipeConfigService.addTaskDefinition = function (taskDefinition) {
            sendRequest("TaskDefinitionService", "add",
                [objectArg("io.hawkcd.model.TaskDefinition", taskDefinition)]);
        };
        pipeConfigService.updateTaskDefinition = function (taskDefinition) {
            sendRequest("TaskDefinitionService", "update",
                [objectArg("io.hawkcd.model.TaskDefinition", taskDefinition)]);
        };
        //TODO: Send Task Definition to be deleted
        pipeConfigService.deleteTaskDefinition = function (taskDefinition) {
            sendRequest("TaskDefinitionService", "delete",
                [objectArg("io.hawkcd.model.TaskDefinition", taskDefinition)]);
        };
        pipeConfigService.getAllMaterialDefinitions = function () {
            sendRequest("MaterialDefinitionService", "getAll", []);
        };
        pipeConfigService.getMaterialDefinitionById = function (id) {
            sendRequest("MaterialDefinitionService", "getById", [stringArg(id)]);
        };
        pipeConfigService.addGitMaterialDefinition = function (materialDefinition) {
            sendRequest("MaterialDefinitionService", "add",
                [objectArg("io.hawkcd.model.GitMaterial", materialDefinition)]);
        };
        pipeConfigService.addNugetMaterialDefinition = function (materialDefinition) {
            sendRequest("MaterialDefinitionService", "add",
                [objectArg("io.hawkcd.model.NugetMaterial", materialDefinition)]);
        };
        pipeConfigService.updateGitMaterialDefinition = function (materialDefinition) {
            sendRequest("MaterialDefinitionService", "update",
                [objectArg("io.hawkcd.model.GitMaterial", materialDefinition)]);
        };
        pipeConfigService.updateNugetMaterialDefinition = function (materialDefinition) {
            sendRequest("MaterialDefinitionService", "update",
                [objectArg("io.hawkcd.model.NugetMaterial", materialDefinition)]);
        };
        pipeConfigService.deleteMaterialDefinition = function (materialDefinition) {
            sendRequest("MaterialDefinitionService", "delete",
                [objectArg("io.hawkcd.model.MaterialDefinition", materialDefinition)]);
        };
        pipeConfigService.getAllTasks = function (pipeName, stageName, jobName, token) {
        };
        pipeConfigService.deleteTask = function (pipeName, stageName, jobName, taskIndex, token) {
        };
        pipeConfigService.getTask = function (pipeName, stageName, jobName, taskIndex, token) {
        };
        pipeConfigService.createExecTask = function (pipeName, stageName, jobName, task, token) {
        };
        pipeConfigService.createFetchArtifactTask = function (pipeName, stageName, jobName, task, token) {
        };
        pipeConfigService.createFetchMaterialTask = function (pipeName, stageName, jobName, task, token) {
        };
        pipeConfigService.createUploadArtifactTask = function (pipeName, stageName, jobName, task, token) {
        };
        pipeConfigService.updateExecTask = function (pipeName, stageName, jobName, taskIndex, task, token) {
        };
        pipeConfigService.updateFetchArtifactTask = function (pipeName, stageName, jobName, taskIndex, task, token) {
        };
        pipeConfigService.updateFetchMaterialTask = function (pipeName, stageName, jobName, taskIndex, task, token) {
        };
        pipeConfigService.updateUploadArtifactTask = function (pipeName, stageName, jobName, taskIndex, task, token) {
        };
        //endregion
        //region /artifacts
        pipeConfigService.getAllArtifacts = function (pipeName, stageName, jobName, token) {
        };
        pipeConfigService.createArtifact = function (pipeName, stageName, jobName, artifactIndex, token) {
        };
        pipeConfigService.deleteArtifact = function (pipeName, stageName, jobName, artifactIndex, token) {
        };
        pipeConfigService.getArtifact = function (pipeName, stageName, jobName, artifactIndex, token) {
        };
        pipeConfigService.updateArtifact = function (pipeName, stageName, jobName, artifactIndex, artifact, token) {
        };
        //endregion
        //region /materials
        pipeConfigService.getAllMaterials = function (pipeName, token) {
        };
        pipeConfigService.getMaterial = function (pipeName, materialName, token) {
        };
        pipeConfigService.createMaterial = function (pipeName, material, token) {
        };
        pipeConfigService.deleteMaterial = function (pipeName, materialName, token) {
        };
        pipeConfigService.updateMaterial = function (pipeName, materialName, material, token) {
        };
        //endregion
        //region /environments
        pipeConfigService.getAllEnvironments = function (token) {
        };
        pipeConfigService.createEnvironment = function (environment, token) {
        };
        pipeConfigService.deleteEnvironment = function (environmentName, token) {
        };
        pipeConfigService.getEnvironment = function (environmentName, token) {
        };
        pipeConfigService.updateEnvironment = function (environmentName, environment, token) {
        };
        //endregion
        pipeConfigService.getAllPipelineVars = function (pipelineName, token) {
        };
        pipeConfigService.getAllStageVars = function (pipelineName, stageName, token) {
        };
        pipeConfigService.getAllJobVars = function (pipelineName, stageName, jobName, token) {
        };
        pipeConfigService.getPipelineVar = function (pipelineName, variable, token) {
        };
        pipeConfigService.getStageVar = function (pipelineName, stageName, variable, token) {
        };
        pipeConfigService.getJobVar = function (pipelineName, stageName, jobName, variable, token) {
        };
        pipeConfigService.createPipelineVar = function (pipelineName, variable, token) {
        };
        pipeConfigService.createStageVar = function (pipelineName, stageName, variable, token) {
        };
        pipeConfigService.createJobVar = function (pipelineName, stageName, jobName, variable, token) {
        };
        pipeConfigService.deletePipelineVar = function (pipelineName, variableName, token) {
        };
        pipeConfigService.deleteStageVar = function (pipelineName, stageName, variableName, token) {
        };
        pipeConfigService.deleteJobVar = function (pipelineName, stageName, jobName, variableName, token) {
        };
        pipeConfigService.updatePipelineVar = function (pipelineName, variableName, variable, token) {
        };
        pipeConfigService.updateStageVar = function (pipelineName, stageName, variableName, variable, token) {
        };
        pipeConfigService.updateJobVar = function (pipelineName, stageName, jobName, variableName, variable, token) {
        };
        pipeConfigService.getLatestCommit = function (token) {
        };
        //endregion
        return pipeConfigService;
    }]);
|
apache-2.0
|
jitt-lv/salidzini-plugin-for-nopcommerce
|
Data/SalidziniFeedSettingsMap.cs
|
940
|
using System;
using System.Collections.Generic;
using System.Data.Entity.ModelConfiguration;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Nop.Plugin.Feed.Salidzini
{
    /// <summary>
    /// Entity Framework fluent mapping for <c>SalidziniFeedSettings</c>.
    /// Maps the entity to a table named after the CLR type and persists
    /// only the key and <c>ShopId</c>.
    /// </summary>
    public class SalidziniFeedSettingsMap : EntityTypeConfiguration<SalidziniFeedSettings>
    {
        public SalidziniFeedSettingsMap()
        {
            // Table name mirrors the entity type name ("SalidziniFeedSettings").
            ToTable(typeof(SalidziniFeedSettings).Name);
            //Map the primary key
            HasKey(m => m.Id);
            // ShopId is the only additional column mapped at present.
            Property(m => m.ShopId);
            /*
            //Map the additional properties (currently disabled — kept for reference)
            Property(m => m.ProductId);
            //Avoiding truncation/failure
            //so we set the same max length used in the product table
            Property(m => m.ProductName).HasMaxLength(400);
            Property(m => m.IpAddress);
            Property(m => m.CustomerId);
            Property(m => m.IsRegistered);
            */
        }
    }
}
|
apache-2.0
|
raistlic/raistlic-lib-commons-core
|
src/main/java/org/raistlic/common/assertion/GenericAssertionAbstract.java
|
10287
|
/*
* Copyright 2016 Lei CHEN (raistlic@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.raistlic.common.assertion;
import org.raistlic.common.precondition.Precondition;
import org.raistlic.common.predicate.Predicates;
import java.util.Objects;
import java.util.function.Function;
import java.util.function.Predicate;
/**
 * Skeleton implementation for assertion classes: validates a given {@code candidate}
 * against a set of expectations and, whenever a check fails, throws the runtime
 * exception produced by the configured exception mapper.
 */
public abstract class GenericAssertionAbstract<C, E extends Assertion<C, E>> implements Assertion<C, E> {

    /** @return this instance, typed as the concrete assertion class, for call chaining. */
    abstract E getThis();

    /** @return the value currently under test. */
    abstract C getCandidate();

    /** @return the factory that turns a failure message into the exception to throw. */
    abstract Function<String, ? extends RuntimeException> getExceptionMapper();

    /**
     * Claims the {@code candidate} is {@code null}; throws a runtime exception (with a
     * default message) when it is not.
     *
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    @Override
    public E isNull() {
        return isNull("Candidate should be null, but it is " + getCandidate());
    }

    /**
     * Claims the {@code candidate} is {@code null}; throws a runtime exception carrying
     * {@code message} when it is not.
     *
     * @param message the message attached to the exception on failure.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E isNull(String message) {
        if (getCandidate() == null) {
            return getThis();
        }
        throw getExceptionMapper().apply(message);
    }

    /**
     * Claims the {@code candidate} is not {@code null}; throws a runtime exception (with
     * a default message) when it is.
     *
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E isNotNull() {
        return isNotNull("Candidate should not be null, but it is.");
    }

    /**
     * Claims the {@code candidate} is not {@code null}; throws a runtime exception
     * carrying {@code message} when it is.
     *
     * @param message the message attached to the exception on failure.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E isNotNull(String message) {
        if (getCandidate() != null) {
            return getThis();
        }
        throw getExceptionMapper().apply(message);
    }

    /**
     * Claims the {@code candidate} equals {@code target} (per {@link Objects#equals});
     * throws a runtime exception (with a default message) when it does not.
     *
     * @param target the expected value, possibly {@code null}.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E isEqualTo(C target) {
        if (Objects.equals(getCandidate(), target)) {
            return getThis();
        }
        throw getExceptionMapper().apply(
            "'" + getCandidate() + "' and '" + target + "' is not equal.");
    }

    /**
     * Claims the {@code candidate} equals {@code target}; throws a runtime exception
     * carrying {@code message} when it does not.
     *
     * @param target  the expected value, possibly {@code null}.
     * @param message the message attached to the exception on failure.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E isEqualTo(C target, String message) {
        if (Objects.equals(getCandidate(), target)) {
            return getThis();
        }
        throw getExceptionMapper().apply(message);
    }

    /**
     * Claims the {@code candidate} does NOT equal {@code target}; throws a runtime
     * exception (with a default message) when they are equal.
     *
     * @param target the value the candidate must differ from, possibly {@code null}.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E isNotEqualTo(C target) {
        if (!Objects.equals(getCandidate(), target)) {
            return getThis();
        }
        throw getExceptionMapper().apply(
            "'" + getCandidate() + "' and '" + target + "' are (unexpectedly) equal.");
    }

    /**
     * Claims the {@code candidate} does NOT equal {@code target}; throws a runtime
     * exception carrying {@code message} when they are equal.
     *
     * @param target  the value the candidate must differ from, possibly {@code null}.
     * @param message the message attached to the exception on failure.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E isNotEqualTo(C target, String message) {
        if (!Objects.equals(getCandidate(), target)) {
            return getThis();
        }
        throw getExceptionMapper().apply(message);
    }

    /**
     * Claims the {@code candidate} is an instance of {@code type}; throws a runtime
     * exception (with a default message) when it is not.
     *
     * @param type the expected type, cannot be {@code null}.
     * @return this instance, for call chaining.
     */
    public E isInstanceOf(Class<?> type) {
        Precondition.assertParam(type != null, "'type' should not be null, but it is.");
        if (Predicates.instanceOf(type).test(getCandidate())) {
            return getThis();
        }
        String actual = (getCandidate() == null)
            ? "null"
            : "not (actual type: '" + getCandidate().getClass().getName() + "')";
        throw getExceptionMapper().apply(
            "'" + getCandidate() + "' should be instance of type '" + type + "', but is " + actual);
    }

    /**
     * Claims the {@code candidate} is an instance of {@code type}; throws a runtime
     * exception carrying {@code message} when it is not.
     *
     * @param type    the expected type, cannot be {@code null}.
     * @param message the message attached to the exception on failure.
     * @return this instance, for call chaining.
     */
    public E isInstanceOf(Class<?> type, String message) {
        Precondition.assertParam(type != null, "'type' should not be null, but it is.");
        if (Predicates.instanceOf(type).test(getCandidate())) {
            return getThis();
        }
        throw getExceptionMapper().apply(message);
    }

    /**
     * Claims the {@code candidate} satisfies {@code predicate}; throws a runtime
     * exception (with a default message) when it does not.
     *
     * @param predicate the test to apply to the candidate, cannot be {@code null}.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E matches(Predicate<? super C> predicate) {
        Precondition.assertParam(predicate != null, "'predicate' should not be null, but it is.");
        if (predicate.test(getCandidate())) {
            return getThis();
        }
        throw getExceptionMapper().apply(
            "'" + getCandidate() + "' does not match the specified predicate: '" + predicate + "'");
    }

    /**
     * Claims the {@code candidate} satisfies {@code predicate}; throws a runtime
     * exception carrying {@code message} when it does not.
     *
     * @param predicate the test to apply to the candidate, cannot be {@code null}.
     * @param message   the message attached to the exception on failure.
     * @return this instance, for call chaining.
     * @throws java.lang.RuntimeException of a context-dependent type when the check fails.
     */
    public E matches(Predicate<? super C> predicate, String message) {
        Precondition.assertParam(predicate != null, "'predicate' should not be null, but it is.");
        if (predicate.test(getCandidate())) {
            return getThis();
        }
        throw getExceptionMapper().apply(message);
    }
}
|
apache-2.0
|
nla/tinycdxserver
|
test/outbackcdx/RegexFilterTest.java
|
914
|
package outbackcdx;
import org.junit.Test;
import static org.junit.Assert.*;
/** Unit tests for {@code RegexFilter}: field-scoped regex matching and negation. */
public class RegexFilterTest {

    /** Builds a capture record with the given filename and HTTP status. */
    private static Capture capture(String file, int status) {
        Capture c = new Capture();
        c.file = file;
        c.status = status;
        return c;
    }

    /** Evaluates a filter expression against a capture. */
    private static boolean filter(String expression, Capture capture) {
        return new RegexFilter(expression).test(capture);
    }

    @Test
    public void test() {
        Capture first = capture("one.warc.gz", 201);
        Capture second = capture("two.warc.gz", 202);

        // Filename field matching.
        assertTrue(filter("filename:one.*", first));
        assertFalse(filter("filename:one.*", second));

        // Status field matching, including regex wildcards.
        assertTrue(filter("status:20.", first));
        assertTrue(filter("status:20.", second));
        assertTrue(filter("status:201", first));
        assertFalse(filter("status:201", second));

        // A leading '!' negates the filter.
        assertTrue(filter("!status:201", second));
    }

    /** Unknown field names must be rejected at construction time. */
    @Test(expected = IllegalArgumentException.class)
    public void bogusField() {
        new RegexFilter("bogus:.*");
    }
}
|
apache-2.0
|
stankovski/azure-sdk-for-net
|
sdk/synapse/Azure.Analytics.Synapse.Artifacts/src/Generated/Models/NotebookCell.Serialization.cs
|
4988
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System.Collections.Generic;
using System.Text.Json;
using Azure.Core;
namespace Azure.Analytics.Synapse.Artifacts.Models
{
// Serialization half of the NotebookCell model: writes the cell as Jupyter-style
// JSON ("cell_type", "metadata", "source", optional "attachments"/"outputs",
// plus pass-through additional properties) and parses it back.
// NOTE: auto-generated code — property names and write order must not change.
public partial class NotebookCell : IUtf8JsonSerializable
{
void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
{
writer.WriteStartObject();
// Required members are written unconditionally, in schema order.
writer.WritePropertyName("cell_type");
writer.WriteStringValue(CellType);
writer.WritePropertyName("metadata");
writer.WriteObjectValue(Metadata);
writer.WritePropertyName("source");
writer.WriteStartArray();
foreach (var item in Source)
{
writer.WriteStringValue(item);
}
writer.WriteEndArray();
// Optional members are emitted only when present.
if (Attachments != null)
{
writer.WritePropertyName("attachments");
writer.WriteObjectValue(Attachments);
}
if (Outputs != null)
{
writer.WritePropertyName("outputs");
writer.WriteStartArray();
foreach (var item in Outputs)
{
writer.WriteObjectValue(item);
}
writer.WriteEndArray();
}
// Unknown properties captured during deserialization are round-tripped.
foreach (var item in AdditionalProperties)
{
writer.WritePropertyName(item.Key);
writer.WriteObjectValue(item.Value);
}
writer.WriteEndObject();
}
// Parses a NotebookCell from a JSON element; unrecognized properties are
// collected into the additional-properties dictionary.
internal static NotebookCell DeserializeNotebookCell(JsonElement element)
{
string cellType = default;
object metadata = default;
IList<string> source = default;
object attachments = default;
IList<NotebookCellOutputItem> outputs = default;
IDictionary<string, object> additionalProperties = default;
Dictionary<string, object> additionalPropertiesDictionary = default;
foreach (var property in element.EnumerateObject())
{
if (property.NameEquals("cell_type"))
{
cellType = property.Value.GetString();
continue;
}
if (property.NameEquals("metadata"))
{
metadata = property.Value.GetObject();
continue;
}
if (property.NameEquals("source"))
{
// JSON nulls inside the array are preserved as null entries.
List<string> array = new List<string>();
foreach (var item in property.Value.EnumerateArray())
{
if (item.ValueKind == JsonValueKind.Null)
{
array.Add(null);
}
else
{
array.Add(item.GetString());
}
}
source = array;
continue;
}
if (property.NameEquals("attachments"))
{
// A null optional property is treated as absent.
if (property.Value.ValueKind == JsonValueKind.Null)
{
continue;
}
attachments = property.Value.GetObject();
continue;
}
if (property.NameEquals("outputs"))
{
if (property.Value.ValueKind == JsonValueKind.Null)
{
continue;
}
List<NotebookCellOutputItem> array = new List<NotebookCellOutputItem>();
foreach (var item in property.Value.EnumerateArray())
{
if (item.ValueKind == JsonValueKind.Null)
{
array.Add(null);
}
else
{
array.Add(NotebookCellOutputItem.DeserializeNotebookCellOutputItem(item));
}
}
outputs = array;
continue;
}
// Anything not matched above is an additional (schema-unknown) property.
additionalPropertiesDictionary ??= new Dictionary<string, object>();
if (property.Value.ValueKind == JsonValueKind.Null)
{
additionalPropertiesDictionary.Add(property.Name, null);
}
else
{
additionalPropertiesDictionary.Add(property.Name, property.Value.GetObject());
}
}
additionalProperties = additionalPropertiesDictionary;
return new NotebookCell(cellType, metadata, source, attachments, outputs, additionalProperties);
}
}
}
|
apache-2.0
|
Dennis-Koch/ambeth
|
jambeth/jambeth-test/src/test/java/com/koch/ambeth/relations/many/lazy/link/reverse/none/EntityB.java
|
1165
|
package com.koch.ambeth.relations.many.lazy.link.reverse.none;
/*-
* #%L
* jambeth-test
* %%
* Copyright (C) 2017 Koch Softwaredevelopment
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
* #L%
*/
import java.util.List;
import com.koch.ambeth.model.AbstractEntity;
// Test entity for the many-to-many lazy-link relation scenario: holds a name
// and a list of related EntityA instances.
public class EntityB extends AbstractEntity {
// display/business name of this entity
protected String name;
// lazily linked related entities (reverse side not mapped in this scenario)
protected List<EntityA> entityAs;
// Protected no-arg constructor, presumably required by the entity framework's
// proxying/instantiation — TODO confirm against AbstractEntity usage.
protected EntityB() {
// Intended blank
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<EntityA> getEntityAs() {
return entityAs;
}
public void setEntityAs(List<EntityA> entityAs) {
this.entityAs = entityAs;
}
}
|
apache-2.0
|
alvescaio/SisGO
|
system/database/DB_active_rec.php
|
43058
|
<?php if ( ! defined('BASEPATH')) exit('No direct script access allowed');
/**
* CodeIgniter
*
* An open source application development framework for PHP 5.1.6 or newer
*
* @package CodeIgniter
* @author EllisLab Dev Team
* @copyright Copyright (c) 2008 - 2014, EllisLab, Inc.
* @copyright Copyright (c) 2014 - 2015, British Columbia Institute of Technology (http://bcit.ca/)
* @license http://codeigniter.com/user_guide/license.html
* @link http://codeigniter.com
* @since Version 1.0
* @filesource
*/
// ------------------------------------------------------------------------
/**
* Active Record Class
*
* This is the platform-independent base Active Record implementation class.
*
* @package CodeIgniter
* @subpackage Drivers
* @category Database
* @author EllisLab Dev Team
* @link http://codeigniter.com/user_guide/database/
*/
class CI_DB_active_record extends CI_DB_driver {
// --- Query-builder state (cleared by _reset_select()/_reset_write()) ---
var $ar_select = array();
var $ar_distinct = FALSE;
var $ar_from = array();
var $ar_join = array();
var $ar_where = array();
var $ar_like = array();
var $ar_groupby = array();
var $ar_having = array();
var $ar_keys = array();
var $ar_limit = FALSE;
var $ar_offset = FALSE;
var $ar_order = FALSE;
var $ar_orderby = array();
var $ar_set = array();
var $ar_wherein = array();
// Table aliases seen so far; used to decide whether to prefix identifiers.
var $ar_aliased_tables = array();
var $ar_store_array = array();
// Active Record Caching variables
// When $ar_caching is TRUE, clauses are mirrored into the ar_cache_* arrays
// so they survive _reset_select() and can be merged into later queries.
var $ar_caching = FALSE;
var $ar_cache_exists = array();
var $ar_cache_select = array();
var $ar_cache_from = array();
var $ar_cache_join = array();
var $ar_cache_where = array();
var $ar_cache_like = array();
var $ar_cache_groupby = array();
var $ar_cache_having = array();
var $ar_cache_orderby = array();
var $ar_cache_set = array();
// Per-select escape flags, parallel to $ar_select.
var $ar_no_escape = array();
var $ar_cache_no_escape = array();
// --------------------------------------------------------------------
/**
 * Select
 *
 * Generates the SELECT portion of the query.
 *
 * @param string|array $select comma separated field list, or an array of fields
 * @param bool|null $escape per-field escape flag (NULL = driver default)
 * @return object this instance, for method chaining
 */
public function select($select = '*', $escape = NULL)
{
    // Normalise a comma separated string into an array of fields.
    if (is_string($select))
    {
        $select = explode(',', $select);
    }
    foreach ($select as $field)
    {
        $field = trim($field);
        if ($field == '')
        {
            continue;
        }
        $this->ar_select[] = $field;
        $this->ar_no_escape[] = $escape;
        if ($this->ar_caching === TRUE)
        {
            $this->ar_cache_select[] = $field;
            $this->ar_cache_exists[] = 'select';
            $this->ar_cache_no_escape[] = $escape;
        }
    }
    return $this;
}
// --------------------------------------------------------------------
/**
 * Select Max
 *
 * Generates a SELECT MAX(field) portion of a query.
 *
 * @param string $select the field to aggregate
 * @param string $alias optional alias for the result column
 * @return object this instance, for method chaining
 */
public function select_max($select = '', $alias = '')
{
return $this->_max_min_avg_sum($select, $alias, 'MAX');
}
// --------------------------------------------------------------------
/**
 * Select Min
 *
 * Generates a SELECT MIN(field) portion of a query.
 *
 * @param string $select the field to aggregate
 * @param string $alias optional alias for the result column
 * @return object this instance, for method chaining
 */
public function select_min($select = '', $alias = '')
{
return $this->_max_min_avg_sum($select, $alias, 'MIN');
}
// --------------------------------------------------------------------
/**
 * Select Average
 *
 * Generates a SELECT AVG(field) portion of a query.
 *
 * @param string $select the field to aggregate
 * @param string $alias optional alias for the result column
 * @return object this instance, for method chaining
 */
public function select_avg($select = '', $alias = '')
{
return $this->_max_min_avg_sum($select, $alias, 'AVG');
}
// --------------------------------------------------------------------
/**
 * Select Sum
 *
 * Generates a SELECT SUM(field) portion of a query.
 *
 * @param string $select the field to aggregate
 * @param string $alias optional alias for the result column
 * @return object this instance, for method chaining
 */
public function select_sum($select = '', $alias = '')
{
return $this->_max_min_avg_sum($select, $alias, 'SUM');
}
// --------------------------------------------------------------------
/**
 * Processing Function for the four functions above:
 *
 * select_max()
 * select_min()
 * select_avg()
 * select_sum()
 *
 * @param string $select the field to aggregate
 * @param string $alias optional alias; derived from the field name when empty
 * @param string $type one of MAX, MIN, AVG, SUM
 * @return object this instance, for method chaining
 */
protected function _max_min_avg_sum($select = '', $alias = '', $type = 'MAX')
{
// NOTE(review): display_error()/show_error() are assumed to halt execution
// here; if they return, processing falls through with the invalid input.
if ( ! is_string($select) OR $select == '')
{
$this->display_error('db_invalid_query');
}
$type = strtoupper($type);
if ( ! in_array($type, array('MAX', 'MIN', 'AVG', 'SUM')))
{
show_error('Invalid function type: '.$type);
}
if ($alias == '')
{
// Default the alias to the bare column name (strips any table prefix).
$alias = $this->_create_alias_from_table(trim($select));
}
$sql = $type.'('.$this->_protect_identifiers(trim($select)).') AS '.$alias;
$this->ar_select[] = $sql;
if ($this->ar_caching === TRUE)
{
$this->ar_cache_select[] = $sql;
$this->ar_cache_exists[] = 'select';
}
return $this;
}
// --------------------------------------------------------------------
/**
 * Determines the alias name based on the table
 *
 * For a dotted identifier ("database.table") the alias is the final
 * segment; otherwise the item is returned unchanged.
 *
 * @param string $item table identifier, possibly containing dots
 * @return string the derived alias
 */
protected function _create_alias_from_table($item)
{
    if (strpos($item, '.') !== FALSE)
    {
        // end() takes its argument by reference, so the result of explode()
        // must be assigned to a variable first ("Only variables should be
        // passed by reference" notice otherwise).
        $parts = explode('.', $item);
        return end($parts);
    }
    return $item;
}
// --------------------------------------------------------------------
/**
 * DISTINCT
 *
 * Sets a flag which tells the query string compiler to add DISTINCT.
 *
 * @param bool $val TRUE to enable; any non-boolean value also enables it
 * @return object this instance, for method chaining
 */
public function distinct($val = TRUE)
{
    // Non-boolean arguments are coerced to TRUE.
    if (is_bool($val))
    {
        $this->ar_distinct = $val;
    }
    else
    {
        $this->ar_distinct = TRUE;
    }
    return $this;
}
// --------------------------------------------------------------------
/**
 * From
 *
 * Generates the FROM portion of the query.
 *
 * @param mixed $from table name(s): a string (possibly comma separated) or an array
 * @return object this instance, for method chaining
 */
public function from($from)
{
foreach ((array) $from as $val)
{
if (strpos($val, ',') !== FALSE)
{
// A comma separated string contributes each table individually.
foreach (explode(',', $val) as $v)
{
$v = trim($v);
$this->_track_aliases($v);
$this->ar_from[] = $this->_protect_identifiers($v, TRUE, NULL, FALSE);
if ($this->ar_caching === TRUE)
{
$this->ar_cache_from[] = $this->_protect_identifiers($v, TRUE, NULL, FALSE);
$this->ar_cache_exists[] = 'from';
}
}
}
else
{
$val = trim($val);
// Extract any aliases that might exist. We use this information
// in the _protect_identifiers to know whether to add a table prefix
$this->_track_aliases($val);
$this->ar_from[] = $this->_protect_identifiers($val, TRUE, NULL, FALSE);
if ($this->ar_caching === TRUE)
{
$this->ar_cache_from[] = $this->_protect_identifiers($val, TRUE, NULL, FALSE);
$this->ar_cache_exists[] = 'from';
}
}
}
return $this;
}
// --------------------------------------------------------------------
/**
 * Join
 *
 * Generates the JOIN portion of the query.
 *
 * @param string $table the table to join
 * @param string $cond the join condition (e.g. "a.id = b.a_id")
 * @param string $type the type of join (LEFT, RIGHT, OUTER, INNER, LEFT OUTER, RIGHT OUTER)
 * @return object this instance, for method chaining
 */
public function join($table, $cond, $type = '')
{
if ($type != '')
{
// Unrecognised join types silently fall back to a plain JOIN.
$type = strtoupper(trim($type));
if ( ! in_array($type, array('LEFT', 'RIGHT', 'OUTER', 'INNER', 'LEFT OUTER', 'RIGHT OUTER')))
{
$type = '';
}
else
{
$type .= ' ';
}
}
// Extract any aliases that might exist. We use this information
// in the _protect_identifiers to know whether to add a table prefix
$this->_track_aliases($table);
// Strip apart the condition and protect the identifiers
if (preg_match('/([\w\.]+)([\W\s]+)(.+)/', $cond, $match))
{
$match[1] = $this->_protect_identifiers($match[1]);
$match[3] = $this->_protect_identifiers($match[3]);
$cond = $match[1].$match[2].$match[3];
}
// Assemble the JOIN statement
$join = $type.'JOIN '.$this->_protect_identifiers($table, TRUE, NULL, FALSE).' ON '.$cond;
$this->ar_join[] = $join;
if ($this->ar_caching === TRUE)
{
$this->ar_cache_join[] = $join;
$this->ar_cache_exists[] = 'join';
}
return $this;
}
// --------------------------------------------------------------------
/**
 * Where
 *
 * Generates the WHERE portion of the query. Separates
 * multiple calls with AND.
 *
 * @param mixed $key field name (optionally with operator), or array of field => value
 * @param mixed $value comparison value; NULL yields an "IS NULL" test
 * @param bool $escape whether to escape identifiers and values
 * @return object this instance, for method chaining
 */
public function where($key, $value = NULL, $escape = TRUE)
{
return $this->_where($key, $value, 'AND ', $escape);
}
// --------------------------------------------------------------------
/**
 * OR Where
 *
 * Generates the WHERE portion of the query. Separates
 * multiple calls with OR.
 *
 * @param mixed $key field name (optionally with operator), or array of field => value
 * @param mixed $value comparison value; NULL yields an "IS NULL" test
 * @param bool $escape whether to escape identifiers and values
 * @return object this instance, for method chaining
 */
public function or_where($key, $value = NULL, $escape = TRUE)
{
return $this->_where($key, $value, 'OR ', $escape);
}
// --------------------------------------------------------------------
/**
 * Where
 *
 * Called by where() or or_where().
 *
 * @param mixed $key field name (optionally with operator), or array of field => value
 * @param mixed $value comparison value; NULL yields an "IS NULL" test
 * @param string $type boolean connector, 'AND ' or 'OR '
 * @param bool|null $escape non-boolean falls back to the global protect setting
 * @return object this instance, for method chaining
 */
protected function _where($key, $value = NULL, $type = 'AND ', $escape = NULL)
{
if ( ! is_array($key))
{
$key = array($key => $value);
}
// If the escape value was not set we will base it on the global setting
if ( ! is_bool($escape))
{
$escape = $this->_protect_identifiers;
}
foreach ($key as $k => $v)
{
// No connector before the very first condition (cached ones included).
$prefix = (count($this->ar_where) == 0 AND count($this->ar_cache_where) == 0) ? '' : $type;
if (is_null($v) && ! $this->_has_operator($k))
{
// value appears not to have been set, assign the test to IS NULL
$k .= ' IS NULL';
}
if ( ! is_null($v))
{
if ($escape === TRUE)
{
$k = $this->_protect_identifiers($k, FALSE, $escape);
$v = ' '.$this->escape($v);
}
// Only append '=' when the key carries no explicit operator.
if ( ! $this->_has_operator($k))
{
$k .= ' = ';
}
}
else
{
$k = $this->_protect_identifiers($k, FALSE, $escape);
}
$this->ar_where[] = $prefix.$k.$v;
if ($this->ar_caching === TRUE)
{
$this->ar_cache_where[] = $prefix.$k.$v;
$this->ar_cache_exists[] = 'where';
}
}
return $this;
}
// --------------------------------------------------------------------
/**
 * Where_in
 *
 * Generates a WHERE field IN ('item', 'item') SQL query joined with
 * AND if appropriate.
 *
 * @param string $key the field to search
 * @param array $values the values searched on
 * @return object this instance, for method chaining
 */
public function where_in($key = NULL, $values = NULL)
{
return $this->_where_in($key, $values);
}
// --------------------------------------------------------------------
/**
 * Where_in_or
 *
 * Generates a WHERE field IN ('item', 'item') SQL query joined with
 * OR if appropriate.
 *
 * @param string $key the field to search
 * @param array $values the values searched on
 * @return object this instance, for method chaining
 */
public function or_where_in($key = NULL, $values = NULL)
{
return $this->_where_in($key, $values, FALSE, 'OR ');
}
// --------------------------------------------------------------------
/**
 * Where_not_in
 *
 * Generates a WHERE field NOT IN ('item', 'item') SQL query joined
 * with AND if appropriate.
 *
 * @param string $key the field to search
 * @param array $values the values searched on
 * @return object this instance, for method chaining
 */
public function where_not_in($key = NULL, $values = NULL)
{
return $this->_where_in($key, $values, TRUE);
}
// --------------------------------------------------------------------
/**
 * Where_not_in_or
 *
 * Generates a WHERE field NOT IN ('item', 'item') SQL query joined
 * with OR if appropriate.
 *
 * @param string $key the field to search
 * @param array $values the values searched on
 * @return object this instance, for method chaining
 */
public function or_where_not_in($key = NULL, $values = NULL)
{
return $this->_where_in($key, $values, TRUE, 'OR ');
}
// --------------------------------------------------------------------
/**
 * Where_in
 *
 * Called by where_in, where_in_or, where_not_in, where_not_in_or.
 *
 * @param string $key the field to search; NULL is a silent no-op
 * @param array $values the values searched on; NULL is a silent no-op
 * @param boolean $not TRUE for NOT IN, FALSE for IN
 * @param string $type boolean connector, 'AND ' or 'OR '
 * @return object this instance, for method chaining (or nothing on bad input)
 */
protected function _where_in($key = NULL, $values = NULL, $not = FALSE, $type = 'AND ')
{
// NOTE(review): returns without $this here, breaking the chain on bad input.
if ($key === NULL OR $values === NULL)
{
return;
}
if ( ! is_array($values))
{
$values = array($values);
}
$not = ($not) ? ' NOT' : '';
foreach ($values as $value)
{
$this->ar_wherein[] = $this->escape($value);
}
$prefix = (count($this->ar_where) == 0) ? '' : $type;
$where_in = $prefix . $this->_protect_identifiers($key) . $not . " IN (" . implode(", ", $this->ar_wherein) . ") ";
$this->ar_where[] = $where_in;
if ($this->ar_caching === TRUE)
{
$this->ar_cache_where[] = $where_in;
$this->ar_cache_exists[] = 'where';
}
// reset the array for multiple calls
$this->ar_wherein = array();
return $this;
}
// --------------------------------------------------------------------
/**
 * Like
 *
 * Generates a %LIKE% portion of the query. Separates
 * multiple calls with AND.
 *
 * @param mixed $field field name, or array of field => match
 * @param mixed $match the value to match
 * @param string $side wildcard placement: 'both', 'before', 'after' or 'none'
 * @return object this instance, for method chaining
 */
public function like($field, $match = '', $side = 'both')
{
return $this->_like($field, $match, 'AND ', $side);
}
// --------------------------------------------------------------------
/**
 * Not Like
 *
 * Generates a NOT LIKE portion of the query. Separates
 * multiple calls with AND.
 *
 * @param mixed $field field name, or array of field => match
 * @param mixed $match the value to match
 * @param string $side wildcard placement: 'both', 'before', 'after' or 'none'
 * @return object this instance, for method chaining
 */
public function not_like($field, $match = '', $side = 'both')
{
return $this->_like($field, $match, 'AND ', $side, 'NOT');
}
// --------------------------------------------------------------------
/**
 * OR Like
 *
 * Generates a %LIKE% portion of the query. Separates
 * multiple calls with OR.
 *
 * @param mixed $field field name, or array of field => match
 * @param mixed $match the value to match
 * @param string $side wildcard placement: 'both', 'before', 'after' or 'none'
 * @return object this instance, for method chaining
 */
public function or_like($field, $match = '', $side = 'both')
{
return $this->_like($field, $match, 'OR ', $side);
}
// --------------------------------------------------------------------
/**
 * OR Not Like
 *
 * Generates a NOT LIKE portion of the query. Separates
 * multiple calls with OR.
 *
 * @param mixed $field field name, or array of field => match
 * @param mixed $match the value to match
 * @param string $side wildcard placement: 'both', 'before', 'after' or 'none'
 * @return object this instance, for method chaining
 */
public function or_not_like($field, $match = '', $side = 'both')
{
return $this->_like($field, $match, 'OR ', $side, 'NOT');
}
// --------------------------------------------------------------------
/**
 * Like
 *
 * Called by like() or orlike().
 *
 * @param mixed $field field name, or array of field => match
 * @param mixed $match the value to match
 * @param string $type boolean connector, 'AND ' or 'OR '
 * @param string $side wildcard placement: 'both', 'before', 'after' or 'none'
 * @param string $not '' for LIKE, 'NOT' for NOT LIKE
 * @return object this instance, for method chaining
 */
protected function _like($field, $match = '', $type = 'AND ', $side = 'both', $not = '')
{
if ( ! is_array($field))
{
$field = array($field => $match);
}
foreach ($field as $k => $v)
{
$k = $this->_protect_identifiers($k);
$prefix = (count($this->ar_like) == 0) ? '' : $type;
// Escape LIKE wildcard characters inside the match value.
$v = $this->escape_like_str($v);
if ($side == 'none')
{
$like_statement = $prefix." $k $not LIKE '{$v}'";
}
elseif ($side == 'before')
{
$like_statement = $prefix." $k $not LIKE '%{$v}'";
}
elseif ($side == 'after')
{
$like_statement = $prefix." $k $not LIKE '{$v}%'";
}
else
{
$like_statement = $prefix." $k $not LIKE '%{$v}%'";
}
// some platforms require an escape sequence definition for LIKE wildcards
if ($this->_like_escape_str != '')
{
$like_statement = $like_statement.sprintf($this->_like_escape_str, $this->_like_escape_chr);
}
$this->ar_like[] = $like_statement;
if ($this->ar_caching === TRUE)
{
$this->ar_cache_like[] = $like_statement;
$this->ar_cache_exists[] = 'like';
}
}
return $this;
}
// --------------------------------------------------------------------
/**
 * GROUP BY
 *
 * @param string|array $by comma separated column list, or an array of columns
 * @return object this instance, for method chaining
 */
public function group_by($by)
{
    // Normalise a comma separated string into an array of columns.
    if (is_string($by))
    {
        $by = explode(',', $by);
    }
    foreach ($by as $column)
    {
        $column = trim($column);
        if ($column == '')
        {
            continue;
        }
        $this->ar_groupby[] = $this->_protect_identifiers($column);
        if ($this->ar_caching === TRUE)
        {
            $this->ar_cache_groupby[] = $this->_protect_identifiers($column);
            $this->ar_cache_exists[] = 'groupby';
        }
    }
    return $this;
}
// --------------------------------------------------------------------
/**
 * Sets the HAVING value
 *
 * Separates multiple calls with AND.
 *
 * @param string|array $key field (optionally with operator), or array of field => value
 * @param string $value the comparison value
 * @param bool $escape whether to escape identifiers
 * @return object this instance, for method chaining
 */
public function having($key, $value = '', $escape = TRUE)
{
return $this->_having($key, $value, 'AND ', $escape);
}
// --------------------------------------------------------------------
/**
 * Sets the OR HAVING value
 *
 * Separates multiple calls with OR.
 *
 * @param string|array $key field (optionally with operator), or array of field => value
 * @param string $value the comparison value
 * @param bool $escape whether to escape identifiers
 * @return object this instance, for method chaining
 */
public function or_having($key, $value = '', $escape = TRUE)
{
return $this->_having($key, $value, 'OR ', $escape);
}
// --------------------------------------------------------------------
/**
 * Sets the HAVING values
 *
 * Called by having() or or_having().
 *
 * @param string|array $key field (optionally with operator), or array of field => value
 * @param string $value the comparison value; empty string adds no value
 * @param string $type boolean connector, 'AND ' or 'OR '
 * @param bool $escape whether to escape identifiers
 * @return object this instance, for method chaining
 */
protected function _having($key, $value = '', $type = 'AND ', $escape = TRUE)
{
if ( ! is_array($key))
{
$key = array($key => $value);
}
foreach ($key as $k => $v)
{
$prefix = (count($this->ar_having) == 0) ? '' : $type;
if ($escape === TRUE)
{
$k = $this->_protect_identifiers($k);
}
// Only append '=' when the key carries no explicit operator.
if ( ! $this->_has_operator($k))
{
$k .= ' = ';
}
if ($v != '')
{
$v = ' '.$this->escape($v);
}
$this->ar_having[] = $prefix.$k.$v;
if ($this->ar_caching === TRUE)
{
$this->ar_cache_having[] = $prefix.$k.$v;
$this->ar_cache_exists[] = 'having';
}
}
return $this;
}
// --------------------------------------------------------------------
/**
 * Sets the ORDER BY value
 *
 * @param string $orderby column name(s), comma separated when multiple
 * @param string $direction 'asc', 'desc' or 'random'
 * @return object this instance, for method chaining
 */
public function order_by($orderby, $direction = '')
{
if (strtolower($direction) == 'random')
{
$orderby = ''; // Random results don't need a field name
$direction = $this->_random_keyword;
}
elseif (trim($direction) != '')
{
// Anything other than ASC/DESC silently falls back to ASC.
$direction = (in_array(strtoupper(trim($direction)), array('ASC', 'DESC'), TRUE)) ? ' '.$direction : ' ASC';
}
if (strpos($orderby, ',') !== FALSE)
{
// Protect each comma separated part, except known table aliases.
$temp = array();
foreach (explode(',', $orderby) as $part)
{
$part = trim($part);
if ( ! in_array($part, $this->ar_aliased_tables))
{
$part = $this->_protect_identifiers(trim($part));
}
$temp[] = $part;
}
$orderby = implode(', ', $temp);
}
else if ($direction != $this->_random_keyword)
{
$orderby = $this->_protect_identifiers($orderby);
}
$orderby_statement = $orderby.$direction;
$this->ar_orderby[] = $orderby_statement;
if ($this->ar_caching === TRUE)
{
$this->ar_cache_orderby[] = $orderby_statement;
$this->ar_cache_exists[] = 'orderby';
}
return $this;
}
// --------------------------------------------------------------------
/**
 * Sets the LIMIT value
 *
 * @param integer $value the limit value
 * @param integer $offset the offset value; an empty value leaves it untouched
 * @return object this instance, for method chaining
 */
public function limit($value, $offset = '')
{
    $this->ar_limit = (int) $value;
    // An "empty" offset (loose comparison against '') leaves the current offset as-is.
    if ($offset == '')
    {
        return $this;
    }
    $this->ar_offset = (int) $offset;
    return $this;
}
// --------------------------------------------------------------------
/**
 * Sets the OFFSET value
 *
 * @param integer $offset the offset value (stored as given, no cast)
 * @return object this instance, for method chaining
 */
public function offset($offset)
{
$this->ar_offset = $offset;
return $this;
}
// --------------------------------------------------------------------
/**
 * The "set" function. Allows key/value pairs to be set for inserting or updating.
 *
 * @param mixed $key field name, an associative array, or an object of field => value
 * @param string $value the value, when $key is a single field name
 * @param boolean $escape FALSE to store values verbatim; anything else escapes them
 * @return object this instance, for method chaining
 */
public function set($key, $value = '', $escape = TRUE)
{
    // Objects are flattened to arrays first; scalars become a one-pair array.
    $key = $this->_object_to_array($key);
    if ( ! is_array($key))
    {
        $key = array($key => $value);
    }
    foreach ($key as $field => $data)
    {
        if ($escape !== FALSE)
        {
            $this->ar_set[$this->_protect_identifiers($field, FALSE, TRUE)] = $this->escape($data);
        }
        else
        {
            $this->ar_set[$this->_protect_identifiers($field)] = $data;
        }
    }
    return $this;
}
// --------------------------------------------------------------------
/**
 * Get
 *
 * Compiles the select statement based on the other functions called
 * and runs the query.
 *
 * @param string $table the table to query (optional if from() was called)
 * @param string $limit the limit clause
 * @param string $offset the offset clause
 * @return object the query result object
 */
public function get($table = '', $limit = null, $offset = null)
{
if ($table != '')
{
$this->_track_aliases($table);
$this->from($table);
}
if ( ! is_null($limit))
{
$this->limit($limit, $offset);
}
$sql = $this->_compile_select();
$result = $this->query($sql);
// Builder state is cleared after every executed query.
$this->_reset_select();
return $result;
}
/**
 * "Count All Results" query
 *
 * Generates a platform-specific query string that counts all records
 * returned by an Active Record query.
 *
 * @param string $table the table to count from (optional if from() was called)
 * @return int the number of matching rows
 */
public function count_all_results($table = '')
{
if ($table != '')
{
$this->_track_aliases($table);
$this->from($table);
}
// Replace the select list with a platform-specific COUNT(*) AS numrows.
$sql = $this->_compile_select($this->_count_string . $this->_protect_identifiers('numrows'));
$query = $this->query($sql);
$this->_reset_select();
if ($query->num_rows() == 0)
{
return 0;
}
$row = $query->row();
return (int) $row->numrows;
}
// --------------------------------------------------------------------
/**
 * Get_Where
 *
 * Allows the where clause, limit and offset to be added directly.
 *
 * @param string $table the table to query (optional if from() was called)
 * @param mixed $where the where clause
 * @param string $limit the limit clause
 * @param string $offset the offset clause
 * @return object the query result object
 */
public function get_where($table = '', $where = null, $limit = null, $offset = null)
{
if ($table != '')
{
$this->from($table);
}
if ( ! is_null($where))
{
$this->where($where);
}
if ( ! is_null($limit))
{
$this->limit($limit, $offset);
}
$sql = $this->_compile_select();
$result = $this->query($sql);
$this->_reset_select();
return $result;
}
// --------------------------------------------------------------------
/**
 * Insert_Batch
 *
 * Compiles batch insert strings and runs the queries.
 *
 * @param string $table the table to insert into (optional if from() was called)
 * @param array $set an array of associative arrays of insert values
 * @return bool TRUE on success, FALSE (or an error) otherwise
 */
public function insert_batch($table = '', $set = NULL)
{
if ( ! is_null($set))
{
$this->set_insert_batch($set);
}
if (count($this->ar_set) == 0)
{
if ($this->db_debug)
{
//No valid data array. Folds in cases where keys and values did not match up
return $this->display_error('db_must_use_set');
}
return FALSE;
}
if ($table == '')
{
if ( ! isset($this->ar_from[0]))
{
if ($this->db_debug)
{
return $this->display_error('db_must_set_table');
}
return FALSE;
}
$table = $this->ar_from[0];
}
// Batch this baby
// Rows are inserted in chunks of 100 to keep each statement bounded.
for ($i = 0, $total = count($this->ar_set); $i < $total; $i = $i + 100)
{
$sql = $this->_insert_batch($this->_protect_identifiers($table, TRUE, NULL, FALSE), $this->ar_keys, array_slice($this->ar_set, $i, 100));
//echo $sql;
$this->query($sql);
}
$this->_reset_write();
return TRUE;
}
// --------------------------------------------------------------------
/**
 * The "set_insert_batch" function. Allows key/value pairs to be set for batch inserts.
 *
 * @param mixed $key array of row arrays/objects, or a single field name
 * @param string $value the value, when $key is a single field name
 * @param boolean $escape FALSE to store values verbatim; anything else escapes them
 * @return object this instance, for method chaining (or nothing on mismatched keys)
 */
public function set_insert_batch($key, $value = '', $escape = TRUE)
{
$key = $this->_object_to_array_batch($key);
if ( ! is_array($key))
{
$key = array($key => $value);
}
// All rows must share exactly the same set of column keys.
$keys = array_keys(current($key));
sort($keys);
foreach ($key as $row)
{
if (count(array_diff($keys, array_keys($row))) > 0 OR count(array_diff(array_keys($row), $keys)) > 0)
{
// batch function above returns an error on an empty array
$this->ar_set[] = array();
return;
}
ksort($row); // puts $row in the same order as our keys
if ($escape === FALSE)
{
$this->ar_set[] = '('.implode(',', $row).')';
}
else
{
$clean = array();
foreach ($row as $value)
{
$clean[] = $this->escape($value);
}
$this->ar_set[] = '('.implode(',', $clean).')';
}
}
foreach ($keys as $k)
{
$this->ar_keys[] = $this->_protect_identifiers($k);
}
return $this;
}
// --------------------------------------------------------------------
/**
 * Insert
 *
 * Compiles an insert string and runs the query.
 *
 * @param string $table the table to insert data into (optional if from() was called)
 * @param array $set an associative array of insert values
 * @return object the query result, or FALSE (or an error) on missing data
 */
function insert($table = '', $set = NULL)
{
if ( ! is_null($set))
{
$this->set($set);
}
if (count($this->ar_set) == 0)
{
if ($this->db_debug)
{
return $this->display_error('db_must_use_set');
}
return FALSE;
}
if ($table == '')
{
// Fall back to the first table registered via from().
if ( ! isset($this->ar_from[0]))
{
if ($this->db_debug)
{
return $this->display_error('db_must_set_table');
}
return FALSE;
}
$table = $this->ar_from[0];
}
$sql = $this->_insert($this->_protect_identifiers($table, TRUE, NULL, FALSE), array_keys($this->ar_set), array_values($this->ar_set));
$this->_reset_write();
return $this->query($sql);
}
// --------------------------------------------------------------------
/**
 * Replace
 *
 * Compiles an replace into string and runs the query.
 *
 * @param string $table the table to replace data into (optional if from() was called)
 * @param array $set an associative array of insert values
 * @return object the query result, or FALSE (or an error) on missing data
 */
public function replace($table = '', $set = NULL)
{
if ( ! is_null($set))
{
$this->set($set);
}
if (count($this->ar_set) == 0)
{
if ($this->db_debug)
{
return $this->display_error('db_must_use_set');
}
return FALSE;
}
if ($table == '')
{
// Fall back to the first table registered via from().
if ( ! isset($this->ar_from[0]))
{
if ($this->db_debug)
{
return $this->display_error('db_must_set_table');
}
return FALSE;
}
$table = $this->ar_from[0];
}
$sql = $this->_replace($this->_protect_identifiers($table, TRUE, NULL, FALSE), array_keys($this->ar_set), array_values($this->ar_set));
$this->_reset_write();
return $this->query($sql);
}
// --------------------------------------------------------------------
/**
 * Update
 *
 * Compiles an update string and runs the query.
 *
 * @param string $table the table to update (optional if from() was called)
 * @param array $set an associative array of update values
 * @param mixed $where the where clause
 * @param mixed $limit the limit clause
 * @return object the query result, or FALSE (or an error) on missing data
 */
public function update($table = '', $set = NULL, $where = NULL, $limit = NULL)
{
// Combine any cached components with the current statements
$this->_merge_cache();
if ( ! is_null($set))
{
$this->set($set);
}
if (count($this->ar_set) == 0)
{
if ($this->db_debug)
{
return $this->display_error('db_must_use_set');
}
return FALSE;
}
if ($table == '')
{
// Fall back to the first table registered via from().
if ( ! isset($this->ar_from[0]))
{
if ($this->db_debug)
{
return $this->display_error('db_must_set_table');
}
return FALSE;
}
$table = $this->ar_from[0];
}
if ($where != NULL)
{
$this->where($where);
}
if ($limit != NULL)
{
$this->limit($limit);
}
$sql = $this->_update($this->_protect_identifiers($table, TRUE, NULL, FALSE), $this->ar_set, $this->ar_where, $this->ar_orderby, $this->ar_limit);
$this->_reset_write();
return $this->query($sql);
}
// --------------------------------------------------------------------
/**
 * Update_Batch
 *
 * Compiles an update string and runs the query.
 *
 * @param string $table the table to update (optional if from() was called)
 * @param array $set an array of associative arrays of update values
 * @param string $index the where key each row must contain
 * @return object FALSE (or an error) on bad input; otherwise no return value
 */
public function update_batch($table = '', $set = NULL, $index = NULL)
{
// Combine any cached components with the current statements
$this->_merge_cache();
if (is_null($index))
{
if ($this->db_debug)
{
return $this->display_error('db_must_use_index');
}
return FALSE;
}
if ( ! is_null($set))
{
$this->set_update_batch($set, $index);
}
if (count($this->ar_set) == 0)
{
if ($this->db_debug)
{
return $this->display_error('db_must_use_set');
}
return FALSE;
}
if ($table == '')
{
if ( ! isset($this->ar_from[0]))
{
if ($this->db_debug)
{
return $this->display_error('db_must_set_table');
}
return FALSE;
}
$table = $this->ar_from[0];
}
// Batch this baby
// Rows are updated in chunks of 100 to keep each statement bounded.
for ($i = 0, $total = count($this->ar_set); $i < $total; $i = $i + 100)
{
$sql = $this->_update_batch($this->_protect_identifiers($table, TRUE, NULL, FALSE), array_slice($this->ar_set, $i, 100), $this->_protect_identifiers($index), $this->ar_where);
$this->query($sql);
}
// NOTE(review): unlike update(), this method returns nothing on success.
$this->_reset_write();
}
// --------------------------------------------------------------------
/**
 * The "set_update_batch" function. Allows key/value pairs to be set for batch updating.
 *
 * @param array $key array of row arrays/objects (field => value)
 * @param string $index the key field each row must contain (the update WHERE key)
 * @param boolean $escape FALSE to store values verbatim; anything else escapes them
 * @return object this instance, for method chaining (or an error on a missing index)
 */
public function set_update_batch($key, $index = '', $escape = TRUE)
{
    $key = $this->_object_to_array_batch($key);
    if ( ! is_array($key))
    {
        // @todo error
    }
    foreach ($key as $k => $v)
    {
        $index_set = FALSE;
        $clean = array();
        foreach ($v as $k2 => $v2)
        {
            // Track whether this row contains the index (WHERE) field.
            // (A dead "$not" accumulator that was never read has been removed.)
            if ($k2 == $index)
            {
                $index_set = TRUE;
            }
            if ($escape === FALSE)
            {
                $clean[$this->_protect_identifiers($k2)] = $v2;
            }
            else
            {
                $clean[$this->_protect_identifiers($k2)] = $this->escape($v2);
            }
        }
        if ($index_set == FALSE)
        {
            // Every row must carry the index field used as the update key.
            return $this->display_error('db_batch_missing_index');
        }
        $this->ar_set[] = $clean;
    }
    return $this;
}
// --------------------------------------------------------------------
/**
 * Empty Table
 *
 * Compiles a delete string and runs "DELETE FROM table".
 *
 * @param string $table the table to empty (optional if from() was called)
 * @return object the query result, or FALSE (or an error) when no table is set
 */
public function empty_table($table = '')
{
if ($table == '')
{
if ( ! isset($this->ar_from[0]))
{
if ($this->db_debug)
{
return $this->display_error('db_must_set_table');
}
return FALSE;
}
$table = $this->ar_from[0];
}
else
{
$table = $this->_protect_identifiers($table, TRUE, NULL, FALSE);
}
$sql = $this->_delete($table);
$this->_reset_write();
return $this->query($sql);
}
// --------------------------------------------------------------------
/**
 * Truncate
 *
 * Compiles a truncate string and runs the query.
 * If the database does not support the truncate() command
 * this function maps to "DELETE FROM table".
 *
 * @param string $table the table to truncate (optional if from() was called)
 * @return object the query result, or FALSE (or an error) when no table is set
 */
public function truncate($table = '')
{
if ($table == '')
{
if ( ! isset($this->ar_from[0]))
{
if ($this->db_debug)
{
return $this->display_error('db_must_set_table');
}
return FALSE;
}
$table = $this->ar_from[0];
}
else
{
$table = $this->_protect_identifiers($table, TRUE, NULL, FALSE);
}
$sql = $this->_truncate($table);
$this->_reset_write();
return $this->query($sql);
}
// --------------------------------------------------------------------
/**
 * Delete
 *
 * Compiles a delete string and runs the query.  Accepts a single table
 * name or an array of tables, in which case it recurses once per table
 * and defers the write-state reset until all deletes have been issued.
 *
 * @param	mixed	the table(s) to delete from. String or array
 * @param	mixed	the where clause
 * @param	mixed	the limit clause
 * @param	boolean	whether to reset the write clauses afterwards
 * @return	object	query result; NULL for the multi-table case; FALSE on error
 */
public function delete($table = '', $where = '', $limit = NULL, $reset_data = TRUE)
{
	// Combine any cached components with the current statements
	$this->_merge_cache();

	if ($table == '')
	{
		// Fall back to the table previously set via from()
		if ( ! isset($this->ar_from[0]))
		{
			if ($this->db_debug)
			{
				return $this->display_error('db_must_set_table');
			}
			return FALSE;
		}
		$table = $this->ar_from[0];
	}
	elseif (is_array($table))
	{
		// Recurse for each table, passing FALSE so the shared WHERE/LIKE
		// state survives until every table has been processed
		foreach ($table as $single_table)
		{
			$this->delete($single_table, $where, $limit, FALSE);
		}
		$this->_reset_write();
		return;
	}
	else
	{
		$table = $this->_protect_identifiers($table, TRUE, NULL, FALSE);
	}

	if ($where != '')
	{
		$this->where($where);
	}

	if ($limit != NULL)
	{
		$this->limit($limit);
	}

	// Refuse to run an unconditional DELETE: a WHERE, WHERE IN or LIKE clause is required
	if (count($this->ar_where) == 0 && count($this->ar_wherein) == 0 && count($this->ar_like) == 0)
	{
		if ($this->db_debug)
		{
			return $this->display_error('db_del_must_use_where');
		}
		return FALSE;
	}

	$sql = $this->_delete($table, $this->ar_where, $this->ar_like, $this->ar_limit);

	if ($reset_data)
	{
		$this->_reset_write();
	}

	return $this->query($sql);
}
// --------------------------------------------------------------------
/**
 * DB Prefix
 *
 * Prepends the configured database prefix, if one exists, to a table name
 *
 * @param	string	the table
 * @return	string	the prefixed table name
 */
public function dbprefix($table = '')
{
	if ($table == '')
	{
		// An empty table name is a usage error; note that execution falls
		// through to the return below unless display_error() terminates
		$this->display_error('db_table_name_required');
	}

	return $this->dbprefix.$table;
}
// --------------------------------------------------------------------
/**
 * Set DB Prefix
 *
 * Replaces the DB prefix with a new value without needing to reconnect
 *
 * @param	string	the prefix
 * @return	string	the newly assigned prefix
 */
public function set_dbprefix($prefix = '')
{
	$this->dbprefix = $prefix;
	return $this->dbprefix;
}
// --------------------------------------------------------------------
/**
 * Track Aliases
 *
 * Used to track SQL statements written with aliased tables, so that
 * identifier protection can recognize the aliases later.
 *
 * @param	mixed	The table to inspect (string, comma list, or array)
 * @return	string
 */
protected function _track_aliases($table)
{
	if (is_array($table))
	{
		// Recurse over each table individually
		foreach ($table as $t)
		{
			$this->_track_aliases($t);
		}
		return;
	}

	// Does the string contain a comma?  If so, we need to separate
	// the string into discreet statements
	if (strpos($table, ',') !== FALSE)
	{
		return $this->_track_aliases(explode(',', $table));
	}

	// if a table alias is used we can recognize it by a space
	if (strpos($table, " ") !== FALSE)
	{
		// if the alias is written with the AS keyword, remove it
		$table = preg_replace('/\s+AS\s+/i', ' ', $table);

		// Grab the alias (everything after the last space)
		$table = trim(strrchr($table, " "));

		// Store the alias, if it doesn't already exist
		if ( ! in_array($table, $this->ar_aliased_tables))
		{
			$this->ar_aliased_tables[] = $table;
		}
	}
}
// --------------------------------------------------------------------
/**
 * Compile the SELECT statement
 *
 * Generates a query string based on which functions were used.
 * Should not be called directly. The get() function calls it.
 *
 * @param	mixed	$select_override	FALSE, or a string replacing the SELECT clause
 * @return	string	the fully compiled SELECT statement
 */
protected function _compile_select($select_override = FALSE)
{
	// Combine any cached components with the current statements
	$this->_merge_cache();

	// ----------------------------------------------------------------

	// Write the "select" portion of the query
	if ($select_override !== FALSE)
	{
		$sql = $select_override;
	}
	else
	{
		$sql = ( ! $this->ar_distinct) ? 'SELECT ' : 'SELECT DISTINCT ';

		if (count($this->ar_select) == 0)
		{
			$sql .= '*';
		}
		else
		{
			// Cycle through the "select" portion of the query and prep each column name.
			// The reason we protect identifiers here rather then in the select() function
			// is because until the user calls the from() function we don't know if there are aliases
			foreach ($this->ar_select as $key => $val)
			{
				$no_escape = isset($this->ar_no_escape[$key]) ? $this->ar_no_escape[$key] : NULL;
				$this->ar_select[$key] = $this->_protect_identifiers($val, FALSE, $no_escape);
			}

			$sql .= implode(', ', $this->ar_select);
		}
	}

	// ----------------------------------------------------------------

	// Write the "FROM" portion of the query
	if (count($this->ar_from) > 0)
	{
		$sql .= "\nFROM ";
		$sql .= $this->_from_tables($this->ar_from);
	}

	// ----------------------------------------------------------------

	// Write the "JOIN" portion of the query
	if (count($this->ar_join) > 0)
	{
		$sql .= "\n";
		$sql .= implode("\n", $this->ar_join);
	}

	// ----------------------------------------------------------------

	// Write the "WHERE" portion of the query
	if (count($this->ar_where) > 0 OR count($this->ar_like) > 0)
	{
		$sql .= "\nWHERE ";
	}

	// Appends nothing when ar_where is empty, so this is safe unconditionally
	$sql .= implode("\n", $this->ar_where);

	// ----------------------------------------------------------------

	// Write the "LIKE" portion of the query
	if (count($this->ar_like) > 0)
	{
		if (count($this->ar_where) > 0)
		{
			$sql .= "\nAND ";
		}

		$sql .= implode("\n", $this->ar_like);
	}

	// ----------------------------------------------------------------

	// Write the "GROUP BY" portion of the query
	if (count($this->ar_groupby) > 0)
	{
		$sql .= "\nGROUP BY ";
		$sql .= implode(', ', $this->ar_groupby);
	}

	// ----------------------------------------------------------------

	// Write the "HAVING" portion of the query
	if (count($this->ar_having) > 0)
	{
		$sql .= "\nHAVING ";
		$sql .= implode("\n", $this->ar_having);
	}

	// ----------------------------------------------------------------

	// Write the "ORDER BY" portion of the query
	if (count($this->ar_orderby) > 0)
	{
		$sql .= "\nORDER BY ";
		$sql .= implode(', ', $this->ar_orderby);

		if ($this->ar_order !== FALSE)
		{
			$sql .= ($this->ar_order == 'desc') ? ' DESC' : ' ASC';
		}
	}

	// ----------------------------------------------------------------

	// Write the "LIMIT" portion of the query
	if (is_numeric($this->ar_limit))
	{
		$sql .= "\n";
		$sql = $this->_limit($sql, $this->ar_limit, $this->ar_offset);
	}

	return $sql;
}
// --------------------------------------------------------------------
/**
 * Object to Array
 *
 * Takes an object as input and converts its scalar public properties
 * to array key/vals; non-objects are passed through unchanged.
 *
 * @param	object
 * @return	array
 */
public function _object_to_array($object)
{
	if ( ! is_object($object))
	{
		return $object;
	}

	$array = array();

	foreach (get_object_vars($object) as $property => $value)
	{
		// Skip nested structures and the internal '_parent_name' key
		if (is_object($value) OR is_array($value) OR $property == '_parent_name')
		{
			continue;
		}

		$array[$property] = $value;
	}

	return $array;
}
// --------------------------------------------------------------------
/**
 * Object to Array (batch variant)
 *
 * Takes an object whose properties are column-oriented lists and pivots
 * them into an array of row arrays keyed by property name.
 *
 * @param	object
 * @return	array
 */
public function _object_to_array_batch($object)
{
	if ( ! is_object($object))
	{
		return $object;
	}

	$array = array();
	$out = get_object_vars($object);

	foreach (array_keys($out) as $field)
	{
		// The internal '_parent_name' key is not part of the data set
		if ($field == '_parent_name')
		{
			continue;
		}

		$row = 0;
		foreach ($out[$field] as $value)
		{
			$array[$row++][$field] = $value;
		}
	}

	return $array;
}
// --------------------------------------------------------------------
/**
 * Start Cache
 *
 * Starts AR caching; while enabled, query clauses are also recorded in
 * the ar_cache_* containers (see _merge_cache / flush_cache).
 *
 * @return	void
 */
public function start_cache()
{
	$this->ar_caching = TRUE;
}
// --------------------------------------------------------------------
/**
 * Stop Cache
 *
 * Stops AR caching; the already-cached clauses are kept until flush_cache().
 *
 * @return	void
 */
public function stop_cache()
{
	$this->ar_caching = FALSE;
}
// --------------------------------------------------------------------
/**
 * Flush Cache
 *
 * Empties the AR cache by resetting every ar_cache_* container.
 *
 * @access	public
 * @return	void
 */
public function flush_cache()
{
	$this->_reset_run(array(
		'ar_cache_select'	=> array(),
		'ar_cache_from'		=> array(),
		'ar_cache_join'		=> array(),
		'ar_cache_where'	=> array(),
		'ar_cache_like'		=> array(),
		'ar_cache_groupby'	=> array(),
		'ar_cache_having'	=> array(),
		'ar_cache_orderby'	=> array(),
		'ar_cache_set'		=> array(),
		'ar_cache_exists'	=> array(),
		'ar_cache_no_escape'	=> array()
	));
}
// --------------------------------------------------------------------
/**
 * Merge Cache
 *
 * When called, this function merges any cached AR arrays with
 * locally called ones.  ar_cache_exists lists which clause types
 * ('select', 'from', ...) currently hold cached entries.
 *
 * @return	void
 */
protected function _merge_cache()
{
	if (count($this->ar_cache_exists) == 0)
	{
		return;
	}

	foreach ($this->ar_cache_exists as $val)
	{
		// Build the matching property names, e.g. 'ar_where' / 'ar_cache_where'
		$ar_variable	= 'ar_'.$val;
		$ar_cache_var	= 'ar_cache_'.$val;

		if (count($this->$ar_cache_var) == 0)
		{
			continue;
		}

		// Cached entries come first; duplicates are collapsed
		$this->$ar_variable = array_unique(array_merge($this->$ar_cache_var, $this->$ar_variable));
	}

	// If we are "protecting identifiers" we need to examine the "from"
	// portion of the query to determine if there are any aliases
	if ($this->_protect_identifiers === TRUE AND count($this->ar_cache_from) > 0)
	{
		$this->_track_aliases($this->ar_from);
	}

	$this->ar_no_escape = $this->ar_cache_no_escape;
}
// --------------------------------------------------------------------
/**
 * Resets the listed Active Record properties to the supplied defaults.
 * Properties named in ar_store_array are preserved across resets.
 *
 * @param	array	map of property name => default value
 * @return	void
 */
protected function _reset_run($ar_reset_items)
{
	foreach ($ar_reset_items as $property => $default_value)
	{
		// Persistent properties are never cleared
		if (in_array($property, $this->ar_store_array))
		{
			continue;
		}

		$this->$property = $default_value;
	}
}
// --------------------------------------------------------------------
/**
 * Resets the active record values. Called by the get() function
 * after a read query has been compiled and run.
 *
 * @return	void
 */
protected function _reset_select()
{
	$ar_reset_items = array(
		'ar_select'			=> array(),
		'ar_from'			=> array(),
		'ar_join'			=> array(),
		'ar_where'			=> array(),
		'ar_like'			=> array(),
		'ar_groupby'		=> array(),
		'ar_having'			=> array(),
		'ar_orderby'		=> array(),
		'ar_wherein'		=> array(),
		'ar_aliased_tables'	=> array(),
		'ar_no_escape'		=> array(),
		'ar_distinct'		=> FALSE,
		'ar_limit'			=> FALSE,
		'ar_offset'			=> FALSE,
		'ar_order'			=> FALSE,
	);

	$this->_reset_run($ar_reset_items);
}
// --------------------------------------------------------------------
/**
 * Resets the active record "write" values.
 *
 * Called by the insert() update() insert_batch() update_batch() and delete() functions
 * after a write query has been compiled.
 *
 * @return	void
 */
protected function _reset_write()
{
	$ar_reset_items = array(
		'ar_set'		=> array(),
		'ar_from'		=> array(),
		'ar_where'		=> array(),
		'ar_like'		=> array(),
		'ar_orderby'	=> array(),
		'ar_keys'		=> array(),
		'ar_limit'		=> FALSE,
		'ar_order'		=> FALSE
	);

	$this->_reset_run($ar_reset_items);
}
}
/* End of file DB_active_rec.php */
/* Location: ./system/database/DB_active_rec.php */
|
apache-2.0
|
tectronics/creditsuite
|
1.6/src/org/drip/analytics/creator/CreditCurveBuilder.java
|
8846
|
package org.drip.analytics.creator;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*!
* Copyright (C) 2012 Lakshmi Krishnamurthy
*
* This file is part of CreditAnalytics, a free-software/open-source library for fixed income analysts and
* developers - http://www.credit-trader.org
*
* CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special
* focus towards the needs of the bonds and credit products community.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * This class contains the baseline credit curve builder object. It contains static functions that build
 * 	bootstrapped and other types of credit curve from differing types of inputs.
 *
 * @author Lakshmi Krishnamurthy
 */

public class CreditCurveBuilder {

	/**
	 * Creates a CreditCurve instance from a single node hazard rate
	 * 
	 * @param dblStartDate Curve epoch date
	 * @param strName Credit Curve Name
	 * @param dblHazardRate Curve hazard rate
	 * @param dblRecovery Curve recovery
	 * 
	 * @return CreditCurve instance, or null on invalid input / construction failure
	 */

	public static final org.drip.analytics.definition.CreditCurve FromFlatHazard (
		final double dblStartDate,
		final java.lang.String strName,
		final double dblHazardRate,
		final double dblRecovery)
	{
		if (java.lang.Double.isNaN (dblStartDate) || java.lang.Double.isNaN (dblHazardRate) ||
			java.lang.Double.isNaN (dblRecovery))
			return null;

		// Flat curve: a single hazard/recovery node anchored at the epoch date
		double[] adblHazard = new double[1];
		double[] adblRecovery = new double[1];
		double[] adblHazardDate = new double[1];
		double[] adblRecoveryDate = new double[1];
		adblHazard[0] = dblHazardRate;
		adblRecovery[0] = dblRecovery;
		adblHazardDate[0] = dblStartDate;
		adblRecoveryDate[0] = dblStartDate;

		try {
			return new org.drip.analytics.curve.CalibratedCreditCurve (dblStartDate, strName, adblHazard,
				adblHazardDate, adblRecovery, adblRecoveryDate, java.lang.Double.NaN);
		} catch (java.lang.Exception e) {
			e.printStackTrace();
		}

		return null;
	}

	/**
	 * Creates a CreditCurve instance from the input array of survival probabilities
	 * 
	 * @param dblStartDate Start Date
	 * @param strName Credit Curve Name
	 * @param adblSurvivalDate Array of dates (strictly increasing, after the start date)
	 * @param adblSurvivalProbability Array of survival probabilities (strictly decreasing from 1)
	 * @param dblRecovery Recovery
	 * 
	 * @return CreditCurve instance, or null on invalid input / construction failure
	 */

	public static final org.drip.analytics.definition.CreditCurve FromSurvival (
		final double dblStartDate,
		final java.lang.String strName,
		final double[] adblSurvivalDate,
		final double[] adblSurvivalProbability,
		final double dblRecovery)
	{
		// Validate the array inputs up front instead of relying on the catch-all
		// below to trap a NullPointerException / ArrayIndexOutOfBoundsException
		if (java.lang.Double.isNaN (dblStartDate) || java.lang.Double.isNaN (dblRecovery) || null ==
			adblSurvivalDate || null == adblSurvivalProbability || 0 == adblSurvivalProbability.length ||
				adblSurvivalDate.length != adblSurvivalProbability.length)
			return null;

		try {
			double dblSurvivalBegin = 1.;
			double dblPeriodBegin = dblStartDate;
			double[] adblHazard = new double[adblSurvivalProbability.length];
			double[] adblRecovery = new double[1];
			double[] adblRecoveryDate = new double[1];
			adblRecovery[0] = dblRecovery;
			adblRecoveryDate[0] = dblStartDate;

			for (int i = 0; i < adblSurvivalProbability.length; ++i) {
				// Dates must be strictly increasing and survival strictly decreasing
				if (java.lang.Double.isNaN (adblSurvivalDate[i]) || java.lang.Double.isNaN
					(adblSurvivalProbability[i]) || adblSurvivalDate[i] <= dblPeriodBegin || dblSurvivalBegin
						<= adblSurvivalProbability[i])
					return null;

				// Piece-wise flat hazard implied by the survival ratio over the period (365.25-day year)
				adblHazard[i] = 365.25 / (adblSurvivalDate[i] - dblPeriodBegin) * java.lang.Math.log
					(dblSurvivalBegin / adblSurvivalProbability[i]);

				dblPeriodBegin = adblSurvivalDate[i];
				dblSurvivalBegin = adblSurvivalProbability[i];
			}

			return new org.drip.analytics.curve.CalibratedCreditCurve (dblStartDate, strName, adblHazard,
				adblSurvivalDate, adblRecovery, adblRecoveryDate, java.lang.Double.NaN);
		} catch (java.lang.Exception e) {
			e.printStackTrace();
		}

		return null;
	}

	/**
	 * Creates an instance of the CreditCurve object from a solitary hazard rate node
	 * 
	 * @param dblStartDate The Curve epoch date
	 * @param strName Credit Curve Name
	 * @param dblHazardRate The solo hazard rate
	 * @param dblHazardDate Date
	 * @param dblRecovery Recovery
	 * 
	 * @return CreditCurve instance, or null on invalid input / construction failure
	 */

	public static final org.drip.analytics.definition.CreditCurve FromHazardNode (
		final double dblStartDate,
		final java.lang.String strName,
		final double dblHazardRate,
		final double dblHazardDate,
		final double dblRecovery)
	{
		if (java.lang.Double.isNaN (dblStartDate) || java.lang.Double.isNaN (dblHazardRate) ||
			java.lang.Double.isNaN (dblHazardDate) || java.lang.Double.isNaN (dblRecovery)) {
			System.out.println ("Invalid inputs into CreditCurve.FromHazardNode");

			return null;
		}

		// Unlike FromFlatHazard, the hazard node may sit at a date other than the epoch
		double[] adblHazard = new double[1];
		double[] adblRecovery = new double[1];
		double[] adblHazardDate = new double[1];
		double[] adblRecoveryDate = new double[1];
		adblHazard[0] = dblHazardRate;
		adblRecovery[0] = dblRecovery;
		adblHazardDate[0] = dblHazardDate;
		adblRecoveryDate[0] = dblStartDate;

		try {
			return new org.drip.analytics.curve.CalibratedCreditCurve (dblStartDate, strName, adblHazard,
				adblHazardDate, adblRecovery, adblRecoveryDate, java.lang.Double.NaN);
		} catch (java.lang.Exception e) {
			e.printStackTrace();
		}

		return null;
	}

	/**
	 * Creates a credit curve from an array of dates and hazard rates
	 * 
	 * @param dtStart Curve epoch date
	 * @param strName Credit Curve Name
	 * @param adblDate Array of dates
	 * @param adblHazardRate Array of hazard rates
	 * @param dblRecovery Recovery
	 * 
	 * @return CreditCurve instance, or null on invalid input / construction failure
	 */

	public static final org.drip.analytics.definition.CreditCurve CreateCreditCurve (
		final org.drip.analytics.date.JulianDate dtStart,
		final java.lang.String strName,
		final double[] adblDate,
		final double[] adblHazardRate,
		final double dblRecovery)
	{
		if (null == dtStart || null == adblHazardRate || null == adblDate || adblHazardRate.length !=
			adblDate.length || java.lang.Double.isNaN (dblRecovery)) {
			System.out.println ("Invalid Credit curve ctr params!");

			return null;
		}

		try {
			double[] adblRecovery = new double[1];
			double[] adblRecoveryDate = new double[1];
			adblRecovery[0] = dblRecovery;
			adblRecoveryDate[0] = dtStart.getJulian();

			return new org.drip.analytics.curve.CalibratedCreditCurve (dtStart.getJulian(), strName,
				adblHazardRate, adblDate, adblRecovery, adblRecoveryDate, java.lang.Double.NaN);
		} catch (java.lang.Exception e) {
			e.printStackTrace();
		}

		return null;
	}

	/**
	 * Creates the credit curve from the given byte array
	 * 
	 * @param ab Byte Array
	 * 
	 * @return The credit curve instance, or null on invalid input / de-serialization failure
	 */

	public static final org.drip.analytics.definition.CreditCurve FromByteArray (
		final byte[] ab)
	{
		if (null == ab || 0 == ab.length) return null;

		try {
			return new org.drip.analytics.curve.CalibratedCreditCurve (ab);
		} catch (java.lang.Exception e) {
			e.printStackTrace();
		}

		return null;
	}

	/**
	 * Creates a credit curve from hazard rate and recovery rate term structures
	 * 
	 * @param dblStart Curve Epoch date
	 * @param strName Credit Curve Name
	 * @param adblHazardRate Matched array of hazard rates
	 * @param adblHazardDate Matched array of hazard dates
	 * @param adblRecoveryRate Matched array of recovery rates
	 * @param adblRecoveryDate Matched array of recovery dates
	 * @param dblSpecificDefaultDate (Optional) Specific Default Date
	 * 
	 * @return CreditCurve instance, or null on construction failure
	 */

	public static final org.drip.analytics.definition.CreditCurve CreateCreditCurve (
		final double dblStart,
		final java.lang.String strName,
		final double adblHazardRate[],
		final double adblHazardDate[],
		final double[] adblRecoveryRate,
		final double[] adblRecoveryDate,
		final double dblSpecificDefaultDate)
	{
		// Input validation is delegated to the CalibratedCreditCurve constructor
		try {
			return new org.drip.analytics.curve.CalibratedCreditCurve (dblStart, strName, adblHazardRate,
				adblHazardDate, adblRecoveryRate, adblRecoveryDate, dblSpecificDefaultDate);
		} catch (java.lang.Exception e) {
			e.printStackTrace();
		}

		return null;
	}
}
|
apache-2.0
|
dbeaver/dbeaver
|
plugins/org.jkiss.dbeaver.model/src/org/jkiss/dbeaver/runtime/encode/ContentEncrypter.java
|
2965
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.runtime.encode;
import org.jkiss.utils.IOUtils;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.CipherOutputStream;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
/**
* Content encryption/description
*/
public class ContentEncrypter {
public static final String CIPHER_NAME = "AES/CBC/PKCS5Padding";
public static final String KEY_ALGORITHM = "AES";
private SecretKey secretKey;
private Cipher cipher;
public ContentEncrypter(SecretKey secretKey) {
this.secretKey = secretKey;
try {
this.cipher = Cipher.getInstance(CIPHER_NAME);
} catch (Exception e) {
throw new IllegalStateException("Internal error during encrypted init", e);
}
}
public byte[] encrypt(String content) throws InvalidKeyException, IOException {
cipher.init(Cipher.ENCRYPT_MODE, secretKey);
byte[] iv = cipher.getIV();
ByteArrayOutputStream resultBuffer = new ByteArrayOutputStream();
try (CipherOutputStream cipherOut = new CipherOutputStream(resultBuffer, cipher)) {
resultBuffer.write(iv);
cipherOut.write(content.getBytes(StandardCharsets.UTF_8));
}
return resultBuffer.toByteArray();
}
public String decrypt(byte[] contents) throws InvalidAlgorithmParameterException, InvalidKeyException, IOException {
try (InputStream byteStream = new ByteArrayInputStream(contents)) {
byte[] fileIv = new byte[16];
byteStream.read(fileIv);
cipher.init(Cipher.DECRYPT_MODE, secretKey, new IvParameterSpec(fileIv));
try (CipherInputStream cipherIn = new CipherInputStream(byteStream, cipher)) {
ByteArrayOutputStream resultBuffer = new ByteArrayOutputStream();
IOUtils.copyStream(cipherIn, resultBuffer);
return new String(resultBuffer.toByteArray(), StandardCharsets.UTF_8);
}
}
}
}
|
apache-2.0
|
Interel-Group/core3
|
src/main/scala/core3/workflows/definitions/SystemUpdateLocalUserMetadata.scala
|
3535
|
/**
* Copyright 2017 Interel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package core3.workflows.definitions
import core3.database.containers.core
import core3.database.{ObjectID, RevisionID, RevisionSequenceNumber}
import core3.security.UserTokenBase
import core3.workflows._
import play.api.libs.json.{JsValue, Json}
import scala.concurrent.{ExecutionContext, Future}
/**
  * Workflow that replaces the metadata of a local user, guarded by the
  * supplied revision ID and sequence number.
  */
object SystemUpdateLocalUserMetadata extends WorkflowBase {

  case class SystemUpdateLocalUserMetadataParameters(
    userUUID: ObjectID,
    revision: RevisionID,
    revisionNumber: RevisionSequenceNumber,
    metadata: JsValue
  ) extends WorkflowParameters {
    override def asJson: JsValue = Json.obj(
      "userUUID" -> userUUID,
      "revision" -> revision,
      "revisionNumber" -> revisionNumber,
      "metadata" -> metadata
    )
  }

  case class SystemUpdateLocalUserMetadataInputData(user: core.LocalUser) extends InputData {
    override def asJson: JsValue = Json.obj(
      "user" -> user
    )
  }

  override def name: String = "SystemUpdateLocalUserMetadata"

  override def readOnly: Boolean = false

  override def withSensitiveParams: Boolean = false

  override def withSensitiveData: Boolean = true

  override def parseParameters(rawParams: JsValue)(implicit ec: ExecutionContext): Future[WorkflowParameters] = {
    Future {
      SystemUpdateLocalUserMetadataParameters(
        (rawParams \ "userUUID").as[ObjectID],
        (rawParams \ "revision").as[RevisionID],
        (rawParams \ "revisionNumber").as[RevisionSequenceNumber],
        (rawParams \ "metadata").as[JsValue]
      )
    }
  }

  override def loadData(params: WorkflowParameters, queryHandlers: DataQueryHandlers)(implicit ec: ExecutionContext): Future[InputData] = {
    params match {
      case actualParams: SystemUpdateLocalUserMetadataParameters =>
        for {
          user <- queryHandlers.getContainerWithRevision(
            "LocalUser",
            actualParams.userUUID,
            actualParams.revision,
            actualParams.revisionNumber
          ).map(_.asInstanceOf[core.LocalUser])
        } yield {
          SystemUpdateLocalUserMetadataInputData(user)
        }

      // Fail with a descriptive error instead of an opaque MatchError,
      // mirroring the fallback already present in executeAction
      case _ =>
        Future.failed(
          new IllegalArgumentException(s"core3.workflows.definitions.SystemUpdateLocalUserMetadata::loadData > " +
            s"Unexpected params of type [${params.getClass.getName}] supplied.")
        )
    }
  }

  override def executeAction(requestID: RequestID, user: UserTokenBase, params: WorkflowParameters, data: InputData)(implicit ec: ExecutionContext): Future[(WorkflowResult, OutputData)] = {
    (params, data) match {
      case (actualParams: SystemUpdateLocalUserMetadataParameters, actualData: SystemUpdateLocalUserMetadataInputData) =>
        actualData.user.metadata = actualParams.metadata
        Future.successful((WorkflowResult(wasSuccessful = true, requestID), OutputData(update = Vector(actualData.user))))

      case _ =>
        Future.failed(
          new IllegalArgumentException(s"core3.workflows.definitions.SystemUpdateLocalUserMetadata::executeAction > " +
            s"Unexpected params of type [${params.getClass.getName}] or data of type [${data.getClass.getName}] supplied.")
        )
    }
  }
}
|
apache-2.0
|
Epi-Info/Epi-Info-Cloud-Contact-Tracing
|
Cloud Enter/Epi.Compatibility/EpiInfoPlugin/IDataSource.cs
|
325
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data;
namespace EpiInfo.Plugin
{
    /// <summary>
    /// Abstraction over a SQL-capable data source used by Epi Info plugins.
    /// </summary>
    public interface IDataSource
    {
        /// <summary>Executes the given SQL and returns a reader over the result set.</summary>
        System.Data.IDataReader GetDataTableReader(string pSQL);
        /// <summary>Executes the given SQL and returns a single scalar result
        /// (presumably first column of the first row, per ADO.NET convention — confirm with implementations).</summary>
        object GetScalar(string pSQL);
        /// <summary>Executes the given SQL as a non-query statement; returns true on success.</summary>
        bool ExecuteSQL(string pSQL);
    }
}
|
apache-2.0
|
DaffyNinja/Cute-Game
|
Cutesy Game/Assets/Scripts/CameraDetectScreen.cs
|
1082
|
using UnityEngine;
using System.Collections;
// NOTE(review): this component is currently an empty stub — both Start and Update
// do nothing. The commented-out experiments below explore clamping an object to the
// camera viewport and detecting when it leaves the visible area; either finish one
// approach or remove this script from the scene.
public class CameraDetectScreen : MonoBehaviour {

    // Use this for initialization
    void Start ()
    {
    }

    // Update is called once per frame
    void Update ()
    {
        //// Vector3 viewPos = Camera.main.WorldToViewportPoint(transform.position);
        //  Vector3 viewPos = Camera.main.WorldToScreenPoint(transform.position);
        //  //viewPos.x = Mathf.Clamp01(viewPos.x);
        //  //viewPos.y = Mathf.Clamp01(viewPos.y);
        //  transform.position = Camera.main.ViewportToWorldPoint(viewPos);
        //Vector3 pos = Camera.main.WorldToViewportPoint(transform.position);
        //if (pos.x < 0.0) Debug.Log("I am left of the camera's view.");
        //if (1.0 < pos.x) Debug.Log("I am right of the camera's view.");
        //if (pos.y < 0.0) Debug.Log("I am below the camera's view.");
        //if (1.0 < pos.y) Debug.Log("I am above the camera's view.");
        //if (GetComponent<Renderer>().IsVisibleFrom(Camera.main)) Debug.Log("Visible");
        //else Debug.Log("Not visible");
    }
}
|
apache-2.0
|
jamessdixon/PanzerGeneral
|
Tff.PanzerGeneral_Solution/Tff.PanzerGeneral.UI.WindowsPhone7/Models/Battle/BattleOutcomeEnum.cs
|
346
|
using System;
namespace Tff.Panzer.Models.Battle
{
    /// <summary>
    /// Enumerates the possible results of a battle between an aggressor and a protector.
    /// Numeric values are explicit and must remain stable (presumably serialized — confirm before renumbering).
    /// </summary>
    public enum BattleOutcomeEnum
    {
        AggressorSurvives_ProtectorHolds = 0,
        AggressorSurvives_ProtectorRetreats = 1,
        AggressorSurvives_ProtectorDestroyed = 2,
        AggressorDestroyed_ProtectorHolds = 3,
        AggressorDestroyed_ProtectorDestroyed = 4
    }
}
|
apache-2.0
|
daniellemayne/dasein-cloud-cloudstack_old
|
src/main/java/org/dasein/cloud/cloudstack/identity/Keypair.java
|
9920
|
/**
* Copyright (C) 2009-2014 Dell, Inc.
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.cloudstack.identity;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.OperationNotSupportedException;
import org.dasein.cloud.ProviderContext;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.cloudstack.CSCloud;
import org.dasein.cloud.cloudstack.CSMethod;
import org.dasein.cloud.cloudstack.Param;
import org.dasein.cloud.identity.SSHKeypair;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.identity.ShellKeyCapabilities;
import org.dasein.cloud.identity.ShellKeySupport;
import org.dasein.cloud.util.APITrace;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Locale;
/**
* Implements the CSCloud 3.0 SSH keypair support
* @author George Reese
* @since 2012.02
* @version 2012.02
*/
public class Keypair implements ShellKeySupport {
private CSCloud provider;
private transient volatile KeypairCapabilities capabilities;
Keypair(@Nonnull CSCloud provider) { this.provider = provider; }
/**
 * Creates a new SSH keypair with the given name via the CloudStack createKeypair call,
 * returning the first keypair node found in the response.
 *
 * @param name the name for the new keypair
 * @return the created keypair
 * @throws CloudException if no context is set or the response contains no keypair
 * @throws InternalException on internal provider errors
 */
@Override
public @Nonnull SSHKeypair createKeypair(@Nonnull String name) throws InternalException, CloudException {
    APITrace.begin(provider, "Keypair.createKeypair");
    try {
        ProviderContext ctx = provider.getContext();

        if( ctx == null ) {
            throw new CloudException("No context was set for this request");
        }
        CSMethod method = new CSMethod(provider);
        Document doc = method.get(method.buildUrl(CSMethod.CREATE_KEYPAIR, new Param("name", name)), CSMethod.CREATE_KEYPAIR);
        NodeList matches = doc.getElementsByTagName("keypair");

        for( int i=0; i<matches.getLength(); i++ ) {
            SSHKeypair key = toKeypair(ctx, matches.item(i));

            // Return the first node that parses into a usable keypair
            if( key != null ) {
                return key;
            }
        }
        throw new CloudException("Request did not error, but no keypair was generated");
    }
    finally {
        APITrace.end();
    }
}
/**
 * Deletes the named keypair via the CloudStack deleteKeypair call.
 * The response document is not inspected; errors surface through the method call itself.
 *
 * @param providerId the provider-side keypair name to delete
 */
@Override
public void deleteKeypair(@Nonnull String providerId) throws InternalException, CloudException {
    APITrace.begin(provider, "Keypair.deleteKeypair");
    try {
        CSMethod method = new CSMethod(provider);

        method.get(method.buildUrl(CSMethod.DELETE_KEYPAIR, new Param("name", providerId)), CSMethod.DELETE_KEYPAIR);
    }
    finally {
        APITrace.end();
    }
}
/**
 * Looks up the named keypair and reports its fingerprint.
 *
 * @param providerId the provider-side keypair name
 * @return the fingerprint, or null when the keypair does not exist
 */
@Override
public @Nullable String getFingerprint(@Nonnull String providerId) throws InternalException, CloudException {
    APITrace.begin(provider, "Keypair.getFingerprint");
    try {
        SSHKeypair kp = getKeypair(providerId);

        if( kp == null ) {
            return null;
        }
        return kp.getFingerprint();
    }
    finally {
        APITrace.end();
    }
}
/**
 * Keypair import is not supported by this implementation (see {@code importKeypair}).
 *
 * @return {@code Requirement.NONE}
 * @deprecated superseded by the capabilities object returned from {@code getCapabilities()}
 */
@Override
@Deprecated
public Requirement getKeyImportSupport() throws CloudException, InternalException {
    return Requirement.NONE;
}
/**
 * Fetches a single keypair by name via the CloudStack listKeypairs call.
 *
 * @param providerId the provider-side keypair name
 * @return the matching keypair, or null when none was found
 * @throws CloudException if no context is set
 */
@Override
public @Nullable SSHKeypair getKeypair(@Nonnull String providerId) throws InternalException, CloudException {
    APITrace.begin(provider, "Keypair.getKeypair");
    try {
        ProviderContext ctx = provider.getContext();

        if( ctx == null ) {
            throw new CloudException("No context was set for this request");
        }
        CSMethod method = new CSMethod(provider);
        Document doc = method.get(method.buildUrl(CSMethod.LIST_KEYPAIRS, new Param("name", providerId)), CSMethod.LIST_KEYPAIRS);
        NodeList matches = doc.getElementsByTagName("sshkeypair");

        for( int i=0; i<matches.getLength(); i++ ) {
            SSHKeypair key = toKeypair(ctx, matches.item(i));

            // Return the first node that parses into a usable keypair
            if( key != null ) {
                return key;
            }
        }
        return null;
    }
    finally {
        APITrace.end();
    }
}
/**
 * Reports the provider's term for a keypair (not localized; the locale is ignored).
 *
 * @param locale the requested locale (unused)
 * @return the constant term "SSH keypair"
 * @deprecated superseded by the capabilities object returned from {@code getCapabilities()}
 */
@Override
@Deprecated
public @Nonnull String getProviderTermForKeypair(@Nonnull Locale locale) {
    return "SSH keypair";
}
/**
 * Lazily creates and caches the shell key capabilities object.
 * NOTE(review): the lazy init is unsynchronized — concurrent first calls may
 * construct KeypairCapabilities more than once; verify its construction is
 * cheap/stateless before relying on single-instance semantics.
 */
@Nonnull @Override public ShellKeyCapabilities getCapabilities() throws CloudException, InternalException {
    if( capabilities == null ) {
        capabilities = new KeypairCapabilities(provider);
    }
    return capabilities;
}
/**
 * Importing an externally generated public key is not supported by this provider.
 *
 * @throws OperationNotSupportedException always
 */
@Override
public @Nonnull SSHKeypair importKeypair(@Nonnull String name, @Nonnull String publicKey) throws InternalException, CloudException {
    throw new OperationNotSupportedException("Import of keypairs is not supported");
}
/**
 * Keypair support piggybacks on virtual machine support: the account is
 * considered subscribed for keypairs iff it is subscribed for VMs.
 */
@Override
public boolean isSubscribed() throws CloudException, InternalException {
    APITrace.begin(provider, "Keypair.isSubscribed");
    try {
        return provider.getComputeServices().getVirtualMachineSupport().isSubscribed();
    }
    finally {
        APITrace.end();
    }
}
/**
 * Lists all SSH keypairs visible to the current account, paging through the
 * CloudStack listSSHKeyPairs API 500 entries at a time.
 *
 * Fix: the original fetched the first page WITHOUT a pagesize parameter but
 * computed the number of pages assuming 500 results per page. When the
 * server-side default pagesize differs from 500, that either dropped keypairs
 * or re-fetched ones already seen. Every request now carries an explicit
 * pagesize=500 and page number so the page math matches the responses.
 *
 * @return all keypairs for the current account in the current region
 * @throws InternalException an internal error occurred inside the Dasein implementation
 * @throws CloudException no context was set, or the cloud provider reported an error
 */
@Override
public @Nonnull Collection<SSHKeypair> list() throws InternalException, CloudException {
    APITrace.begin(provider, "Keypair.list");
    try {
        ProviderContext ctx = provider.getContext();
        if( ctx == null ) {
            throw new CloudException("No context was set for this request");
        }
        CSMethod method = new CSMethod(provider);
        // Explicit pagesize on the FIRST request too, so the count-based page math below is valid.
        Document doc = method.get(method.buildUrl(CSMethod.LIST_KEYPAIRS, new Param("pagesize", "500"), new Param("page", "1")), CSMethod.LIST_KEYPAIRS);
        ArrayList<SSHKeypair> keys = new ArrayList<SSHKeypair>();
        int numPages = 1;
        // The <count> element reports the total number of keypairs across all pages.
        NodeList nodes = doc.getElementsByTagName("count");
        Node n = nodes.item(0);
        if (n != null) {
            String value = n.getFirstChild().getNodeValue().trim();
            int count = Integer.parseInt(value);
            numPages = count / 500;
            if (count % 500 > 0) {
                numPages++;
            }
        }
        for (int page = 1; page <= numPages; page++) {
            if (page > 1) {
                // Page 1 was already fetched above; fetch subsequent pages on demand.
                doc = method.get(method.buildUrl(CSMethod.LIST_KEYPAIRS, new Param("pagesize", "500"), new Param("page", String.valueOf(page))), CSMethod.LIST_KEYPAIRS);
            }
            NodeList matches = doc.getElementsByTagName("sshkeypair");
            for( int i = 0; i < matches.getLength(); i++ ) {
                SSHKeypair key = toKeypair(ctx, matches.item(i));
                if( key != null ) {
                    keys.add(key);
                }
            }
        }
        return keys;
    }
    finally {
        APITrace.end();
    }
}
/**
 * Maps a Dasein service action to provider-specific access-control identifiers.
 * CloudStack has no such mapping for keypair actions, so this always returns
 * an empty array.
 */
@Override
public @Nonnull String[] mapServiceAction(@Nonnull ServiceAction action) {
    return new String[0];
}
/**
 * Converts an {@code <sshkeypair>} XML node into an {@link SSHKeypair} model object.
 * The node's {@code name}, {@code fingerprint}, and {@code privatekey} children are
 * read; both a name and a fingerprint are required for a usable keypair.
 *
 * @param ctx the provider context supplying region and account
 * @param node the XML node to convert, possibly {@code null}
 * @return the populated keypair, or {@code null} when the node is absent/incomplete
 * @throws CloudException no region is associated with the context
 * @throws InternalException the private key could not be encoded
 */
private @Nullable SSHKeypair toKeypair(@Nonnull ProviderContext ctx, @Nullable Node node) throws CloudException, InternalException {
    if( node == null || !node.hasChildNodes() ) {
        return null;
    }
    String regionId = ctx.getRegionId();
    if( regionId == null ) {
        throw new CloudException("No region is part of this request");
    }
    String keyName = null;
    String keyFingerprint = null;
    String keyMaterial = null;
    NodeList children = node.getChildNodes();
    // Scan the child elements for the three fields we care about.
    for( int idx = 0; idx < children.getLength(); idx++ ) {
        Node child = children.item(idx);
        if( child == null ) {
            continue;
        }
        String tag = child.getNodeName();
        if( tag.equalsIgnoreCase("name") && child.hasChildNodes() ) {
            keyName = child.getFirstChild().getNodeValue().trim();
        }
        else if( tag.equalsIgnoreCase("fingerprint") && child.hasChildNodes() ) {
            keyFingerprint = child.getFirstChild().getNodeValue().trim();
        }
        else if( tag.equalsIgnoreCase("privatekey") && child.hasChildNodes() ) {
            keyMaterial = child.getFirstChild().getNodeValue().trim();
        }
    }
    if( keyName == null || keyFingerprint == null ) {
        return null;
    }
    SSHKeypair keypair = new SSHKeypair();
    keypair.setProviderRegionId(regionId);
    keypair.setProviderOwnerId(ctx.getAccountNumber());
    keypair.setProviderKeypairId(keyName);
    keypair.setName(keyName);
    keypair.setFingerprint(keyFingerprint);
    // The private key is only present in the createSSHKeyPair response.
    if( keyMaterial != null ) {
        try {
            keypair.setPrivateKey(keyMaterial.getBytes("utf-8"));
        }
        catch( UnsupportedEncodingException e ) {
            throw new InternalException(e);
        }
    }
    return keypair;
}
}
|
apache-2.0
|
germamix/sepa-pain-lib
|
sepa-pain-lib/src/main/java/ws/michalski/sepa/pain/Currency.java
|
1807
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package ws.michalski.sepa.pain;
/**
 * ISO 4217-style currency codes accepted by the SEPA PAIN message builders.
 * Constants are grouped by initial letter for readability; the declaration
 * order (and therefore every constant's ordinal) is identical to the original.
 */
public enum Currency {
    // A
    AED, AFN, ALL, AMD, ANG, AOA, ARS, AUD, AWG, AZN,
    // B
    BAM, BBD, BDT, BGN, BHD, BIF, BMD, BND, BOB, BRL, BSD, BTN, BWP, BYR, BZD,
    // C
    CAD, CDF, CHF, CLP, CNY, COP, CRC, CSD, CUP, CVE, CYP, CZK,
    // D
    DJF, DKK, DOP, DZD,
    // E (note: ETB precedes ERN, as originally declared)
    EEK, EGP, ETB, ERN, EUR,
    // F
    FJD, FKP,
    // G
    GBP, GEL, GHC, GIP, GMD, GNF, GTQ, GYD,
    // H
    HKD, HNL, HRK, HTG, HUF,
    // I
    IDR, ILS, INR, IQD, IRR, ISK,
    // J
    JMD, JOD, JPY,
    // K
    KES, KGS, KHR, KMF, KPW, KRW, KWD, KYD, KZT,
    // L
    LAK, LBP, LKR, LRD, LSL, LTL, LVL, LYD,
    // M
    MAD, MDL, MGA, MKD, MMK, MNT, MOP, MRO, MTL, MUR, MVR, MWK, MXN, MYR, MZM,
    // N
    NAD, NGN, NIO, NOK, NPR, NZD,
    // O
    OMR,
    // P
    PAB, PEN, PGK, PHP, PKR, PLN, PYG,
    // Q
    QAR,
    // R
    RON, RUB, RWF,
    // S
    SAR, SBD, SCR, SDD, SEK, SGD, SHP, SIT, SKK, SLL, SOS, SRD, STD, SVC, SYP, SZL,
    // T
    THB, TJS, TMM, TND, TOP, TRY, TTD, TWD, TZS,
    // U
    UAH, UGX, USD, UYU, UZS,
    // V
    VEB, VND, VUV,
    // W
    WST,
    // X (supranational / special codes)
    XAF, XCD, XDR, XOF, XPF,
    // Y
    YER,
    // Z
    ZAR, ZMK, ZWD
}
|
apache-2.0
|
oehme/analysing-gradle-performance
|
my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p321/Test6423.java
|
2111
|
package org.gradle.test.performance.mediummonolithicjavaproject.p321;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Generated unit tests for {@code Production6423}: each test verifies that a
 * String property survives a round trip through its setter/getter pair.
 */
public class Test6423 {
    Production6423 objectUnderTest = new Production6423();

    @Test
    public void testProperty0() {
        final String expected = "value";
        objectUnderTest.setProperty0(expected);
        assertEquals(expected, objectUnderTest.getProperty0());
    }

    @Test
    public void testProperty1() {
        final String expected = "value";
        objectUnderTest.setProperty1(expected);
        assertEquals(expected, objectUnderTest.getProperty1());
    }

    @Test
    public void testProperty2() {
        final String expected = "value";
        objectUnderTest.setProperty2(expected);
        assertEquals(expected, objectUnderTest.getProperty2());
    }

    @Test
    public void testProperty3() {
        final String expected = "value";
        objectUnderTest.setProperty3(expected);
        assertEquals(expected, objectUnderTest.getProperty3());
    }

    @Test
    public void testProperty4() {
        final String expected = "value";
        objectUnderTest.setProperty4(expected);
        assertEquals(expected, objectUnderTest.getProperty4());
    }

    @Test
    public void testProperty5() {
        final String expected = "value";
        objectUnderTest.setProperty5(expected);
        assertEquals(expected, objectUnderTest.getProperty5());
    }

    @Test
    public void testProperty6() {
        final String expected = "value";
        objectUnderTest.setProperty6(expected);
        assertEquals(expected, objectUnderTest.getProperty6());
    }

    @Test
    public void testProperty7() {
        final String expected = "value";
        objectUnderTest.setProperty7(expected);
        assertEquals(expected, objectUnderTest.getProperty7());
    }

    @Test
    public void testProperty8() {
        final String expected = "value";
        objectUnderTest.setProperty8(expected);
        assertEquals(expected, objectUnderTest.getProperty8());
    }

    @Test
    public void testProperty9() {
        final String expected = "value";
        objectUnderTest.setProperty9(expected);
        assertEquals(expected, objectUnderTest.getProperty9());
    }
}
|
apache-2.0
|
ddepaoli3/fuel-library-dev
|
tests/noop/spec/hosts/openstack-haproxy/openstack-haproxy-radosgw_spec.rb
|
1176
|
# Noop catalog test for the openstack-haproxy-radosgw manifest: verifies that
# the HAProxy configuration for the Ceph RADOS Gateway is declared correctly.
require 'spec_helper'
require 'shared-examples'
manifest = 'openstack-haproxy/openstack-haproxy-radosgw.pp'
describe manifest do
  shared_examples 'catalog' do
    # RadosGW proxying is only configured when both image storage and object
    # storage are backed by Ceph in the deployment's hiera data.
    images_ceph = Noop.hiera_structure 'storage/images_ceph'
    objects_ceph = Noop.hiera_structure 'storage/objects_ceph'
    if images_ceph and objects_ceph
      # With Ironic enabled, an extra HAProxy frontend is bound on the
      # baremetal network VIP so baremetal nodes can reach RadosGW.
      ironic_enabled = Noop.hiera_structure 'ironic/enabled'
      if ironic_enabled
        baremetal_virtual_ip = Noop.hiera_structure 'network_metadata/vips/baremetal/ipaddr'
        it 'should declare ::openstack::ha::radosgw class with baremetal_virtual_ip' do
          should contain_class('openstack::ha::radosgw').with(
            'baremetal_virtual_ip' => baremetal_virtual_ip,
          )
        end
        # The baremetal frontend is internal-only (no public VIP binding).
        it 'should declare openstack::ha::haproxy_service with name radosgw-baremetal' do
          should contain_openstack__ha__haproxy_service('radosgw-baremetal').with(
            'order' => '135',
            'public_virtual_ip' => false,
            'internal_virtual_ip' => baremetal_virtual_ip
          )
        end
      end
    end
  end # end of shared_examples
  # Run the shared 'catalog' examples against both supported distros.
  test_ubuntu_and_centos manifest
end
|
apache-2.0
|
DigAg/digag-server
|
src/main/java/com/digag/domain/Repository/ArticleRepository.java
|
396
|
package com.digag.domain.Repository;
import com.digag.domain.Article;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.rest.core.annotation.RepositoryRestResource;
/**
 * Spring Data JPA repository for {@link Article} entities keyed by a String id.
 * Annotated with {@code @RepositoryRestResource}, so Spring Data REST exposes
 * the standard CRUD endpoints for articles automatically; no custom query
 * methods are declared.
 *
 * Created by Yuicon on 2017/7/8.
 * https://github.com/Yuicon
 */
@RepositoryRestResource
public interface ArticleRepository extends JpaRepository<Article,String> {
}
|
apache-2.0
|
darknessitachi/jenode
|
jenode/src/main/java/ENode/Infrastructure/Dapper/DynamicParameters.java
|
9770
|
package ENode.Infrastructure.Dapper;
//
// License: http://www.apache.org/licenses/LICENSE-2.0
// Home page: http://code.google.com/p/dapper-dot-net/
//
// Note: to build on C# 3.0 + .NET 3.5, include the CSHARP30 compiler symbol (and yes,
// I know the difference between language and runtime versions; this is a compromise).
//
// NOTE(review): This class is a mechanical C#-to-Java conversion of Dapper's
// DynamicParameters and does NOT compile as-is. Converter artifacts include
// leftover C# syntax (`var`, `dynamic`, indexer access, a C# lambda in
// getParameterNames), two `Add` methods with an identical Java signature,
// and two `AddParameters(IDbCommand, SqlMapper.Identity)` declarations where
// the private overload would recurse into itself. Code tokens are preserved
// verbatim; only comments were added.
public class DynamicParameters implements SqlMapper.IDynamicParameters
{
    // Sentinel DbType marking a parameter that expands into a list of values.
    public static final DbType EnumerableMultiParameter = (DbType)(-1);
    // Cache of per-template appender delegates, keyed by query identity.
    private static java.util.HashMap<SqlMapper.Identity, Action<IDbCommand, Object>> paramReaderCache = new java.util.HashMap<SqlMapper.Identity, Action<IDbCommand, Object>>();
    // Explicitly added parameters, keyed by cleaned (prefix-stripped) name.
    private java.util.HashMap<String, ParamInfo> parameters = new java.util.HashMap<String, ParamInfo>();
    // Template objects whose public properties become parameters at bind time.
    private java.util.ArrayList<Object> templates;
    // Internal record describing one parameter: name, value, direction, type,
    // size, and (after binding) the attached ADO-style parameter object.
    private static class ParamInfo
    {
        private String privateName;
        public final String getName()
        {
            return privateName;
        }
        public final void setName(String value)
        {
            privateName = value;
        }
        private Object privateValue;
        public final Object getValue()
        {
            return privateValue;
        }
        public final void setValue(Object value)
        {
            privateValue = value;
        }
        private ParameterDirection privateParameterDirection;
        public final ParameterDirection getParameterDirection()
        {
            return privateParameterDirection;
        }
        public final void setParameterDirection(ParameterDirection value)
        {
            privateParameterDirection = value;
        }
        private DbType privateDbType;
        public final DbType getDbType()
        {
            return privateDbType;
        }
        public final void setDbType(DbType value)
        {
            privateDbType = value;
        }
        private Integer privateSize;
        public final Integer getSize()
        {
            return privateSize;
        }
        public final void setSize(Integer value)
        {
            privateSize = value;
        }
        private IDbDataParameter privateAttachedParam;
        public final IDbDataParameter getAttachedParam()
        {
            return privateAttachedParam;
        }
        public final void setAttachedParam(IDbDataParameter value)
        {
            privateAttachedParam = value;
        }
    }
    /**
     construct a dynamic parameter bag
     */
    public DynamicParameters()
    {
    }
    /**
     construct a dynamic parameter bag
     @param template can be an anonymous type or a DynamicParameters bag
     */
    public DynamicParameters(Object template)
    {
        AddDynamicParams(template);
    }
    /**
     Append a whole object full of params to the dynamic
     EG: AddDynamicParams(new {A = 1, B = 2}) // will add property A and B to the dynamic
     @param param
     */
    //C# TO JAVA CONVERTER TODO TASK: There is no preprocessor in Java:
    //#if CSHARP30
    // NOTE(review): the two declarations below come from a C# #if/#else pair;
    // only one can legally exist in Java, and `dynamic` has no Java equivalent.
    public final void AddDynamicParams(Object param)
    //#else
    public final void AddDynamicParams(dynamic param)
    //#endif
    {
        //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
        var obj = (Object)((param instanceof Object) ? param : null);
        if (obj != null)
        {
            //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
            var subDynamic = (DynamicParameters)((obj instanceof DynamicParameters) ? obj : null);
            if (subDynamic == null)
            {
                // Not another bag: either a key/value dictionary or a template object.
                //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
                var dictionary = (Iterable<java.util.Map.Entry<String, Object>>)((obj instanceof Iterable<java.util.Map.Entry<String, Object>>) ? obj : null);
                if (dictionary == null)
                {
                    // Template object: its properties are expanded lazily in AddParameters.
                    templates = (templates != null) ? templates : new java.util.ArrayList<Object>();
                    templates.add(obj);
                }
                else
                {
                    // Dictionary: add each entry as a named parameter immediately.
                    //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
                    for (var kvp : dictionary)
                    {
                        //C# TO JAVA CONVERTER TODO TASK: There is no preprocessor in Java:
                        //#if CSHARP30
                        Add(kvp.getKey(), kvp.getValue(), null, null, null);
                        //#else
                        Add(kvp.getKey(), kvp.getValue());
                        //#endif
                    }
                }
            }
            else
            {
                // Another DynamicParameters bag: merge its parameters and templates.
                if (subDynamic.parameters != null)
                {
                    //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
                    for (var kvp : subDynamic.parameters)
                    {
                        parameters.put(kvp.getKey(), kvp.getValue());
                    }
                }
                if (subDynamic.templates != null)
                {
                    templates = (templates != null) ? templates : new java.util.ArrayList<Object>();
                    //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
                    for (var t : subDynamic.templates)
                    {
                        templates.add(t);
                    }
                }
            }
        }
    }
    /**
     Add a parameter to this dynamic parameter list
     @param name
     @param value
     @param dbType
     @param direction
     @param size
     */
    //C# TO JAVA CONVERTER TODO TASK: There is no preprocessor in Java:
    //#if CSHARP30
    // NOTE(review): duplicate signature — this method is declared twice with the
    // same Java erasure (the original C# pair differed only in optional
    // parameters); one declaration must be removed for the class to compile.
    public final void Add(String name, Object value, DbType dbType, ParameterDirection direction, Integer size)
    //#else
    //C# TO JAVA CONVERTER TODO TASK: C# optional parameters are not converted to Java:
    //ORIGINAL LINE: public void Add(string name, object value = null, Nullable<DbType> dbType = null, Nullable<ParameterDirection> direction = null, Nullable<int> size = null)
    public final void Add(String name, Object value, DbType dbType, ParameterDirection direction, Integer size)
    //#endif
    {
        ParamInfo tempVar = new ParamInfo();
        tempVar.setName(name);
        tempVar.setValue(value);
        // Direction defaults to Input when none is supplied.
        tempVar.setParameterDirection((direction != null) ? direction : ParameterDirection.Input);
        tempVar.setDbType(dbType);
        tempVar.setSize(size);
        parameters.put(Clean(name), tempVar);
    }
    // Strips a leading '@', ':' or '?' prefix from a parameter name so lookups
    // are provider-agnostic.
    private static String Clean(String name)
    {
        if (!DotNetToJavaStringHelper.isNullOrEmpty(name))
        {
            //C# TO JAVA CONVERTER NOTE: The following 'switch' operated on a string member and was converted to Java 'if-else' logic:
            // switch (name[0])
            //ORIGINAL LINE: case '@':
            if (name.charAt(0) == '@' || name.charAt(0) == ':' || name.charAt(0) == '?')
            {
                return name.substring(1);
            }
        }
        return name;
    }
    // NOTE(review): this private overload duplicates the protected one below
    // (same signature) and, as written, would recurse into itself forever.
    // In the original C# this was an explicit interface implementation that
    // forwarded to the protected method; the converter collapsed the two.
    private void AddParameters(IDbCommand command, SqlMapper.Identity identity)
    {
        AddParameters(command, identity);
    }
    /**
     Add all the parameters needed to the command just before it executes
     @param command The raw command prior to execution
     @param identity Information about the query
     */
    protected final void AddParameters(IDbCommand command, SqlMapper.Identity identity)
    {
        // First, expand template objects via cached generated appenders.
        if (templates != null)
        {
            //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
            for (var template : templates)
            {
                //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
                var newIdent = identity.ForDynamicParameters(template.getClass());
                Action<IDbCommand, Object> appender = null;
                // Appender generation is expensive, so results are cached per identity.
                synchronized (paramReaderCache)
                {
                    if (!((appender = paramReaderCache.get(newIdent)) != null))
                    {
                        appender = SqlMapper.CreateParamInfoGenerator(newIdent, true);
                        paramReaderCache.put(newIdent, appender);
                    }
                }
                appender(command, template);
            }
        }
        // Then bind each explicitly added parameter onto the command.
        //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
        for (var param : parameters.values())
        {
            //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
            var dbType = param.DbType;
            //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
            var val = param.getValue();
            String name = Clean(param.getName());
            if (dbType == null && val != null)
            {
                // Infer the DbType from the runtime value when none was supplied.
                dbType = SqlMapper.LookupDbType(val.getClass(), name);
            }
            if (dbType == DynamicParameters.EnumerableMultiParameter)
            {
                // List-valued parameter: expand into multiple command parameters.
                //C# TO JAVA CONVERTER TODO TASK: There is no preprocessor in Java:
                ///#pragma warning disable 612, 618
                SqlMapper.PackListParameters(command, name, val);
                //C# TO JAVA CONVERTER TODO TASK: There is no preprocessor in Java:
                ///#pragma warning restore 612, 618
            }
            else
            {
                // Reuse an existing command parameter of the same name if present.
                boolean add = !command.Parameters.Contains(name);
                IDbDataParameter p;
                if (add)
                {
                    p = command.CreateParameter();
                    p.ParameterName = name;
                }
                else
                {
                    p = (IDbDataParameter)command.Parameters[name];
                }
                p.setValue((val != null) ? val : DBNull.getValue());
                p.Direction = param.ParameterDirection;
                //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
                var s = (String)((val instanceof String) ? val : null);
                if (s != null)
                {
                    // Short strings are sized at 4000 so query plans can be shared.
                    if (s.getLength() <= 4000)
                    {
                        p.Size = 4000;
                    }
                }
                if (param.Size != null)
                {
                    p.Size = param.Size.getValue();
                }
                if (dbType != null)
                {
                    p.DbType = dbType.getValue();
                }
                if (add)
                {
                    command.Parameters.Add(p);
                }
                // Remember the bound parameter so Get() can read output values later.
                param.AttachedParam = p;
            }
        }
    }
    /**
     All the names of the param in the bag, use Get to yank them out
     */
    public final Iterable<String> getParameterNames()
    {
        //C# TO JAVA CONVERTER TODO TASK: Lambda expressions and anonymous methods are not converted by C# to Java Converter:
        return parameters.Select(p => p.getKey());
    }
    /**
     Get the value of a parameter
     <typeparam name="T"></typeparam>
     @param name
     @return The value, note DBNull.Value is not returned, instead the value is returned as null
     */
    public final <T> T Get(String name)
    {
        //C# TO JAVA CONVERTER TODO TASK: There is no equivalent to implicit typing in Java:
        var val = parameters.get(Clean(name)).getAttachedParam().getValue();
        if (val == DBNull.getValue())
        {
            // NOTE(review): `if (null != null)` is a converter artifact of C#'s
            // `default(T) != null` value-type check; this branch is dead in Java.
            if (null != null)
            {
                throw new ApplicationException("Attempting to cast a DBNull to a non nullable type!");
            }
            return null;
        }
        return (T)val;
    }
}
|
apache-2.0
|
trasa/aws-sdk-java
|
aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/transform/AccessKeyMetadataStaxUnmarshaller.java
|
3268
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.identitymanagement.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* AccessKeyMetadata StAX Unmarshaller
*/
/**
 * StAX unmarshaller for {@link AccessKeyMetadata}: walks the XML event stream
 * and copies the UserName, AccessKeyId, Status and CreateDate elements found
 * one level below the starting element into a fresh model object.
 */
public class AccessKeyMetadataStaxUnmarshaller implements
        Unmarshaller<AccessKeyMetadata, StaxUnmarshallerContext> {

    public AccessKeyMetadata unmarshall(StaxUnmarshallerContext context)
            throws Exception {
        AccessKeyMetadata metadata = new AccessKeyMetadata();
        int startDepth = context.getCurrentDepth();
        // Member elements live one level below the current element (two below
        // when unmarshalling starts at the document root).
        int memberDepth = startDepth + 1;
        if (context.isStartOfDocument()) {
            memberDepth += 1;
        }
        while (true) {
            XMLEvent event = context.nextEvent();
            if (event.isEndDocument()) {
                return metadata;
            }
            if (event.isAttribute() || event.isStartElement()) {
                if (context.testExpression("UserName", memberDepth)) {
                    metadata.setUserName(StringStaxUnmarshaller.getInstance()
                            .unmarshall(context));
                }
                else if (context.testExpression("AccessKeyId", memberDepth)) {
                    metadata.setAccessKeyId(StringStaxUnmarshaller.getInstance()
                            .unmarshall(context));
                }
                else if (context.testExpression("Status", memberDepth)) {
                    metadata.setStatus(StringStaxUnmarshaller.getInstance()
                            .unmarshall(context));
                }
                else if (context.testExpression("CreateDate", memberDepth)) {
                    metadata.setCreateDate(DateStaxUnmarshaller.getInstance()
                            .unmarshall(context));
                }
            }
            else if (event.isEndElement()
                    && context.getCurrentDepth() < startDepth) {
                // Climbed back above where we started: this element is done.
                return metadata;
            }
        }
    }

    private static AccessKeyMetadataStaxUnmarshaller instance;

    /**
     * Shared instance, created lazily. NOTE(review): initialization is not
     * synchronized, matching the generated-SDK pattern.
     */
    public static AccessKeyMetadataStaxUnmarshaller getInstance() {
        if (instance == null) {
            instance = new AccessKeyMetadataStaxUnmarshaller();
        }
        return instance;
    }
}
|
apache-2.0
|
xc35/dragontoolkit
|
src/main/java/edu/drexel/cis/dragon/ir/index/IRRelation.java
|
3577
|
/* */ package edu.drexel.cis.dragon.ir.index;
/* */
/* */ import edu.drexel.cis.dragon.nlp.compare.FrequencySortable;
/* */ import edu.drexel.cis.dragon.nlp.compare.IndexSortable;
/* */ import java.io.Serializable;
/* */
/**
 * A co-occurrence relation between two indexed terms, carrying collection-wide
 * frequency and document-frequency counters plus an optional relation index
 * (-1 while unassigned). Natural ordering compares the first term index, then
 * the second term index.
 */
public class IRRelation
    implements IRSignature, IndexSortable, FrequencySortable, Comparable, Serializable
{
    private static final long serialVersionUID = 1L;
    private int first;         // index of the first term
    private int second;        // index of the second term
    private int docFrequency;  // number of documents containing this relation
    private int frequency;     // total occurrence count
    private int index;         // relation index, -1 when not yet assigned

    public IRRelation(int firstTermIndex, int secondTermIndex, int frequency)
    {
        // New relations start unindexed with a zero document frequency.
        this(-1, firstTermIndex, secondTermIndex, frequency, 0);
    }

    public IRRelation(int index, int firstTermIndex, int secondTermIndex, int frequency, int docFrequency)
    {
        this.index = index;
        this.first = firstTermIndex;
        this.second = secondTermIndex;
        this.frequency = frequency;
        this.docFrequency = docFrequency;
    }

    /** @return a field-by-field duplicate of this relation */
    public IRRelation copy()
    {
        return new IRRelation(index, first, second, frequency, docFrequency);
    }

    public int getFirstTerm()
    {
        return first;
    }

    public void setFirstTerm(int first)
    {
        this.first = first;
    }

    public int getSecondTerm()
    {
        return second;
    }

    public void setSecondTerm(int second)
    {
        this.second = second;
    }

    public void setFrequency(int freq)
    {
        this.frequency = freq;
    }

    public void addFrequency(int inc)
    {
        this.frequency += inc;
    }

    public int getFrequency()
    {
        return frequency;
    }

    public int getIndex()
    {
        return index;
    }

    public void setIndex(int index)
    {
        this.index = index;
    }

    public int getDocFrequency()
    {
        return docFrequency;
    }

    public void addDocFrequency(int inc)
    {
        this.docFrequency += inc;
    }

    public void setDocFrequency(int freq)
    {
        this.docFrequency = freq;
    }

    /**
     * Orders relations by first term index, breaking ties on the second term
     * index; returns exactly -1, 0 or 1.
     */
    public int compareTo(Object obj)
    {
        IRRelation other = (IRRelation) obj;
        if (first != other.getFirstTerm())
        {
            return (first > other.getFirstTerm()) ? 1 : -1;
        }
        if (second != other.getSecondTerm())
        {
            return (second > other.getSecondTerm()) ? 1 : -1;
        }
        return 0;
    }
}
/* Location: C:\dragontoolikt\dragontool.jar
* Qualified Name: dragon.ir.index.IRRelation
* JD-Core Version: 0.6.2
*/
|
apache-2.0
|
openegovplatform/OEPv2
|
oep-ssomgt-portlet/docroot/WEB-INF/service/org/oep/ssomgt/model/ApplicationWrapper.java
|
14135
|
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.ssomgt.model;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.model.ModelWrapper;
import java.sql.Blob;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* <p>
* This class is a wrapper for {@link Application}.
* </p>
*
* @author trungdk
* @see Application
* @generated
*/
public class ApplicationWrapper implements Application,
ModelWrapper<Application> {
public ApplicationWrapper(Application application) {
_application = application;
}
@Override
public Class<?> getModelClass() {
return Application.class;
}
@Override
public String getModelClassName() {
return Application.class.getName();
}
@Override
public Map<String, Object> getModelAttributes() {
Map<String, Object> attributes = new HashMap<String, Object>();
attributes.put("applicationId", getApplicationId());
attributes.put("userId", getUserId());
attributes.put("groupId", getGroupId());
attributes.put("companyId", getCompanyId());
attributes.put("createDate", getCreateDate());
attributes.put("modifiedDate", getModifiedDate());
attributes.put("appCode", getAppCode());
attributes.put("appName", getAppName());
attributes.put("appPin", getAppPin());
attributes.put("appUrl", getAppUrl());
attributes.put("appBigIcon", getAppBigIcon());
attributes.put("appSmallIcon", getAppSmallIcon());
attributes.put("pingTime", getPingTime());
attributes.put("sequenceNo", getSequenceNo());
attributes.put("publicKey", getPublicKey());
return attributes;
}
@Override
public void setModelAttributes(Map<String, Object> attributes) {
Long applicationId = (Long)attributes.get("applicationId");
if (applicationId != null) {
setApplicationId(applicationId);
}
Long userId = (Long)attributes.get("userId");
if (userId != null) {
setUserId(userId);
}
Long groupId = (Long)attributes.get("groupId");
if (groupId != null) {
setGroupId(groupId);
}
Long companyId = (Long)attributes.get("companyId");
if (companyId != null) {
setCompanyId(companyId);
}
Date createDate = (Date)attributes.get("createDate");
if (createDate != null) {
setCreateDate(createDate);
}
Date modifiedDate = (Date)attributes.get("modifiedDate");
if (modifiedDate != null) {
setModifiedDate(modifiedDate);
}
String appCode = (String)attributes.get("appCode");
if (appCode != null) {
setAppCode(appCode);
}
String appName = (String)attributes.get("appName");
if (appName != null) {
setAppName(appName);
}
String appPin = (String)attributes.get("appPin");
if (appPin != null) {
setAppPin(appPin);
}
String appUrl = (String)attributes.get("appUrl");
if (appUrl != null) {
setAppUrl(appUrl);
}
Blob appBigIcon = (Blob)attributes.get("appBigIcon");
if (appBigIcon != null) {
setAppBigIcon(appBigIcon);
}
Blob appSmallIcon = (Blob)attributes.get("appSmallIcon");
if (appSmallIcon != null) {
setAppSmallIcon(appSmallIcon);
}
Date pingTime = (Date)attributes.get("pingTime");
if (pingTime != null) {
setPingTime(pingTime);
}
Integer sequenceNo = (Integer)attributes.get("sequenceNo");
if (sequenceNo != null) {
setSequenceNo(sequenceNo);
}
String publicKey = (String)attributes.get("publicKey");
if (publicKey != null) {
setPublicKey(publicKey);
}
}
/**
* Returns the primary key of this application.
*
* @return the primary key of this application
*/
@Override
public long getPrimaryKey() {
return _application.getPrimaryKey();
}
/**
* Sets the primary key of this application.
*
* @param primaryKey the primary key of this application
*/
@Override
public void setPrimaryKey(long primaryKey) {
_application.setPrimaryKey(primaryKey);
}
/**
* Returns the application ID of this application.
*
* @return the application ID of this application
*/
@Override
public long getApplicationId() {
return _application.getApplicationId();
}
/**
* Sets the application ID of this application.
*
* @param applicationId the application ID of this application
*/
@Override
public void setApplicationId(long applicationId) {
_application.setApplicationId(applicationId);
}
/**
* Returns the user ID of this application.
*
* @return the user ID of this application
*/
@Override
public long getUserId() {
return _application.getUserId();
}
/**
* Sets the user ID of this application.
*
* @param userId the user ID of this application
*/
@Override
public void setUserId(long userId) {
_application.setUserId(userId);
}
/**
* Returns the user uuid of this application.
*
* @return the user uuid of this application
* @throws SystemException if a system exception occurred
*/
@Override
public java.lang.String getUserUuid()
throws com.liferay.portal.kernel.exception.SystemException {
return _application.getUserUuid();
}
/**
* Sets the user uuid of this application.
*
* @param userUuid the user uuid of this application
*/
@Override
public void setUserUuid(java.lang.String userUuid) {
_application.setUserUuid(userUuid);
}
/**
* Returns the group ID of this application.
*
* @return the group ID of this application
*/
@Override
public long getGroupId() {
return _application.getGroupId();
}
/**
* Sets the group ID of this application.
*
* @param groupId the group ID of this application
*/
@Override
public void setGroupId(long groupId) {
_application.setGroupId(groupId);
}
/**
* Returns the company ID of this application.
*
* @return the company ID of this application
*/
@Override
public long getCompanyId() {
return _application.getCompanyId();
}
/**
* Sets the company ID of this application.
*
* @param companyId the company ID of this application
*/
@Override
public void setCompanyId(long companyId) {
_application.setCompanyId(companyId);
}
/**
* Returns the create date of this application.
*
* @return the create date of this application
*/
@Override
public java.util.Date getCreateDate() {
return _application.getCreateDate();
}
/**
* Sets the create date of this application.
*
* @param createDate the create date of this application
*/
@Override
public void setCreateDate(java.util.Date createDate) {
_application.setCreateDate(createDate);
}
/**
* Returns the modified date of this application.
*
* @return the modified date of this application
*/
@Override
public java.util.Date getModifiedDate() {
return _application.getModifiedDate();
}
/**
* Sets the modified date of this application.
*
* @param modifiedDate the modified date of this application
*/
@Override
public void setModifiedDate(java.util.Date modifiedDate) {
_application.setModifiedDate(modifiedDate);
}
/**
* Returns the app code of this application.
*
* @return the app code of this application
*/
@Override
public java.lang.String getAppCode() {
return _application.getAppCode();
}
/**
* Sets the app code of this application.
*
* @param appCode the app code of this application
*/
// NOTE: every accessor in this wrapper simply delegates to the wrapped
// Application model instance (_application); no extra logic is applied.

@Override
public void setAppCode(java.lang.String appCode) {
    _application.setAppCode(appCode);
}

/**
 * Returns the app name of this application.
 *
 * @return the app name of this application
 */
@Override
public java.lang.String getAppName() {
    return _application.getAppName();
}

/**
 * Sets the app name of this application.
 *
 * @param appName the app name of this application
 */
@Override
public void setAppName(java.lang.String appName) {
    _application.setAppName(appName);
}

/**
 * Returns the app pin of this application.
 *
 * @return the app pin of this application
 */
@Override
public java.lang.String getAppPin() {
    return _application.getAppPin();
}

/**
 * Sets the app pin of this application.
 *
 * @param appPin the app pin of this application
 */
@Override
public void setAppPin(java.lang.String appPin) {
    _application.setAppPin(appPin);
}

/**
 * Returns the app url of this application.
 *
 * @return the app url of this application
 */
@Override
public java.lang.String getAppUrl() {
    return _application.getAppUrl();
}

/**
 * Sets the app url of this application.
 *
 * @param appUrl the app url of this application
 */
@Override
public void setAppUrl(java.lang.String appUrl) {
    _application.setAppUrl(appUrl);
}

/**
 * Returns the app big icon of this application.
 *
 * @return the app big icon of this application
 */
@Override
public java.sql.Blob getAppBigIcon() {
    return _application.getAppBigIcon();
}

/**
 * Sets the app big icon of this application.
 *
 * @param appBigIcon the app big icon of this application
 */
@Override
public void setAppBigIcon(java.sql.Blob appBigIcon) {
    _application.setAppBigIcon(appBigIcon);
}

/**
 * Returns the app small icon of this application.
 *
 * @return the app small icon of this application
 */
@Override
public java.sql.Blob getAppSmallIcon() {
    return _application.getAppSmallIcon();
}

/**
 * Sets the app small icon of this application.
 *
 * @param appSmallIcon the app small icon of this application
 */
@Override
public void setAppSmallIcon(java.sql.Blob appSmallIcon) {
    _application.setAppSmallIcon(appSmallIcon);
}

/**
 * Returns the ping time of this application.
 *
 * @return the ping time of this application
 */
@Override
public java.util.Date getPingTime() {
    return _application.getPingTime();
}

/**
 * Sets the ping time of this application.
 *
 * @param pingTime the ping time of this application
 */
@Override
public void setPingTime(java.util.Date pingTime) {
    _application.setPingTime(pingTime);
}

/**
 * Returns the sequence no of this application.
 *
 * @return the sequence no of this application
 */
@Override
public int getSequenceNo() {
    return _application.getSequenceNo();
}

/**
 * Sets the sequence no of this application.
 *
 * @param sequenceNo the sequence no of this application
 */
@Override
public void setSequenceNo(int sequenceNo) {
    _application.setSequenceNo(sequenceNo);
}

/**
 * Returns the public key of this application.
 *
 * @return the public key of this application
 */
@Override
public java.lang.String getPublicKey() {
    return _application.getPublicKey();
}

/**
 * Sets the public key of this application.
 *
 * @param publicKey the public key of this application
 */
@Override
public void setPublicKey(java.lang.String publicKey) {
    _application.setPublicKey(publicKey);
}
// ----- Liferay BaseModel state flags and expando support.  Each method
// ----- delegates directly to the wrapped model instance. -----

// "New" flag of the wrapped model (delegated).
@Override
public boolean isNew() {
    return _application.isNew();
}

@Override
public void setNew(boolean n) {
    _application.setNew(n);
}

// Cached-model flag of the wrapped model (delegated).
@Override
public boolean isCachedModel() {
    return _application.isCachedModel();
}

@Override
public void setCachedModel(boolean cachedModel) {
    _application.setCachedModel(cachedModel);
}

// Escaped-model flag of the wrapped model (delegated).
@Override
public boolean isEscapedModel() {
    return _application.isEscapedModel();
}

// Primary key as an opaque Serializable (delegated).
@Override
public java.io.Serializable getPrimaryKeyObj() {
    return _application.getPrimaryKeyObj();
}

@Override
public void setPrimaryKeyObj(java.io.Serializable primaryKeyObj) {
    _application.setPrimaryKeyObj(primaryKeyObj);
}

// Expando (custom attribute) bridge of the wrapped model (delegated).
@Override
public com.liferay.portlet.expando.model.ExpandoBridge getExpandoBridge() {
    return _application.getExpandoBridge();
}

@Override
public void setExpandoBridgeAttributes(
    com.liferay.portal.model.BaseModel<?> baseModel) {
    _application.setExpandoBridgeAttributes(baseModel);
}

@Override
public void setExpandoBridgeAttributes(
    com.liferay.portlet.expando.model.ExpandoBridge expandoBridge) {
    _application.setExpandoBridgeAttributes(expandoBridge);
}

@Override
public void setExpandoBridgeAttributes(
    com.liferay.portal.service.ServiceContext serviceContext) {
    _application.setExpandoBridgeAttributes(serviceContext);
}
// ----- Object-contract and persistence methods; all delegate to the
// ----- wrapped model except equals(), which compares wrapped models. -----

// clone() wraps a clone of the underlying model in a new wrapper.
@Override
public java.lang.Object clone() {
    return new ApplicationWrapper((Application)_application.clone());
}

@Override
public int compareTo(org.oep.ssomgt.model.Application application) {
    return _application.compareTo(application);
}

// hashCode() delegates to the wrapped model, which keeps it consistent
// with equals() below (both are based on _application).
@Override
public int hashCode() {
    return _application.hashCode();
}

@Override
public com.liferay.portal.model.CacheModel<org.oep.ssomgt.model.Application> toCacheModel() {
    return _application.toCacheModel();
}

@Override
public org.oep.ssomgt.model.Application toEscapedModel() {
    return new ApplicationWrapper(_application.toEscapedModel());
}

@Override
public org.oep.ssomgt.model.Application toUnescapedModel() {
    return new ApplicationWrapper(_application.toUnescapedModel());
}

@Override
public java.lang.String toString() {
    return _application.toString();
}

@Override
public java.lang.String toXmlString() {
    return _application.toXmlString();
}

@Override
public void persist()
    throws com.liferay.portal.kernel.exception.SystemException {
    _application.persist();
}

// Two wrappers are equal iff their wrapped models are equal.
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }

    if (!(obj instanceof ApplicationWrapper)) {
        return false;
    }

    ApplicationWrapper applicationWrapper = (ApplicationWrapper)obj;

    if (Validator.equals(_application, applicationWrapper._application)) {
        return true;
    }

    return false;
}

/**
 * @deprecated As of 6.1.0, replaced by {@link #getWrappedModel}
 */
public Application getWrappedApplication() {
    return _application;
}

@Override
public Application getWrappedModel() {
    return _application;
}

@Override
public void resetOriginalValues() {
    _application.resetOriginalValues();
}

// The wrapped model instance that every method above delegates to.
private Application _application;
}
|
apache-2.0
|
stweil/tesseract-ocr.github.io
|
4.00.00dev/dir_0d5c9f9724b36e17d4bc3a711a460b02.js
|
203
|
// Doxygen-generated navigation data for one source directory: each entry
// is [display name, target HTML page, page id].
var dir_0d5c9f9724b36e17d4bc3a711a460b02 =
[
    [ "CMakeFiles", "dir_2320ac9f4598d7432d93f6bdc14a853d.html", "dir_2320ac9f4598d7432d93f6bdc14a853d" ],
    [ "config_auto.h", "a06452.html", "a06452" ]
];
|
apache-2.0
|
googleads/google-ads-php
|
src/Google/Ads/GoogleAds/V8/Common/TargetRoas.php
|
5939
|
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v8/common/bidding.proto
namespace Google\Ads\GoogleAds\V8\Common;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
* An automated bidding strategy that helps you maximize revenue while
* averaging a specific target return on ad spend (ROAS).
*
* Generated from protobuf message <code>google.ads.googleads.v8.common.TargetRoas</code>
*/
class TargetRoas extends \Google\Protobuf\Internal\Message
{
    // NOTE(review): this file is generated by protoc ("DO NOT EDIT"
    // above); lasting documentation changes belong in bidding.proto.

    /**
     * Required. The desired revenue (based on conversion data) per unit of spend.
     * Value must be between 0.01 and 1000.0, inclusive.
     *
     * Generated from protobuf field <code>double target_roas = 4;</code>
     */
    protected $target_roas = null;
    /**
     * Maximum bid limit that can be set by the bid strategy.
     * The limit applies to all keywords managed by the strategy.
     * This should only be set for portfolio bid strategies.
     *
     * Generated from protobuf field <code>int64 cpc_bid_ceiling_micros = 5;</code>
     */
    protected $cpc_bid_ceiling_micros = null;
    /**
     * Minimum bid limit that can be set by the bid strategy.
     * The limit applies to all keywords managed by the strategy.
     * This should only be set for portfolio bid strategies.
     *
     * Generated from protobuf field <code>int64 cpc_bid_floor_micros = 6;</code>
     */
    protected $cpc_bid_floor_micros = null;

    /**
     * Constructor.
     *
     * @param array $data {
     *     Optional. Data for populating the Message object.
     *
     *     @type float $target_roas
     *           Required. The desired revenue (based on conversion data) per unit of spend.
     *           Value must be between 0.01 and 1000.0, inclusive.
     *     @type int|string $cpc_bid_ceiling_micros
     *           Maximum bid limit that can be set by the bid strategy.
     *           The limit applies to all keywords managed by the strategy.
     *           This should only be set for portfolio bid strategies.
     *     @type int|string $cpc_bid_floor_micros
     *           Minimum bid limit that can be set by the bid strategy.
     *           The limit applies to all keywords managed by the strategy.
     *           This should only be set for portfolio bid strategies.
     * }
     */
    public function __construct($data = NULL) {
        \GPBMetadata\Google\Ads\GoogleAds\V8\Common\Bidding::initOnce();
        parent::__construct($data);
    }

    /**
     * Required. The desired revenue (based on conversion data) per unit of spend.
     * Value must be between 0.01 and 1000.0, inclusive.
     *
     * Generated from protobuf field <code>double target_roas = 4;</code>
     * @return float
     */
    public function getTargetRoas()
    {
        return isset($this->target_roas) ? $this->target_roas : 0.0;
    }

    /**
     * Returns whether target_roas has been explicitly set.
     *
     * @return bool
     */
    public function hasTargetRoas()
    {
        return isset($this->target_roas);
    }

    /**
     * Clears target_roas, reverting it to its unset state.
     */
    public function clearTargetRoas()
    {
        unset($this->target_roas);
    }

    /**
     * Required. The desired revenue (based on conversion data) per unit of spend.
     * Value must be between 0.01 and 1000.0, inclusive.
     *
     * Generated from protobuf field <code>double target_roas = 4;</code>
     * @param float $var
     * @return $this
     */
    public function setTargetRoas($var)
    {
        GPBUtil::checkDouble($var);
        $this->target_roas = $var;

        return $this;
    }

    /**
     * Maximum bid limit that can be set by the bid strategy.
     * The limit applies to all keywords managed by the strategy.
     * This should only be set for portfolio bid strategies.
     *
     * Generated from protobuf field <code>int64 cpc_bid_ceiling_micros = 5;</code>
     * @return int|string
     */
    public function getCpcBidCeilingMicros()
    {
        return isset($this->cpc_bid_ceiling_micros) ? $this->cpc_bid_ceiling_micros : 0;
    }

    /**
     * Returns whether cpc_bid_ceiling_micros has been explicitly set.
     *
     * @return bool
     */
    public function hasCpcBidCeilingMicros()
    {
        return isset($this->cpc_bid_ceiling_micros);
    }

    /**
     * Clears cpc_bid_ceiling_micros, reverting it to its unset state.
     */
    public function clearCpcBidCeilingMicros()
    {
        unset($this->cpc_bid_ceiling_micros);
    }

    /**
     * Maximum bid limit that can be set by the bid strategy.
     * The limit applies to all keywords managed by the strategy.
     * This should only be set for portfolio bid strategies.
     *
     * Generated from protobuf field <code>int64 cpc_bid_ceiling_micros = 5;</code>
     * @param int|string $var
     * @return $this
     */
    public function setCpcBidCeilingMicros($var)
    {
        GPBUtil::checkInt64($var);
        $this->cpc_bid_ceiling_micros = $var;

        return $this;
    }

    /**
     * Minimum bid limit that can be set by the bid strategy.
     * The limit applies to all keywords managed by the strategy.
     * This should only be set for portfolio bid strategies.
     *
     * Generated from protobuf field <code>int64 cpc_bid_floor_micros = 6;</code>
     * @return int|string
     */
    public function getCpcBidFloorMicros()
    {
        return isset($this->cpc_bid_floor_micros) ? $this->cpc_bid_floor_micros : 0;
    }

    /**
     * Returns whether cpc_bid_floor_micros has been explicitly set.
     *
     * @return bool
     */
    public function hasCpcBidFloorMicros()
    {
        return isset($this->cpc_bid_floor_micros);
    }

    /**
     * Clears cpc_bid_floor_micros, reverting it to its unset state.
     */
    public function clearCpcBidFloorMicros()
    {
        unset($this->cpc_bid_floor_micros);
    }

    /**
     * Minimum bid limit that can be set by the bid strategy.
     * The limit applies to all keywords managed by the strategy.
     * This should only be set for portfolio bid strategies.
     *
     * Generated from protobuf field <code>int64 cpc_bid_floor_micros = 6;</code>
     * @param int|string $var
     * @return $this
     */
    public function setCpcBidFloorMicros($var)
    {
        GPBUtil::checkInt64($var);
        $this->cpc_bid_floor_micros = $var;

        return $this;
    }

}
|
apache-2.0
|
mldbai/mldb
|
builtin/python/python_plugin_context.cc
|
22789
|
/** python_plugin_context.cc
Francois Maillet, 6 mars 2015
Copyright (c) 2015 mldb.ai inc. All rights reserved.
This file is part of MLDB. Copyright 2015 mldb.ai inc. All rights reserved.
*/
#include "python_plugin_context.h"
#include "mldb/engine/static_content_handler.h"
#include "mldb/utils/string_functions.h"
#include "mldb/utils/for_each_line.h"
#include "mldb/vfs/fs_utils.h"
#include "mldb/vfs/filter_streams.h"
#include "mldb/base/optimized_path.h"
#include "mldb/utils/log.h"
#include "mldb/base/scope.h"
#include <regex>
#include <boost/algorithm/string.hpp>
#include <memory>
#include "frameobject.h"
#include "pointer_fix.h"
#include "capture_stream.h"
using namespace std;
namespace fs = std::filesystem;
namespace MLDB {
// Once the MLDB runtime is loaded, this replaces the weak findEnvironmentImpl in
// find_mldb_environment.cc with this one, which looks in the MldbPythonInterpreter
// for the right context for the current interpreter.
// Strong definition that overrides the weak findEnvironmentImpl in
// find_mldb_environment.cc once the MLDB runtime is loaded; it resolves
// the MldbPythonContext registered for the calling interpreter.
std::shared_ptr<MldbPythonContext>
findEnvironmentImpl()
{
    return MldbPythonInterpreter::findEnvironment();
}

namespace {

// Registry of per-interpreter environments.  Protected by the GIL;
// functions that manipulate it must only be called with the GIL held.
std::unordered_map<PyInterpreterState *, std::weak_ptr<MldbPythonContext> > environments;

} // file scope

// Used by modules to find the MLDB environment associated with our
// interpreter.  Returns nullptr when no environment is registered for
// the current interpreter (or the registered one has expired).
std::shared_ptr<MldbPythonContext>
MldbPythonInterpreter::
findEnvironment()
{
    // The current thread state identifies the owning interpreter, which
    // is the registry key.
    PyThreadState * st = PyThreadState_Get();
    ExcAssert(st);

    PyInterpreterState * interp = st->interp;

    auto it = environments.find(interp);
    if (it == environments.end())
        return nullptr;

    return it->second.lock();
}
// Construct an interpreter bound to the given MLDB context: publish the
// environment as __mldb_environment__ in __main__, install stdout/stderr
// capture, and register in the per-interpreter registry.
MldbPythonInterpreter::
MldbPythonInterpreter(std::shared_ptr<PythonContext> context)
    : context(context)
{
    // All setup must happen with this interpreter's main thread entered
    // (GIL held).
    auto enterThread = mainThread().enter();

    mldb = std::make_shared<MldbPythonContext>(context);

    main_module = boost::python::import("__main__");
    main_namespace = main_module.attr("__dict__");
    main_namespace["__mldb_environment__"]
        = boost::python::object(boost::python::ptr(mldb.get()));

    injectOutputLoggingCode(*enterThread);

    // Make this environment discoverable via findEnvironment().
    environments[interpState.get()->interp] = mldb;
}

MldbPythonInterpreter::
~MldbPythonInterpreter()
{
    // destroy() must have run (with the GIL) before destruction; tearing
    // down Python state from here would be unsafe, so abort loudly.
    if (stdOutCapture) {
        cerr << "MldbPythonInterpreter: destroy() not called "
             << "before destruction after initialization" << endl;
        abort();
    }
}

// Release all Python-side state while holding the GIL, deregister this
// interpreter from the environment registry, then run base teardown.
void
MldbPythonInterpreter::
destroy()
{
    {
        auto enterGuard = mainThread().enter();
        main_module = boost::python::object();
        main_namespace = boost::python::object();

        stdOutCapture.reset();
        stdErrCapture.reset();

        environments.erase(interpState.get()->interp);
    }

    PythonInterpreter::destroy();
}
// Translate an in-flight Python exception (signalled via
// boost::python::error_already_set) into an MLDB ScriptException,
// capturing the exception type name, message, and a stack trace built
// from the Python traceback.  Must be called with the GIL held (the
// threadToken parameter witnesses that).
ScriptException
MldbPythonInterpreter::
convertException(const EnterThreadToken & threadToken,
                 const boost::python::error_already_set & exc2,
                 const std::string & context)
{
    try {
        PyFrameObject* frame = PyEval_GetFrame();
        PyThreadState *tstate = PyThreadState_GET();

        if (NULL != tstate && NULL != tstate->frame) {
            frame = tstate->frame;
        }

        ScriptException result;

        using namespace boost::python;
        using namespace boost;

        PyObject *exc,*val,*tb;
        object formatted_list, formatted;
        // Take ownership of the pending exception triple.
        PyErr_Fetch(&exc,&val,&tb);

        if(val && PyUnicode_Check(val)) {
            result.message = Utf8String(extract<string>(val));
        }

        // Normalization may replace `val` with a proper exception
        // instance, so the message is re-extracted below.
        PyErr_NormalizeException(&exc, &val, &tb);
        handle<> hexc(exc),hval(allow_null(val)),htb(allow_null(tb));

        // Attempt to extract the type name from repr(exc), e.g.
        // "<class 'ValueError'>" -> "ValueError".
        {
            PyObject * repr = PyObject_Repr(exc);
            Scope_Exit(Py_DECREF(repr));
            std::string reprUtf8 = PyUnicode_AsUTF8(repr);

            static std::regex typePattern("<class '(.*)'>");
            std::smatch what;
            if (std::regex_match(reprUtf8, what, typePattern)) {
                result.type = what[1];
            }
        }

        // Prefer the unicode exception value as the message; otherwise
        // fall back to str(val).
        if(val && PyUnicode_Check(val)) {
            result.message = Utf8String(extract<string>(val));
        }
        else if (val) {
            PyObject * str = PyObject_Str(val);
            Scope_Exit(Py_DECREF(str));
            result.message = PyUnicode_AsUTF8(str);
        }

        // Walk the traceback, producing one ScriptStackFrame per Python
        // frame, formatted like a standard Python traceback line.
        if(htb) {
            object tbb(htb);
            result.lineNumber = extract<long>(tbb.attr("tb_lineno"));

            PyTracebackObject * ptb = (PyTracebackObject*)tb;
            while (ptb) {
                auto frame = ptb->tb_frame;
                long lineno = PyFrame_GetLineNumber(frame);
                PyObject *filename = frame->f_code->co_filename;
                const char * fn = PyUnicode_AsUTF8(filename);
                const char * func = PyUnicode_AsUTF8(frame->f_code->co_name);

                ScriptStackFrame sframe;
                sframe.scriptUri = fn;
                sframe.functionName = func;
                sframe.lineNumber = lineno;
                sframe.where = Utf8String("File \"") + fn + "\", line "
                    + std::to_string(lineno) + ", in " + func;
                result.stack.push_back(sframe);

                ptb = ptb->tb_next;
            }
        }

        if (result.type == "SyntaxError" && hval) {
            // Extra fixups required to parse the syntax error fields:
            // SyntaxError carries its location on the value object.
            object oval(hval);
            result.lineNumber = boost::python::extract<long>(oval.attr("lineno"));
            result.scriptUri = boost::python::extract<std::string>(oval.attr("filename"));
            if (oval.attr("text")) {
                result.lineContents = boost::python::extract<std::string>(oval.attr("text"));
            }
            result.columnStart = boost::python::extract<long>(oval.attr("offset"));

            PyObject * str = PyObject_Str(val);
            Scope_Exit(Py_DECREF(str));
            result.message = PyUnicode_AsUTF8(str);
        }
        else if (!result.stack.empty()) {
            // Otherwise the innermost traceback frame gives the location.
            result.where = result.stack.back().where;
            result.scriptUri = result.stack.back().scriptUri;
            result.lineNumber = result.stack.back().lineNumber;
            result.columnStart = result.stack.back().columnStart;
        }

        result.context = {context};

        return result;
    } catch (const boost::python::error_already_set & exc) {
        // Converting the exception itself raised; dump it and propagate.
        PyErr_Print();
        throw;
    }
}
/*****************************************************************************/
/* PYTHON STDOUT/ERR EXTRACTION CODE */
/*****************************************************************************/
void
MldbPythonInterpreter::
injectOutputLoggingCode(const EnterThreadToken & threadToken)
{
stdOutCapture.reset();
stdErrCapture.reset();
stdOutCapture
= setStdStream(threadToken,
[this] (const EnterThreadToken & threadToken,
std::string message)
{
this->logMessage(threadToken, "stdout",
std::move(message));
},
"stdout");
stdErrCapture
= setStdStream(threadToken,
[this] (const EnterThreadToken & threadToken,
std::string message)
{
this->logMessage(threadToken, "stderr",
std::move(message));
},
"stderr");
}
// Accumulate captured stdout/stderr text into per-stream line buffers,
// emitting one complete log entry each time a newline terminates a line.
// Non-newline messages are also echoed immediately to the context's
// stream logger.
void
MldbPythonInterpreter::
logMessage(const EnterThreadToken & threadToken,
           const char * stream, std::string message)
{
    Date ts = Date::now();
    if (message != "\n") {
        context->logToStream(stream, message);
    }

    BufferState & buffer = buffers[stream];

    // Just a newline? Flush it out
    if (message == "\n") {
        if (!buffer.empty) {
            logs.emplace_back(buffer.ts, stream, std::move(buffer.message));
            buffer.message = std::string();
            buffer.empty = true;
        }
        return;
    }

    // Message with a newline at the end? Print it including the buffer
    // contents
    if (!message.empty() && message[message.length() - 1] == '\n') {
        // Strip the trailing newline; a log entry is one full line.
        message = std::string(message, 0, message.length() - 1);
        if (!buffer.empty) {
            // Prepend buffered text and keep its (earlier) timestamp.
            message = buffer.message + message;
            ts = buffer.ts;
            buffer.empty = true;
            buffer.message = std::string();
        }
        logs.emplace_back(ts, stream, std::move(message));
    }
    else {
        // No newline. Buffer until we get one.
        if (buffer.empty) {
            buffer.ts = ts;
            buffer.message = std::move(message);
            buffer.empty = false;
        }
        else {
            buffer.message += message;
        }
    }
}
// Drain all captured interpreter output into `result`: flush any
// partially buffered (newline-less) stream content so it is not lost,
// then move every accumulated log entry into result.logs.
//
// Only the destructive (reset == true) mode is implemented; `reset`
// presumably defaults to true in the header — TODO confirm.
void
MldbPythonInterpreter::
getOutputFromPy(const EnterThreadToken & threadToken,
                ScriptOutput & result,
                bool reset)
{
    ExcAssert(reset);

    // Flush the buffers
    for (auto & p: buffers) {
        const auto & stream = p.first;
        BufferState & buf = p.second;
        if (!buf.empty) {
            logs.emplace_back(buf.ts, stream, std::move(buf.message));
            buf.empty = true;
            buf.message = std::string();
        }
    }

    result.logs.insert(result.logs.end(),
                       std::make_move_iterator(logs.begin()),
                       std::make_move_iterator(logs.end()));
    logs.clear();
}  // fixed: removed the stray ';' that followed this function body
// Wrap an already-converted ScriptException into a ScriptOutput, tagging
// it with an extra context string and draining pending log output.
// Note: `exc` is moved from; callers must not reuse it afterwards.
ScriptOutput
MldbPythonInterpreter::
exceptionToScriptOutput(const EnterThreadToken & thread,
                        ScriptException & exc,
                        const string & context)
{
    ScriptOutput result;

    result.exception = std::make_shared<ScriptException>(std::move(exc));
    result.exception->context.push_back(context);

    getOutputFromPy(thread, result);

    return result;
}

// Execute Python source in this interpreter, collecting captured
// stdout/stderr into the returned ScriptOutput.  On a Python exception,
// the exception is converted, logged, attached to the output, and the
// return code is set to 400.
ScriptOutput
MldbPythonInterpreter::
runPythonScript(const EnterThreadToken & threadToken,
                Utf8String scriptSource,
                Utf8String scriptUri,
                boost::python::object globals,
                boost::python::object locals)
{
    ScriptOutput result;

    try {
        MLDB_TRACE_EXCEPTIONS(false);
        boost::python::object obj =
            PythonThread
            ::exec(threadToken,
                   scriptSource,
                   scriptUri,
                   globals,
                   locals);

        getOutputFromPy(threadToken, result);
    }
    catch (const boost::python::error_already_set & exc) {
        ScriptException pyexc
            = convertException(threadToken, exc,
                               "Running python script");

        {
            std::unique_lock<std::mutex> guard(this->context->logMutex);
            LOG(this->context->loader) << jsonEncode(pyexc) << endl;
        }

        getOutputFromPy(threadToken, result);

        result.exception = std::make_shared<ScriptException>(std::move(pyexc));
        result.exception->context.push_back("Executing Python script");
        result.setReturnCode(400);
    }

    return result;
}
/****************************************************************************/
/* PythonRestRequest */
/****************************************************************************/
// Snapshot an incoming REST request into a Python-friendly structure:
// scalar fields are copied, query parameters become a list of
// [name, value] pairs, and HTTP headers become a mapping.
PythonRestRequest::
PythonRestRequest(const RestRequest & request,
                  RestRequestParsingContext & context)
{
    remaining     = context.remaining;
    verb          = request.verb;
    resource      = request.resource;
    payload       = request.payload;
    contentType   = request.header.contentType;
    contentLength = request.header.contentLength;

    // Query parameters, preserving their original order.
    for (const auto & param : request.params) {
        boost::python::list pair;
        pair.append(param.first);
        pair.append(param.second);
        restParams.append(pair);
    }

    // HTTP headers, keyed by header name.
    for (const auto & entry : request.header.headers) {
        headers[entry.first] = entry.second;
    }
}
// Record the JSON value and HTTP status code that the Python handler
// wants returned to the REST caller.
void
PythonRestRequest::
setReturnValue(const Json::Value & rtnVal, unsigned returnCode)
{
    this->returnValue = rtnVal;
    this->returnCode = returnCode;
}

// Convenience overload exposed to Python: return a value with HTTP 200.
void
PythonRestRequest::
setReturnValue1(const Json::Value & rtnVal)
{
    setReturnValue(rtnVal, 200);
}
/****************************************************************************/
/* PYTHON CONTEXT */
/****************************************************************************/
// Base context shared by plugins and scripts: owns the logging category
// and stream loggers, plus a reference to the hosting engine.
PythonContext::
PythonContext(const Utf8String & name, MldbEngine * engine)
    : category((name + " plugin").rawString().c_str()),
      loader("loader", category),
      stdout("stdout", category),
      stderr("stderr", category),
      engine(engine)
{
    ExcAssert(engine);
}

PythonContext::
~PythonContext()
{
}

// Log a message both to the context's logging category and to the
// in-memory log list that is returned with script output.
void
PythonContext::
log(const std::string & message)
{
    std::unique_lock<std::mutex> guard(logMutex);
    LOG(category) << message << endl;
    logs.emplace_back(Date::now(), "log", Utf8String(message));
}
// Route one line of captured interpreter output to the matching stream
// logger ("stdout" or "stderr"); other stream names are ignored.
//
// BUG FIX: strcmp() returns 0 on equality, so the original
// `if (strcmp(stream, "stdout"))` was true for every stream EXCEPT
// "stdout", and the "stderr" branch was unreachable when the stream
// actually was "stderr".  Compare against 0 explicitly.
void
PythonContext::
logToStream(const char * stream,
            const std::string & message)
{
    std::unique_lock<std::mutex> guard(logMutex);
    if (strcmp(stream, "stdout") == 0) {
        LOG(stdout) << message << endl;
    }
    else if (strcmp(stream, "stderr") == 0) {
        LOG(stderr) << message << endl;
    }
}
/****************************************************************************/
/* PYTHON PLUGIN CONTEXT */
/****************************************************************************/
// Context for a loaded Python *plugin* (long-lived, may register routes
// and documentation handlers).
PythonPluginContext::
PythonPluginContext(const Utf8String & pluginName,
                    MldbEngine * engine,
                    std::shared_ptr<LoadedPluginResource> pluginResource)
    : PythonContext(pluginName, engine),
      hasRequestHandler(false),
      pluginResource(pluginResource)
{
    // A plugin answers custom REST requests only if its package ships a
    // ROUTES element.
    hasRequestHandler =
        pluginResource->packageElementExists(PackageElement::ROUTES);
}

PythonPluginContext::
~PythonPluginContext()
{
}

// The plugin's configuration arguments, encoded as JSON.
Json::Value
PythonPluginContext::
getArgs() const
{
    return jsonEncode(pluginResource->args);
}

// Register a GET route serving static files from a directory inside the
// plugin package.  Throws MLDB::Exception if the route or directory is
// empty, or if the directory does not exist.
void
PythonPluginContext::
serveStaticFolder(const std::string & route, const std::string & dir)
{
    if(route.empty() || dir.empty()) {
        throw MLDB::Exception("Route and static directory cannot be empty "
                              "for serving static folder");
    }

    fs::path fullDir(fs::path(getPluginDirectory()) / fs::path(dir));
    if(!fs::exists(fullDir)) {
        throw MLDB::Exception("Cannot serve static folder for path that does "
                              "not exist: " + fullDir.string());
    }

    // Slashes are stripped from the route name; the rest of the URL is
    // captured as the resource to serve.
    string route_pattern = "/" + boost::replace_all_copy(route, "/", "") + "/(.*)";
    router.addRoute(Rx(route_pattern, "<resource>"),
                    "GET", "Static content",
                    engine->getStaticRouteHandler("file://" + fullDir.string()),
                    Json::Value());
}

// Install the handler that serves the plugin's documentation directory.
// Throws MLDB::Exception if the directory is empty or does not exist.
void PythonPluginContext::
serveDocumentationFolder(const std::string & dir)
{
    if(dir.empty()) {
        throw MLDB::Exception("Documentation directory cannot be empty");
    }

    fs::path fullDir(fs::path(getPluginDirectory()) / fs::path(dir));
    if(!fs::exists(fullDir)) {
        throw MLDB::Exception("Cannot serve documentation folder for path that does "
                              "not exist: " + fullDir.string());
    }

    handleDocumentation = engine->getStaticRouteHandler("file://" + fullDir.string());
}

// Root directory the plugin package was unpacked into.
std::string PythonPluginContext::
getPluginDirectory() const
{
    return pluginResource->getPluginDir().string();
}
/****************************************************************************/
/* PYTHON SCRIPT CONTEXT */
/****************************************************************************/
// Context for a one-shot Python *script* (as opposed to a plugin).
PythonScriptContext::
PythonScriptContext(const std::string & pluginName, MldbEngine * engine,
                    std::shared_ptr<LoadedPluginResource> pluginResource)
    : PythonContext(pluginName, engine),
      pluginResource(std::move(pluginResource))
{
}

PythonScriptContext::
~PythonScriptContext()
{
}

// The script's invocation arguments, encoded as JSON.
Json::Value
PythonScriptContext::
getArgs() const
{
    return jsonEncode(pluginResource->args);
}
/****************************************************************************/
/* MLDB PYTHON CONTEXT */
/****************************************************************************/
// Facade exposed to Python as __mldb_environment__.  Wraps either a
// script context or a plugin context; exactly one of the two members is
// populated, chosen by the dynamic type of the supplied context.
MldbPythonContext::
MldbPythonContext(std::shared_ptr<PythonContext> context)
{
    bool isScript
        = dynamic_pointer_cast<PythonScriptContext>(context)
        != nullptr;

    // Perform a downcast depending upon the context
    if(isScript) {
        setScript(static_pointer_cast<PythonScriptContext>(context));
    }
    else {
        setPlugin(static_pointer_cast<PythonPluginContext>(context));
    }
}

// Forward a plain-text log line to whichever context is active.
void
MldbPythonContext::
log(const std::string & message)
{
    getPyContext()->log(message);
}

// Log a JSON value, choosing a readable rendering per value type.
void
MldbPythonContext::
logJsVal(const Json::Value & jsVal)
{
    if(jsVal.isObject() || jsVal.isArray()) {
        getPyContext()->log(jsVal.toStyledString());
    }
    else if(jsVal.isIntegral()) {
        getPyContext()->log(std::to_string(jsVal.asInt()));
    }
    else if(jsVal.isDouble()) {
        getPyContext()->log(jsVal.toStringNoNewLine());
    }
    else {
        getPyContext()->log(jsVal.asString());
    }
}

// Log a Utf8String (exposed separately for the Python bindings).
void MldbPythonContext::
logUnicode(const Utf8String & msg)
{
    getPyContext()->log(msg.rawString());
}

// Return the active context, asserting the invariant that exactly one
// of script/plugin is set.
PythonContext* MldbPythonContext::
getPyContext()
{
    if(script && plugin)
        throw MLDB::Exception("Both script and plugin are defined!!");

    if(script) return script.get();
    if(plugin) return plugin.get();
    throw MLDB::Exception("Neither script or plugin is defined!");
}

void MldbPythonContext::
setPlugin(std::shared_ptr<PythonPluginContext> pluginCtx) {
    plugin = pluginCtx;
}

void MldbPythonContext::
setScript(std::shared_ptr<PythonScriptContext> scriptCtx) {
    script = scriptCtx;
}

std::shared_ptr<PythonPluginContext> MldbPythonContext::
getPlugin()
{
    return plugin;
}

std::shared_ptr<PythonScriptContext> MldbPythonContext::
getScript()
{
    return script;
}
// Set the global optimized-path policy from a case-insensitive string:
// "always", "never" or "sometimes".  Throws MLDB::Exception for any
// other value.
//
// FIX: the lowercase conversion now goes through `unsigned char` —
// calling tolower() with a plain (possibly negative) char value is
// undefined behavior for non-ASCII input bytes.
void
MldbPythonContext::
setPathOptimizationLevel(const std::string & val)
{
    std::string valLc;
    valLc.reserve(val.size());
    for (unsigned char c: val)
        valLc += static_cast<char>(std::tolower(c));

    int level = -1;
    if (valLc == "always") {
        level = OptimizedPath::ALWAYS;
    }
    else if (valLc == "never") {
        level = OptimizedPath::NEVER;
    }
    else if (valLc == "sometimes") {
        level = OptimizedPath::SOMETIMES;
    }
    else throw MLDB::Exception("Couldn't parse path optimization level '"
                               + val + "': accepted are 'always', 'never' "
                               "and 'sometimes'");

    OptimizedPath::setDefault(level);
}
// Fixed-arity wrappers around perform(), exposed to Python so callers
// can omit trailing arguments (NOTE(review): presumably because the
// binding layer does not forward C++ default arguments — confirm).
Json::Value
MldbPythonContext::
perform2(const std::string & verb,
         const std::string & resource)
{
    return perform(verb, resource);
}

Json::Value
MldbPythonContext::
perform3(const std::string & verb,
         const std::string & resource,
         const RestParams & params)
{
    return perform(verb, resource, params);
}

Json::Value
MldbPythonContext::
perform4(const std::string & verb,
         const std::string & resource,
         const RestParams & params,
         Json::Value payload)
{
    return perform(verb, resource, params, payload);
}

// Issue a synthetic in-process REST request against the MLDB engine and
// marshal the response (status code, content type, headers, body) into
// a JSON object.
Json::Value
MldbPythonContext::
perform(const std::string & verb,
        const std::string & resource,
        const RestParams & params,
        Json::Value payload,
        const RestParams & headers)
{
    HttpHeader header;
    header.verb = verb;
    header.resource = resource;
    header.queryParams = params;
    // Header names are lower-cased for lookup consistency.
    for (auto & h: headers)
        header.headers.insert({h.first.toLower().extractAscii(), h.second.extractAscii()});

    RestRequest request(header, payload.toString());
    auto connection = InProcessRestConnection::create();

    {
        // Release the GIL while the engine handles the request: it may
        // block or re-enter Python.
        auto noGil = releaseGil();
        this->getPyContext()->engine->handleRequest(*connection, request);
    }
    connection->waitForResponse();

    Json::Value result;
    result["statusCode"] = connection->responseCode();

    if (!connection->contentType().empty())
        result["contentType"] = connection->contentType();
    if (!connection->headers().empty()) {
        // Headers are returned as an array of [name, value] pairs.
        Json::Value headers(Json::ValueType::arrayValue);
        for(const pair<Utf8String, Utf8String> & h : connection->headers()) {
            Json::Value elem(Json::ValueType::arrayValue);
            elem.append(h.first);
            elem.append(h.second);
            headers.append(elem);
        }
        result["headers"] = headers;
    }
    if (!connection->response().empty())
        result["response"] = connection->response();

    return result;
}
// Python-facing wrapper: read lines with the default maxLines
// (presumably supplied as a default argument in the header — confirm).
Json::Value
MldbPythonContext::
readLines1(const std::string & path)
{
    return readLines(path);
}

// Read up to maxLines lines from the given URI into a JSON array of
// strings (the trailing-newline-stripped line text).
Json::Value
MldbPythonContext::
readLines(const std::string & path, int maxLines)
{
    filter_istream stream(path);

    Json::Value lines(Json::arrayValue);

    auto onLine = [&] (const char * line,
                       size_t length,
                       int64_t lineNum)
        {
            lines.append(line);
        };

    auto logger = getMldbLog("python");
    forEachLine(stream, onLine, logger, 1 /* numThreads */, false /* ignore exc */,
                maxLines);

    return lines;
}

// List the immediate contents of a URI: returns
// {"dirs": [subdir, ...], "objects": {uri: info, ...}}.
Json::Value
MldbPythonContext::
ls(const std::string & dir)
{
    std::vector<std::string> dirs;
    std::map<std::string, FsObjectInfo> objects;

    auto onSubdir = [&] (const std::string & dirName,
                         int depth)
        {
            dirs.push_back(dirName);
            return false;  // do not recurse into subdirectories
        };

    auto onObject = [&] (const std::string & uri,
                         const FsObjectInfo & info,
                         const OpenUriObject & open,
                         int depth)
        {
            objects[uri] = info;
            return true;
        };

    forEachUriObject(dir, onObject, onSubdir);

    Json::Value result;
    result["dirs"] = jsonEncode(dirs);
    result["objects"] = jsonEncode(objects);

    return result;
}

// The HTTP address the engine's REST interface is bound to.
string
MldbPythonContext::
getHttpBoundAddress()
{
    return this->getPyContext()->engine->getHttpBoundAddress();
}
} // namespace MLDB
|
apache-2.0
|
Principe92/CCM_AudioVisual
|
src/prince/app/ccm/delete/Activity_Manuals.java
|
5718
|
package prince.app.ccm.delete;
import java.io.File;
import java.util.ArrayList;
import prince.app.ccm.R;
import prince.app.ccm.R.array;
import prince.app.ccm.R.drawable;
import prince.app.ccm.R.id;
import prince.app.ccm.R.layout;
import prince.app.ccm.R.string;
import prince.app.ccm.tools.ActivityBase;
import prince.app.ccm.tools.ManualAdapter;
import prince.app.ccm.tools.ManualHolder;
import prince.app.ccm.tools.Tool;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
public class Activity_Manuals extends ActivityBase {
private static final String TAG = Activity_Manuals.class.getSimpleName();
private static final String ACTIVE = "active tasks";
private static String TITLE;
private ManualAdapter ca;
private ArrayList<String> mActiveTasks;
private Toolbar mToolBar;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.layout_manuals);
TITLE = getResources().getStringArray(R.array.array_navigation)[1];
// Set up the tool bar
mToolBar = (Toolbar) findViewById(R.id.my_toolbar);
setSupportActionBar(mToolBar);
RecyclerView recList = (RecyclerView) findViewById(R.id.cardList);
recList.setHasFixedSize(true);
LinearLayoutManager llm = new LinearLayoutManager(this);
llm.setOrientation(LinearLayoutManager.VERTICAL);
recList.setLayoutManager(llm);
mActiveTasks = new ArrayList<String>();
if (savedInstanceState != null) mActiveTasks = savedInstanceState.getStringArrayList(ACTIVE);
ca = new ManualAdapter(createList(), mActiveTasks, this);
recList.setAdapter(ca);
initNavigationDrawer();
}
private ArrayList<ManualHolder> createList() {
ArrayList<ManualHolder> result = new ArrayList<ManualHolder>();
String[] titles = getResources().getStringArray(R.array.array_manuals);
for (int i=0; i < titles.length; i++) {
ManualHolder ci = new ManualHolder();
ci.mManualImage = getResources().getDrawable(R.drawable.manual2); // fetchImage(i);
ci.mManualTitle = titles[i];
ci.mURL = fetchURL(i);
ci.mFileName = fetchFileName(ci.mURL);
result.add(ci);
}
return result;
}
@Override
public void onSaveInstanceState(Bundle oldState){
super.onSaveInstanceState(oldState);
oldState.putStringArrayList(ACTIVE, ca.getActiveTasks());
}
@Override
protected void onStop(){
super.onStop();
// Don't refresh the UI with new data from Async Task
Tool.EXIT_TASK = true;
}
@Override
protected void onResume(){
super.onResume();
// Refresh the UI with new data from Async Task
Tool.EXIT_TASK = false;
}
public static boolean fileExist(String name){
// create new folder or reference existing one
File file = new File(Tool.APP_DIR, name);
return (file.exists() && file.length() > 0);
}
private Drawable fetchImage(int position){
switch(position){
case 2:
return getResources().getDrawable(R.drawable.manual_2);
default:
return getResources().getDrawable(R.drawable.manual2);
}
}
private String fetchURL(int position){
Resources rs = getResources();
switch(position){
case 0:
return rs.getString(R.string.escaladeplanos_av); // escaladeplanos_av.pdf
case 1:
return rs.getString(R.string.zonas_filmacion_pm); // zonas_filmacion_pm.pdf
case 2:
return rs.getString(R.string.tomasutiles_110424); // tomasutiles_110424.pdf
case 3:
return rs.getString(R.string.salle_montaje_110213); // salle_montaje_110213.pdf
case 4:
return rs.getString(R.string.salle_montaje_110814); // salle_montaje_110814.pdf
case 5:
return rs.getString(R.string.salle_conexionado_110814); // salle_conexionado_110814.pdf
case 6:
return rs.getString(R.string.publicacion_predicaciones_web); // publicacion_predicaciones_web.pdf
case 7:
return rs.getString(R.string.flash); // index.htm
case 8:
return rs.getString(R.string.tomas_frecuentes_pm); // tomas_frecuentes_pm.pdf
case 9:
return rs.getString(R.string.glosariodecine_av); // glosariodecine_av.pdf
case 10:
return rs.getString(R.string.camara_sony); // camara_sony.pdf
case 11:
return rs.getString(R.string.introduccion_xhtml); // introduccion_xhtml.pdf
case 12:
return rs.getString(R.string.editor_casablanca); // editor_casablanca.pdf
case 13:
return rs.getString(R.string.proyector_lcd_xl5980u); // XL5980U_5900U_Esp.pdf
default:
return "";
}
}
/**
 * Extracts the file name from a URL: the text after the last '/'.
 *
 * @param url the URL (or path) to inspect
 * @return the substring after the final '/', "" when the URL contains no '/'
 *         (and also "" when the URL ends with '/')
 */
private String fetchFileName(String url){
    int lastSlash = url.lastIndexOf('/');
    // Single-argument substring is equivalent to the old
    // substring(pos + 1, url.length()) and needs no intermediate variable.
    return (lastSlash != -1) ? url.substring(lastSlash + 1) : "";
}
/* @Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_help, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
} */
@Override
public Toolbar getToolBar() {
// Returns the toolbar instance held by this activity.
return mToolBar;
}
@Override
public String getActionBarTitle() {
// Fixed title for this screen's action bar.
return TITLE;
}
@Override
public void actionBarRefresh() {
// No-op: this activity performs no work on action-bar refresh callbacks.
}
}
|
apache-2.0
|
vsch/idea-multimarkdown
|
src/main/java/com/vladsch/md/nav/util/NotNullNullableBiFunction.java
|
491
|
// Copyright (c) 2015-2020 Vladimir Schneider <vladimir.schneider@gmail.com> Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.vladsch.md.nav.util;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.function.BiFunction;
/**
 * A {@link BiFunction} specialization that pins down nullability via annotations:
 * the first argument and the result are never null, while the second argument
 * may be null.
 *
 * @param <T> type of the first (non-null) argument
 * @param <U> type of the second (nullable) argument
 * @param <R> type of the (non-null) result
 */
public interface NotNullNullableBiFunction<T, U, R> extends BiFunction<T, U, R> {
@Override
@NotNull
R apply(@NotNull T t, @Nullable U u);
}
|
apache-2.0
|
kunickiaj/datacollector
|
docs/generated/oxygen-webhelp/app/nav-links/json/concept_sh1_khh_cy-d46e122550.js
|
281
|
define({"topics" : [{"title":"Viewing Record Header Attributes","href":"datacollector\/UserGuide\/Processors\/JavaScript.html#concept_zvs_whh_cy","attributes": {"data-id":"concept_zvs_whh_cy",},"menu": {"hasChildren":false,},"tocID":"concept_zvs_whh_cy-d46e122643","topics":[]}]});
|
apache-2.0
|
hankcs/HanLP
|
plugins/hanlp_demo/hanlp_demo/mul/demo_mtl.py
|
579
|
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2020-12-31 13:51
# Demo: run HanLP's multi-task (MTL) pipeline on English, Japanese and Chinese
# sentences in a single call, then print the combined annotation document.
import hanlp
from hanlp_common.document import Document
# Multilingual tok/pos/lemma/feature/NER/SRL/dep/sdp/constituency model (XLM-R base).
# NOTE(review): hanlp.load resolves a pretrained identifier; presumably downloads
# the model on first use - confirm against the HanLP docs before running offline.
HanLP = hanlp.load(hanlp.pretrained.mtl.UD_ONTONOTES_TOK_POS_LEM_FEA_NER_SRL_DEP_SDP_CON_XLMR_BASE)
doc: Document = HanLP([
    'In 2021, HanLPv2.1 delivers state-of-the-art multilingual NLP techniques to production environment.',
    '2021年、HanLPv2.1は次世代の最先端多言語NLP技術を本番環境に導入します。',
    '2021年 HanLPv2.1为生产环境带来次世代最先进的多语种NLP技术。',
])
print(doc)          # plain dump of the Document
doc.pretty_print()  # NOTE(review): presumably a human-readable aligned rendering - see hanlp_common docs
|
apache-2.0
|
sunrain990/gemview
|
config/db/my.js
|
3809
|
/**
* Created by kevin on 15/11/27.
*/
var mysql = require('mysql');
var conn;
//var logger = require('../log/log4-js');
var os = require('os');
/**
 * (Re)creates the module-level MySQL connection.
 *
 * The target database is chosen by matching this host's IPv4 address
 * (eth1 on the dev/formal/test servers, lo0 on a local workstation).
 * Also installs: a 2-second retry when the initial connect fails, a
 * reconnect on connection errors, and an hourly keep-alive query.
 *
 * Fixes over the previous version:
 *  - the 'error' handler used an `if {...}` followed by `if {...} else {...}`,
 *    so PROTOCOL_CONNECTION_LOST triggered handleError() TWICE (duplicate
 *    connections) and every other error also reconnected via the dangling
 *    else; each error event now reconnects exactly once.
 *  - every reconnect registered an additional hourly setInterval keep-alive
 *    without clearing the previous one (timer leak); the old timer is now
 *    cleared before a new one is installed.
 */
function handleError () {
    var ipv4;
    var interfaces = os.networkInterfaces();
    // SECURITY(review): database credentials are hard-coded below; they should
    // be moved out of version control into environment variables or a config file.
    if (interfaces.eth1) {
        for (var i = 0; i < interfaces.eth1.length; i++) {
            if (interfaces.eth1[i].family == 'IPv4') {
                ipv4 = interfaces.eth1[i].address;
            }
        }
        if (ipv4 == '121.41.41.46') {
            conn = mysql.createConnection({
                host: 'rdsf39n5tp6w482946xa.mysql.rds.aliyuncs.com',
                user: 'ecp_test',
                password: 'ecp_test',
                database: 'project',
                port: 3306
            });
            console.log('dev');
        } else if (ipv4 == '120.55.90.62') {
            conn = mysql.createConnection({
                host: 'rdsvy6jrfrbi2a2.mysql.rds.aliyuncs.com',
                user: 'ecp',
                password: 'CqmygDsx2s_MYSQL',
                database: 'project',
                port: 3306
            });
            console.log('node formal');
        } else if (ipv4 == '121.41.123.2') {
            conn = mysql.createConnection({
                host: 'rdsvy6jrfrbi2a2.mysql.rds.aliyuncs.com',
                user: 'ecp',
                password: 'CqmygDsx2s_MYSQL',
                database: 'project',
                port: 3306
            });
            console.log('formal');
        } else if (ipv4 == '120.26.245.233') {
            conn = mysql.createConnection({
                host: 'rdsf39n5tp6w482946xa.mysql.rds.aliyuncs.com',
                user: 'ecp_test',
                password: 'ecp_test',
                database: 'project',
                port: 3306
            });
            console.log('test');
        }
    } else if (interfaces.lo0) {
        for (var j = 0; j < interfaces.lo0.length; j++) {
            if (interfaces.lo0[j].family == 'IPv4') {
                ipv4 = interfaces.lo0[j].address;
            }
        }
        if (ipv4 == '127.0.0.1') {
            conn = mysql.createConnection({
                host: 'localhost',
                user: 'root',
                password: 'root',
                database: 'project',
                port: 3306
            });
            console.log('localhost');
        }
    }
    // If the initial connect fails, retry the whole setup in 2 seconds.
    conn.connect(function (err) {
        if (err) {
            console.log('error when connecting to db:', err);
            setTimeout(handleError, 2000);
        }
    });
    conn.on('error', function (err) {
        console.log('db error', err);
        // Reconnect exactly once per error event. This covers
        // PROTOCOL_CONNECTION_LOST, PROTOCOL_ENQUEUE_AFTER_FATAL_ERROR and
        // everything else - matching the previous net behavior minus the
        // duplicate reconnect on connection loss.
        handleError();
    });
    // Hourly keep-alive query; clear any timer left by a previous invocation
    // so reconnects don't accumulate intervals.
    if (handleError._keepAlive) {
        clearInterval(handleError._keepAlive);
    }
    handleError._keepAlive = setInterval(function () {
        conn.query('SELECT 1 + 1 AS solution', function (err, rows, fields) {
            if (err) throw err; // unchanged: a failing keep-alive is treated as fatal
            console.log('The solution is: ', rows[0].solution);
        });
    }, 3600000);
    console.log('mysql ready!');
}
// Build the connection once at module load.
handleError();
// NOTE(review): `project` captures the value of `conn` at export time; after a
// reconnect handleError() rebinds the module-local `conn`, but this exported
// object still references the original connection object - verify how callers
// obtain the live connection.
var Mysql = {
project:conn
};
module.exports = Mysql;
|
apache-2.0
|
dmilos/color
|
src/color/hsv/set/green.hpp
|
1128
|
#ifndef color_hsv_set_green
#define color_hsv_set_green
// ::color::set::green( c )
#include "../category.hpp"
#include "../../rgb/place/place.hpp"
#include "../../rgb/akin/hsv.hpp"
#include "../../rgb/trait/component.hpp"
namespace color
{
namespace set
{

//! Sets the green component of an HSV color by round-tripping through the
//! akin RGB model: convert to RGB, overwrite the green channel, convert back.
template< typename tag_name >
inline
void
green
 (
  ::color::model< ::color::category::hsv< tag_name > > & color_parameter
  ,typename ::color::trait::component< typename ::color::akin::rgb< ::color::category::hsv< tag_name > >::akin_type >::model_type component_parameter
 )
 {
  typedef ::color::category::hsv< tag_name >                      category_type;
  typedef typename ::color::akin::rgb< category_type >::akin_type akin_type;
  // Compile-time index of the green channel inside the akin RGB model.
  enum { green_position = ::color::place::_internal::green< akin_type >::position_enum };
  ::color::model< akin_type > rgb_model( color_parameter );
  rgb_model.template set< green_position >( component_parameter );
  color_parameter = rgb_model;
 }

}
}
#endif
|
apache-2.0
|
asebak/rapbattleonline
|
WindowsPhone/Classes/CRUD/Delete.cs
|
1040
|
using System;
using System.Net;
using Common.Types;
using Newtonsoft.Json;
namespace FreestyleOnline___WP.Classes.CRUD
{
public class Delete
{
/// <summary>
/// Initializes a new instance of the <see cref="Delete" /> class and
/// immediately issues an asynchronous HTTP DELETE for the given resource.
/// (Previous doc referenced the wrong class, <c>Post</c>.)
/// </summary>
/// <param name="controllerName">Name of the Web API controller.</param>
/// <param name="id">The identifier of the resource to delete.</param>
/// <param name="e">Optional handler invoked when the upload completes.</param>
/// <param name="actionName">Optional action name; when empty, the plain delete route is used.</param>
public Delete(string controllerName, int id, UploadStringCompletedEventHandler e = null, string actionName = "")
{
var api = new WebApi(controllerName);
// Shared cookie container (RapClientCookie.Current) - presumably so the
// DELETE carries the existing session cookies; verify against RapWebClient.
var webClient = new RapWebClient {CookieContainer = RapClientCookie.Current};
webClient.UploadStringCompleted += e;
// Fire-and-forget async DELETE with an empty body.
webClient.UploadStringAsync(string.IsNullOrEmpty(actionName) ?
new Uri(api.Delete(id)) :
new Uri(api.DeleteByAction(id, actionName)), "DELETE", "");
}
}
}
|
apache-2.0
|
gjhiggins/pyralod
|
run_tests.py
|
2490
|
#!/home/gjh/.virtualens/py3work/bin/python
# -*- coding: utf-8 -*-
# NOTE(review): the shebang points at a machine-specific path (and 'virtualens'
# looks like a typo for 'virtualenvs') - verify before relying on direct execution.
"""
Testing with Nose
=================
This test runner uses Nose for test discovery and running. It uses the argument
spec of Nose, but with some options pre-set. To begin with, make sure you have
Nose installed, e.g.:
$ sudo easy_install nose
For daily test runs, use:
$ ./run_tests.py
If you supply attributes, the default ones defined in ``DEFAULT_ATTRS`` will be
ignored. So to run e.g. all tests marked ``slowtest`` or ``non_standard_dep``,
do:
$ ./run_tests.py -a slowtest,non_standard_dep
See <http://code.google.com/p/python-nose/> for furher details. An excellent
article is also available at <http://ivory.idyll.org/articles/nose-intro.html>.
Note that this is just a convenience script. You can use ``nosetests`` directly
if it's on $PATH, with the difference that you have to supply the options
pre-set here manually.
Coverage
========
If ``coverage.py`` is placed in $PYTHONPATH, it can be used to create coverage
information (using the built-in coverage plugin of Nose) if the default
option "--with-coverage" is supplied (which also enables some additional
coverage options).
See <http://nedbatchelder.com/code/modules/coverage.html> for details.
"""
# Options always appended to the nose invocation.
NOSE_ARGS = [
    '--with-doctest',
    '--doctest-extension=.doctest',
    '--doctest-tests',
    # '--with-EARL',
]
# Extra options enabled only when --with-coverage is requested AND importable.
COVERAGE_EXTRA_ARGS = [
    '--cover-package=pyralod',
    '--cover-inclusive',
]
DEFAULT_ATTRS = []  # ['!known_issue', '!sparql']
DEFAULT_DIRS = ['test', 'pyralod']
if __name__ == '__main__':
    from sys import argv, exit, stderr
    # Nose is mandatory; bail out with instructions when missing.
    try: import nose
    except ImportError:
        print("""\
Requires Nose. Try:
$ sudo easy_install nose
Exiting. """, file=stderr); exit(1)
    if '--with-coverage' in argv:
        # Coverage is optional: silently drop the flag when coverage.py is absent.
        try: import coverage
        except ImportError:
            print("No coverage module found, skipping code coverage.", file=stderr)
            argv.remove('--with-coverage')
        else:
            NOSE_ARGS += COVERAGE_EXTRA_ARGS
    # Apply the default attribute filter only when the user supplied none.
    if True not in [a.startswith('-a') or a.startswith('--attr=') for a in argv]:
        argv.append('--attr=' + ','.join(DEFAULT_ATTRS))
    # No positional args given: fall back to the default test directories.
    if not [a for a in argv[1:] if not a.startswith('-')]:
        argv += DEFAULT_DIRS # since nose doesn't look here by default..
    finalArgs = argv + NOSE_ARGS
    print("Running nose with:", " ".join(finalArgs[1:]))
    nose.run_exit(argv=finalArgs)
|
apache-2.0
|
lsimons/phloc-schematron-standalone
|
phloc-commons/src/test/java/com/phloc/commons/supplementary/test/benchmark/BenchmarkSynchronizedVsLock.java
|
6480
|
/**
* Copyright (C) 2006-2013 phloc systems
* http://www.phloc.com
* office[at]phloc[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.phloc.commons.supplementary.test.benchmark;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.phloc.commons.concurrent.ManagedExecutorService;
import com.phloc.commons.system.SystemHelper;
/**
* Check if there is any difference between the different locking methods.
*/
public final class BenchmarkSynchronizedVsLock extends AbstractBenchmarkTask
{
// Utility class: only the static entry point is used.
private BenchmarkSynchronizedVsLock ()
{}
public static void main (final String [] aArgs) throws Exception
{
logSystemInfo ();
findWhetherSynchronizedOrLockAreFaster ();
}
// Returns the task itself for a single thread; otherwise a wrapper that submits
// the same task instance to nThreads pooled threads and blocks until all finish.
private static Runnable _getRunnable (final int nThreads, final BaseClass aObj)
{
if (nThreads == 1)
return aObj;
return new Runnable ()
{
public void run ()
{
final ExecutorService aExecSvc = Executors.newFixedThreadPool (nThreads);
for (int i = 0; i < nThreads; ++i)
aExecSvc.submit (aObj);
new ManagedExecutorService (aExecSvc).shutdownAndWaitUntilAllTasksAreFinished ();
}
};
}
// Runs each locking strategy with 1 .. 2*CPU-count concurrent threads and
// prints the measured time per strategy and thread count.
private static void findWhetherSynchronizedOrLockAreFaster ()
{
for (int i = 1; i <= SystemHelper.getNumberOfProcessors () * 2; ++i)
{
double dTime = benchmarkTask (_getRunnable (i, new UseSynchronizedMethod ()));
System.out.println ("Time using synchronized method[" + i + "]: " + dTime + " ns");
dTime = benchmarkTask (_getRunnable (i, new UseSynchronizedBlock ()));
System.out.println ("Time using synchronized block[" + i + "]: " + dTime + " ns");
dTime = benchmarkTask (_getRunnable (i, new UseUnfairLock ()));
System.out.println ("Time using unfair Lock[" + i + "]: " + dTime + " ns");
dTime = benchmarkTask (_getRunnable (i, new UseFairLock ()));
System.out.println ("Time using fair Lock[" + i + "]: " + dTime + " ns");
dTime = benchmarkTask (_getRunnable (i, new UseUnfairReadLock ()));
System.out.println ("Time using unfair ReadWriteLock.readLock ()[" + i + "]: " + dTime + " ns");
dTime = benchmarkTask (_getRunnable (i, new UseFairReadLock ()));
System.out.println ("Time using fair ReadWriteLock.readLock ()[" + i + "]: " + dTime + " ns");
dTime = benchmarkTask (_getRunnable (i, new UseUnfairWriteLock ()));
System.out.println ("Time using unfair ReadWriteLock.writeLock ()[" + i + "]: " + dTime + " ns");
dTime = benchmarkTask (_getRunnable (i, new UseFairWriteLock ()));
System.out.println ("Time using fair ReadWriteLock.writeLock ()[" + i + "]: " + dTime + " ns");
}
}
// Shared workload: decrement a counter 10000 times. Each subclass guards
// performAction () with a different synchronization primitive.
// NOTE(review): m_nRuns is read in run () without holding the subclass's lock,
// and when several pool threads share one instance they may observe stale
// values (field is not volatile). Presumably acceptable noise for a
// micro-benchmark - confirm if exact iteration counts matter.
protected abstract static class BaseClass implements Runnable
{
private int m_nRuns = 10000;
public BaseClass ()
{}
protected final void performAction ()
{
m_nRuns--;
}
public abstract void performThreadSafeAction ();
public final void run ()
{
while (m_nRuns > 0)
performThreadSafeAction ();
}
}
// Strategy: synchronized instance method.
private static final class UseSynchronizedMethod extends BaseClass
{
@Override
public synchronized void performThreadSafeAction ()
{
performAction ();
}
}
// Strategy: synchronized (this) block.
private static final class UseSynchronizedBlock extends BaseClass
{
@Override
public void performThreadSafeAction ()
{
synchronized (this)
{
performAction ();
}
}
}
// Strategy: non-fair ReentrantLock.
private static final class UseUnfairLock extends BaseClass
{
private final Lock m_aLock = new ReentrantLock (false);
@Override
public void performThreadSafeAction ()
{
m_aLock.lock ();
try
{
performAction ();
}
finally
{
m_aLock.unlock ();
}
}
}
// Strategy: fair ReentrantLock.
private static final class UseFairLock extends BaseClass
{
private final Lock m_aLock = new ReentrantLock (true);
@Override
public void performThreadSafeAction ()
{
m_aLock.lock ();
try
{
performAction ();
}
finally
{
m_aLock.unlock ();
}
}
}
// Strategy: read lock of a non-fair ReentrantReadWriteLock.
private static final class UseUnfairReadLock extends BaseClass
{
private final ReadWriteLock m_aRWLock = new ReentrantReadWriteLock (false);
@Override
public void performThreadSafeAction ()
{
m_aRWLock.readLock ().lock ();
try
{
performAction ();
}
finally
{
m_aRWLock.readLock ().unlock ();
}
}
}
// Strategy: read lock of a fair ReentrantReadWriteLock.
private static final class UseFairReadLock extends BaseClass
{
private final ReadWriteLock m_aRWLock = new ReentrantReadWriteLock (true);
@Override
public void performThreadSafeAction ()
{
m_aRWLock.readLock ().lock ();
try
{
performAction ();
}
finally
{
m_aRWLock.readLock ().unlock ();
}
}
}
// Strategy: write lock of a non-fair ReentrantReadWriteLock.
private static final class UseUnfairWriteLock extends BaseClass
{
private final ReadWriteLock m_aRWLock = new ReentrantReadWriteLock (false);
@Override
public void performThreadSafeAction ()
{
m_aRWLock.writeLock ().lock ();
try
{
performAction ();
}
finally
{
m_aRWLock.writeLock ().unlock ();
}
}
}
// Strategy: write lock of a fair ReentrantReadWriteLock.
private static final class UseFairWriteLock extends BaseClass
{
private final ReadWriteLock m_aRWLock = new ReentrantReadWriteLock (true);
@Override
public void performThreadSafeAction ()
{
m_aRWLock.writeLock ().lock ();
try
{
performAction ();
}
finally
{
m_aRWLock.writeLock ().unlock ();
}
}
}
}
|
apache-2.0
|
asakusafw/asakusafw
|
testing-project/asakusa-test-driver/src/test/java/com/asakusafw/testdriver/testing/compiler/MockCompilerToolkit.java
|
4847
|
/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.testdriver.testing.compiler;
import java.io.IOException;
import java.util.function.Supplier;
import com.asakusafw.testdriver.compiler.ArtifactMirror;
import com.asakusafw.testdriver.compiler.CompilerConfiguration;
import com.asakusafw.testdriver.compiler.CompilerSession;
import com.asakusafw.testdriver.compiler.CompilerToolkit;
import com.asakusafw.testdriver.compiler.FlowPortMap;
import com.asakusafw.testdriver.compiler.basic.BasicCompilerConfiguration;
import com.asakusafw.vocabulary.flow.FlowDescription;
/**
* Mock implementation of {@link CompilerToolkit}.
* @since 0.9.0
*/
public class MockCompilerToolkit implements CompilerToolkit {
private Supplier<? extends FlowPortMap> portMaps = MockFlowPortMap::new;
// Default compilers fail fast: a test that forgets to configure one gets an
// AssertionError instead of a silent pass.
private ClassCompiler batchCompiler = (conf, aClass) -> {
throw new AssertionError();
};
private ClassCompiler jobflowCompiler = (conf, aClass) -> {
throw new AssertionError();
};
private FlowCompiler flowCompiler = (conf, flow, ports) -> {
throw new AssertionError();
};
/**
 * Sets a {@link FlowPortMap} supplier.
 * @param value the supplier
 * @return this
 */
public MockCompilerToolkit withPortMap(Supplier<? extends FlowPortMap> value) {
this.portMaps = value;
return this;
}
/**
 * Sets a compiler for batch classes.
 * @param value the compiler
 * @return this
 */
public MockCompilerToolkit withBatch(ClassCompiler value) {
this.batchCompiler = value;
return this;
}
/**
 * Sets a compiler for jobflow classes.
 * (Previous doc said "batch classes" - copy-paste error.)
 * @param value the compiler
 * @return this
 */
public MockCompilerToolkit withJobflow(ClassCompiler value) {
this.jobflowCompiler = value;
return this;
}
/**
 * Sets a compiler for flow descriptions.
 * (Previous doc said "batch classes" - copy-paste error.)
 * @param value the compiler
 * @return this
 */
public MockCompilerToolkit withFlow(FlowCompiler value) {
this.flowCompiler = value;
return this;
}
// Delegation points used by MockCompilerSession.
ArtifactMirror doCompileBatch(CompilerConfiguration configuration, Class<?> batchClass) throws IOException {
return batchCompiler.compile(configuration, batchClass);
}
ArtifactMirror doCompileJobflow(CompilerConfiguration configuration, Class<?> jobflowClass) throws IOException {
return jobflowCompiler.compile(configuration, jobflowClass);
}
ArtifactMirror doCompileFlow(
CompilerConfiguration configuration,
FlowDescription flow, FlowPortMap portMap) throws IOException {
return flowCompiler.compile(configuration, flow, portMap);
}
@Override
public String getName() {
return "mock";
}
@Override
public FlowPortMap newFlowPortMap() {
return portMaps.get();
}
@Override
public CompilerConfiguration newConfiguration() {
return new BasicCompilerConfiguration();
}
@Override
public CompilerSession newSession(CompilerConfiguration configuration) throws IOException {
return new MockCompilerSession(this, configuration);
}
/**
 * Compiler for individual DSL classes.
 * @since 0.9.0
 */
@FunctionalInterface
public interface ClassCompiler {
/**
 * Compiles the target class.
 * @param configuration the current configuration
 * @param aClass the target class
 * @return the compiled artifact
 * @throws IOException if failed
 */
ArtifactMirror compile(CompilerConfiguration configuration, Class<?> aClass) throws IOException;
}
/**
 * Compiler for individual flows.
 * @since 0.9.0
 */
@FunctionalInterface
public interface FlowCompiler {
/**
 * Compiles the target class.
 * @param configuration the current configuration
 * @param flow the target flow
 * @param portMap the port map
 * @return the compiled artifact
 * @throws IOException if failed
 */
ArtifactMirror compile(
CompilerConfiguration configuration,
FlowDescription flow,
FlowPortMap portMap) throws IOException;
}
}
|
apache-2.0
|
tolo/JServer
|
src/java/com/teletalk/jserver/tcp/http/HttpRequest.java
|
10518
|
/*
* Copyright 2007 the project originators.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
TODO: getMultipartReader()
TODO: PUT support
*/
package com.teletalk.jserver.tcp.http;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.StringTokenizer;
/**
* Class for representing an HTTP request sent by a client to the server. This class currently handles
* GET, HEAD and POST (also multipart) requests.
*
* @author Tobias Löfstrand
*
* @since The beginning
*
* @see com.teletalk.jserver.tcp.http.HttpMessage
* @see com.teletalk.jserver.tcp.http.HttpResponse
*/
public class HttpRequest extends HttpMessage
{
private String method = null; // e.g. "GET"
private String path = null; // e.g. "/index.html"
private String queryString = null; // e.g. "p1=1&p2=2"
private String version = null; // e.g. "HTTP/1.0"
private HttpRequestData requestData = null; // lazily built by readRequestBody()
private final InputStream requestReader; // null for locally constructed (outgoing) requests
private boolean requestBodyRead = false;
/**
 * Parse a request instance from the data received on the socket
 * to a connected client.
 *
 * @param requestReader a buffered reader to read the request from.
 *
 * @throws java.io.IOException If communication errors should occur.
 */
public HttpRequest(InputStream requestReader) throws IOException
{
// Ensure buffering before the message head is parsed.
if( !(requestReader instanceof BufferedInputStream) )
{
requestReader = new BufferedInputStream(requestReader);
}
this.requestReader = requestReader;
super.readMessage(this.requestReader);
}
/**
 * Create a new request that can be sent to an HTTP server.
 *
 * @param method The method for the request (i.e. "GET", "HEAD" or "POST", etc).
 * @param path The path for the request (i.e. "/resource").
 */
public HttpRequest(String method, String path)
{
this.requestReader = null;
this.method = method.toUpperCase().trim();
this.path = path;
this.version = "HTTP/1.0";
this.requestData = null;
}
/**
 * Create a new request that can be sent to an HTTP server.
 *
 * @param method The method for the request (i.e. "GET", "HEAD" or "POST", etc).
 * @param path The path for the request (i.e. "/resource").
 * @param version The version of the request (i.e. "HTTP/1.0").
 */
public HttpRequest(String method, String path, String version)
{
this.requestReader = null;
this.method = method.toUpperCase().trim();
this.path = path;
this.version = version;
this.requestData = null;
}
/**
 * Called when reading a HTTP message to parse the start line (request or status line).
 * Only GET, HEAD and POST are accepted; any other method raises an IOException.
 *
 * @param startLine the HTTP message start line.
 */
protected void parseMessageStartLine(final String startLine) throws IOException
{
// Create a reader and a tokenizer on the first line
StringTokenizer st = new StringTokenizer(startLine, HttpConstants.SP, false);
// First comes the method (i.e. GET, HEAD or POST)
method = st.nextToken().toUpperCase();
if (method != null)
method = method.trim();
else
method = "";
if (!method.equals(REQUEST_METHOD_GET)
&& !method.equals(REQUEST_METHOD_HEAD)
&& !method.equals(REQUEST_METHOD_POST))
throw new IOException(
"Unable to handle request method '" + method + "'.");
// Then the path
this.path = st.nextToken();
// Make sure that unquoted spaces doesn't mess it up:
// all tokens except the last one are re-joined into the path as "%20".
while (st.countTokens() > 1)
this.path = this.path + "%20" + st.nextToken();
int index;
// Get query string
if ((index = this.path.indexOf('?')) != -1)
{
this.queryString = this.path.substring(index + 1);
this.path = this.path.substring(0, index);
}
// Last in this line comes the version (e.g. HTTP/1.0)
// NOTE(review): assumes a version token is present - an HTTP/0.9 style
// request line ("GET /path") would make nextToken() throw
// NoSuchElementException here. Confirm callers only see HTTP/1.x clients.
this.version = st.nextToken();
}
/**
 * Parses the parameters of this http request, if it has not already been read.
 *
 * @exception IOException if an error occurs whie reading the request.
 */
public void readRequestBody() throws IOException
{
// Only parse once, and only for requests read from a stream.
if (!requestBodyRead && (requestReader != null))
{
requestBodyRead = true;
if (method.equals(REQUEST_METHOD_GET) || method.equals(REQUEST_METHOD_HEAD))
{
// GET/HEAD: parameters come exclusively from the query string.
if (this.queryString != null)
{
this.requestData = new HttpRequestData(this.queryString);
}
else
{
this.requestData = new HttpRequestData();
}
}
else if (this.method.equals(REQUEST_METHOD_POST))
{
//Parse arguments from the body of the request
if (hasHeader(CONTENT_LENGTH_HEADER_KEY))
{
// NOTE(review): no null check on the Content-Type header - a POST with
// Content-Length but no Content-Type would NPE on toLowerCase() below.
String cType =
getHeaderSingleValue(CONTENT_TYPE_HEADER_KEY);
String cTypeLowerCase = cType.toLowerCase();
//Check if request is multipart
if (cTypeLowerCase.startsWith(CONTENT_TYPE_MULTIPART))
{
// Locate "boundary" and the '=' that follows it inside the
// Content-Type value; the boundary text is taken from the
// original-case header value.
int boundaryIndex =
cTypeLowerCase.indexOf(
CONTENT_TYPE_MULTIPART_BOUNDARY);
if (boundaryIndex > 0) // Found boundary
{
int equalsSignIndex =
cTypeLowerCase.indexOf(
KEY_VALUE_SEPARATOR,
boundaryIndex
+ CONTENT_TYPE_MULTIPART_BOUNDARY
.length());
if (equalsSignIndex > 0) // Found equals sign
{
String boundary =
cType.substring(equalsSignIndex + 1).trim();
String sLength =
getHeaderSingleValue(CONTENT_LENGTH_HEADER_KEY);
// NOTE(review): parseInt throws NumberFormatException (unchecked)
// on a malformed Content-Length - confirm this is handled upstream.
int cLength = Integer.parseInt(sLength);
try
{
this.requestData =
new HttpRequestData(this.queryString,
requestReader,
cLength,
cType,
boundary);
}
catch (IOException e)
{
throw new IOException(
"Error while parsing request ("
+ toString()
+ ") - "
+ e.toString()
+ ".");
}
}
else
throw new IOException(
"Unable to parse multipart boundary from request - "
+ toString()
+ ".");
}
else
throw new IOException(
"Unable to parse multipart boundary from request - "
+ toString()
+ ".");
}
else //Not multipart (for instance application/x-www-form-urlencoded)
{
String sLength =
getHeaderSingleValue(CONTENT_LENGTH_HEADER_KEY);
int cLength = Integer.parseInt(sLength);
this.requestData =
new HttpRequestData(this.queryString, requestReader, cLength, cType);
}
}
else //Currently, POST requests without Content-Length header field specified are not handled.
{
throw new IOException(
"No 'Content-Length' header specified in the following request: "
+ this.toString()
+ ".");
//this.requestData = new HttpRequestData();
}
}
else
{
this.requestData = new HttpRequestData();
}
// Consume any extra CRLFs in request caused by buggy client implementations:
while (requestReader.available() > 0)
requestReader.read();
}
}
/**
 * Tests if this is a valid http request, e.g if at least the method, path and version fields are specified.
 *
 * @return <code>true</code> if this is a valid http request, otherwise <code>false</code>.
 */
public boolean isValid()
{
return (this.method != null)
&& (this.path != null)
&& (this.version != null);
}
/**
 * Get the method for this request.
 *
 * @return The method for the request
 */
public String getMethod()
{
return this.method;
}
/**
 * Get the path for this request (excluding the query string).
 *
 * @return the path for the request.
 */
public String getPath()
{
return this.path;
}
/**
 * Get the query string of this request, or <code>null</code> if the request didn't contain any query string.
 *
 * @return the query string of this request, or <code>null</code> if the request didn't contain any query string.
 */
public String getQueryString()
{
return this.queryString;
}
/**
 * Get the version for this request.
 *
 * @return The version for the request.
 */
public String getVersion()
{
return this.version;
}
/**
 * Parses and gets the request data (parameters) for this request.
 * Lazily invokes {@link #readRequestBody()}; any IOException raised there is
 * rethrown as an unchecked RuntimeException carrying the stack trace text.
 *
 * @return the request data for this request.
 */
public HttpRequestData getRequestData()
{
if (requestData == null)
{
try
{
this.readRequestBody();
}
catch (IOException e)
{
StringWriter strWriter = new StringWriter();
e.printStackTrace(new PrintWriter(strWriter));
throw new RuntimeException(strWriter.toString());
}
}
return requestData;
}
/**
 * Gets the inputstream used to read this request.
 *
 * @return the inputstream used to read this request.
 */
public InputStream getRequestInputStream()
{
return this.requestReader;
}
/**
 * Gets the request message (excluding the body) as a string formatted according to the HTTP specification.
 *
 * @return the request message as a string.
 *
 * @since 1.2
 */
public String getRequestMessage()
{
String formattedHeaders = super.formatHeaders();
StringBuffer reqLine = new StringBuffer();
reqLine.append(this.method);
reqLine.append(" ");
reqLine.append(this.path);
if (this.queryString != null)
{
reqLine.append("?");
reqLine.append(this.queryString);
}
reqLine.append(" ");
reqLine.append(this.version);
reqLine.append("\r\n");
return reqLine.toString() + formattedHeaders + "\r\n";
}
/**
 * The representation of the HttpRequest. The returned string will not contain the body of the
 * request, if any.
 *
 * @return Textual description of the request.
 */
public String toString()
{
return this.getRequestMessage();
}
}
|
apache-2.0
|
BlesseNtumble/GalaxySpace
|
src/main/java/galaxyspace/systems/BarnardsSystem/moons/barnarda_c1/dimension/WorldProviderBarnarda_C1_WE.java
|
9416
|
package galaxyspace.systems.BarnardsSystem.moons.barnarda_c1.dimension;
import java.util.List;
import javax.annotation.Nullable;
import asmodeuscore.api.dimension.IProviderFog;
import asmodeuscore.api.dimension.IProviderFreeze;
import asmodeuscore.core.astronomy.dimension.world.worldengine.WE_BiomeProvider;
import asmodeuscore.core.astronomy.dimension.world.worldengine.WE_ChunkProviderSpace;
import asmodeuscore.core.astronomy.dimension.world.worldengine.WE_WorldProviderSpace;
import asmodeuscore.core.astronomy.dimension.world.worldengine.biome.WE_BaseBiome;
import asmodeuscore.core.utils.worldengine.WE_Biome;
import asmodeuscore.core.utils.worldengine.WE_ChunkProvider;
import asmodeuscore.core.utils.worldengine.standardcustomgen.WE_BiomeLayer;
import asmodeuscore.core.utils.worldengine.standardcustomgen.WE_CaveGen;
import asmodeuscore.core.utils.worldengine.standardcustomgen.WE_RavineGen;
import asmodeuscore.core.utils.worldengine.standardcustomgen.WE_TerrainGenerator;
import galaxyspace.core.util.GSDimensions;
import galaxyspace.core.util.GSUtils;
import galaxyspace.systems.BarnardsSystem.BarnardsSystemBodies;
import galaxyspace.systems.BarnardsSystem.planets.barnarda_c.dimension.sky.SkyProviderBarnarda_C;
import micdoodle8.mods.galacticraft.api.galaxies.CelestialBody;
import micdoodle8.mods.galacticraft.api.vector.Vector3;
import micdoodle8.mods.galacticraft.core.client.CloudRenderer;
import micdoodle8.mods.galacticraft.core.util.ConfigManagerCore;
import net.minecraft.block.Block;
import net.minecraft.init.Blocks;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.DimensionType;
import net.minecraft.world.biome.BiomeProvider;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.ChunkPrimer;
import net.minecraft.world.gen.IChunkGenerator;
import net.minecraftforge.client.IRenderHandler;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
public class WorldProviderBarnarda_C1_WE extends WE_WorldProviderSpace implements IProviderFreeze, IProviderFog{
@Override
public double getHorizon() {
// Y level at which the horizon is rendered in this dimension.
return 44.0D;
}
@Override
public float getFallDamageModifier() {
// Scale factor applied to fall damage in this dimension.
return 0.16F;
}
@Override
public double getFuelUsageMultiplier() {
// Fuel usage multiplier - presumably for rocket launches from this body
// (see Galacticraft's WorldProviderSpace contract).
return 0.8;
}
@Override
public double getMeteorFrequency() {
// Relative frequency of meteor events - confirm scale against the API docs.
return 0.9;
}
@Override
public float getSoundVolReductionAmount() {
// Smallest positive float: effectively no sound volume reduction here.
return Float.MIN_VALUE;
}
@Override
public boolean canSnowAt(BlockPos pos, boolean checkLight)
{
// Snow may form at any position; both arguments are ignored.
return true;
}
@Override
public CelestialBody getCelestialBody() {
// This provider belongs to the Barnarda C1 moon of the Barnard's system.
return BarnardsSystemBodies.Barnarda_C1;
}
@Override
public Class<? extends IChunkGenerator> getChunkProviderClass() {
// Terrain generation is delegated to the WorldEngine chunk provider.
return WE_ChunkProviderSpace.class;
}
@Override
public Class<? extends BiomeProvider> getBiomeProviderClass() {
// Biome placement is delegated to the WorldEngine biome provider.
return WE_BiomeProvider.class;
}
@Override
@SideOnly(Side.CLIENT)
public float getCloudHeight()
{
// Clouds render at Y=180 in this dimension (client only).
return 180.0F;
}
@Override
public boolean canRespawnHere()
{
// Players may respawn inside this dimension.
return true;
}
@Nullable
@SideOnly(Side.CLIENT)
public float[] calcSunriseSunsetColors(float celestialAngle, float partialTicks)
{
// Delegates to the vanilla gradient; the commented block below is a bespoke
// sunrise/sunset color curve kept for reference.
return super.calcSunriseSunsetColors(celestialAngle, partialTicks);
/* float f = 0.4F;
float f1 = MathHelper.cos(celestialAngle * ((float)Math.PI * 2F)) - 0.0F;
float f2 = -0.0F;
if (f1 >= -0.4F && f1 <= 0.4F)
{
float f3 = (f1 - -0.0F) / 0.4F * 0.5F + 0.5F;
float f4 = 1.0F - (1.0F - MathHelper.sin(f3 * (float)Math.PI)) * 0.99F;
f4 = f4 * f4;
this.colorsSunriseSunset[0] = f3 * 0.3F + 0.7F;
this.colorsSunriseSunset[1] = f3 * f3 * 0.7F + 0.2F;
this.colorsSunriseSunset[2] = f3 * f3 * 0.0F + 0.2F;
this.colorsSunriseSunset[3] = f4;
return this.colorsSunriseSunset;
}
else
{
return null;
}*/
}
/*
@Override
@SideOnly(Side.CLIENT)
public Vector3 getFogColor() {
float f = 1.0F - this.getStarBrightness(1.0F);
return new Vector3(86 / 255.0F * f, 180 / 255.0F * f, 240 / 255.0F * f);
}
@Override
public Vector3 getSkyColor() {
float f = 0.6F - this.getStarBrightness(1.0F);
return new Vector3(100 / 255.0F * f, 220 / 255.0F * f, 250 / 255.0F * f);
}*/
@Override
@SideOnly(Side.CLIENT)
public Vector3 getFogColor() {
float f = 1.0F - this.getStarBrightness(1.0F);
return new Vector3(140 / 255.0F * f, 167 / 255.0F * f, 207 / 255.0F * f);
}
@Override
@SideOnly(Side.CLIENT)
public Vector3 getSkyColor() {
float f = 0.5F - this.getStarBrightness(1.0F);
if(world.isRaining())
{
f = 1.0F;
return new Vector3(47 / 255.0F * f, 47 / 255.0F * f, 47 / 255.0F * f);
}
return new Vector3(161 / 255.0F * f, 146 / 255.0F * f, 175 / 255.0F * f);
}
@Override
public boolean isSkyColored() {
return true;
}
@Override
public boolean hasSunset() {
return true;
}
@Override
public boolean shouldForceRespawn() {
return !ConfigManagerCore.forceOverworldRespawn;
}
@Override
@SideOnly(Side.CLIENT)
public float getStarBrightness(float par1)
{
float f = this.world.getCelestialAngle(par1);
float f1 = 1.0F - (MathHelper.cos(f * ((float)Math.PI * 2F)) * 2.0F + 0.25F);
f1 = MathHelper.clamp(f1, 0.0F, 1.0F);
return f1 * f1 * 0.5F;
}
@Override
@SideOnly(Side.CLIENT)
public float getSunBrightness(float par1) {
float f1 = this.world.getCelestialAngle(1.0F);
float f2 = 1.0F - (MathHelper.cos(f1 * 3.1415927F * 2.0F) * 2.0F + 0.2F);
f2 = MathHelper.clamp(f2, 0.0F, 1.0F);
f2 = 1.2F - f2;
return f2 * 0.8F;
}
@Override
public IRenderHandler getCloudRenderer(){
return new CloudRenderer();
}
@SideOnly(Side.CLIENT)
public IRenderHandler getSkyRenderer()
{
if (super.getSkyRenderer() == null)
{
this.setSkyRenderer(new SkyProviderBarnarda_C());
}
return super.getSkyRenderer();
}
@Override
public int getDungeonSpacing() {
return 0;
}
@Override
public ResourceLocation getDungeonChestType() {
return null;
}
@Override
public List<Block> getSurfaceBlocks() {
return null;
}
@Override
public DimensionType getDimensionType() {
return GSDimensions.BARNARDA_C;
}
@Override
public void genSettings(WE_ChunkProvider cp) {
cp.createChunkGen_List .clear();
cp.createChunkGen_InXZ_List .clear();
cp.createChunkGen_InXYZ_List.clear();
cp.decorateChunkGen_List .clear();
WE_Biome.setBiomeMap(cp, 1.4D, 4, 6400.0D, 1.0D);
WE_TerrainGenerator terrainGenerator = new WE_TerrainGenerator();
terrainGenerator.worldStoneBlock = Blocks.STONE.getDefaultState();
terrainGenerator.worldSeaGen = false;
terrainGenerator.worldSeaGenBlock = Blocks.ICE.getDefaultState();
terrainGenerator.worldSeaGenMaxY = 64;
cp.createChunkGen_List.add(terrainGenerator);
//-//
WE_CaveGen cg = new WE_CaveGen();
cg.replaceBlocksList .clear();
cg.addReplacingBlock(terrainGenerator.worldStoneBlock);
cg.lavaMaxY = 15;
cg.range = 64;
cp.createChunkGen_List.add(cg);
//-//
WE_RavineGen rg = new WE_RavineGen();
rg.replaceBlocksList .clear();
rg.addReplacingBlock(terrainGenerator.worldStoneBlock);
rg.lavaBlock = Blocks.LAVA.getDefaultState();
rg.lavaMaxY = 15;
rg.range = 32;
cp.createChunkGen_List.add(rg);
((WE_ChunkProviderSpace)cp).worldGenerators.clear();
cp.biomesList.clear();
WE_BiomeLayer layer = new WE_BiomeLayer();
layer.add(Blocks.BEDROCK.getDefaultState(), 0, 0, 1, 2, true);
layer.add(Blocks.PACKED_ICE.getDefaultState(), terrainGenerator.worldStoneBlock, -256, 0, -4, -10, true);
layer.add(Blocks.SNOW.getDefaultState(), Blocks.PACKED_ICE.getDefaultState(), -256, 0, -2, -1, false);
WE_Biome.addBiomeToGeneration(cp, new WE_BaseBiome(0D, 0D, 1.5F, 4, 90, 10, layer));
WE_Biome.addBiomeToGeneration(cp, new WE_BaseBiome(-0.5D, 0.5D, 2.0F, 6, 150, 3, layer));
WE_Biome.addBiomeToGeneration(cp, new WE_BaseBiome(-1.4D, 1.0D, 1.5F, 4, 90, 10, layer));
WE_Biome.addBiomeToGeneration(cp, new WE_BaseBiome(-1.0D, 1.5D, 1.5F, 4, 90, 20, layer));
WE_Biome.addBiomeToGeneration(cp, new WE_BaseBiome(-2.0D, 2.0D, 1.5F, 4, 40, 3, layer));
}
@Override
public boolean enableAdvancedThermalLevel() {
return true;
}
@Override
protected float getThermalValueMod()
{
return 0.4F;
}
@Override
public void onPopulate(int cX, int cZ) {
}
@Override
public void onChunkProvider(int cX, int cZ, ChunkPrimer primer) {
}
@Override
public void recreateStructures(Chunk chunkIn, int x, int z) {
}
@Override
public float getFogDensity(int x, int y, int z) {
if(this.world.isRaining()) return 0.1F;
return 0.4F;
}
@Override
public int getFogColor(int x, int y, int z) {
return GSUtils.getColor((int)(this.getFogColor().x * 255), (int)(this.getFogColor().y * 255), (int)(this.getFogColor().z * 255), 100);
}
}
|
apache-2.0
|
crrlos/Tienda
|
movil/plantillas/footer.php
|
41
|
</body>
</div><!-- div wrapper -->
<html>
|
apache-2.0
|
radiasoft/radtrack
|
tests/util/unitConversion_test.py
|
1578
|
from radtrack.util.unitConversion import convertUnitsString, \
convertUnitsStringToNumber, \
convertUnitsNumberToString, \
convertUnitsNumber
from radtrack.util.RbMath import roundSigFig
from math import pi
import pytest
def test_unit_conversion():
    """Exercise the unit-conversion helpers across simple, compound,
    invalid, higher-dimension, angle and inverse-unit cases."""
    # Simple test: 12 inches and 1 foot are the same length in km.
    assert convertUnitsString('12 in', 'km') == convertUnitsString('1 ft', 'km')

    # Compound test: 60 mi/hr equals 88 ft/sec.
    sixty_mph = roundSigFig(convertUnitsStringToNumber('60 mi/hr', 'm/s'), 10)
    eighty_eight_fps = roundSigFig(convertUnitsStringToNumber('88 ft/sec', 'm/s'), 10)
    assert sixty_mph == eighty_eight_fps

    # Invalid test: unrecognized units must raise ValueError.
    with pytest.raises(ValueError):
        convertUnitsString('4 score', 'years')

    # Higher dimension test: 16.7 km^2 in square miles.
    area = convertUnitsNumberToString(16.7, 'km^2', 'mi^2')
    assert area == convertUnitsString('6.44790604766 mi^2', 'mi^2')

    # Angle test: 3 radians converted to degrees.
    degrees = roundSigFig(convertUnitsNumber(3, 'rad', 'deg'), 10)
    assert degrees == roundSigFig(3 * 180 / pi, 10)

    # Compound units: standard gravity in ft/ms^2.
    gravity = roundSigFig(convertUnitsStringToNumber('9.8 m/s^2', 'ft/ms^2'), 6)
    assert gravity == 3.21522e-5

    # Inverse units: 1/s is Hz, 1/ns is GHz.
    assert convertUnitsString('10 1/s', 'Hz') == '10.0 Hz'
    assert convertUnitsString('1 1/ns', 'GHz') == '1.0 GHz'
|
apache-2.0
|
FIXTradingCommunity/timpani
|
src/main/webapp/websocket.js
|
2138
|
/**
* Copyright 2016 FIX Protocol Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
/**
* Functions for creating a websocket and handling its events
*/
/**
 * Module wrapping a single browser WebSocket connection.
 * Exposes init/send/disconnect; the socket itself stays private.
 */
var ws = (function() {
  var websocket;

  /**
   * Derives the websocket URI from the page's own HTTP(S) URI.
   * Replacing the leading "http" with "ws" also maps "https" to "wss".
   * The page URI already ends in "/", so the path is appended directly.
   */
  function buildWsUri(wsPath) {
    var pageUri = window.location.href;
    return pageUri.replace("http", "ws").concat(wsPath);
  }

  /**
   * Creates the WebSocket and wires up the caller's event handlers.
   */
  function openSocket(wsUri, messageEventHandler, errorEventHandler,
      openEventHandler, closeEventHandler, protocol) {
    websocket = new WebSocket(wsUri, protocol);
    websocket.addEventListener("message", messageEventHandler);
    websocket.addEventListener("error", errorEventHandler);
    websocket.addEventListener("open", openEventHandler);
    websocket.addEventListener("close", closeEventHandler);
  }

  return {
    /**
     * Opens a websocket to HTTP server URI is ws://[server]/[path]
     */
    init : function(path, messageEventHandler, errorEventHandler,
        openEventHandler, closeEventHandler, protocol) {
      if (!("WebSocket" in window)) {
        alert("WebSocket NOT supported by your Browser!");
        return;
      }
      openSocket(buildWsUri(path || "/"), messageEventHandler,
          errorEventHandler, openEventHandler, closeEventHandler, protocol);
    },
    /**
     * Message may be String, Blob, ArrayBuffer, or ArrayBufferView
     * depending on text or binary subprotocol
     */
    send : function(message) {
      websocket.send(message);
    },
    /**
     * Close the connection
     */
    disconnect : function() {
      websocket.close();
    }
  };
})();
|
apache-2.0
|
arina-ielchiieva/drill
|
exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/scan/v3/file/MockFileNames.java
|
1611
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.scan.v3.file;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.exec.physical.impl.scan.v3.schema.ImplicitColumnResolver;
import org.apache.hadoop.fs.Path;
/**
 * Shared constants describing a mock input file ("/w/x/y/foo.csv") used by
 * the file-scan framework tests: the file name, its directory, the derived
 * fully-qualified and file-system names, the scan root, the partition
 * directory names, and the implicit/partition column types.
 */
public interface MockFileNames {

    // Base file name and the directory that contains it.
    String MOCK_FILE_NAME = "foo.csv";
    String MOCK_FILE_DIR_PATH = "/w/x/y";
    // Fully-qualified name and its "file:" scheme variant.
    String MOCK_FILE_FQN = MOCK_FILE_DIR_PATH + "/" + MOCK_FILE_NAME;
    String MOCK_FILE_SYSTEM_NAME = "file:" + MOCK_FILE_FQN;
    // Scan root; dir0/dir1 are the partition directories below it.
    Path MOCK_ROOT_PATH = new Path("file:/w");
    String MOCK_SUFFIX = "csv";
    String MOCK_DIR0 = "x";
    String MOCK_DIR1 = "y";
    Path MOCK_FILE_PATH = new Path(MOCK_FILE_SYSTEM_NAME);
    // Column types assigned by the implicit-column resolver.
    MajorType IMPLICIT_COL_TYPE = ImplicitColumnResolver.IMPLICIT_COL_TYPE;
    MajorType PARTITION_COL_TYPE = ImplicitColumnResolver.PARTITION_COL_TYPE;
}
|
apache-2.0
|
rynnwang/CommonSolution
|
Development/Beyova.Elastic/Model/Status/ElasticIndicesStatus.cs
|
1430
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Beyova;
using Beyova.ApiTracking;
using Beyova.ExceptionSystem;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace Beyova.Elastic
{
/// <summary>
/// Class ElasticIndicesStatus.
/// Status snapshot for an Elastic index: document/index counts plus
/// storage and cache sizes.
/// </summary>
public class ElasticIndicesStatus
{
    /// <summary>
    /// Gets or sets the index name.
    /// </summary>
    /// <value>
    /// The name.
    /// </value>
    public string Name { get; set; }

    /// <summary>
    /// Gets or sets the document count. Null when not reported.
    /// </summary>
    /// <value>
    /// The document count.
    /// </value>
    public long? DocumentCount { get; set; }

    /// <summary>
    /// Gets or sets the index count. Null when not reported.
    /// </summary>
    /// <value>
    /// The index count.
    /// </value>
    public long? IndexCount { get; set; }

    /// <summary>
    /// Gets or sets the size of the storage.
    /// NOTE(review): unit is presumably bytes - confirm against the Elastic API response.
    /// </summary>
    /// <value>
    /// The size of the storage.
    /// </value>
    public long? StorageSize { get; set; }

    /// <summary>
    /// Gets or sets the size of the cache.
    /// NOTE(review): unit is presumably bytes - confirm against the Elastic API response.
    /// </summary>
    /// <value>
    /// The size of the cache.
    /// </value>
    public long? CacheSize { get; set; }
}
}
|
apache-2.0
|
nuclio/nuclio
|
test/_functions/common/reverser/python/reverser.py
|
841
|
# Copyright 2017 The Nuclio Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def handler(context, event):
    """Return reversed body as string.

    The request body arrives as bytes; it is decoded as UTF-8 and reversed.
    On failure (body is not bytes-like, or not valid UTF-8) an empty string
    is returned, preserving the original best-effort contract -- but without
    the bare ``except:`` that also swallowed KeyboardInterrupt/SystemExit.
    """
    try:
        # decode the binary body into a string
        body = event.body.decode('utf-8')
        # return the reverse
        return body[::-1]
    except (AttributeError, UnicodeDecodeError):
        return ""
|
apache-2.0
|
luhn/AutobahnPython
|
autobahn/autobahn/websocket/compress_snappy.py
|
15503
|
###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from __future__ import absolute_import
__all__ = (
'PerMessageSnappyMixin',
'PerMessageSnappyOffer',
'PerMessageSnappyOfferAccept',
'PerMessageSnappyResponse',
'PerMessageSnappyResponseAccept',
'PerMessageSnappy',
)
import snappy
from autobahn.websocket.compress_base import PerMessageCompressOffer, \
PerMessageCompressOfferAccept, \
PerMessageCompressResponse, \
PerMessageCompressResponseAccept, \
PerMessageCompress
class PerMessageSnappyMixin:
    """
    Mixin class for this extension.

    Carries the extension name shared by every `permessage-snappy`
    offer/response/processor class below.
    """

    EXTENSION_NAME = "permessage-snappy"
    """
    Name of this WebSocket extension.
    """
class PerMessageSnappyOffer(PerMessageCompressOffer, PerMessageSnappyMixin):
    """
    Set of extension parameters for `permessage-snappy` WebSocket extension
    offered by a client to a server.
    """

    @classmethod
    def parse(cls, params):
        """
        Parses a WebSocket extension offer for `permessage-snappy` provided by a client to a server.

        :param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`.
        :type params: list
        :returns: object -- A new instance of :class:`autobahn.compress.PerMessageSnappyOffer`.
        """
        ## extension parameter defaults
        ##
        acceptNoContextTakeover = False
        requestNoContextTakeover = False

        ##
        ## verify/parse client ("client-to-server direction") parameters of permessage-snappy offer
        ##
        for p in params:

            # each parameter may occur at most once in an offer
            if len(params[p]) > 1:
                raise Exception("multiple occurence of extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))

            val = params[p][0]

            if p == 'client_no_context_takeover':
                # boolean flag parameter: must appear without a value
                if val != True:
                    raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
                else:
                    acceptNoContextTakeover = True

            elif p == 'server_no_context_takeover':
                if val != True:
                    raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
                else:
                    requestNoContextTakeover = True

            else:
                # anything else is not a valid permessage-snappy parameter
                raise Exception("illegal extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))

        offer = cls(acceptNoContextTakeover,
                    requestNoContextTakeover)
        return offer

    def __init__(self,
                 acceptNoContextTakeover = True,
                 requestNoContextTakeover = False):
        """
        Constructor.

        :param acceptNoContextTakeover: Iff true, client accepts "no context takeover" feature.
        :type acceptNoContextTakeover: bool
        :param requestNoContextTakeover: Iff true, client request "no context takeover" feature.
        :type requestNoContextTakeover: bool
        """
        if type(acceptNoContextTakeover) != bool:
            raise Exception("invalid type %s for acceptNoContextTakeover" % type(acceptNoContextTakeover))

        self.acceptNoContextTakeover = acceptNoContextTakeover

        if type(requestNoContextTakeover) != bool:
            raise Exception("invalid type %s for requestNoContextTakeover" % type(requestNoContextTakeover))

        self.requestNoContextTakeover = requestNoContextTakeover

    def getExtensionString(self):
        """
        Returns the WebSocket extension configuration string as sent to the server.

        :returns: str -- PMCE configuration string.
        """
        pmceString = self.EXTENSION_NAME
        if self.acceptNoContextTakeover:
            pmceString += "; client_no_context_takeover"
        if self.requestNoContextTakeover:
            pmceString += "; server_no_context_takeover"
        return pmceString

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: object -- JSON serializable represention.
        """
        return {'extension': self.EXTENSION_NAME,
                'acceptNoContextTakeover': self.acceptNoContextTakeover,
                'requestNoContextTakeover': self.requestNoContextTakeover}

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: str -- Python string representation.
        """
        return "PerMessageSnappyOffer(acceptNoContextTakeover = %s, requestNoContextTakeover = %s)" % (self.acceptNoContextTakeover, self.requestNoContextTakeover)
class PerMessageSnappyOfferAccept(PerMessageCompressOfferAccept, PerMessageSnappyMixin):
    """
    Set of parameters with which to accept an `permessage-snappy` offer
    from a client by a server.
    """

    def __init__(self,
                 offer,
                 requestNoContextTakeover = False,
                 noContextTakeover = None):
        """
        Constructor.

        :param offer: The offer being accepted.
        :type offer: Instance of :class:`autobahn.compress.PerMessageSnappyOffer`.
        :param requestNoContextTakeover: Iff true, server request "no context takeover" feature.
        :type requestNoContextTakeover: bool
        :param noContextTakeover: Override server ("server-to-client direction") context takeover (this must be compatible with offer).
        :type noContextTakeover: bool
        """
        if not isinstance(offer, PerMessageSnappyOffer):
            raise Exception("invalid type %s for offer" % type(offer))

        self.offer = offer

        if type(requestNoContextTakeover) != bool:
            raise Exception("invalid type %s for requestNoContextTakeover" % type(requestNoContextTakeover))

        # the server may only request the feature if the client offered it
        if requestNoContextTakeover and not offer.acceptNoContextTakeover:
            raise Exception("invalid value %s for requestNoContextTakeover - feature unsupported by client" % requestNoContextTakeover)

        self.requestNoContextTakeover = requestNoContextTakeover

        if noContextTakeover is not None:
            if type(noContextTakeover) != bool:
                raise Exception("invalid type %s for noContextTakeover" % type(noContextTakeover))

            # the override must not disable a feature the client required
            if offer.requestNoContextTakeover and not noContextTakeover:
                raise Exception("invalid value %s for noContextTakeover - client requested feature" % noContextTakeover)

        self.noContextTakeover = noContextTakeover

    def getExtensionString(self):
        """
        Returns the WebSocket extension configuration string as sent to the server.

        :returns: str -- PMCE configuration string.
        """
        pmceString = self.EXTENSION_NAME
        if self.offer.requestNoContextTakeover:
            pmceString += "; server_no_context_takeover"
        if self.requestNoContextTakeover:
            pmceString += "; client_no_context_takeover"
        return pmceString

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: object -- JSON serializable represention.
        """
        return {'extension': self.EXTENSION_NAME,
                'offer': self.offer.__json__(),
                'requestNoContextTakeover': self.requestNoContextTakeover,
                'noContextTakeover': self.noContextTakeover}

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: str -- Python string representation.
        """
        return "PerMessageSnappyAccept(offer = %s, requestNoContextTakeover = %s, noContextTakeover = %s)" % (self.offer.__repr__(), self.requestNoContextTakeover, self.noContextTakeover)
class PerMessageSnappyResponse(PerMessageCompressResponse, PerMessageSnappyMixin):
    """
    Set of parameters for `permessage-snappy` responded by server.
    """

    @classmethod
    def parse(cls, params):
        """
        Parses a WebSocket extension response for `permessage-snappy` provided by a server to a client.

        :param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`.
        :type params: list
        :returns: object -- A new instance of :class:`autobahn.compress.PerMessageSnappyResponse`.
        """
        client_no_context_takeover = False
        server_no_context_takeover = False

        for p in params:

            # each parameter may occur at most once in a response
            if len(params[p]) > 1:
                raise Exception("multiple occurence of extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))

            val = params[p][0]

            if p == 'client_no_context_takeover':
                # boolean flag parameter: must appear without a value
                if val != True:
                    raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
                else:
                    client_no_context_takeover = True

            elif p == 'server_no_context_takeover':
                if val != True:
                    raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
                else:
                    server_no_context_takeover = True

            else:
                raise Exception("illegal extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))

        response = cls(client_no_context_takeover,
                       server_no_context_takeover)
        return response

    def __init__(self,
                 client_no_context_takeover,
                 server_no_context_takeover):
        # flags negotiated by the server for each direction
        self.client_no_context_takeover = client_no_context_takeover
        self.server_no_context_takeover = server_no_context_takeover

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: object -- JSON serializable represention.
        """
        return {'extension': self.EXTENSION_NAME,
                'client_no_context_takeover': self.client_no_context_takeover,
                'server_no_context_takeover': self.server_no_context_takeover}

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: str -- Python string representation.
        """
        return "PerMessageSnappyResponse(client_no_context_takeover = %s, server_no_context_takeover = %s)" % (self.client_no_context_takeover, self.server_no_context_takeover)
class PerMessageSnappyResponseAccept(PerMessageCompressResponseAccept, PerMessageSnappyMixin):
    """
    Set of parameters with which to accept an `permessage-snappy` response
    from a server by a client.
    """

    def __init__(self,
                 response,
                 noContextTakeover = None):
        """
        Constructor.

        :param response: The response being accepted.
        :type response: Instance of :class:`autobahn.compress.PerMessageSnappyResponse`.
        :param noContextTakeover: Override client ("client-to-server direction") context takeover (this must be compatible with response).
        :type noContextTakeover: bool
        """
        if not isinstance(response, PerMessageSnappyResponse):
            raise Exception("invalid type %s for response" % type(response))

        self.response = response

        if noContextTakeover is not None:
            if type(noContextTakeover) != bool:
                raise Exception("invalid type %s for noContextTakeover" % type(noContextTakeover))

            # the override must not disable a feature the server required
            if response.client_no_context_takeover and not noContextTakeover:
                raise Exception("invalid value %s for noContextTakeover - server requested feature" % noContextTakeover)

        self.noContextTakeover = noContextTakeover

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: object -- JSON serializable represention.
        """
        return {'extension': self.EXTENSION_NAME,
                'response': self.response.__json__(),
                'noContextTakeover': self.noContextTakeover}

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: str -- Python string representation.
        """
        return "PerMessageSnappyResponseAccept(response = %s, noContextTakeover = %s)" % (self.response.__repr__(), self.noContextTakeover)
class PerMessageSnappy(PerMessageCompress, PerMessageSnappyMixin):
    """
    `permessage-snappy` WebSocket extension processor.

    Holds one snappy stream compressor and one decompressor; each is reset
    per message when the corresponding "no context takeover" flag is set.
    """

    @classmethod
    def createFromResponseAccept(cls, isServer, accept):
        # client side: build processor from an accepted server response
        pmce = cls(isServer,
                   accept.response.server_no_context_takeover,
                   accept.noContextTakeover if accept.noContextTakeover is not None else accept.response.client_no_context_takeover)
        return pmce

    @classmethod
    def createFromOfferAccept(cls, isServer, accept):
        # server side: build processor from an accepted client offer
        pmce = cls(isServer,
                   accept.noContextTakeover if accept.noContextTakeover is not None else accept.offer.requestNoContextTakeover,
                   accept.requestNoContextTakeover)
        return pmce

    def __init__(self,
                 isServer,
                 server_no_context_takeover,
                 client_no_context_takeover):
        self._isServer = isServer
        self.server_no_context_takeover = server_no_context_takeover
        self.client_no_context_takeover = client_no_context_takeover

        # created lazily in startCompressMessage / startDecompressMessage
        self._compressor = None
        self._decompressor = None

    def __json__(self):
        return {'extension': self.EXTENSION_NAME,
                'server_no_context_takeover': self.server_no_context_takeover,
                'client_no_context_takeover': self.client_no_context_takeover}

    def __repr__(self):
        return "PerMessageSnappy(isServer = %s, server_no_context_takeover = %s, client_no_context_takeover = %s)" % (self._isServer, self.server_no_context_takeover, self.client_no_context_takeover)

    def startCompressMessage(self):
        # reuse the compressor across messages unless "no context takeover"
        # applies for our sending direction
        if self._isServer:
            if self._compressor is None or self.server_no_context_takeover:
                self._compressor = snappy.StreamCompressor()
        else:
            if self._compressor is None or self.client_no_context_takeover:
                self._compressor = snappy.StreamCompressor()

    def compressMessageData(self, data):
        return self._compressor.add_chunk(data)

    def endCompressMessage(self):
        # NOTE(review): returns a str terminator; under Python 3 a bytes
        # payload (b"") may be expected by the framing layer -- confirm
        # against compress_base / the protocol implementation.
        return ""

    def startDecompressMessage(self):
        # the receive direction mirrors startCompressMessage
        if self._isServer:
            if self._decompressor is None or self.client_no_context_takeover:
                self._decompressor = snappy.StreamDecompressor()
        else:
            if self._decompressor is None or self.server_no_context_takeover:
                self._decompressor = snappy.StreamDecompressor()

    def decompressMessageData(self, data):
        return self._decompressor.decompress(data)

    def endDecompressMessage(self):
        pass
|
apache-2.0
|
pdrados/cas
|
support/cas-server-support-oauth-core-api/src/main/java/org/apereo/cas/support/oauth/validator/token/OAuth20RefreshTokenGrantTypeTokenRequestValidator.java
|
2985
|
package org.apereo.cas.support.oauth.validator.token;
import org.apereo.cas.audit.AuditableContext;
import org.apereo.cas.support.oauth.OAuth20Constants;
import org.apereo.cas.support.oauth.OAuth20GrantTypes;
import org.apereo.cas.support.oauth.util.OAuth20Utils;
import org.apereo.cas.support.oauth.web.endpoints.OAuth20ConfigurationContext;
import org.apereo.cas.ticket.InvalidTicketException;
import org.apereo.cas.ticket.refreshtoken.OAuth20RefreshToken;
import org.apereo.cas.util.HttpRequestUtils;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.pac4j.core.context.JEEContext;
import org.pac4j.core.profile.ProfileManager;
import org.pac4j.core.profile.UserProfile;
/**
 * This is {@link OAuth20RefreshTokenGrantTypeTokenRequestValidator}.
 *
 * Validates OAuth2 token requests using the {@code refresh_token} grant
 * type: the refresh token parameter must be present and resolve to a live
 * ticket in the registry, and the registered service identified by the
 * client id must pass the access strategy and support this grant type.
 *
 * @author Misagh Moayyed
 * @since 5.3.0
 */
@Slf4j
public class OAuth20RefreshTokenGrantTypeTokenRequestValidator extends BaseOAuth20TokenRequestValidator {
    public OAuth20RefreshTokenGrantTypeTokenRequestValidator(final OAuth20ConfigurationContext configurationContext) {
        super(configurationContext);
    }

    @Override
    protected boolean validateInternal(final JEEContext context, final String grantType,
                                       final ProfileManager manager, final UserProfile uProfile) {
        val request = context.getNativeRequest();
        val clientId = OAuth20Utils.getClientIdAndClientSecret(context).getLeft();
        // Both the refresh token parameter and a client id are mandatory.
        if (!HttpRequestUtils.doesParameterExist(request, OAuth20Constants.REFRESH_TOKEN) || clientId.isEmpty()) {
            return false;
        }
        val token = request.getParameter(OAuth20Constants.REFRESH_TOKEN);
        try {
            // The token must still resolve to a live refresh token in the registry.
            val refreshToken = getConfigurationContext().getCentralAuthenticationService().getTicket(token, OAuth20RefreshToken.class);
            LOGGER.trace("Found valid refresh token [{}] in the registry", refreshToken);
        } catch (final InvalidTicketException e) {
            LOGGER.warn("Provided refresh token [{}] cannot be found in the registry or has expired", token);
            return false;
        }
        LOGGER.debug("Received grant type [{}] with client id [{}]", grantType, clientId);
        val registeredService = OAuth20Utils.getRegisteredOAuthServiceByClientId(
            getConfigurationContext().getServicesManager(), clientId);
        // Enforce the service access strategy; throws when access is denied.
        val audit = AuditableContext.builder()
            .registeredService(registeredService)
            .build();
        val accessResult = getConfigurationContext().getRegisteredServiceAccessStrategyEnforcer().execute(audit);
        accessResult.throwExceptionIfNeeded();
        // Finally, the service definition must authorize this grant type.
        if (!isGrantTypeSupportedBy(registeredService, grantType)) {
            LOGGER.warn("Requested grant type [{}] is not authorized by service definition [{}]", getGrantType(), registeredService.getServiceId());
            return false;
        }
        return true;
    }

    @Override
    protected OAuth20GrantTypes getGrantType() {
        return OAuth20GrantTypes.REFRESH_TOKEN;
    }
}
|
apache-2.0
|
BusyJay/tikv
|
components/cloud/src/blob.rs
|
5567
|
// Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use futures_io::AsyncRead;
pub use kvproto::brpb::CloudDynamic;
use std::io;
use std::marker::Unpin;
/// Configuration common to all blob-storage backends.
pub trait BlobConfig: 'static + Send + Sync {
    /// Short backend name identifying this storage kind.
    fn name(&self) -> &'static str;
    /// Canonical URL describing the storage location.
    fn url(&self) -> io::Result<url::Url>;
}
/// An abstraction for blob storage.
/// Currently the same as ExternalStorage
pub trait BlobStorage: 'static + Send + Sync {
    /// Returns the configuration describing this storage backend.
    fn config(&self) -> Box<dyn BlobConfig>;

    /// Write all contents of the read to the given path.
    fn put(
        &self,
        name: &str,
        reader: Box<dyn AsyncRead + Send + Unpin>,
        content_length: u64,
    ) -> io::Result<()>;

    /// Read all contents of the given path.
    fn get(&self, name: &str) -> Box<dyn AsyncRead + Unpin + '_>;
}
// A `BlobStorage` trait object can itself act as a `BlobConfig`
// by delegating to its own configuration.
impl BlobConfig for dyn BlobStorage {
    fn name(&self) -> &'static str {
        self.config().name()
    }

    fn url(&self) -> io::Result<url::Url> {
        self.config().url()
    }
}
// Boxed storages forward every call to the boxed value, so APIs can take
// `Box<dyn BlobStorage>` wherever a concrete storage is accepted.
impl BlobStorage for Box<dyn BlobStorage> {
    fn config(&self) -> Box<dyn BlobConfig> {
        (**self).config()
    }

    fn put(
        &self,
        name: &str,
        reader: Box<dyn AsyncRead + Send + Unpin>,
        content_length: u64,
    ) -> io::Result<()> {
        (**self).put(name, reader, content_length)
    }

    fn get(&self, name: &str) -> Box<dyn AsyncRead + Unpin + '_> {
        (**self).get(name)
    }
}
/// A `String` guaranteed to be non-empty by construction; build via the
/// associated `opt*`/`required*` constructors.
#[derive(Clone, Debug, PartialEq)]
pub struct StringNonEmpty(String);
impl StringNonEmpty {
    /// Returns `Some` only when `s` is non-empty.
    pub fn opt(s: String) -> Option<Self> {
        match s.is_empty() {
            true => None,
            false => Some(Self(s)),
        }
    }

    /// Returns the first non-empty string of the two, if any.
    pub fn opt2(s1: String, s2: String) -> Option<Self> {
        match Self::opt(s1) {
            found @ Some(_) => found,
            None => Self::opt(s2),
        }
    }

    /// Requires `s` to be non-empty, reporting the field name on failure.
    pub fn required_field(s: String, field: &str) -> io::Result<Self> {
        Self::required_msg(s, &format!("field {}", field))
    }

    /// Requires at least one of two candidates to be non-empty.
    pub fn required_field2(s1: String, s2: String, field: &str) -> io::Result<Self> {
        Self::opt2(s1, s2).ok_or_else(|| Self::error_required(&format!("field {}", field)))
    }

    /// Requires `s` to be non-empty, reporting `msg` on failure.
    pub fn required_msg(s: String, msg: &str) -> io::Result<Self> {
        if s.is_empty() {
            Err(Self::error_required(&format!("Empty {}", msg)))
        } else {
            Ok(Self(s))
        }
    }

    /// Builds the `InvalidInput` error used by the `required*` constructors.
    fn error_required(msg: &str) -> io::Error {
        io::Error::new(io::ErrorKind::InvalidInput, msg)
    }

    /// Requires `s` to be non-empty with a generic message.
    pub fn required(s: String) -> io::Result<Self> {
        Self::required_msg(s, "string")
    }

    /// Wraps a compile-time string literal; panics if it is empty.
    pub fn static_str(s: &'static str) -> Self {
        Self::required_msg(s.to_owned(), "static str").unwrap()
    }
}
// Lets a `StringNonEmpty` be used wherever `&String`/`&str` methods apply.
impl std::ops::Deref for StringNonEmpty {
    type Target = String;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Formats exactly like the inner `String`.
impl std::fmt::Display for StringNonEmpty {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}
/// Generic bucket-style storage location shared by the cloud backends.
#[derive(Clone, Debug)]
pub struct BucketConf {
    /// Custom service endpoint, when not using the provider default.
    pub endpoint: Option<StringNonEmpty>,
    /// Provider region, if any.
    pub region: Option<StringNonEmpty>,
    /// Bucket name (the only mandatory field).
    pub bucket: StringNonEmpty,
    /// Key prefix under which objects are stored.
    pub prefix: Option<StringNonEmpty>,
    /// Provider-specific storage class, if any.
    pub storage_class: Option<StringNonEmpty>,
}
impl BucketConf {
    /// Builds a configuration containing only the mandatory bucket name.
    pub fn default(bucket: StringNonEmpty) -> Self {
        BucketConf {
            bucket,
            endpoint: None,
            region: None,
            prefix: None,
            storage_class: None,
        }
    }

    /// Renders the bucket as a URL.
    ///
    /// With a custom endpoint the result is `<endpoint>/<bucket>/<prefix>`;
    /// otherwise it is `<scheme>://<bucket>/<prefix>`.
    pub fn url(&self, scheme: &str) -> Result<url::Url, String> {
        let path = none_to_empty(self.prefix.clone());
        if let Some(ep) = &self.endpoint {
            let mut u =
                url::Url::parse(ep).map_err(|e| format!("invalid endpoint {}: {}", &ep, e))?;
            // Trim the joining slashes so the path has exactly one separator.
            u.set_path(&format!(
                "{}/{}",
                &self.bucket.trim_end_matches('/'),
                &path.trim_start_matches('/')
            ));
            Ok(u)
        } else {
            let mut u = url::Url::parse(&format!("{}://{}", &scheme, &self.bucket))
                .map_err(|e| format!("{}", e))?;
            u.set_path(&path);
            Ok(u)
        }
    }

    /// Builds a `BucketConf` from the protobuf `CloudDynamic` message;
    /// only the bucket name is required, all other fields are optional.
    pub fn from_cloud_dynamic(cloud_dynamic: &CloudDynamic) -> io::Result<Self> {
        let bucket = cloud_dynamic.bucket.clone().into_option().ok_or_else(|| {
            io::Error::new(io::ErrorKind::Other, "Required field bucket is missing")
        })?;
        Ok(Self {
            endpoint: StringNonEmpty::opt(bucket.endpoint),
            bucket: StringNonEmpty::required_field(bucket.bucket, "bucket")?,
            prefix: StringNonEmpty::opt(bucket.prefix),
            storage_class: StringNonEmpty::opt(bucket.storage_class),
            region: StringNonEmpty::opt(bucket.region),
        })
    }
}
/// Unwraps an optional non-empty string into a plain `String`,
/// yielding the empty string for `None`.
pub fn none_to_empty(opt: Option<StringNonEmpty>) -> String {
    opt.map(|s| s.0).unwrap_or_default()
}
#[cfg(test)]
mod tests {
use super::*;
// Checks both `url()` branches: host-from-bucket (no endpoint) and
// endpoint-as-base; also verifies the space in the prefix is percent-encoded.
#[test]
fn test_url_of_bucket() {
let bucket_name = StringNonEmpty::required("bucket".to_owned()).unwrap();
let mut bucket = BucketConf::default(bucket_name);
bucket.prefix = StringNonEmpty::opt("/backup 01/prefix/".to_owned());
assert_eq!(
bucket.url("s3").unwrap().to_string(),
"s3://bucket/backup%2001/prefix/"
);
bucket.endpoint = Some(StringNonEmpty::static_str("http://endpoint.com"));
assert_eq!(
bucket.url("s3").unwrap().to_string(),
"http://endpoint.com/bucket/backup%2001/prefix/"
);
}
}
|
apache-2.0
|
stdlib-js/stdlib
|
lib/node_modules/@stdlib/utils/async/do-until/test/test.js
|
6866
|
/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// MODULES //
var tape = require( 'tape' );
var noop = require( '@stdlib/utils/noop' );
var doUntilAsync = require( './../lib' );
// TESTS //
// Smoke test: the package's main export must be a callable function.
tape( 'main export is a function', function test( t ) {
t.ok( true, __filename );
t.strictEqual( typeof doUntilAsync, 'function', 'main export is a function' );
t.end();
});
// Argument validation: the first argument (the function to invoke) must be
// a function; every non-function value must raise a TypeError.
tape( 'the function throws an error if not provided a function to invoke', function test( t ) {
	var nonFunctions;
	var i;
	nonFunctions = [
		'5',
		5,
		NaN,
		true,
		false,
		null,
		void 0,
		{},
		[],
		/.*/,
		new Date()
	];
	for ( i = 0; i < nonFunctions.length; i++ ) {
		t.throws( wrap( nonFunctions[ i ] ), TypeError, 'throws a type error when provided '+nonFunctions[ i ] );
	}
	t.end();

	// Defers the invalid call so `t.throws` can observe the exception:
	function wrap( value ) {
		return function attempt() {
			doUntilAsync( value, noop, noop );
		};
	}
});
// Argument validation: the second argument (the predicate) must be a
// function; every non-function value must raise a TypeError.
tape( 'the function throws an error if not provided a predicate function', function test( t ) {
var values;
var i;
values = [
'5',
5,
NaN,
true,
false,
null,
void 0,
{},
[],
/.*/,
new Date()
];
for ( i = 0; i < values.length; i++ ) {
t.throws( badValue( values[i] ), TypeError, 'throws a type error when provided '+values[i] );
}
t.end();
// Defers the invalid call so `t.throws` can observe the exception:
function badValue( value ) {
return function badValue() {
doUntilAsync( noop, value, noop );
};
}
});
// Argument validation: the third argument (the completion callback) must be
// a function; every non-function value must raise a TypeError.
tape( 'the function throws an error if not provided a callback function', function test( t ) {
var values;
var i;
values = [
'5',
5,
NaN,
true,
false,
null,
void 0,
{},
[],
/.*/,
new Date()
];
for ( i = 0; i < values.length; i++ ) {
t.throws( badValue( values[i] ), TypeError, 'throws a type error when provided '+values[i] );
}
t.end();
// Defers the invalid call so `t.throws` can observe the exception:
function badValue( value ) {
return function badValue() {
doUntilAsync( noop, noop, value );
};
}
});
// Core loop semantics: `fcn` sees the 0-based invocation count (0..4) and
// `predicate` sees the count *after* the invocation (1..5); iteration stops
// once the predicate reports true (here, when i >= 5).
tape( 'the function invokes a provided function until a test condition is true', function test( t ) {
var indices1 = [ 0, 1, 2, 3, 4 ];
var indices2 = [ 1, 2, 3, 4, 5 ];
var j = -1;
var k = -1;
doUntilAsync( fcn, predicate, done );
function fcn( i, next ) {
k += 1;
t.strictEqual( i, indices1[ k ], 'provides expected value' );
// Defer `next` to exercise the asynchronous path:
setTimeout( onTimeout, 0 );
function onTimeout() {
next();
}
}
function predicate( i, clbk ) {
j += 1;
t.strictEqual( i, indices2[ j ], 'provides expected value' );
clbk( null, i >= 5 );
}
function done( error ) {
if ( error ) {
t.fail( error.message );
} else {
t.ok( true, 'does not return an error' );
}
t.end();
}
});
// Do-while semantics: even when the predicate is always true, `fcn` must be
// invoked exactly once before the predicate is first consulted.
tape( 'the function invokes a provided function until a test condition is true and at least once (always true)', function test( t ) {
var indices1 = [ 0 ];
var indices2 = [ 1 ];
var j = -1;
var k = -1;
doUntilAsync( fcn, predicate, done );
function fcn( i, next ) {
k += 1;
t.strictEqual( i, indices1[ k ], 'provides expected value' );
// Defer `next` to exercise the asynchronous path:
setTimeout( onTimeout, 0 );
function onTimeout() {
next();
}
}
function predicate( i, clbk ) {
j += 1;
t.strictEqual( i, indices2[ j ], 'provides expected value' );
clbk( null, true );
}
function done( error ) {
if ( error ) {
t.fail( error.message );
} else {
t.ok( true, 'does not return an error' );
}
t.end();
}
});
// The optional fourth argument is the `this` context for `fcn`; the counter
// on `ctx` must reflect all five invocations (predicate stops at i >= 5).
tape( 'the function supports providing an execution context', function test( t ) {
var ctx = {
'count': 0
};
doUntilAsync( fcn, predicate, done, ctx );
function fcn( i, next ) {
this.count += 1; // eslint-disable-line no-invalid-this
setTimeout( onTimeout, 0 );
function onTimeout() {
next();
}
}
function predicate( i, clbk ) {
clbk( null, i >= 5 );
}
function done( error ) {
if ( error ) {
t.fail( error.message );
} else {
t.ok( true, 'does not return an error' );
}
t.strictEqual( ctx.count, 5, 'updates context' );
t.end();
}
});
// Extra arguments passed to `next` by the final invocation (i = 4) must be
// forwarded to the `done` callback after the error argument.
tape( 'the function provides any results to a `done` callback', function test( t ) {
doUntilAsync( fcn, predicate, done );
function fcn( i, next ) {
setTimeout( onTimeout, 0 );
function onTimeout() {
next( null, 'beep'+i, 'boop'+i, 'woot'+i );
}
}
function predicate( i, clbk ) {
clbk( null, i >= 5 );
}
function done( error, str1, str2, str3 ) {
if ( error ) {
t.fail( error.message );
} else {
t.ok( true, 'does not return an error' );
}
t.strictEqual( str1, 'beep4', 'returns expected value' );
t.strictEqual( str2, 'boop4', 'returns expected value' );
t.strictEqual( str3, 'woot4', 'returns expected value' );
t.end();
}
});
// Error propagation from the predicate: the error raised on the second
// predicate call (i === 2) must stop iteration (k stays 2) and reach `done`.
tape( 'if an error is encountered when invoking the predicate function, the function suspends execution and immediately returns the `error` to the provided callback', function test( t ) {
var k = 0;
doUntilAsync( fcn, predicate, done );
function fcn( i, next ) {
setTimeout( onTimeout, 0 );
function onTimeout() {
next();
}
}
function predicate( i, clbk ) {
k += 1;
if ( i === 2 ) {
return clbk( new Error( 'beep' ) );
}
clbk( null, i >= 5 );
}
function done( error ) {
if ( error ) {
t.pass( error.message );
} else {
t.fail( 'should return an error' );
}
t.strictEqual( k, 2, 'expected number of invocations' );
t.end();
}
});
// Error propagation from the invoked function: the error raised on the third
// invocation (i === 2) must stop iteration (predicate ran twice) and reach `done`.
tape( 'if an error is encountered when invoking the provided function, the function suspends execution and immediately returns the `error` to the provided callback', function test( t ) {
var k = 0;
doUntilAsync( fcn, predicate, done );
function fcn( i, next ) {
setTimeout( onTimeout, 0 );
function onTimeout() {
if ( i === 2 ) {
return next( new Error( 'boop' ) );
}
next();
}
}
function predicate( i, clbk ) {
k += 1;
clbk( null, i >= 5 );
}
function done( error ) {
if ( error ) {
t.pass( error.message );
} else {
t.fail( 'should return an error' );
}
t.strictEqual( k, 2, 'expected number of invocations' );
t.end();
}
});
// With a fully synchronous `next`, the whole loop — including `done` — runs
// before control returns to the caller, so `done` observes k === 0 (the
// `k = 1` after the call has not executed yet). I.e., the implementation
// intentionally does NOT force deferral ("releases the zalgo").
tape( 'the function does not guarantee asynchronous execution', function test( t ) {
var k = 0;
doUntilAsync( fcn, predicate, done );
k = 1;
function fcn( i, next ) {
next();
}
function predicate( i, clbk ) {
clbk( null, i >= 5 );
}
function done( error ) {
if ( error ) {
t.fail( error.message );
} else {
t.ok( true, 'does not return an error' );
}
t.strictEqual( k, 0, 'releases the zalgo' );
t.end();
}
});
|
apache-2.0
|
joris520/broodjesalami
|
php_cm/modules/model/service/assessmentInvitation/InvitationMessageService.class.php
|
3490
|
<?php
/**
* Description of InvitationMessageService
*
* @author ben.dokter
*/
require_once('modules/model/queries/assessmentInvitation/InvitationMessageQueries.class.php');
require_once('modules/model/value//batch/InvitationMessageTypeValue.class.php');
require_once('application/model/service/CustomerService.class.php');
class InvitationMessageService
{
/**
 * Persists a new invitation-message template.
 *
 * Derives the "from" display name from the current customer and stores a
 * pipe-separated list of pre-translated words alongside the message so the
 * e-mails can later be rendered in the recipient's language.
 *
 * @param InvitationMessageValueObject $valueObject type/subject/message to store
 * @return mixed the id returned by InvitationMessageQueries::insertInvitationMessage
 */
static function insertInvitationMessage(InvitationMessageValueObject $valueObject)
{
// regelen opslaan message template... (store the message template)
$messageFrom = self::createMessageFrom(CUSTOMER_ID);
$languageText = self::createLanguageText(); // pre-translate the words needed so e-mails can be sent localized
$invitationMessageId = InvitationMessageQueries::insertInvitationMessage( $valueObject->getType(),
$valueObject->getSubject(),
$messageFrom,
$valueObject->getMessage(),
$languageText);
return $invitationMessageId;
}
/**
 * Builds the pipe-separated list of translated labels stored with a message.
 * Order matters: consumers presumably split on "|" positionally — TODO confirm.
 *
 * @return string e.g. "Evaluation form|Employee|Department|Job profile"
 */
static function createLanguageText()
{
$messages = array();
$messages[] = TXT_UCF('EVALUATION_FORM');
$messages[] = TXT_UCF('EMPLOYEE');
$messages[] = TXT_UCF('DEPARTMENT');
$messages[] = TXT_UCF('JOB_PROFILE');
return implode("|", $messages);
}
/**
 * Resolves the sender display name: the customer's company name.
 *
 * @param mixed $customerId
 * @return string
 */
static function createMessageFrom($customerId)
{
$valueObject = CustomerService::getInfoValueObject($customerId);
return $valueObject->getCompanyName();
}
/**
 * Validates a message value object: subject and message are required.
 *
 * @param InvitationMessageValueObject $valueObject
 * @return array [bool $hasError, string[] $messages] translated error messages
 */
static function validate(InvitationMessageValueObject $valueObject)
{
$hasError = false;
$messages = array();
$subject = $valueObject->getSubject();
if (empty($subject)) {
$hasError = true;
$messages[] = TXT_UCF('PLEASE_ENTER_A_SUBJECT');
}
$message = $valueObject->getMessage();
if (empty($message)) {
$hasError = true;
$messages[] = TXT_UCF('PLEASE_ENTER_A_MESSAGE');
}
// fout van de programmeur... (programmer error — type check left disabled)
// if (InvitationMessageTypeValue::isValidValue($valueObject->getType())) {
// $hasError = true;
// $messages[] = TXT_UCF('NO_MESSAGE_TYPE');
// }
return array($hasError, $messages);
}
/**
 * Fetches the most recent invitation message matching the given type filter.
 *
 * NOTE(review): relies on the removed ext/mysql API (mysql_fetch_assoc was
 * removed in PHP 7) and assumes the Queries class returns a raw result
 * resource — migrate to mysqli/PDO together with that class.
 *
 * @param string $messageTypeFilter comma-separated type values
 * @return InvitationMessageValueObject
 */
static function getLastInvitationMessage($messageTypeFilter)
{
$query = InvitationMessageQueries::getLastInvitationMessage($messageTypeFilter);
$invitationMessageData = mysql_fetch_assoc($query);
return InvitationMessageValueObject::createWithData($invitationMessageData);
}
/**
 * Builds the reminder message from the last invitation/reminder sent:
 * prefixes "Reminder:" when the base message was an invitation, and falls
 * back to a default subject when the stored subject is empty.
 *
 * @return InvitationMessageValueObject
 */
static function getReminderMessage()
{
$messageTypeFilter = InvitationMessageTypeValue::INVITATION . ',' . InvitationMessageTypeValue::REMINDER;
$valueObject = self::getLastInvitationMessage($messageTypeFilter);
$messageSubject = $valueObject->getSubject();
if ($valueObject->getType() == InvitationMessageTypeValue::INVITATION) {
$valueObject->setSubject(TXT_UCF('REMINDER') . ': ' . $messageSubject);
}
if (empty($messageSubject)) {
$valueObject->setSubject(TXT_UCF('REMINDER_SELF_EVALUATION_MESSAGE_SUBJECT'));
}
return $valueObject;
}
}
?>
|
apache-2.0
|
nivanov/ignite
|
modules/platforms/cpp/core/src/impl/cluster/cluster_group_impl.cpp
|
2020
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ignite/impl/cluster/cluster_group_impl.h"
using namespace ignite::jni::java;
using namespace ignite::impl::cluster;
namespace ignite
{
    namespace impl
    {
        namespace cluster
        {
            /** Interop operation codes understood by the Java side. */
            enum Command
            {
                FOR_SERVERS = 23
            };

            ClusterGroupImpl::ClusterGroupImpl(SP_IgniteEnvironment env, jobject javaRef) :
                InteropTarget(env, javaRef)
            {
                // No-op.
            }

            ClusterGroupImpl::~ClusterGroupImpl()
            {
                // No-op.
            }

            /**
             * Returns a cluster group consisting only of server nodes.
             *
             * On failure an empty shared pointer is returned and the error
             * details are available through @p err.
             */
            ClusterGroupImpl::SP_ClusterGroupImpl ClusterGroupImpl::ForServers(IgniteError& err)
            {
                jobject res = InOpObject(FOR_SERVERS, err);

                // Bug fix: the previous code declared a local JniErrorInfo that was
                // never written and then read its `code` member (uninitialized read).
                // The actual error status is reported through the `err` out-parameter
                // populated by InOpObject, so check that instead; also guard against
                // a null Java reference so we never wrap an invalid handle.
                if (err.GetCode() != IgniteError::IGNITE_SUCCESS || !res)
                    return SP_ClusterGroupImpl();

                return FromTarget(res);
            }

            /** Wraps a raw Java cluster-group reference in a new ClusterGroupImpl. */
            ClusterGroupImpl::SP_ClusterGroupImpl ClusterGroupImpl::FromTarget(jobject javaRef)
            {
                return SP_ClusterGroupImpl(new ClusterGroupImpl(GetEnvironmentPointer(), javaRef));
            }
        }
    }
}
|
apache-2.0
|
melvinodsa/odsatransform
|
Documents/Documentation/html/navtreeindex0.js
|
15428
|
// Doxygen-generated navigation-tree index: maps each documentation page /
// anchor URL to its coordinates in the navigation tree. Do not edit by hand;
// regenerate with Doxygen.
// NOTE(review): several keys appear more than once with different values; in
// a JS object literal the last occurrence wins — this mirrors the emitter's
// raw output.
var NAVTREEINDEX0 =
{
"_reverse_transform_8c.html":[3,0,1,0],
"_reverse_transform_8c.html#abf9e6b7e6f15df4b525a2e7705ba3089":[3,0,1,0,0],
"_reverse_transform_8c_source.html":[3,0,1,0],
"_test___result___validator_8py.html":[3,0,0,6],
"_test___result___validator_8py.html#a5a7b9f22374e985fcc0bf82d1fcd7725":[3,0,0,6,2],
"_test___result___validator_8py.html#a7847560c748814fd3070e9149a9578bd":[3,0,0,6,1],
"_test___result___validator_8py.html#a99947a609168933468df2dff49072050":[3,0,0,6,0],
"_test___result___validator_8py.html#ad79587b77f45eb630126e0565b264bef":[3,0,0,6,3],
"_test___result___validator_8py_source.html":[3,0,0,6],
"_test___reverse___transform_8py.html":[3,0,0,7],
"_test___reverse___transform_8py.html#a0f9ff55eba71865df3ddf26fcb3d5aad":[3,0,0,7,5],
"_test___reverse___transform_8py.html#a16e18c2ef0a9b679e08e65e637e0df94":[3,0,0,7,9],
"_test___reverse___transform_8py.html#a6cf4ada7f58530327f44b710d00eb2f1":[3,0,0,7,2],
"_test___reverse___transform_8py.html#a7798439fa971f62e0ded208034a760ad":[3,0,0,7,7],
"_test___reverse___transform_8py.html#a80457631a76a55862e10c31897930fb3":[3,0,0,7,1],
"_test___reverse___transform_8py.html#aa57373453d89bed425e69f2dbf3e926f":[3,0,0,7,0],
"_test___reverse___transform_8py.html#aad8cfc5f3e2d25f25f6e077da0b06827":[3,0,0,7,4],
"_test___reverse___transform_8py.html#acf316e258896197f4e917a6547e49532":[3,0,0,7,10],
"_test___reverse___transform_8py.html#ada4f2f57dc2bc1d2862a24b5f4277558":[3,0,0,7,3],
"_test___reverse___transform_8py.html#ae247fdf0e0632eb8e8e02f7885333112":[3,0,0,7,8],
"_test___reverse___transform_8py.html#ae6a1a5af7f4ae27764ae869537837512":[3,0,0,7,6],
"_test___reverse___transform_8py.html#aff9953fd97f768fa915a863175e5f71f":[3,0,0,7,11],
"_test___reverse___transform_8py_source.html":[3,0,0,7],
"_test___transform_8py.html":[3,0,0,8],
"_test___transform_8py.html#a16e18c2ef0a9b679e08e65e637e0df94":[3,0,0,8,5],
"_test___transform_8py.html#a3fed5d834a18d93d9d097d143fb31803":[3,0,0,8,3],
"_test___transform_8py.html#a531af8eace089b74651cf6fc6a5a5e91":[3,0,0,8,0],
"_test___transform_8py.html#a6cf4ada7f58530327f44b710d00eb2f1":[3,0,0,8,2],
"_test___transform_8py.html#a858e01a6439d0f8e0f8565c0aac1c0ef":[3,0,0,8,4],
"_test___transform_8py.html#a85d897846bb64fe74a8ce36c2365e3b8":[3,0,0,8,1],
"_test___transform_8py_source.html":[3,0,0,8],
"_transform_8c.html":[3,0,1,2],
"_transform_8c.html#abf9e6b7e6f15df4b525a2e7705ba3089":[3,0,1,2,0],
"_transform_8c_source.html":[3,0,1,2],
"annotated.html":[2,0],
"bug.html":[0],
"classes.html":[2,1],
"dir_23fdee2f6995db16c755697cdf620cf4.html":[3,0,0],
"dir_37618a71d99bdfed5979e976980d5eec.html":[3,0,1],
"files.html":[3,0],
"functions.html":[2,2,0],
"functions_vars.html":[2,2,1],
"globals.html":[3,1,0],
"globals_enum.html":[3,1,4],
"globals_eval.html":[3,1,5],
"globals_func.html":[3,1,1],
"globals_type.html":[3,1,3],
"globals_vars.html":[3,1,2],
"group___forward-_transform.html":[1,5],
"group___forward-_transform.html#ga2673767a133bdd0bebc09424a49ec033":[3,0,0,2,0],
"group___forward-_transform.html#ga2673767a133bdd0bebc09424a49ec033":[3,0,0,3,0],
"group___forward-_transform.html#ga2673767a133bdd0bebc09424a49ec033":[1,5,0],
"group___forward-_transform.html#ga55f36ea4f900f97713f42841f87dea2d":[3,0,0,2,3],
"group___forward-_transform.html#ga55f36ea4f900f97713f42841f87dea2d":[3,0,0,3,3],
"group___forward-_transform.html#ga55f36ea4f900f97713f42841f87dea2d":[1,5,2],
"group___forward-_transform.html#ga79a544adff1e48c822da0c5a7469fc52":[3,0,0,2,4],
"group___forward-_transform.html#ga79a544adff1e48c822da0c5a7469fc52":[3,0,0,3,4],
"group___forward-_transform.html#ga79a544adff1e48c822da0c5a7469fc52":[1,5,3],
"group___forward-_transform.html#gaa29e58165b9dd9f21aa02b1f61c94e07":[3,0,0,2,1],
"group___forward-_transform.html#gaa29e58165b9dd9f21aa02b1f61c94e07":[3,0,0,3,1],
"group___forward-_transform.html#gaa29e58165b9dd9f21aa02b1f61c94e07":[1,5,1],
"group___forward-_transform.html#gaa7aa55b46527c094c2103c5d4e45d271":[3,0,0,2,9],
"group___forward-_transform.html#gaa7aa55b46527c094c2103c5d4e45d271":[3,0,0,3,9],
"group___forward-_transform.html#gaa7aa55b46527c094c2103c5d4e45d271":[1,5,5],
"group___forward-_transform.html#gab43324012715a8f7da8011b4fd4b9fb6":[3,0,0,2,11],
"group___forward-_transform.html#gab43324012715a8f7da8011b4fd4b9fb6":[3,0,0,3,10],
"group___forward-_transform.html#gab43324012715a8f7da8011b4fd4b9fb6":[1,5,6],
"group___forward-_transform.html#gab43324012715a8f7da8011b4fd4b9fb6":[1,5,7],
"group___forward-_transform.html#gadb9ffdc9b7aa66a2108971c4bed4ebc6":[3,0,0,2,6],
"group___forward-_transform.html#gadb9ffdc9b7aa66a2108971c4bed4ebc6":[3,0,0,3,6],
"group___forward-_transform.html#gadb9ffdc9b7aa66a2108971c4bed4ebc6":[1,5,4],
"group___output-_map.html":[1,4],
"group___output-_map.html#a0f11fc08c1ec2c8146ddd0d5b8fe8355":[1,4,0,0],
"group___output-_map.html#a2ad09485a275d837d775e06cc6319d09":[1,4,1,0],
"group___output-_map.html#a59046b4fc86b906391dc5cdaf475d9d7":[1,4,1,2],
"group___output-_map.html#a73834fe5af6128b5368d4b5b42954457":[1,4,1,1],
"group___output-_map.html#a832fc5252b84b9e4cbb37f436ae12a59":[1,4,0,2],
"group___output-_map.html#af9b9dc7981cf86744b798fec15e6044e":[1,4,0,1],
"group___output-_map.html#ga06e689232f86c39655d77deb159d241a":[1,4,21],
"group___output-_map.html#ga06e689232f86c39655d77deb159d241a":[3,0,0,0,17],
"group___output-_map.html#ga06e689232f86c39655d77deb159d241a":[3,0,0,1,17],
"group___output-_map.html#ga09dcd6c553c099ff827a1b596037b024":[1,4,26],
"group___output-_map.html#ga09dcd6c553c099ff827a1b596037b024":[3,0,0,0,22],
"group___output-_map.html#ga09dcd6c553c099ff827a1b596037b024":[3,0,0,1,22],
"group___output-_map.html#ga1591d8f1c51625608ecda91b9d6eb93c":[3,0,0,0,11],
"group___output-_map.html#ga1591d8f1c51625608ecda91b9d6eb93c":[3,0,0,1,11],
"group___output-_map.html#ga1591d8f1c51625608ecda91b9d6eb93c":[1,4,15],
"group___output-_map.html#ga22d2812add51b98f87904180253df403":[3,0,0,0,8],
"group___output-_map.html#ga22d2812add51b98f87904180253df403":[3,0,0,1,8],
"group___output-_map.html#ga22d2812add51b98f87904180253df403":[1,4,12],
"group___output-_map.html#ga2cf771af27a0c32417c93e43df660ffe":[1,4,23],
"group___output-_map.html#ga2cf771af27a0c32417c93e43df660ffe":[3,0,0,0,19],
"group___output-_map.html#ga2cf771af27a0c32417c93e43df660ffe":[3,0,0,1,19],
"group___output-_map.html#ga349701b87d6d6b2c38d3abd4522aedbd":[1,4,19],
"group___output-_map.html#ga349701b87d6d6b2c38d3abd4522aedbd":[3,0,0,0,15],
"group___output-_map.html#ga349701b87d6d6b2c38d3abd4522aedbd":[3,0,0,1,15],
"group___output-_map.html#ga3783d4f07b845145aedda045eb151c3a":[1,4,18],
"group___output-_map.html#ga3783d4f07b845145aedda045eb151c3a":[3,0,0,0,14],
"group___output-_map.html#ga3783d4f07b845145aedda045eb151c3a":[3,0,0,1,14],
"group___output-_map.html#ga3b41b9ad3b82219cccdb96e65a1455f0":[1,4,2],
"group___output-_map.html#ga3b41b9ad3b82219cccdb96e65a1455f0":[3,0,0,5,2],
"group___output-_map.html#ga3b7d3f7736690fa1d18adde27a55df63":[1,4,16],
"group___output-_map.html#ga3b7d3f7736690fa1d18adde27a55df63":[3,0,0,0,12],
"group___output-_map.html#ga3b7d3f7736690fa1d18adde27a55df63":[3,0,0,1,12],
"group___output-_map.html#ga40a5a6f02e87d05af5825fb250a9f0e8":[3,0,0,0,7],
"group___output-_map.html#ga40a5a6f02e87d05af5825fb250a9f0e8":[1,4,11],
"group___output-_map.html#ga40a5a6f02e87d05af5825fb250a9f0e8":[3,0,0,1,7],
"group___output-_map.html#ga41aaef8582e62f58120fe67097996ffe":[1,4,24],
"group___output-_map.html#ga41aaef8582e62f58120fe67097996ffe":[3,0,0,0,20],
"group___output-_map.html#ga41aaef8582e62f58120fe67097996ffe":[3,0,0,1,20],
"group___output-_map.html#ga539072da7ebc8ded9969f8fd90209ee7":[1,4,27],
"group___output-_map.html#ga539072da7ebc8ded9969f8fd90209ee7":[3,0,0,0,23],
"group___output-_map.html#ga539072da7ebc8ded9969f8fd90209ee7":[3,0,0,1,23],
"group___output-_map.html#ga5d741a42f479855e42c6a661f16c2d62":[3,0,0,0,5],
"group___output-_map.html#ga5d741a42f479855e42c6a661f16c2d62":[3,0,0,1,5],
"group___output-_map.html#ga5d741a42f479855e42c6a661f16c2d62":[1,4,9],
"group___output-_map.html#ga612b038c2c9ab2e467499c94ca1aef06":[3,0,0,0,2],
"group___output-_map.html#ga612b038c2c9ab2e467499c94ca1aef06":[1,4,6],
"group___output-_map.html#ga612b038c2c9ab2e467499c94ca1aef06":[3,0,0,1,2],
"group___output-_map.html#ga737e76fc01b80df08900bea085ca0d4a":[3,0,0,0,10],
"group___output-_map.html#ga737e76fc01b80df08900bea085ca0d4a":[3,0,0,1,10],
"group___output-_map.html#ga737e76fc01b80df08900bea085ca0d4a":[1,4,14],
"group___output-_map.html#ga772552c0554211e80f7106ed0d620cc5":[1,4,17],
"group___output-_map.html#ga772552c0554211e80f7106ed0d620cc5":[3,0,0,1,13],
"group___output-_map.html#ga772552c0554211e80f7106ed0d620cc5":[3,0,0,0,13],
"group___output-_map.html#ga8e27da05cf0aed640c3f66174c64761c":[1,4,25],
"group___output-_map.html#ga8e27da05cf0aed640c3f66174c64761c":[3,0,0,0,21],
"group___output-_map.html#ga8e27da05cf0aed640c3f66174c64761c":[3,0,0,1,21],
"group___output-_map.html#ga994b26634fe6c4947ec3ce6d1535ed35":[3,0,0,0,0],
"group___output-_map.html#ga994b26634fe6c4947ec3ce6d1535ed35":[1,4,4],
"group___output-_map.html#ga994b26634fe6c4947ec3ce6d1535ed35":[3,0,0,1,0],
"group___output-_map.html#gac59ddf2f1f8eb33f65bf567f66e680d4":[3,0,0,0,9],
"group___output-_map.html#gac59ddf2f1f8eb33f65bf567f66e680d4":[3,0,0,1,9],
"group___output-_map.html#gac59ddf2f1f8eb33f65bf567f66e680d4":[1,4,13],
"group___output-_map.html#gac60058cce49e733bf999d78d0c8bf79c":[3,0,0,0,24],
"group___output-_map.html#gac60058cce49e733bf999d78d0c8bf79c":[1,4,28],
"group___output-_map.html#gac60058cce49e733bf999d78d0c8bf79c":[3,0,0,1,24],
"group___output-_map.html#gacd41d5a826e786b4e7265bb1f6db00f1":[1,4,31],
"group___output-_map.html#gacd41d5a826e786b4e7265bb1f6db00f1":[3,0,0,0,26],
"group___output-_map.html#gacd41d5a826e786b4e7265bb1f6db00f1":[1,4,30],
"group___output-_map.html#gacd41d5a826e786b4e7265bb1f6db00f1":[3,0,0,1,26],
"group___output-_map.html#gacfc3f46a25058f35d3c9645ca0769a8b":[3,0,0,0,16],
"group___output-_map.html#gacfc3f46a25058f35d3c9645ca0769a8b":[1,4,20],
"group___output-_map.html#gacfc3f46a25058f35d3c9645ca0769a8b":[3,0,0,1,16],
"group___output-_map.html#gad36c5d66c179e5d2abf8c5af9aee7c81":[3,0,0,0,4],
"group___output-_map.html#gad36c5d66c179e5d2abf8c5af9aee7c81":[1,4,8],
"group___output-_map.html#gad36c5d66c179e5d2abf8c5af9aee7c81":[3,0,0,1,4],
"group___output-_map.html#gae02ca066026645750b0eb713e4dde4ba":[3,0,0,0,1],
"group___output-_map.html#gae02ca066026645750b0eb713e4dde4ba":[1,4,5],
"group___output-_map.html#gae02ca066026645750b0eb713e4dde4ba":[3,0,0,1,1],
"group___output-_map.html#gae9514db3e5e0b817b8eccf7f438a4628":[1,4,3],
"group___output-_map.html#gae9514db3e5e0b817b8eccf7f438a4628":[3,0,0,5,3],
"group___output-_map.html#gaeae7f0292a542c8a60b325383a7f3a97":[3,0,0,0,25],
"group___output-_map.html#gaeae7f0292a542c8a60b325383a7f3a97":[1,4,29],
"group___output-_map.html#gaeae7f0292a542c8a60b325383a7f3a97":[3,0,0,1,25],
"group___output-_map.html#gaf266d13a86772f616ac892ec91e39ca3":[3,0,0,1,27],
"group___output-_map.html#gaf266d13a86772f616ac892ec91e39ca3":[1,4,33],
"group___output-_map.html#gaf266d13a86772f616ac892ec91e39ca3":[3,0,0,0,27],
"group___output-_map.html#gaf266d13a86772f616ac892ec91e39ca3":[1,4,32],
"group___output-_map.html#gaf7e1ed41aa1374a20f24e15539dd04c2":[1,4,22],
"group___output-_map.html#gaf7e1ed41aa1374a20f24e15539dd04c2":[3,0,0,0,18],
"group___output-_map.html#gaf7e1ed41aa1374a20f24e15539dd04c2":[3,0,0,1,18],
"group___output-_map.html#gaf93b8531d75b4869f1348170fade09bc":[3,0,0,0,6],
"group___output-_map.html#gaf93b8531d75b4869f1348170fade09bc":[3,0,0,1,6],
"group___output-_map.html#gaf93b8531d75b4869f1348170fade09bc":[1,4,10],
"group___output-_map.html#gaf957b930462f30ee0ac70df1767229ba":[3,0,0,0,3],
"group___output-_map.html#gaf957b930462f30ee0ac70df1767229ba":[1,4,7],
"group___output-_map.html#gaf957b930462f30ee0ac70df1767229ba":[3,0,0,1,3],
"group___output-_map.html#structoutput__map__i":[1,4,1],
"group___output-_map.html#structoutput__map__l":[1,4,0],
"group___reverse-_transform.html":[1,6],
"group___reverse-_transform.html#ga6b7d72ebb3f33bf7716ce641994a3a65":[3,0,0,2,7],
"group___reverse-_transform.html#ga6b7d72ebb3f33bf7716ce641994a3a65":[3,0,0,3,7],
"group___reverse-_transform.html#ga6b7d72ebb3f33bf7716ce641994a3a65":[1,6,1],
"group___reverse-_transform.html#ga78df727bef00500f52b70a3683d9c033":[3,0,0,2,8],
"group___reverse-_transform.html#ga78df727bef00500f52b70a3683d9c033":[3,0,0,3,8],
"group___reverse-_transform.html#ga78df727bef00500f52b70a3683d9c033":[1,6,2],
"group___reverse-_transform.html#gad71800455c486b350261bdf555d2dd02":[3,0,0,2,2],
"group___reverse-_transform.html#gad71800455c486b350261bdf555d2dd02":[3,0,0,3,2],
"group___reverse-_transform.html#gad71800455c486b350261bdf555d2dd02":[1,6,0],
"index.html":[],
"index.html#build_sec":[1],
"index.html#intro_sec":[0],
"index.html#pyhton_sec":[3],
"index.html#run_sec":[2],
"modules.html":[1],
"odsaoutputmap_8c.html":[3,0,0,0],
"odsaoutputmap_8c_source.html":[3,0,0,0],
"odsaoutputmap_8h.html":[3,0,0,1],
"odsaoutputmap_8h_source.html":[3,0,0,1],
"odsatransform_8c.html":[3,0,0,2],
"odsatransform_8c.html#a0f55ac86aa9a797fb161b0d6d8d845a2":[3,0,0,2,5],
"odsatransform_8c.html#a1999ccf8a358abba304553a591dc85bb":[3,0,0,2,15],
"odsatransform_8c.html#a1a829802626206b2c7e319236c67158b":[3,0,0,2,13],
"odsatransform_8c.html#a890a195cb625c6a9874e97bd1ac62492":[3,0,0,2,14],
"odsatransform_8c.html#a98885503a233b50e6dbf99d70c3080d2":[3,0,0,2,10],
"odsatransform_8c.html#adf49b8f2eb835f31899dbb27d6354a7c":[3,0,0,2,12],
"odsatransform_8c_source.html":[3,0,0,2],
"odsatransform_8h.html":[3,0,0,3],
"odsatransform_8h.html#a22861ea8cd397c4b3488829efbdad3aa":[3,0,0,3,5],
"odsatransform_8h_source.html":[3,0,0,3],
"odsautil_8c.html":[3,0,0,4],
"odsautil_8c.html#a6fe75319add6900b39e235b47ebae7b1":[3,0,0,4,0],
"odsautil_8c_source.html":[3,0,0,4],
"odsautil_8h.html":[3,0,0,5],
"odsautil_8h.html#a02d7e048e2a023ef5774bb18702eede3":[3,0,0,5,5],
"odsautil_8h.html#a02d7e048e2a023ef5774bb18702eede3a890065937be27fa76ee9ee8e929cd365":[3,0,0,5,5,1],
"odsautil_8h.html#a02d7e048e2a023ef5774bb18702eede3ac20d7dd3c54177403d042280e7f74bb6":[3,0,0,5,5,2],
"odsautil_8h.html#a02d7e048e2a023ef5774bb18702eede3ade7ef34bbc4b0da7a1141a91f67dd568":[3,0,0,5,5,0],
"odsautil_8h.html#a02d7e048e2a023ef5774bb18702eede3afb1161383e516d2acffe75cf19c95d1c":[3,0,0,5,5,3],
"odsautil_8h.html#a1d69d6800743dd5cf473f8d06044b635":[3,0,0,5,0,5],
"odsautil_8h.html#a3a2e01ebc37de522d74081de9ec265b7":[3,0,0,5,0,6],
"odsautil_8h.html#a41a7e3baa3524781b6c5d25ee031dcc1":[3,0,0,5,0,3],
"odsautil_8h.html#a497fdbd693ea54a1e40ddb21e8b330a8":[3,0,0,5,0,2],
"odsautil_8h.html#a58b02a85d41375f435685469735406a4":[3,0,0,5,0,4],
"odsautil_8h.html#a6fe75319add6900b39e235b47ebae7b1":[3,0,0,5,6],
"odsautil_8h.html#a9a800523656d0d0b320e3b227085205b":[3,0,0,5,0,0],
"odsautil_8h.html#ade4f0da9c721091ec1fa39dcb4976c2a":[3,0,0,5,1],
"odsautil_8h.html#af69b768bd4f930e534e5fd697c4fec8d":[3,0,0,5,0,1],
"odsautil_8h.html#af6a258d8f3ee5206d682d799316314b1":[3,0,0,5,4],
"odsautil_8h.html#af6a258d8f3ee5206d682d799316314b1a08f175a5505a10b9ed657defeb050e4b":[3,0,0,5,4,1],
"odsautil_8h.html#af6a258d8f3ee5206d682d799316314b1ae9de385ef6fe9bf3360d1038396b884c":[3,0,0,5,4,0],
"odsautil_8h.html#structinput__data__structure":[3,0,0,5,0],
"odsautil_8h_source.html":[3,0,0,5],
"pages.html":[],
"testodsa_8c.html":[3,0,1,1],
"testodsa_8c.html#abf9e6b7e6f15df4b525a2e7705ba3089":[3,0,1,1,0],
"testodsa_8c_source.html":[3,0,1,1]
};
|
apache-2.0
|
mrinsss/Full-Repo
|
jobshoppa/system/application/controllers/admin/referral_setting_history.php
|
16531
|
<?php
/*********
* Author: Iman Biswas
* Date : 22 Sep 2011
* Modified By:
* Modified Date:
*
* Purpose:
* Controller For news
*
* @package Content Management
* @subpackage News
*
* @link InfController.php
* @link My_Controller.php
* @link model/news_model.php
* @link views/admin/news/
*/
class Referral_setting_history extends My_Controller implements InfController
{
public $cls_msg;//////All defined error messages.
public $pathtoclass;
public $uploaddir;
public $thumbdir;
public $showimgdir;
/**
 * Sets up browser title, user-facing status messages, the controller base
 * URL and the default model used by this report.
 */
public function __construct()
{
try
{
parent::__construct();
$this->data['title']="Referral Setting History Report";////Browser Title
////////Define Errors Here//////
$this->cls_msg = array();
$this->cls_msg["no_result"]="No information found.";
$this->cls_msg["save_err"]="Information failed to save.";
$this->cls_msg["save_succ"]="Information saved successfully.";
$this->cls_msg["delete_err"]="Information failed to remove.";
$this->cls_msg["delete_succ"]="Information removed successfully.";
////////end Define Errors Here//////
$this->pathtoclass = admin_base_url().$this->router->fetch_class()."/";//for redirecting from this class
//////// loading default model here //////////////
// NOTE(review): the history report reuses the commission-waiver model
// (aliased as mod_rect) rather than a dedicated referral-history model.
$this->load->model("commission_waiver_model","mod_rect");
//////// end loading default model here //////////////
}
catch(Exception $err_obj)
{
show_error($err_obj->getMessage());
}
}
/**
 * Default action: forwards to the listing page (show_list).
 */
public function index()
{
try
{
redirect($this->pathtoclass."show_list");
}
catch(Exception $err_obj)
{
show_error($err_obj->getMessage());
}
}
/****
 * Displays the paginated list of referral-setting-history records.
 *
 * Search state flows POST -> session: a fresh POST search resets paging to 0
 * and stores the criteria in the session; subsequent page requests read the
 * criteria back from the session. Only the "advanced" search (filter by
 * creation date) is active; the "basic" branch is commented out.
 *
 * @param int|null $start pagination offset (also read from the URI segment)
 * @param int|null $limit NOTE(review): this parameter is ignored — it is
 *                        overwritten below by $this->i_admin_page_limit.
 */
public function show_list($start=NULL,$limit=NULL)
{
try
{
$this->data['heading']="Referral Setting History";////Package Name[@package] Panel Heading
///////////generating search query///////
////////Getting Posted or session values for search///
$s_search=(isset($_POST["h_search"])?$this->input->post("h_search"):$this->session->userdata("h_search"));
$dt_created_on=($this->input->post("h_search")?$this->input->post("txt_created_on"):$this->session->userdata("txt_created_on"));
////////end Getting Posted or session values for search///
$s_where="";
if($s_search=="basic")
{
// Basic search is disabled for this report (no title field to match on).
/* $s_where=" WHERE n.s_title LIKE '%".get_formatted_string($s_news_title)."%' ";
/////Storing search values into session///
$this->session->set_userdata("txt_news_title",$s_news_title);
$this->session->set_userdata("h_search",$s_search);
$this->data["h_search"]=$s_search;
$this->data["txt_news_title"]=$s_news_title;
/////end Storing search values into session///
*/ }
elseif($s_search=="advanced")
{
//$s_where=" WHERE n.s_title LIKE '%".get_formatted_string($s_news_title)."%' ";
if(trim($dt_created_on)!="")
{
// Filter on the entry date (stored as a Unix timestamp) by calendar day.
$dt_start=date("Y-m-d",strtotime(trim($dt_created_on." "))) ;
$s_where.=" WHERE FROM_UNIXTIME( n.dt_entry_date , '%Y-%m-%d' ) ='".$dt_start."' ";
unset($dt_start);
}
/////Storing search values into session///
$this->session->set_userdata("txt_created_on",$dt_created_on);
$this->session->set_userdata("h_search",$s_search);
$this->data["h_search"]=$s_search;
$this->data["txt_created_on"]=$dt_created_on;
/////end Storing search values into session///
}
else////List all records, **not done
{
$s_where="";
/////Releasing search values from session///
$this->session->unset_userdata("txt_created_on");
$this->session->unset_userdata("h_search");
$this->data["h_search"]=$s_search;
$this->data["txt_created_on"]="";
/////end Storing search values into session///
}
// NOTE(review): $s_user_type is never assigned in this method; the unset is harmless but dead.
unset($s_search,$s_user_type,$dt_created_on);
///Setting Limits, If searched then start from 0////
if($this->input->post("h_search"))
{
$start=0;
}
else
{
$start=$this->uri->segment($this->i_uri_seg);
}
///////////end generating search query///////
$limit = $this->i_admin_page_limit;
$info = $this->mod_rect->fetch_multi($s_where,intval($start),$limit);
/////////Creating List view for displaying/////////
$table_view=array();
//////Table Headers, with width,alignment///////
$table_view["caption"]="Referral Setting History";
$table_view["total_rows"]=count($info);
$table_view["total_db_records"]=$this->mod_rect->gettotal_info($s_where);
$table_view["detail_view"] = FALSE;
$table_view["headers"][0]["width"] ="25%";
$table_view["headers"][0]["align"] ="left";
$table_view["headers"][0]["val"] ="Waiver Commission";
$table_view["headers"][1]["val"] ="Created On";
$table_view["headers"][2]["val"] ="Status";
//////end Table Headers, with width,alignment///////
/////////Table Data/////////
for($i=0; $i<$table_view["total_rows"]; $i++)
{
$i_col=0;
$table_view["tablerows"][$i][$i_col++] = encrypt($info[$i]["id"]);////Index 0 must be the encrypted PK
$table_view["tablerows"][$i][$i_col++] =$info[$i]["i_waiver_commission"];
$table_view["tablerows"][$i][$i_col++] =$info[$i]["dt_entry_date"];
$table_view["tablerows"][$i][$i_col++] =$info[$i]["s_is_active"];
}
/////////end Table Data/////////
unset($i,$i_col,$start,$limit);
$this->data["table_view"]=$this->admin_showin_table($table_view);
/////////Creating List view for displaying/////////
$this->data["search_action"]=$this->pathtoclass.$this->router->fetch_method();///used for search form action
//echo $this->data["search_action"];
$this->render();
unset($table_view,$info);
}
catch(Exception $err_obj)
{
show_error($err_obj->getMessage());
}
}
/* report list*/
    /**
     * Lists the referral-setting history with sortable columns and pagination.
     *
     * Reads the search mode ("basic"/"advanced") and the created-on filter from
     * POST (fresh search) or from the session (returning to a saved search),
     * builds the WHERE clause, fetches one page of rows from mod_rect and
     * renders them through the admin sortable-table helper.
     *
     * @param string $order_name encrypted name of the sort column ('' = default)
     * @param string $order_by   sort direction, 'asc' or 'desc'
     * @param mixed  $start      unused here; the offset is taken from the URI segment
     * @param mixed  $limit      unused here; the page size comes from i_admin_page_limit
     */
    public function show_report_list($order_name='',$order_by='asc',$start=NULL,$limit=NULL)
    {
        try
        {
            $this->data['heading']="Referral Setting History";////Package Name[@package] Panel Heading
            ///////////generating search query///////
            ////////Getting Posted or session values for search///
            // POST wins over session so a newly submitted search replaces the saved one.
            $s_search=(isset($_POST["h_search"])?$this->input->post("h_search"):$this->session->userdata("h_search"));
            $dt_created_on=($this->input->post("h_search")?$this->input->post("txt_created_on"):$this->session->userdata("txt_created_on"));
            ////////end Getting Posted or session values for search///
            $s_where="";
            if($s_search=="basic")
            {
                // "basic" search is intentionally disabled (body commented out).
            /*  $s_where=" WHERE n.s_title LIKE '%".get_formatted_string($s_news_title)."%' ";
                /////Storing search values into session///
                $this->session->set_userdata("txt_news_title",$s_news_title);
                $this->session->set_userdata("h_search",$s_search);
                $this->data["h_search"]=$s_search;
                $this->data["txt_news_title"]=$s_news_title;
                /////end Storing search values into session///
            */  }
            elseif($s_search=="advanced")
            {
                //$s_where=" WHERE n.s_title LIKE '%".get_formatted_string($s_news_title)."%' ";
                if(trim($dt_created_on)!="")
                {
                    // Filter rows whose unix entry date falls on the chosen day.
                    $dt_start=date("Y-m-d",strtotime(trim($dt_created_on." "))) ;
                    $s_where.=" WHERE FROM_UNIXTIME( n.dt_entry_date , '%Y-%m-%d' ) ='".$dt_start."' ";
                    unset($dt_start);
                }
                /////Storing search values into session///
                $this->session->set_userdata("txt_created_on",$dt_created_on);
                $this->session->set_userdata("h_search",$s_search);
                $this->data["h_search"]=$s_search;
                $this->data["txt_created_on"]=$dt_created_on;
                /////end Storing search values into session///
            }
            else////List all records, **not done
            {
                $s_where="";
                /////Releasing search values from session///
                $this->session->unset_userdata("txt_created_on");
                $this->session->unset_userdata("h_search");
                $this->data["h_search"]=$s_search;
                $this->data["txt_created_on"]="";
                /////end Storing search values into session///
            }
            // NOTE(review): $s_user_type is never assigned in this method, so this
            // unset() raises a PHP notice — confirm it is a copy/paste leftover.
            unset($s_search,$s_user_type,$dt_created_on);
            ///Setting Limits, If searched then start from 0////
            $i_uri_seg = 6;
            ///Setting Limits, If searched then start from 0////
            if($this->input->post("h_search"))
            {
                $start=0;
            }
            else
            {
                $start=$this->uri->segment($i_uri_seg);
            }
            ///////////end generating search query///////
            // List of fields for sorting
            // NOTE(review): sort key 'i_is_active' vs the displayed column
            // 's_is_active' below — verify the column name against the model.
            $arr_sort = array(0=>'dt_entry_date',1=>'i_is_active');
            // echo $order_name.'---';
            // echo decrypt($order_name);
            // Only whitelisted (decrypted) column names are accepted; anything
            // else falls back to the default sort field.
            $s_order_name = !empty($order_name)?in_array(decrypt($order_name),$arr_sort)?decrypt($order_name):$arr_sort[0]:$arr_sort[0];
            $order_name = empty($order_name)?encrypt($arr_sort[0]):$order_name;
            $limit = $this->i_admin_page_limit;
            $info = $this->mod_rect->fetch_report_multi($s_where,$s_order_name,$order_by,intval($start),$limit);
            /////////Creating List view for displaying/////////
            $table_view=array();
            //////Table Headers, with width,alignment///////
            $table_view["caption"]="Referral Setting History";
            $table_view["total_rows"]=count($info);
            $table_view["total_db_records"]=$this->mod_rect->gettotal_info($s_where);
            $table_view["order_name"]=$order_name;
            $table_view["order_by"] =$order_by;
            $table_view["src_action"]= $this->pathtoclass.$this->router->fetch_method() ;
            $j_col = 0;
            $table_view["headers"][$j_col]["width"] ="25%";
            $table_view["headers"][$j_col]["align"] ="left";
            $table_view["headers"][$j_col]["val"] ="Waiver Commission";
            $table_view["headers"][++$j_col]["val"] ="Created On";
            $table_view["headers"][$j_col]["sort"] = array('field_name'=>encrypt($arr_sort[0]));
            $table_view["headers"][++$j_col]["val"] ="Status";
            $table_view["headers"][$j_col]["sort"] = array('field_name'=>encrypt($arr_sort[1]));
            //////end Table Headers, with width,alignment///////
            /////////Table Data/////////
            for($i=0; $i<$table_view["total_rows"]; $i++)
            {
                $i_col=0;
                $table_view["tablerows"][$i][$i_col++] = encrypt($info[$i]["id"]);////Index 0 must be the encrypted PK
                $table_view["tablerows"][$i][$i_col++] =$info[$i]["i_waiver_commission"];
                $table_view["tablerows"][$i][$i_col++] =$info[$i]["dt_entry_date"];
                $table_view["tablerows"][$i][$i_col++] =$info[$i]["s_is_active"];
            }
            /////////end Table Data/////////
            unset($i,$i_col,$start,$limit);
            $this->data["table_view"]=$this->admin_showin_order_table($table_view,'',$i_uri_seg);
            /////////Creating List view for displaying/////////
            $this->data["search_action"]=$this->pathtoclass.$this->router->fetch_method();///used for search form action
            //echo $this->data["search_action"];
            $this->data['order_name'] = $order_name;
            $this->data['order_by'] = $order_by;
            $this->render();
            unset($table_view,$info);
        }
        catch(Exception $err_obj)
        {
            show_error($err_obj->getMessage());
        }
    }
/***
* Method to Display and Save New information
* This have to sections:
* >>Displaying Blank Form for new entry.
* >>Saving the new information into DB
* After Posting the form, the posted values must be
* shown in the form if any error occurs to avoid re-entry of the form.
*
* On Success redirect to the showList interface else display error here.
*/
    public function add_information()
    {
        // TODO: not implemented — the add-form/save flow described in the
        // docblock above has not been built for this controller yet.
    }
/***
* Method to Display and Save Updated information
* This have to sections:
* >>Displaying Values in Form for modifying entry.
* >>Saving the new information into DB
* After Posting the form, the posted values must be
* shown in the form if any error occurs to avoid re-entry of the form.
*
* On Success redirect to the showList interface else display error here.
* @param int $i_id, id of the record to be modified.
*/
    public function modify_information($i_id=0)
    {
        // TODO: not implemented — edit-form/update flow is pending.
    }
/***
* Method to Delete information
* This have no interface but db operation
* will be done here.
*
* On Success redirect to the showList interface else display error in showList interface.
* @param int $i_id, id of the record to be modified.
*/
    public function remove_information($i_id=0)
    {
        // TODO: not implemented — delete operation is pending.
    }
/***
* Shows details of a single record.
*
* @param int $i_id, Primary key
*/
public function show_detail($i_id=0)
{
try
{
if(trim($i_id)!="")
{
$info=$this->mod_rect->fetch_this(decrypt($i_id));
if(!empty($info))
{
$temp=array();
$temp["s_id"]= encrypt($info["id"]);////Index 0 must be the encrypted PK
$temp["i_waiver_commission"]= trim($info["i_waiver_commission"]);
$temp["s_is_active"]= trim($info["s_is_active"]);
$temp["dt_created_on"]= trim($info["dt_created_on"]);
$this->data["info"]=$temp;
unset($temp);
}
unset($info);
}
$this->add_css("css/admin/style.css");///include main css
$this->add_js("js/jquery/jquery-1.4.2.js");///include main css
$this->add_css("js/jquery/themes/ui-darkness/ui.all.css");///include jquery css
$this->render("referral_setting_history/show_detail",TRUE);
unset($i_id);
}
catch(Exception $err_obj)
{
show_error($err_obj->getMessage());
}
}
/***
* Checks duplicate value using ajax call
*/
    public function ajax_checkduplicate()
    {
        // TODO: not implemented — no duplicate check is performed yet.
    }
    public function __destruct()
    {
        // Intentionally empty: no resources to release.
    }
}
?>
|
apache-2.0
|
pdbartlett/misc-stuff
|
euler/scala/Primes.scala
|
275
|
object Primes {
  // Memoised primality results; 0 and 1 are seeded as non-prime.
  // NOTE: the mutable cache is not synchronised — assumes single-threaded use.
  private var cache: Map[Int, Boolean] = Map(0 -> false, 1 -> false)

  /**
   * Returns true when `num` is prime. Results are cached across calls.
   *
   * Fix: the original reported every negative number as prime, because
   * Math.sqrt(negative) is NaN and NaN.toInt is 0, leaving the trial-division
   * range empty. Guarding num < 2 makes all such inputs non-prime.
   */
  def isPrime(num: Int): Boolean =
    if (num < 2) false
    else cache.getOrElse(num, {
      // Trial division up to sqrt(num); the recursive isPrime(n) filter
      // skips composite candidate divisors.
      val p = !(2.until(Math.sqrt(num).toInt + 1).exists(n => isPrime(n) && (num % n == 0)))
      cache += (num -> p)
      p
    })
}
|
apache-2.0
|
NinjaVault/NinjaHive
|
NinjaHive.Contract/IValidatable.cs
|
227
|
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace NinjaHive.Contract
{
    /// <summary>
    /// Contract for objects that expose the outcome of data-annotation
    /// validation.
    /// </summary>
    public interface IValidatable
    {
        /// <summary>
        /// Gets the collection of validation results produced for this object.
        /// </summary>
        ICollection<ValidationResult> ValidationResults { get; }
    }
}
|
apache-2.0
|
jcmandersson/Operation
|
test/list-spec.js
|
3343
|
// Jasmine specs for the list page's URL-manipulation helpers.
// NOTE(review): splitOnce, addToUrl, removeFromUrl, addAnd, changeSpecialty,
// changeState and changeLimit are assumed to be globals provided by the page
// under test — confirm they are loaded before this spec runs.
describe('The list page', function() {
    it('function splitOnce can split a string with a char correctly', function() {
        var str = 'a.b.c.d';
        var res = splitOnce(str, '.');
        expect(res[0]).toBe('a');
        expect(res[1]).toBe('b.c.d');
    });
    it('function splitOnce can split a string with a missing char correctly', function() {
        var str = 'a.b.c.d';
        var res = splitOnce(str, ',');
        expect(res[0]).toBe('a.b.c.d');
        expect(res[1]).toBe('');
    });
    it('function addToUrl can add a specialty correctly with an empty query', function() {
        var url = 'http://hej.com/list';
        url = addToUrl(url, 'specialty', 'Allmän kirurgi');
        expect(url).toBe('http://hej.com/list?specialty=Allmän kirurgi');
    });
    it('function addToUrl can add a specialty correctly with a non-empty query', function() {
        var url = 'http://hej.com/list?specialty=Urologi&state=Utkast';
        url = addToUrl(url, 'specialty', 'Allmän kirurgi');
        expect(url).toBe('http://hej.com/list?specialty=Allmän kirurgi&state=Utkast');
    });
    it('function removeFromUrl can remove a specialty correctly with an query with one element', function() {
        var url = 'http://hej.com/list?specialty=Urologi';
        url = removeFromUrl(url, 'specialty');
        expect(url).toBe('http://hej.com/list?');
    });
    it('function removeFromUrl can remove a specialty correctly with a query with two elements', function() {
        var url = 'http://hej.com/list?specialty=Urologi&state=Utkast';
        url = removeFromUrl(url, 'specialty');
        expect(url).toBe('http://hej.com/list?state=Utkast');
    });
    it('function addAnd can add a & to a query that does not end with ?', function() {
        var url = 'http://hej.com/list?specialty=Urologi';
        url = addAnd(url);
        expect(url).toBe('http://hej.com/list?specialty=Urologi&');
    });
    it('function addAnd can add a & to a query that does end with ?', function() {
        // A trailing '?' is left untouched — no '&' is appended.
        var url = 'http://hej.com/list?';
        url = addAnd(url);
        expect(url).toBe('http://hej.com/list?');
    });
    // NOTE(review): the specs below mutate window.location and each one's
    // expectation depends on the state left by the previous spec (e.g. the
    // 'state=Utkast' spec expects the URL to already end with '?').
    // This order-dependence is fragile — confirm it is intentional.
    it('function changeSpecialty can change the specialty from an emty query', function() {
        var url = window.location.href;
        changeSpecialty('Urologi');
        expect(window.location.href).toBe(url + '?specialty=Urologi');
    });
    it('function changeSpecialty can change the specialty to Alla specialiteter', function() {
        var url = window.location.href;
        changeSpecialty('Alla specialiteter');
        expect(window.location.href).toBe(url.split('?')[0] + '?');
    });
    it('function changeState can change the state from an emty query', function() {
        var url = window.location.href;
        changeState('Utkast');
        expect(window.location.href).toBe(url + 'state=Utkast');
    });
    it('function changeState can change the state to Alla tillstånd', function() {
        var url = window.location.href;
        changeState('Alla tillstånd');
        expect(window.location.href).toBe(url.split('?')[0] + '?');
    });
    it('function changeLimit can change the limit from an emty query', function() {
        var url = window.location.href;
        changeLimit('100');
        expect(window.location.href).toBe(url + 'limit=100');
    });
    it('function changeLimit can change the limit to the default', function() {
        var url = window.location.href;
        changeLimit('25');
        expect(window.location.href).toBe(url.split('?')[0] + '?');
    });
});
|
apache-2.0
|
febo/myra
|
src/main/java/myra/classification/tree/Tree.java
|
13953
|
/*
* Tree.java
* (this file is part of MYRA)
*
* Copyright 2008-2015 Fernando Esteban Barril Otero
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package myra.classification.tree;
import static myra.datamining.Attribute.EQUAL_TO;
import static myra.datamining.Attribute.GREATER_THAN;
import static myra.datamining.Attribute.IN_RANGE;
import static myra.datamining.Attribute.LESS_THAN_OR_EQUAL_TO;
import static myra.datamining.Attribute.Type.CONTINUOUS;
import java.util.LinkedList;
import myra.AbstractWeighable;
import myra.Cost;
import myra.classification.Label;
import myra.datamining.Attribute;
import myra.datamining.Attribute.Condition;
import myra.datamining.Dataset;
import myra.datamining.Model;
/**
* This class represents a decision tree.
*
* @author Fernando Esteban Barril Otero
*/
public class Tree extends AbstractWeighable<Tree> implements Model {
    /**
     * The root node of the tree.
     */
    private Node root;

    /**
     * The quality of the tree.
     */
    private Cost quality;

    /**
     * The iteration that created the tree.
     */
    private int iteration;

    /**
     * Creates a new <code>Tree</code> object.
     *
     * @param root
     *            the root node of the tree.
     */
    public Tree(Node root) {
	this.root = root;
    }

    /**
     * Returns the quality of the tree.
     *
     * @return the quality of the tree.
     */
    public Cost getQuality() {
	return quality;
    }

    /**
     * Sets the quality of the tree.
     *
     * @param quality
     *            the quality value to set.
     */
    public void setQuality(Cost quality) {
	this.quality = quality;
    }

    /**
     * Returns the iteration that created the tree.
     *
     * @return the iteration that created the tree.
     */
    public int getIteration() {
	return iteration;
    }

    /**
     * Sets the iteration that created the tree.
     *
     * @param iteration
     *            the iteration to set.
     */
    public void setIteration(int iteration) {
	this.iteration = iteration;
    }

    /**
     * Returns the root node of the tree.
     *
     * @return the root node of the tree.
     */
    public Node getRoot() {
	return root;
    }

    /**
     * Sets the root node of the tree.
     *
     * @param root
     *            the node to set.
     */
    public void setRoot(Node root) {
	this.root = root;
    }

    /**
     * Returns the number of internal (non-leaf) nodes in the tree, counted
     * with a breadth-first traversal from the root.
     *
     * @return the number of internal nodes in the tree.
     */
    public int internal() {
	int count = 0;
	LinkedList<Node> toVisit = new LinkedList<Node>();
	toVisit.add(root);

	while (!toVisit.isEmpty()) {
	    Node node = toVisit.removeFirst();

	    if (!node.isLeaf()) {
		for (Node child : ((InternalNode) node).children) {
		    toVisit.add(child);
		}
		count++;
	    }
	}

	return count;
    }

    /**
     * Returns the total number of nodes (internal and leaf) in the tree.
     *
     * @return the number of nodes in the tree.
     */
    public int size() {
	int count = 0;
	LinkedList<Node> toVisit = new LinkedList<Node>();
	toVisit.add(root);

	while (!toVisit.isEmpty()) {
	    Node node = toVisit.removeFirst();
	    count++;

	    if (!node.isLeaf()) {
		for (Node child : ((InternalNode) node).children) {
		    toVisit.add(child);
		}
	    }
	}

	return count;
    }

    /**
     * Determines the class value prediction for the specified instance by
     * following the tree in a top-down fashion, accumulating (weighted) class
     * probabilities into <code>probabilities</code>.
     *
     * @param dataset
     *            the current dataset.
     * @param instance
     *            the index of the instance.
     * @param node
     *            the node to visit.
     * @param weight
     *            the weight of the instance.
     * @param probabilities
     *            the class probabilities vector.
     */
    protected void prediction(Dataset dataset,
			      int instance,
			      Node node,
			      double weight,
			      double[] probabilities) {
	if (node.isLeaf()) {
	    if (node.getTotal() > 0) {
		// distribute the weight proportionally to the class
		// frequencies observed at this leaf
		double[] distribution = node.getDistribution();
		double total = node.getTotal();

		for (int i = 0; i < distribution.length; i++) {
		    if (distribution[i] > 0) {
			probabilities[i] += weight * (distribution[i] / total);
		    }
		}
	    } else {
		// empty leaf: fall back to its stored class prediction
		probabilities[((LeafNode) node).getPrediction().value()] +=
			weight;
	    }

	    return;
	}

	InternalNode internal = (InternalNode) node;
	Attribute attribute = dataset.attributes()[internal.attribute()];
	double value = dataset.value(instance, attribute.getIndex());

	if (dataset.isMissing(attribute, value)) {
	    // missing value: follow every branch, weighting each by the
	    // fraction of instances that reached it.
	    // NOTE(review): assumes internal.getTotal() > 0 — confirm an
	    // internal node always covers at least one instance.
	    for (int i = 0; i < internal.children.length; i++) {
		prediction(dataset,
			   instance,
			   internal.children[i],
			   weight * (internal.children[i].getTotal()
				   / internal.getTotal()),
			   probabilities);
	    }
	} else {
	    // follow the first branch whose condition the value satisfies
	    for (int i = 0; i < internal.conditions.length; i++) {
		if (internal.conditions[i].satisfies(value)) {
		    prediction(dataset,
			       instance,
			       internal.children[i],
			       weight,
			       probabilities);
		    break;
		}
	    }
	}
    }

    /**
     * Returns the predicted class for the specified instance (the class with
     * the highest accumulated probability; ties resolve to the lowest index).
     *
     * @param dataset
     *            the current dataset.
     * @param instance
     *            the index of the instance.
     *
     * @return the predicted class for the specified instance.
     */
    public Label predict(Dataset dataset, int instance) {
	double[] probabilities = new double[dataset.classLength()];
	prediction(dataset, instance, root, 1.0, probabilities);

	int highest = 0;

	for (int i = 1; i < probabilities.length; i++) {
	    if (probabilities[i] > probabilities[highest]) {
		highest = i;
	    }
	}

	return new Label(dataset.getTarget(), highest);
    }

    /**
     * Returns the string representation of the tree, followed by node counts
     * and (when available) the quality/iteration summary.
     *
     * @param dataset
     *            the current dataset.
     *
     * @return the string representation of the tree.
     */
    public String toString(Dataset dataset) {
	// StringBuilder: local, single-threaded string building does not need
	// the synchronised StringBuffer used previously
	StringBuilder buffer = new StringBuilder();
	buffer.append(export(dataset));
	buffer.append(System.lineSeparator());

	int size = size();
	buffer.append(String.format("%nTotal number of nodes: %d%n", size));
	buffer.append(String.format("Number of leaf nodes: %d",
				    (size - internal())));

	if (quality != null) {
	    buffer.append(String.format("%nTree quality: %f%n", quality.raw()));
	    buffer.append(String.format("Tree iteration: %d%n", iteration));
	} else {
	    buffer.append(String.format("%n"));
	}

	return buffer.toString();
    }

    @Override
    public String export(Dataset dataset) {
	return toString(dataset, root, "");
    }

    /**
     * Substitutes continuous attributes' threshold values with values that
     * occur in the dataset.
     *
     * @param dataset
     *            the current dataset.
     */
    public void fixThresholds(Dataset dataset) {
	LinkedList<InternalNode> nodes = new LinkedList<InternalNode>();

	if (!root.isLeaf()) {
	    nodes.add((InternalNode) root);
	}

	while (!nodes.isEmpty()) {
	    InternalNode node = nodes.removeFirst();

	    for (int i = 0; i < node.children.length; i++) {
		Condition c = node.conditions[i];

		if (c != null && dataset.attributes()[c.attribute]
			.getType() == CONTINUOUS) {
		    // if a condition was created, we substitute the threshold
		    // values with values that occur in the dataset (this is to
		    // avoid having threshold values that don't represent values
		    // from the dataset)
		    for (int j = 0; j < dataset.size(); j++) {
			double v = dataset.value(j, c.attribute);

			for (int k = 0; k < c.value.length; k++) {
			    if (v <= c.value[k] && v > c.threshold[k]) {
				c.threshold[k] = v;
			    }
			}
		    }

		    // at the end of this procedure, the threshold and value
		    // should be the same
		    for (int k = 0; k < c.value.length; k++) {
			c.value[k] = c.threshold[k];
		    }
		}

		if (!node.children[i].isLeaf()) {
		    nodes.add((InternalNode) node.children[i]);
		}
	    }
	}
    }

    /**
     * Returns the string representation of the (sub-)tree rooted at
     * <code>node</code>, one branch condition per line.
     *
     * @param dataset
     *            the current dataset.
     * @param node
     *            the node representing the root of the (sub-)tree.
     * @param indent
     *            the current level identation.
     *
     * @return the string representation of the tree.
     */
    private String toString(Dataset dataset, Node node, String indent) {
	StringBuilder buffer = new StringBuilder();

	if (!node.isLeaf()) {
	    InternalNode internal = (InternalNode) node;

	    if (node != root) {
		buffer.append(System.lineSeparator());
	    }

	    String name = node.getName();

	    for (int i = 0; i < internal.conditions.length; i++) {
		// if we are not the first branch
		if (i > 0) {
		    buffer.append(System.lineSeparator());
		}

		buffer.append(indent);
		Condition condition = internal.conditions[i];

		switch (condition.relation) {
		case LESS_THAN_OR_EQUAL_TO:
		    buffer.append(String
			    .format("%s <= %s",
				    name,
				    Double.toString(condition.value[0])));
		    break;

		case GREATER_THAN:
		    buffer.append(String
			    .format("%s > %s",
				    name,
				    Double.toString(condition.value[0])));
		    break;

		case IN_RANGE:
		    buffer.append(String
			    .format("%s < %s <= %s",
				    Double.toString(condition.value[0]),
				    name,
				    Double.toString(condition.value[1])));
		    break;

		case EQUAL_TO:
		    buffer.append(String
			    .format("%s = %s",
				    name,
				    dataset.attributes()[condition.attribute]
					    .value((int) condition.value[0])));
		    break;
		}

		buffer.append(": ");
		String next = indent + "|   ";
		buffer.append(toString(dataset, internal.children[i], next));
	    }
	} else {
	    buffer.append(node.getName());

	    // leaf summary: "(covered/errors)" in the C4.5 style
	    double[] distribution = node.getDistribution();
	    int prediction = ((LeafNode) node).getPrediction().value();
	    double errors = 0.0;

	    for (int i = 0; i < distribution.length; i++) {
		if (i != prediction) {
		    errors += distribution[i];
		}
	    }

	    buffer.append(" (");
	    buffer.append(String.format("%.1f",
					distribution[prediction] + errors));

	    if (errors > 0.0) {
		buffer.append("/");
		buffer.append(String.format("%.1f", errors));
	    }

	    buffer.append(")");
	}

	return buffer.toString();
    }

    @Override
    public int compareTo(Tree o) {
	// compare the quality
	int c = quality.compareTo(o.quality);

	if (c == 0) {
	    // tie-break: the smaller tree ranks higher
	    c = Double.compare(o.size(), size());
	}

	return c;
    }
}
|
apache-2.0
|
cube-soft/Cube.Net
|
Applications/Rss/Reader/Sources/Interactions/TitleConverter.cs
|
3181
|
/* ------------------------------------------------------------------------- */
//
// Copyright (c) 2010 CubeSoft, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
/* ------------------------------------------------------------------------- */
using System;
using System.Globalization;
using System.Reflection;
using System.Windows.Data;
using System.Windows.Markup;
using Cube.Mixin.Assembly;
namespace Cube.Net.Rss.Reader
{
    /* --------------------------------------------------------------------- */
    ///
    /// TitleConverter
    ///
    /// <summary>
    /// Converter that builds the main window's title string.
    /// </summary>
    ///
    /* --------------------------------------------------------------------- */
    public class TitleConverter : MarkupExtension, IMultiValueConverter
    {
        #region Methods

        /* ----------------------------------------------------------------- */
        ///
        /// Convert
        ///
        /// <summary>
        /// Executes the conversion: "{item title} - {application title}",
        /// with a read-only marker appended when the lock setting is
        /// read-only.
        /// </summary>
        ///
        /* ----------------------------------------------------------------- */
        public object Convert(object[] values, Type targetType, object parameter, CultureInfo culture)
        {
            var asm = Assembly.GetExecutingAssembly();
            var ss = new System.Text.StringBuilder();
            if (values[0] is RssItem src) ss.Append($"{src.Title} - ");
            ss.Append(asm.GetTitle());
            if (values[1] is LockSetting x && x.IsReadOnly) ss.Append($" ({Properties.Resources.MessageReadOnly})");
            return ss.ToString();
        }

        /* ----------------------------------------------------------------- */
        ///
        /// ConvertBack
        ///
        /// <summary>
        /// Executes the reverse conversion.
        /// </summary>
        ///
        /// <remarks>
        /// This method is not supported and always throws.
        /// </remarks>
        ///
        /* ----------------------------------------------------------------- */
        public object[] ConvertBack(object s, Type[] t, object p, CultureInfo c) =>
            throw new NotSupportedException();

        /* ----------------------------------------------------------------- */
        ///
        /// ProvideValue
        ///
        /// <summary>
        /// Returns this instance as the markup-extension value.
        /// </summary>
        ///
        /* ----------------------------------------------------------------- */
        public override object ProvideValue(IServiceProvider serviceProvider) => this;

        #endregion
    }
}
|
apache-2.0
|
tsq-old/hunteron
|
test/unit/controllers/UserController.test.js
|
347
|
/**
* Created by tsq on 14-10-27.
*/
var request = require('supertest');
// Verifies that a valid login POST redirects to /mypage.
// NOTE(review): assumes a user { name: 'test', password: '123' } exists in the
// test fixtures and that the sails HTTP hook is lifted — confirm test setup.
describe('UsersController', function() {
  it('should redirect to /mypage', function(done) {
    request(sails.hooks.http.app)
      .post('/user/login')
      .send({name: 'test', password:'123'})
      .expect(302)                          // HTTP redirect status
      .expect('location', '/mypage', done);
  });
});
|
apache-2.0
|
rewayaat/rewayaat
|
src/main/java/com/rewayaat/core/HighlySignificantTerms.java
|
4294
|
package com.rewayaat.core;
import com.rewayaat.config.ESClientProvider;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.SignificantTerms;
import org.json.JSONArray;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
/**
* A collection of highly significant terms based on a given set of input terms.
*/
public class HighlySignificantTerms {

    /** Maximum number of significant terms to return. */
    private final int size;

    /** Terms the significance is computed against. */
    private List<String> inputTerms;

    /** Buckets scoring below this significance are dropped from the output. */
    private static final double MINIMUM_SCORE = 0.5;

    /**
     * @param size       maximum number of terms to return.
     * @param inputTerms the input terms driving the significance query.
     */
    public HighlySignificantTerms(int size, String[] inputTerms) {
        this.size = size;
        this.inputTerms = Arrays.asList(inputTerms);
    }

    /**
     * Queries Elasticsearch for terms that are significantly associated with
     * the input terms, in both the "english" and "arabic" fields, and returns
     * up to {@code size} of them (highest significance first), excluding the
     * input terms themselves and anything scoring below {@link #MINIMUM_SCORE}.
     *
     * @return the significant terms as a JSON array of strings.
     * @throws UnknownHostException if the ES client cannot resolve its host.
     */
    public JSONArray terms() throws UnknownHostException {
        JSONArray result = new JSONArray();
        List<String> englishValues = new ArrayList<>();
        List<String> arabicValues = new ArrayList<>();
        for (String inputTerm : inputTerms) {
            // filter out phrases..
            if (!inputTerm.trim().contains(" ") && !inputTerm.trim().startsWith("\"")) {
                if (new RewayaatTerm(inputTerm).isArabic()) {
                    arabicValues.add(StringUtils.stripAccents(inputTerm.trim()));
                } else {
                    englishValues.add(StringUtils.stripAccents(inputTerm.trim().toLowerCase()));
                }
            }
        }
        // Match documents containing at least ~25% of the input terms, and
        // aggregate the statistically significant co-occurring terms.
        SearchResponse resp = ESClientProvider.instance().getClient().prepareSearch(ESClientProvider.INDEX)
                .setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
                .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termsQuery("english", englishValues))
                        .should(QueryBuilders.termsQuery("arabic", arabicValues)).minimumShouldMatch((int) (this.inputTerms.size() * 0.25)))
                .addAggregation(AggregationBuilders
                        .significantTerms("significantEnglishTerms").field("english").size(this.size))
                .addAggregation(AggregationBuilders
                        .significantTerms("significantArabicTerms").field("arabic").size(this.size))
                .get();
        SignificantTerms englishTermsAgg = resp.getAggregations().get("significantEnglishTerms");
        SignificantTerms arabicTermsAgg = resp.getAggregations().get("significantArabicTerms");
        List<SignificantTerms.Bucket> allBuckets = new ArrayList<>();
        allBuckets.addAll(englishTermsAgg.getBuckets());
        allBuckets.addAll(arabicTermsAgg.getBuckets());
        // Drop buckets that merely echo one of the input terms back.
        allBuckets = allBuckets.stream()
                .filter(x -> !this.inputTerms.contains(StringUtils.stripAccents(x.getKeyAsString().trim().toLowerCase())))
                .collect(Collectors.toList());
        Collections.sort(allBuckets, new SignifcantTermsBucketComparator());
        List<SignificantTerms.Bucket> firstSizeElementsList = allBuckets.stream().limit(this.size).collect(Collectors.toList());
        for (SignificantTerms.Bucket bucket : firstSizeElementsList) {
            // List is sorted descending, so the first low score ends the scan.
            if (bucket.getSignificanceScore() < MINIMUM_SCORE) {
                break;
            } else {
                result.put(bucket.getKeyAsString());
            }
        }
        return result;
    }

    /**
     * Orders buckets by descending significance score.
     */
    public class SignifcantTermsBucketComparator implements Comparator<SignificantTerms.Bucket> {
        @Override
        public int compare(SignificantTerms.Bucket o1, SignificantTerms.Bucket o2) {
            // Double.compare provides a consistent total order (including NaN),
            // unlike the original's raw ==/< comparisons on doubles.
            // Arguments are reversed to keep the descending order.
            return Double.compare(o2.getSignificanceScore(), o1.getSignificanceScore());
        }
    }
}
|
apache-2.0
|
terasoluna-batch/v5-tutorial
|
terasoluna-batch-tutorial/src/main/java/org/terasoluna/batch/tutorial/exceptionhandlingwithlistener/chunk/PointAddItemProcessor.java
|
1963
|
/*
* Copyright (C) 2017 NTT DATA Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.terasoluna.batch.tutorial.exceptionhandlingwithlistener.chunk;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.validator.Validator;
import org.springframework.stereotype.Component;
import org.terasoluna.batch.tutorial.common.dto.MemberInfoDto;
import javax.inject.Inject;
@Component
public class PointAddItemProcessor implements ItemProcessor<MemberInfoDto, MemberInfoDto> {

    /** Status value marking a record as pending point addition. */
    private static final String TARGET_STATUS = "1";

    /** Status value a record is reset to after processing. */
    private static final String INITIAL_STATUS = "0";

    private static final String GOLD_MEMBER = "G";

    private static final String NORMAL_MEMBER = "N";

    /** Points awarded to a gold member per processed record. */
    private static final int GOLD_POINT = 100;

    /** Points awarded to a normal member per processed record. */
    private static final int NORMAL_POINT = 10;

    /** Upper bound applied to a member's point balance. */
    private static final int MAX_POINT = 1000000;

    @Inject
    Validator<MemberInfoDto> validator;

    /**
     * Validates the item and, when its status is {@link #TARGET_STATUS}, adds
     * the membership-type bonus (capped at {@link #MAX_POINT}) and resets the
     * status to {@link #INITIAL_STATUS}. Other items pass through unchanged.
     *
     * @param item the member record to process.
     * @return the (possibly updated) member record.
     * @throws Exception when validation fails.
     */
    @Override
    public MemberInfoDto process(MemberInfoDto item) throws Exception {
        validator.validate(item);

        if (!TARGET_STATUS.equals(item.getStatus())) {
            // not flagged for processing; pass through unchanged
            return item;
        }

        if (GOLD_MEMBER.equals(item.getType())) {
            item.setPoint(item.getPoint() + GOLD_POINT);
        } else if (NORMAL_MEMBER.equals(item.getType())) {
            item.setPoint(item.getPoint() + NORMAL_POINT);
        }
        if (item.getPoint() > MAX_POINT) {
            item.setPoint(MAX_POINT);
        }
        item.setStatus(INITIAL_STATUS);

        return item;
    }
}
|
apache-2.0
|
Joseja8/android_speechRecognition
|
AndroidApp/app/src/main/java/com/example/ambiental/speechrecognition/ConnectionThread.java
|
1818
|
package com.example.ambiental.speechrecognition;
import android.renderscript.Int2;
import android.util.Log;
import android.widget.Button;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.Socket;
import static com.example.ambiental.speechrecognition.MainActivity.TAG;
/**
* @autor: Joaquín Ramírez Guerra
* @description: Class with parametres to create a socket and connect it with server
*/
public class ConnectionThread {
public static String host;
public static int port;
public static Socket socket;
public static BufferedWriter bufferedW;
// Default constructor
public ConnectionThread() {
}
//Parametrized constructor
public ConnectionThread(String h, String p ) {
host = h;
port = Integer.parseInt(p);
Log.d(TAG,"En el constructor de ConnectionThread.");
}
// Function to connect with server
public void connect(){
try {
socket = new Socket(host, port); //connect to server
bufferedW = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream()));
} catch (IOException e) {
e.printStackTrace();
Log.d(TAG, "Error to connect: " + e.getMessage() );
}
Log.d(TAG, "Connection succesful!");
}
// Function to shut down socket and close connection with server
public void closeConnection(){
try {
socket.close(); //closing the connection
} catch (IOException e) {
e.printStackTrace();
Log.d(TAG, "Connection closed. :( ");
}
}
// Function to check socket connection
public boolean isConnect(){
if (socket.isConnected()){
return true;
}
return false;
}
}
|
apache-2.0
|
vjanmey/EpicMudfia
|
com/planet_ink/coffee_mud/Items/BasicTech/GenReflectionShield.java
|
3612
|
package com.planet_ink.coffee_mud.Items.BasicTech;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.Technical.TechType;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class GenReflectionShield extends GenPersonalShield
{
	@Override
	public String ID()
	{
		return "GenReflectionShield";
	}

	public GenReflectionShield()
	{
		super();
		setName("a reflection shield generator");
		setDisplayText("a reflection shield generator sits here.");
		setDescription("The reflection shield generator is worn about the body and activated to use. It protects against laser type weapons. ");
	}

	// Message shown while the field is active; phrasing depends on whether
	// the generator is worn by a mob or lying somewhere.
	@Override
	protected String fieldOnStr(MOB viewerM)
	{
		if(owner() instanceof MOB)
			return "A reflecting field of energy surrounds <O-NAME>.";
		return "A reflecting field of energy surrounds <T-NAME>.";
	}

	// Message shown when the field collapses.
	@Override
	protected String fieldDeadStr(MOB viewerM)
	{
		if(owner() instanceof MOB)
			return "The reflecting field around <O-NAME> flickers and dies out.";
		return "The reflecting field around <T-NAME> flickers and dies out.";
	}

	// Announce the reflection in the room, re-using the weapon's own hit
	// string where possible so the message matches the attack style.
	// Always returns false.
	@Override
	protected boolean doShield(MOB mob, CMMsg msg, double successFactor)
	{
		final Room R=mob.location();
		if(R==null)
			return false;
		if(msg.tool() instanceof Weapon)
		{
			final String hitStr="^F"+((Weapon)msg.tool()).hitString(0)+"^N";
			if(hitStr.indexOf("<DAMAGE>")>0)
				R.show(msg.source(),msg.target(),msg.tool(),CMMsg.MSG_OK_VISUAL,CMStrings.replaceAll(hitStr, "<DAMAGE>", "it reflects off the shield around"));
			else
			if(hitStr.indexOf("<DAMAGES>")>0)
				R.show(msg.source(),msg.target(),msg.tool(),CMMsg.MSG_OK_VISUAL,CMStrings.replaceAll(hitStr, "<DAMAGES>", "reflects off the shield around"));
			else
				R.show(mob,msg.source(),msg.tool(),CMMsg.MSG_OK_VISUAL,_("The field around <S-NAME> reflects the <O-NAMENOART> damage."));
		}
		else
			R.show(mob,msg.source(),msg.tool(),CMMsg.MSG_OK_VISUAL,_("The field around <S-NAME> reflects the <O-NAMENOART> damage."));
		return false;
	}

	// The shield only deflects electronic laser-type weapons, and only when
	// activated and the random roll beats the success factor.
	@Override
	protected boolean doesShield(MOB mob, CMMsg msg, double successFactor)
	{
		if(!activated())
			return false;
		return (msg.tool() instanceof Electronics)
			&& (msg.tool() instanceof Weapon)
			&& (Math.random() >= successFactor)
			&& (((Weapon)msg.tool()).weaponType()==Weapon.TYPE_LASERING);
	}
}
|
apache-2.0
|
captiosus/treadmill
|
tests/appcfg/abort_test.py
|
3800
|
"""Unit test for treadmill.appcfg.abort
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import json
import os
import shutil
import tempfile
import unittest
import kazoo
import mock
import treadmill
from treadmill import appenv
from treadmill import context
from treadmill import fs
from treadmill.apptrace import events
from treadmill.appcfg import abort as app_abort
class AppCfgAbortTest(unittest.TestCase):
    """Tests for treadmill.appcfg.abort."""
    def setUp(self):
        # Fresh scratch directory per test; the app environment is rooted
        # inside it.
        self.root = tempfile.mkdtemp()
        self.tm_env = appenv.AppEnvironment(root=self.root)
    def tearDown(self):
        # Remove the scratch directory created in setUp.
        if self.root and os.path.isdir(self.root):
            shutil.rmtree(self.root)
    @mock.patch('treadmill.appcfg.abort.flag_aborted', mock.Mock())
    @mock.patch('treadmill.supervisor.control_service', mock.Mock())
    def test_abort(self):
        """Tests abort sequence."""
        # abort() is expected to flag the container as aborted and then kill
        # its supervised service (the parent of the 'data' directory).
        container_dir = os.path.join(self.root, 'apps', 'proid.myapp#001',
                                     'data')
        fs.mkdir_safe(container_dir)
        app_abort.abort(container_dir,
                        why=app_abort.AbortedReason.INVALID_TYPE,
                        payload='test')
        treadmill.appcfg.abort.flag_aborted.assert_called_with(
            container_dir,
            app_abort.AbortedReason.INVALID_TYPE,
            'test'
        )
        treadmill.supervisor.control_service.assert_called_with(
            os.path.join(self.root, 'apps', 'proid.myapp#001'),
            treadmill.supervisor.ServiceControlAction.kill
        )
    def test_flag_aborted(self):
        """Tests flag abort sequence."""
        # flag_aborted() persists the reason and payload as JSON in an
        # 'aborted' file inside the container data directory.
        container_dir = os.path.join(self.root, 'apps', 'proid.myapp#001',
                                     'data')
        fs.mkdir_safe(container_dir)
        app_abort.flag_aborted(container_dir,
                               why=app_abort.AbortedReason.INVALID_TYPE,
                               payload='test')
        aborted_file = os.path.join(container_dir, 'aborted')
        with io.open(aborted_file) as f:
            aborted = json.load(f)
        # AbortedReason.INVALID_TYPE serializes as the string 'invalid_type'.
        self.assertEqual('invalid_type', aborted.get('why'))
        self.assertEqual('test', aborted.get('payload'))
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.create', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.delete', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('treadmill.appevents.post', mock.Mock())
    @mock.patch('treadmill.sysinfo.hostname',
                mock.Mock(return_value='xxx.xx.com'))
    @mock.patch('treadmill.zkutils.connect', mock.Mock())
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    def test_report_aborted(self):
        """Tests report abort sequence."""
        # report_aborted() should post an AbortedTraceEvent for the instance;
        # all ZooKeeper interaction is mocked out.
        context.GLOBAL.zk.url = 'zookeeper://xxx@hhh:123/treadmill/mycell'
        treadmill.zkutils.connect.return_value = kazoo.client.KazooClient()
        kazoo.client.KazooClient.get_children.return_value = []
        kazoo.client.KazooClient.exists.return_value = True
        kazoo.client.KazooClient.create.reset()
        kazoo.client.KazooClient.delete.reset()
        app_abort.report_aborted(self.tm_env, 'proid.myapp#001',
                                 why=app_abort.AbortedReason.TICKETS,
                                 payload='test')
        # AbortedReason.TICKETS serializes as the string 'tickets'.
        treadmill.appevents.post.assert_called_with(
            mock.ANY,
            events.AbortedTraceEvent(
                instanceid='proid.myapp#001',
                why='tickets',
                payload='test',
            ),
        )
if __name__ == '__main__':
    unittest.main()
|
apache-2.0
|
jeremyepling/TypeScript
|
tests/baselines/reference/validUseOfThisInSuper.js
|
939
|
//// [validUseOfThisInSuper.ts]
// TypeScript compiler test baseline: the TS source followed by its emitted
// ES5 JavaScript.
class Base {
    constructor(public b: Base) {
    }
}
class Super extends Base {
    constructor() {
        super((() => this)()); // ok since this is not the case: The constructor declares parameter properties or the containing class declares instance member variables with initializers.
    }
}
//// [validUseOfThisInSuper.js]
// Standard pre-ES2015 inheritance helper emitted by the compiler.
var __extends = (this && this.__extends) || function (d, b) {
    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
    function __() { this.constructor = d; }
    d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var Base = (function () {
    function Base(b) {
        this.b = b;
    }
    return Base;
}());
var Super = (function (_super) {
    __extends(Super, _super);
    function Super() {
        // NOTE(review): `_this` has no visible declaration in this emitted
        // baseline -- confirm against current compiler output.
        return _super.call(this, (function () { return _this; })()) || this;
    }
    return Super;
}(Base));
|
apache-2.0
|
pkarmstr/NYBC
|
solr-4.2.1/lucene/core/src/test/org/apache/lucene/search/TestFieldValueFilter.java
|
4115
|
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
/**
*
*/
/**
 * Tests {@link FieldValueFilter}: a search filtered on the "some" field must
 * return exactly the documents that have (or, when negated, lack) a value for
 * that field, even after a random subset of documents has been deleted.
 */
public class TestFieldValueFilter extends LuceneTestCase {
  public void testFieldValueFilterNoValue() throws IOException {
    Directory directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
    int docs = atLeast(10);
    int[] docStates = buildIndex(writer, docs);
    // Count surviving documents (state 0) that never got a "some" field.
    int numDocsNoValue = 0;
    for (int i = 0; i < docStates.length; i++) {
      if (docStates[i] == 0) {
        numDocsNoValue++;
      }
    }
    IndexReader reader = DirectoryReader.open(directory);
    IndexSearcher searcher = new IndexSearcher(reader);
    // negate=true: keep only documents WITHOUT a value for "some".
    TopDocs search = searcher.search(new TermQuery(new Term("all", "test")),
        new FieldValueFilter("some", true), docs);
    // JUnit convention: expected value first, actual second (was reversed,
    // which produces misleading failure messages).
    assertEquals(numDocsNoValue, search.totalHits);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    for (ScoreDoc scoreDoc : scoreDocs) {
      assertNull(reader.document(scoreDoc.doc).get("some"));
    }
    reader.close();
    directory.close();
  }
  public void testFieldValueFilter() throws IOException {
    Directory directory = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
    int docs = atLeast(10);
    int[] docStates = buildIndex(writer, docs);
    // Count surviving documents (state 1) that carry a "some" field.
    int numDocsWithValue = 0;
    for (int i = 0; i < docStates.length; i++) {
      if (docStates[i] == 1) {
        numDocsWithValue++;
      }
    }
    IndexReader reader = DirectoryReader.open(directory);
    IndexSearcher searcher = new IndexSearcher(reader);
    // Default filter: keep only documents WITH a value for "some".
    TopDocs search = searcher.search(new TermQuery(new Term("all", "test")),
        new FieldValueFilter("some"), docs);
    // JUnit convention: expected value first, actual second.
    assertEquals(numDocsWithValue, search.totalHits);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    for (ScoreDoc scoreDoc : scoreDocs) {
      assertEquals("value", reader.document(scoreDoc.doc).get("some"));
    }
    reader.close();
    directory.close();
  }
  /**
   * Populates the index with {@code docs} documents and returns a per-doc
   * state array: 0 = indexed without "some", 1 = indexed with "some"="value",
   * 2 = subsequently deleted. Closes the writer before returning.
   */
  private int[] buildIndex(RandomIndexWriter writer, int docs)
      throws IOException {
    int[] docStates = new int[docs];
    for (int i = 0; i < docs; i++) {
      Document doc = new Document();
      if (random().nextBoolean()) {
        docStates[i] = 1;
        doc.add(newTextField("some", "value", Field.Store.YES));
      }
      doc.add(newTextField("all", "test", Field.Store.NO));
      doc.add(newTextField("id", "" + i, Field.Store.YES));
      writer.addDocument(doc);
    }
    writer.commit();
    int numDeletes = random().nextInt(docs);
    for (int i = 0; i < numDeletes; i++) {
      int docID = random().nextInt(docs);
      writer.deleteDocuments(new Term("id", "" + docID));
      docStates[docID] = 2;
    }
    writer.close();
    return docStates;
  }
}
|
apache-2.0
|
51web/vhostmanager
|
VhostManageSetup/frmLoading.cs
|
549
|
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
namespace 自动安装
{
    public partial class frmLoading : Form
    {
        // Message intended for display on the form; currently unused -- see
        // the commented-out assignment in frmLoading_Load.
        public string txtMessage = "";
        public frmLoading()
        {
            InitializeComponent();
        }
        private void frmLoading_Load(object sender, EventArgs e)
        {
            // NOTE(review): blocks the UI thread for 2 seconds while this
            // splash form loads -- confirm this is intentional.
            System.Threading.Thread.Sleep(2000);
            //textBox1.Text = txtMessage;
        }
    }
}
|
apache-2.0
|
mal/chef
|
spec/functional/resource/windows_task_spec.rb
|
18028
|
#
# Author:: Nimisha Sharad (<nimisha.sharad@msystechnologies.com>)
# Copyright:: Copyright (c) 2016 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
require "chef/provider/windows_task"
describe Chef::Resource::WindowsTask, :windows_only do
let(:task_name) { "chef-client" }
let(:new_resource) { Chef::Resource::WindowsTask.new(task_name) }
let(:windows_task_provider) do
node = Chef::Node.new
events = Chef::EventDispatch::Dispatcher.new
run_context = Chef::RunContext.new(node, {}, events)
Chef::Provider::WindowsTask.new(new_resource, run_context)
end
describe "action :create" do
after { delete_task }
context "when frequency and frequency_modifier are not passed" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
# Make sure MM/DD/YYYY is accepted
new_resource.start_day "09/20/2017"
new_resource
end
it "creates a scheduled task to run every 1 hr starting on 09/20/2017" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:"Repeat:Every"]).to eq("1 Hour(s), 0 Minute(s)")
# This test will not work across locales
expect(task_details[:StartDate]).to eq("9/20/2017")
end
end
context "frequency :minute" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :minute
new_resource.frequency_modifier 15
new_resource
end
it "creates a scheduled task that runs after every 15 minutes" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:"Repeat:Every"]).to eq("0 Hour(s), 15 Minute(s)")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
context "frequency :hourly" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :hourly
new_resource.frequency_modifier 3
new_resource
end
it "creates a scheduled task that runs after every 3 hrs" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:"Repeat:Every"]).to eq("3 Hour(s), 0 Minute(s)")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
context "frequency :daily" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :daily
new_resource
end
it "creates a scheduled task to run daily" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:ScheduleType]).to eq("Daily")
expect(task_details[:Days]).to eq("Every 1 day(s)")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
context "frequency :monthly" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :monthly
new_resource.frequency_modifier 2
new_resource
end
it "creates a scheduled task to every 2 months" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:ScheduleType]).to eq("Monthly")
expect(task_details[:Months]).to eq("FEB, APR, JUN, AUG, OCT, DEC")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
context "frequency :once" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :once
new_resource
end
context "when start_time is not provided" do
it "raises argument error" do
expect { subject.run_action(:create) }.to raise_error(Mixlib::ShellOut::ShellCommandFailed)
end
end
context "when start_time is provided" do
it "creates the scheduled task to run once at 5pm" do
subject.start_time "17:00"
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:ScheduleType]).to eq("One Time Only")
expect(task_details[:StartTime]).to eq("5:00:00 PM")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
end
context "frequency :none" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :none
new_resource.random_delay ""
new_resource
end
it "creates the scheduled task to run on demand only" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:ScheduleType]).to eq("On demand only")
expect(task_details[:StartTime]).to eq("N/A")
expect(task_details[:StartDate]).to eq("N/A")
expect(task_details[:NextRunTime]).to eq("N/A")
expect(task_details[:none]).to eq(true)
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
context "frequency :weekly" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :weekly
new_resource
end
it "creates the scheduled task to run weekly" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:ScheduleType]).to eq("Weekly")
expect(task_details[:Months]).to eq("Every 1 week(s)")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
context "when days are provided" do
it "creates the scheduled task to run on particular days" do
subject.day "Mon, Fri"
subject.frequency_modifier 2
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:Days]).to eq("MON, FRI")
expect(task_details[:ScheduleType]).to eq("Weekly")
expect(task_details[:Months]).to eq("Every 2 week(s)")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
context "when invalid day is passed" do
it "raises error" do
subject.day "abc"
expect { subject.run_action(:create) }.to raise_error(Mixlib::ShellOut::ShellCommandFailed)
end
end
context "when months are passed" do
it "raises error that months are supported only when frequency=:monthly" do
subject.months "Jan"
expect { subject.run_action(:create) }.to raise_error(Mixlib::ShellOut::ShellCommandFailed)
end
end
end
context "frequency :on_logon" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :on_logon
new_resource
end
it "creates the scheduled task to on logon" do
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:ScheduleType]).to eq("At logon time")
expect(task_details[:run_level]).to eq("HighestAvailable")
end
end
context "frequency :on_idle" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource.frequency :on_idle
new_resource
end
context "when idle_time is not passed" do
it "raises error" do
expect { subject.run_action(:create) }.to raise_error(Mixlib::ShellOut::ShellCommandFailed)
end
end
context "when idle_time is passed" do
it "creates the scheduled task to run when system is idle" do
subject.idle_time 20
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:ScheduleType]).to eq("At idle time")
expect(task_details[:run_level]).to eq("HighestAvailable")
expect(task_details[:idle_time]).to eq("PT20M")
end
end
end
context "when random_delay is passed" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource
end
it "sets the random_delay for frequency :minute" do
subject.frequency :minute
subject.random_delay "PT20M"
subject.run_action(:create)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:TaskName]).to eq("\\chef-client")
expect(task_details[:ScheduleType]).to eq("One Time Only, Minute")
expect(task_details[:TaskToRun]).to eq("chef-client")
expect(task_details[:run_level]).to eq("HighestAvailable")
expect(task_details[:random_delay]).to eq("PT20M")
end
it "raises error if invalid random_delay is passed" do
subject.frequency :minute
subject.random_delay "abc"
expect { subject.after_created }.to raise_error("Invalid value passed for `random_delay`. Please pass seconds as a String e.g. '60'.")
end
it "raises error if random_delay is passed with frequency on_idle" do
subject.frequency :on_idle
subject.random_delay "PT20M"
expect { subject.after_created }.to raise_error("`random_delay` property is supported only for frequency :minute, :hourly, :daily, :weekly and :monthly")
end
end
end
describe "#after_created" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource.run_level :highest
new_resource
end
context "when start_day is passed with frequency :onstart" do
it "raises error" do
subject.frequency :onstart
subject.start_day "09/20/2017"
expect { subject.after_created }.to raise_error("`start_day` property is not supported with frequency: onstart")
end
end
context "when a non-system user is passed without password" do
it "raises error" do
subject.user "Administrator"
subject.frequency :onstart
expect { subject.after_created }.to raise_error("Can't specify a non-system user without a password!")
end
end
context "when interactive_enabled is passed for a System user without password" do
it "raises error" do
subject.interactive_enabled true
subject.frequency :onstart
expect { subject.after_created }.to raise_error("Please provide the password when attempting to set interactive/non-interactive.")
end
end
context "when frequency_modifier > 1439 is passed for frequency=:minute" do
it "raises error" do
subject.frequency_modifier 1450
subject.frequency :minute
expect { subject.after_created }.to raise_error("frequency_modifier value 1450 is invalid. Valid values for :minute frequency are 1 - 1439.")
end
end
context "when invalid months are passed" do
it "raises error" do
subject.months "xyz"
subject.frequency :monthly
expect { subject.after_created }.to raise_error("months attribute invalid. Only valid values are: JAN, FEB, MAR, APR, MAY, JUN, JUL, AUG, SEP, OCT, NOV, DEC and *. Multiple values must be separated by a comma.")
end
end
context "when idle_time > 999 is passed" do
it "raises error" do
subject.idle_time 1000
subject.frequency :on_idle
expect { subject.after_created }.to raise_error("idle_time value 1000 is invalid. Valid values for :on_idle frequency are 1 - 999.")
end
end
context "when idle_time is passed for frequency=:monthly" do
it "raises error" do
subject.idle_time 300
subject.frequency :monthly
expect { subject.after_created }.to raise_error("idle_time attribute is only valid for tasks that run on_idle")
end
end
end
describe "action :delete" do
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource
end
it "deletes the task if it exists" do
subject.run_action(:create)
delete_task
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details).to eq(false)
end
end
describe "action :run" do
after { delete_task }
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command "dir"
new_resource.run_level :highest
new_resource
end
it "runs the existing task" do
skip "Task status is returned as Ready instead of Running randomly"
subject.run_action(:create)
subject.run_action(:run)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:Status]).to eq("Running")
end
end
describe "action :end", :volatile do
after { delete_task }
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command "dir"
new_resource.run_level :highest
new_resource
end
it "ends the running task" do
subject.run_action(:create)
subject.run_action(:run)
task_details = windows_task_provider.send(:load_task_hash, task_name)
subject.run_action(:end)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:Status]).to eq("Ready")
end
end
describe "action :enable" do
after { delete_task }
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource
end
it "enables the disabled task" do
subject.run_action(:create)
subject.run_action(:disable)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:ScheduledTaskState]).to eq("Disabled")
subject.run_action(:enable)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:ScheduledTaskState]).to eq("Enabled")
end
end
describe "action :disable" do
after { delete_task }
subject do
new_resource = Chef::Resource::WindowsTask.new(task_name, run_context)
new_resource.command task_name
new_resource
end
it "disables the task" do
subject.run_action(:create)
subject.run_action(:disable)
task_details = windows_task_provider.send(:load_task_hash, task_name)
expect(task_details[:ScheduledTaskState]).to eq("Disabled")
end
end
def delete_task
task_to_delete = Chef::Resource::WindowsTask.new(task_name, run_context)
task_to_delete.run_action(:delete)
end
end
|
apache-2.0
|
tectronics/tag-db
|
src/main/java/tagdb/algorithm/match/PeptideMatcher.java
|
3772
|
package tagdb.algorithm.match;
import com.compomics.util.experiment.identification.SequenceFactory;
import tagdb.fragmentation.FragmentIon;
import tagdb.fragmentation.Fragmentizer;
import tagdb.fragmentation.SpectrumPeak;
import tagdb.io.MascotGenericFile;
import tagdb.model.Peptide;
import tagdb.model.SuspectHit;
import tagdb.model.params.SearchParams;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
/**
* <b>PeptideMatcher</b>
* <p>
* This class is the start part of the database search java.tagdb.algorithm.
* It matches the theoretical fragment ions against the experimental spectrum peaks.
* Additionally, it calculates an intensity-based score and rMIC, i.e. the relative matched ion count (matched fragment ion intensities to TIC in spectrum).
* </p>
*
* @author T.Muth
*/
/**
 * <b>PeptideMatcher</b>
 * <p>
 * This class is the start part of the database search java.tagdb.algorithm.
 * It matches the theoretical fragment ions against the experimental spectrum peaks.
 * Additionally, it calculates an intensity-based score and rMIC, i.e. the relative matched ion count (matched fragment ion intensities to TIC in spectrum).
 * </p>
 *
 * @author T.Muth
 */
public class PeptideMatcher {
    /**
     * The candidate peptide taken from the suspect hit.
     */
    private Peptide peptide;
    /**
     * The experimental spectrum to match against.
     */
    private MascotGenericFile spectrum;
    /**
     * The tag database search parameters.
     */
    private SearchParams params;
    /**
     * The flag for decoy or not.
     */
    private boolean decoy;
    /**
     * Protein description read from the FASTA header of the hit's protein.
     */
    private String description;
    /**
     * Constructs the PeptideMatcher object for peptide and spectrum.
     *
     * @param hit The suspect hit.
     * @param spectrum The spectrum.
     * @param params The tag database search parameters.
     * @throws IOException if the protein header cannot be read.
     * @throws InterruptedException if sequence-factory access is interrupted.
     */
    public PeptideMatcher(SuspectHit hit, MascotGenericFile spectrum, SearchParams params) throws IOException, InterruptedException {
        this.peptide = hit.getPeptide();
        this.description = SequenceFactory.getInstance().getHeader(hit.getProtein().getAccession()).getDescription();
        this.spectrum = spectrum;
        this.params = params;
        this.decoy = hit.isDecoy();
    }
    /**
     * This method tries to find a match between a peptide sequence and a spectrum.
     * It eventually returns a PSM object with number of matched peaks, score and rMIC.
     *
     * @return The derived PSM
     */
    public PeptideSpectrumMatch findMatch() {
        Fragmentizer fragmentizer = new Fragmentizer(peptide);
        // Convert the spectrum's m/z -> intensity map into peak objects and
        // accumulate the total ion current (TIC) in one pass.
        Map<Double, Double> expPeaks = spectrum.getPeaks();
        List<SpectrumPeak> peakList = new ArrayList<SpectrumPeak>();
        double tic = 0.0;
        for (Entry<Double, Double> e : expPeaks.entrySet()) {
            peakList.add(new SpectrumPeak(e.getKey(), e.getValue()));
            tic += e.getValue();
        }
        // Get the matched fragment ions
        List<FragmentIon> matchedIons = fragmentizer.getMatchedIons(peakList, params.getFragmentTol());
        double score = 0.0;
        double rMIC = 0.0;
        // m/z values already credited: multiple fragment ions matching the
        // same peak must only be counted once (a Set replaces the previous
        // Map whose stored values were never read).
        Set<Double> countedMzs = new HashSet<Double>();
        // TODO: Improve the scoring function!
        for (FragmentIon ion : matchedIons) {
            if (countedMzs.add(ion.getMZ())) {
                score += ion.getScore();
                rMIC += ion.getIntensity();
            }
        }
        // Normalize the intensity-based score by the peptide length.
        score = score / peptide.getSequence().length();
        // Calculate the relative matched ion count.
        // NOTE(review): yields NaN when the spectrum has no peaks (tic == 0)
        // -- confirm upstream guarantees a non-empty spectrum.
        rMIC = rMIC / tic;
        // Construct the PeptideSpectrumMatch object.
        PeptideSpectrumMatch psm = new PeptideSpectrumMatch(peptide.getCharge(), matchedIons.size(), peakList.size(), score, rMIC, decoy);
        psm.setDescription(description);
        psm.setSpectrumTitle(spectrum.getTitle().trim());
        return psm;
    }
}
|
apache-2.0
|
j-coll/biodata
|
biodata-models/src/main/java/org/opencb/biodata/models/variant/StudyEntry.java
|
19222
|
/*
* <!--
* ~ Copyright 2015-2017 OpenCB
* ~
* ~ Licensed under the Apache License, Version 2.0 (the "License");
* ~ you may not use this file except in compliance with the License.
* ~ You may obtain a copy of the License at
* ~
* ~ http://www.apache.org/licenses/LICENSE-2.0
* ~
* ~ Unless required by applicable law or agreed to in writing, software
* ~ distributed under the License is distributed on an "AS IS" BASIS,
* ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* ~ See the License for the specific language governing permissions and
* ~ limitations under the License.
* -->
*
*/
package org.opencb.biodata.models.variant;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.opencb.biodata.models.variant.avro.AlternateCoordinate;
import org.opencb.biodata.models.variant.avro.FileEntry;
import org.opencb.biodata.models.variant.avro.VariantType;
import org.opencb.biodata.models.variant.stats.VariantStats;
/**
* Entry that associates a variant and a file in a variant archive. It contains
* information related to samples, statistics and specifics of the file format.
*
* @author Cristina Yenyxe Gonzalez Garcia <cyenyxe@ebi.ac.uk>
* @author Jose Miguel Mut Lopez <jmmut@ebi.ac.uk>
*/
@JsonIgnoreProperties({"impl", "samplesDataAsMap", "samplesPosition", "samplesName", "orderedSamplesName", "formatAsString",
"formatPositions", "fileId", "attributes", "allAttributes", "cohortStats", "secondaryAlternatesAlleles"})
public class StudyEntry implements Serializable {
private volatile LinkedHashMap<String, Integer> samplesPosition = null;
private final AtomicReference<Map<String, Integer>> formatPosition = new AtomicReference<>();
private volatile Map<String, VariantStats> cohortStats = null;
private final org.opencb.biodata.models.variant.avro.StudyEntry impl;
public static final String DEFAULT_COHORT = "ALL";
public static final String QUAL = "QUAL";
public static final String FILTER = "FILTER";
public static final String SRC = "src";
public StudyEntry() {
this(null, null);
}
public StudyEntry(org.opencb.biodata.models.variant.avro.StudyEntry other) {
impl = other;
}
public StudyEntry(String studyId) {
this(studyId, new ArrayList<>(), null);
}
public StudyEntry(String fileId, String studyId) {
this(studyId, new ArrayList<>(), null);
if (fileId != null) {
setFileId(fileId);
}
}
/**
* @deprecated Use {@link #StudyEntry(String, List, List)}
*/
@Deprecated
public StudyEntry(String fileId, String studyId, String[] secondaryAlternates, String format) {
this(fileId, studyId, secondaryAlternates, format == null ? null : Arrays.asList(format.split(":")));
}
/**
* @deprecated Use {@link #StudyEntry(String, List, List)}
*/
@Deprecated
public StudyEntry(String fileId, String studyId, String[] secondaryAlternates, List<String> format) {
this(fileId, studyId, Arrays.asList(secondaryAlternates), format);
}
/**
* @deprecated Use {@link #StudyEntry(String, List, List)}
*/
@Deprecated
public StudyEntry(String fileId, String studyId, List<String> secondaryAlternates, List<String> format) {
this.impl = new org.opencb.biodata.models.variant.avro.StudyEntry(studyId,
new LinkedList<>(), null, format, new LinkedList<>(), new LinkedHashMap<>());
setSecondaryAlternatesAlleles(secondaryAlternates);
if (fileId != null) {
setFileId(fileId);
}
}
public StudyEntry(String studyId, List<AlternateCoordinate> secondaryAlternates, List<String> format) {
this.impl = new org.opencb.biodata.models.variant.avro.StudyEntry(studyId,
new LinkedList<>(), null, format, new LinkedList<>(), new LinkedHashMap<>());
setSecondaryAlternates(secondaryAlternates);
}
public LinkedHashMap<String, Integer> getSamplesPosition() {
return samplesPosition;
}
public void setSamplesPosition(Map<String, Integer> samplesPosition) {
setSamplesPosition(samplesPosition, true);
}
public void setSortedSamplesPosition(LinkedHashMap<String, Integer> samplesPosition) {
setSamplesPosition(samplesPosition, false);
}
protected void setSamplesPosition(Map<String, Integer> samplesPosition, boolean checkSorted) {
if (samplesPosition == null) {
this.samplesPosition = null;
return;
}
if (samplesPosition instanceof LinkedHashMap) {
if (!checkSorted || isSamplesPositionMapSorted((LinkedHashMap<String, Integer>) samplesPosition)) {
this.samplesPosition = ((LinkedHashMap<String, Integer>) samplesPosition);
} else {
this.samplesPosition = sortSamplesPositionMap(samplesPosition);
}
} else {
//Sort samples position
this.samplesPosition = sortSamplesPositionMap(samplesPosition);
}
if (getSamplesData() == null || getSamplesData().isEmpty()) {
for (int size = samplesPosition.size(); size > 0; size--) {
getSamplesData().add(null);
}
}
}
public static boolean isSamplesPositionMapSorted(LinkedHashMap<String, Integer> samplesPosition) {
int idx = 0;
for (Map.Entry<String, Integer> entry : samplesPosition.entrySet()) {
if (entry.getValue() != idx) {
break;
}
idx++;
}
return idx == samplesPosition.size();
}
public static LinkedHashMap<String, Integer> sortSamplesPositionMap(Map<String, Integer> samplesPosition) {
LinkedHashMap<String, Integer> map = new LinkedHashMap<>();
String[] samples = new String[samplesPosition.size()];
for (Map.Entry<String, Integer> entry : samplesPosition.entrySet()) {
samples[entry.getValue()] = entry.getKey();
}
for (int i = 0; i < samples.length; i++) {
map.put(samples[i], i);
}
return map;
}
public org.opencb.biodata.models.variant.avro.StudyEntry getImpl() {
return impl;
}
// public void setSamplePositions(List<String> samplePositions) {
// this.samplePositions = new HashMap<>(samplePositions.size());
// int position = 0;
// for (String sample : samplePositions) {
// this.samplePositions.put(sample, position++);
// }
// }
//
// @Deprecated
// public void setSecondaryAlternates(String[] secondaryAlternates) {
// impl.setSecondaryAlternates(Arrays.asList(secondaryAlternates));
// }
public String getFormatAsString() {
return impl.getFormat() == null ? null : String.join(":", impl.getFormat());
}
public void setFormatAsString(String format) {
setFormat(Arrays.asList(format.split(":")));
}
/**
* Do not modify this list
* @return
*/
public List<String> getFormat() {
return impl.getFormat() == null? null : Collections.unmodifiableList(impl.getFormat());
}
public void setFormat(List<String> value) {
this.formatPosition.set(null);
impl.setFormat(value);
}
public void addFormat(String value) {
formatPosition.set(null);
if (impl.getFormat() == null) {
impl.setFormat(new LinkedList<>());
}
List<String> format = new ArrayList<>(impl.getFormat().size());
format.addAll(impl.getFormat());
format.add(value);
impl.setFormat(format);
}
public Map<String, Integer> getFormatPositions() {
if (Objects.isNull(this.formatPosition.get())) {
Map<String, Integer> map = new HashMap<>();
int pos = 0;
for (String format : getFormat()) {
map.put(format, pos++);
}
this.formatPosition.compareAndSet(null, map);
}
return formatPosition.get();
}
public List<List<String>> getSamplesData() {
return impl.getSamplesData();
}
public void setSamplesData(List<List<String>> value) {
impl.setSamplesData(value);
}
@Deprecated
public Map<String, Map<String, String>> getSamplesDataAsMap() {
requireSamplesPosition();
Map<String, Map<String, String>> samplesDataMap = new HashMap<>();
for (Map.Entry<String, Integer> entry : samplesPosition.entrySet()) {
samplesDataMap.put(entry.getKey(), getSampleData(entry.getKey()));
}
return Collections.unmodifiableMap(samplesDataMap);
}
public String getSampleData(String sampleName, String field) {
requireSamplesPosition();
if (samplesPosition.containsKey(sampleName)) {
Map<String, Integer> formatPositions = getFormatPositions();
if (formatPositions.containsKey(field)) {
List<String> sampleData = impl.getSamplesData().get(samplesPosition.get(sampleName));
Integer formatIdx = formatPositions.get(field);
return formatIdx < sampleData.size() ? sampleData.get(formatIdx) : null;
}
}
return null;
}
public Map<String, String> getSampleData(String sampleName) {
requireSamplesPosition();
if (samplesPosition.containsKey(sampleName)) {
HashMap<String, String> sampleDataMap = new HashMap<>();
Iterator<String> iterator = getFormat().iterator();
List<String> sampleDataList = impl.getSamplesData().get(samplesPosition.get(sampleName));
for (String data : sampleDataList) {
sampleDataMap.put(iterator.next(), data);
}
return Collections.unmodifiableMap(sampleDataMap);
}
return null;
}
public void addSampleData(String sampleName, Map<String, String> sampleData) {
if (getFormat() == null) {
setFormat(new ArrayList<>(sampleData.keySet()));
}
List<String> sampleDataList = new ArrayList<>(getFormat().size());
for (String field : getFormat()) {
sampleDataList.add(sampleData.get(field));
}
if (sampleData.size() != sampleDataList.size()) {
List<String> extraFields = sampleData.keySet().stream().filter(f -> getFormat().contains(f)).collect(Collectors.toList());
throw new IllegalArgumentException("Some sample data fields were not in the format field: " + extraFields);
}
addSampleData(sampleName, sampleDataList);
}
public void addSampleData(String sampleName, List<String> sampleDataList) {
if (samplesPosition == null && impl.getSamplesData().isEmpty()) {
samplesPosition = new LinkedHashMap<>();
}
if (samplesPosition != null) {
if (samplesPosition.containsKey(sampleName)) {
int position = samplesPosition.get(sampleName);
while (impl.getSamplesData().size() <= position) {
actOnSamplesDataList((l) -> l.add(null));
}
actOnSamplesDataList((l) -> l.set(position, sampleDataList));
} else {
int position = samplesPosition.size();
samplesPosition.put(sampleName, position);
actOnSamplesDataList((l) -> l.add(sampleDataList));
}
} else {
actOnSamplesDataList((l) -> l.add(sampleDataList));
}
}
/**
* Acts on the SamplesDataList. If the action throws an UnsupportedOperationException, the list is copied
* into a modifiable list (ArrayList) and the action is executed again.
*
* @param action Action to execute
*/
private void actOnSamplesDataList(Consumer<List<List<String>>> action) {
List<List<String>> samplesDataList = impl.getSamplesData();
try {
action.accept(samplesDataList);
} catch (UnsupportedOperationException e) {
samplesDataList = new ArrayList<>(samplesDataList);
impl.setSamplesData(samplesDataList);
action.accept(samplesDataList);
}
}
public void addSampleData(String sampleName, String format, String value) {
requireSamplesPosition();
Integer formatIdx = getFormatPositions().get(format);
Integer samplePosition = getSamplesPosition().get(sampleName);
if (formatIdx != null && samplePosition != null) {
List<String> sampleData = getSamplesData().get(samplePosition);
if (formatIdx < sampleData.size()) {
sampleData.set(formatIdx, value);
} else {
List<String> modifiableSampleData = new ArrayList<>(getFormat().size());
modifiableSampleData.addAll(sampleData);
modifiableSampleData.add(value);
addSampleData(sampleName, modifiableSampleData);
}
} else {
throw new IndexOutOfBoundsException();
}
}
public Set<String> getSamplesName() {
requireSamplesPosition();
return samplesPosition.keySet();
}
public List<String> getOrderedSamplesName() {
requireSamplesPosition();
return new ArrayList<>(samplesPosition.keySet());
}
public Map<String, VariantStats> getStats() {
resetStatsMap();
return Collections.unmodifiableMap(cohortStats);
}
private void resetStatsMap() {
if (cohortStats == null) {
cohortStats = new HashMap<>();
impl.getStats().forEach((k, v) -> cohortStats.put(k, new VariantStats(v)));
}
}
public void setStats(Map<String, VariantStats> stats) {
this.cohortStats = stats;
impl.setStats(new HashMap<>(stats.size()));
stats.forEach((k, v) -> impl.getStats().put(k, v.getImpl()));
}
public void setStats(String cohortName, VariantStats stats) {
resetStatsMap();
cohortStats.put(cohortName, stats);
impl.getStats().put(cohortName, stats.getImpl());
}
public VariantStats getStats(String cohortName) {
resetStatsMap();
return cohortStats.get(cohortName);
}
@Deprecated
public VariantStats getCohortStats(String cohortName) {
return getStats(cohortName);
}
@Deprecated
public void setCohortStats(String cohortName, VariantStats stats) {
setStats(cohortName, stats);
}
@Deprecated
public Map<String, VariantStats> getCohortStats() {
return getStats();
}
@Deprecated
public void setCohortStats(Map<String, VariantStats> cohortStats) {
setStats(cohortStats);
}
@Deprecated
public String getAttribute(String key) {
return getAttributes().get(key);
}
@Deprecated
public void addAttribute(String key, String value) {
getAttributes().put(key, value);
}
public void addAttribute(String fileId, String key, String value) {
getFile(fileId).getAttributes().put(key, value);
}
@Deprecated
public boolean hasAttribute(String key) {
return getAttributes().containsKey(key);
}
private void requireSamplesPosition() {
if (samplesPosition == null) {
throw new IllegalArgumentException("Require sample positions array to use this method!");
}
}
public String getStudyId() {
return impl.getStudyId();
}
public void setStudyId(String value) {
impl.setStudyId(value);
}
public List<FileEntry> getFiles() {
return impl.getFiles();
}
public void setFiles(List<FileEntry> files) {
impl.setFiles(files);
}
public FileEntry getFile(String fileId) {
for (FileEntry fileEntry : impl.getFiles()) {
if (fileEntry.getFileId().equals(fileId)) {
return fileEntry;
}
}
return null;
}
@Deprecated
public String getFileId() {
return !impl.getFiles().isEmpty() ? impl.getFiles().get(0).getFileId() : null;
}
@Deprecated
public void setFileId(String fileId) {
if (impl.getFiles().isEmpty()) {
impl.getFiles().add(new FileEntry(fileId, "", new HashMap<>()));
} else {
impl.getFiles().get(0).setFileId(fileId);
}
}
/**
* @deprecated Use {@link #getSecondaryAlternates()}
*/
@Deprecated
public List<String> getSecondaryAlternatesAlleles() {
return impl.getSecondaryAlternates() == null
? null
: Collections.unmodifiableList(impl.getSecondaryAlternates().stream()
.map(AlternateCoordinate::getAlternate).collect(Collectors.toList()));
}
/**
* @deprecated Use {@link #setSecondaryAlternates(List)}
*/
@Deprecated
public void setSecondaryAlternatesAlleles(List<String> value) {
List<AlternateCoordinate> secondaryAlternatesMap = null;
if (value != null) {
secondaryAlternatesMap = new ArrayList<>(value.size());
for (String secondaryAlternate : value) {
secondaryAlternatesMap.add(new AlternateCoordinate(null, null, null, null, secondaryAlternate, VariantType.SNV));
}
}
impl.setSecondaryAlternates(secondaryAlternatesMap);
}
public List<AlternateCoordinate> getSecondaryAlternates() {
return impl.getSecondaryAlternates();
}
public void setSecondaryAlternates(List<AlternateCoordinate> value) {
impl.setSecondaryAlternates(value);
}
@Deprecated
public Map<String, String> getAttributes() {
return !impl.getFiles().isEmpty() ? impl.getFiles().get(0).getAttributes() : null;
}
public Map<String, String> getAllAttributes() {
Map<String, String> attributes = new HashMap<>();
impl.getFiles().stream().forEach(fileEntry ->
attributes.putAll(fileEntry.getAttributes().entrySet().stream()
.collect(Collectors.toMap(entry -> fileEntry.getFileId() + "_" + entry.getKey(), Map.Entry::getValue))
)
);
return Collections.unmodifiableMap(attributes);
}
@Deprecated
public void setAttributes(Map<String, String> attributes) {
if (impl.getFiles().isEmpty()) {
impl.getFiles().add(new FileEntry("", null, attributes));
} else {
impl.getFiles().get(0).setAttributes(attributes);
}
}
@Override
public String toString() {
return impl.toString();
}
@Override
public int hashCode() {
return impl.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof StudyEntry) {
return impl.equals(((StudyEntry) obj).getImpl());
} else {
return false;
}
}
}
|
apache-2.0
|
hello-xiaopeng/weather
|
src/com/weather/app/util/HttpCallbackListener.java
|
135
|
package com.weather.app.util;
public interface HttpCallbackListener {
void onFinish(String response);
void onError(Exception e);
}
|
apache-2.0
|
nextreports/nextreports-server
|
src/ro/nextreports/server/web/action/RenameActionLink.java
|
3047
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.nextreports.server.web.action;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.spring.injection.annot.SpringBean;
import ro.nextreports.server.domain.Entity;
import ro.nextreports.server.service.StorageService;
import ro.nextreports.server.util.StorageUtil;
import ro.nextreports.server.web.NextServerSession;
import ro.nextreports.server.web.common.event.AjaxUpdateEvent;
import ro.nextreports.server.web.core.EntityBrowserPanel;
import ro.nextreports.server.web.core.RenamePanel;
import ro.nextreports.server.web.core.action.ActionAjaxLink;
import ro.nextreports.server.web.core.action.ActionContext;
import ro.nextreports.server.web.core.event.SelectEntityEvent;
/**
* @author Decebal Suiu
*/
public class RenameActionLink extends ActionAjaxLink {
@SpringBean
private StorageService storageService;
public RenameActionLink(ActionContext actionContext) {
super(actionContext);
}
public void executeAction(AjaxRequestTarget target) {
Entity entity = getActionContext().getEntity();
final EntityBrowserPanel panel = findParent(EntityBrowserPanel.class);
if (getActionContext().isMenuAction()) {
NextServerSession.get().setSearchContext(null);
panel.restoreWorkspace(target);
}
panel.forwardWorkspace(new RenamePanel("work", entity) {
@Override
public void onRename(AjaxRequestTarget target, Entity entity) {
try {
// IMPORTANT : see SchedulerJobRenamedAdvice
storageService.renameEntity(entity.getPath(), entity.getName());
String parentPath = StorageUtil.getParentPath(entity.getPath());
if (panel.isFormForward(target)) {
new AjaxUpdateEvent(this, target).fire();
panel.backwardWorkspace(target);
} else {
new SelectEntityEvent(this, target, storageService.getEntity(parentPath)).fire();
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}, target);
}
}
|
apache-2.0
|
trienvu/testzoomsur
|
SurfaceZoom/src/com/example/surfacezoom/PanAndZoomListener1.java
|
9155
|
package com.example.surfacezoom;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.PointF;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.FloatMath;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnLayoutChangeListener;
import android.view.View.OnTouchListener;
import android.view.ViewGroup;
import android.view.ViewGroup.MarginLayoutParams;
import android.widget.FrameLayout;
import android.widget.ImageView;
/**
* This class defines an OnTouchListener that you can attach to any view so you
* can support panning and zooming.
*
* <p> This code has been adapted from the work described here:
* "Java Pan/Zoom Listener" (http://code.cheesydesign.com/?p=723)
*
*/
public class PanAndZoomListener1 implements OnTouchListener {
public static class Anchor {
public static final int CENTER = 0;
public static final int TOPLEFT = 1;
}
private static final String TAG = "PanAndZoomListener";
// We can be in one of these 3 states
static final int NONE = 0;
static final int DRAG = 1;
static final int ZOOM = 2;
int mode = NONE;
// Remember some things for zooming
PointF start = new PointF();
PointF mid = new PointF();
float oldDist = 1f;
PanZoomCalculator panZoomCalculator;
public PanAndZoomListener1(FrameLayout container, View view, int anchor) {
panZoomCalculator = new PanZoomCalculator(container, view, anchor);
}
public boolean onTouch(View view, MotionEvent event) {
// Handle touch events here...
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
start.set(event.getX(), event.getY());
Log.d(TAG, "mode=DRAG");
mode = DRAG;
break;
case MotionEvent.ACTION_POINTER_DOWN:
oldDist = spacing(event);
Log.d(TAG, "oldDist=" + oldDist);
if (oldDist > 10f) {
midPoint(mid, event);
mode = ZOOM;
Log.d(TAG, "mode=ZOOM");
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_POINTER_UP:
mode = NONE;
Log.d(TAG, "mode=NONE");
break;
case MotionEvent.ACTION_MOVE:
if (mode == DRAG) {
panZoomCalculator.doPan(event.getX() - start.x, event.getY() - start.y);
start.set(event.getX(), event.getY());
} else if (mode == ZOOM) {
float newDist = spacing(event);
Log.d(TAG, "newDist=" + newDist);
if (newDist > 10f) {
float scale = newDist / oldDist;
oldDist = newDist;
panZoomCalculator.doZoom(scale, mid);
}
}
break;
}
return true; // indicate event was handled
}
// Determine the space between the first two fingers
private float spacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return FloatMath.sqrt(x * x + y * y);
}
// Calculate the mid point of the first two fingers
private void midPoint(PointF point, MotionEvent event) {
// ...
float x = event.getX(0) + event.getX(1);
float y = event.getY(0) + event.getY(1);
point.set(x / 2, y / 2);
}
public class PanZoomCalculator {
/// The current pan position
PointF currentPan;
/// The current zoom position
float currentZoom;
/// The windows dimensions that we are zooming/panning in
View window;
View child;
Matrix matrix;
// Pan jitter is a workaround to get the video view to update its layout properly when zoom is changed
int panJitter = 0;
int anchor;
PanZoomCalculator(View container, View child, int anchor) {
// Initialize class fields
currentPan = new PointF(0, 0);
currentZoom = 1f;;
this.window = container;
this.child = child;
matrix = new Matrix();
this.anchor = anchor;
// onPanZoomChanged();
/* IS THIS COMPATIBLE WITH 2.3.3?
this.child.addOnLayoutChangeListener(new OnLayoutChangeListener() {
// This catches when the image bitmap changes, for some reason it doesn't recurse
public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
onPanZoomChanged();
}
});
*/
}
public void doZoom(float scale, PointF zoomCenter) {
float oldZoom = currentZoom;
// multiply in the zoom change
currentZoom *= scale;
// this limits the zoom
currentZoom = Math.max(getMinimumZoom(), currentZoom);
currentZoom = Math.min(8f, currentZoom);
// Adjust the pan accordingly
// Need to make it such that the point under the zoomCenter remains under the zoom center after the zoom
// calculate in fractions of the image so:
float width = window.getWidth();
float height = window.getHeight();
float oldScaledWidth = width * oldZoom;
float oldScaledHeight = height * oldZoom;
float newScaledWidth = width * currentZoom;
float newScaledHeight = height * currentZoom;
if (anchor == Anchor.CENTER) {
float reqXPos = ((oldScaledWidth - width) * 0.5f + zoomCenter.x - currentPan.x) / oldScaledWidth;
float reqYPos = ((oldScaledHeight - height) * 0.5f + zoomCenter.y - currentPan.y) / oldScaledHeight;
float actualXPos = ((newScaledWidth - width) * 0.5f + zoomCenter.x - currentPan.x) / newScaledWidth;
float actualYPos = ((newScaledHeight - height) * 0.5f + zoomCenter.y - currentPan.y) / newScaledHeight;
currentPan.x += (actualXPos - reqXPos) * newScaledWidth;
currentPan.y += (actualYPos - reqYPos) * newScaledHeight;
} else {
// assuming top left
float reqXPos = (zoomCenter.x - currentPan.x) / oldScaledWidth;
float reqYPos = (zoomCenter.y - currentPan.y) / oldScaledHeight;
float actualXPos = (zoomCenter.x - currentPan.x) / newScaledWidth;
float actualYPos = (zoomCenter.y - currentPan.y) / newScaledHeight;
currentPan.x += (actualXPos - reqXPos) * newScaledWidth;
currentPan.y += (actualYPos - reqYPos) * newScaledHeight;
}
onPanZoomChanged();
}
public void doPan(float panX, float panY) {
currentPan.x += panX;
currentPan.y += panY;
onPanZoomChanged();
}
private float getMinimumZoom() {
return 1f;
}
/// Call this to reset the Pan/Zoom state machine
public void reset() {
// Reset zoom and pan
currentZoom = getMinimumZoom();
currentPan = new PointF(0f, 0f);
onPanZoomChanged();
}
public void onPanZoomChanged() {
// Things to try: use a scroll view and set the pan from the scrollview
// when panning, and set the pan of the scroll view when zooming
float winWidth = window.getWidth();
float winHeight = window.getHeight();
if (currentZoom <= 1f) {
currentPan.x = 0;
currentPan.y = 0;
} else if (anchor == Anchor.CENTER) {
float maxPanX = (currentZoom - 1f) * window.getWidth() * 0.5f;
float maxPanY = (currentZoom - 1f) * window.getHeight() * 0.5f;
currentPan.x = Math.max(-maxPanX, Math.min(maxPanX, currentPan.x));
currentPan.y = Math.max(-maxPanY, Math.min(maxPanY, currentPan.y));
} else {
// assume top left
float maxPanX = (currentZoom - 1f) * window.getWidth();
float maxPanY = (currentZoom - 1f) * window.getHeight();
currentPan.x = Math.max(-maxPanX, Math.min(0, currentPan.x));
currentPan.y = Math.max(-maxPanY, Math.min(0, currentPan.y));
}
if (child instanceof ImageView && ((ImageView) child).getScaleType()== ImageView.ScaleType.MATRIX) {
ImageView view = (ImageView) child;
Drawable drawable = view.getDrawable();
if (drawable != null) {
Bitmap bm = ((BitmapDrawable) drawable).getBitmap();
if (bm != null) {
// Limit Pan
float bmWidth = bm.getWidth();
float bmHeight = bm.getHeight();
float fitToWindow = Math.min(winWidth / bmWidth, winHeight / bmHeight);
float xOffset = (winWidth - bmWidth * fitToWindow) * 0.5f * currentZoom;
float yOffset = (winHeight - bmHeight * fitToWindow) * 0.5f * currentZoom;
matrix.reset();
matrix.postScale(currentZoom * fitToWindow, currentZoom * fitToWindow);
matrix.postTranslate(currentPan.x + xOffset, currentPan.y + yOffset);
((ImageView) child).setImageMatrix(matrix);
}
}
} else {
MarginLayoutParams lp = (MarginLayoutParams) child.getLayoutParams();
lp.leftMargin = (int) currentPan.x + panJitter;
lp.topMargin = (int) currentPan.y;
lp.width = (int) (window.getWidth() * currentZoom);
lp.height = (int) (window.getHeight() * currentZoom);
panJitter ^= 1;
child.setLayoutParams(lp);
}
}
}
}
|
apache-2.0
|
scanf/cilium
|
cilium/cmd/debuginfo.go
|
13217
|
// Copyright 2017 Authors of Cilium
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"text/tabwriter"
"time"
"github.com/cilium/cilium/api/v1/models"
pkg "github.com/cilium/cilium/pkg/client"
"github.com/cilium/cilium/pkg/command"
"github.com/russross/blackfriday"
"github.com/spf13/cobra"
)
// outputTypes enum definition
type outputType int
// outputTypes enum values
const (
STDOUT outputType = 0 + iota
MARKDOWN
HTML
JSONOUTPUT
JSONPATH
)
var (
// Can't call it jsonOutput because another var in this package uses that.
jsonOutputDebuginfo = "json"
markdownOutput = "markdown"
htmlOutput = "html"
jsonpathOutput = "jsonpath"
jsonPathRegExp = regexp.MustCompile(`^jsonpath\=(.*)`)
)
// outputTypes enum strings
var outputTypes = [...]string{
"STDOUT",
markdownOutput,
htmlOutput,
jsonOutputDebuginfo,
jsonpathOutput,
}
var debuginfoCmd = &cobra.Command{
Use: "debuginfo",
Short: "Request available debugging information from agent",
Run: runDebugInfo,
}
var (
outputToFile bool
html string
filePerCommand bool
outputOpts []string
outputDir string
)
type addSection func(*tabwriter.Writer, *models.DebugInfo)
var sections = map[string]addSection{
"cilium-version": addCiliumVersion,
"kernel-version": addKernelVersion,
"cilium-status": addCiliumStatus,
"cilium-environment-keys": addCiliumEnvironmentKeys,
"cilium-endpoint-list": addCiliumEndpointList,
"cilium-service-list": addCiliumServiceList,
"cilium-policy": addCiliumPolicy,
"cilium-memory-map": addCiliumMemoryMap,
"cilium-subsystems": addSubsystems,
}
func init() {
rootCmd.AddCommand(debuginfoCmd)
debuginfoCmd.Flags().BoolVarP(&outputToFile, "file", "f", false, "Redirect output to file(s)")
debuginfoCmd.Flags().BoolVarP(&filePerCommand, "file-per-command", "", false, "Generate a single file per command")
debuginfoCmd.Flags().StringSliceVar(&outputOpts, "output", []string{}, "markdown| html| json| jsonpath='{}'")
debuginfoCmd.Flags().StringVar(&outputDir, "output-directory", "", "directory for files (if specified will use directory in which this command was ran)")
}
func validateInput() []outputType {
if outputDir != "" && !outputToFile {
fmt.Fprintf(os.Stderr, "invalid option combination; specified output-directory %q, but did not specify for output to be redirected to file; exiting\n", outputDir)
os.Exit(1)
}
return validateOutputOpts()
}
func validateOutputOpts() []outputType {
var outputTypes []outputType
for _, outputOpt := range outputOpts {
switch strings.ToLower(outputOpt) {
case markdownOutput:
outputTypes = append(outputTypes, MARKDOWN)
case htmlOutput:
if !outputToFile {
fmt.Fprintf(os.Stderr, "if HTML is specified as the output format, it is required that you provide the `--file` argument as well\n")
os.Exit(1)
}
outputTypes = append(outputTypes, HTML)
case jsonOutputDebuginfo:
if filePerCommand {
fmt.Fprintf(os.Stderr, "%s does not support dumping a file per command; exiting\n", outputOpt)
os.Exit(1)
}
outputTypes = append(outputTypes, JSONOUTPUT)
// Empty JSONPath filter case.
case jsonpathOutput:
if filePerCommand {
fmt.Fprintf(os.Stderr, "%s does not support dumping a file per command; exiting\n", outputOpt)
os.Exit(1)
}
outputTypes = append(outputTypes, JSONPATH)
default:
// Check to see if arg contains jsonpath filtering as well.
if jsonPathRegExp.MatchString(outputOpt) {
outputTypes = append(outputTypes, JSONPATH)
continue
}
fmt.Fprintf(os.Stderr, "%s is not a valid output format; exiting\n", outputOpt)
os.Exit(1)
}
}
return outputTypes
}
func formatFileName(outputDir string, cmdTime time.Time, outtype outputType) string {
var fileName string
var sep string
if outputDir != "" {
sep = outputDir + "/"
}
timeStr := cmdTime.Format("20060102-150405.999-0700-MST")
switch outtype {
case MARKDOWN:
fileName = fmt.Sprintf("%scilium-debuginfo-%s.md", sep, timeStr)
case HTML:
fileName = fmt.Sprintf("%scilium-debuginfo-%s.html", sep, timeStr)
case JSONOUTPUT:
fileName = fmt.Sprintf("%scilium-debuginfo-%s.json", sep, timeStr)
case JSONPATH:
fileName = fmt.Sprintf("%scilium-debuginfo-%s.jsonpath", sep, timeStr)
default:
fileName = fmt.Sprintf("%scilium-debuginfo-%s.md", sep, timeStr)
}
return fileName
}
func rootWarningMessage() {
fmt.Fprint(os.Stderr, "Warning, some of the BPF commands might fail when not run as root\n")
}
func runDebugInfo(cmd *cobra.Command, args []string) {
outputTypes := validateInput()
resp, err := client.Daemon.GetDebuginfo(nil)
if err != nil {
fmt.Fprintf(os.Stderr, "%s\n", pkg.Hint(err))
os.Exit(1)
}
// create tab-writer to fill buffer
var buf bytes.Buffer
w := tabwriter.NewWriter(&buf, 5, 0, 3, ' ', 0)
p := resp.Payload
cmdTime := time.Now()
if outputToFile && len(outputTypes) == 0 {
outputTypes = append(outputTypes, MARKDOWN)
}
// Dump payload for each output format.
for i, output := range outputTypes {
var fileName string
// Only warn when not dumping output as JSON so that when the output of the
// command is specified to be JSON, the only outputted content is the JSON
// model of debuginfo.
if os.Getuid() != 0 && output != JSONOUTPUT && output != JSONPATH {
rootWarningMessage()
}
if outputToFile {
fileName = formatFileName(outputDir, cmdTime, output)
}
// Generate multiple files for each subsection of the command if
// specified, except in the JSON cases, because in the JSON cases,
// we want to dump the entire DebugInfo JSON object, not sections of it.
if filePerCommand && (output != JSONOUTPUT && output != JSONPATH) {
for cmdName, section := range sections {
addHeader(w)
section(w, p)
writeToOutput(buf, output, fileName, cmdName)
buf.Reset()
}
continue
}
// Generate a single file, except not for JSON; no formatting is
// needed.
if output == JSONOUTPUT || output == JSONPATH {
marshaledDebugInfo, _ := p.MarshalBinary()
buf.Write(marshaledDebugInfo)
if output == JSONOUTPUT {
writeToOutput(buf, output, fileName, "")
} else {
writeJSONPathToOutput(buf, fileName, "", outputOpts[i])
}
buf.Reset()
} else {
addHeader(w)
for _, section := range sections {
section(w, p)
}
writeToOutput(buf, output, fileName, "")
buf.Reset()
}
}
if len(outputTypes) > 0 {
return
}
if os.Getuid() != 0 {
rootWarningMessage()
}
// Just write to stdout in markdown formats if no output option specified.
addHeader(w)
for _, section := range sections {
section(w, p)
}
writeToOutput(buf, STDOUT, "", "")
}
func addHeader(w *tabwriter.Writer) {
fmt.Fprintf(w, "# Cilium debug information\n")
}
func addCiliumVersion(w *tabwriter.Writer, p *models.DebugInfo) {
printMD(w, "Cilium version", p.CiliumVersion)
}
func addKernelVersion(w *tabwriter.Writer, p *models.DebugInfo) {
printMD(w, "Kernel version", p.KernelVersion)
}
// addCiliumStatus renders the full agent status inside a fenced code block,
// delegating the formatting to the shared status printer.
func addCiliumStatus(w *tabwriter.Writer, p *models.DebugInfo) {
	printMD(w, "Cilium status", "")
	printTicks(w)
	// All four boolean flags request the most verbose status rendering.
	pkg.FormatStatusResponse(w, p.CiliumStatus, true, true, true, true)
	printTicks(w)
}
// addCiliumEnvironmentKeys prints the agent's environment variables, one per
// line, as a Markdown section.
func addCiliumEnvironmentKeys(w *tabwriter.Writer, p *models.DebugInfo) {
	printMD(w, "Cilium environment keys", strings.Join(p.EnvironmentVariables, "\n"))
}
// addCiliumEndpointList prints the endpoint table and then, for every
// endpoint, the output of several per-endpoint `cilium` CLI invocations
// (BPF policy, conntrack entries, endpoint get/health/log). Endpoints whose
// status carries a resolved identity additionally get an `identity get`
// section.
func addCiliumEndpointList(w *tabwriter.Writer, p *models.DebugInfo) {
	printMD(w, "Endpoint list", "")
	printTicks(w)
	printEndpointList(w, p.EndpointList)
	printTicks(w)
	for _, ep := range p.EndpointList {
		epID := strconv.FormatInt(ep.ID, 10)
		printList(w, "BPF Policy Get "+epID, "bpf", "policy", "get", epID, "-n")
		printList(w, "BPF CT List "+epID, "bpf", "ct", "list", epID)
		printList(w, "Endpoint Get "+epID, "endpoint", "get", epID)
		printList(w, "Endpoint Health "+epID, "endpoint", "health", epID)
		printList(w, "Endpoint Log "+epID, "endpoint", "log", epID)
		// Guard against endpoints without a status or identity; not every
		// endpoint in the list has one.
		if ep.Status != nil && ep.Status.Identity != nil {
			id := strconv.FormatInt(ep.Status.Identity.ID, 10)
			printList(w, "Identity get "+id, "identity", "get", id)
		}
	}
}
// addCiliumServiceList prints the load-balancer service table inside a
// fenced code block.
func addCiliumServiceList(w *tabwriter.Writer, p *models.DebugInfo) {
	printMD(w, "Service list", "")
	printTicks(w)
	printServiceList(w, p.ServiceList)
	printTicks(w)
}
// addCiliumPolicy prints the imported policy document together with its
// revision number.
func addCiliumPolicy(w *tabwriter.Writer, p *models.DebugInfo) {
	printMD(w, "Policy get", fmt.Sprintf(":\n %s\nRevision: %d\n", p.Policy.Policy, p.Policy.Revision))
}
// addSubsystems prints one Markdown section per subsystem status entry.
// Note: map iteration order is unspecified, so section order varies run to
// run.
func addSubsystems(w *tabwriter.Writer, p *models.DebugInfo) {
	for name, status := range p.Subsystem {
		printMD(w, name, status)
	}
}
// addCiliumMemoryMap prints the agent's memory map and, when present, the
// node monitor's memory map.
func addCiliumMemoryMap(w *tabwriter.Writer, p *models.DebugInfo) {
	// The header previously contained a stray trailing "\n", which injected
	// an extra blank line into the rendered Markdown heading.
	printMD(w, "Cilium memory map", p.CiliumMemoryMap)
	if nm := p.CiliumNodemonitorMemoryMap; len(nm) > 0 {
		// Use the bound variable instead of re-reading the field.
		printMD(w, "Cilium nodemonitor memory map", nm)
	}
}
// writeJSONPathToOutput unmarshals the raw DebugInfo payload in buf,
// evaluates the given JSONPath expression against it, and writes the result
// either to stdout (when path is empty) or to the file derived from path and
// suffix.
func writeJSONPathToOutput(buf bytes.Buffer, path string, suffix string, jsonPath string) {
	data := buf.Bytes()
	db := &models.DebugInfo{}
	if err := db.UnmarshalBinary(data); err != nil {
		// Bail out: evaluating a JSONPath over a half-populated model would
		// silently emit bogus output (the previous code carried on here).
		fmt.Fprintf(os.Stderr, "error unmarshaling binary: %s\n", err)
		return
	}
	jsonBytes, err := command.DumpJSONToSlice(db, jsonPath)
	if err != nil {
		fmt.Fprintf(os.Stderr, "error printing JSON: %s\n", err)
		return
	}
	if path == "" {
		// No output file requested; print the result to stdout.
		fmt.Println(string(jsonBytes))
		return
	}
	fileName := fileName(path, suffix)
	writeFile(jsonBytes, fileName)
	fmt.Printf("%s output at %s\n", jsonpathOutput, fileName)
}
// writeToOutput writes the buffered report to its destination. With an empty
// path the data goes to stdout (re-hydrated and pretty-printed via the
// command helpers when the format is JSON). Otherwise the data is written to
// a file named after path/suffix in the requested format, and the file
// location is echoed to stdout.
func writeToOutput(buf bytes.Buffer, output outputType, path string, suffix string) {
	data := buf.Bytes()
	if path == "" {
		switch output {
		case JSONOUTPUT:
			// Unmarshal back into the model so the shared JSON printer can
			// format it consistently with the rest of the CLI.
			db := &models.DebugInfo{}
			err := db.UnmarshalBinary(data)
			if err != nil {
				fmt.Fprintf(os.Stderr, "error unmarshaling binary: %s\n", err)
			}
			err = command.PrintOutputWithType(db, "json")
			if err != nil {
				fmt.Fprintf(os.Stderr, "error printing JSON: %s\n", err)
			}
		default:
			fmt.Println(string(data))
		}
		return
	}
	if output == STDOUT {
		// Write to standard output
		fmt.Println(string(data))
		return
	}
	fileName := fileName(path, suffix)
	switch output {
	case MARKDOWN:
		// Markdown file
		writeMarkdown(data, fileName)
	case HTML:
		// HTML file
		writeHTML(data, fileName)
	case JSONOUTPUT:
		writeJSON(data, fileName)
	case JSONPATH:
		// JSONPath results are persisted as plain JSON files as well.
		writeJSON(data, fileName)
	}
	fmt.Printf("%s output at %s\n", outputTypes[output], fileName)
}
// fileName derives the output file name from path and an optional suffix.
// With an empty suffix the path is returned untouched; otherwise the suffix
// is inserted before the extension, or appended when there is none.
func fileName(path, suffix string) string {
	if suffix == "" {
		return path
	}
	if ext := filepath.Ext(path); ext != "" {
		// "out.md" + "sec" -> "out-sec.md"
		base := strings.TrimSuffix(path, ext)
		return base + "-" + suffix + ext
	}
	// "out" + "sec" -> "out-sec"
	return path + "-" + suffix
}
// printList runs `cilium <args...>` and prints its combined stdout/stderr
// under the given Markdown header. Command failures are deliberately
// ignored: whatever output was produced is still included in the report.
func printList(w io.Writer, header string, args ...string) {
	output, _ := exec.Command("cilium", args...).CombinedOutput()
	printMD(w, header, string(output))
}
func printMD(w io.Writer, header string, body string) {
if len(body) > 0 {
fmt.Fprintf(w, "\n#### %s\n\n```\n%s\n```\n\n", header, body)
} else {
fmt.Fprintf(w, "\n#### %s\n\n", header)
}
}
func printTicks(w io.Writer) {
fmt.Fprint(w, "```\n")
}
// writeHTML converts the Markdown payload to HTML via blackfriday and writes
// it to path with 0644 permissions. Errors are reported to stderr but do not
// abort the process.
func writeHTML(data []byte, path string) {
	output := blackfriday.MarkdownCommon(data)
	if err := ioutil.WriteFile(path, output, 0644); err != nil {
		fmt.Fprintf(os.Stderr, "Error while writing HTML file %s", err)
		return
	}
}
func writeMarkdown(data []byte, path string) {
f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0600)
if err != nil {
fmt.Fprintf(os.Stderr, "Could not create file %s", path)
}
w := tabwriter.NewWriter(f, 5, 0, 3, ' ', 0)
w.Write(data)
}
func writeFile(data []byte, path string) {
f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0600)
if err != nil {
fmt.Fprintf(os.Stderr, "Could not create file %s", path)
os.Exit(1)
}
f.Write(data)
}
// writeJSON re-hydrates the binary DebugInfo payload, marshals it as
// indented JSON for readability, and writes the result to path. Any failure
// exits the process non-zero.
func writeJSON(data []byte, path string) {
	f, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0600)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Could not create file %s", path)
		os.Exit(1)
	}
	// Close the descriptor when done; the previous version leaked it.
	defer f.Close()
	db := &models.DebugInfo{}
	// Unmarshal the binary so we can indent the JSON appropriately when we
	// display it to end-users.
	err = db.UnmarshalBinary(data)
	if err != nil {
		fmt.Fprintf(os.Stderr, "error unmarshaling binary: %s\n", err)
		os.Exit(1)
	}
	result, err := json.MarshalIndent(db, "", "  ")
	if err != nil {
		fmt.Fprintf(os.Stderr, "error marshal-indenting data: %s\n", err)
		os.Exit(1)
	}
	f.Write(result)
}
|
apache-2.0
|
igorcoding/asynctnt
|
docs/conf.py
|
3253
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# asynctnt documentation build configuration file, created by
# sphinx-quickstart on Sun Mar 12 18:57:44 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.abspath('..'))
def find_version(path="../asynctnt/__init__.py"):
    """Extract the package version from a ``__version__`` assignment.

    The module file is scanned line by line rather than imported so the docs
    build does not require the (possibly compiled) package to be importable.

    :param path: file to scan; defaults to the package ``__init__.py``
                 relative to the docs directory, preserving the original
                 hard-coded behavior.
    :returns: the version string, or ``None`` when no ``__version__`` line
              is found.
    """
    import re
    # Use a context manager so the file handle is closed deterministically
    # (the original relied on garbage collection to close it).
    with open(path) as f:
        for line in f:
            if line.startswith("__version__"):
                return re.match(
                    r"""__version__\s*=\s*(['"])([^'"]+)\1""", line).group(2)
    return None
# Resolve the version once; it feeds both `version` and `release` below.
_ver = find_version()

# -- General configuration ------------------------------------------------

# Sphinx extensions used by this documentation build.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.viewcode',
              'sphinx.ext.githubpages',
              'sphinxcontrib.asyncio']

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# Suffix of documentation source files.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'asynctnt'
copyright = '2017, igorcoding'
author = 'igorcoding'

# Short X.Y version and full release string; both come straight from the
# package's __version__ attribute.
version = _ver
release = _ver

# Language for autogenerated content (None means English).
language = None

# Patterns, relative to the source directory, to ignore when looking for
# source files.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# Pygments (syntax highlighting) style.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` directives produce output.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# Read the Docs theme, located via the sphinx_rtd_theme package.
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Paths that contain custom static files (e.g. style sheets).
html_static_path = ['_static']

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for the HTML help builder.
htmlhelp_basename = 'asynctntdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'asynctnt.tex', 'asynctnt Documentation',
     'igorcoding', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'asynctnt', 'asynctnt Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'asynctnt', 'asynctnt Documentation',
     author, 'asynctnt', 'One line description of project.',
     'Miscellaneous'),
]
|
apache-2.0
|
momchil-atanasov/gostub
|
acceptance/acceptance_stubs/primitive_params_stub.go
|
1137
|
// Generated by 'github.com/mokiat/gostub'
package acceptance_stubs
import (
sync "sync"
alias1 "github.com/mokiat/gostub/acceptance"
)
// PrimitiveParamsStub is a generated test stub for the
// acceptance.PrimitiveParams interface. It records the arguments of every
// Save call and can optionally delegate to a user-supplied SaveStub
// callback.
type PrimitiveParamsStub struct {
	StubGUID int
	// SaveStub, when non-nil, is invoked by Save after the call is recorded.
	SaveStub func(arg1 int, arg2 string, arg3 float32)
	// saveMutex guards saveArgsForCall.
	saveMutex sync.RWMutex
	// saveArgsForCall accumulates one entry per Save invocation.
	saveArgsForCall []struct {
		arg1 int
		arg2 string
		arg3 float32
	}
}

// Compile-time check that the stub satisfies the interface.
var _ alias1.PrimitiveParams = new(PrimitiveParamsStub)
// Save records the call's arguments under the write lock and then, if a
// SaveStub callback is configured, forwards the arguments to it.
func (stub *PrimitiveParamsStub) Save(arg1 int, arg2 string, arg3 float32) {
	stub.saveMutex.Lock()
	defer stub.saveMutex.Unlock()
	stub.saveArgsForCall = append(stub.saveArgsForCall, struct {
		arg1 int
		arg2 string
		arg3 float32
	}{arg1, arg2, arg3})
	if stub.SaveStub != nil {
		// NOTE: the callback runs while the write lock is still held.
		stub.SaveStub(arg1, arg2, arg3)
	}
}
// SaveCallCount returns how many times Save has been invoked.
func (stub *PrimitiveParamsStub) SaveCallCount() int {
	stub.saveMutex.RLock()
	defer stub.saveMutex.RUnlock()
	return len(stub.saveArgsForCall)
}
// SaveArgsForCall returns the arguments of the index-th recorded Save call.
// It panics with an out-of-range error if fewer than index+1 calls were
// recorded.
func (stub *PrimitiveParamsStub) SaveArgsForCall(index int) (int, string, float32) {
	stub.saveMutex.RLock()
	defer stub.saveMutex.RUnlock()
	return stub.saveArgsForCall[index].arg1, stub.saveArgsForCall[index].arg2, stub.saveArgsForCall[index].arg3
}
|
apache-2.0
|
NationalSecurityAgency/ghidra
|
Ghidra/Framework/SoftwareModeling/src/main/java/ghidra/program/database/reloc/RelocationDBAdapterV3.java
|
3106
|
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.program.database.reloc;
import ghidra.program.database.map.AddressKeyRecordIterator;
import ghidra.program.database.map.AddressMap;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressSetView;
import ghidra.util.exception.VersionException;
import java.io.IOException;
import java.lang.UnsupportedOperationException;
import db.*;
/**
 * Database adapter for version 3 of the relocation table schema.
 * <p>
 * Record mutation ({@link #add} and {@link #remove}) is unsupported in this
 * adapter version; records are read and migrated to the current schema via
 * {@link #adaptRecord}.
 */
class RelocationDBAdapterV3 extends RelocationDBAdapter {
	final static int VERSION = 3;
	private Table relocTable;
	private AddressMap addrMap;

	/**
	 * Opens (or creates) the relocation table.
	 *
	 * @param handle  database handle
	 * @param addrMap address map used to translate record keys to addresses
	 * @param create  true to create a fresh table, false to open an existing one
	 * @throws IOException      if the database operation fails
	 * @throws VersionException if the table is missing or its schema version
	 *                          does not match {@link #VERSION}; upgradable
	 *                          (older) versions are flagged via
	 *                          {@code VersionException(true)}
	 */
	RelocationDBAdapterV3(DBHandle handle, AddressMap addrMap, boolean create) throws IOException,
			VersionException {
		this.addrMap = addrMap;
		if (create) {
			relocTable = handle.createTable(TABLE_NAME, SCHEMA);
		}
		else {
			relocTable = handle.getTable(TABLE_NAME);
			if (relocTable == null) {
				throw new VersionException("Missing Table: " + TABLE_NAME);
			}
			else if (relocTable.getSchema().getVersion() != VERSION) {
				int version = relocTable.getSchema().getVersion();
				if (version < VERSION) {
					// Older schema: caller may upgrade.
					throw new VersionException(true);
				}
				// Newer schema than this adapter understands: not upgradable.
				throw new VersionException(VersionException.NEWER_VERSION, false);
			}
		}
	}

	/** Unsupported: V3 tables are read-only for mutation. */
	@Override
	void add(long addrKey, int type, long[] values, byte[] bytes, String symbolName)
			throws IOException {
		throw new UnsupportedOperationException();
	}

	/** Returns the raw record stored under the given address key, or null. */
	@Override
	DBRecord get(long addrKey) throws IOException {
		return relocTable.getRecord(addrKey);
	}

	/** Unsupported: V3 tables are read-only for mutation. */
	@Override
	void remove(long addrKey) throws IOException {
		throw new UnsupportedOperationException();
	}

	/** Returns the number of relocation records in the table. */
	@Override
	int getRecordCount() {
		return relocTable.getRecordCount();
	}

	/** Returns the schema version this adapter handles (3). */
	@Override
	int getVersion() {
		return VERSION;
	}

	/** Iterates all records in address order. */
	@Override
	RecordIterator iterator() throws IOException {
		return new AddressKeyRecordIterator(relocTable, addrMap);
	}

	/** Iterates records whose addresses fall within the given set. */
	@Override
	RecordIterator iterator(AddressSetView set) throws IOException {
		return new AddressKeyRecordIterator(relocTable, addrMap, set, set.getMinAddress(), true);
	}

	/** Iterates records forward starting at the given address. */
	@Override
	RecordIterator iterator(Address start) throws IOException {
		return new AddressKeyRecordIterator(relocTable, addrMap, start, true);
	}

	/**
	 * Converts a V3 record to the current schema: the single long value is
	 * wrapped in a binary-coded array, and the (absent in V3) bytes and
	 * symbol-name columns are set to null.
	 */
	@Override
	DBRecord adaptRecord(DBRecord rec) {
		DBRecord newRec = SCHEMA.createRecord(rec.getKey());
		newRec.setIntValue(TYPE_COL, rec.getIntValue(TYPE_COL));
		long[] values = new long[] { rec.getLongValue(VALU_COL) };
		newRec.setField(VALU_COL, new BinaryCodedField(values));
		newRec.setBinaryData(BYTES_COL, null);
		newRec.setString(SYMBOL_NAME_COL, null);
		return newRec;
	}
}
|
apache-2.0
|
vam-google/google-cloud-java
|
google-cloud-clients/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonFirewallCallableFactory.java
|
3551
|
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.httpjson.ApiMessage;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonCallableFactory;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.rpc.BatchingCallSettings;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import javax.annotation.Generated;
import javax.annotation.Nullable;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
* HTTP callable factory implementation for compute.
*
* <p>This class is for advanced usage.
*/
@Generated("by gapic-generator")
@BetaApi("The surface for use by generated code is not stable yet and may change in the future.")
public class HttpJsonFirewallCallableFactory
    implements HttpJsonStubCallableFactory<ApiMessage, BackgroundResource> {

  /** Creates a unary callable by delegating to the shared HTTP/JSON factory. */
  @Override
  public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createUnaryCallable(
      HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
      UnaryCallSettings<RequestT, ResponseT> callSettings,
      ClientContext clientContext) {
    return HttpJsonCallableFactory.createUnaryCallable(
        httpJsonCallSettings, callSettings, clientContext);
  }

  /**
   * Long-running operations are not supported over HTTP/JSON in this
   * generated surface; always returns null.
   */
  @BetaApi(
      "The surface for long-running operations is not stable yet and may change in the future.")
  @Override
  @Nullable
  public <RequestT, ResponseT, MetadataT>
      OperationCallable<RequestT, ResponseT, MetadataT> createOperationCallable(
          HttpJsonCallSettings<RequestT, ApiMessage> httpJsonCallSettings,
          OperationCallSettings<RequestT, ResponseT, MetadataT> operationCallSettings,
          ClientContext clientContext,
          BackgroundResource operationsStub) {
    return null;
  }

  /** Creates a paged callable by delegating to the shared HTTP/JSON factory. */
  @Override
  public <RequestT, ResponseT, PagedListResponseT>
      UnaryCallable<RequestT, PagedListResponseT> createPagedCallable(
          HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
          PagedCallSettings<RequestT, ResponseT, PagedListResponseT> pagedCallSettings,
          ClientContext clientContext) {
    return HttpJsonCallableFactory.createPagedCallable(
        httpJsonCallSettings, pagedCallSettings, clientContext);
  }

  /** Creates a batching callable by delegating to the shared HTTP/JSON factory. */
  @Override
  public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createBatchingCallable(
      HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
      BatchingCallSettings<RequestT, ResponseT> batchingCallSettings,
      ClientContext clientContext) {
    return HttpJsonCallableFactory.createBatchingCallable(
        httpJsonCallSettings, batchingCallSettings, clientContext);
  }
}
|
apache-2.0
|
an83/android-play-location
|
Geofencing/app/src/main/java/com/google/android/gms/location/sample/geofencing/GeofenceTransitionsIntentService.java
|
5748
|
/**
* Copyright 2014 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.gms.location.sample.geofencing;
import android.app.IntentService;
import android.content.Context;
import android.content.Intent;
import android.support.v4.content.LocalBroadcastManager;
import android.text.TextUtils;
import android.util.Log;
import com.google.android.gms.location.Geofence;
import com.google.android.gms.location.GeofencingEvent;
import java.util.ArrayList;
import java.util.List;
/**
* Listener for geofence transition changes.
*
* Receives geofence transition events from Location Services in the form of an Intent containing
* the transition type and geofence id(s) that triggered the transition. Creates a notification
* as the output.
*/
public class GeofenceTransitionsIntentService extends IntentService {

    protected static final String TAG = "geofence-service";
    protected static final String REQUEST_RECEIVER_EXTRA = "geofence-service-receiver-extra";
    public static final String GEOFENCE_TRANSITION = "geofence-service-transition";
    private static final String GEOFENCE_TRANSITION_DETAILS = "geofence-service-transition-details";

    /**
     * This constructor is required, and calls the super IntentService(String)
     * constructor with the name for a worker thread.
     */
    public GeofenceTransitionsIntentService() {
        // Use the TAG to name the worker thread.
        super(TAG);
    }

    @Override
    public void onCreate() {
        super.onCreate();
    }

    /**
     * Handles incoming intents.
     * @param intent sent by Location Services. This Intent is provided to Location
     *               Services (inside a PendingIntent) when addGeofences() is called.
     */
    @Override
    protected void onHandleIntent(Intent intent) {
        GeofencingEvent geofencingEvent = GeofencingEvent.fromIntent(intent);
        if (geofencingEvent.hasError()) {
            String errorMessage = GeofenceErrorMessages.getErrorString(this,
                    geofencingEvent.getErrorCode());
            Log.e(TAG, errorMessage);
            return;
        }

        // Get the transition type.
        int geofenceTransition = geofencingEvent.getGeofenceTransition();

        // Test that the reported transition was of interest.
        if (geofenceTransition == Geofence.GEOFENCE_TRANSITION_ENTER ||
                geofenceTransition == Geofence.GEOFENCE_TRANSITION_EXIT) {

            // Get the geofences that were triggered. A single event can trigger multiple geofences.
            List<Geofence> triggeringGeofences = geofencingEvent.getTriggeringGeofences();

            // Get the transition details as a String.
            String geofenceTransitionDetails = getGeofenceTransitionDetails(
                    this,
                    geofenceTransition,
                    triggeringGeofences
            );

            // Send notification and log the transition details.
            Notification.sendNotification(this, geofenceTransitionDetails);
            Log.i(TAG, geofenceTransitionDetails);
//            LocalBroadcastManager.getInstance(this).sendBroadcast(new Intent(GEOFENCE_TRANSITION)
//                    .putExtra(GEOFENCE_TRANSITION_DETAILS, geofenceTransitionDetails));
        } else {
            // Log the error.
            Log.e(TAG, getString(R.string.geofence_transition_invalid_type, geofenceTransition));
        }
    }

    /**
     * Gets transition details and returns them as a formatted string.
     *
     * @param context             The app context.
     * @param geofenceTransition  The ID of the geofence transition.
     * @param triggeringGeofences The geofence(s) triggered.
     * @return The transition details formatted as String.
     */
    private String getGeofenceTransitionDetails(
            Context context,
            int geofenceTransition,
            List<Geofence> triggeringGeofences) {
        String geofenceTransitionString = getTransitionString(geofenceTransition);

        // Get the Ids of each geofence that was triggered.
        // Use a parameterized list instead of the raw ArrayList the original
        // declared, which produced unchecked-generics warnings.
        List<String> triggeringGeofencesIdsList = new ArrayList<>();
        for (Geofence geofence : triggeringGeofences) {
            triggeringGeofencesIdsList.add(geofence.getRequestId());
        }
        String triggeringGeofencesIdsString = TextUtils.join(", ", triggeringGeofencesIdsList);

        return geofenceTransitionString + ": " + triggeringGeofencesIdsString;
    }

    /**
     * Maps geofence transition types to their human-readable equivalents.
     *
     * @param transitionType A transition type constant defined in Geofence
     * @return A String indicating the type of transition
     */
    private String getTransitionString(int transitionType) {
        switch (transitionType) {
            case Geofence.GEOFENCE_TRANSITION_ENTER:
                return getString(R.string.geofence_transition_entered);
            case Geofence.GEOFENCE_TRANSITION_EXIT:
                return getString(R.string.geofence_transition_exited);
            default:
                return getString(R.string.unknown_geofence_transition);
        }
    }
}
|
apache-2.0
|
wildfly-security-incubator/keycloak
|
server-spi/src/main/java/org/keycloak/models/cache/CachedUserModel.java
|
1284
|
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.models.cache;
import org.keycloak.models.UserModel;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public interface CachedUserModel extends UserModel {

    /** Evicts this user from the cache so the next lookup hits the database. */
    void invalidate();

    /**
     * When was the user loaded from database.
     *
     * @return timestamp of the load
     */
    long getCacheTimestamp();

    /**
     * Returns a map that contains custom things that are cached along with the user. You can write to this map.
     * <p>
     * NOTE(review): declared as a raw ConcurrentHashMap; parameterizing it
     * would be a source-incompatible change for existing implementors, so the
     * raw type is kept.
     *
     * @return mutable per-user cache attachment map
     */
    ConcurrentHashMap getCachedWith();
}
|
apache-2.0
|
michaelliao/warpdb
|
src/test/java/com/itranswarp/warpdb/WarpDbBatchUpdateTest.java
|
1556
|
package com.itranswarp.warpdb;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import com.itranswarp.warpdb.test.User;
/**
 * Verifies batch update semantics: only explicitly-updatable columns change
 * ({@code name} is updated, {@code email} is not), {@code updatedAt} is
 * refreshed while {@code createdAt} is preserved, and sequential ids are
 * assigned on insert.
 * <p>
 * NOTE(review): the timestamp assertions use a 500 ms tolerance around
 * System.currentTimeMillis() plus a 600 ms sleep; slow CI machines could
 * make this flaky.
 */
public class WarpDbBatchUpdateTest extends WarpDbTestBase {

    @Test
    public void testBatchUpdate() throws Exception {
        // insert batch with id=0~26:
        User[] users = new User[27];
        for (int i = 0; i < users.length; i++) {
            User user = new User();
            user.name = "Name-" + i;
            user.email = "name" + i + "@somewhere.org";
            users[i] = user;
        }
        warpdb.insert(Arrays.asList(users));
        long ts = System.currentTimeMillis();
        for (int i = 0; i < users.length; i++) {
            User user = users[i];
            // Ids are assigned sequentially starting from "0001".
            assertEquals(String.format("%04d", i + 1), user.id);
            assertEquals(user.createdAt, user.updatedAt);
            assertEquals(ts, user.createdAt, 500.0);
        }
        // Ensure updatedAt will measurably differ from createdAt.
        Thread.sleep(600);
        // update:
        for (int i = 0; i < users.length; i++) {
            User user = users[i];
            user.name = "Updated-" + i;
            user.email = "updated" + i + "@new.org";
        }
        warpdb.update(Arrays.asList(users));
        ts = System.currentTimeMillis();
        // check:
        List<User> us = warpdb.from(User.class).orderBy("id").list();
        for (int i = 0; i < us.size(); i++) {
            User user = us.get(i);
            assertEquals(String.format("%04d", i + 1), user.id);
            assertEquals("Updated-" + i, user.name);
            assertEquals("name" + i + "@somewhere.org", user.email); // not updated
            assertNotEquals(user.createdAt, user.updatedAt);
            assertEquals(ts, user.updatedAt, 500.0);
            assertNotEquals(ts, user.createdAt, 500.0);
        }
    }
}
|
apache-2.0
|
PvMeira/LunchTime
|
src/main/java/com/br/pvemira/app/service/RestaurantServiceLocal.java
|
823
|
package com.br.pvemira.app.service;
import com.br.pvemira.app.model.DTO.RestaurantDTO;
import com.br.pvemira.app.model.Restaurant;
import com.br.pvemira.app.model.StrawPoll;
import com.br.pvemira.app.model.Vote;
import java.time.LocalDate;
import java.util.List;
/**
* Created by pvmeira on 17/06/17.
*/
public interface RestaurantServiceLocal {

    /** Persists a new restaurant built from the given DTO. */
    void saveRestaurant(RestaurantDTO restaurantDTO);

    /** Looks up a restaurant by its id. */
    Restaurant findByid(Long id);

    /** Associates a straw-poll date with the given restaurant. */
    void addStrawPollDateToRestaurant(Restaurant restaurant, LocalDate date);

    /** Returns DTOs of restaurants available for the poll, given its current votes. */
    List<RestaurantDTO> findAllRestaurantsAvaliable(StrawPoll strawPol, List<Vote> votesbyStrawPollId);

    /** Records a vote against the given restaurant. */
    void addVoteToRestaurant(Restaurant restaurant, Vote vote);

    /** Deletes the restaurant with the given id. */
    void delete(Long id);

    /** Returns DTOs for all known restaurants. */
    List<RestaurantDTO> listAll();

    /** Resolves the distinct restaurants referenced by the given votes. */
    List<Restaurant> getRestaurantsFromVoteList(List<Vote> votes);
}
|
apache-2.0
|
Hydrospheredata/mist
|
mist/master/src/test/scala/io/hydrosphere/mist/master/interfaces/http/HttpApiV2Spec.scala
|
18225
|
package io.hydrosphere.mist.master.interfaces.http
import java.io.File
import java.nio.file.{Files, Paths}
import java.util.UUID
import akka.http.scaladsl.model._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.util.ByteString
import io.hydrosphere.mist.core.CommonData._
import io.hydrosphere.mist.core.{FunctionInfoData, MockitoSugar}
import io.hydrosphere.mist.master.JobDetails.Source
import io.hydrosphere.mist.master._
import io.hydrosphere.mist.master.artifact.ArtifactRepository
import io.hydrosphere.mist.master.execution.{ExecutionService, WorkerLink}
import io.hydrosphere.mist.master.data.{ContextsStorage, FunctionConfigStorage}
import io.hydrosphere.mist.master.interfaces.JsonCodecs
import io.hydrosphere.mist.master.jobs.FunctionsService
import io.hydrosphere.mist.master.models._
import org.apache.commons.codec.digest.DigestUtils
import org.apache.commons.io.FileUtils
import org.mockito.Matchers.{anyInt, eq => mockitoEq}
import org.mockito.Mockito.{times, verify}
import org.scalatest.{FunSpec, Matchers}
import spray.json.RootJsonWriter
import mist.api.data._
import mist.api.encoding.defaultEncoders._
import mist.api.encoding.JsSyntax._
import scala.concurrent.Future
import scala.concurrent.duration._
class HttpApiV2Spec extends FunSpec
with Matchers
with MockitoSugar
with ScalatestRouteTest
with TestData
with TestUtils {
import JsonCodecs._
val mappings = new LogStoragePaths(Paths.get("."))
implicit class ToEntityOps[A](a: A)(implicit f: RootJsonWriter[A]) {
def toEntity(implicit f: RootJsonWriter[A]): RequestEntity = {
val data = f.write(a)
HttpEntity(ContentTypes.`application/json`, data)
}
}
describe("workers") {
it("should return workers") {
val execution = mock[ExecutionService]
when(execution.workers()).thenReturn(Seq(workerLinkData))
val route = HttpV2Routes.workerRoutes(execution)
Get("/v2/api/workers") ~> route ~> check {
status shouldBe StatusCodes.OK
val rsp = responseAs[Seq[WorkerLink]]
rsp.size shouldBe 1
}
}
it("should stop worker") {
val execution = mock[ExecutionService]
when(execution.stopWorker(any[String])).thenSuccess(())
val route = HttpV2Routes.workerRoutes(execution)
Delete("/v2/api/workers/id") ~> route ~> check {
status shouldBe StatusCodes.OK
}
}
it("should get full worker info") {
val execution = mock[ExecutionService]
when(execution.getWorkerLink(any[String]))
.thenReturn(Some(WorkerLink(
"id", "test", None,
WorkerInitInfo(Map(), 20, Duration.Inf, Duration.Inf, "test", "localhost:0", "localhost:0", 262144000, ""))))
val route = HttpV2Routes.workerRoutes(execution)
Get("/v2/api/workers/id") ~> route ~> check {
status shouldBe StatusCodes.OK
val resp = responseAs[WorkerLink]
resp.name shouldBe "id"
resp.initInfo shouldBe WorkerInitInfo(Map(), 20, Duration.Inf, Duration.Inf, "test", "localhost:0","localhost:0", 262144000, "")
resp.sparkUi should not be defined
resp.address shouldBe "test"
}
}
it("should return worker jobs") {
val execution = mock[ExecutionService]
when(execution.getHistory(any[JobDetailsRequest])).thenSuccess(JobDetailsResponse(
Seq(JobDetails("id", "1",
JobParams("path", "className", JsMap.empty, Action.Execute),
"context", None, JobDetails.Source.Http)
), 1
))
val route = HttpV2Routes.workerRoutes(execution)
Get("/v2/api/workers/id/jobs?status=started") ~> route ~> check {
status shouldBe StatusCodes.OK
val jobs = responseAs[Seq[JobDetails]]
jobs.size shouldBe 1
}
Get("/v2/api/workers/id/jobs?status=started&paginate=true") ~> route ~> check {
status shouldBe StatusCodes.OK
val rsp = responseAs[JobDetailsResponse]
rsp.jobs.size shouldBe 1
}
}
}
describe("functions") {
it("should run job") {
val master = mock[MainService]
when(master.runJob(any[FunctionStartRequest], any[Source]))
.thenSuccess(Some(JobStartResponse("1")))
val route = HttpV2Routes.functionsJobs(master)
Post(s"/v2/api/functions/x/jobs", JsMap("1" -> "Hello".js)) ~> route ~> check {
status shouldBe StatusCodes.OK
}
}
it("should return history for function") {
val execution = mock[ExecutionService]
val master = mock[MainService]
when(master.execution).thenReturn(execution)
when(execution.getHistory(any[JobDetailsRequest])).thenSuccess(JobDetailsResponse(
Seq(JobDetails("id", "1",
JobParams("path", "className", JsMap.empty, Action.Execute),
"context", None, JobDetails.Source.Http)
), 1
))
val route = HttpV2Routes.functionsJobs(master)
Get("/v2/api/functions/id/jobs?status=started") ~> route ~> check {
status shouldBe StatusCodes.OK
val jobs = responseAs[Seq[JobDetails]]
jobs.size shouldBe 1
}
Get("/v2/api/functions/id/jobs?status=started&paginate=true") ~> route ~> check {
status shouldBe StatusCodes.OK
val rsp = responseAs[JobDetailsResponse]
rsp.jobs.size shouldBe 1
}
}
}
describe("function creation") {
val functionData = FunctionInfoData(
name="test",
lang="scala",
path= "path",
className="Test",
defaultContext = "foo"
)
it("should update function on create if function created") {
val functions = mock[FunctionsService]
val test = FunctionConfig("test", "test", "test", "default")
when(functions.hasFunction(any[String])).thenSuccess(false)
when(functions.update(any[FunctionConfig]))
.thenSuccess(FunctionInfoData(
lang = "python",
path = "test",
defaultContext = "foo",
className = "test",
name = "test"
))
val route = HttpV2Routes.functionsCrud(functions)
Post("/v2/api/functions", test.toEntity) ~> route ~> check {
status shouldBe StatusCodes.OK
}
}
it("should return different entity when forcibly update") {
val functions = mock[FunctionsService]
val test = FunctionConfig("test", "test", "test", "default")
when(functions.updateConfig(any[FunctionConfig])).thenSuccess(test)
val route = HttpV2Routes.functionsCrud(functions)
Post("/v2/api/functions?force=true", test.toEntity) ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[FunctionConfig] shouldBe test
}
}
it("should update function if it exists") {
val functions = mock[FunctionsService]
val test = FunctionConfig("test", "test", "test", "default")
when(functions.hasFunction(any[String])).thenSuccess(true)
when(functions.update(any[FunctionConfig]))
.thenSuccess(FunctionInfoData(
lang = "python",
path = "test",
defaultContext = "default",
className = "test",
name = "test"
))
val route = HttpV2Routes.functionsCrud(functions)
Put("/v2/api/functions", test.toEntity) ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[FunctionConfig] shouldBe test
}
}
it("should not update function if it do not exist") {
val functions = mock[FunctionsService]
val test = FunctionConfig("test", "test", "test", "default")
when(functions.hasFunction(any[String])).thenSuccess(false)
val route = HttpV2Routes.functionsCrud(functions)
Put("/v2/api/functions", test.toEntity) ~> route ~> check {
status shouldBe StatusCodes.BadRequest
}
}
it("should fail with invalid data for function") {
val functions = mock[FunctionsService]
val functionConfig = FunctionConfig("name", "path", "className", "context")
when(functions.hasFunction(any[String])).thenReturn(Future.successful(false))
when(functions.update(any[FunctionConfig])).thenReturn(Future.failed(new Exception("test failure")))
val route = HttpV2Routes.functionsCrud(functions)
Post("/v2/api/functions", functionConfig.toEntity) ~> route ~> check {
status shouldBe StatusCodes.BadRequest
}
}
it("should delete function") {
val functions = mock[FunctionsService]
when(functions.delete(any[String])).thenSuccess(Some(FunctionInfoData(
name="test",
lang="scala",
path= "path",
className="Test",
defaultContext = "foo"
)))
val route = HttpV2Routes.functionsCrud(functions)
Delete(s"/v2/api/functions/x") ~> route ~> check {
status shouldBe StatusCodes.OK
}
}
}
describe("jobs") {
val jobDetails = JobDetails(
params = JobParams("path", "className", JsMap.empty, Action.Execute),
jobId = "id",
source = Source.Http,
function = "function",
context = "context",
externalId = None
)
it("should return jobs") {
val execution = mock[ExecutionService]
val master = mock[MainService]
when(master.execution).thenReturn(execution)
when(execution.getHistory(any[JobDetailsRequest]))
.thenSuccess(JobDetailsResponse(Seq(jobDetails), 1))
val route = HttpV2Routes.jobsRoutes(master)
Get(s"/v2/api/jobs") ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[Seq[JobDetails]]
}
Get(s"/v2/api/jobs?paginate=true") ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[JobDetailsResponse]
}
}
it("should return jobs status by id") {
val execution = mock[ExecutionService]
val master = mock[MainService]
when(master.execution).thenReturn(execution)
when(execution.jobStatusById(any[String]))
.thenSuccess(Some(jobDetails))
val route = HttpV2Routes.jobsRoutes(master)
Get(s"/v2/api/jobs/id") ~> route ~> check {
status shouldBe StatusCodes.OK
val rsp = responseAs[Option[JobDetails]]
rsp.isDefined shouldBe true
}
}
it("should return 400 on logs request when job not found") {
val execution = mock[ExecutionService]
val master = mock[MainService]
when(master.execution).thenReturn(execution)
when(execution.jobStatusById(any[String]))
.thenSuccess(None)
val route = HttpV2Routes.jobsRoutes(master)
Get(s"/v2/api/jobs/id/logs") ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
it("should return 200 empty response on logs request when job log file not exists") {
val execution = mock[ExecutionService]
val master = mock[MainService]
val logStorageMappings = mock[LogStoragePaths]
when(master.execution).thenReturn(execution)
when(master.logsPaths).thenReturn(logStorageMappings)
when(execution.jobStatusById(any[String]))
.thenSuccess(Some(jobDetails))
when(logStorageMappings.pathFor(any[String]))
.thenReturn(Paths.get(".", UUID.randomUUID().toString))
val route = HttpV2Routes.jobsRoutes(master)
Get(s"/v2/api/jobs/id/logs") ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[String] shouldBe ""
}
}
it("should cancel job") {
val execution = mock[ExecutionService]
val master = mock[MainService]
when(master.execution).thenReturn(execution)
when(execution.stopJob(any[String])).thenSuccess(Some(jobDetails))
val route = HttpV2Routes.jobsRoutes(master)
Delete(s"/v2/api/jobs/id") ~> route ~> check {
status shouldBe StatusCodes.OK
val rsp = responseAs[Option[JobDetails]]
rsp.isDefined shouldBe true
}
}
}
describe("contexts") {
class TestCrud extends ContextsCRUDLike {
def create(req: ContextCreateRequest): Future[ContextConfig] = ???
def getAll(): Future[Seq[ContextConfig]] = ???
def update(a: ContextConfig): Future[ContextConfig] = ???
def get(id: String): Future[Option[ContextConfig]] = ???
def delete(id: String): Future[Option[ContextConfig]] = ???
}
it("should create context") {
val crud = new TestCrud {
override def create(req: ContextCreateRequest): Future[ContextConfig] = Future.successful(FooContext)
}
val route = HttpV2Routes.contextsRoutes(crud)
val req = ContextCreateRequest("yoyo", workerMode = Some(RunMode.ExclusiveContext))
Post(s"/v2/api/contexts", req.toEntity) ~> route ~> check {
status shouldBe StatusCodes.OK
}
}
it("should delete context") {
val crud = new TestCrud {
override def delete(id: String): Future[Option[ContextConfig]] = Future.successful(Some(FooContext))
}
val route = HttpV2Routes.contextsRoutes(crud)
Delete(s"/v2/api/contexts/foo") ~> route ~> check {
status shouldBe StatusCodes.OK
}
}
}
describe("artifact") {
it("should list all unique file names in artifact repository") {
val artifactRepo = mock[ArtifactRepository]
when(artifactRepo.listPaths()).thenSuccess(Set("test.jar", "test.py"))
val routes = HttpV2Routes.artifactRoutes(artifactRepo)
Get("/v2/api/artifacts") ~> routes ~> check {
status shouldBe StatusCodes.OK
responseAs[Set[String]] should contain allOf("test.jar", "test.py")
}
}
it("should upload file if it unique") {
val artifactRepo = mock[ArtifactRepository]
when(artifactRepo.get(any[String]))
.thenReturn(None)
when(artifactRepo.store(any[File], any[String]))
.thenSuccess(new File("some/internal/path/test.jar"))
val routes = HttpV2Routes.artifactRoutes(artifactRepo)
val multipartForm =
Multipart.FormData(Multipart.FormData.BodyPart.Strict(
"file",
HttpEntity(ContentTypes.`application/octet-stream`, ByteString.fromString("Jar content")),
Map("filename" -> "test.jar")))
Post("/v2/api/artifacts", multipartForm) ~> routes ~> check {
status shouldBe StatusCodes.OK
responseAs[String] shouldBe "test.jar"
}
}
it("should return 400 when upload filename not unique") {
val artifactRepo = mock[ArtifactRepository]
when(artifactRepo.get(any[String]))
.thenReturn(Some(new File("test.jar")))
val routes = HttpV2Routes.artifactRoutes(artifactRepo)
val multipartForm =
Multipart.FormData(Multipart.FormData.BodyPart.Strict(
"file",
HttpEntity(ContentTypes.`application/octet-stream`, ByteString.fromString("Jar content")),
Map("filename" -> "test.jar")))
Post("/v2/api/artifacts", multipartForm) ~> routes ~> check {
status shouldBe StatusCodes.Conflict
}
}
it("should not check uniqueness when force flag applied") {
val artifactRepo = mock[ArtifactRepository]
val routes = HttpV2Routes.artifactRoutes(artifactRepo)
when(artifactRepo.store(any[File], any[String]))
.thenSuccess(new File("some/internal/path/test.jar"))
val multipartForm =
Multipart.FormData(Multipart.FormData.BodyPart.Strict(
"file",
HttpEntity(ContentTypes.`application/octet-stream`, ByteString.fromString("Jar content")),
Map("filename" -> "test.jar")))
Post("/v2/api/artifacts?force=true", multipartForm) ~> routes ~> check {
status shouldBe StatusCodes.OK
}
}
it("should download file if it exists") {
val artifactRepo = mock[ArtifactRepository]
val file = new File("./target/test.jar")
FileUtils.touch(file)
when(artifactRepo.get(any[String]))
.thenReturn(Some(file))
val routes = HttpV2Routes.artifactRoutes(artifactRepo)
Get("/v2/api/artifacts/test.jar") ~> routes ~> check {
status shouldBe StatusCodes.OK
}
FileUtils.deleteQuietly(file)
}
it("should return not found when download file not exists") {
val artifactRepo = mock[ArtifactRepository]
when(artifactRepo.get(any[String]))
.thenReturn(None)
val routes = HttpV2Routes.artifactRoutes(artifactRepo)
Get("/v2/api/artifacts/test.jar") ~> routes ~> check {
status shouldBe StatusCodes.NotFound
}
}
it("should return sha of given filename") {
val file = new File("./target/test.jar")
FileUtils.touch(file)
val expectedHex = DigestUtils.sha1Hex(Files.newInputStream(file.toPath))
val artifactRepository = mock[ArtifactRepository]
when(artifactRepository.get(any[String]))
.thenReturn(Some(file))
val routes = HttpV2Routes.artifactRoutes(artifactRepository)
Get("/v2/api/artifacts/test.jar/sha") ~> routes ~> check {
responseAs[String] shouldBe expectedHex
}
}
}
describe("status") {
it("should return status") {
val route = HttpV2Routes.statusApi
Get("/v2/api/status") ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[MistStatus]
}
}
}
describe("full api") {
it("should return bad request on futures failed illegal argument exception") {
val master = mock[MainService]
when(master.runJob(any[FunctionStartRequest], any[Source]))
.thenFailure(new IllegalArgumentException("argument missing"))
val route = HttpV2Routes.apiRoutes(master, mock[ArtifactRepository], "")
Post(s"/v2/api/functions/x/jobs", JsMap("1" -> "Hello".js)) ~> route ~> check {
status shouldBe StatusCodes.BadRequest
}
}
it("should return 500 on future`s any exception except iae") {
val master = mock[MainService]
when(master.runJob(any[FunctionStartRequest], any[Source]))
.thenFailure(new RuntimeException("some exception"))
val route = HttpV2Routes.apiRoutes(master, mock[ArtifactRepository], "")
Post(s"/v2/api/functions/x/jobs", JsMap("1" -> "Hello".js)) ~> route ~> check {
status shouldBe StatusCodes.InternalServerError
}
}
}
}
|
apache-2.0
|
pwittchen/ReactiveNetwork
|
library/src/test/java/com/github/pwittchen/reactivenetwork/library/rx2/ConnectivityTest.java
|
12175
|
/*
* Copyright (C) 2016 Piotr Wittchen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pwittchen.reactivenetwork.library.rx2;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import io.reactivex.functions.Predicate;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.RuntimeEnvironment;
import static com.google.common.truth.Truth.assertThat;
/**
 * Unit tests for {@link Connectivity}: default values, builder population,
 * state/type predicates, equals/hashCode and toString.
 *
 * Cleanups: C-style array declarations ("State states[]") replaced with
 * Java-style ("State[] states"), and predicate results are held in primitive
 * {@code boolean}s instead of boxed {@code Boolean}s.
 */
@RunWith(RobolectricTestRunner.class)
@SuppressWarnings("NullAway") public class ConnectivityTest {
  private static final String TYPE_NAME_WIFI = "WIFI";
  private static final String TYPE_NAME_MOBILE = "MOBILE";
  private static final String TYPE_NAME_NONE = "NONE";
  @Test public void shouldCreateConnectivity() {
    // when
    Connectivity connectivity = Connectivity.create();
    // then
    assertThat(connectivity).isNotNull();
    assertThat(connectivity.state()).isEqualTo(NetworkInfo.State.DISCONNECTED);
    assertThat(connectivity.detailedState()).isEqualTo(NetworkInfo.DetailedState.IDLE);
    assertThat(connectivity.type()).isEqualTo(Connectivity.UNKNOWN_TYPE);
    assertThat(connectivity.subType()).isEqualTo(Connectivity.UNKNOWN_SUB_TYPE);
    assertThat(connectivity.available()).isFalse();
    assertThat(connectivity.failover()).isFalse();
    assertThat(connectivity.roaming()).isFalse();
    assertThat(connectivity.typeName()).isEqualTo(TYPE_NAME_NONE);
    assertThat(connectivity.subTypeName()).isEqualTo(TYPE_NAME_NONE);
    assertThat(connectivity.reason()).isEmpty();
    assertThat(connectivity.extraInfo()).isEmpty();
  }
  @Test public void stateShouldBeEqualToGivenValue() throws Exception {
    // given
    final Connectivity connectivity = Connectivity.state(NetworkInfo.State.CONNECTED)
        .type(ConnectivityManager.TYPE_WIFI)
        .typeName(TYPE_NAME_WIFI)
        .build();
    // when
    final Predicate<Connectivity> equalTo = ConnectivityPredicate.hasState(connectivity.state());
    final boolean shouldBeEqualToGivenStatus = equalTo.test(connectivity);
    // then
    assertThat(shouldBeEqualToGivenStatus).isTrue();
  }
  @Test public void stateShouldBeEqualToOneOfGivenMultipleValues() throws Exception {
    // given
    final Connectivity connectivity = Connectivity.state(NetworkInfo.State.CONNECTING)
        .type(ConnectivityManager.TYPE_WIFI)
        .typeName(TYPE_NAME_WIFI)
        .build();
    final NetworkInfo.State[] states =
        { NetworkInfo.State.CONNECTED, NetworkInfo.State.CONNECTING };
    // when
    final Predicate<Connectivity> equalTo = ConnectivityPredicate.hasState(states);
    final boolean shouldBeEqualToGivenStatus = equalTo.test(connectivity);
    // then
    assertThat(shouldBeEqualToGivenStatus).isTrue();
  }
  @Test public void stateShouldNotBeEqualToGivenValue() throws Exception {
    // given
    final Connectivity connectivity =
        Connectivity.state(NetworkInfo.State.DISCONNECTED)
            .type(ConnectivityManager.TYPE_WIFI)
            .typeName(TYPE_NAME_WIFI)
            .build();
    // when
    final Predicate<Connectivity> equalTo =
        ConnectivityPredicate.hasState(NetworkInfo.State.CONNECTED);
    final boolean shouldBeEqualToGivenStatus = equalTo.test(connectivity);
    // then
    assertThat(shouldBeEqualToGivenStatus).isFalse();
  }
  @Test public void typeShouldBeEqualToGivenValue() throws Exception {
    // given
    final Connectivity connectivity = Connectivity.state(NetworkInfo.State.CONNECTED)
        .type(ConnectivityManager.TYPE_WIFI)
        .typeName(TYPE_NAME_WIFI)
        .build();
    // note that unknown type is added initially by the ConnectivityPredicate#hasType method
    final int[] givenTypes = { connectivity.type(), Connectivity.UNKNOWN_TYPE };
    // when
    final Predicate<Connectivity> equalTo = ConnectivityPredicate.hasType(givenTypes);
    final boolean shouldBeEqualToGivenStatus = equalTo.test(connectivity);
    // then
    assertThat(shouldBeEqualToGivenStatus).isTrue();
  }
  @Test public void typeShouldBeEqualToOneOfGivenMultipleValues() throws Exception {
    // given
    final Connectivity connectivity = Connectivity.state(NetworkInfo.State.CONNECTING)
        .type(ConnectivityManager.TYPE_MOBILE)
        .typeName(TYPE_NAME_MOBILE)
        .build();
    // note that unknown type is added initially by the ConnectivityPredicate#hasType method
    final int[] givenTypes = {
        ConnectivityManager.TYPE_WIFI, ConnectivityManager.TYPE_MOBILE, Connectivity.UNKNOWN_TYPE
    };
    // when
    final Predicate<Connectivity> equalTo = ConnectivityPredicate.hasType(givenTypes);
    final boolean shouldBeEqualToGivenStatus = equalTo.test(connectivity);
    // then
    assertThat(shouldBeEqualToGivenStatus).isTrue();
  }
  @Test public void typeShouldNotBeEqualToGivenValue() throws Exception {
    // given
    final Connectivity connectivity = Connectivity.state(NetworkInfo.State.CONNECTED)
        .type(ConnectivityManager.TYPE_WIFI)
        .typeName(TYPE_NAME_WIFI)
        .build();
    // note that unknown type is added initially by the ConnectivityPredicate#hasType method
    final int[] givenTypes = { ConnectivityManager.TYPE_MOBILE, Connectivity.UNKNOWN_TYPE };
    // when
    final Predicate<Connectivity> equalTo = ConnectivityPredicate.hasType(givenTypes);
    final boolean shouldBeEqualToGivenStatus = equalTo.test(connectivity);
    // then
    assertThat(shouldBeEqualToGivenStatus).isFalse();
  }
  @Test(expected = IllegalArgumentException.class)
  public void createShouldThrowAnExceptionWhenContextIsNull() {
    // given
    final Context context = null;
    // when
    Connectivity.create(context);
    // then
    // an exception is thrown
  }
  @Test public void shouldReturnProperToStringValue() {
    // given
    final String expectedToString = "Connectivity{"
        + "state=DISCONNECTED, "
        + "detailedState=IDLE, "
        + "type=-1, "
        + "subType=-1, "
        + "available=false, "
        + "failover=false, "
        + "roaming=false, "
        + "typeName='NONE', "
        + "subTypeName='NONE', "
        + "reason='', "
        + "extraInfo=''}";
    // when
    Connectivity connectivity = Connectivity.create();
    // then
    assertThat(connectivity.toString()).isEqualTo(expectedToString);
  }
  @Test public void theSameConnectivityObjectsShouldBeEqual() {
    // given
    final Connectivity connectivityOne = Connectivity.create();
    final Connectivity connectivityTwo = Connectivity.create();
    // when
    boolean objectsAreEqual = connectivityOne.equals(connectivityTwo);
    // then
    assertThat(objectsAreEqual).isTrue();
  }
  @Test public void twoDefaultObjectsShouldBeInTheSameBucket() {
    // given
    final Connectivity connectivityOne = Connectivity.create();
    final Connectivity connectivityTwo = Connectivity.create();
    // when
    boolean hashCodesAreEqual = connectivityOne.hashCode() == connectivityTwo.hashCode();
    // then
    assertThat(hashCodesAreEqual).isTrue();
  }
  @Test public void shouldAppendUnknownTypeWhileFilteringNetworkTypesInsidePredicate() {
    // given
    int[] types = { ConnectivityManager.TYPE_MOBILE, ConnectivityManager.TYPE_WIFI };
    int[] expectedOutputTypes = {
        ConnectivityManager.TYPE_MOBILE, ConnectivityManager.TYPE_WIFI, Connectivity.UNKNOWN_TYPE
    };
    // when
    int[] outputTypes = ConnectivityPredicate.appendUnknownNetworkTypeToTypes(types);
    // then
    assertThat(outputTypes).isEqualTo(expectedOutputTypes);
  }
  @Test
  public void shouldAppendUnknownTypeWhileFilteringNetworkTypesInsidePredicateForEmptyArray() {
    // given
    int[] types = {};
    int[] expectedOutputTypes = { Connectivity.UNKNOWN_TYPE };
    // when
    int[] outputTypes = ConnectivityPredicate.appendUnknownNetworkTypeToTypes(types);
    // then
    assertThat(outputTypes).isEqualTo(expectedOutputTypes);
  }
  @Test public void shouldCreateConnectivityWithBuilder() {
    // given
    NetworkInfo.State state = NetworkInfo.State.CONNECTED;
    NetworkInfo.DetailedState detailedState = NetworkInfo.DetailedState.CONNECTED;
    int type = ConnectivityManager.TYPE_WIFI;
    int subType = ConnectivityManager.TYPE_WIMAX;
    String typeName = TYPE_NAME_WIFI;
    String subTypeName = "test subType";
    String reason = "no reason";
    String extraInfo = "extra info";
    // when
    Connectivity connectivity = Connectivity.state(state)
        .detailedState(detailedState)
        .type(type)
        .subType(subType)
        .available(true)
        .failover(false)
        .roaming(true)
        .typeName(typeName)
        .subTypeName(subTypeName)
        .reason(reason)
        .extraInfo(extraInfo)
        .build();
    // then
    assertThat(connectivity.state()).isEqualTo(state);
    assertThat(connectivity.detailedState()).isEqualTo(detailedState);
    assertThat(connectivity.type()).isEqualTo(type);
    assertThat(connectivity.subType()).isEqualTo(subType);
    assertThat(connectivity.available()).isTrue();
    assertThat(connectivity.failover()).isFalse();
    assertThat(connectivity.roaming()).isTrue();
    assertThat(connectivity.typeName()).isEqualTo(typeName);
    assertThat(connectivity.subTypeName()).isEqualTo(subTypeName);
    assertThat(connectivity.reason()).isEqualTo(reason);
    assertThat(connectivity.extraInfo()).isEqualTo(extraInfo);
  }
  @Test public void connectivityShouldNotBeEqualToAnotherOne() {
    // given
    Connectivity connectivityOne = Connectivity.state(NetworkInfo.State.CONNECTED)
        .detailedState(NetworkInfo.DetailedState.CONNECTED)
        .type(ConnectivityManager.TYPE_WIFI)
        .subType(1)
        .available(true)
        .failover(true)
        .roaming(true)
        .typeName(TYPE_NAME_WIFI)
        .subTypeName("subtypeOne")
        .reason("reasonOne")
        .extraInfo("extraInfoOne")
        .build();
    Connectivity connectivityTwo = Connectivity.state(NetworkInfo.State.DISCONNECTED)
        .detailedState(NetworkInfo.DetailedState.DISCONNECTED)
        .type(ConnectivityManager.TYPE_MOBILE)
        .subType(2)
        .available(false)
        .failover(false)
        .roaming(false)
        .typeName(TYPE_NAME_MOBILE)
        .subTypeName("subtypeTwo")
        .reason("reasonTwo")
        .extraInfo("extraInfoTwo")
        .build();
    // when
    final boolean isAnotherConnectivityTheSame = connectivityOne.equals(connectivityTwo);
    // then
    assertThat(isAnotherConnectivityTheSame).isFalse();
  }
  @Test public void shouldCreateDefaultConnectivityWhenConnectivityManagerIsNull() {
    // given
    final Context context = RuntimeEnvironment.application.getApplicationContext();
    final ConnectivityManager connectivityManager = null;
    // when
    Connectivity connectivity = Connectivity.create(context, connectivityManager);
    // then
    assertThat(connectivity.type()).isEqualTo(Connectivity.UNKNOWN_TYPE);
    assertThat(connectivity.subType()).isEqualTo(Connectivity.UNKNOWN_SUB_TYPE);
    assertThat(connectivity.state()).isEqualTo(NetworkInfo.State.DISCONNECTED);
    assertThat(connectivity.detailedState()).isEqualTo(NetworkInfo.DetailedState.IDLE);
    assertThat(connectivity.available()).isFalse();
    assertThat(connectivity.failover()).isFalse();
    assertThat(connectivity.roaming()).isFalse();
    assertThat(connectivity.typeName()).isEqualTo(TYPE_NAME_NONE);
    assertThat(connectivity.subTypeName()).isEqualTo(TYPE_NAME_NONE);
    assertThat(connectivity.reason()).isEmpty();
    assertThat(connectivity.extraInfo()).isEmpty();
  }
}
|
apache-2.0
|
bayofmany/peapod
|
core/src/test/java/peapod/interfaces/Person.java
|
2733
|
/*
* Copyright 2015-Bay of Many
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This project is derived from code in the TinkerPop project under the following licenses:
*
* TinkerPop3
* http://www.apache.org/licenses/LICENSE-2.0
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the TinkerPop nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL TINKERPOP BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package peapod.interfaces;
import peapod.FramedVertex;
import peapod.annotations.Edge;
import peapod.annotations.Property;
import peapod.annotations.Vertex;
import java.util.List;
// Graph frame for a person vertex; friendships are modelled as "e-friend" edges.
@Vertex
public interface Person extends FramedVertex<Person> {
// Vertex property persisted under the key "p-name".
@Property("p-name")
String getName();
void setName(String name);
// Outgoing "e-friend" edges to other Person vertices.
@Edge("e-friend")
void addFriend(Person person);
void removeFriend(Person person);
List<Person> getFriends();
}
|
apache-2.0
|
liuyuanyuan/dbeaver
|
plugins/org.jkiss.dbeaver.model/src/org/jkiss/dbeaver/model/navigator/DBNRoot.java
|
5876
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.model.navigator;
import org.eclipse.core.resources.IProject;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.model.DBPImage;
import org.jkiss.dbeaver.model.app.DBPProject;
import org.jkiss.dbeaver.model.app.DBPProjectListener;
import org.jkiss.dbeaver.model.messages.ModelMessages;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.utils.ArrayUtils;
import java.util.Arrays;
import java.util.Comparator;
/**
* DBNRoot
*/
/**
 * DBNRoot - invisible root of the navigator tree. Owns one {@link DBNProject}
 * node per workspace project and keeps the set in sync through
 * {@link DBPProjectListener} callbacks (global models only).
 */
public class DBNRoot extends DBNNode implements DBNContainer, DBPProjectListener
{
    private final DBNModel model;
    private DBNProject[] projects = new DBNProject[0];

    public DBNRoot(DBNModel model)
    {
        super();
        this.model = model;
        // Pre-populate from already-open projects; reflect=false keeps the
        // bulk initialization silent (no ADD events during construction).
        for (DBPProject project : DBWorkbench.getPlatform().getWorkspace().getProjects()) {
            addProject(project, false);
        }
        if (model.isGlobal()) {
            model.getPlatform().getWorkspace().addProjectListener(this);
        }
    }

    @Override
    void dispose(boolean reflect)
    {
        for (DBNProject project : projects) {
            project.dispose(reflect);
        }
        projects = new DBNProject[0];
        if (model.isGlobal()) {
            model.getPlatform().getWorkspace().removeProjectListener(this);
        }
    }

    @Override
    public DBNModel getModel() {
        return model;
    }

    @Override
    public String getNodeType()
    {
        return ModelMessages.model_navigator_Root;
    }

    @Override
    public Object getValueObject()
    {
        return this;
    }

    @Override
    public String getChildrenType()
    {
        return ModelMessages.model_navigator_Project;
    }

    @Override
    public Class<IProject> getChildrenClass()
    {
        return IProject.class;
    }

    @NotNull
    @Override
    @Property(viewable = true, order = 1)
    public String getName() {
        return super.getName();
    }

    @Override
    public String getNodeName()
    {
        return "#root"; //$NON-NLS-1$
    }

    @Override
    public String getNodeDescription()
    {
        return ModelMessages.model_navigator_Model_root;
    }

    @Override
    public DBPImage getNodeIcon()
    {
        return null;
    }

    @Override
    public boolean allowsChildren()
    {
        return projects.length > 0;
    }

    @Override
    public DBNProject[] getChildren(DBRProgressMonitor monitor)
    {
        return projects;
    }

    public DBNProject[] getProjects() {
        return projects;
    }

    @Override
    public boolean allowsOpen()
    {
        return true;
    }

    @Override
    public String getNodeItemPath() {
        // The root contributes nothing to child item paths.
        return "";
    }

    /** Finds the node wrapping the given Eclipse project, or null if absent. */
    public DBNProject getProjectNode(IProject project) {
        for (DBNProject node : projects) {
            if (node.getProject().getEclipseProject() == project) {
                return node;
            }
        }
        return null;
    }

    /** Finds the node wrapping the given model-level project, or null if absent. */
    public DBNProject getProjectNode(DBPProject project) {
        for (DBNProject node : projects) {
            if (node.getProject() == project) {
                return node;
            }
        }
        return null;
    }

    /**
     * Registers a navigator node for the project.
     *
     * @param project the project to add
     * @param reflect whether to broadcast an ADD event to model listeners
     * @return the newly created project node
     */
    public DBNProject addProject(DBPProject project, boolean reflect)
    {
        DBNProject projectNode = new DBNProject(
            this,
            project,
            project.getWorkspace().getResourceHandler(project.getEclipseProject()));
        projects = ArrayUtils.add(DBNProject.class, projects, projectNode);
        // Keep the children sorted by display name.
        Arrays.sort(projects, Comparator.comparing(DBNResource::getNodeName));
        // Fix: honor the reflect flag (it was previously ignored), so that the
        // constructor's bulk initialization does not broadcast ADD events.
        if (reflect) {
            model.fireNodeEvent(new DBNEvent(this, DBNEvent.Action.ADD, projectNode));
        }
        return projectNode;
    }

    /** Removes the project's node (if present), fires REMOVE, then disposes it. */
    public void removeProject(DBPProject project)
    {
        for (int i = 0; i < projects.length; i++) {
            DBNProject projectNode = projects[i];
            if (projectNode.getProject() == project) {
                projects = ArrayUtils.remove(DBNProject.class, projects, i);
                model.fireNodeEvent(new DBNEvent(this, DBNEvent.Action.REMOVE, projectNode));
                projectNode.dispose(true);
                break;
            }
        }
    }

    @Override
    public void handleProjectAdd(DBPProject project) {
        addProject(project, true);
    }

    @Override
    public void handleProjectRemove(DBPProject project) {
        removeProject(project);
    }

    @Override
    public void handleActiveProjectChange(DBPProject oldValue, DBPProject newValue)
    {
        // Refresh both the newly active and the previously active project nodes.
        DBNProject projectNode = getProjectNode(newValue);
        DBNProject oldProjectNode = getProjectNode(oldValue);
        if (projectNode != null) {
            model.fireNodeEvent(new DBNEvent(this, DBNEvent.Action.UPDATE, projectNode));
        }
        if (oldProjectNode != null) {
            model.fireNodeEvent(new DBNEvent(this, DBNEvent.Action.UPDATE, oldProjectNode));
        }
    }
}
|
apache-2.0
|
sapka12/mosaicmaker
|
src/test/java/hu/sapka12/mozaik/TilesTest.java
|
1356
|
package hu.sapka12.mozaik;
import hu.sapka12.mozaik.maker.bufferedimage.Tile;
import hu.sapka12.mozaik.tile.Tiles;
import java.awt.image.BufferedImage;
import org.easymock.EasyMock;
import org.testng.Assert;
import org.testng.annotations.Test;
public class TilesTest {
    // NOTE(review): disabled test — the EasyMock mock is never replay()ed, so
    // tile.get() would fail in record state if enabled; confirm before enabling.
    @Test(enabled = false)
    public void testbuildImage() throws CloneNotSupportedException {
        BufferedImage input = new BufferedImage(10, 21, BufferedImage.TYPE_INT_RGB);
        Tile tile = EasyMock.createMock(Tile.class);
        EasyMock.expect(tile.get()).andStubReturn(input);
        Tile[][] tileArray = new Tile[][]{
            new Tile[]{tile}
        };
        Tiles tiles = new Tiles(tileArray);
        BufferedImage outImage = tiles.buildImage();
        // Fix: the comparison result was previously discarded, so the test
        // could never fail. Route it through a real TestNG assertion.
        Assert.assertTrue(imagesEqual(outImage, input));
    }

    /**
     * Pixel-by-pixel comparison: dimensions first, then every RGB value.
     * (Renamed from "assertEquals": it only computes a result, it never asserts.)
     */
    private boolean imagesEqual(BufferedImage actual, BufferedImage expected) {
        if (actual.getWidth() != expected.getWidth()) {
            return false;
        }
        if (actual.getHeight() != expected.getHeight()) {
            return false;
        }
        for (int x = 0; x < expected.getWidth(); x++) {
            for (int y = 0; y < expected.getHeight(); y++) {
                if (expected.getRGB(x, y) != actual.getRGB(x, y)) {
                    return false;
                }
            }
        }
        return true;
    }
}
|
apache-2.0
|
janekdb/ntropa
|
presentation/sao/src/tests/org/ntropa/runtime/sao/AbstactServerActiveObjectTest.java
|
12334
|
/*
* Copyright 2001-2006 LEARNING INFORMATION SYSTEMS PTY LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* AbstactServerActiveObjectTest.java
*
* Created on 10 January 2002, 14:47
*/
package tests.org.ntropa.runtime.sao;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.ntropa.runtime.sao.AbstractElement;
import org.ntropa.runtime.sao.AbstractServerActiveObject;
import org.ntropa.runtime.sao.BaseServerActiveObject;
import org.ntropa.runtime.sao.InvocationContext;
import org.ntropa.runtime.sao.NoSuchAbstractElementException;
import org.ntropa.runtime.sao.StandardElement;
import org.ntropa.runtime.sao.StandardFragment;
import org.ntropa.runtime.sao.StandardInvocationContext;
import com.mockobjects.servlet.MockHttpServletRequest;
/**
*
* @author jdb
* @version $Id: AbstactServerActiveObjectTest.java,v 1.7 2003/03/24 16:58:35 jdb Exp $
*/
public class AbstactServerActiveObjectTest extends TestCase {
/** Creates a named test case instance (JUnit 3 style constructor). */
public AbstactServerActiveObjectTest ( String testName ) {
super(testName);
}
/* Comments copied from junit.framework.TestSuite. */
/**
* A <code>TestSuite</code> is a <code>Composite</code> of Tests.
* It runs a collection of test cases.
*
* This constructor creates a suite with all the methods
* starting with "test" that take no arguments.
*/
public static Test suite () {
// Reflectively collects every public no-arg "test*" method of this class.
TestSuite suite = new TestSuite ( AbstactServerActiveObjectTest.class );
return suite;
}
/**
* Test understanding of ThreadLocal behaviour.
*
* Create a sub-type of AbstactServerActiveObject which tests itself.
*
* Create object with references to:
*
* i) A list to note its values in
* ii) A control variable starting at 2
*
* Thread 1 Thread 2
*
* Set icb to x Set icb to y
*
* Set "key" = 1 Set "key" = 2
*
* decrement control and wait until 0
*
* Add id of icb to Add id of icb to
* list list
*
* Add value of key Add value of key
* to list to list
*/
public void testThreadLocalAssumptions () throws InterruptedException {
// Gate that releases both workers only after each has stored its own
// InvocationContext and thread-local value on the shared SAO.
MyController c = new MyController () ;
AbstractServerActiveObject sao = new BaseServerActiveObject () ;
// Slot 0 is written by thread 1, slot 1 by thread 2.
String icbValue [] = new String [ 2 ] ;
String mapValue [] = new String [ 2 ] ;
Thread thread_1 = new Thread (
new ThreadLocalTask ( 0, c, sao, icbValue, mapValue )
) ;
thread_1.start () ;
Thread thread_2 = new Thread (
new ThreadLocalTask ( 1, c, sao, icbValue, mapValue )
) ;
thread_2.start () ;
thread_1.join () ;
thread_2.join () ;
//System.out.println("icbValue:\n" + Arrays.asList ( icbValue ) );
//System.out.println("mapValue:\n" + Arrays.asList ( mapValue) ) ;
// Each thread must have observed its own InvocationContext instance...
if ( icbValue [ 0 ] . equals ( icbValue [ 1 ] ) )
fail ( "The cached InvocationContexts were not different" ) ;
// ...and its own thread-local entry under the shared key.
assertEquals ( "The first map value was wrong", "value-0", mapValue [ 0 ] ) ;
assertEquals ( "The second map value was wrong", "value-1", mapValue [ 1 ] ) ;
}
/*
* There is most probably a computer science semaphore
* or monitor type I should use here.
*/
/**
 * Tiny countdown gate: each worker thread calls decrement() once and then
 * spins until the shared count reaches zero, so both proceed together.
 */
public class MyController {
    int remaining = 2;

    public synchronized void decrement() {
        remaining = remaining - 1;
    }

    public synchronized int getValue() {
        return remaining;
    }
}
// Worker used by testThreadLocalAssumptions: stores per-thread state on the
// shared SAO, waits for its sibling at the gate, then records what it sees.
public class ThreadLocalTask implements Runnable {
int id ;
MyController c ;
AbstractServerActiveObject sao ;
String [] icbValue ;
String [] mapValue ;
public ThreadLocalTask (
int id,
MyController c,
AbstractServerActiveObject sao,
String [] icbValue,
String [] mapValue ) {
this.id = id ;
this.c = c ;
this.sao = sao ;
this.icbValue = icbValue ;
this.mapValue = mapValue ;
}
public void run () {
// Both writes go to the shared SAO but must land in thread-local storage.
sao.setInvocationContext ( new StandardInvocationContext () ) ;
sao.setThreadLocalValue ( "key", "value-" + id ) ;
c.decrement () ;
// Busy-wait until the sibling thread has also written its state.
while ( c.getValue () != 0 )
;
icbValue [ id ] = sao.getInvocationContext ().toString () ;
mapValue [ id ] = sao.getThreadLocalValue ( "key" ).toString () ;
}
}
public void testGetChild () {
AbstractServerActiveObject sah = new AbstractServerActiveObject () {
public void controlSelf ( InvocationContext icb ) throws Exception {} ;
public void controlChildren ( InvocationContext icb ) throws Exception {} ;
public void render ( InvocationContext icb ) throws Exception {} ;
} ;
AbstractElement ae = null ;
try {
ae = sah.getChild ( "no-such-element" ) ;
fail ( "getChild failed to throw NoSuchAbstractElementException" ) ;
}
catch ( NoSuchAbstractElementException e ) {}
AbstractElement element = new StandardElement () ;
element.setName ( "actual-element" ) ;
sah.addChild ( element ) ;
ae = sah.getChild ( "actual-element" ) ;
}
public void testChildExists () {
AbstractServerActiveObject sao = new BaseServerActiveObject () ;
if (sao.childExists ("randomname")) {
fail ("SAO says child exists, but doesn't!") ;
}
AbstractElement element = new StandardElement () ;
element.setName ( "actual-element" ) ;
sao.addChild ( element ) ;
if (! sao.childExists ("actual-element")) {
fail ("SAO says child doesn't exist, but does!") ;
}
}
/**
* This test case flushed a couple of bug in code apparently too simple
* to get wrong and it highlighted some dubious equality by value tests
* in AbstractServerActiveObject.addChild.
*/
public void testGetDataKey () throws Exception {
MySAO sao = new MySAO () ;
InvocationContext icb = new StandardInvocationContext () ;
icb.enableControlPhase () ;
MockHttpServletRequest request = new MyMockHttpServletRequest () ;
//out.setExpectedData ( expected ) ;
icb.setHttpServletRequest ( request ) ;
/* Kick it off */
sao.controlSelf ( icb ) ;
//out.verify () ;
assertEquals (
"The data key was correct when there was no parent",
"index.html#",
sao.getDataKeyPublic ()
) ;
AbstractElement el_b = new StandardElement () ;
el_b.setName ( "B" ) ;
el_b.addChild ( sao ) ;
assertEquals (
"The data key was correct when there was one parent",
"index.html#-0",
sao.getDataKeyPublic ()
) ;
AbstractServerActiveObject grandParent = new BaseServerActiveObject () ;
grandParent.addChild ( el_b ) ;
AbstractElement el_a = new StandardElement () ;
el_a.setName ( "A" ) ;
grandParent.addChild ( el_a ) ;
AbstractElement el_c = new StandardElement () ;
el_c.setName ( "C" ) ;
grandParent.addChild ( el_c ) ;
assertEquals (
"The data key was correct when there was a grandparent",
"index.html#-0-1",
sao.getDataKeyPublic ()
) ;
AbstractElement grandGrandParent = new StandardElement () ;
grandGrandParent.setName ( "D" ) ;
grandGrandParent.addChild ( new StandardFragment () ) ;
grandGrandParent.addChild ( new BaseServerActiveObject () ) ;
grandGrandParent.addChild ( new StandardFragment () ) ;
grandGrandParent.addChild ( grandParent ) ;
grandGrandParent.addChild ( new BaseServerActiveObject () ) ;
assertEquals (
"The data key was correct when there was a grandparent",
"index.html#-0-1-3",
sao.getDataKeyPublic ()
) ;
}
private class MySAO extends BaseServerActiveObject {
public void controlSelf ( InvocationContext icb ) {
setInvocationContext ( icb ) ;
}
public String getDataKeyPublic () {
return getDataKey () ;
}
}
private class MyMockHttpServletRequest extends MockHttpServletRequest {
public String getServletPath () {
return "index.html" ;
}
}
public void testRecycle () {
/*
* Check recycle walks the tree of saos depth first.
* This is because breadth first makes less sense if
* a child were to use a service of a parent. Currently
* there are no such server-client relationships
*/
/*
* a
*/
List recordOfRecycling = new LinkedList () ;
MyRecycler root = new MyRecycler ( "a", recordOfRecycling ) ;
/*
* a
* / \
* aa ab
*/
AbstractElement element = new StandardElement () ;
element.setName ( "element" ) ;
root.addChild ( element ) ;
MyRecycler aa = new MyRecycler ( "aa", recordOfRecycling ) ;
element.addChild ( aa ) ;
MyRecycler ab = new MyRecycler ( "ab", recordOfRecycling ) ;
element.addChild ( ab ) ;
/*
* a
* / \
* aa ab
* aaa aab aba abb
*/
element = new StandardElement () ;
element.setName ( "element" ) ;
aa.addChild ( element ) ;
MyRecycler aaa = new MyRecycler ( "aaa", recordOfRecycling ) ;
element.addChild ( aaa ) ;
MyRecycler aab = new MyRecycler ( "aab", recordOfRecycling ) ;
element.addChild ( aab ) ;
element = new StandardElement () ;
element.setName ( "element" ) ;
ab.addChild ( element ) ;
MyRecycler aba = new MyRecycler ( "aba", recordOfRecycling ) ;
element.addChild ( aba ) ;
MyRecycler abb = new MyRecycler ( "abb", recordOfRecycling ) ;
element.addChild ( abb ) ;
root.recycle () ;
assertEquals (
"recycle was invoked depth first",
Arrays.asList ( new String [] { "aaa", "aab", "aa", "aba", "abb", "ab", "a" } ),
recordOfRecycling
) ;
}
private static class MyRecycler extends BaseServerActiveObject {
String id ;
List recordOfRecycling ;
MyRecycler ( String id, List recordOfRecycling ) {
this.id = id ;
this.recordOfRecycling = recordOfRecycling ;
}
public void recycle () {
super.recycle () ;
recordOfRecycling.add ( id ) ;
}
}
}
|
apache-2.0
|
robinckanatzar/ror-recipes
|
recipes/db/schema.rb
|
2156
|
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170217191829) do
create_table "chefs", force: :cascade do |t|
t.string "chefname"
t.string "email"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "password_digest"
t.boolean "admin", default: false
end
create_table "comments", force: :cascade do |t|
t.text "description"
t.integer "chef_id"
t.integer "recipe_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "ingredients", force: :cascade do |t|
t.string "name"
end
create_table "likes", force: :cascade do |t|
t.boolean "like"
t.integer "chef_id"
t.integer "recipe_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "messages", force: :cascade do |t|
t.text "content"
t.integer "chef_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "recipe_ingredients", force: :cascade do |t|
t.integer "recipe_id"
t.integer "ingredient_id"
end
create_table "recipes", force: :cascade do |t|
t.string "name"
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "chef_id"
t.string "image"
end
end
|
apache-2.0
|
szegedi/dynalink
|
src/main/java/org/dynalang/dynalink/linker/TypeBasedGuardingDynamicLinker.java
|
3541
|
/*
Copyright 2009-2013 Attila Szegedi
Licensed under both the Apache License, Version 2.0 (the "Apache License")
and the BSD License (the "BSD License"), with licensee being free to
choose either of the two at their discretion.
You may not use this file except in compliance with either the Apache
License or the BSD License.
If you choose to use this file in compliance with the Apache License, the
following notice applies to you:
You may obtain a copy of the Apache License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
If you choose to use this file in compliance with the BSD License, the
following notice applies to you:
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.dynalang.dynalink.linker;
/**
* A guarding dynamic linker that can determine whether it can link the call site solely based on the type of the first
* argument at linking invocation time. (The first argument is usually the receiver class). Most language-specific
* linkers will fall into this category, as they recognize their native objects as Java objects of classes implementing
* a specific language-native interface or superclass. The linker mechanism can optimize the dispatch for these linkers.
*
* @author Attila Szegedi
*/
public interface TypeBasedGuardingDynamicLinker extends GuardingDynamicLinker {
/**
* Returns true if the linker can link an invocation where the first argument (receiver) is of the specified type.
*
* @param type the type to link
* @return true if the linker can link calls for the receiver type, or false otherwise.
*/
public boolean canLinkType(Class<?> type);
}
|
apache-2.0
|
rburgst/okhttp-digest
|
src/main/java/com/burgstaller/okhttp/digest/fromhttpclient/ParserCursor.java
|
2452
|
/*
* This file incorporates work covered by the following copyright and
* permission notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.burgstaller.okhttp.digest.fromhttpclient;
public class ParserCursor {
private final int lowerBound;
private final int upperBound;
private int pos;
public ParserCursor(int lowerBound, int upperBound) {
if(lowerBound < 0) {
throw new IndexOutOfBoundsException("Lower bound cannot be negative");
} else if(lowerBound > upperBound) {
throw new IndexOutOfBoundsException("Lower bound cannot be greater then upper bound");
} else {
this.lowerBound = lowerBound;
this.upperBound = upperBound;
this.pos = lowerBound;
}
}
public int getLowerBound() {
return this.lowerBound;
}
public int getUpperBound() {
return this.upperBound;
}
public int getPos() {
return this.pos;
}
public void updatePos(int pos) {
if(pos < this.lowerBound) {
throw new IndexOutOfBoundsException();
} else if(pos > this.upperBound) {
throw new IndexOutOfBoundsException();
} else {
this.pos = pos;
}
}
public boolean atEnd() {
return this.pos >= this.upperBound;
}
public String toString() {
StringBuilder buffer = new StringBuilder(16);
buffer.append('[');
buffer.append(this.lowerBound);
buffer.append('>');
buffer.append(this.pos);
buffer.append('>');
buffer.append(this.upperBound);
buffer.append(']');
return buffer.toString();
}
}
|
apache-2.0
|
rmap-project/share-client
|
src/main/java/info/rmapproject/cos/share/client/model/OtherPropertyType.java
|
1380
|
/*******************************************************************************
* Copyright 2016 Johns Hopkins University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This software was produced as part of the RMap Project (http://rmap-project.info),
* The RMap Project was funded by the Alfred P. Sloan Foundation and is a
* collaboration between Data Conservancy, Portico, and IEEE.
*******************************************************************************/
package info.rmapproject.cos.share.client.model;
/**
* The Enum OtherPropertyType.
*/
public enum OtherPropertyType {
/** DOI. */
DOI,
/** ISSN. */
ISSN,
/** EISSN. */
EISSN,
/** ISBN. */
ISBN,
/** EISBN. */
EISBN,
/** Relations. */
RELATIONS,
/** Links. */
LINKS,
/** Types. */
TYPES,
/** Identifiers. */
IDENTIFIERS,
/** Formats. */
FORMATS;
}
|
apache-2.0
|
sbower/kuali-rice-1
|
krms/framework/src/main/java/org/kuali/rice/krms/framework/engine/Context.java
|
994
|
package org.kuali.rice.krms.framework.engine;
import java.util.List;
import org.kuali.rice.krms.api.engine.ExecutionEnvironment;
import org.kuali.rice.krms.api.engine.TermResolver;
/**
* The context represents the area(s) of an organization's activity where a
* rule applies and where the terms used to create the rule are defined and relevant.
* An equivalent phrase often used is business domain. Rules only make sense in a
* particular context and because they must be evaluated against the information in
* that domain or context.
*
* <p>For example, rules that are specifically authored and
* that are meaningful in an application on a Research Proposal would be most
* unlikely to make sense or be relevant in the context of a Student Record even
* if the condition could be evaluated.
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*
*/
public interface Context {
void execute(ExecutionEnvironment environment);
List<TermResolver<?>> getTermResolvers();
}
|
apache-2.0
|
jgum/jgum
|
src/test/java/org/jcategory/category/AdHocCategorizationTutorialTest.java
|
1549
|
package org.jcategory.category;
import static java.util.Arrays.asList;
import static org.jcategory.category.Key.key;
import static org.junit.Assert.assertEquals;
import org.jcategory.JCategory;
import org.junit.Test;
public class AdHocCategorizationTutorialTest {
@Test
public void testSimpleHierarchy() {
//creating a simple hierarchy
Categorization<Category> mySimpleCategorization = new Categorization<>();
Category grandFather = new Category(mySimpleCategorization); //the root of the hierarchy
Category parent1 = new Category(asList(grandFather)); //parent1 inherits from grandFather
Category parent2 = new Category(asList(grandFather)); //parent2 also inherits from grandFather
Category child = new Category(asList(parent1, parent2)); //child inherits from both parent1 and parent2
//setting properties
Key p1 = key();
Key p2 = key();
grandFather.setProperty(p1, "x"); //setting property "p1" to "x" in grandFather
parent1.setProperty(p1, "y"); //overridden property "p1" as "y" in parent1
parent2.setProperty(p1, "z"); //overridden property "p1" as "z" in parent2
parent2.setProperty(p2, "x"); //setting property "p2" to "x" in parent2
//testing
assertEquals("y", child.getProperty(p1).get()); //"p1" property found in parent1
assertEquals("x", child.getProperty(p2).get()); //"p2" property found in parent2
//optionally registering the previous categorization in a JCategory context
JCategory context = new JCategory();
context.register("my-categorization", mySimpleCategorization);
}
}
|
apache-2.0
|
mebigfatguy/java-driver
|
driver-mapping/src/main/java/com/datastax/driver/mapping/MappingManager.java
|
12049
|
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.mapping;
import com.datastax.driver.core.*;
import com.datastax.driver.mapping.annotations.Accessor;
import com.datastax.driver.mapping.annotations.Table;
import com.datastax.driver.mapping.annotations.UDT;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Mapping manager from which to obtain entity mappers.
*/
public class MappingManager {
private static final Logger LOGGER = LoggerFactory.getLogger(MappingManager.class);
private final Session session;
final boolean isCassandraV1;
private volatile Map<Class<?>, Mapper<?>> mappers = Collections.emptyMap();
private volatile Map<Class<?>, MappedUDTCodec<?>> udtCodecs = Collections.emptyMap();
private volatile Map<Class<?>, Object> accessors = Collections.emptyMap();
/**
* Creates a new {@code MappingManager} using the provided {@code Session}.
* <p/>
* Note that this constructor forces the initialization of the session (see
* {@link #MappingManager(Session, ProtocolVersion)} if that is a problem for you).
*
* @param session the {@code Session} to use.
*/
public MappingManager(Session session) {
this(session, getProtocolVersion(session));
}
private static ProtocolVersion getProtocolVersion(Session session) {
session.init();
return session.getCluster().getConfiguration().getProtocolOptions().getProtocolVersion();
}
/**
* Creates a new {@code MappingManager} using the provided {@code Session}.
* <p/>
* This constructor is only provided for backward compatibility: before 2.1.7, {@code MappingManager} could be
* built from an uninitialized session; since 2.1.7, the mapper needs to know the active protocol version to
* adapt its internal requests, so {@link #MappingManager(Session)} will now initialize the session if needed.
* If you rely on the session not being initialized, use this constructor and provide the version manually.
*
* @param session the {@code Session} to use.
* @param protocolVersion the protocol version that will be used with this session.
* @since 2.1.7
*/
public MappingManager(Session session, ProtocolVersion protocolVersion) {
this.session = session;
// This is not strictly correct because we could connect to C* 2.0 with the v1 protocol.
// But mappers need to make a decision early so that generated queries are compatible, and we don't know in advance
// which nodes might join the cluster later.
// At least if protocol >=2 we know there won't be any 1.2 nodes ever.
this.isCassandraV1 = (protocolVersion == ProtocolVersion.V1);
session.getCluster().register(new SchemaChangeListenerBase() {
@Override
public void onTableRemoved(TableMetadata table) {
synchronized (mappers) {
Iterator<Mapper<?>> it = mappers.values().iterator();
while (it.hasNext()) {
Mapper<?> mapper = it.next();
if (mapper.getTableMetadata().equals(table)) {
LOGGER.error("Table {} has been removed; existing mappers for @Entity annotated {} will not work anymore", table.getName(), mapper.getMappedClass());
it.remove();
}
}
}
}
@Override
public void onTableChanged(TableMetadata current, TableMetadata previous) {
synchronized (mappers) {
Iterator<Mapper<?>> it = mappers.values().iterator();
while (it.hasNext()) {
Mapper<?> mapper = it.next();
if (mapper.getTableMetadata().equals(previous)) {
LOGGER.warn("Table {} has been altered; existing mappers for @Entity annotated {} might not work properly anymore",
previous.getName(), mapper.getMappedClass());
it.remove();
}
}
}
}
@Override
public void onUserTypeRemoved(UserType type) {
synchronized (udtCodecs) {
Iterator<MappedUDTCodec<?>> it = udtCodecs.values().iterator();
while (it.hasNext()) {
MappedUDTCodec<?> codec = it.next();
if (type.equals(codec.getCqlType())) {
LOGGER.error("User type {} has been removed; existing mappers for @UDT annotated {} will not work anymore",
type, codec.getUdtClass());
it.remove();
}
}
}
}
@Override
public void onUserTypeChanged(UserType current, UserType previous) {
synchronized (udtCodecs) {
Set<Class<?>> udtClasses = new HashSet<Class<?>>();
Iterator<MappedUDTCodec<?>> it = udtCodecs.values().iterator();
while (it.hasNext()) {
MappedUDTCodec<?> codec = it.next();
if (previous.equals(codec.getCqlType())) {
LOGGER.warn("User type {} has been altered; existing mappers for @UDT annotated {} might not work properly anymore",
previous, codec.getUdtClass());
udtClasses.add(codec.getUdtClass());
it.remove();
}
}
for (Class<?> udtClass : udtClasses) {
// try to register an updated version of the previous codec
try {
getUDTCodec(udtClass);
} catch (Exception e) {
LOGGER.error("Could not update mapping for @UDT annotated " + udtClass, e);
}
}
}
}
});
}
/**
* The underlying {@code Session} used by this manager.
* <p/>
* Note that you can get obtain the {@code Cluster} object corresponding
* to that session using {@code getSession().getCluster()}.
* <p/>
* It is inadvisable to close the returned Session while this manager and
* its mappers are in use.
*
* @return the underlying session used by this manager.
*/
public Session getSession() {
return session;
}
/**
* Creates a {@code Mapper} for the provided class (that must be annotated by a
* {@link Table} annotation).
* <p/>
* The {@code MappingManager} only ever keeps one Mapper for each class, and so calling this
* method multiple times on the same class will always return the same object.
* <p/>
* If the type of any field in the class is an {@link UDT}-annotated classes, a codec for that
* class will automatically be created and registered with the underlying {@code Cluster}.
* This works recursively with UDTs nested in other UDTs or in collections.
*
* @param <T> the type of the class to map.
* @param klass the (annotated) class for which to return the mapper.
* @return the {@code Mapper} object for class {@code klass}.
*/
public <T> Mapper<T> mapper(Class<T> klass) {
return getMapper(klass);
}
/**
* Creates a {@code TypeCodec} for the provided class (that must be annotated by
* a {@link UDT} annotation).
* <p/>
* This method also registers the codec against the underlying {@code Cluster}.
* In addition, the codecs for any nested UDTs will also be created and registered.
* <p/>
* You don't need to call this method explicitly if you already call {@link #mapper(Class)}
* for a class that references this UDT class (creating a mapper will automatically
* process all UDTs that it uses).
*
* @param <T> the type of the class to map.
* @param klass the (annotated) class for which to return the codec.
* @return the codec that maps the provided class to the corresponding user-defined type.
*/
public <T> TypeCodec<T> udtCodec(Class<T> klass) {
return getUDTCodec(klass);
}
/**
* Creates an accessor object based on the provided interface (that must be annotated by
* a {@link Accessor} annotation).
* <p/>
* The {@code MappingManager} only ever keep one Accessor for each class, and so calling this
* method multiple time on the same class will always return the same object.
*
* @param <T> the type of the accessor class.
* @param klass the (annotated) class for which to create an accessor object.
* @return the accessor object for class {@code klass}.
*/
public <T> T createAccessor(Class<T> klass) {
return getAccessor(klass);
}
@SuppressWarnings("unchecked")
private <T> Mapper<T> getMapper(Class<T> klass) {
Mapper<T> mapper = (Mapper<T>) mappers.get(klass);
if (mapper == null) {
synchronized (mappers) {
mapper = (Mapper<T>) mappers.get(klass);
if (mapper == null) {
EntityMapper<T> entityMapper = AnnotationParser.parseEntity(klass, this);
mapper = new Mapper<T>(this, klass, entityMapper);
Map<Class<?>, Mapper<?>> newMappers = new HashMap<Class<?>, Mapper<?>>(mappers);
newMappers.put(klass, mapper);
mappers = newMappers;
}
}
}
return mapper;
}
@SuppressWarnings("unchecked")
<T> TypeCodec<T> getUDTCodec(Class<T> mappedClass) {
MappedUDTCodec<T> codec = (MappedUDTCodec<T>) udtCodecs.get(mappedClass);
if (codec == null) {
synchronized (udtCodecs) {
codec = (MappedUDTCodec<T>) udtCodecs.get(mappedClass);
if (codec == null) {
codec = AnnotationParser.parseUDT(mappedClass, this);
session.getCluster().getConfiguration().getCodecRegistry().register(codec);
HashMap<Class<?>, MappedUDTCodec<?>> newCodecs = new HashMap<Class<?>, MappedUDTCodec<?>>(udtCodecs);
newCodecs.put(mappedClass, codec);
udtCodecs = newCodecs;
}
}
}
return codec;
}
@SuppressWarnings("unchecked")
private <T> T getAccessor(Class<T> klass) {
T accessor = (T) accessors.get(klass);
if (accessor == null) {
synchronized (accessors) {
accessor = (T) accessors.get(klass);
if (accessor == null) {
AccessorMapper<T> mapper = AnnotationParser.parseAccessor(klass, this);
mapper.prepare(this);
accessor = mapper.createProxy();
Map<Class<?>, Object> newAccessors = new HashMap<Class<?>, Object>(accessors);
newAccessors.put(klass, accessor);
accessors = newAccessors;
}
}
}
return accessor;
}
}
|
apache-2.0
|
D3DeFi/vmcli
|
lib/modules/modify.py
|
5618
|
from pyVmomi import vim
from lib.modules import BaseCommands
from lib.tools import normalize_memory
from lib.tools.argparser import args
from lib.exceptions import VmCLIException
import lib.constants as c
class ModifyCommands(BaseCommands):
"""modify VMware objects resources or configuration."""
def __init__(self, *args, **kwargs):
super(ModifyCommands, self).__init__(*args, **kwargs)
@args('--name', required=True, help='name of a object to modify')
def execute(self, args):
if args.mem or args.cpu:
self.change_hw_resource(args.name, args.mem, args.cpu)
elif args.net:
self.change_network(args.name, args.net, args.dev)
elif args.vHWversion:
self.change_vHWversion(args.name, args.vHWversion)
else:
raise VmCLIException('Too few arguments. Aborting...')
@args('--mem', help='memory to set for a vm in megabytes')
@args('--cpu', help='cpu count to set for a vm', type=int)
def change_hw_resource(self, name, mem=None, cpu=None):
"""Changes hardware resource of a specific VM."""
if not mem and not cpu:
raise VmCLIException('Neither memory or cpu specified! Cannot run hardware reconfiguration.')
vm = self.get_vm_obj(name, fail_missing=True)
config_spec = vim.vm.ConfigSpec()
if mem:
mem = normalize_memory(mem)
self.logger.info("Increasing memory to {} megabytes...".format(mem))
config_spec.memoryMB = mem
if cpu:
if cpu < c.VM_MIN_CPU or cpu > c.VM_MAX_CPU:
raise VmCLIException('CPU count must be between {}-{}'.format(c.VM_MIN_CPU, c.VM_MAX_CPU))
else:
self.logger.info("Increasing cpu count to {} cores...".format(cpu))
config_spec.numCPUs = cpu
task = vm.ReconfigVM_Task(config_spec)
self.wait_for_tasks([task])
@args('--net', help='network to attach to a network device')
@args('--dev', type=int, default=1, help='serial number of device to modify (e.g. 1 == eth0, 2 == eth1)')
def change_network(self, name, net, dev):
"""Changes network associated with a specifc VM's network interface."""
vm = self.get_vm_obj(name, fail_missing=True)
# locate network, which should be assigned to device
network = self.get_obj('network', net)
if not network:
raise VmCLIException('Unable to find provided network {}! Aborting...'.format(net))
# search for Ethernet devices
self.logger.info('Searching for ethernet devices attached to vm...')
nic_counter = 1
for device in vm.config.hardware.device:
# Search for a specific network interfaces
if isinstance(device, vim.vm.device.VirtualEthernetCard):
if nic_counter != dev:
nic_counter += 1
continue
if isinstance(network, vim.dvs.DistributedVirtualPortgroup):
# specify backing that connects device to a DVS switch portgroup
dvs_port_conn = vim.dvs.PortConnection(
portgroupKey=network.key, switchUuid=network.config.distributedVirtualSwitch.uuid)
backing = vim.vm.device.VirtualEthernetCard.DistributedVirtualPortBackingInfo(port=dvs_port_conn)
else:
# expect simple vim.Network if DistributedVirtualPortgroup was not used
backing = vim.vm.device.VirtualEthernetCard.NetworkBackingInfo(
useAutoDetect=False, network=network, deviceName=net)
device.backing = backing
# specify power status for nic
device.connectable = vim.vm.device.VirtualDevice.ConnectInfo(
connected=True, startConnected=True, allowGuestControl=True)
# build object with change specifications
nicspec = vim.vm.device.VirtualDeviceSpec(device=device)
nicspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
config_spec = vim.vm.ConfigSpec(deviceChange=[nicspec])
self.logger.info("Attaching network {} to {}. network device on VM...".format(net, dev))
task = vm.ReconfigVM_Task(config_spec)
self.wait_for_tasks([task])
return
raise VmCLIException('Unable to find ethernet device on a specified target!')
@args('--vHWversion', help='VM hardware version number to assign to the VM or \'latest\'', metavar='VER')
def change_vHWversion(self, name, vHWversion=None):
"""Changes VM HW version. If version is None, then VM is set to the latest version."""
vm = self.get_vm_obj(name, fail_missing=True)
if vHWversion == 'latest':
version = None # None will default to latest so we don't need to search for it
else:
try:
version = 'vmx-{:02d}'.format(vHWversion)
except ValueError:
raise VmCLIException('VM version must be integer or \'latest\'! Aborting...')
if vm.runtime.powerState != 'poweredOff':
raise VmCLIException('VM hardware version change cannot be performed on running VM! Aborting...')
self.logger.info('Updating VM hardware version...')
try:
task = vm.UpgradeVM_Task(version=version)
self.wait_for_tasks([task])
except vim.fault.AlreadyUpgraded:
pass
BaseCommands.register('modify', ModifyCommands)
|
apache-2.0
|
Arcnor/bladecoder-adventure-engine
|
adventure-composer/src/main/java/com/bladecoder/engineeditor/setup/ProjectFile.java
|
1982
|
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.bladecoder.engineeditor.setup;
/**
* A file in a {@link Project}, the resourceName specifies the location
* of the template file, the outputName specifies the final name of the
* file relative to its project, the isTemplate field specifies if
* values need to be replaced in this file or not.
* @author badlogic
*
*/
public class ProjectFile {
/** the name of the template resource, relative to resourceLoc **/
public String resourceName;
/** the name of the output file, including directories, relative to the project dir **/
public String outputName;
/** whether to replace values in this file **/
public boolean isTemplate;
/** If the resource is from resource directory, or working dir **/
public String resourceLoc = "/projectTmpl/";
public ProjectFile(String name) {
this.resourceName = name;
this.outputName = name;
this.isTemplate = true;
}
public ProjectFile(String name, boolean isTemplate) {
this.resourceName = name;
this.outputName = name;
this.isTemplate = isTemplate;
}
public ProjectFile(String resourceName, String outputName, boolean isTemplate) {
this.resourceName = resourceName;
this.outputName = outputName;
this.isTemplate = isTemplate;
}
}
|
apache-2.0
|
fanghon/antiplag
|
src/imghash/plag/edu/ImageSim.java
|
4791
|
package imghash.plag.edu;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.github.kilianB.hash.Hash;
import com.github.kilianB.hashAlgorithms.HashingAlgorithm;
import com.github.kilianB.hashAlgorithms.PerceptiveHash;
import com.github.kilianB.hashAlgorithms.RotPHash;
import data.plag.edu.SimData;
import utils.edu.FileIO;
public class ImageSim {
    String dic = null;                                   // directory holding the files to compare
    // Similarity threshold; setParams() stores a 0-100 percentage here.
    // NOTE(review): the initial value 0.8f looks inconsistent with the 0-100 scale,
    // but it is only reachable if setParams() is bypassed — left unchanged.
    float threshold = 0.8f;
    List<File> filels = new ArrayList<File>();           // files selected for comparison
    List<SimData> listsd = new ArrayList<SimData>();     // pairwise comparison results

    /**
     * Recursively collects every file under {@code file} whose extension is
     * accepted by {@link Fileter}, appending them to {@link #filels}.
     */
    public void explore(File file) {
        if (file != null && file.isDirectory()) {
            File[] files = file.listFiles(new Fileter());
            if (files == null) {
                return;                                  // listFiles() returns null on I/O error
            }
            for (File tempfile : files) {
                if (tempfile.isDirectory()) {
                    explore(tempfile);                   // recurse into subdirectories
                } else {
                    filels.add(tempfile);
                }
            }
        }
    }

    /** File filter accepting common image extensions plus directories (for recursion). */
    class Fileter implements FileFilter {
        @Override
        public boolean accept(File arg0) {
            String fn = arg0.getName().toLowerCase();
            return fn.endsWith(".png")
                    || fn.endsWith(".jpg")
                    || fn.endsWith(".jpeg")
                    || fn.endsWith(".gif")
                    || fn.endsWith(".bmp")
                    || fn.endsWith(".tiff")
                    || arg0.isDirectory();
        }
    }

    /**
     * Compares every pair of collected image files by perceptual hash and records
     * pairs whose similarity exceeds {@code threshold} (0-100) in {@link #listsd}.
     */
    void ImageSimFiles(float threshold) {
        List<Hash> listhashs = new ArrayList<Hash>();
        HashingAlgorithm hasher = new PerceptiveHash(128);   // 128-bit perceptual hash
        // HashingAlgorithm hasher = new RotPHash(256);      // alternative: rotation-tolerant hash

        // Hash each file. BUG FIX: the original `continue`d on failure without
        // adding anything, shifting every later index so listhashs.get(i) no
        // longer matched filels.get(i) (mispaired results or
        // IndexOutOfBoundsException). A null placeholder keeps the lists aligned.
        for (int i = 0; i < filels.size(); i++) {
            File file = filels.get(i);
            Hash hash = null;
            try {
                hash = hasher.hash(file);
            } catch (Exception e) {
                e.printStackTrace();
            }
            listhashs.add(hash);
        }
        for (int i = 0; i < filels.size(); i++) {
            Hash hash1 = listhashs.get(i);
            if (hash1 == null) {
                continue;                                // file could not be hashed
            }
            for (int j = i + 1; j < filels.size(); j++) {
                Hash hash2 = listhashs.get(j);
                if (hash2 == null) {
                    continue;
                }
                double score = hash1.normalizedHammingDistance(hash2);
                if (score < 1 - threshold / 100) {       // smaller score = more similar
                    SimData sim = new SimData();
                    sim.setSimilar((float) (1 - score) * 100);
                    sim.setFile1(filels.get(i).getName());
                    sim.setFile2(filels.get(j).getName());
                    listsd.add(sim);
                }
            }
        }
    }

    /** Prints the recorded pairs ordered from most to least similar. */
    void report() {
        Collections.sort(listsd);                        // ascending by similarity
        for (int i = listsd.size() - 1; i >= 0; i--) {   // walk backwards => descending output
            System.out.println(listsd.get(i).getSimilar() + " " + listsd.get(i).getFile1()
                    + " " + listsd.get(i).getFile2());
        }
    }

    /**
     * Parses command-line arguments: args[0] is the directory, args[1] the
     * 0-100 similarity threshold. Returns 0 on success, -1 on failure.
     */
    int setParams(String[] args) {
        int res = 0;
        if (args.length < 2) {
            System.out.println("usage:" + "java -jar ImageSim.jar dic threshold");
            return -1;
        }
        this.dic = args[0];
        File dic = new File(this.dic);
        if (!dic.isDirectory()) {
            System.out.println("dic is not exsit!");
            return -1;
        }
        try {
            this.threshold = Float.valueOf(args[1]);
            if (this.threshold < 0 || this.threshold > 100) {
                System.out.println("threshold is outof 0-100");
                return -1;
            }
        } catch (NumberFormatException e) {
            System.out.println("threshold is 0-100 number");
            return -1;
        }
        return res;
    }

    /**
     * Entry point: collect images, compare them, print and save the results.
     *
     * @param args [0] directory to scan, [1] similarity threshold (0-100)
     */
    public static void main(String[] args) {
        try {
            ImageSim testsc = new ImageSim();
            int res = testsc.setParams(args);
            if (res >= 0) {
                long st = System.currentTimeMillis();
                File file = new File(testsc.dic);
                testsc.explore(file);                    // collect image files
                testsc.ImageSimFiles(testsc.threshold);  // hash and compare
                testsc.report();                         // print results, most similar first
                File outfile = new File("out.txt");
                FileIO.saveFile(outfile, testsc.listsd, 2, "from fh"); // results sorted descending
                System.out.println("handle documents:" + testsc.filels.size());
                System.out.println("time:" + (System.currentTimeMillis() - st));
            } else {
                System.out.println("²ÎÊý´íÎó");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
apache-2.0
|
sonymoon/algorithm
|
src/main/java/com/bailei/study/jzoffer/interview2/Singleton2.java
|
499
|
package com.bailei.study.jzoffer.interview2;
/**
* Created by bailei on 16/12/3.
* 多线程考虑,但效率不高
*/
public class Singleton2 {
    private static Object lock = new Object();
    // volatile is required for safe double-checked locking: it prevents another
    // thread from observing a partially constructed instance.
    private static volatile Singleton2 instance;

    private Singleton2() {
    }

    /**
     * Returns the lazily created singleton instance.
     * <p>
     * Uses double-checked locking so the common path (instance already created)
     * skips the lock entirely — the original version synchronized on every call,
     * which its own comment noted was inefficient.
     *
     * @return the unique {@code Singleton2} instance, never {@code null}
     */
    public static Singleton2 getInstance() {
        if (instance == null) {                      // fast path: no locking once created
            synchronized (Singleton2.lock) {
                if (instance == null) {              // re-check under the lock
                    instance = new Singleton2();
                }
            }
        }
        return instance;
    }
}
|
apache-2.0
|
pebble2015/cpoi
|
src/org/apache/poi/hssf/record/DateWindow1904Record.cpp
|
2803
|
// Generated from /POI/java/org/apache/poi/hssf/record/DateWindow1904Record.java
#include <org/apache/poi/hssf/record/DateWindow1904Record.hpp>
#include <java/lang/Integer.hpp>
#include <java/lang/NullPointerException.hpp>
#include <java/lang/String.hpp>
#include <java/lang/StringBuffer.hpp>
#include <org/apache/poi/hssf/record/RecordInputStream.hpp>
#include <org/apache/poi/util/LittleEndianOutput.hpp>
#include <Array.hpp>
// Null-pointer check helper emitted by the Java->C++ transpiler: mirrors the
// JVM's implicit null check by throwing java::lang::NullPointerException
// before any member access through the pointer.
template<typename T>
static T* npc(T* t)
{
if(!t) throw new ::java::lang::NullPointerException();
return t;
}
// Transpiler-generated constructor plumbing: the default_init_tag overload runs
// static initialization (clinit) without executing the Java constructor body;
// the public constructors delegate to it and then invoke the matching ctor().
poi::hssf::record::DateWindow1904Record::DateWindow1904Record(const ::default_init_tag&)
: super(*static_cast< ::default_init_tag* >(0))
{
clinit();
}
// No-arg constructor, equivalent to `new DateWindow1904Record()` in Java.
poi::hssf::record::DateWindow1904Record::DateWindow1904Record()
: DateWindow1904Record(*static_cast< ::default_init_tag* >(0))
{
ctor();
}
// Deserializing constructor: reads the record payload from the input stream.
poi::hssf::record::DateWindow1904Record::DateWindow1904Record(RecordInputStream* in)
: DateWindow1904Record(*static_cast< ::default_init_tag* >(0))
{
ctor(in);
}
// Out-of-class definition required for the class's constexpr sid constant.
constexpr int16_t poi::hssf::record::DateWindow1904Record::sid;
// Java constructor body for the no-arg constructor.
void poi::hssf::record::DateWindow1904Record::ctor()
{
super::ctor();
}
// Java constructor body for the stream constructor: reads the single
// 2-byte windowing field from the record input stream.
void poi::hssf::record::DateWindow1904Record::ctor(RecordInputStream* in)
{
super::ctor();
field_1_window = npc(in)->readShort();
}
// Sets the record's only field: the 1904 date-windowing value (raw short).
void poi::hssf::record::DateWindow1904Record::setWindowing(int16_t window)
{
field_1_window = window;
}
// Returns the 1904 date-windowing value.
int16_t poi::hssf::record::DateWindow1904Record::getWindowing()
{
return field_1_window;
}
// Debug representation mirroring POI's Java toString(): prints the flag in hex.
java::lang::String* poi::hssf::record::DateWindow1904Record::toString()
{
auto buffer = new ::java::lang::StringBuffer();
npc(buffer)->append(u"[1904]\n"_j);
npc(npc(npc(buffer)->append(u"    .is1904         = "_j))->append(::java::lang::Integer::toHexString(getWindowing())))->append(u"\n"_j);
npc(buffer)->append(u"[/1904]\n"_j);
return npc(buffer)->toString();
}
// Writes the record payload: a single little-endian short.
void poi::hssf::record::DateWindow1904Record::serialize(::poi::util::LittleEndianOutput* out)
{
npc(out)->writeShort(getWindowing());
}
// Payload size in bytes (one short).
int32_t poi::hssf::record::DateWindow1904Record::getDataSize()
{
return 2;
}
// BIFF record type id for this record.
int16_t poi::hssf::record::DateWindow1904Record::getSid()
{
return sid;
}
// Runtime class-metadata plumbing emitted by the transpiler.
extern java::lang::Class *class_(const char16_t *c, int n);
// Lazily builds the java.lang.Class object for this type; the second argument
// (47) is the length of the fully qualified class name string.
java::lang::Class* poi::hssf::record::DateWindow1904Record::class_()
{
static ::java::lang::Class* c = ::class_(u"org.apache.poi.hssf.record.DateWindow1904Record", 47);
return c;
}
// Byte-array serialization overloads simply delegate to the base Record class.
int32_t poi::hssf::record::DateWindow1904Record::serialize(int32_t offset, ::int8_tArray* data)
{
return super::serialize(offset, data);
}
int8_tArray* poi::hssf::record::DateWindow1904Record::serialize()
{
return super::serialize();
}
// Virtual hook used by the generated runtime to fetch the class metadata.
java::lang::Class* poi::hssf::record::DateWindow1904Record::getClass0()
{
return class_();
}
|
apache-2.0
|
redhat-cip/horizon
|
horizon/static/framework/util/q/q.extensions.spec.js
|
3261
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Jasmine unit tests for the `horizon.framework.util.q.extensions` service,
// covering its `allSettled` and `booleanAsPromise` helpers.
(function () {
'use strict';
describe('horizon.framework.util.q.extensions', function () {
describe('allSettled', function() {
var service, $q, $scope;
// Helper: returns a promise that is already rejected with the string 'failed'.
var failedPromise = function() {
var deferred2 = $q.defer();
deferred2.reject('failed');
return deferred2.promise;
};
// Helper: returns a promise that is already resolved with the string 'passed'.
var passedPromise = function() {
var deferred1 = $q.defer();
deferred1.resolve('passed');
return deferred1.promise;
};
beforeEach(module('horizon.framework.util.q'));
beforeEach(inject(function($injector, _$rootScope_) {
service = $injector.get('horizon.framework.util.q.extensions');
$q = $injector.get('$q');
$scope = _$rootScope_.$new();
}));
it('should define allSettled', function () {
expect(service.allSettled).toBeDefined();
});
it('should resolve all given promises', function() {
// allSettled must settle (not fail-fast) even when some promises reject,
// sorting outcomes into `pass` and `fail` buckets tagged with `context`.
service.allSettled([{
promise: failedPromise(),
context: '1'
}, {
promise: passedPromise(),
context: '2'
}]).then(onAllSettled);
// $apply flushes $q's deferred callbacks so onAllSettled runs before the asserts.
$scope.$apply();
function onAllSettled(resolvedPromises) {
expect(resolvedPromises.fail.length).toEqual(1);
expect(resolvedPromises.fail[0]).toEqual({data: 'failed', context: '1'});
expect(resolvedPromises.pass.length).toEqual(1);
expect(resolvedPromises.pass[0]).toEqual({data: 'passed', context: '2'});
}
});
});
describe('booleanAsPromise', function() {
var service, $scope;
beforeEach(module('horizon.framework.util.q'));
beforeEach(inject(function($injector, _$rootScope_) {
service = $injector.get('horizon.framework.util.q.extensions');
$scope = _$rootScope_.$new();
}));
it('should define booleanAsPromise', function () {
expect(service.booleanAsPromise).toBeDefined();
});
it('should reject the promise if condition does not evaluates to true', function() {
// Truthy-but-not-true values ({}, 'A', 7) must also reject: per this spec
// the service requires strictly `true`, not mere truthiness.
var testValues = [ false, null, {}, 'A', 7 ];
var rejectCount = 0;
testValues.map(function doTest(testValue) {
service.booleanAsPromise(testValue).then(angular.noop, function failTest() {
rejectCount++;
});
$scope.$apply();
});
expect(rejectCount).toEqual(testValues.length);
});
it('should resolve the promise only if condition to true', function() {
var passCount = 0;
service.booleanAsPromise(true).then(function passTest() {
passCount++;
});
$scope.$apply();
expect(passCount).toEqual(1);
});
});
});
})();
|
apache-2.0
|
davyjoneswang/AndroidLearnDemos
|
app/src/main/java/com/example/wangyonghua/androidlearndemos/adapter/Test.java
|
591
|
package com.example.wangyonghua.androidlearndemos.adapter;
/**
* Created by wangyonghua on 15-12-8.
*/
public class Test {
    /**
     * Emits an Android dimens resource XML file to stdout, mapping each dp value
     * 1..599 to its pixel size for a 1080px-wide screen (320dp baseline width).
     */
    public static void main(String[] args) {
        float w = 1080;
        //float de = w * 3.375f / 1080f;
        // Pixels per dp for a 320dp-wide baseline screen.
        float delta = w / 320f;
        System.out.println("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
                "<resources>");
        for (int i = 1; i < 600; i++) {
            // BUG FIX: the attribute value must be quoted — the original emitted
            // `name=dimen_1_dip`, which is not well-formed XML and would be
            // rejected by the Android resource compiler.
            System.out.println(String.format("<dimen name=\"dimen_%d_dip\">%spx</dimen>", i, i * delta));
        }
        System.out.println("</resources>");
    }
}
|
apache-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.