repo_name
stringlengths 4
116
| path
stringlengths 4
379
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
theLastHero/PlayerWarpGUI
|
src/FileHandlers/ConfigHandler.java
|
7522
|
package FileHandlers;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import PlayerWarpGUI.PlayerWarpGUI;
public class ConfigHandler {

    // NOTE(review): a static field assigned from the constructor means the last
    // constructed ConfigHandler wins. Kept as-is because other classes appear to
    // reference ConfigHandler.plugin statically.
    public static PlayerWarpGUI plugin;

    // +-------------------------------------------------------------------------------------
    // | Constructor
    // +-------------------------------------------------------------------------------------
    public ConfigHandler(PlayerWarpGUI playerWarpGUI) {
        plugin = playerWarpGUI;
    }

    // +-----------------------------------------------------
    // | loadConfigFile
    // +-----------------------------------------------------
    /**
     * Loads the plugin's config file into the static settings fields on
     * {@link PlayerWarpGUI}, creating the file from the bundled default
     * resource first if it does not exist yet.
     */
    public void loadConfigFile() {
        // Check if the config file exists, else create it from the default resource.
        if (!checkConfigFile()) {
            createConfigFile();
        }
        FileConfiguration config = new YamlConfiguration();
        try {
            config.load(plugin.configFile);

            // Migration for metrics support: older config files have no Metrics
            // section, so append one with the default enabled.
            if (config.getString("Metrics.enabled") == null) {
                File saveTo = new File(plugin.configFile);
                // try-with-resources guarantees the writer is closed even on failure
                // (the original leaked the FileWriter when PrintWriter creation threw).
                try (PrintWriter pw = new PrintWriter(new FileWriter(saveTo, true))) {
                    pw.println("Metrics:");
                    pw.println(" enabled: true");
                }
                // Re-load so the freshly appended key is honoured this run; the
                // original only picked it up on the next server start.
                config.load(plugin.configFile);
            }

            // --- safe-warp settings ---
            PlayerWarpGUI.useSafeWarp = config.getBoolean("SafeWarp.enabled", false);
            PlayerWarpGUI.unsafeBlocks = config.getStringList("SafeWarp.unsafeBlocks");

            // --- teleport settings ---
            PlayerWarpGUI.cooldown = config.getInt("Teleport.cooldown", 3);
            PlayerWarpGUI.cancelOnMovement = config.getBoolean("Teleport.cancelOnMovement", true);
            PlayerWarpGUI.godModeAfterTP = config.getInt("Teleport.godModeAfterTP", 0);

            // --- GUI settings ---
            PlayerWarpGUI.defaultWarpIcon = config.getString("GUI.DefaultWarpIcon", "35:9");
            PlayerWarpGUI.nextPageIcon = config.getString("GUI.nextPageIcon", "35:8");
            PlayerWarpGUI.messagePrefix = config.getString("Messages.prefix", "[PlayerWarpGUI]");
            // Chest inventories are 9 slots per row.
            PlayerWarpGUI.chestSize = (config.getInt("GUI.rows") * 9);
            PlayerWarpGUI.chestText = config.getString("GUI.chestText", "PlayerWarpGUI");
            PlayerWarpGUI.playerWarpText = config.getString("GUI.playerWarpText", "&6[username]");

            // --- general settings ---
            PlayerWarpGUI.setWarpCost = config.getInt("Settings.setWarpCost", 0);
            PlayerWarpGUI.disabledWorlds = config.getStringList("Settings.disabledWorlds");

            // --- protection-plugin hooks ---
            PlayerWarpGUI.enableGriefPrevetion = config.getBoolean("GriefPrevetion.enabled", false);
            PlayerWarpGUI.enableWorldGuard = config.getBoolean("WorldGuard.enabled", false);
            // NOTE(review): useOwners/useMembers read "WorldGuard.enabled" rather than
            // dedicated "WorldGuard.owners"/"WorldGuard.members" keys — this looks like
            // a copy-paste bug, but the original behavior is preserved here. Confirm
            // the intended keys before changing.
            PlayerWarpGUI.useOwners = config.getBoolean("WorldGuard.enabled", false);
            PlayerWarpGUI.useMembers = config.getBoolean("WorldGuard.enabled", false);
            PlayerWarpGUI.enableRedProtect = config.getBoolean("RedProtect.enabled", false);
            PlayerWarpGUI.useRPAdmins = config.getBoolean("RedProtect.admins", false);
            PlayerWarpGUI.useRPLeaders = config.getBoolean("RedProtect.leaders", false);
            PlayerWarpGUI.useRPMembers = config.getBoolean("RedProtect.members", false);

            // --- misc ---
            PlayerWarpGUI.DEBUG_MODE = config.getBoolean("Settings.debug_mode", false);
            PlayerWarpGUI.useMetrics = config.getBoolean("Metrics.enabled", false);
            PlayerWarpGUI.maxTitleSize = config.getInt("Settings.maxTitleSize", 25);
            // getInt already falls back to the default when the key is absent, so the
            // original isSet() branch was redundant.
            PlayerWarpGUI.maxLoreSize = config.getInt("Settings.maxLoreSize", 40);
            PlayerWarpGUI.usePlayerHead = config.getBoolean("GUI.usePlayerHead", false);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // -----------------------------------------------------
    // createConfigFile
    // -----------------------------------------------------
    /**
     * Creates the config file (and any missing parent directories) by copying
     * the default config resource bundled inside the plugin jar.
     */
    public void createConfigFile() {
        File configFile = new File(plugin.configFile);
        configFile.getParentFile().mkdirs();
        // NOTE(review): getResource returns null when the default resource is
        // missing from the jar; copy() then logs the resulting exception.
        copy(plugin.getResource(plugin.defaultConfigFile), configFile);
    }

    // -----------------------------------------------------
    // checkConfigFile
    // -----------------------------------------------------
    /**
     * @return true when the config file already exists on disk.
     */
    public boolean checkConfigFile() {
        return new File(plugin.configFile).exists();
    }

    // -------------------------------------------------------------------------------------
    // copy
    // -------------------------------------------------------------------------------------
    /**
     * Copies the given stream into the given file. Both streams are always
     * closed (the original leaked them when an I/O error occurred mid-copy).
     */
    public static void copy(InputStream in, File file) {
        try (InputStream source = in;
             OutputStream out = new FileOutputStream(file)) {
            byte[] buf = new byte[1024];
            int len;
            while ((len = source.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
apache-2.0
|
wangsongpeng/jdk-src
|
src/main/java/com/sun/corba/se/PortableActivationIDL/InvalidORBidHelper.java
|
2099
|
package com.sun.corba.se.PortableActivationIDL;
/**
* com/sun/corba/se/PortableActivationIDL/InvalidORBidHelper.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from /HUDSON3/workspace/8-2-build-linux-amd64/jdk8u121/8372/corba/src/share/classes/com/sun/corba/se/PortableActivationIDL/activation.idl
* Monday, December 12, 2016 4:37:46 PM PST
*/
// IDL-to-Java generated helper for the InvalidORBid exception: marshals the
// exception into/out of CORBA Any values and lazily builds its TypeCode.
abstract public class InvalidORBidHelper
{
  // Repository ID identifying the InvalidORBid exception type in the IDL.
  private static String _id = "IDL:PortableActivationIDL/InvalidORBid:1.0";

  // Stores an InvalidORBid value inside the given Any.
  public static void insert (org.omg.CORBA.Any a, InvalidORBid that)
  {
    org.omg.CORBA.portable.OutputStream out = a.create_output_stream ();
    a.type (type ());
    write (out, that);
    a.read_value (out.create_input_stream (), type ());
  }

  // Extracts an InvalidORBid value from the given Any.
  public static InvalidORBid extract (org.omg.CORBA.Any a)
  {
    return read (a.create_input_stream ());
  }

  // Lazily-initialized TypeCode; guarded by the class lock in type().
  private static org.omg.CORBA.TypeCode __typeCode = null;
  // Recursion guard: true while the TypeCode is being constructed, so a
  // re-entrant call returns a recursive TypeCode instead of looping forever.
  private static boolean __active = false;

  // Returns the TypeCode for InvalidORBid, building it on first use
  // (double-checked locking as emitted by the IDL compiler).
  synchronized public static org.omg.CORBA.TypeCode type ()
  {
    if (__typeCode == null)
    {
      synchronized (org.omg.CORBA.TypeCode.class)
      {
        if (__typeCode == null)
        {
          if (__active)
          {
            // Re-entered while constructing: hand back a recursive placeholder.
            return org.omg.CORBA.ORB.init().create_recursive_tc ( _id );
          }
          __active = true;
          // The exception carries no members, hence the empty member array.
          org.omg.CORBA.StructMember[] _members0 = new org.omg.CORBA.StructMember [0];
          // NOTE(review): _tcOf_members0 is unused; left in place because this
          // file is compiler-generated and should not be hand-edited.
          org.omg.CORBA.TypeCode _tcOf_members0 = null;
          __typeCode = org.omg.CORBA.ORB.init ().create_exception_tc (InvalidORBidHelper.id (), "InvalidORBid", _members0);
          __active = false;
        }
      }
    }
    return __typeCode;
  }

  // Returns the repository ID for InvalidORBid.
  public static String id ()
  {
    return _id;
  }

  // Unmarshals an InvalidORBid from the stream.
  public static InvalidORBid read (org.omg.CORBA.portable.InputStream istream)
  {
    InvalidORBid value = new InvalidORBid ();
    // read and discard the repository ID
    istream.read_string ();
    return value;
  }

  // Marshals an InvalidORBid onto the stream.
  public static void write (org.omg.CORBA.portable.OutputStream ostream, InvalidORBid value)
  {
    // write the repository ID
    ostream.write_string (id ());
  }
}
|
apache-2.0
|
panelion/incubator-stratos
|
components/org.apache.stratos.cloud.controller/src/main/java/org/apache/stratos/cloud/controller/publisher/CartridgeInstanceDataPublisherTask.java
|
21854
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.cloud.controller.publisher;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.cloud.controller.exception.CloudControllerException;
import org.apache.stratos.cloud.controller.pojo.CartridgeInstanceData;
import org.apache.stratos.cloud.controller.runtime.FasterLookUpDataHolder;
import org.apache.stratos.cloud.controller.util.*;
import org.wso2.carbon.base.ServerConfiguration;
import org.wso2.carbon.databridge.agent.thrift.Agent;
import org.wso2.carbon.databridge.agent.thrift.DataPublisher;
import org.wso2.carbon.databridge.agent.thrift.conf.AgentConfiguration;
import org.wso2.carbon.databridge.commons.Event;
import org.wso2.carbon.databridge.commons.exception.NoStreamDefinitionExistException;
import org.wso2.carbon.ntask.core.Task;
import org.wso2.carbon.utils.CarbonUtils;
import java.util.*;
/**
 * Periodic ntask that publishes cartridge-instance state changes to a WSO2 BAM
 * server via the databridge thrift {@link DataPublisher}. A static
 * "publisher running" flag in {@link FasterLookUpDataHolder} prevents
 * overlapping executions.
 */
public class CartridgeInstanceDataPublisherTask implements Task{

    private static final Log log = LogFactory.getLog(CartridgeInstanceDataPublisherTask.class);
    private static DataPublisher dataPublisher;
    private static String streamId;
    private static final String cloudControllerEventStreamVersion = "1.0.0";
    // Instances whose state changed (or that are new) since the last cycle.
    private static List<CartridgeInstanceData> dataToBePublished;

    protected enum NodeStatus {
        PENDING, RUNNING, SUSPENDED, TERMINATED, ERROR, UNRECOGNIZED
    };

    @Override
    public void execute() {
        publish();
    }

    /**
     * Runs one publish cycle: lazily creates the data publisher and stream,
     * diffs current node states against the previous snapshot, and emits one
     * event per new or state-changed instance.
     */
    @SuppressWarnings("deprecation")
    public static void publish(){
        if(FasterLookUpDataHolder.getInstance().isPublisherRunning() ||
                // this is a temporary fix to avoid task execution - limitation with ntask
                !FasterLookUpDataHolder.getInstance().getEnableBAMDataPublisher()){
            return;
        }

        log.debug(CloudControllerConstants.DATA_PUB_TASK_NAME+" cycle started.");
        FasterLookUpDataHolder.getInstance().setPublisherRunning(true);
        dataToBePublished = new ArrayList<CartridgeInstanceData>();

        if(dataPublisher==null){
            createDataPublisher();
            // If we cannot create a data publisher we should give up;
            // this means data will not be published.
            if(dataPublisher == null){
                log.error("Data Publisher cannot be created or found.");
                release();
                return;
            }
        }

        if(streamId == null){
            try{
                streamId = dataPublisher.findStream(CloudControllerConstants.CLOUD_CONTROLLER_EVENT_STREAM, cloudControllerEventStreamVersion);
            }catch (NoStreamDefinitionExistException e){
                log.info("Defining the event stream because it was not found in BAM");
                try{
                    defineStream();
                } catch(Exception ex){
                    String msg = "Error occurred while defining the event stream for publishing Cloud Controller data. " + ex.getMessage();
                    log.error(msg, ex);
                    // We do not want to proceed without an event stream. Therefore we return.
                    release();
                    return;
                }
            }catch (Exception exc){
                log.error("Error occurred while searching for stream id. " + exc.getMessage(), exc);
                // We do not want to proceed without an event stream. Therefore we return.
                release();
                return;
            }
        }

        // build the new node - state Map
        Map<String, String> newNodeToStateMap;
        try{
            newNodeToStateMap = getNodeIdToStatusMap();
        }catch (Exception e) {
            release();
            throw new CloudControllerException(e.getMessage(), e);
        }

        // compare it with old map and populate data to be published with ones newly added
        // and once whose state got changed
        populateNewlyAddedOrStateChangedNodes(newNodeToStateMap);

        // issue events for the ones obtained from above
        for (CartridgeInstanceData dataObj : dataToBePublished) {
            String privateIpAddresses = joinWithCommas(dataObj.getMetaData().getPrivateAddresses());
            String publicIpAddresses = joinWithCommas(dataObj.getMetaData().getPublicAddresses());

            try {
                Event cloudControllerEvent = new Event(streamId, System.currentTimeMillis(), new Object[]{}, null,
                        new Object[]{dataObj.getNodeId(),
                                dataObj.getType(),
                                dataObj.getDomain(),
                                dataObj.getSubDomain(),
                                dataObj.getAlias(),
                                dataObj.getTenantRange(),
                                String.valueOf(dataObj.isMultiTenant()),
                                dataObj.getIaas(),
                                dataObj.getStatus(),
                                dataObj.getMetaData().getHostname(),
                                dataObj.getMetaData().getHardware().getHypervisor(),
                                String.valueOf(dataObj.getMetaData().getHardware().getRam()),
                                dataObj.getMetaData().getImageId(),
                                String.valueOf(dataObj.getMetaData().getLoginPort()),
                                dataObj.getMetaData().getOperatingSystem().getName(),
                                dataObj.getMetaData().getOperatingSystem().getVersion(),
                                dataObj.getMetaData().getOperatingSystem().getArch(),
                                String.valueOf(dataObj.getMetaData().getOperatingSystem().is64Bit()),
                                privateIpAddresses,
                                publicIpAddresses});
                dataPublisher.publish(cloudControllerEvent);
                log.debug("Data published : "+cloudControllerEvent.toString());
            } catch (Exception e) {
                String msg = "Error occurred while publishing Cartridge instance event to BAM. ";
                log.error(msg, e);
                release();
                throw new CloudControllerException(msg, e);
            }
        }

        // replace old map with new one only if data is published
//        FasterLookUpDataHolder.getInstance().setNodeIdToStatusMap(newNodeToStateMap);
        //TODO remove
//        CassandraDataRetriever.init();
//        CassandraDataRetriever.connect();
//        HiveQueryExecutor hive = new HiveQueryExecutor();
//        hive.createHiveTable();
//        System.out.println("***********");
//        for (String str : hive.getRunningNodeIds()) {
//
//            System.out.println(str);
//        }
//        System.out.println("***********");
        release();
    }

    /**
     * Joins the given addresses with commas (no trailing comma). Returns the
     * empty string for an empty iterable. Replaces the duplicated
     * append-then-truncate StringBuffer logic that previously appeared twice
     * in {@link #publish()}.
     */
    private static String joinWithCommas(Iterable<String> addresses) {
        StringBuilder joined = new StringBuilder();
        for (String ip : addresses) {
            if (joined.length() > 0) {
                joined.append(',');
            }
            joined.append(ip);
        }
        return joined.toString();
    }

    /** Clears the "publisher running" flag so the next cycle may execute. */
    private static void release(){
        FasterLookUpDataHolder.getInstance().setPublisherRunning(false);
    }

    /** Defines the cloud-controller event stream in BAM and stores its id. */
    private static void defineStream() throws Exception {
        streamId = dataPublisher.
                defineStream("{" +
                        "  'name':'" + CloudControllerConstants.CLOUD_CONTROLLER_EVENT_STREAM +"'," +
                        "  'version':'" + cloudControllerEventStreamVersion +"'," +
                        "  'nickName': 'cloud.controller'," +
                        "  'description': 'Instances booted up by the Cloud Controller '," +
                        "  'metaData':[]," +
                        "  'payloadData':[" +
                        "          {'name':'"+CloudControllerConstants.NODE_ID_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.CARTRIDGE_TYPE_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.DOMAIN_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.SUB_DOMAIN_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.ALIAS_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.TENANT_RANGE_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.IS_MULTI_TENANT_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.IAAS_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.STATUS_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.HOST_NAME_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.HYPERVISOR_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.RAM_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.IMAGE_ID_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.LOGIN_PORT_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.OS_NAME_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.OS_VERSION_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.OS_ARCH_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.OS_BIT_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.PRIV_IP_COL+"','type':'STRING'}," +
                        "          {'name':'"+CloudControllerConstants.PUB_IP_COL+"','type':'STRING'}" +
                        "  ]" +
                        "}");
    }

    @Override
    public void init() {
        // this is a temporary fix to avoid task execution - limitation with ntask
        if(!FasterLookUpDataHolder.getInstance().getEnableBAMDataPublisher()){
            log.debug("BAM data publisher is disabled. ");
            return;
        }
        if((dataPublisher = FasterLookUpDataHolder.getInstance().getDataPublisher()) == null){
            createDataPublisher();
        }
        streamId = FasterLookUpDataHolder.getInstance().getStreamId();
    }

    @Override
    public void setProperties(Map<String, String> arg0) {}

    /**
     * Creates the thrift {@link DataPublisher} from Carbon server
     * configuration and stores it in the shared data holder. Throws
     * {@link CloudControllerException} when the publisher cannot be created.
     */
    private static void createDataPublisher(){
        // creating the agent
        AgentConfiguration agentConfiguration = new AgentConfiguration();

        ServerConfiguration serverConfig = CarbonUtils.getServerConfiguration();
        String trustStorePath = serverConfig.getFirstProperty("Security.TrustStore.Location");
        String trustStorePassword = serverConfig.getFirstProperty("Security.TrustStore.Password");
        String bamServerUrl = serverConfig.getFirstProperty("BamServerURL");
        String adminUsername = FasterLookUpDataHolder.getInstance().getDataPubConfig().getBamUsername();
        String adminPassword = FasterLookUpDataHolder.getInstance().getDataPubConfig().getBamPassword();

        // NOTE(review): mutating global JVM trust-store properties affects every
        // SSL client in this process — confirm this is intended.
        System.setProperty("javax.net.ssl.trustStore", trustStorePath);
        System.setProperty("javax.net.ssl.trustStorePassword", trustStorePassword);

        Agent agent = new Agent(agentConfiguration);
        try {
            dataPublisher = new DataPublisher(bamServerUrl, adminUsername, adminPassword, agent);
            FasterLookUpDataHolder.getInstance().setDataPublisher(dataPublisher);
        } catch (Exception e) {
            String msg = "Unable to create a data publisher to " + bamServerUrl +
                    ". Usage Agent will not function properly. ";
            log.error(msg, e);
            throw new CloudControllerException(msg, e);
        }
    }

//    private static void bundleData(String key, String val, ServiceContext serviceCtxt) {
//
//        CartridgeInstanceData instanceData = new CartridgeInstanceData();
//        instanceData.setNodeId(key);
//        instanceData.setStatus(val);
//        instanceData.setDomain(serviceCtxt.getClusterId());
//        instanceData.setAlias("".equals(serviceCtxt.getProperty(CloudControllerConstants.ALIAS_PROPERTY))
//                ? "NULL"
//                : serviceCtxt.getProperty(CloudControllerConstants.ALIAS_PROPERTY));
//        instanceData.setTenantRange("".equals(serviceCtxt.getProperty(CloudControllerConstants.TENANT_ID_PROPERTY))
//                ? serviceCtxt.getTenantRange()
//                : serviceCtxt.getProperty(CloudControllerConstants.TENANT_ID_PROPERTY));
//
//        if (serviceCtxt.getCartridge() != null) {
//            instanceData.setMultiTenant(serviceCtxt.getCartridge().isMultiTenant());
//
//            for (IaasProvider iaas : serviceCtxt.getCartridge().getIaases()) {
//
//                IaasContext ctxt = null;
//                if ((ctxt = serviceCtxt.getIaasContext(iaas.getType())) == null) {
//                    ctxt = serviceCtxt.addIaasContext(iaas.getType());
//                }
//
//                if (ctxt.didISpawn(key)) {
//                    instanceData.setIaas(iaas.getType());
//                    instanceData.setMetaData(ctxt.getNode(key));
//
//                    // clear to be removed data
//                    ctxt.removeToBeRemovedNodeId(key);
//
//                    // if the node is terminated
//                    if (val.equals(NodeStatus.TERMINATED.toString())) {
//                        // since this node is terminated
////                        FasterLookUpDataHolder.getInstance().removeNodeId(key);
//
//                        // remove node meta data
//                        ctxt.removeNodeMetadata(ctxt.getNode(key));
//                    }
//
//                    break;
//                }
//            }
//
//            instanceData.setType(serviceCtxt.getCartridge().getType());
//        } else {
//            log.warn("Cartridge is null for Service Context : (domain: " +
//                    serviceCtxt.getClusterId() +
//                    ")");
//        }
//
//        dataToBePublished.add(instanceData);
//
//    }

    /**
     * Builds a node-id to status map for all running instances. The jclouds
     * traversal below is currently disabled (commented out), so this returns
     * an empty map.
     */
    private static Map<String, String> getNodeIdToStatusMap() throws Exception {
        Map<String, String> statusMap = new HashMap<String, String>();
//        // iterate through all ServiceContexts
//        for (Iterator<?> it1 = FasterLookUpDataHolder.getInstance().getServiceContexts().entrySet().iterator(); it1.hasNext();) {
//            @SuppressWarnings("unchecked")
//            Map.Entry<String, Map<String, ServiceContext>> entry = (Map.Entry<String, Map<String, ServiceContext>>) it1.next();
//
//            Map<String, ServiceContext> map = (Map<String, ServiceContext>) entry.getValue();
//
//            for (Iterator<ServiceContext> it2 = map.values().iterator(); it2.hasNext();) {
//                ServiceContext subjectedSerCtxt = (ServiceContext) it2.next();
//
//                if (subjectedSerCtxt != null && subjectedSerCtxt.getCartridge() != null) {
//                    List<IaasProvider> iaases = subjectedSerCtxt.getCartridge().getIaases();
//
//                    for (IaasProvider iaas : iaases) {
//
//                        ComputeService computeService = iaas.getComputeService();
//
//                        if(computeService == null){
//                            continue;
//                        }
//
//                        IaasContext ctxt = null;
//                        if((ctxt = subjectedSerCtxt.getIaasContext(iaas.getType())) == null){
//                            ctxt = subjectedSerCtxt.addIaasContext(iaas.getType());
//                        }
//
//                        // get list of node Ids
//                        List<String> nodeIds = ctxt.getAllNodeIds();
//
//                        if (nodeIds.isEmpty()) {
//
//                            continue;
//                        }
//
//                        try {
//
//                            // get all the nodes spawned by this IaasContext
//                            Set<? extends ComputeMetadata> set = computeService.listNodes();
//
//                            Iterator<? extends ComputeMetadata> iterator = set.iterator();
//
//                            // traverse through all nodes of this ComputeService object
//                            while (iterator.hasNext()) {
//                                NodeMetadata nodeMetadata = (NodeMetadataImpl) iterator.next();
//
//                                // if this node belongs to the requested domain
//                                if (nodeIds.contains(nodeMetadata.getId())) {
//
//                                    statusMap.put(nodeMetadata.getId(), nodeMetadata.getStatus()
//                                            .toString());
//
//                                    ctxt.addNodeMetadata(nodeMetadata);
//                                }
//
//                            }
//
//                        }catch (Exception e) {
//                            log.error(e.getMessage(), e);
//                            throw e;
//                        }
//
//                    }
//                }
//            }
//
//
//        }
        return statusMap;
    }

    /**
     * Fills {@link #dataToBePublished} with instances that are new or whose
     * state changed relative to the previous snapshot. The diffing logic is
     * currently disabled (commented out), so this is a no-op.
     */
    private static void populateNewlyAddedOrStateChangedNodes(Map<String, String> newMap){
//        MapDifference<String, String> diff = Maps.difference(newMap,
//                FasterLookUpDataHolder.getInstance().getNodeIdToStatusMap());
//
//        // adding newly added nodes
//        Map<String, String> newlyAddedNodes = diff.entriesOnlyOnLeft();
//
//        for (Iterator<?> it = newlyAddedNodes.entrySet().iterator(); it.hasNext();) {
//            @SuppressWarnings("unchecked")
//            Map.Entry<String, String> entry = (Map.Entry<String, String>) it.next();
//            String key = entry.getKey();
//            String val = entry.getValue();
//            ServiceContext ctxt = FasterLookUpDataHolder.getInstance().getServiceContextFromNodeId(key);
//
//            log.debug("------ Node id: "+key+" --- node status: "+val+" -------- ctxt: "+ctxt);
//
//            if (ctxt != null && key != null && val != null) {
//                // bundle the data to be published
//                bundleData(key, val, ctxt);
//            }
//
//        }
//
//        // adding nodes with state changes
//        Map<String, ValueDifference<String>> stateChangedNodes = diff.entriesDiffering();
//
//        for (Iterator<?> it = stateChangedNodes.entrySet().iterator(); it.hasNext();) {
//            @SuppressWarnings("unchecked")
//            Map.Entry<String, ValueDifference<String>> entry = (Map.Entry<String, ValueDifference<String>>) it.next();
//
//            String key = entry.getKey();
//            String newState = entry.getValue().leftValue();
//            ServiceContext ctxt = FasterLookUpDataHolder.getInstance().getServiceContextFromNodeId(key);
//
//            log.debug("------- Node id: "+key+" --- node status: "+newState+" -------- ctxt: "+ctxt);
//
//            if (ctxt != null && key != null && newState != null) {
//                // bundle the data to be published
//                bundleData(key, newState, ctxt);
//            }
//
//        }
    }
}
|
apache-2.0
|
typetools/guava
|
guava/src/com/google/common/hash/FarmHashFingerprint64.java
|
10098
|
/*
* Copyright (C) 2015 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.hash;
import static com.google.common.base.Preconditions.checkPositionIndexes;
import static com.google.common.hash.LittleEndianByteArray.load32;
import static com.google.common.hash.LittleEndianByteArray.load64;
import static java.lang.Long.rotateRight;
import com.google.common.annotations.VisibleForTesting;
import org.checkerframework.checker.index.qual.LTLengthOf;
import org.checkerframework.checker.index.qual.LengthOf;
import org.checkerframework.checker.index.qual.NonNegative;
import org.checkerframework.common.value.qual.ArrayLenRange;
import org.checkerframework.common.value.qual.IntRange;
import org.checkerframework.common.value.qual.MinLen;
/**
* Implementation of FarmHash Fingerprint64, an open-source fingerprinting algorithm for strings.
*
* <p>Its speed is comparable to CityHash64, and its quality of hashing is at least as good.
*
* <p>Note to maintainers: This implementation relies on signed arithmetic being bit-wise equivalent
* to unsigned arithmetic in all cases except:
*
* <ul>
* <li>comparisons (signed values can be negative)
* <li>division (avoided here)
* <li>shifting (right shift must be unsigned)
* </ul>
*
* @author Kyle Maddison
* @author Geoff Pike
*/
final class FarmHashFingerprint64 extends AbstractNonStreamingHashFunction {
static final HashFunction FARMHASH_FINGERPRINT_64 = new FarmHashFingerprint64();
// Some primes between 2^63 and 2^64 for various uses.
private static final long K0 = 0xc3a5c85c97cb3127L;
private static final long K1 = 0xb492b66fbe98f273L;
private static final long K2 = 0x9ae16a3b2f90404fL;
@Override
public HashCode hashBytes(byte[] input, @NonNegative @LTLengthOf(value = "#1", offset = "#3 - 1") int off, @NonNegative @LTLengthOf(value = "#1", offset = "#2 - 1") int len) {
    // Validate the requested window before reading from the array.
    checkPositionIndexes(off, off + len, input.length);
    long fp = fingerprint(input, off, len);
    return HashCode.fromLong(fp);
}
@Override
public @NonNegative int bits() {
    // FarmHash Fingerprint64 always yields a 64-bit value.
    return 64;
}
@Override
public String toString() {
    // Mirrors the name of the factory method that exposes this function.
    return "Hashing.farmHashFingerprint64()";
}
// End of public functions.
// Dispatches to the size-specialised hashing routine for the given window.
@VisibleForTesting
static long fingerprint(byte[] bytes, @NonNegative @LTLengthOf(value = "#1", offset = "#3 - 1") int offset, @NonNegative @LTLengthOf(value = "#1", offset = "#2 - 1") int length) {
    if (length <= 16) {
        return hashLength0to16(bytes, offset, length);
    }
    if (length <= 32) {
        return hashLength17to32(bytes, offset, length);
    }
    if (length <= 64) {
        return hashLength33To64(bytes, offset, length);
    }
    return hashLength65Plus(bytes, offset, length);
}
// Folds the high bits of val into its low bits (xor with unsigned shift by 47).
private static long shiftMix(long val) {
    long topBits = val >>> 47;
    return val ^ topBits;
}
// Mixes two 64-bit halves into one 64-bit value using the given multiplier.
private static long hashLength16(long u, long v, long mul) {
    long mixed = (u ^ v) * mul;
    mixed ^= mixed >>> 47;
    long result = (v ^ mixed) * mul;
    result ^= result >>> 47;
    return result * mul;
}
/**
 * Computes an intermediate hash of 32 bytes of the array starting at the given
 * offset. The two result words are written into {@code output} rather than a
 * freshly allocated array because, when last measured, reuse was 12% faster.
 */
private static void weakHashLength32WithSeeds(
    byte[] bytes, @NonNegative int offset, long seedA, long seedB, long @MinLen(2)[] output) {
    long chunk0 = load64(bytes, offset);
    long chunk1 = load64(bytes, offset + 8);
    long chunk2 = load64(bytes, offset + 16);
    long chunk3 = load64(bytes, offset + 24);
    seedA += chunk0;
    seedB = rotateRight(seedB + seedA + chunk3, 21);
    long savedA = seedA;
    seedA += chunk1;
    seedA += chunk2;
    seedB += rotateRight(seedA, 44);
    output[0] = seedA + chunk3;
    output[1] = seedB + savedA;
}
// Hashes a window of at most 16 bytes, with separate strategies for 8-16,
// 4-7, 1-3 and 0 bytes. Reads at the front and back may overlap.
@SuppressWarnings({"lowerbound:argument.type.incompatible", // (1): with length >= 8 and non-negative offset, offset + length - 8 >= 0.
    "upperbound:argument.type.incompatible", // (2): with length >= 4 and offset + length - 1 < bytes.length, offset + 4 - 1 < bytes.length.
    "upperbound:array.access.unsafe.high", /* (3): with 0 < length < 4 and offset + length - 1 < bytes.length, the
        indices offset, offset + (length >> 1) and offset + length - 1 are all in bounds. */
})
private static long hashLength0to16(byte[] bytes, @NonNegative @LTLengthOf(value = "#1", offset = "#3 - 1") int offset, @IntRange(from = 0, to = 16) @LTLengthOf(value = "#1", offset = "#2 - 1") int length) {
    if (length >= 8) {
        // Two (possibly overlapping) 8-byte reads cover the whole window.
        long mul = K2 + length * 2;
        long first8 = load64(bytes, offset) + K2;
        long last8 = load64(bytes, offset + length - 8); // (1)
        long c = rotateRight(last8, 37) * mul + first8;
        long d = (rotateRight(first8, 25) + last8) * mul;
        return hashLength16(c, d, mul);
    }
    if (length >= 4) {
        // Two (possibly overlapping) 4-byte reads, zero-extended to 64 bits.
        long mul = K2 + length * 2;
        long first4 = load32(bytes, offset) & 0xFFFFFFFFL; // (2)
        return hashLength16(length + (first4 << 3), load32(bytes, offset + length - 4) & 0xFFFFFFFFL, mul); // (1)
    }
    if (length > 0) {
        // 1-3 bytes: combine the first, middle and last byte.
        byte first = bytes[offset]; // (3)
        byte middle = bytes[offset + (length >> 1)]; // (3)
        byte last = bytes[offset + (length - 1)]; // (3)
        int y = (first & 0xFF) + ((middle & 0xFF) << 8);
        int z = length + ((last & 0xFF) << 2);
        return shiftMix(y * K2 ^ z * K0) * K2;
    }
    // Empty input hashes to the K2 constant.
    return K2;
}
// Hashes a window of 17-32 bytes using four 8-byte reads: two from the front
// and two (possibly overlapping) from the back.
@SuppressWarnings(value = {"lowerbound:argument.type.incompatible" /* (1): with 17 <= length <= 32 and non-negative
    offset, both offset + length - 8 and offset + length - 16 are >= 0. */})
private static long hashLength17to32(byte[] bytes, @NonNegative @LTLengthOf(value = "#1", offset = "#3 - 1") int offset, @IntRange(from = 17, to = 32) @LTLengthOf(value = "#1", offset = "#2 - 1") int length) {
    long mul = K2 + length * 2;
    long front1 = load64(bytes, offset) * K1;
    long front2 = load64(bytes, offset + 8);
    long back1 = load64(bytes, offset + length - 8) * mul; // (1)
    long back2 = load64(bytes, offset + length - 16) * K2; // (1)
    return hashLength16(
        rotateRight(front1 + front2, 43) + rotateRight(back1, 30) + back2,
        front1 + rotateRight(front2 + K2, 18) + back1,
        mul);
}
// Hashes a window of 33-64 bytes: the 17-32 scheme applied twice, once to the
// outer 32 bytes and once to the inner 32, then mixed together.
private static long hashLength33To64(byte[] bytes, @NonNegative @LTLengthOf(value = "#1", offset = "#3 - 1") int offset, @IntRange(from = 33, to = 64) @LTLengthOf(value = "#1", offset = "#2 - 1") int length) {
    long mul = K2 + length * 2;
    long p = load64(bytes, offset) * K2;
    long q = load64(bytes, offset + 8);
    long r = load64(bytes, offset + length - 8) * mul;
    long s = load64(bytes, offset + length - 16) * K2;
    long outerY = rotateRight(p + q, 43) + rotateRight(r, 30) + s;
    long outerZ = hashLength16(outerY, p + rotateRight(q + K2, 18) + r, mul);
    long u = load64(bytes, offset + 16) * mul;
    long v = load64(bytes, offset + 24);
    long innerG = (outerY + load64(bytes, offset + length - 32)) * mul;
    long innerH = (outerZ + load64(bytes, offset + length - 24)) * mul;
    return hashLength16(
        rotateRight(u + v, 43) + rotateRight(innerG, 30) + innerH,
        u + rotateRight(v + p, 18) + innerG,
        mul);
}
/*
 * Compute an 8-byte hash of a byte array of length greater than 64 bytes.
 */
@SuppressWarnings({"lowerbound:assignment.type.incompatible",/*(1): since length >= 65, `end` is non negative,
therefore `last64offset` is also non negative. */
"upperbound:assignment.type.incompatible",//(1): ?
"upperbound:compound.assignment.type.incompatible"/*(2): if length >= 65 and offset < bytes.length - length + 1,
offset += 64 < bytes.length.
*/
})
private static long hashLength65Plus(byte[] bytes, @NonNegative @LTLengthOf(value = "#1", offset = "#3 - 1") int offset, @IntRange(from = 65) @LTLengthOf(value = "#1", offset = "#2 - 1") int length) {
  final int seed = 81;
  // For strings over 64 bytes we loop. Internal state consists of 56 bytes: v, w, x, y, and z.
  long x = seed;
  @SuppressWarnings("ConstantOverflow")
  long y = seed * K1 + 113;
  long z = shiftMix(y * K2 + 113) * K2;
  long[] v = new long[2];
  long[] w = new long[2];
  x = x * K2 + load64(bytes, offset);
  // Set end so that after the loop we have 1 to 64 bytes left to process.
  int end = offset + ((length - 1) / 64) * 64;
  // Start of the final 64-byte window; may overlap the last loop iteration.
  int last64offset = end + ((length - 1) & 63) - 63;
  // Main loop: consume 64 input bytes per iteration, folding the eight 8-byte
  // words into the rolling state (v, w, x, y, z). x and z are swapped each
  // round so both halves of the state see every input word over time.
  do {
    x = rotateRight(x + y + v[0] + load64(bytes, offset + 8), 37) * K1;
    y = rotateRight(y + v[1] + load64(bytes, offset + 48), 42) * K1;
    x ^= w[1];
    y += v[0] + load64(bytes, offset + 40);
    z = rotateRight(z + w[0], 33) * K1;
    weakHashLength32WithSeeds(bytes, offset, v[1] * K1, x + w[0], v);
    weakHashLength32WithSeeds(bytes, offset + 32, z + w[1], y + load64(bytes, offset + 16), w);
    long tmp = x;
    x = z;
    z = tmp;
    offset += 64;//(2)
  } while (offset != end);
  long mul = K1 + ((z & 0xFF) << 1);
  // Operate on the last 64 bytes of input.
  offset = last64offset;//(1)
  w[0] += ((length - 1) & 63);
  v[0] += w[0];
  w[0] += v[0];
  // One final round over the (possibly overlapping) last-64-byte window,
  // using mul instead of K1, then collapse the state to 8 bytes.
  x = rotateRight(x + y + v[0] + load64(bytes, offset + 8), 37) * mul;
  y = rotateRight(y + v[1] + load64(bytes, offset + 48), 42) * mul;
  x ^= w[1] * 9;
  y += v[0] * 9 + load64(bytes, offset + 40);
  z = rotateRight(z + w[0], 33) * mul;
  weakHashLength32WithSeeds(bytes, offset, v[1] * mul, x + w[0], v);
  weakHashLength32WithSeeds(bytes, offset + 32, z + w[1], y + load64(bytes, offset + 16), w);
  return hashLength16(
      hashLength16(v[0], w[0], mul) + shiftMix(y) * K0 + x,
      hashLength16(v[1], w[1], mul) + z,
      mul);
}
}
|
apache-2.0
|
sergeykhbr/riscv_vhdl
|
debugger/src/cpu_sysc_plugin/riverlib/core/bp_btb.cpp
|
5660
|
/*
* Copyright 2018 Sergey Khabarov, sergeykhbr@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "api_core.h"
#include "bp_btb.h"

#include <string>
namespace debugger {
// Branch Target Buffer module: registers the combinational (comb) and clocked
// (registers) SystemC processes and builds their sensitivity lists.
BpBTB::BpBTB(sc_module_name name_, bool async_reset) :
    sc_module(name_),
    i_clk("i_clk"),
    i_nrst("i_nrst"),
    i_flush_pipeline("i_flush_pipeline"),
    i_e("i_e"),
    i_we("i_we"),
    i_we_pc("i_we_pc"),
    i_we_npc("i_we_npc"),
    i_bp_pc("i_bp_pc"),
    o_bp_npc("o_bp_npc"),
    o_bp_exec("o_bp_exec") {
    async_reset_ = async_reset;

    // Combinational process: re-evaluated whenever any input or any stored
    // BTB entry field changes.
    SC_METHOD(comb);
    sensitive << i_nrst;
    sensitive << i_flush_pipeline;
    sensitive << i_e;
    sensitive << i_we;
    sensitive << i_we_pc;
    sensitive << i_we_npc;
    sensitive << i_bp_pc;
    for (int i = 0; i < CFG_BTB_SIZE; i++) {
        sensitive << r_btb[i].pc;
        sensitive << r_btb[i].npc;
        sensitive << r_btb[i].exec;
    }

    // Sequential process: clocked on the rising edge; i_nrst in the list for
    // the asynchronous-reset configuration.
    SC_METHOD(registers);
    sensitive << i_nrst;
    sensitive << i_clk.pos();
};  // note: the trailing ';' after the constructor body is redundant but harmless
/**
 * Register this module's signals and internal BTB state with the VCD tracer.
 * The input-side trace file (i_vcd) is unused, matching the original behavior.
 *
 * Improvement: the per-entry traces were hand-unrolled for exactly 8 BTB
 * entries and 5 debug slots; they are now loops over CFG_BTB_SIZE and
 * CFG_BP_DEPTH, consistent with the loops used in the constructor and comb(),
 * so the trace stays complete if either configuration constant changes.
 */
void BpBTB::generateVCD(sc_trace_file *i_vcd, sc_trace_file *o_vcd) {
    if (o_vcd) {
        sc_trace(o_vcd, i_flush_pipeline, i_flush_pipeline.name());
        sc_trace(o_vcd, i_e, i_e.name());
        sc_trace(o_vcd, i_we, i_we.name());
        sc_trace(o_vcd, i_we_pc, i_we_pc.name());
        sc_trace(o_vcd, i_we_npc, i_we_npc.name());
        sc_trace(o_vcd, i_bp_pc, i_bp_pc.name());
        sc_trace(o_vcd, o_bp_npc, o_bp_npc.name());
        sc_trace(o_vcd, o_bp_exec, o_bp_exec.name());
        std::string pn(name());
        // One pc/npc trace pair per BTB entry (previously hard-coded 0..7).
        for (int i = 0; i < CFG_BTB_SIZE; i++) {
            std::string idx = std::to_string(i);
            sc_trace(o_vcd, r_btb[i].pc, pn + ".btb" + idx + "_pc");
            sc_trace(o_vcd, r_btb[i].npc, pn + ".btb" + idx + "_npc");
        }
        // Debug next-PC chain written by comb() (previously hard-coded 0..4;
        // dbg_npc holds CFG_BP_DEPTH entries — see the loop in comb()).
        for (int i = 0; i < CFG_BP_DEPTH; i++) {
            sc_trace(o_vcd, dbg_npc[i], pn + ".dbg_npc" + std::to_string(i));
        }
    }
}
// Combinational next-state logic: follows a chain of predicted PCs through the
// BTB and computes the updated (possibly shifted) table contents.
void BpBTB::comb() {
    sc_biguint<CFG_BP_DEPTH*CFG_CPU_ADDR_BITS> vb_addr;   // concatenated chain of predicted PCs (slot i = bits [(i+1)*W-1 : i*W])
    sc_uint<CFG_BP_DEPTH> vb_hit;                          // per-depth flag: the BTB supplied this slot
    sc_uint<CFG_CPU_ADDR_BITS> t_addr;
    sc_uint<CFG_BTB_SIZE> vb_pc_equal;                     // per-entry flag: entry.pc matches i_we_pc
    sc_uint<CFG_BTB_SIZE> vb_pc_nshift;                    // per-entry flag: keep entry in place (do not shift)
    sc_uint<CFG_BP_DEPTH> vb_bp_exec;
    bool v_dont_update;
    vb_hit = 0;
    vb_bp_exec = 0;
    // Default next state: keep the current table contents.
    for (int i = 0; i < CFG_BTB_SIZE; i++) {
        v_btb[i] = r_btb[i];
    }
    // Slot 0 of the chain is the requested PC itself.
    vb_addr(CFG_CPU_ADDR_BITS-1,0) = i_bp_pc.read();
    vb_bp_exec[0] = i_e;
    // Walk the chain: look up the previous slot's address in the BTB; on a hit
    // take the stored npc, otherwise fall through to sequential pc + 4.
    for (int i = 1; i < CFG_BP_DEPTH; i++) {
        t_addr = vb_addr(i*CFG_CPU_ADDR_BITS-1, (i-1)*CFG_CPU_ADDR_BITS);
        for (int n = CFG_BTB_SIZE-1; n >= 0; n--) {
            if (t_addr == r_btb[n].pc) {
                vb_addr((i+1)*CFG_CPU_ADDR_BITS-1, i*CFG_CPU_ADDR_BITS) = r_btb[n].npc;
                vb_hit[i] = 1;
                vb_bp_exec[i] = r_btb[n].exec; // Used for: Do not override by pre-decoded jumps
            } else if (vb_hit[i] == 0) {
                vb_addr((i+1)*CFG_CPU_ADDR_BITS-1, i*CFG_CPU_ADDR_BITS) = t_addr + 4;
            }
        }
    }
    // Check whether the write address already has an entry. An entry written by
    // the executor (exec=1) must not be overwritten by a non-executor write.
    v_dont_update = 0;
    vb_pc_equal = 0;
    for (int i = 0; i < CFG_BTB_SIZE; i++) {
        if (r_btb[i].pc == i_we_pc) {
            vb_pc_equal[i] = 1;
            v_dont_update = r_btb[i].exec && !i_e;
        }
    }
    // Entries at or above a matching entry keep their position; the rest shift
    // down by one so the new entry can be inserted at index 0.
    vb_pc_nshift = 0;
    for (int i = 1; i < CFG_BTB_SIZE; i++) {
        vb_pc_nshift[i] = vb_pc_equal[i-1] | vb_pc_nshift[i-1];
    }
    if (i_we && !v_dont_update) {
        // Insert the new entry at the head of the table.
        v_btb[0].exec = i_e;
        v_btb[0].pc = i_we_pc;
        v_btb[0].npc = i_we_npc;
        for (int i = 1; i < CFG_BTB_SIZE; i++) {
            if (vb_pc_nshift[i] == 0) {
                v_btb[i] = r_btb[i - 1];
            } else {
                v_btb[i] = r_btb[i];
            }
        }
    }
    // Clear the whole table on reset (when async reset is disabled) or on a
    // pipeline flush.
    if ((!async_reset_ && !i_nrst.read()) || i_flush_pipeline) {
        for (int i = 0; i < CFG_BTB_SIZE; i++) {
            R_RESET(v_btb[i]);
        }
    }
    // Expose each predicted address for debugging/tracing.
    for (int i = 0; i < CFG_BP_DEPTH; i++) {
        dbg_npc[i] = vb_addr((i+1)*CFG_CPU_ADDR_BITS-1, i*CFG_CPU_ADDR_BITS).to_uint64();
    }
    o_bp_npc = vb_addr;
    o_bp_exec = vb_bp_exec;
}
void BpBTB::registers() {
if (async_reset_ && i_nrst.read() == 0) {
for (int i = 0; i < CFG_BTB_SIZE; i++) {
R_RESET(r_btb[i]);
}
} else {
for (int i = 0; i < CFG_BTB_SIZE; i++) {
r_btb[i] = v_btb[i];
}
}
}
} // namespace debugger
|
apache-2.0
|
sflpro/ms_payment
|
services/services_impl/src/test/java/com/sfl/pms/services/payment/common/impl/auth/CustomerPaymentMethodAuthorizationPaymentServiceImplTest.java
|
13173
|
package com.sfl.pms.services.payment.common.impl.auth;
import com.sfl.pms.persistence.repositories.payment.common.AbstractPaymentRepository;
import com.sfl.pms.persistence.repositories.payment.common.auth.CustomerPaymentMethodAuthorizationPaymentRepository;
import com.sfl.pms.services.customer.model.Customer;
import com.sfl.pms.services.payment.common.AbstractPaymentService;
import com.sfl.pms.services.payment.common.dto.auth.CustomerPaymentMethodAuthorizationPaymentDto;
import com.sfl.pms.services.payment.common.dto.channel.EncryptedPaymentMethodProcessingChannelDto;
import com.sfl.pms.services.payment.common.dto.channel.PaymentProcessingChannelDto;
import com.sfl.pms.services.payment.common.exception.auth.CustomerPaymentMethodAuthorizationPaymentAlreadyExistsException;
import com.sfl.pms.services.payment.common.impl.AbstractPaymentServiceImplTest;
import com.sfl.pms.services.payment.common.model.PaymentState;
import com.sfl.pms.services.payment.common.model.auth.CustomerPaymentMethodAuthorizationPayment;
import com.sfl.pms.services.payment.common.model.channel.PaymentProcessingChannel;
import com.sfl.pms.services.payment.customer.method.authorization.CustomerPaymentMethodAuthorizationRequestService;
import com.sfl.pms.services.payment.customer.method.model.authorization.CustomerPaymentMethodAuthorizationRequest;
import org.easymock.IAnswer;
import org.easymock.Mock;
import org.easymock.TestSubject;
import org.junit.Assert;
import org.junit.Test;
import static org.easymock.EasyMock.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
 * Unit tests for {@code CustomerPaymentMethodAuthorizationPaymentServiceImpl}.
 *
 * User: Ruben Dilanyan
 * Company: SFL LLC
 * Date: 1/11/15
 * Time: 12:29 PM
 */
public class CustomerPaymentMethodAuthorizationPaymentServiceImplTest extends AbstractPaymentServiceImplTest<CustomerPaymentMethodAuthorizationPayment> {

    /* Test subject and mocks */
    @TestSubject
    private CustomerPaymentMethodAuthorizationPaymentServiceImpl customerPaymentMethodAuthorizationPaymentService = new CustomerPaymentMethodAuthorizationPaymentServiceImpl();

    @Mock
    private CustomerPaymentMethodAuthorizationPaymentRepository customerPaymentMethodAuthorizationPaymentRepository;

    @Mock
    private CustomerPaymentMethodAuthorizationRequestService customerPaymentMethodAuthorizationRequestService;

    /* Constructors */
    public CustomerPaymentMethodAuthorizationPaymentServiceImplTest() {
    }

    /* Test methods */

    /**
     * Verifies that createPayment rejects null arguments and DTOs with missing
     * mandatory fields by throwing {@link IllegalArgumentException}.
     */
    @Test
    public void testCreatePaymentWithInvalidArguments() {
        // Test data
        // Long literals use the uppercase 'L' suffix: a lowercase 'l' is easily
        // misread as the digit '1'.
        final Long authRequestId = 1L;
        final CustomerPaymentMethodAuthorizationPaymentDto paymentDto = getServicesImplTestHelper().createCustomerPaymentMethodAuthorizationPaymentDto();
        final PaymentProcessingChannelDto<? extends PaymentProcessingChannel> paymentProcessingChannelDto = getServicesImplTestHelper().createEncryptedPaymentMethodProcessingChannelDto();
        // Reset
        resetAll();
        // Replay
        replayAll();
        // Run test scenario
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(null, paymentDto, paymentProcessingChannelDto);
            fail("Exception should be thrown");
        } catch (final IllegalArgumentException ex) {
            // Expected
        }
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, null, paymentProcessingChannelDto);
            fail("Exception should be thrown");
        } catch (final IllegalArgumentException ex) {
            // Expected
        }
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, paymentDto, null);
            fail("Exception should be thrown");
        } catch (final IllegalArgumentException ex) {
            // Expected
        }
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, new CustomerPaymentMethodAuthorizationPaymentDto(null, paymentDto.getAmount(), paymentDto.getCurrency(), paymentDto.getClientIpAddress()), paymentProcessingChannelDto);
            fail("Exception should be thrown");
        } catch (final IllegalArgumentException ex) {
            // Expected
        }
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, new CustomerPaymentMethodAuthorizationPaymentDto(paymentDto.getPaymentProviderType(), null, paymentDto.getCurrency(), paymentDto.getClientIpAddress()), paymentProcessingChannelDto);
            fail("Exception should be thrown");
        } catch (final IllegalArgumentException ex) {
            // Expected
        }
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, new CustomerPaymentMethodAuthorizationPaymentDto(paymentDto.getPaymentProviderType(), paymentDto.getAmount(), null, paymentDto.getClientIpAddress()), paymentProcessingChannelDto);
            fail("Exception should be thrown");
        } catch (final IllegalArgumentException ex) {
            // Expected
        }
        // Verify
        verifyAll();
    }

    /**
     * Verifies that createPayment fails with the dedicated "already exists"
     * exception when the authorization request already has an associated payment.
     */
    @Test
    public void testCreatePaymentWhenRequestAlreadyHasAssociatedPayment() {
        // Test data
        final Long customerId = 4L;
        final Customer customer = getServicesImplTestHelper().createCustomer();
        customer.setId(customerId);
        final Long authRequestId = 1L;
        final CustomerPaymentMethodAuthorizationRequest authRequest = getServicesImplTestHelper().createCustomerEncryptedPaymentMethodAuthorizationRequest();
        authRequest.setId(authRequestId);
        authRequest.setCustomer(customer);
        final Long existingPaymentId = 2L;
        final CustomerPaymentMethodAuthorizationPayment existingPayment = getServicesImplTestHelper().createCustomerPaymentMethodAuthorizationPayment();
        existingPayment.setId(existingPaymentId);
        final CustomerPaymentMethodAuthorizationPaymentDto paymentDto = getServicesImplTestHelper().createCustomerPaymentMethodAuthorizationPaymentDto();
        final PaymentProcessingChannelDto<? extends PaymentProcessingChannel> paymentProcessingChannelDto = getServicesImplTestHelper().createEncryptedPaymentMethodProcessingChannelDto();
        // Reset
        resetAll();
        // Expectations
        expect(customerPaymentMethodAuthorizationRequestService.getPaymentMethodAuthorizationRequestById(eq(authRequestId))).andReturn(authRequest).once();
        expect(customerPaymentMethodAuthorizationPaymentRepository.findByAuthorizationRequest(eq(authRequest))).andReturn(existingPayment).once();
        getPaymentProcessingChannelHandler(paymentProcessingChannelDto.getType()).assertPaymentProcessingChannelDto(eq(paymentProcessingChannelDto), eq(authRequest.getCustomer()));
        expectLastCall().once();
        // Replay
        replayAll();
        // Run test scenario
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, paymentDto, paymentProcessingChannelDto);
            fail("Exception should be thrown");
        } catch (final CustomerPaymentMethodAuthorizationPaymentAlreadyExistsException ex) {
            // Expected
            assertCustomerPaymentMethodAuthorizationPaymentAlreadyExistsException(ex, authRequestId, existingPaymentId);
        }
        // Verify
        verifyAll();
    }

    /**
     * Verifies that a processing-channel DTO rejected by its handler causes
     * createPayment to propagate the handler's IllegalArgumentException.
     */
    @Test
    public void testCreatePaymentWithInvalidPaymentChannelDto() {
        // Test data
        final Long customerId = 4L;
        final Customer customer = getServicesImplTestHelper().createCustomer();
        customer.setId(customerId);
        final Long authRequestId = 1L;
        final CustomerPaymentMethodAuthorizationRequest authRequest = getServicesImplTestHelper().createCustomerEncryptedPaymentMethodAuthorizationRequest();
        authRequest.setId(authRequestId);
        authRequest.setCustomer(customer);
        final CustomerPaymentMethodAuthorizationPaymentDto paymentDto = getServicesImplTestHelper().createCustomerPaymentMethodAuthorizationPaymentDto();
        final PaymentProcessingChannelDto<? extends PaymentProcessingChannel> paymentProcessingChannelDto = getServicesImplTestHelper().createEncryptedPaymentMethodProcessingChannelDto();
        // Reset
        resetAll();
        // Expectations
        expect(customerPaymentMethodAuthorizationRequestService.getPaymentMethodAuthorizationRequestById(eq(authRequestId))).andReturn(authRequest).once();
        getPaymentProcessingChannelHandler(paymentProcessingChannelDto.getType()).assertPaymentProcessingChannelDto(eq(paymentProcessingChannelDto), eq(authRequest.getCustomer()));
        expectLastCall().andThrow(new IllegalArgumentException()).once();
        // Replay
        replayAll();
        // Run test scenario
        try {
            customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, paymentDto, paymentProcessingChannelDto);
            fail("Exception should be thrown");
        } catch (final IllegalArgumentException ex) {
            // Expected
        }
        // Verify
        verifyAll();
    }

    /**
     * Happy-path test: verifies the created payment's fields, initial state and
     * associations with the request, customer and processing channel.
     */
    @Test
    public void testCreatePayment() {
        // Test data
        final Long authRequestId = 1L;
        final Long customerId = 2L;
        final Customer customer = getServicesImplTestHelper().createCustomer();
        customer.setId(customerId);
        final CustomerPaymentMethodAuthorizationRequest authRequest = getServicesImplTestHelper().createCustomerEncryptedPaymentMethodAuthorizationRequest();
        authRequest.setCustomer(customer);
        authRequest.setId(authRequestId);
        final CustomerPaymentMethodAuthorizationPaymentDto paymentDto = getServicesImplTestHelper().createCustomerPaymentMethodAuthorizationPaymentDto();
        final EncryptedPaymentMethodProcessingChannelDto paymentProcessingChannelDto = getServicesImplTestHelper().createEncryptedPaymentMethodProcessingChannelDto();
        final PaymentProcessingChannel paymentProcessingChannel = getServicesImplTestHelper().createEncryptedPaymentMethodProcessingChannel(paymentProcessingChannelDto);
        // Reset
        resetAll();
        // Expectations
        expect(customerPaymentMethodAuthorizationRequestService.getPaymentMethodAuthorizationRequestById(eq(authRequestId))).andReturn(authRequest).once();
        expect(customerPaymentMethodAuthorizationPaymentRepository.findByAuthorizationRequest(eq(authRequest))).andReturn(null).once();
        // Echo the saved entity back so the assertions below inspect exactly what
        // the service persisted.
        expect(customerPaymentMethodAuthorizationPaymentRepository.save(isA(CustomerPaymentMethodAuthorizationPayment.class))).andAnswer(new IAnswer<CustomerPaymentMethodAuthorizationPayment>() {
            @Override
            public CustomerPaymentMethodAuthorizationPayment answer() throws Throwable {
                return (CustomerPaymentMethodAuthorizationPayment) getCurrentArguments()[0];
            }
        }).once();
        getPaymentProcessingChannelHandler(paymentProcessingChannelDto.getType()).assertPaymentProcessingChannelDto(eq(paymentProcessingChannelDto), eq(customer));
        expectLastCall().once();
        expect(getPaymentProcessingChannelHandler(paymentProcessingChannelDto.getType()).convertPaymentProcessingChannelDto(eq(paymentProcessingChannelDto), eq(customer))).andReturn(paymentProcessingChannel).once();
        // Replay
        replayAll();
        // Run test scenario
        final CustomerPaymentMethodAuthorizationPayment payment = customerPaymentMethodAuthorizationPaymentService.createPayment(authRequestId, paymentDto, paymentProcessingChannelDto);
        getServicesImplTestHelper().assertCustomerPaymentMethodAuthorizationPayment(payment, paymentDto);
        getServicesImplTestHelper().assertPaymentLastState(payment, PaymentState.CREATED, null, 1);
        assertEquals(authRequest, payment.getAuthorizationRequest());
        assertEquals(customer, payment.getCustomer());
        Assert.assertEquals(paymentProcessingChannel, payment.getPaymentProcessingChannel());
        // Verify
        verifyAll();
    }

    /* Utility methods */

    /**
     * Asserts that the "already exists" exception carries both the request id
     * and the id of the pre-existing payment.
     */
    private void assertCustomerPaymentMethodAuthorizationPaymentAlreadyExistsException(final CustomerPaymentMethodAuthorizationPaymentAlreadyExistsException ex, final Long authRequestId, final Long existingPaymentId) {
        assertEquals(authRequestId, ex.getPaymentMethodAuthorizationRequestId());
        assertEquals(existingPaymentId, ex.getExistingPaymentId());
    }

    @Override
    protected AbstractPaymentService<CustomerPaymentMethodAuthorizationPayment> getService() {
        return customerPaymentMethodAuthorizationPaymentService;
    }

    @Override
    protected AbstractPaymentRepository<CustomerPaymentMethodAuthorizationPayment> getRepository() {
        return customerPaymentMethodAuthorizationPaymentRepository;
    }

    @Override
    protected CustomerPaymentMethodAuthorizationPayment getInstance() {
        return getServicesImplTestHelper().createCustomerPaymentMethodAuthorizationPayment();
    }

    @Override
    protected Class<CustomerPaymentMethodAuthorizationPayment> getInstanceClass() {
        return CustomerPaymentMethodAuthorizationPayment.class;
    }
}
|
apache-2.0
|
cloudant/sync-android
|
cloudant-sync-datastore-core/src/main/java/com/cloudant/sync/documentstore/LocalDocument.java
|
1542
|
/*
* Copyright © 2015, 2018 IBM Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.cloudant.sync.documentstore;
/**
 * <p>
 * <b>Note:</b> this class is deprecated and will be moved to an internal package in a future
 * release. For local documents use a {@link DocumentRevision} with an {@code id} prefixed with
 * {@code _local} and a {@code rev} set to {@code null}.
 * </p>
 * <p>
 * A local Document. {@code LocalDocument}s do not have a history, or the concept of revisions
 * </p>
 */
@Deprecated
public class LocalDocument {

    /**
     * The ID of the local document
     */
    public final String docId;

    /**
     * The body of the local document
     */
    public final DocumentBody body;

    /**
     * Creates a local document
     * @param docId The documentId for this document
     * @param body The body of the local document
     */
    public LocalDocument(String docId, DocumentBody body){
        this.docId = docId;
        this.body = body;
    }

    /**
     * Returns a short description of this local document for logging and
     * debugging. Only the document ID is included; the body may be large and
     * is intentionally omitted.
     */
    @Override
    public String toString() {
        return "LocalDocument{docId='" + docId + "'}";
    }
}
|
apache-2.0
|
chrisekelley/zeprs
|
src/zeprs/org/cidrz/webapp/dynasite/struts/action/InactivateOutcomeAction.java
|
1912
|
/*
* Copyright 2003, 2004, 2005, 2006 Research Triangle Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package org.cidrz.webapp.dynasite.struts.action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.cidrz.webapp.dynasite.struts.action.generic.BaseAction;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Struts action intended to inactivate a patient outcome record. The real
 * implementation is currently commented out; the action only reports an error
 * to the user.
 */
public class InactivateOutcomeAction extends BaseAction {

    /**
     * Forwards to the "error" view with a message explaining that outcome
     * inactivation is not implemented.
     */
    protected ActionForward doExecute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
        /*try {
            Long outcomeId = new Long(request.getParameter("id"));
            Patient p = SessionUtil.getInstance(request.getSession()).getPatient();
            Outcome o;
            Set outcomes = p.getOutcomes();
            HttpSession session = request.getSession();
            Site site = SessionUtil.getInstance(session).getClientSettings().getSite();
            for (Iterator iterator = outcomes.iterator(); iterator.hasNext();) {
                o = (Outcome) iterator.next();
                if (o.getId().equals(outcomeId)) {
                    o.setActive(false);
                    PersistenceManagerFactory.getInstance(InactivateOutcomeAction.class).save(o, request.getUserPrincipal(), site);
                    break;
                }
            }
        } catch (Exception e) {
            //todo: log?
        }*/
        // Fixed grammar of the user-facing message ("has not be" -> "has not been").
        request.setAttribute("exception", "This has not been implemented");
        return mapping.findForward("error");
    }
}
|
apache-2.0
|
zhangqiangoffice/Explain-C
|
list0805.cpp
|
581
|
#include <stdio.h>
enum animal { Dog, Cat, Monkey, Invalid};
/* Print the dog's sound (string bytes preserved exactly as in the source). */
void dog(void) {
    printf("ÍúÍú\n");
}
/* Print the cat's sound (string bytes preserved exactly as in the source). */
void cat(void) {
    printf("ß÷~\n");
}
/* Print the monkey's sound (string bytes preserved exactly as in the source). */
void monkey(void) {
    printf("ßóßó\n");
}
enum animal select(void) {
enum animal tmp;
do {
printf("0~~dog 1~~cat 2~~monkey 3~~over:");
scanf("%d", &tmp);
} while (tmp < Dog || tmp > Invalid);
return (tmp);
}
int main(void) {
enum animal selected;
do {
selected = select();
switch (selected ) {
case Dog : dog(); break;
case Cat : cat(); break;
case Monkey : monkey(); break;
}
} while (selected != Invalid);
return (0);
}
|
apache-2.0
|
Cue/hegemon
|
hegemon-core/src/test/java/com/cueup/hegemon/TestUtils.java
|
2009
|
/*
* Copyright 2012 the hegemon authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cueup.hegemon;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.junit.Assert;
import java.util.List;
/**
 * Utilities for tests.
 */
public class TestUtils {

    // Static utility holder; never instantiated.
    private TestUtils() { }

    /**
     * Uncaught-exception handler that records failures from worker threads so
     * the main test thread can report all of them at once.
     */
    private static class ThreadFailureRecorder implements Thread.UncaughtExceptionHandler {
        private final List<String> failures = Lists.newArrayList();

        @Override
        public synchronized void uncaughtException(Thread thread, Throwable throwable) {
            // Capture the thread name plus the full stack trace for later reporting.
            this.failures.add(thread.getName() + ": " + ExceptionUtils.getFullStackTrace(throwable));
        }

        public void assertNoErrors() {
            if (!this.failures.isEmpty()) {
                Assert.fail(Joiner.on("\n\n").join(this.failures));
            }
        }
    }

    /**
     * Runs {@code r} on {@code count} freshly created threads and waits for all
     * of them to finish, failing the test if any thread threw an exception.
     *
     * @param count number of concurrent threads to spawn
     * @param r     the work each thread executes
     * @throws InterruptedException if the calling thread is interrupted while joining
     */
    public static void runConcurrent(int count, Runnable r) throws InterruptedException {
        ThreadFailureRecorder recorder = new ThreadFailureRecorder();
        List<Thread> workers = Lists.newArrayList();
        for (int i = 0; i < count; i++) {
            Thread worker = new Thread(r);
            worker.setName("testThread" + i);
            worker.setUncaughtExceptionHandler(recorder);
            workers.add(worker);
        }
        // Start every thread before joining any, so they actually overlap.
        for (Thread worker : workers) {
            worker.start();
        }
        for (Thread worker : workers) {
            worker.join();
        }
        recorder.assertNoErrors();
    }
}
|
apache-2.0
|
ecosense-au-dk/Karibu-core
|
Karibu-consumer/src/main/java/dk/au/cs/karibu/backend/rabbitmq/package-info.java
|
844
|
/*
* Copyright 2013 Henrik Baerbak Christensen, Aarhus University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package contains delegates that bind karibu roles
* to the RabbitMQ message system.
*
* @author Henrik Baerbak Christensen, Aarhus University
*
*/
package dk.au.cs.karibu.backend.rabbitmq;
|
apache-2.0
|
awsdocs/aws-doc-sdk-examples
|
python/demo_tools/custom_waiter.py
|
4672
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Base class for implementing custom waiters for services that don't already have
prebuilt waiters. This class leverages botocore waiter code.
"""
from enum import Enum
import logging
import botocore.waiter
logger = logging.getLogger(__name__)

# Terminal outcomes a custom waiter can report; built with the functional Enum
# API, equivalent to a two-member class-style Enum.
WaitState = Enum('WaitState', {'SUCCESS': 'success', 'FAILURE': 'failure'})
class CustomWaiter:
    """
    Base class for a custom waiter that leverages botocore's waiter code. Waiters
    poll an operation, with a specified delay between each polling attempt, until
    either an accepted result is returned or the number of maximum attempts is reached.

    To use, implement a subclass that passes the specific operation, arguments,
    and acceptors to the superclass.

    For example, to implement a custom waiter for the transcription client that
    waits for both success and failure outcomes of the get_transcription_job function,
    create a class like the following:

        class TranscribeCompleteWaiter(CustomWaiter):
            def __init__(self, client):
                super().__init__(
                    'TranscribeComplete', 'GetTranscriptionJob',
                    'TranscriptionJob.TranscriptionJobStatus',
                    {'COMPLETED': WaitState.SUCCESS, 'FAILED': WaitState.FAILURE},
                    client)

            def wait(self, job_name):
                self._wait(TranscriptionJobName=job_name)
    """
    def __init__(
            self, name, operation, argument, acceptors, client, delay=10, max_tries=60,
            matcher='path'):
        """
        Subclasses should pass specific operations, arguments, and acceptors to
        their superclass.

        :param name: The name of the waiter. This can be any descriptive string.
        :param operation: The operation to wait for. This must match the casing of
                          the underlying operation model, which is typically in
                          CamelCase.
        :param argument: The dict keys used to access the result of the operation, in
                         dot notation. For example, 'Job.Status' will access
                         result['Job']['Status'].
        :param acceptors: The list of acceptors that indicate the wait is over. These
                          can indicate either success or failure. The acceptor values
                          are compared to the result of the operation after the
                          argument keys are applied.
        :param client: The Boto3 client.
        :param delay: The number of seconds to wait between each call to the operation.
        :param max_tries: The maximum number of tries before exiting.
        :param matcher: The kind of matcher to use.
        """
        self.name = name
        self.operation = operation
        self.argument = argument
        self.client = client
        # Translate the acceptors dict into botocore's waiter-config schema
        # (one acceptor entry per expected-value -> WaitState pair).
        self.waiter_model = botocore.waiter.WaiterModel({
            'version': 2,
            'waiters': {
                name: {
                    "delay": delay,
                    "operation": operation,
                    "maxAttempts": max_tries,
                    "acceptors": [{
                        "state": state.value,
                        "matcher": matcher,
                        "argument": argument,
                        "expected": expected
                    } for expected, state in acceptors.items()]
                }}})
        self.waiter = botocore.waiter.create_waiter_with_client(
            self.name, self.waiter_model, self.client)

    def __call__(self, parsed, **kwargs):
        """
        Handles the after-call event by logging information about the operation and its
        result.

        :param parsed: The parsed response from polling the operation.
        :param kwargs: Not used, but expected by the caller.
        """
        # Walk the dotted path into the parsed response to extract the status.
        status = parsed
        for key in self.argument.split('.'):
            if key.endswith('[]'):
                # A trailing '[]' means the key addresses a list; take its first
                # element. NOTE(review): assumes the list is non-empty — confirm
                # with the callers that use '[]' paths.
                status = status.get(key[:-2])[0]
            else:
                status = status.get(key)
        logger.info(
            "Waiter %s called %s, got %s.", self.name, self.operation, status)

    def _wait(self, **kwargs):
        """
        Registers for the after-call event and starts the botocore wait loop.

        :param kwargs: Keyword arguments that are passed to the operation being polled.
        """
        # Register self as an event handler so __call__ logs each poll result,
        # then unregister once the wait completes.
        event_name = f'after-call.{self.client.meta.service_model.service_name}'
        self.client.meta.events.register(event_name, self)
        self.waiter.wait(**kwargs)
        self.client.meta.events.unregister(event_name, self)
|
apache-2.0
|
brendan-ssw/SSW.RulesSearch
|
SSW.RulesSearch/SSW.RulesSearch.Lucene/LuceneSettings.cs
|
253
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SSW.RulesSearch.Lucene
{
    /// <summary>
    /// Settings for the Lucene search integration.
    /// </summary>
    public class LuceneSettings
    {
        /// <summary>
        /// Path of the directory holding the Lucene index
        /// (presumed from the property name — confirm against the index setup code).
        /// </summary>
        public string IndexDirectory { get; set; }
    }
}
|
apache-2.0
|
mkeesey/guava-for-small-classpaths
|
guava/src/com/google/common/collect/ImmutableSetMultimap.java
|
17459
|
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import javax.annotation.Nullable;
/**
* An immutable {@link SetMultimap} with reliable user-specified key and value
* iteration order. Does not permit null keys or values.
*
* <p>Unlike {@link Multimaps#unmodifiableSetMultimap(SetMultimap)}, which is
* a <i>view</i> of a separate multimap which can still change, an instance of
* {@code ImmutableSetMultimap} contains its own data and will <i>never</i>
* change. {@code ImmutableSetMultimap} is convenient for
* {@code public static final} multimaps ("constant multimaps") and also lets
* you easily make a "defensive copy" of a multimap provided to your class by
* a caller.
*
* <p><b>Note:</b> Although this class is not final, it cannot be subclassed as
* it has no public or protected constructors. Thus, instances of this class
* are guaranteed to be immutable.
*
* <p>See the Guava User Guide article on <a href=
* "http://code.google.com/p/guava-libraries/wiki/ImmutableCollectionsExplained">
* immutable collections</a>.
*
* @author Mike Ward
* @since 2.0 (imported from Google Collections Library)
*/
@GwtCompatible(serializable = true, emulated = true)
public class ImmutableSetMultimap<K, V>
    extends ImmutableMultimap<K, V>
    implements SetMultimap<K, V> {
  /** Returns the empty multimap. */
  // Casting is safe because the multimap will never hold any elements.
  @SuppressWarnings("unchecked")
  public static <K, V> ImmutableSetMultimap<K, V> of() {
    return (ImmutableSetMultimap<K, V>) EmptyImmutableSetMultimap.INSTANCE;
  }
  /**
   * Returns an immutable multimap containing a single entry.
   */
  public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1) {
    ImmutableSetMultimap.Builder<K, V> builder = ImmutableSetMultimap.builder();
    builder.put(k1, v1);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   * Repeated occurrences of an entry (according to {@link Object#equals}) after
   * the first are ignored.
   */
  public static <K, V> ImmutableSetMultimap<K, V> of(K k1, V v1, K k2, V v2) {
    ImmutableSetMultimap.Builder<K, V> builder = ImmutableSetMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   * Repeated occurrences of an entry (according to {@link Object#equals}) after
   * the first are ignored.
   */
  public static <K, V> ImmutableSetMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3) {
    ImmutableSetMultimap.Builder<K, V> builder = ImmutableSetMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   * Repeated occurrences of an entry (according to {@link Object#equals}) after
   * the first are ignored.
   */
  public static <K, V> ImmutableSetMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
    ImmutableSetMultimap.Builder<K, V> builder = ImmutableSetMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    return builder.build();
  }
  /**
   * Returns an immutable multimap containing the given entries, in order.
   * Repeated occurrences of an entry (according to {@link Object#equals}) after
   * the first are ignored.
   */
  public static <K, V> ImmutableSetMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
    ImmutableSetMultimap.Builder<K, V> builder = ImmutableSetMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    builder.put(k5, v5);
    return builder.build();
  }
  // looking for of() with > 5 entries? Use the builder instead.
  /**
   * Returns a new {@link Builder}.
   */
  public static <K, V> Builder<K, V> builder() {
    return new Builder<K, V>();
  }
  /**
   * Multimap for {@link ImmutableSetMultimap.Builder} that maintains key
   * and value orderings and performs better than {@link LinkedHashMultimap}.
   */
  private static class BuilderMultimap<K, V> extends AbstractMultimap<K, V> {
    BuilderMultimap() {
      // LinkedHashMap preserves key insertion order for the builder.
      super(new LinkedHashMap<K, Collection<V>>());
    }
    @Override Collection<V> createCollection() {
      // Linked hash sets preserve value insertion order while deduplicating.
      return Sets.newLinkedHashSet();
    }
    private static final long serialVersionUID = 0;
  }
  /**
   * Multimap for {@link ImmutableSetMultimap.Builder} that sorts keys and
   * maintains value orderings.
   */
  private static class SortedKeyBuilderMultimap<K, V>
      extends AbstractMultimap<K, V> {
    SortedKeyBuilderMultimap(
        Comparator<? super K> keyComparator, Multimap<K, V> multimap) {
      super(new TreeMap<K, Collection<V>>(keyComparator));
      putAll(multimap);
    }
    @Override Collection<V> createCollection() {
      return Sets.newLinkedHashSet();
    }
    private static final long serialVersionUID = 0;
  }
  /**
   * A builder for creating immutable {@code SetMultimap} instances, especially
   * {@code public static final} multimaps ("constant multimaps"). Example:
   * <pre> {@code
   *
   *   static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
   *       new ImmutableSetMultimap.Builder<String, Integer>()
   *           .put("one", 1)
   *           .putAll("several", 1, 2, 3)
   *           .putAll("many", 1, 2, 3, 4, 5)
   *           .build();}</pre>
   *
   * Builder instances can be reused; it is safe to call {@link #build} multiple
   * times to build multiple multimaps in series. Each multimap contains the
   * key-value mappings in the previously created multimaps.
   *
   * @since 2.0 (imported from Google Collections Library)
   */
  public static final class Builder<K, V>
      extends ImmutableMultimap.Builder<K, V> {
    /**
     * Creates a new builder. The returned builder is equivalent to the builder
     * generated by {@link ImmutableSetMultimap#builder}.
     */
    public Builder() {
      builderMultimap = new BuilderMultimap<K, V>();
    }
    /**
     * Adds a key-value mapping to the built multimap if it is not already
     * present.
     */
    @Override public Builder<K, V> put(K key, V value) {
      builderMultimap.put(checkNotNull(key), checkNotNull(value));
      return this;
    }
    /**
     * Adds an entry to the built multimap if it is not already present.
     *
     * @since 11.0
     */
    @Override public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
      builderMultimap.put(
          checkNotNull(entry.getKey()), checkNotNull(entry.getValue()));
      return this;
    }
    @Override public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
      Collection<V> collection = builderMultimap.get(checkNotNull(key));
      for (V value : values) {
        collection.add(checkNotNull(value));
      }
      return this;
    }
    @Override public Builder<K, V> putAll(K key, V... values) {
      return putAll(key, Arrays.asList(values));
    }
    @Override public Builder<K, V> putAll(
        Multimap<? extends K, ? extends V> multimap) {
      for (Entry<? extends K, ? extends Collection<? extends V>> entry
          : multimap.asMap().entrySet()) {
        putAll(entry.getKey(), entry.getValue());
      }
      return this;
    }
    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @Beta @Override
    public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
      this.keyComparator = checkNotNull(keyComparator);
      return this;
    }
    /**
     * Specifies the ordering of the generated multimap's values for each key.
     *
     * <p>If this method is called, the sets returned by the {@code get()}
     * method of the generated multimap and its {@link Multimap#asMap()} view
     * are {@link ImmutableSortedSet} instances. However, serialization does not
     * preserve that property, though it does maintain the key and value
     * ordering.
     *
     * @since 8.0
     */
    // TODO: Make serialization behavior consistent.
    @Beta @Override
    public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
      super.orderValuesBy(valueComparator);
      return this;
    }
    /**
     * Returns a newly-created immutable set multimap.
     */
    @Override public ImmutableSetMultimap<K, V> build() {
      if (keyComparator != null) {
        // A key ordering was requested: rebuild a copy with entries sorted by
        // key, keeping each key's values in their original insertion order.
        Multimap<K, V> sortedCopy = new BuilderMultimap<K, V>();
        List<Map.Entry<K, Collection<V>>> entries = Lists.newArrayList(
            builderMultimap.asMap().entrySet());
        Collections.sort(
            entries,
            Ordering.from(keyComparator).onResultOf(new Function<Entry<K, Collection<V>>, K>() {
              @Override
              public K apply(Entry<K, Collection<V>> entry) {
                return entry.getKey();
              }
            }));
        for (Map.Entry<K, Collection<V>> entry : entries) {
          sortedCopy.putAll(entry.getKey(), entry.getValue());
        }
        builderMultimap = sortedCopy;
      }
      return copyOf(builderMultimap, valueComparator);
    }
  }
  /**
   * Returns an immutable set multimap containing the same mappings as
   * {@code multimap}. The generated multimap's key and value orderings
   * correspond to the iteration ordering of the {@code multimap.asMap()} view.
   * Repeated occurrences of an entry in the multimap after the first are
   * ignored.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying
   * the data when it is safe to do so. The exact circumstances under which a
   * copy will or will not be performed are undocumented and subject to change.
   *
   * @throws NullPointerException if any key or value in {@code multimap} is
   *     null
   */
  public static <K, V> ImmutableSetMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap) {
    return copyOf(multimap, null);
  }
  private static <K, V> ImmutableSetMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap,
      Comparator<? super V> valueComparator) {
    checkNotNull(multimap); // eager for GWT
    if (multimap.isEmpty() && valueComparator == null) {
      return of();
    }
    // Reuse an existing ImmutableSetMultimap when it is a full (non-view) copy.
    if (multimap instanceof ImmutableSetMultimap) {
      @SuppressWarnings("unchecked") // safe since multimap is not writable
      ImmutableSetMultimap<K, V> kvMultimap
          = (ImmutableSetMultimap<K, V>) multimap;
      if (!kvMultimap.isPartialView()) {
        return kvMultimap;
      }
    }
    // Rebuild as an ImmutableMap of ImmutableSets, optionally sorting each
    // value set when a value comparator was supplied.
    ImmutableMap.Builder<K, ImmutableSet<V>> builder = ImmutableMap.builder();
    int size = 0;
    for (Entry<? extends K, ? extends Collection<? extends V>> entry
        : multimap.asMap().entrySet()) {
      K key = entry.getKey();
      Collection<? extends V> values = entry.getValue();
      ImmutableSet<V> set = (valueComparator == null)
          ? ImmutableSet.copyOf(values)
          : ImmutableSortedSet.copyOf(valueComparator, values);
      // Skip keys whose value collections are empty so no empty sets are stored.
      if (!set.isEmpty()) {
        builder.put(key, set);
        size += set.size();
      }
    }
    return new ImmutableSetMultimap<K, V>(
        builder.build(), size, valueComparator);
  }
  // Returned by get() when values are sorted and a missing key is provided.
  private final transient ImmutableSortedSet<V> emptySet;
  ImmutableSetMultimap(ImmutableMap<K, ImmutableSet<V>> map, int size,
      @Nullable Comparator<? super V> valueComparator) {
    super(map, size);
    // Non-null only when values are sorted; see get() for the fallback logic.
    this.emptySet = (valueComparator == null)
        ? null : ImmutableSortedSet.<V>emptySet(valueComparator);
  }
  // views
  /**
   * Returns an immutable set of the values for the given key. If no mappings
   * in the multimap have the provided key, an empty immutable set is returned.
   * The values are in the same order as the parameters used to build this
   * multimap.
   */
  @Override public ImmutableSet<V> get(@Nullable K key) {
    // This cast is safe as its type is known in constructor.
    ImmutableSet<V> set = (ImmutableSet<V>) map.get(key);
    if (set != null) {
      return set;
    } else if (emptySet != null) {
      return emptySet;
    } else {
      return ImmutableSet.<V>of();
    }
  }
  // Lazily-initialized inverse multimap; populated on first call to inverse().
  private transient ImmutableSetMultimap<V, K> inverse;
  /**
   * {@inheritDoc}
   *
   * <p>Because an inverse of a set multimap cannot contain multiple pairs with the same key and
   * value, this method returns an {@code ImmutableSetMultimap} rather than the
   * {@code ImmutableMultimap} specified in the {@code ImmutableMultimap} class.
   *
   * @since 11
   */
  @Beta
  public ImmutableSetMultimap<V, K> inverse() {
    ImmutableSetMultimap<V, K> result = inverse;
    return (result == null) ? (inverse = invert()) : result;
  }
  private ImmutableSetMultimap<V, K> invert() {
    Builder<V, K> builder = builder();
    for (Entry<K, V> entry : entries()) {
      builder.put(entry.getValue(), entry.getKey());
    }
    ImmutableSetMultimap<V, K> invertedMultimap = builder.build();
    // Cache the round trip: the inverse of the inverse is this multimap.
    invertedMultimap.inverse = this;
    return invertedMultimap;
  }
  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   */
  @Override public ImmutableSet<V> removeAll(Object key) {
    throw new UnsupportedOperationException();
  }
  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   */
  @Override public ImmutableSet<V> replaceValues(
      K key, Iterable<? extends V> values) {
    throw new UnsupportedOperationException();
  }
  // Lazily-initialized entry set; populated on first call to entries().
  private transient ImmutableSet<Entry<K, V>> entries;
  /**
   * Returns an immutable collection of all key-value pairs in the multimap.
   * Its iterator traverses the values for the first key, the values for the
   * second key, and so on.
   */
  // TODO(kevinb): Fix this so that two copies of the entries are not created.
  @Override public ImmutableSet<Entry<K, V>> entries() {
    ImmutableSet<Entry<K, V>> result = entries;
    return (result == null)
        ? (entries = ImmutableSet.copyOf(super.entries()))
        : result;
  }
  /**
   * @serialData number of distinct keys, and then for each distinct key: the
   *     key, the number of values for that key, and the key's values
   */
  @GwtIncompatible("java.io.ObjectOutputStream")
  private void writeObject(ObjectOutputStream stream) throws IOException {
    stream.defaultWriteObject();
    Serialization.writeMultimap(this, stream);
  }
  @GwtIncompatible("java.io.ObjectInputStream")
  private void readObject(ObjectInputStream stream)
      throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    // Rebuild the backing map from the (keyCount, then per key: key,
    // valueCount, values...) layout written by writeObject.
    int keyCount = stream.readInt();
    if (keyCount < 0) {
      throw new InvalidObjectException("Invalid key count " + keyCount);
    }
    ImmutableMap.Builder<Object, ImmutableSet<Object>> builder
        = ImmutableMap.builder();
    int tmpSize = 0;
    for (int i = 0; i < keyCount; i++) {
      Object key = stream.readObject();
      int valueCount = stream.readInt();
      if (valueCount <= 0) {
        throw new InvalidObjectException("Invalid value count " + valueCount);
      }
      Object[] array = new Object[valueCount];
      for (int j = 0; j < valueCount; j++) {
        array[j] = stream.readObject();
      }
      ImmutableSet<Object> valueSet = ImmutableSet.copyOf(array);
      // A size mismatch means the stream contained duplicate values for a key.
      if (valueSet.size() != array.length) {
        throw new InvalidObjectException(
            "Duplicate key-value pairs exist for key " + key);
      }
      builder.put(key, valueSet);
      tmpSize += valueCount;
    }
    ImmutableMap<Object, ImmutableSet<Object>> tmpMap;
    try {
      tmpMap = builder.build();
    } catch (IllegalArgumentException e) {
      // Duplicate keys in the stream surface here; rewrap preserving the cause.
      throw (InvalidObjectException)
          new InvalidObjectException(e.getMessage()).initCause(e);
    }
    // Final fields are written reflectively since readObject bypasses the constructor.
    FieldSettersHolder.MAP_FIELD_SETTER.set(this, tmpMap);
    FieldSettersHolder.SIZE_FIELD_SETTER.set(this, tmpSize);
  }
  @GwtIncompatible("not needed in emulated source.")
  private static final long serialVersionUID = 0;
}
|
apache-2.0
|
castagna/dataset-movies
|
src/main/java/com/kasabi/data/movies/dbpedia/DBPediaActorLinker.java
|
1609
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kasabi.data.movies.dbpedia;
import org.openjena.atlas.lib.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hp.hpl.jena.rdf.model.Model;
public class DBPediaActorLinker extends DBPediaBaseLinker {
	private static final Logger log = LoggerFactory.getLogger(DBPediaActorLinker.class);
	/**
	 * Creates a linker that resolves actor names against DBPedia.
	 *
	 * @param base base URI handed to the parent linker
	 */
	public DBPediaActorLinker(String base) {
		super(base);
	}
	/**
	 * Looks up the DBPedia URL for the given actor name.
	 * Any lookup failure is logged and reported as a null result.
	 */
	@Override
	public String getURL(String name) {
		String url = null;
		try {
			Pair<String, Model> lookup = get(httpclient, "actor", name);
			if (lookup.getLeft() != null) {
				url = lookup.getLeft();
			}
		} catch (Exception e) {
			log.error(e.getMessage(), e);
		}
		log.debug("getURL({}) --> {}", name, url);
		return url;
	}
}
|
apache-2.0
|
fhg-fokus-nubomedia/signaling-plane
|
modules/cdn_connector/src/main/java/org/openxsp/cdn/connector/youtube/YoutubeConnector.java
|
1722
|
package org.openxsp.cdn.connector.youtube;
import java.io.File;
import org.openxsp.cdn.connector.CdnConnector;
import org.openxsp.cdn.connector.ConnectorCallback;
import org.openxsp.cdn.connector.ConnectorCallback.ConnectorError;
import org.openxsp.cdn.connector.util.log.Logger;
import org.openxsp.cdn.connector.util.log.LoggerFactory;
import org.vertx.java.core.json.JsonObject;
public class YoutubeConnector implements CdnConnector{
	public static final String
		PARAM_APPLICATION_NAME = "application_name",
		PARAM_AUTH_CONFIG = "auth";
	private static Logger log = LoggerFactory.getLogger(YoutubeConnector.class);
	/**
	 * Logs the given message and reports OperationNotSupported to the
	 * callback (when one was supplied). Shared by all unimplemented operations.
	 */
	private static void rejectUnsupported(String message, ConnectorCallback cb) {
		log.w(message);
		if (cb != null) {
			cb.onError(ConnectorError.OperationNotSupported);
		}
	}
	/** Uploads a local video file to YouTube using the given connector configuration. */
	public void uploadVideoAsFile(JsonObject connectorConfig, File file, ConnectorCallback cb) {
		UploadVideo.upload(connectorConfig, file, cb);
	}
	/** Not implemented yet; always reports OperationNotSupported. */
	public void uploadVideoAsStream(JsonObject connectorConfig, ConnectorCallback cb) {
		rejectUnsupported("Upload stream not yet implemented", cb);
	}
	/** Not implemented yet; always reports OperationNotSupported. */
	public void playVideoStream(JsonObject connectorConfig, String id, ConnectorCallback cb) {
		rejectUnsupported("Play stream not yet implemented", cb);
	}
	/** Not implemented yet; always reports OperationNotSupported. */
	public void downloadVideo(JsonObject connectorConfig, String id, ConnectorCallback cb){
		rejectUnsupported("Download video not yet implemented", cb);
	}
	/** Retrieves the list of videos previously uploaded with this configuration. */
	@Override
	public void getUploadedVideos(JsonObject connectorConfig, ConnectorCallback cb) {
		VideoList.getVideos(connectorConfig, cb);
	}
	/** Deletes the video with the given id. */
	@Override
	public void deleteVideo(JsonObject connectorConfig, String id, ConnectorCallback cb) {
		DeleteVideo.delete(connectorConfig, id, cb);
	}
}
|
apache-2.0
|
cpcloud/arrow
|
cpp/src/arrow/compute/exec/key_hash.cc
|
8915
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "arrow/compute/exec/key_hash.h"
#include <memory.h>
#include <algorithm>
#include <cstdint>
#include "arrow/compute/exec/util.h"
namespace arrow {
namespace compute {
// Final scrambling of one 32-bit hash value: alternating xor-shift and
// multiply rounds so small input differences spread across all output bits.
// Appears to follow the xxHash32 finalizer shape (shifts 15/13/16) — confirm
// against the PRIME32_* constant definitions in key_hash.h.
inline uint32_t Hashing::avalanche_helper(uint32_t acc) {
  acc ^= (acc >> 15);
  acc *= PRIME32_2;
  acc ^= (acc >> 13);
  acc *= PRIME32_3;
  acc ^= (acc >> 16);
  return acc;
}
// Applies the final avalanche mix to each hash value in place.
// When AVX2 is available, full groups of 8 values go through the SIMD kernel;
// the remainder (or everything, without AVX2) uses the scalar helper.
void Hashing::avalanche(int64_t hardware_flags, uint32_t num_keys, uint32_t* hashes) {
  uint32_t first_scalar = 0;
#if defined(ARROW_HAVE_AVX2)
  if (hardware_flags & arrow::internal::CpuInfo::AVX2) {
    uint32_t num_simd = num_keys - (num_keys % 8);
    avalanche_avx2(num_simd, hashes);
    first_scalar = num_simd;
  }
#endif
  for (uint32_t ikey = first_scalar; ikey < num_keys; ++ikey) {
    hashes[ikey] = avalanche_helper(hashes[ikey]);
  }
}
// Folds the four accumulator lanes into a single 32-bit hash. The rotation
// amounts 1/7/12/18 match the lane-merge step of xxHash32 — confirm against
// the PRIME32_* definitions in key_hash.h.
inline uint32_t Hashing::combine_accumulators(const uint32_t acc1, const uint32_t acc2,
                                              const uint32_t acc3, const uint32_t acc4) {
  return ROTL(acc1, 1) + ROTL(acc2, 7) + ROTL(acc3, 12) + ROTL(acc4, 18);
}
// Hashes fixed-length keys of at most 8 bytes each: one 8-byte load per key,
// masked down to key_length bytes, then a multiplicative hash with the top
// bits brought down via byte swap. No avalanche pass is applied here.
inline void Hashing::helper_8B(uint32_t key_length, uint32_t num_keys,
                               const uint8_t* keys, uint32_t* hashes) {
  ARROW_DCHECK(key_length <= 8);
  // Keep only the low key_length bytes of each 8-byte load.
  const uint64_t key_mask = ~0ULL >> (8 * (8 - key_length));
  constexpr uint64_t kMultiplier = 14029467366897019727ULL;
  uint32_t pos = 0;
  for (uint32_t ikey = 0; ikey < num_keys; ++ikey) {
    const uint64_t word = *reinterpret_cast<const uint64_t*>(keys + pos);
    hashes[ikey] = static_cast<uint32_t>(BYTESWAP((word & key_mask) * kMultiplier));
    pos += key_length;
  }
}
// Processes one 16-byte stripe of a key, updating the four accumulator lanes
// in place. mask_hi zeroes the bytes of the second 8-byte word that lie past
// the end of the key (callers pass ~0ULL for full interior stripes).
inline void Hashing::helper_stripe(uint32_t offset, uint64_t mask_hi, const uint8_t* keys,
                                   uint32_t& acc1, uint32_t& acc2, uint32_t& acc3,
                                   uint32_t& acc4) {
  uint64_t v1 = reinterpret_cast<const uint64_t*>(keys + offset)[0];
  // We do not need to mask v1, because we will not process a stripe
  // unless at least 9 bytes of it are part of the key.
  uint64_t v2 = reinterpret_cast<const uint64_t*>(keys + offset)[1];
  v2 &= mask_hi;
  // Split the stripe into four 32-bit lanes, one per accumulator.
  uint32_t x1 = static_cast<uint32_t>(v1);
  uint32_t x2 = static_cast<uint32_t>(v1 >> 32);
  uint32_t x3 = static_cast<uint32_t>(v2);
  uint32_t x4 = static_cast<uint32_t>(v2 >> 32);
  // Per-lane round: multiply-accumulate, rotate by 13, multiply (xxHash-style).
  acc1 += x1 * PRIME32_2;
  acc1 = ROTL(acc1, 13) * PRIME32_1;
  acc2 += x2 * PRIME32_2;
  acc2 = ROTL(acc2, 13) * PRIME32_1;
  acc3 += x3 * PRIME32_2;
  acc3 = ROTL(acc3, 13) * PRIME32_1;
  acc4 += x4 * PRIME32_2;
  acc4 = ROTL(acc4, 13) * PRIME32_1;
}
// Runs the 16-byte stripe rounds for every fixed-length key and writes the
// combined (pre-avalanche) hash per key. Keys whose final fragment is 1-8
// bytes leave that fragment to helper_tails; fragments of 9-15 bytes are
// absorbed here via mask_hi on the last stripe.
void Hashing::helper_stripes(int64_t hardware_flags, uint32_t num_keys,
                             uint32_t key_length, const uint8_t* keys, uint32_t* hash) {
  uint32_t processed = 0;
#if defined(ARROW_HAVE_AVX2)
  if (hardware_flags & arrow::internal::CpuInfo::AVX2) {
    // SIMD kernel handles pairs of keys; leave the odd key to the scalar loop.
    int tail = num_keys % 2;
    helper_stripes_avx2(num_keys - tail, key_length, keys, hash);
    processed = num_keys - tail;
  }
#endif
  // If length modulo stripe length is less than or equal 8, round down to the nearest 16B
  // boundary (8B ending will be processed in a separate function), otherwise round up.
  const uint32_t num_stripes = (key_length + 7) / 16;
  // Zeroes the bytes of the last stripe that lie past the end of the key.
  uint64_t mask_hi =
      ~0ULL >>
      (8 * ((num_stripes * 16 > key_length) ? num_stripes * 16 - key_length : 0));
  for (uint32_t i = processed; i < num_keys; ++i) {
    uint32_t acc1, acc2, acc3, acc4;
    // Initial accumulator lane values (appear to match the xxHash32 seed
    // lanes for seed 0 — confirm against the PRIME32_* definitions).
    acc1 = static_cast<uint32_t>(
        (static_cast<uint64_t>(PRIME32_1) + static_cast<uint64_t>(PRIME32_2)) &
        0xffffffff);
    acc2 = PRIME32_2;
    acc3 = 0;
    acc4 = static_cast<uint32_t>(-static_cast<int32_t>(PRIME32_1));
    uint32_t offset = i * key_length;
    // Interior stripes are always fully inside the key; only the last one
    // needs mask_hi.
    for (uint32_t stripe = 0; stripe < num_stripes - 1; ++stripe) {
      helper_stripe(offset, ~0ULL, keys, acc1, acc2, acc3, acc4);
      offset += 16;
    }
    helper_stripe(offset, mask_hi, keys, acc1, acc2, acc3, acc4);
    hash[i] = combine_accumulators(acc1, acc2, acc3, acc4);
  }
}
// Mixes the final (up to 8-byte) fragment of a key into an existing hash
// accumulator and returns the updated value. mask zeroes the bytes past
// the end of the key in the loaded word.
inline uint32_t Hashing::helper_tail(uint32_t offset, uint64_t mask, const uint8_t* keys,
                                     uint32_t acc) {
  const uint64_t word = reinterpret_cast<const uint64_t*>(keys + offset)[0] & mask;
  const uint32_t halves[2] = {static_cast<uint32_t>(word),
                              static_cast<uint32_t>(word >> 32)};
  // One multiply/rotate/multiply round per 32-bit half, low half first.
  for (uint32_t half : halves) {
    acc += half * PRIME32_3;
    acc = ROTL(acc, 17) * PRIME32_4;
  }
  return acc;
}
// Folds each key's trailing 1-8 byte fragment (the part helper_stripes left
// behind when key_length % 16 is in 1..8) into the per-key hash values.
void Hashing::helper_tails(int64_t hardware_flags, uint32_t num_keys, uint32_t key_length,
                           const uint8_t* keys, uint32_t* hash) {
  uint32_t processed = 0;
#if defined(ARROW_HAVE_AVX2)
  if (hardware_flags & arrow::internal::CpuInfo::AVX2) {
    // SIMD kernel handles groups of 8 keys; the remainder falls through below.
    int tail = num_keys % 8;
    helper_tails_avx2(num_keys - tail, key_length, keys, hash);
    processed = num_keys - tail;
  }
#endif
  // Zeroes the bytes of the loaded 8-byte word that lie past the key end.
  uint64_t mask = ~0ULL >> (8 * (((key_length % 8) == 0) ? 0 : 8 - (key_length % 8)));
  // Byte offset of the tail fragment inside the first unprocessed key:
  // skip the whole 16-byte stripes, then advance past the SIMD-handled keys.
  uint32_t offset = key_length / 16 * 16;
  offset += processed * key_length;
  for (uint32_t i = processed; i < num_keys; ++i) {
    hash[i] = helper_tail(offset, mask, keys, hash[i]);
    offset += key_length;
  }
}
// Hashes num_keys fixed-length keys of length_key bytes each, writing one
// 32-bit hash per key into hashes.
void Hashing::hash_fixed(int64_t hardware_flags, uint32_t num_keys, uint32_t length_key,
                         const uint8_t* keys, uint32_t* hashes) {
  ARROW_DCHECK(length_key > 0);
  if (length_key <= 8) {
    // Short keys fit in one 8-byte word; the multiplicative hash needs no
    // stripe rounds and no avalanche pass.
    helper_8B(length_key, num_keys, keys, hashes);
  } else {
    helper_stripes(hardware_flags, num_keys, length_key, keys, hashes);
    const uint32_t tail_bytes = length_key % 16;
    if (tail_bytes > 0 && tail_bytes <= 8) {
      // A 1-8 byte remainder was skipped by the 16-byte stripe pass.
      helper_tails(hardware_flags, num_keys, length_key, keys, hashes);
    }
    avalanche(hardware_flags, num_keys, hashes);
  }
}
// Absorbs one variable-length key into the four accumulator lanes in acc:
// full 16-byte stripes first, then a masked final stripe for any remainder.
void Hashing::hash_varlen_helper(uint32_t length, const uint8_t* key, uint32_t* acc) {
  for (uint32_t i = 0; i < length / 16; ++i) {
    for (int j = 0; j < 4; ++j) {
      // Per-lane round: multiply-accumulate, rotate 13, multiply.
      uint32_t lane = reinterpret_cast<const uint32_t*>(key)[i * 4 + j];
      acc[j] += (lane * PRIME32_2);
      acc[j] = ROTL(acc[j], 13);
      acc[j] *= PRIME32_1;
    }
  }
  int tail = length % 16;
  if (tail) {
    // Copy the last partial stripe into a local buffer, zeroing the bytes
    // past the end of the key so the final round is deterministic.
    uint64_t last_stripe[2];
    const uint64_t* last_stripe_base =
        reinterpret_cast<const uint64_t*>(key + length - (length % 16));
    last_stripe[0] = last_stripe_base[0];
    // Zeroes the bytes beyond the key inside the last 8-byte word.
    uint64_t mask = ~0ULL >> (8 * ((length + 7) / 8 * 8 - length));
    if (tail <= 8) {
      last_stripe[1] = 0;
      last_stripe[0] &= mask;
    } else {
      last_stripe[1] = last_stripe_base[1];
      last_stripe[1] &= mask;
    }
    // The stack allocation and memcpy here should be optimized out by the compiler.
    // Using a reinterpret_cast causes a compiler warning on gcc and can lead to incorrect
    // results. See https://issues.apache.org/jira/browse/ARROW-13600 for more info.
    uint32_t lanes[4];
    memcpy(&lanes, &last_stripe, sizeof(last_stripe));
    for (int j = 0; j < 4; ++j) {
      uint32_t lane = lanes[j];
      acc[j] += (lane * PRIME32_2);
      acc[j] = ROTL(acc[j], 13);
      acc[j] *= PRIME32_1;
    }
  }
}
// Hashes num_rows variable-length keys laid out back-to-back in
// concatenated_keys, with row i occupying bytes [offsets[i], offsets[i+1]).
// Writes one 32-bit hash per row into hashes. temp_buffer is scratch space
// for the AVX2 path only (4 x 32 bits per row).
void Hashing::hash_varlen(int64_t hardware_flags, uint32_t num_rows,
                          const uint32_t* offsets, const uint8_t* concatenated_keys,
                          uint32_t* temp_buffer,  // Needs to hold 4 x 32-bit per row
                          uint32_t* hashes) {
#if defined(ARROW_HAVE_AVX2)
  if (hardware_flags & arrow::internal::CpuInfo::AVX2) {
    hash_varlen_avx2(num_rows, offsets, concatenated_keys, temp_buffer, hashes);
  } else {
#endif
    // Scalar path: absorb each key into four accumulator lanes, combine,
    // then run the avalanche pass over all hashes.
    for (uint32_t i = 0; i < num_rows; ++i) {
      uint32_t acc[4];
      // Initial lane values (appear to match the xxHash32 seed lanes for
      // seed 0 — confirm against the PRIME32_* definitions in key_hash.h).
      acc[0] = static_cast<uint32_t>(
          (static_cast<uint64_t>(PRIME32_1) + static_cast<uint64_t>(PRIME32_2)) &
          0xffffffff);
      acc[1] = PRIME32_2;
      acc[2] = 0;
      acc[3] = static_cast<uint32_t>(-static_cast<int32_t>(PRIME32_1));
      uint32_t length = offsets[i + 1] - offsets[i];
      hash_varlen_helper(length, concatenated_keys + offsets[i], acc);
      hashes[i] = combine_accumulators(acc[0], acc[1], acc[2], acc[3]);
    }
    avalanche(hardware_flags, num_rows, hashes);
#if defined(ARROW_HAVE_AVX2)
  }
#endif
}
} // namespace compute
} // namespace arrow
|
apache-2.0
|
mikelalcon/bazel
|
src/main/java/com/google/devtools/build/lib/analysis/config/BuildConfiguration.java
|
93506
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.config;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Verify;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ClassToInstanceMap;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.MutableClassToInstanceMap;
import com.google.devtools.build.lib.actions.Root;
import com.google.devtools.build.lib.analysis.AspectDescriptor;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.Dependency;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection.Transitions;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.Attribute.Configurator;
import com.google.devtools.build.lib.packages.Attribute.SplitTransition;
import com.google.devtools.build.lib.packages.Attribute.Transition;
import com.google.devtools.build.lib.packages.InputFile;
import com.google.devtools.build.lib.packages.PackageGroup;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClassProvider;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.rules.test.TestActionBuilder;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
import com.google.devtools.build.lib.util.CPU;
import com.google.devtools.build.lib.util.Fingerprint;
import com.google.devtools.build.lib.util.OS;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.util.RegexFilter;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.common.options.Converter;
import com.google.devtools.common.options.Converters;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParsingException;
import com.google.devtools.common.options.TriState;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.Set;
import java.util.TreeMap;
import javax.annotation.Nullable;
/**
* Instances of BuildConfiguration represent a collection of context
* information which may affect a build (for example: the target platform for
* compilation, or whether or not debug tables are required). In fact, all
* "environmental" information (e.g. from the tool's command-line, as opposed
* to the BUILD file) that can affect the output of any build tool should be
* explicitly represented in the BuildConfiguration instance.
*
* <p>A single build may require building tools to run on a variety of
* platforms: when compiling a server application for production, we must build
* the build tools (like compilers) to run on the host platform, but cross-compile
* the application for the production environment.
*
* <p>There is always at least one BuildConfiguration instance in any build:
* the one representing the host platform. Additional instances may be created,
* in a cross-compilation build, for example.
*
* <p>Instances of BuildConfiguration are canonical:
* <pre>c1.equals(c2) <=> c1==c2.</pre>
*/
@SkylarkModule(name = "configuration",
category = SkylarkModuleCategory.BUILTIN,
doc = "Data required for the analysis of a target that comes from targets that "
+ "depend on it and not targets that it depends on.")
public final class BuildConfiguration {
/**
* An interface for language-specific configurations.
*
* <p>All implementations must be immutable and communicate this as clearly as possible
* (e.g. declare {@link ImmutableList} signatures on their interfaces vs. {@link List}).
* This is because fragment instances may be shared across configurations.
*/
  public abstract static class Fragment {
    /**
     * Validates the options for this Fragment. Issues warnings for the
     * use of deprecated options, and warnings or errors for any option settings
     * that conflict.
     */
    @SuppressWarnings("unused")
    public void reportInvalidOptions(EventHandler reporter, BuildOptions buildOptions) {
    }
    /**
     * Adds mapping of names to values of "Make" variables defined by this configuration.
     */
    @SuppressWarnings("unused")
    public void addGlobalMakeVariables(ImmutableMap.Builder<String, String> globalMakeEnvBuilder) {
    }
    /**
     * Collects all labels that should be implicitly loaded from labels that were specified as
     * options, keyed by the name to be displayed to the user if something goes wrong.
     * The resulting set only contains labels that were derived from command-line options; the
     * intention is that it can be used to sanity-check that the command-line options actually
     * contain these in their transitive closure.
     *
     * <p>This functionality only exists for legacy configuration fragments that compute labels from
     * command-line option values. Don't do that! Instead, use a rule that specifies the mapping
     * explicitly.
     */
    @SuppressWarnings("unused")
    protected void addImplicitLabels(Multimap<String, Label> implicitLabels) {
    }
    /**
     * Returns a multimap of all labels that should be implicitly loaded from labels that were
     * specified as options, keyed by the name to be displayed to the user if something goes wrong.
     * The returned set only contains labels that were derived from command-line options; the
     * intention is that it can be used to sanity-check that the command-line options actually
     * contain these in their transitive closure.
     */
    public final ListMultimap<String, Label> getImplicitLabels() {
      ListMultimap<String, Label> implicitLabels = ArrayListMultimap.create();
      addImplicitLabels(implicitLabels);
      return implicitLabels;
    }
    /**
     * Returns a (key, value) mapping to insert into the subcommand environment for coverage.
     */
    public Map<String, String> getCoverageEnvironment() {
      return ImmutableMap.<String, String>of();
    }
    /**
     * Returns the command-line "Make" variable overrides.
     */
    public ImmutableMap<String, String> getCommandLineDefines() {
      return ImmutableMap.of();
    }
    /**
     * Returns a fragment of the output directory name for this configuration. The output
     * directory for the whole configuration contains all the short names by all fragments.
     */
    @Nullable
    public String getOutputDirectoryName() {
      return null;
    }
    /**
     * The platform name is a concatenation of fragment platform names.
     */
    public String getPlatformName() {
      return "";
    }
    /**
     * Return true if the fragment performs static linking. This information is needed for
     * license checking.
     */
    public boolean performsStaticLink() {
      return false;
    }
    /**
     * Add items to the shell environment.
     */
    @SuppressWarnings("unused")
    public void setupShellEnvironment(ImmutableMap.Builder<String, String> builder) {
    }
    /**
     * Add mappings from generally available tool names (like "sh") to their paths
     * that actions can access.
     */
    @SuppressWarnings("unused")
    public void defineExecutables(ImmutableMap.Builder<String, PathFragment> builder) {
    }
    /**
     * Returns { 'option name': 'alternative default' } entries for options where the
     * "real default" should be something besides the default specified in the {@link Option}
     * declaration.
     */
    public Map<String, Object> lateBoundOptionDefaults() {
      return ImmutableMap.of();
    }
    /**
     * Return set of features enabled by this configuration.
     */
    public ImmutableSet<String> configurationEnabledFeatures(RuleContext ruleContext) {
      return ImmutableSet.of();
    }
  }
private static final Label convertLabel(String input) throws OptionsParsingException {
try {
// Check if the input starts with '/'. We don't check for "//" so that
// we get a better error message if the user accidentally tries to use
// an absolute path (starting with '/') for a label.
if (!input.startsWith("/") && !input.startsWith("@")) {
input = "//" + input;
}
return Label.parseAbsolute(input);
} catch (LabelSyntaxException e) {
throw new OptionsParsingException(e.getMessage());
}
}
/**
* A converter from strings to Labels.
*/
public static class LabelConverter implements Converter<Label> {
@Override
public Label convert(String input) throws OptionsParsingException {
return convertLabel(input);
}
@Override
public String getTypeDescription() {
return "a build target label";
}
}
/**
* A converter that returns null if the input string is empty, otherwise it converts
* the input to a label.
*/
public static class EmptyToNullLabelConverter implements Converter<Label> {
@Override
public Label convert(String input) throws OptionsParsingException {
return input.isEmpty() ? null : convertLabel(input);
}
@Override
public String getTypeDescription() {
return "a build target label";
}
}
/**
* A label converter that returns a default value if the input string is empty.
*/
public static class DefaultLabelConverter implements Converter<Label> {
private final Label defaultValue;
protected DefaultLabelConverter(String defaultValue) {
this.defaultValue = defaultValue.equals("null")
? null
: Label.parseAbsoluteUnchecked(defaultValue);
}
@Override
public Label convert(String input) throws OptionsParsingException {
return input.isEmpty() ? defaultValue : convertLabel(input);
}
@Override
public String getTypeDescription() {
return "a build target label";
}
}
/** TODO(bazel-team): document this */
public static class PluginOptionConverter implements Converter<Map.Entry<String, String>> {
@Override
public Map.Entry<String, String> convert(String input) throws OptionsParsingException {
int index = input.indexOf('=');
if (index == -1) {
throw new OptionsParsingException("Plugin option not in the plugin=option format");
}
String option = input.substring(0, index);
String value = input.substring(index + 1);
return Maps.immutableEntry(option, value);
}
@Override
public String getTypeDescription() {
return "An option for a plugin";
}
}
  /**
   * Converter for --runs_per_test. Accepts either a bare positive integer, which applies
   * to every test, or a "regex_filter@runs" form that applies the run count only to the
   * tests matched by the filter.
   */
  public static class RunsPerTestConverter extends PerLabelOptions.PerLabelOptionsConverter {
    @Override
    public PerLabelOptions convert(String input) throws OptionsParsingException {
      try {
        // Try the plain-integer form first; anything non-numeric falls through to
        // the regex@runs form.
        return parseAsInteger(input);
      } catch (NumberFormatException ignored) {
        return parseAsRegex(input);
      }
    }
    // Parses the bare-integer form into a catch-all (".*") filter with the given run count.
    private PerLabelOptions parseAsInteger(String input)
        throws NumberFormatException, OptionsParsingException {
      int numericValue = Integer.parseInt(input);
      if (numericValue <= 0) {
        throw new OptionsParsingException("'" + input + "' should be >= 1");
      } else {
        RegexFilter catchAll = new RegexFilter(Collections.singletonList(".*"),
            Collections.<String>emptyList());
        return new PerLabelOptions(catchAll, Collections.singletonList(input));
      }
    }
    // Parses the regex@runs form via the superclass, then validates that exactly one
    // run count was given and that it is a positive integer.
    private PerLabelOptions parseAsRegex(String input) throws OptionsParsingException {
      PerLabelOptions testRegexps = super.convert(input);
      if (testRegexps.getOptions().size() != 1) {
        throw new OptionsParsingException(
            "'" + input + "' has multiple runs for a single pattern");
      }
      String runsPerTest = Iterables.getOnlyElement(testRegexps.getOptions());
      try {
        int numericRunsPerTest = Integer.parseInt(runsPerTest);
        if (numericRunsPerTest <= 0) {
          throw new OptionsParsingException("'" + input + "' has a value < 1");
        }
      } catch (NumberFormatException e) {
        throw new OptionsParsingException("'" + input + "' has a non-numeric value", e);
      }
      return testRegexps;
    }
    @Override
    public String getTypeDescription() {
      return "a positive integer or test_regex@runs. This flag may be passed more than once";
    }
  }
  /**
   * Values for the --strict_*_deps option
   */
  public static enum StrictDepsMode {
    /** Silently allow referencing transitive dependencies. */
    OFF,
    /** Warn about transitive dependencies being used directly. */
    WARN,
    /** Fail the build when transitive dependencies are used directly. */
    ERROR,
    /** Transition to strict by default. */
    STRICT,
    /** Sentinel used when no flag value is specified on the command line. */
    DEFAULT
  }
  /**
   * Converter for the --strict_*_deps option; maps flag text to {@link StrictDepsMode}.
   */
  public static class StrictDepsConverter extends EnumConverter<StrictDepsMode> {
    public StrictDepsConverter() {
      // The second argument is the human-readable type description used in help/errors.
      super(StrictDepsMode.class, "strict dependency checking level");
    }
  }
/**
* Converter for default --host_cpu to the auto-detected host cpu.
*
* <p>This detects the host cpu of the Blaze's server but if the compilation happens in a
* compilation cluster then the host cpu of the compilation cluster might be different than
* the auto-detected one and the --host_cpu option must then be set explicitly.
*/
public static class HostCpuConverter implements Converter<String> {
@Override
public String convert(String input) throws OptionsParsingException {
if (input.isEmpty()) {
// TODO(philwo) - replace these deprecated names with more logical ones (e.g. k8 becomes
// linux-x86_64, darwin includes the CPU architecture, ...).
switch (OS.getCurrent()) {
case DARWIN:
return "darwin";
case FREEBSD:
return "freebsd";
case WINDOWS:
switch (CPU.getCurrent()) {
case X86_64:
return "x64_windows";
default:
// We only support x64 Windows for now.
return "unknown";
}
case LINUX:
switch (CPU.getCurrent()) {
case X86_32:
return "piii";
case X86_64:
return "k8";
case PPC:
return "ppc";
case ARM:
return "arm";
default:
return "unknown";
}
default:
return "unknown";
}
}
return input;
}
@Override
public String getTypeDescription() {
return "a string";
}
}
/**
* Options that affect the value of a BuildConfiguration instance.
*
* <p>(Note: any client that creates a view will also need to declare
* BuildView.Options, which affect the <i>mechanism</i> of view construction,
* even if they don't affect the value of the BuildConfiguration instances.)
*
* <p>IMPORTANT: when adding new options, be sure to consider whether those
* values should be propagated to the host configuration or not (see
* {@link ConfigurationFactory#getConfiguration}.
*
* <p>ALSO IMPORTANT: all option types MUST define a toString method that
* gives identical results for semantically identical option values. The
* simplest way to ensure that is to return the input string.
*/
public static class Options extends FragmentOptions implements Cloneable {
public String getCpu() {
return cpu;
}
@Option(name = "cpu",
defaultValue = "null",
category = "semantics",
help = "The target CPU.")
public String cpu;
@Option(name = "min_param_file_size",
defaultValue = "32768",
category = "undocumented",
help = "Minimum command line length before creating a parameter file.")
public int minParamFileSize;
@Option(name = "experimental_extended_sanity_checks",
defaultValue = "false",
category = "undocumented",
help = "Enables internal validation checks to make sure that configured target "
+ "implementations only access things they should. Causes a performance hit.")
public boolean extendedSanityChecks;
@Option(name = "experimental_allow_runtime_deps_on_neverlink",
defaultValue = "true",
category = "undocumented",
help = "Flag to help transition from allowing to disallowing runtime_deps on neverlink"
+ " Java archives. The depot needs to be cleaned up to roll this out by default.")
public boolean allowRuntimeDepsOnNeverLink;
@Option(name = "strict_filesets",
defaultValue = "false",
category = "semantics",
help = "If this option is enabled, filesets crossing package boundaries are reported "
+ "as errors. It does not work when check_fileset_dependencies_recursively is "
+ "disabled.")
public boolean strictFilesets;
// Plugins are build using the host config. To avoid cycles we just don't propagate
// this option to the host config. If one day we decide to use plugins when building
// host tools, we can improve this by (for example) creating a compiler configuration that is
// used only for building plugins.
@Option(name = "plugin",
converter = LabelConverter.class,
allowMultiple = true,
defaultValue = "",
category = "flags",
help = "Plugins to use in the build. Currently works with java_plugin.")
public List<Label> pluginList;
@Option(name = "plugin_copt",
converter = PluginOptionConverter.class,
allowMultiple = true,
category = "flags",
defaultValue = ":",
help = "Plugin options")
public List<Map.Entry<String, String>> pluginCoptList;
@Option(name = "stamp",
defaultValue = "false",
category = "semantics",
help = "Stamp binaries with the date, username, hostname, workspace information, etc.")
public boolean stampBinaries;
// TODO(bazel-team): delete from OSS tree
// This default value is always overwritten in the case of "blaze coverage" by
// CoverageCommand.setDefaultInstrumentationFilter()
@Option(name = "instrumentation_filter",
converter = RegexFilter.RegexFilterConverter.class,
defaultValue = "-javatests,-_test$,-Tests$",
category = "semantics",
help = "When coverage is enabled, only rules with names included by the "
+ "specified regex-based filter will be instrumented. Rules prefixed "
+ "with '-' are excluded instead. By default, rules containing "
+ "'javatests' or ending with '_test' will not be instrumented.")
public RegexFilter instrumentationFilter;
@Option(name = "instrument_test_targets",
defaultValue = "true",
category = "semantics",
help = "When coverage is enabled, specifies whether to consider instrumenting test rules. "
+ "When true (the default), test rules included by --instrumentation_filter are "
+ "instrumented. When false, test rules are always excluded from coverage "
+ "instrumentation.")
public boolean instrumentTestTargets;
@Option(name = "show_cached_analysis_results",
defaultValue = "true",
category = "undocumented",
help = "Bazel reruns a static analysis only if it detects changes in the analysis "
+ "or its dependencies. If this option is enabled, Bazel will show the analysis' "
+ "results, even if it did not rerun the analysis. If this option is disabled, "
+ "Bazel will show analysis results only if it reran the analysis.")
public boolean showCachedAnalysisResults;
@Option(name = "host_cpu",
defaultValue = "",
category = "semantics",
converter = HostCpuConverter.class,
help = "The host CPU.")
public String hostCpu;
@Option(name = "compilation_mode",
abbrev = 'c',
converter = CompilationMode.Converter.class,
defaultValue = "fastbuild",
category = "semantics", // Should this be "flags"?
help = "Specify the mode the binary will be built in. "
+ "Values: 'fastbuild', 'dbg', 'opt'.")
public CompilationMode compilationMode;
/**
* This option is used internally to set output directory name of the <i>host</i> configuration
* to a constant, so that the output files for the host are completely independent of those for
* the target, no matter what options are in force (k8/piii, opt/dbg, etc).
*/
@Option(name = "output directory name", // (Spaces => can't be specified on command line.)
defaultValue = "null",
category = "undocumented")
public String outputDirectoryName;
@Option(name = "platform_suffix",
defaultValue = "null",
category = "misc",
help = "Specifies a suffix to be added to the configuration directory.")
public String platformSuffix;
// TODO(bazel-team): The test environment is actually computed in BlazeRuntime and this option
// is not read anywhere else. Thus, it should be in a different options class, preferably one
// specific to the "test" command or maybe in its own configuration fragment.
// BlazeRuntime, though.
@Option(name = "test_env",
converter = Converters.OptionalAssignmentConverter.class,
allowMultiple = true,
defaultValue = "",
category = "testing",
help = "Specifies additional environment variables to be injected into the test runner "
+ "environment. Variables can be either specified by name, in which case its value "
+ "will be read from the Bazel client environment, or by the name=value pair. "
+ "This option can be used multiple times to specify several variables. "
+ "Used only by the 'bazel test' command."
)
public List<Map.Entry<String, String>> testEnvironment;
@Option(name = "collect_code_coverage",
defaultValue = "false",
category = "testing",
help = "If specified, Bazel will instrument code (using offline instrumentation where "
+ "possible) and will collect coverage information during tests. Only targets that "
+ " match --instrumentation_filter will be affected. Usually this option should "
+ " not be specified directly - 'bazel coverage' command should be used instead."
)
public boolean collectCodeCoverage;
@Option(name = "microcoverage",
defaultValue = "false",
category = "testing",
help = "If specified with coverage, Blaze will collect microcoverage (per test method "
+ "coverage) information during tests. Only targets that match "
+ "--instrumentation_filter will be affected. Usually this option should not be "
+ "specified directly - 'blaze coverage --microcoverage' command should be used "
+ "instead."
)
public boolean collectMicroCoverage;
@Option(name = "coverage_support",
converter = LabelConverter.class,
defaultValue = "@bazel_tools//tools/test:coverage_support",
category = "testing",
help = "Location of support files that are required on the inputs of every test action "
+ "that collects code coverage. Defaults to '//tools/test:coverage_support'.")
public Label coverageSupport;
@Option(name = "coverage_report_generator",
converter = LabelConverter.class,
defaultValue = "@bazel_tools//tools/test:coverage_report_generator",
category = "testing",
help = "Location of the binary that is used to generate coverage reports. This must "
+ "currently be a filegroup that contains a single file, the binary. Defaults to "
+ "'//tools/test:coverage_report_generator'.")
public Label coverageReportGenerator;
@Option(name = "cache_test_results",
defaultValue = "auto",
category = "testing",
abbrev = 't', // it's useful to toggle this on/off quickly
help = "If 'auto', Bazel will only rerun a test if any of the following conditions apply: "
+ "(1) Bazel detects changes in the test or its dependencies "
+ "(2) the test is marked as external "
+ "(3) multiple test runs were requested with --runs_per_test"
+ "(4) the test failed"
+ "If 'yes', the caching behavior will be the same as 'auto' except that "
+ "it may cache test failures and test runs with --runs_per_test."
+ "If 'no', all tests will be always executed.")
public TriState cacheTestResults;
@Deprecated
@Option(name = "test_result_expiration",
defaultValue = "-1", // No expiration by defualt.
category = "testing",
help = "This option is deprecated and has no effect.")
public int testResultExpiration;
@Option(name = "test_sharding_strategy",
defaultValue = "explicit",
category = "testing",
converter = TestActionBuilder.ShardingStrategyConverter.class,
help = "Specify strategy for test sharding: "
+ "'explicit' to only use sharding if the 'shard_count' BUILD attribute is present. "
+ "'disabled' to never use test sharding. "
+ "'experimental_heuristic' to enable sharding on remotely executed tests without an "
+ "explicit 'shard_count' attribute which link in a supported framework. Considered "
+ "experimental.")
public TestActionBuilder.TestShardingStrategy testShardingStrategy;
@Option(name = "runs_per_test",
allowMultiple = true,
defaultValue = "1",
category = "testing",
converter = RunsPerTestConverter.class,
help = "Specifies number of times to run each test. If any of those attempts "
+ "fail for any reason, the whole test would be considered failed. "
+ "Normally the value specified is just an integer. Example: --runs_per_test=3 "
+ "will run all tests 3 times. "
+ "Alternate syntax: regex_filter@runs_per_test. Where runs_per_test stands for "
+ "an integer value and regex_filter stands "
+ "for a list of include and exclude regular expression patterns (Also see "
+ "--instrumentation_filter). Example: "
+ "--runs_per_test=//foo/.*,-//foo/bar/.*@3 runs all tests in //foo/ "
+ "except those under foo/bar three times. "
+ "This option can be passed multiple times. ")
public List<PerLabelOptions> runsPerTest;
@Option(name = "build_runfile_links",
defaultValue = "true",
category = "strategy",
help = "If true, build runfiles symlink forests for all targets. "
+ "If false, write only manifests when possible.")
public boolean buildRunfiles;
@Option(name = "legacy_external_runfiles",
defaultValue = "true",
category = "strategy",
help = "If true, build runfiles symlink forests for external repositories under "
+ ".runfiles/wsname/external/repo (in addition to .runfiles/repo).")
public boolean legacyExternalRunfiles;
@Option(name = "test_arg",
allowMultiple = true,
defaultValue = "",
category = "testing",
help = "Specifies additional options and arguments that should be passed to the test "
+ "executable. Can be used multiple times to specify several arguments. "
+ "If multiple tests are executed, each of them will receive identical arguments. "
+ "Used only by the 'bazel test' command."
)
public List<String> testArguments;
@Option(name = "test_filter",
allowMultiple = false,
defaultValue = "null",
category = "testing",
help = "Specifies a filter to forward to the test framework. Used to limit "
+ "the tests run. Note that this does not affect which targets are built.")
public String testFilter;
@Option(name = "check_fileset_dependencies_recursively",
defaultValue = "true",
category = "semantics",
help = "If false, fileset targets will, whenever possible, create "
+ "symlinks to directories instead of creating one symlink for each "
+ "file inside the directory. Disabling this will significantly "
+ "speed up fileset builds, but targets that depend on filesets will "
+ "not be rebuilt if files are added, removed or modified in a "
+ "subdirectory which has not been traversed.")
public boolean checkFilesetDependenciesRecursively;
@Option(
name = "experimental_skyframe_native_filesets",
defaultValue = "false",
category = "experimental",
help =
"If true, Blaze will use the skyframe-native implementation of the Fileset rule."
+ " This offers improved performance in incremental builds of Filesets as well as"
+ " correct incremental behavior, but is not yet stable. The default is false,"
+ " meaning Blaze uses the legacy impelementation of Fileset."
)
public boolean skyframeNativeFileset;
@Option(
name = "run_under",
category = "run",
defaultValue = "null",
converter = RunUnderConverter.class,
help =
"Prefix to insert in front of command before running. "
+ "Examples:\n"
+ "\t--run_under=valgrind\n"
+ "\t--run_under=strace\n"
+ "\t--run_under='strace -c'\n"
+ "\t--run_under='valgrind --quiet --num-callers=20'\n"
+ "\t--run_under=//package:target\n"
+ "\t--run_under='//package:target --options'\n"
)
public RunUnder runUnder;
@Option(name = "distinct_host_configuration",
defaultValue = "true",
category = "strategy",
help = "Build all the tools used during the build for a distinct configuration from "
+ "that used for the target program. By default, the same configuration is used "
+ "for host and target programs, but this may cause undesirable rebuilds of tool "
+ "such as the protocol compiler (and then everything downstream) whenever a minor "
+ "change is made to the target configuration, such as setting the linker options. "
+ "When this flag is specified, a distinct configuration will be used to build the "
+ "tools, preventing undesired rebuilds. However, certain libraries will then "
+ "need to be compiled twice, once for each configuration, which may cause some "
+ "builds to be slower. As a rule of thumb, this option is likely to benefit "
+ "users that make frequent changes in configuration (e.g. opt/dbg). "
+ "Please read the user manual for the full explanation.")
public boolean useDistinctHostConfiguration;
@Option(name = "check_visibility",
defaultValue = "true",
category = "checking",
help = "If disabled, visibility errors are demoted to warnings.")
public boolean checkVisibility;
// Moved from viewOptions to here because license information is very expensive to serialize.
// Having it here allows us to skip computation of transitive license information completely
// when the setting is disabled.
@Option(name = "check_licenses",
defaultValue = "false",
category = "checking",
help = "Check that licensing constraints imposed by dependent packages "
+ "do not conflict with distribution modes of the targets being built. "
+ "By default, licenses are not checked.")
public boolean checkLicenses;
@Option(name = "experimental_enforce_constraints",
defaultValue = "true",
category = "undocumented",
help = "Checks the environments each target is compatible with and reports errors if any "
+ "target has dependencies that don't support the same environments")
public boolean enforceConstraints;
@Option(name = "experimental_action_listener",
allowMultiple = true,
defaultValue = "",
category = "experimental",
converter = LabelConverter.class,
help = "Use action_listener to attach an extra_action to existing build actions.")
public List<Label> actionListeners;
@Option(name = "is host configuration",
defaultValue = "false",
category = "undocumented",
help = "Shows whether these options are set for host configuration.")
public boolean isHost;
@Option(name = "experimental_proto_header_modules",
defaultValue = "false",
category = "undocumented",
help = "Enables compilation of C++ header modules for proto libraries.")
public boolean protoHeaderModules;
@Option(name = "features",
allowMultiple = true,
defaultValue = "",
category = "flags",
help = "The given features will be enabled or disabled by default for all packages. "
+ "Specifying -<feature> will disable the feature globally. "
+ "Negative features always override positive ones. "
+ "This flag is used to enable rolling out default feature changes without a "
+ "Blaze release.")
public List<String> defaultFeatures;
@Option(name = "target_environment",
converter = LabelConverter.class,
allowMultiple = true,
defaultValue = "",
category = "flags",
help = "Declares this build's target environment. Must be a label reference to an "
+ "\"environment\" rule. If specified, all top-level targets must be "
+ "compatible with this environment."
)
public List<Label> targetEnvironments;
@Option(name = "experimental_dynamic_configs",
defaultValue = "false",
category = "undocumented",
help = "Dynamically instantiates build configurations instead of using the default "
+ "static globally defined ones")
public boolean useDynamicConfigurations;
@Option(
name = "experimental_enable_runfiles",
defaultValue = "auto",
category = "undocumented",
help = "Enable runfiles; off on Windows, on on other platforms"
)
public TriState enableRunfiles;
@Override
public FragmentOptions getHost(boolean fallback) {
Options host = (Options) getDefault();
host.outputDirectoryName = "host";
host.compilationMode = CompilationMode.OPT;
host.isHost = true;
host.useDynamicConfigurations = useDynamicConfigurations;
if (fallback) {
// In the fallback case, we have already tried the target options and they didn't work, so
// now we try the default options; the hostCpu field has the default value, because we use
// getDefault() above.
host.cpu = host.hostCpu;
} else {
host.cpu = hostCpu;
}
// === Runfiles ===
// Ideally we could force this the other way, and skip runfiles construction
// for host tools which are never run locally, but that's probably a very
// small optimization.
host.buildRunfiles = true;
// === Linkstamping ===
// Disable all link stamping for the host configuration, to improve action
// cache hit rates for tools.
host.stampBinaries = false;
// === Visibility ===
host.checkVisibility = checkVisibility;
// === Licenses ===
host.checkLicenses = checkLicenses;
// === Fileset ===
host.skyframeNativeFileset = skyframeNativeFileset;
// === Allow runtime_deps to depend on neverlink Java libraries.
host.allowRuntimeDepsOnNeverLink = allowRuntimeDepsOnNeverLink;
// === Pass on C++ compiler features.
host.defaultFeatures = ImmutableList.copyOf(defaultFeatures);
return host;
}
@Override
public void addAllLabels(Multimap<String, Label> labelMap) {
labelMap.putAll("action_listener", actionListeners);
labelMap.putAll("plugins", pluginList);
if ((runUnder != null) && (runUnder.getLabel() != null)) {
labelMap.put("RunUnder", runUnder.getLabel());
}
}
@Override
public Map<String, Set<Label>> getDefaultsLabels(BuildConfiguration.Options commonOptions) {
return ImmutableMap.<String, Set<Label>>of(
"coverage_support", ImmutableSet.of(coverageSupport),
"coverage_report_generator", ImmutableSet.of(coverageReportGenerator));
}
}
  /**
   * All the output directories pertinent to a configuration.
   */
  private static final class OutputRoots implements Serializable {
    private final Root outputDirectory; // the configuration-specific output directory.
    private final Root binDirectory; // "bin" under outputDirectory.
    private final Root genfilesDirectory; // "genfiles" under outputDirectory.
    private final Root coverageMetadataDirectory; // for coverage-related metadata, artifacts, etc.
    private final Root testLogsDirectory; // "testlogs" under outputDirectory.
    private final Root includeDirectory; // include dir under outputDirectory.
    private final Root middlemanDirectory; // middleman root under outputDirectory.
    private OutputRoots(BlazeDirectories directories, String outputDirName) {
      Path execRoot = directories.getExecRoot();
      // configuration-specific output tree
      Path outputDir = directories.getOutputPath().getRelative(outputDirName);
      this.outputDirectory = Root.asDerivedRoot(execRoot, outputDir);
      // specific subdirs under outputDirectory
      this.binDirectory = Root.asDerivedRoot(execRoot, outputDir.getRelative("bin"));
      this.genfilesDirectory = Root.asDerivedRoot(execRoot, outputDir.getRelative("genfiles"));
      this.coverageMetadataDirectory = Root.asDerivedRoot(execRoot,
          outputDir.getRelative("coverage-metadata"));
      this.testLogsDirectory = Root.asDerivedRoot(execRoot, outputDir.getRelative("testlogs"));
      this.includeDirectory = Root.asDerivedRoot(execRoot,
          outputDir.getRelative(BlazeDirectories.RELATIVE_INCLUDE_DIR));
      this.middlemanDirectory = Root.middlemanRoot(execRoot, outputDir);
    }
    // NOTE(review): both equals() and hashCode() deliberately(?) omit middlemanDirectory.
    // Since every root is derived from the same execRoot/outputDir, equal outputDirectory
    // presumably implies equal middlemanDirectory — confirm before relying on it.
    @Override
    public boolean equals(Object o) {
      if (o == this) {
        return true;
      }
      if (!(o instanceof OutputRoots)) {
        return false;
      }
      OutputRoots other = (OutputRoots) o;
      return outputDirectory.equals(other.outputDirectory)
          && binDirectory.equals(other.binDirectory)
          && genfilesDirectory.equals(other.genfilesDirectory)
          && coverageMetadataDirectory.equals(other.coverageMetadataDirectory)
          && testLogsDirectory.equals(other.testLogsDirectory)
          && includeDirectory.equals(other.includeDirectory);
    }
    @Override
    public int hashCode() {
      return Objects.hash(outputDirectory, binDirectory, genfilesDirectory,
          coverageMetadataDirectory, testLogsDirectory, includeDirectory);
    }
  }
  // Stable identifier for this configuration — presumably a fingerprint of the build
  // options; assigned outside this chunk — TODO confirm derivation.
  private final String checksum;
  // Configuration transitions; mutable — set after construction (setter outside this chunk).
  private Transitions transitions;
  // Lazily computed set of configurations reachable via transitions — confirm at init site.
  private Set<BuildConfiguration> allReachableConfigurations;
  // Immutable map from fragment class to its (shared, immutable) fragment instance.
  private final ImmutableMap<Class<? extends Fragment>, Fragment> fragments;
  // Fragments exposed to Skylark, keyed by their Skylark-visible name.
  private final ImmutableMap<String, Class<? extends Fragment>> skylarkVisibleFragments;
  /**
   * Directories in the output tree.
   *
   * <p>The computation of the output directory should be a non-injective mapping from
   * BuildConfiguration instances to strings. The result should identify the aspects of the
   * configuration that should be reflected in the output file names. Furthermore the
   * returned string must not contain shell metacharacters.
   *
   * <p>For configuration settings which are NOT part of the output directory name,
   * rebuilding with a different value of such a setting will build in
   * the same output directory. This means that any actions whose
   * keys (see Action.getKey()) have changed will be rerun. That
   * may result in a lot of recompilation.
   *
   * <p>For configuration settings which ARE part of the output directory name,
   * rebuilding with a different value of such a setting will rebuild
   * in a different output directory; this will result in higher disk
   * usage and more work the <i>first</i> time you rebuild with a different
   * setting, but will result in less work if you regularly switch
   * back and forth between different settings.
   *
   * <p>With one important exception, it's sound to choose any subset of the
   * config's components for this string, it just alters the dimensionality
   * of the cache. In other words, it's a trade-off on the "injectiveness"
   * scale: at one extreme (output directory name contains all data in the config, and is
   * thus injective) you get extremely precise caching (no competition for the
   * same output-file locations) but you have to rebuild for even the
   * slightest change in configuration. At the other extreme (the output
   * directory name is a constant) you have very high competition for
   * output-file locations, but if a slight change in configuration doesn't
   * affect a particular build step, you're guaranteed not to have to
   * rebuild it. The important exception has to do with multiple configurations: every
   * configuration in the build must have a different output directory name so that
   * their artifacts do not conflict.
   *
   * <p>The host configuration is special-cased: in order to guarantee that its output directory
   * is always separate from that of the target configuration, we simply pin it to "host". We do
   * this so that the build works even if the two configurations are too close (which is common)
   * and so that the path of artifacts in the host configuration is a bit more readable.
   */
  private final OutputRoots outputRoots;
  /** If false, AnalysisEnvironment doesn't register any actions created by the ConfiguredTarget. */
  private final boolean actionsEnabled;
  // TODO(bazel-team): Move this to a configuration fragment.
  private final PathFragment shExecutable;
  /**
   * The global "make variables" such as "$(TARGET_CPU)"; these get applied to all rules analyzed in
   * this configuration.
   */
  private final ImmutableMap<String, String> globalMakeEnv;
  // Shell environment variables for locally executed actions — confirm at init site.
  private final ImmutableMap<String, String> localShellEnvironment;
  // The full set of build options this configuration was created from.
  private final BuildOptions buildOptions;
  // The common (non-fragment) options; see the Options class above.
  private final Options options;
  // Short name for this configuration — presumably used in progress messages; confirm.
  private final String mnemonic;
  // Concatenation of fragment platform names (see Fragment.getPlatformName).
  private final String platformName;
  // Extra environment variables for the test runner (see the --test_env option).
  private final ImmutableMap<String, String> testEnvironment;
/**
* Helper container for {@link #transitiveOptionsMap} below.
*/
private static class OptionDetails implements Serializable {
  // Plain immutable data holder; fields are read directly by the enclosing class.
  private OptionDetails(Class<? extends OptionsBase> optionsClass, Object value,
      boolean allowsMultiple) {
    this.optionsClass = optionsClass;
    this.value = value;
    this.allowsMultiple = allowsMultiple;
  }

  /** The {@link FragmentOptions} class that defines this option. */
  private final Class<? extends OptionsBase> optionsClass;

  /**
   * The value of the given option (either explicitly defined or default). May be null.
   */
  private final Object value;

  /** Whether or not this option supports multiple values. */
  private final boolean allowsMultiple;
}
/**
 * Maps option names to the {@link OptionDetails} the option takes for this configuration.
 *
 * <p>This can be used to:
 * <ol>
 * <li>Find an option's (parsed) value given its command-line name</li>
 * <li>Parse alternative values for the option.</li>
 * </ol>
 *
 * <p>This map is "transitive" in that it includes *all* options recognizable by this
 * configuration, including those defined in child fragments.
 *
 * <p>Built once in the constructor (see {@code computeOptionsMap}) and never mutated afterwards.
 */
private final Map<String, OptionDetails> transitiveOptionsMap;
/**
 * Returns true if this configuration is semantically equal to {@code other}, allowing for the
 * case that {@code other} carries a subset of this configuration's fragments and options.
 *
 * <p>This is useful for dynamic configurations - as the same configuration gets "trimmed" while
 * going down a dependency chain, it's still the same configuration but loses some of its
 * fragments. So we need a more nuanced concept of "equality" than simple reference equality.
 */
public boolean equalsOrIsSupersetOf(BuildConfiguration other) {
  if (this.equals(other)) {
    return true;
  }
  if (other == null) {
    return false;
  }
  // Not equal: accept only if the other is a trimmed-down version of this configuration.
  return outputRoots.equals(other.outputRoots)
      && actionsEnabled == other.actionsEnabled
      && fragments.values().containsAll(other.fragments.values())
      && buildOptions.getOptions().containsAll(other.buildOptions.getOptions());
}
/**
 * Returns map of all the fragments for this configuration.
 *
 * <p>The map is immutable and sorted by fragment class name (see the constructor).
 */
public ImmutableMap<Class<? extends Fragment>, Fragment> getAllFragments() {
  return fragments;
}
/**
 * Validates the options for this BuildConfiguration. Issues warnings for the
 * use of deprecated options, and warnings or errors for any option settings
 * that conflict.
 *
 * @param reporter receives the warning and error events produced by validation
 */
public void reportInvalidOptions(EventHandler reporter) {
  // Let each fragment validate its own option subset first.
  for (Fragment fragment : fragments.values()) {
    fragment.reportInvalidOptions(reporter, this.buildOptions);
  }

  // Plugin names must be unique; Set.add returns false on a duplicate.
  Set<String> plugins = new HashSet<>();
  for (Label plugin : options.pluginList) {
    if (!plugins.add(plugin.getName())) {
      reporter.handle(Event.error("A build cannot have two plugins with the same name"));
    }
  }
  // Every plugin_copt key must name one of the plugins collected above.
  for (Map.Entry<String, String> opt : options.pluginCoptList) {
    if (!plugins.contains(opt.getKey())) {
      reporter.handle(Event.error("A plugin_copt must refer to an existing plugin"));
    }
  }

  // --output directory name is internal-only; reject explicit use.
  if (options.outputDirectoryName != null) {
    reporter.handle(Event.error(
        "The internal '--output directory name' option cannot be used on the command line"));
  }

  if (options.testShardingStrategy
      == TestActionBuilder.TestShardingStrategy.EXPERIMENTAL_HEURISTIC) {
    // Fixed typo in the user-facing message: "determing" -> "determining".
    reporter.handle(Event.warn(
        "Heuristic sharding is intended as a one-off experimentation tool for determining the "
        + "benefit from sharding certain tests. Please don't keep this option in your "
        + ".blazerc or continuous build"));
  }

  if (options.useDynamicConfigurations && !options.useDistinctHostConfiguration) {
    reporter.handle(Event.error(
        "--nodistinct_host_configuration does not currently work with dynamic configurations"));
  }
}
/** Assembles the local shell environment by letting each fragment contribute entries. */
private ImmutableMap<String, String> setupShellEnvironment() {
  ImmutableMap.Builder<String, String> builder = new ImmutableMap.Builder<>();
  for (Fragment fragment : fragments.values()) {
    fragment.setupShellEnvironment(builder);
  }
  return builder.build();
}
/**
 * Sorts fragments by class name. This produces a stable order which, e.g., facilitates
 * consistent output from buildMnemonic.
 */
// Parameterized (was a raw Comparator, which forced an unchecked conversion at the use site)
// and reordered modifiers to the conventional "static final".
private static final Comparator<Class<? extends Fragment>> lexicalFragmentSorter =
    new Comparator<Class<? extends Fragment>>() {
      @Override
      public int compare(Class<? extends Fragment> o1, Class<? extends Fragment> o2) {
        return o1.getName().compareTo(o2.getName());
      }
    };
/**
 * Constructs a new BuildConfiguration instance.
 *
 * <p>Output roots are computed from {@code directories}; see the main constructor.
 */
public BuildConfiguration(BlazeDirectories directories,
    Map<Class<? extends Fragment>, Fragment> fragmentsMap,
    BuildOptions buildOptions,
    boolean actionsDisabled) {
  this(null, directories, fragmentsMap, buildOptions, actionsDisabled);
}
/**
 * Constructor variation that uses the passed in output roots if non-null, else computes them
 * from the directories.
 *
 * <p>NOTE: initialization order below matters — e.g. {@code mnemonic} must be computed before
 * the output roots, and the fragments map before anything derived from it.
 */
public BuildConfiguration(@Nullable OutputRoots outputRoots,
    @Nullable BlazeDirectories directories,
    Map<Class<? extends Fragment>, Fragment> fragmentsMap,
    BuildOptions buildOptions,
    boolean actionsDisabled) {
  // Exactly one of outputRoots and directories must be provided.
  Preconditions.checkState(outputRoots == null ^ directories == null);
  this.actionsEnabled = !actionsDisabled;
  // Sorted copy so fragment iteration order (and everything derived from it) is deterministic.
  this.fragments = ImmutableSortedMap.copyOf(fragmentsMap, lexicalFragmentSorter);
  this.skylarkVisibleFragments = buildIndexOfSkylarkVisibleFragments();
  this.buildOptions = buildOptions;
  this.options = buildOptions.get(Options.class);
  // Keep only test-environment entries with non-null values; TreeMap sorts keys.
  Map<String, String> testEnv = new TreeMap<>();
  for (Map.Entry<String, String> entry : this.options.testEnvironment) {
    if (entry.getValue() != null) {
      testEnv.put(entry.getKey(), entry.getValue());
    }
  }
  this.testEnvironment = ImmutableMap.copyOf(testEnv);
  this.mnemonic = buildMnemonic();
  // The mnemonic doubles as the output directory name unless one was given explicitly.
  String outputDirName = (options.outputDirectoryName != null)
      ? options.outputDirectoryName : mnemonic;
  this.platformName = buildPlatformName();
  this.shExecutable = collectExecutables().get("sh");
  this.outputRoots = outputRoots != null
      ? outputRoots
      : new OutputRoots(directories, outputDirName);
  this.localShellEnvironment = setupShellEnvironment();
  this.transitiveOptionsMap = computeOptionsMap(buildOptions, fragments.values());
  ImmutableMap.Builder<String, String> globalMakeEnvBuilder = ImmutableMap.builder();
  for (Fragment fragment : fragments.values()) {
    fragment.addGlobalMakeVariables(globalMakeEnvBuilder);
  }
  // Lots of packages in third_party assume that BINMODE expands to either "-dbg", or "-opt". So
  // for backwards compatibility we preserve that invariant, setting BINMODE to "-dbg" rather than
  // "-fastbuild" if the compilation mode is "fastbuild".
  // We put the real compilation mode in a new variable COMPILATION_MODE.
  globalMakeEnvBuilder.put("COMPILATION_MODE", options.compilationMode.toString());
  globalMakeEnvBuilder.put("BINMODE", "-"
      + ((options.compilationMode == CompilationMode.FASTBUILD)
          ? "dbg"
          : options.compilationMode.toString()));
  /*
   * Attention! Document these in the build-encyclopedia
   */
  // the bin directory and the genfiles directory
  // These variables will be used on Windows as well, so we need to make sure
  // that paths use the correct system file-separator.
  globalMakeEnvBuilder.put("BINDIR", getBinDirectory().getExecPath().getPathString());
  globalMakeEnvBuilder.put("GENDIR", getGenfilesDirectory().getExecPath().getPathString());
  globalMakeEnv = globalMakeEnvBuilder.build();
  checksum = Fingerprint.md5Digest(buildOptions.computeCacheKey());
}
/**
 * Returns a copy of this configuration only including the given fragments (which the current
 * configuration is assumed to have).
 *
 * <p>The clone shares this configuration's output roots and transitions.
 */
public BuildConfiguration clone(
    Set<Class<? extends BuildConfiguration.Fragment>> fragmentClasses,
    RuleClassProvider ruleClassProvider) {
  // Keep only the requested fragment instances.
  ClassToInstanceMap<Fragment> fragmentsMap = MutableClassToInstanceMap.create();
  for (Fragment fragment : fragments.values()) {
    if (fragmentClasses.contains(fragment.getClass())) {
      fragmentsMap.put(fragment.getClass(), fragment);
    }
  }
  // Trim the options down to those the remaining fragments actually require.
  BuildOptions options = buildOptions.trim(
      getOptionsClasses(fragmentsMap.keySet(), ruleClassProvider));
  BuildConfiguration newConfig =
      new BuildConfiguration(outputRoots, null, fragmentsMap, options, !actionsEnabled);
  newConfig.setConfigurationTransitions(this.transitions);
  return newConfig;
}
/**
 * Returns the config fragment options classes used by the given fragment types.
 */
public static Set<Class<? extends FragmentOptions>> getOptionsClasses(
    Iterable<Class<? extends Fragment>> fragmentClasses, RuleClassProvider ruleClassProvider) {
  // Index each fragment class to the options classes its factory declares as required.
  Multimap<Class<? extends BuildConfiguration.Fragment>, Class<? extends FragmentOptions>>
      fragmentToRequiredOptions = ArrayListMultimap.create();
  for (ConfigurationFragmentFactory fragmentLoader :
      ((ConfiguredRuleClassProvider) ruleClassProvider).getConfigurationFragments()) {
    fragmentToRequiredOptions.putAll(fragmentLoader.creates(),
        fragmentLoader.requiredOptions());
  }
  // Union of the required options over all requested fragment classes.
  Set<Class<? extends FragmentOptions>> options = new HashSet<>();
  for (Class<? extends BuildConfiguration.Fragment> fragmentClass : fragmentClasses) {
    options.addAll(fragmentToRequiredOptions.get(fragmentClass));
  }
  return options;
}
/** Indexes the fragments that have a Skylark-resolvable name by that name. */
private ImmutableMap<String, Class<? extends Fragment>> buildIndexOfSkylarkVisibleFragments() {
  ImmutableMap.Builder<String, Class<? extends Fragment>> index = ImmutableMap.builder();
  for (Class<? extends Fragment> fragmentClass : fragments.keySet()) {
    String skylarkName = SkylarkModule.Resolver.resolveName(fragmentClass);
    if (skylarkName == null) {
      // Fragment is not visible from Skylark; leave it out of the index.
      continue;
    }
    index.put(skylarkName, fragmentClass);
  }
  return index.build();
}
/**
 * Computes and returns the transitive optionName -> "option info" map for
 * this configuration.
 *
 * <p>Reads every {@code @Option}-annotated public field of every options class reflectively;
 * a null field value falls back to the fragment-provided late-bound default, then to the
 * option's declared default (unless that is the sentinel string "null").
 */
private static Map<String, OptionDetails> computeOptionsMap(BuildOptions buildOptions,
    Iterable<Fragment> fragments) {
  // Collect from our fragments "alternative defaults" for options where the default
  // should be something other than what's specified in Option.defaultValue.
  Map<String, Object> lateBoundDefaults = Maps.newHashMap();
  for (Fragment fragment : fragments) {
    lateBoundDefaults.putAll(fragment.lateBoundOptionDefaults());
  }

  ImmutableMap.Builder<String, OptionDetails> map = ImmutableMap.builder();
  try {
    for (FragmentOptions options : buildOptions.getOptions()) {
      for (Field field : options.getClass().getFields()) {
        if (field.isAnnotationPresent(Option.class)) {
          Option option = field.getAnnotation(Option.class);
          Object value = field.get(options);
          if (value == null) {
            if (lateBoundDefaults.containsKey(option.name())) {
              value = lateBoundDefaults.get(option.name());
            } else if (!option.defaultValue().equals("null")) {
              // See {@link Option#defaultValue} for an explanation of default "null" strings.
              value = option.defaultValue();
            }
          }
          map.put(option.name(),
              new OptionDetails(options.getClass(), value, option.allowMultiple()));
        }
      }
    }
  } catch (IllegalAccessException e) {
    // Fields come from getFields(), which only returns public members, so this is unexpected.
    throw new IllegalStateException(
        "Unexpected illegal access trying to create this configuration's options map: ", e);
  }
  return map.build();
}
/**
 * Builds the configuration mnemonic: each fragment's output directory name (nulls skipped)
 * followed by the compilation mode plus platform suffix, joined with '-'.
 */
private String buildMnemonic() {
  // See explanation at getShortName().
  String platformSuffix = (options.platformSuffix != null) ? options.platformSuffix : "";
  ArrayList<String> nameParts = new ArrayList<>();
  for (Fragment fragment : fragments.values()) {
    nameParts.add(fragment.getOutputDirectoryName());
  }
  nameParts.add(getCompilationMode() + platformSuffix);
  return Joiner.on('-').skipNulls().join(nameParts);
}
/** Concatenates every fragment's platform name into the configuration's platform string. */
private String buildPlatformName() {
  StringBuilder combined = new StringBuilder();
  for (Fragment fragment : fragments.values()) {
    combined.append(fragment.getPlatformName());
  }
  return combined.toString();
}
/**
 * Set the outgoing configuration transitions. During the lifetime of a given build configuration,
 * this must happen exactly once, shortly after the configuration is created.
 */
public void setConfigurationTransitions(Transitions transitions) {
  // TODO(bazel-team): This method makes the object mutable - get rid of it. Dynamic
  // configurations should eventually make this obsolete.
  Preconditions.checkNotNull(transitions);
  // Enforce the set-exactly-once contract.
  Preconditions.checkState(this.transitions == null);
  this.transitions = transitions;
}

/** Returns the transitions set via {@link #setConfigurationTransitions}; null until then. */
public Transitions getTransitions() {
  return transitions;
}
/**
 * Returns all configurations that can be reached from this configuration through any kind of
 * configuration transition.
 *
 * <p>Synchronized to guard the lazy initialization of the cached result.
 */
public synchronized Collection<BuildConfiguration> getAllReachableConfigurations() {
  if (allReachableConfigurations == null) {
    // This is needed for every configured target in skyframe m2, so we cache it.
    // We could alternatively make the corresponding dependencies into a skyframe node.
    this.allReachableConfigurations = computeAllReachableConfigurations();
  }
  return allReachableConfigurations;
}
/**
 * Computes the set of all configurations reachable from this one through any chain of
 * configuration transitions, via a breadth-first traversal. Insertion order is preserved.
 */
private Set<BuildConfiguration> computeAllReachableConfigurations() {
  Set<BuildConfiguration> reached = new LinkedHashSet<>();
  Queue<BuildConfiguration> toVisit = new LinkedList<>();
  toVisit.add(this);
  while (!toVisit.isEmpty()) {
    BuildConfiguration config = toVisit.remove();
    // Only expand configurations we haven't seen before.
    if (reached.add(config)) {
      config.getTransitions().addDirectlyReachableConfigurations(toVisit);
    }
  }
  return reached;
}
/**
 * Returns the new configuration after traversing a dependency edge with a given configuration
 * transition.
 *
 * @param transition the configuration transition
 * @return the new configuration
 * @throws IllegalArgumentException if the transition is a {@link SplitTransition}
 *
 * TODO(bazel-team): remove this as part of the static -> dynamic configuration migration
 */
public BuildConfiguration getConfiguration(Transition transition) {
  Preconditions.checkArgument(!(transition instanceof SplitTransition));
  // The below call precondition-checks we're indeed using static configurations.
  return transitions.getStaticConfiguration(transition);
}

/**
 * Returns the new configurations after traversing a dependency edge with a given split
 * transition.
 *
 * @param transition the split configuration transition
 * @return the new configurations
 */
public List<BuildConfiguration> getSplitConfigurations(SplitTransition<?> transition) {
  // Delegates to the Transitions object set via setConfigurationTransitions().
  return transitions.getSplitConfigurations(transition);
}
/**
 * A common interface for static vs. dynamic configuration implementations that allows
 * common configuration and transition-selection logic to seamlessly work with either.
 *
 * <p>The basic role of this interface is to "accept" a desired transition and produce
 * an actual configuration change from it in an implementation-appropriate way.
 */
public interface TransitionApplier {
  /**
   * Creates a new instance of this transition applier bound to the specified source
   * configuration.
   */
  TransitionApplier create(BuildConfiguration config);

  /**
   * Accepts the given configuration transition. The implementation decides how to turn
   * this into an actual configuration. This may be called multiple times (representing a
   * request for a sequence of transitions).
   */
  void applyTransition(Transition transition);

  /**
   * Accepts the given split transition. The implementation decides how to turn this into
   * actual configurations.
   */
  void split(SplitTransition<?> splitTransition);

  /**
   * Returns whether or not all configuration(s) represented by the current state of this
   * instance are null.
   */
  boolean isNull();

  /**
   * Applies the given attribute configurator to the current configuration(s).
   */
  void applyAttributeConfigurator(Attribute attribute, Rule fromRule, Target toTarget);

  /**
   * Calls {@link Transitions#configurationHook} on the current configuration(s) represented by
   * this instance.
   */
  void applyConfigurationHook(Rule fromRule, Attribute attribute, Target toTarget);

  /**
   * Returns the underlying {@link Transitions} object for this instance's current configuration.
   * Does not work for split configurations.
   */
  Transitions getCurrentTransitions();

  /**
   * Populates a {@link com.google.devtools.build.lib.analysis.Dependency}
   * for each configuration represented by this instance.
   * TODO(bazel-team): this is a really ugly reverse dependency: factor this away.
   */
  Iterable<Dependency> getDependencies(Label label, ImmutableSet<AspectDescriptor> aspects);
}
/**
 * Transition applier for static configurations. This implementation populates
 * {@link com.google.devtools.build.lib.analysis.Dependency} objects with
 * actual configurations.
 *
 * <p>Does not support split transitions (see {@link SplittableTransitionApplier}).
 * TODO(bazel-team): remove this when dynamic configurations are fully production-ready.
 */
private static class StaticTransitionApplier implements TransitionApplier {
  // The configuration after the transitions applied so far; null once a NULL transition is seen.
  BuildConfiguration currentConfiguration;

  private StaticTransitionApplier(BuildConfiguration originalConfiguration) {
    this.currentConfiguration = originalConfiguration;
  }

  @Override
  public TransitionApplier create(BuildConfiguration configuration) {
    return new StaticTransitionApplier(configuration);
  }

  @Override
  public void applyTransition(Transition transition) {
    if (transition == Attribute.ConfigurationTransition.NULL) {
      currentConfiguration = null;
    } else {
      // Static semantics: resolve the transition to a concrete configuration immediately.
      currentConfiguration =
          currentConfiguration.getTransitions().getStaticConfiguration(transition);
    }
  }

  @Override
  public void split(SplitTransition<?> splitTransition) {
    throw new UnsupportedOperationException("This only works with SplittableTransitionApplier");
  }

  @Override
  public boolean isNull() {
    return currentConfiguration == null;
  }

  @Override
  public void applyAttributeConfigurator(Attribute attribute, Rule fromRule, Target toTarget) {
    @SuppressWarnings("unchecked")
    Configurator<BuildConfiguration, Rule> configurator =
        (Configurator<BuildConfiguration, Rule>) attribute.getConfigurator();
    Verify.verifyNotNull(configurator);
    // The configurator may replace the current configuration outright.
    currentConfiguration =
        configurator.apply(fromRule, currentConfiguration, attribute, toTarget);
  }

  @Override
  public void applyConfigurationHook(Rule fromRule, Attribute attribute, Target toTarget) {
    currentConfiguration.getTransitions().configurationHook(fromRule, attribute, toTarget, this);

    // Allow rule classes to override their own configurations.
    Rule associatedRule = toTarget.getAssociatedRule();
    if (associatedRule != null) {
      @SuppressWarnings("unchecked")
      RuleClass.Configurator<BuildConfiguration, Rule> func =
          associatedRule.getRuleClassObject().<BuildConfiguration, Rule>getConfigurator();
      currentConfiguration = func.apply(associatedRule, currentConfiguration);
    }
  }

  @Override
  public Transitions getCurrentTransitions() {
    return currentConfiguration.getTransitions();
  }

  @Override
  public Iterable<Dependency> getDependencies(
      Label label, ImmutableSet<AspectDescriptor> aspects) {
    // Exactly one dependency: either the resolved configuration or the null configuration.
    return ImmutableList.of(
        currentConfiguration != null
            ? Dependency.withConfigurationAndAspects(label, currentConfiguration, aspects)
            : Dependency.withNullConfiguration(label));
  }
}
/**
 * Transition applier for dynamic configurations. This implementation populates
 * {@link com.google.devtools.build.lib.analysis.Dependency} objects with
 * transition definitions that the caller subsequently creates configurations out of.
 *
 * <p>Does not support split transitions (see {@link SplittableTransitionApplier}).
 */
private static class DynamicTransitionApplier implements TransitionApplier {
  private final BuildConfiguration originalConfiguration;
  // The accumulated transition; stays NONE until applyTransition records something else.
  private Transition transition = Attribute.ConfigurationTransition.NONE;

  private DynamicTransitionApplier(BuildConfiguration originalConfiguration) {
    this.originalConfiguration = originalConfiguration;
  }

  @Override
  public TransitionApplier create(BuildConfiguration configuration) {
    return new DynamicTransitionApplier(configuration);
  }

  @Override
  public void applyTransition(Transition transition) {
    if (transition == Attribute.ConfigurationTransition.NONE) {
      return;
    } else if (this.transition != HostTransition.INSTANCE) {
      // We don't currently support composed transitions (e.g. applyTransitions shouldn't be
      // called multiple times). We can add support for this if needed by simply storing a list of
      // transitions instead of a single transition. But we only want to do that if really
      // necessary - if we can simplify BuildConfiguration's transition logic to not require
      // scenarios like that, it's better to keep this simpler interface.
      //
      // The HostTransition exemption is because of limited cases where composition can
      // occur. See relevant comments beginning with "BuildConfiguration.applyTransition NOTE"
      // in the transition logic code if available.

      // Ensure we don't already have any mutating transitions registered.
      // Note that for dynamic configurations, LipoDataTransition is equivalent to NONE. That's
      // because dynamic transitions don't work with LIPO, so there's no LIPO context to change.
      Verify.verify(this.transition == Attribute.ConfigurationTransition.NONE
          || this.transition.toString().contains("LipoDataTransition"));
      this.transition = getCurrentTransitions().getDynamicTransition(transition);
    }
  }

  @Override
  public void split(SplitTransition<?> splitTransition) {
    throw new UnsupportedOperationException("This only works with SplittableTransitionApplier");
  }

  @Override
  public boolean isNull() {
    return transition == Attribute.ConfigurationTransition.NULL;
  }

  @Override
  public void applyAttributeConfigurator(Attribute attribute, Rule fromRule, Target toTarget) {
    // We don't support meaningful attribute configurators (since they produce configurations,
    // and we're only interested in generating transitions so the calling code can realize
    // configurations from them). So just check that the configurator is just a no-op.
    @SuppressWarnings("unchecked")
    Configurator<BuildConfiguration, Rule> configurator =
        (Configurator<BuildConfiguration, Rule>) attribute.getConfigurator();
    Verify.verifyNotNull(configurator);
    BuildConfiguration toConfiguration =
        configurator.apply(fromRule, originalConfiguration, attribute, toTarget);
    Verify.verify(toConfiguration == originalConfiguration);
  }

  @Override
  public void applyConfigurationHook(Rule fromRule, Attribute attribute, Target toTarget) {
    if (isNull()) {
      return;
    }
    getCurrentTransitions().configurationHook(fromRule, attribute, toTarget, this);

    // We don't support rule class configurators (which might imply composed transitions).
    // The only current use of that is LIPO, which can't currently be invoked with dynamic
    // configurations (e.g. this code can never get called for LIPO builds). So check that
    // if there is a configurator, it's for LIPO, in which case we can ignore it.
    Rule associatedRule = toTarget.getAssociatedRule();
    if (associatedRule != null) {
      @SuppressWarnings("unchecked")
      RuleClass.Configurator<?, ?> func =
          associatedRule.getRuleClassObject().getConfigurator();
      Verify.verify(func == RuleClass.NO_CHANGE || func.getCategory().equals("lipo"));
    }
  }

  @Override
  public Transitions getCurrentTransitions() {
    return originalConfiguration.getTransitions();
  }

  @Override
  public Iterable<Dependency> getDependencies(
      Label label, ImmutableSet<AspectDescriptor> aspects) {
    // Dynamic semantics: hand back the transition itself; the caller realizes the configuration.
    return ImmutableList.of(
        Dependency.withTransitionAndAspects(label, transition, aspects));
  }
}
/**
 * Transition applier that wraps an underlying implementation with added support for
 * split transitions. All external calls into BuildConfiguration should use this applier.
 */
private static class SplittableTransitionApplier implements TransitionApplier {
  // One underlying applier per configuration; starts as a singleton until split() is called.
  private List<TransitionApplier> appliers;

  private SplittableTransitionApplier(TransitionApplier original) {
    appliers = ImmutableList.of(original);
  }

  @Override
  public TransitionApplier create(BuildConfiguration configuration) {
    throw new UnsupportedOperationException("Not intended to be wrapped under another applier");
  }

  @Override
  public void applyTransition(Transition transition) {
    // Fan the transition out to every underlying applier.
    for (TransitionApplier applier : appliers) {
      applier.applyTransition(transition);
    }
  }

  @Override
  public void split(SplitTransition<?> splitTransition) {
    // Splitting is only legal before any prior split: there must be exactly one applier.
    TransitionApplier originalApplier = Iterables.getOnlyElement(appliers);
    ImmutableList.Builder<TransitionApplier> splitAppliers = ImmutableList.builder();
    for (BuildConfiguration splitConfig :
        originalApplier.getCurrentTransitions().getSplitConfigurations(splitTransition)) {
      splitAppliers.add(originalApplier.create(splitConfig));
    }
    appliers = splitAppliers.build();
  }

  @Override
  public boolean isNull() {
    throw new UnsupportedOperationException("Only for use from a Transitions instance");
  }

  @Override
  public void applyAttributeConfigurator(Attribute attribute, Rule fromRule, Target toTarget) {
    for (TransitionApplier applier : appliers) {
      applier.applyAttributeConfigurator(attribute, fromRule, toTarget);
    }
  }

  @Override
  public void applyConfigurationHook(Rule fromRule, Attribute attribute, Target toTarget) {
    for (TransitionApplier applier : appliers) {
      applier.applyConfigurationHook(fromRule, attribute, toTarget);
    }
  }

  @Override
  public Transitions getCurrentTransitions() {
    throw new UnsupportedOperationException("Only for use from a Transitions instance");
  }

  @Override
  public Iterable<Dependency> getDependencies(
      Label label, ImmutableSet<AspectDescriptor> aspects) {
    // Concatenate the dependencies of all underlying appliers.
    ImmutableList.Builder<Dependency> builder = ImmutableList.builder();
    for (TransitionApplier applier : appliers) {
      builder.addAll(applier.getDependencies(label, aspects));
    }
    return builder.build();
  }
}
/**
 * Returns the {@link TransitionApplier} that should be passed to {#evaluateTransition} calls:
 * a split-capable wrapper around the static or dynamic applier, as appropriate.
 */
public TransitionApplier getTransitionApplier() {
  TransitionApplier base;
  if (useDynamicConfigurations()) {
    base = new DynamicTransitionApplier(this);
  } else {
    base = new StaticTransitionApplier(this);
  }
  return new SplittableTransitionApplier(base);
}
/**
 * Returns true if the given target uses a null configuration, false otherwise. Consider
 * this method the "source of truth" for determining this.
 */
public static boolean usesNullConfiguration(Target target) {
  // Input files and package groups are the only target kinds without configurations.
  return target instanceof InputFile || target instanceof PackageGroup;
}
/**
 * Calculates the configurations of a direct dependency. If a rule in some BUILD file refers
 * to a target (like another rule or a source file) using a label attribute, that target needs
 * to have a configuration, too. This method figures out the proper configuration for the
 * dependency.
 *
 * <p>NOTE: the special cases below are checked in order; their ordering is significant.
 *
 * @param fromRule the rule that's depending on some target
 * @param attribute the attribute using which the rule depends on that target (eg. "srcs")
 * @param toTarget the target that's depended on
 * @param transitionApplier the transition applier to accept transitions requests
 */
public void evaluateTransition(final Rule fromRule, final Attribute attribute,
    final Target toTarget, TransitionApplier transitionApplier) {
  // Fantastic configurations and where to find them:

  // I. Input files and package groups have no configurations. We don't want to duplicate them.
  if (usesNullConfiguration(toTarget)) {
    transitionApplier.applyTransition(Attribute.ConfigurationTransition.NULL);
    return;
  }

  // II. Host configurations never switch to another. All prerequisites of host targets have the
  // same host configuration.
  if (isHostConfiguration()) {
    transitionApplier.applyTransition(Attribute.ConfigurationTransition.NONE);
    return;
  }

  // Make sure config_setting dependencies are resolved in the referencing rule's configuration,
  // unconditionally. For example, given:
  //
  // genrule(
  //     name = 'myrule',
  //     tools = select({ '//a:condition': [':sometool'] })
  //
  // all labels in "tools" get resolved in the host configuration (since the "tools" attribute
  // declares a host configuration transition). We want to explicitly exclude configuration labels
  // from these transitions, since their *purpose* is to do computation on the owning
  // rule's configuration.
  // TODO(bazel-team): don't require special casing here. This is far too hackish.
  if (toTarget instanceof Rule
      && ((Rule) toTarget).getRuleClass().equals(ConfigRuleClasses.ConfigSettingRule.RULE_NAME)) {
    transitionApplier.applyTransition(Attribute.ConfigurationTransition.NONE); // Unnecessary.
    return;
  }

  if (attribute.hasSplitConfigurationTransition()) {
    // Split transitions are mutually exclusive with attribute configurators.
    Preconditions.checkState(attribute.getConfigurator() == null);
    transitionApplier.split(attribute.getSplitTransition(fromRule));
  } else {
    // III. Attributes determine configurations. The configuration of a prerequisite is determined
    // by the attribute.
    @SuppressWarnings("unchecked")
    Configurator<BuildConfiguration, Rule> configurator =
        (Configurator<BuildConfiguration, Rule>) attribute.getConfigurator();
    if (configurator != null) {
      transitionApplier.applyAttributeConfigurator(attribute, fromRule, toTarget);
    } else {
      transitionApplier.applyTransition(attribute.getConfigurationTransition());
    }
  }

  // Always give the rule's own transitions hook a chance to run last.
  transitionApplier.applyConfigurationHook(fromRule, attribute, toTarget);
}
/**
 * For a given environment, returns a subset containing all
 * variables in the given list if they are defined in the given
 * environment.
 */
@VisibleForTesting
static Map<String, String> getMapping(List<String> variables,
    Map<String, String> environment) {
  Map<String, String> subset = new HashMap<>();
  for (String name : variables) {
    // containsKey (rather than get() != null) preserves entries explicitly mapped to null.
    if (environment.containsKey(name)) {
      subset.put(name, environment.get(name));
    }
  }
  return subset;
}
/**
 * Returns the {@link Option} class that defines the given option, null if the
 * option isn't recognized.
 *
 * <p>optionName is the name of the option as it appears on the command line
 * (e.g. {@link Option#name}).
 */
Class<? extends OptionsBase> getOptionClass(String optionName) {
  OptionDetails optionData = transitiveOptionsMap.get(optionName);
  return optionData == null ? null : optionData.optionsClass;
}

/**
 * Returns the value of the specified option for this configuration or null if the
 * option isn't recognized. Since an option's legitimate value could be null, use
 * {@link #getOptionClass} to distinguish between that and an unknown option.
 *
 * <p>optionName is the name of the option as it appears on the command line
 * (e.g. {@link Option#name}).
 */
Object getOptionValue(String optionName) {
  OptionDetails optionData = transitiveOptionsMap.get(optionName);
  return (optionData == null) ? null : optionData.value;
}

/**
 * Returns whether or not the given option supports multiple values at the command line (e.g.
 * "--myoption value1 --myOption value2 ..."). Returns false for unrecognized options. Use
 * {@link #getOptionClass} to distinguish between those and legitimate single-value options.
 *
 * <p>As declared in {@link Option#allowMultiple}, multi-value options are expected to be
 * of type {@code List<T>}.
 */
boolean allowsMultipleValues(String optionName) {
  OptionDetails optionData = transitiveOptionsMap.get(optionName);
  return (optionData == null) ? false : optionData.allowsMultiple;
}
/**
* The platform string, suitable for use as a key into a MakeEnvironment.
*/
public String getPlatformName() {
return platformName;
}
/**
* Returns the output directory for this build configuration.
*/
public Root getOutputDirectory() {
return outputRoots.outputDirectory;
}
/**
* Returns the bin directory for this build configuration.
*/
@SkylarkCallable(name = "bin_dir", structField = true,
doc = "The root corresponding to bin directory.")
public Root getBinDirectory() {
return outputRoots.binDirectory;
}
/**
* Returns a relative path to the bin directory at execution time.
*/
public PathFragment getBinFragment() {
return getBinDirectory().getExecPath();
}
/**
* Returns the include directory for this build configuration.
*/
public Root getIncludeDirectory() {
return outputRoots.includeDirectory;
}
/**
* Returns the genfiles directory for this build configuration.
*/
@SkylarkCallable(name = "genfiles_dir", structField = true,
doc = "The root corresponding to genfiles directory.")
public Root getGenfilesDirectory() {
return outputRoots.genfilesDirectory;
}
/**
* Returns the directory where coverage-related artifacts and metadata files
* should be stored. This includes for example uninstrumented class files
* needed for Jacoco's coverage reporting tools.
*/
public Root getCoverageMetadataDirectory() {
return outputRoots.coverageMetadataDirectory;
}
/**
* Returns the testlogs directory for this build configuration.
*/
public Root getTestLogsDirectory() {
return outputRoots.testLogsDirectory;
}
/**
* Returns a relative path to the genfiles directory at execution time.
*/
public PathFragment getGenfilesFragment() {
return getGenfilesDirectory().getExecPath();
}
/**
 * Returns the path separator for the host platform. This is basically the same as {@link
 * java.io.File#pathSeparator}, except that that returns the value for this JVM, which may or may
 * not match the host platform. You should only use this when invoking tools that are known to use
 * the native path separator, i.e., the path separator for the machine that they run on.
 */
@SkylarkCallable(name = "host_path_separator", structField = true,
    doc = "Returns the separator for PATH environment variable, which is ':' on Unix.")
public String getHostPathSeparator() {
  // TODO(bazel-team): Maybe do this in the constructor instead? This isn't serialization-safe.
  if (OS.getCurrent() == OS.WINDOWS) {
    return ";";
  }
  return ":";
}
/**
 * Returns the internal directory (used for middlemen) for this build configuration.
 *
 * @return the root of this configuration's middleman tree
 */
public Root getMiddlemanDirectory() {
  return outputRoots.middlemanDirectory;
}
/** Returns the {@code allowRuntimeDepsOnNeverLink} build option. */
public boolean getAllowRuntimeDepsOnNeverLink() {
  return options.allowRuntimeDepsOnNeverLink;
}
/** Returns the {@code strictFilesets} build option. */
public boolean isStrictFilesets() {
  return options.strictFilesets;
}
/** Returns the plugin labels configured via build options ({@code pluginList}). */
public List<Label> getPlugins() {
  return options.pluginList;
}
/** Returns the per-plugin copt entries configured via build options ({@code pluginCoptList}). */
public List<Map.Entry<String, String>> getPluginCopts() {
  return options.pluginCoptList;
}
/**
 * Like getShortName(), but always returns a configuration-dependent string even for
 * the host configuration.
 *
 * @return the precomputed mnemonic for this configuration
 */
public String getMnemonic() {
  return mnemonic;
}
@Override
public String toString() {
  // A configuration prints as its options checksum; see checksum().
  return checksum();
}
@SkylarkCallable(
  name = "default_shell_env",
  structField = true,
  doc =
      "A dictionary representing the local shell environment. It maps variables "
          + "to their values (strings). The local shell environment contains settings that are "
          + "machine specific, therefore its use should be avoided in rules meant to be hermetic."
)
// Returns the precomputed local shell environment; see the Skylark doc string above for semantics.
public ImmutableMap<String, String> getLocalShellEnvironment() {
  return localShellEnvironment;
}
/**
 * Returns the path to sh.
 *
 * @return the configured shell executable path
 */
public PathFragment getShExecutable() {
  return shExecutable;
}
/**
 * Returns a regex-based instrumentation filter instance that used to match label
 * names to identify targets to be instrumented in the coverage mode.
 *
 * @return the {@code instrumentationFilter} build option
 */
public RegexFilter getInstrumentationFilter() {
  return options.instrumentationFilter;
}
/**
 * Returns a boolean of whether to include targets created by *_test rules in the set of targets
 * matched by --instrumentation_filter. If this is false, all test targets are excluded from
 * instrumentation.
 *
 * @return the {@code instrumentTestTargets} build option
 */
public boolean shouldInstrumentTestTargets() {
  return options.instrumentTestTargets;
}
/**
 * Returns true if bazel should show analyses results, even if it did not
 * re-run the analysis.
 *
 * @return the {@code showCachedAnalysisResults} build option
 */
public boolean showCachedAnalysisResults() {
  return options.showCachedAnalysisResults;
}
/**
 * Builds a fresh, unordered name-to-value mapping of the "Make" variables defined by this
 * configuration.
 *
 * <p>This does *not* include package-defined overrides (e.g. vardef) and so should not be
 * used by the build logic; it exists only for the 'info' command.
 *
 * <p>Command-line definitions of make environments override variables defined by
 * {@code Fragment.addGlobalMakeVariables()} (fragment values are applied last).
 */
public Map<String, String> getMakeEnvironment() {
  // Start from the global defaults, then let each fragment's command-line defines win.
  Map<String, String> env = new HashMap<>(globalMakeEnv);
  for (Fragment fragment : fragments.values()) {
    env.putAll(fragment.getCommandLineDefines());
  }
  return ImmutableMap.copyOf(env);
}
/**
 * Collects a fresh, unordered mapping of the names set through the command line.
 * (Fragments, in particular the Google C++ support, can set variables this way.)
 */
public Map<String, String> getCommandLineDefines() {
  ImmutableMap.Builder<String, String> defines = ImmutableMap.builder();
  for (Fragment fragment : fragments.values()) {
    defines.putAll(fragment.getCommandLineDefines());
  }
  return defines.build();
}
/**
 * Returns the global defaults for this configuration for the Make environment.
 *
 * @return the precomputed global Make environment map (not a copy)
 */
public Map<String, String> getGlobalMakeEnvironment() {
  return globalMakeEnv;
}
/**
 * Aggregates the (key, value) pairs that every fragment contributes to the subcommand
 * environment for coverage actions.
 */
public Map<String, String> getCoverageEnvironment() {
  Map<String, String> coverageEnv = new HashMap<>();
  for (Fragment fragment : fragments.values()) {
    coverageEnv.putAll(fragment.getCoverageEnvironment());
  }
  return coverageEnv;
}
/**
 * Returns the default value for the specified "Make" variable for this
 * configuration. Returns null if no value was found.
 *
 * @param var the Make variable name
 * @return the default value from the global Make environment, or null
 */
public String getMakeVariableDefault(String var) {
  return globalMakeEnv.get(var);
}
/**
 * Returns a configuration fragment instances of the given class.
 *
 * @return the fragment instance, or null if this configuration has no such fragment
 */
public <T extends Fragment> T getFragment(Class<T> clazz) {
  return clazz.cast(fragments.get(clazz));
}
/**
 * Reports whether the requested configuration fragment is present.
 */
public <T extends Fragment> boolean hasFragment(Class<T> clazz) {
  // Equivalent to getFragment(clazz) != null, inlined.
  return clazz.cast(fragments.get(clazz)) != null;
}
/**
 * Reports whether every requested configuration fragment is present (this may be slow).
 */
public boolean hasAllFragments(Set<Class<?>> fragmentClasses) {
  for (Class<?> requested : fragmentClasses) {
    // A single missing fragment decides the answer.
    if (getFragment(requested.asSubclass(Fragment.class)) == null) {
      return false;
    }
  }
  return true;
}
/**
 * Which fragments does this configuration contain?
 *
 * @return the set of fragment classes (a view of the internal map's key set)
 */
public Set<Class<? extends Fragment>> fragmentClasses() {
  return fragments.keySet();
}
/**
 * Returns true if non-functional build stamps are enabled.
 *
 * @return the {@code stampBinaries} build option
 */
public boolean stampBinaries() {
  return options.stampBinaries;
}
/**
 * Returns true if extended sanity checks should be enabled.
 *
 * @return the {@code extendedSanityChecks} build option
 */
public boolean extendedSanityChecks() {
  return options.extendedSanityChecks;
}
/**
 * Returns true if we are building runfiles symlinks for this configuration.
 *
 * @return the {@code buildRunfiles} build option
 */
public boolean buildRunfiles() {
  return options.buildRunfiles;
}
/**
 * Returns if we are building external runfiles symlinks using the old-style structure.
 *
 * @return the {@code legacyExternalRunfiles} build option
 */
public boolean legacyExternalRunfiles() {
  return options.legacyExternalRunfiles;
}
/** Returns the {@code checkFilesetDependenciesRecursively} build option. */
public boolean getCheckFilesetDependenciesRecursively() {
  return options.checkFilesetDependenciesRecursively;
}
/** Returns the {@code skyframeNativeFileset} build option. */
public boolean getSkyframeNativeFileset() {
  return options.skyframeNativeFileset;
}
/** Returns the test arguments configured via build options ({@code testArguments}). */
public List<String> getTestArguments() {
  return options.testArguments;
}
/** Returns the test filter configured via build options ({@code testFilter}). */
public String getTestFilter() {
  return options.testFilter;
}
/**
 * Returns user-specified test environment variables and their values, as
 * set by the --test_env options.
 *
 * @return the precomputed, immutable test environment map
 */
public ImmutableMap<String, String> getTestEnv() {
  return testEnvironment;
}
/** Returns the {@code cacheTestResults} build option (tri-state: yes/no/auto). */
public TriState cacheTestResults() {
  return options.cacheTestResults;
}
/** Returns the {@code minParamFileSize} build option. */
public int getMinParamFileSize() {
  return options.minParamFileSize;
}
@SkylarkCallable(name = "coverage_enabled", structField = true,
    doc = "A boolean that tells whether code coverage is enabled.")
// Returns the collectCodeCoverage build option; also exposed to Skylark as coverage_enabled.
public boolean isCodeCoverageEnabled() {
  return options.collectCodeCoverage;
}
/** Returns the {@code collectMicroCoverage} build option. */
public boolean isMicroCoverageEnabled() {
  return options.collectMicroCoverage;
}
/** Returns whether actions are enabled for this configuration (the {@code actionsEnabled} field). */
public boolean isActionsEnabled() {
  return actionsEnabled;
}
/** Returns the {@code testShardingStrategy} build option. */
public TestActionBuilder.TestShardingStrategy testShardingStrategy() {
  return options.testShardingStrategy;
}
/**
 * Determines how many times the given test should run: the count from the first
 * --runs_per_test entry whose filter includes the label, or 1 if none matches.
 *
 * @param label the test target's label
 * @return the number of runs for that test
 */
public int getRunsPerTestForLabel(Label label) {
  for (PerLabelOptions candidate : options.runsPerTest) {
    if (candidate.isIncluded(label)) {
      String runs = Iterables.getOnlyElement(candidate.getOptions());
      return Integer.parseInt(runs);
    }
  }
  return 1;  // no filter matched: run once
}
/** Returns the {@code runUnder} build option. */
public RunUnder getRunUnder() {
  return options.runUnder;
}
/**
 * Returns true if this is a host configuration.
 *
 * @return the {@code isHost} build option
 */
public boolean isHostConfiguration() {
  return options.isHost;
}
/** Returns the {@code checkVisibility} build option. */
public boolean checkVisibility() {
  return options.checkVisibility;
}
/** Returns the {@code checkLicenses} build option. */
public boolean checkLicenses() {
  return options.checkLicenses;
}
/** Returns the {@code enforceConstraints} build option. */
public boolean enforceConstraints() {
  return options.enforceConstraints;
}
/** Returns the configured action listeners, or an empty list when actions are disabled. */
public List<Label> getActionListeners() {
  if (!actionsEnabled) {
    return ImmutableList.of();
  }
  return options.actionListeners;
}
/**
 * Returns whether we should use dynamically instantiated build configurations
 * vs. static configurations (e.g. predefined in
 * {@link com.google.devtools.build.lib.analysis.ConfigurationCollectionFactory}).
 *
 * @return the {@code useDynamicConfigurations} build option
 */
public boolean useDynamicConfigurations() {
  return options.useDynamicConfigurations;
}
/**
 * Returns compilation mode.
 *
 * @return the {@code compilationMode} build option
 */
public CompilationMode getCompilationMode() {
  return options.compilationMode;
}
/** Returns the cache key of the build options used to create this configuration. */
public final String checksum() {
  return checksum;  // precomputed field; also serves as toString()
}
/**
 * Returns a copy of the build configuration options for this configuration.
 *
 * <p>Callers may freely mutate the returned instance; the configuration's own
 * options are unaffected.
 */
public BuildOptions cloneOptions() {
  // The intermediate local variable in the original added nothing; return the clone directly.
  return buildOptions.clone();
}
/**
 * Returns the actual options reference used by this configuration.
 *
 * <p><b>Be very careful using this method.</b> Options classes are mutable - no caller
 * should ever call this method if there's any chance the reference might be written to.
 * This method only exists because {@link #cloneOptions} can be expensive when applied to
 * every edge in a dependency graph, which becomes possible with dynamic configurations.
 *
 * <p>Do not use this method without careful review with other Bazel developers.
 *
 * @return the live (mutable, shared) options instance - not a copy
 */
public BuildOptions getOptions() {
  return buildOptions;
}
/**
 * Returns all the configuration-specific roots for this configuration, in a fixed order:
 * bin, genfiles, include, middleman, testlogs.
 */
public List<Root> getRoots() {
  return ImmutableList.of(
      getBinDirectory(),
      getGenfilesDirectory(),
      getIncludeDirectory(),
      getMiddlemanDirectory(),
      getTestLogsDirectory());
}
/** Returns all labels referenced by this configuration's build options (delegates to BuildOptions). */
public ListMultimap<String, Label> getAllLabels() {
  return buildOptions.getAllLabels();
}
/** Returns the {@code cpu} build option. */
public String getCpu() {
  return options.cpu;
}
/**
 * Decides whether runfiles are enabled: an explicit YES/NO option wins; otherwise
 * runfiles are enabled on every host platform except Windows.
 */
public boolean runfilesEnabled() {
  TriState enableRunfiles = options.enableRunfiles;
  if (enableRunfiles == TriState.YES) {
    return true;
  }
  if (enableRunfiles == TriState.NO) {
    return false;
  }
  return OS.getCurrent() != OS.WINDOWS;
}
/**
 * Reports whether this configuration performs static linking, i.e. whether any
 * fragment does.
 */
public boolean performsStaticLink() {
  for (Fragment candidate : fragments.values()) {
    if (candidate.performsStaticLink()) {
      return true;  // one fragment suffices
    }
  }
  return false;
}
/**
 * Gathers the executables that each fragment defines into a single immutable map.
 */
private ImmutableMap<String, PathFragment> collectExecutables() {
  ImmutableMap.Builder<String, PathFragment> executables = new ImmutableMap.Builder<>();
  for (Fragment fragment : fragments.values()) {
    fragment.defineExecutables(executables);
  }
  return executables.build();
}
/**
 * See {@code BuildConfigurationCollection.Transitions.getArtifactOwnerConfiguration()}.
 */
public BuildConfiguration getArtifactOwnerConfiguration() {
  // Dynamic configurations inherit transitions objects from other configurations exclusively
  // for use of Transitions.getDynamicTransitions. No other calls to transitions should be
  // made for dynamic configurations.
  // TODO(bazel-team): enforce the above automatically (without having to explicitly check
  // for dynamic configuration mode).
  if (useDynamicConfigurations()) {
    return this;
  }
  return transitions.getArtifactOwnerConfiguration();
}
/**
 * @return whether proto header modules should be built (the {@code protoHeaderModules} option).
 */
public boolean getProtoHeaderModules() {
  return options.protoHeaderModules;
}
/**
 * @return the list of default features used for all packages (the {@code defaultFeatures} option).
 */
public List<String> getDefaultFeatures() {
  return options.defaultFeatures;
}
/**
 * Returns the "top-level" environment space, i.e. the set of environments all top-level
 * targets must be compatible with. An empty value implies no restrictions.
 *
 * @return the {@code targetEnvironments} build option
 */
public List<Label> getTargetEnvironments() {
  return options.targetEnvironments;
}
/** Looks up a Skylark-visible fragment class by its Skylark name; null if unknown. */
public Class<? extends Fragment> getSkylarkFragmentByName(String name) {
  return skylarkVisibleFragments.get(name);
}
/** Returns the names of all Skylark-visible fragments (key set of the internal map). */
public ImmutableCollection<String> getSkylarkFragmentNames() {
  return skylarkVisibleFragments.keySet();
}
}
|
apache-2.0
|
jentfoo/aws-sdk-java
|
aws-java-sdk-kms/src/main/java/com/amazonaws/services/kms/model/DisableKeyRotationRequest.java
|
7814
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kms.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for disabling automatic rotation of the key material of a customer master
 * key (CMK).
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kms-2014-11-01/DisableKeyRotation" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DisableKeyRotationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * A unique identifier for the customer master key (CMK). Specify the key ID or the Amazon
     * Resource Name (ARN) of the CMK. For example:
     * </p>
     * <ul>
     * <li>Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code></li>
     * <li>Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code></li>
     * </ul>
     * <p>
     * To get the key ID and key ARN for a CMK, use <a>ListKeys</a> or <a>DescribeKey</a>.
     * </p>
     */
    private String keyId;

    /**
     * Sets the unique identifier for the customer master key (CMK).
     *
     * @param keyId
     *        The key ID or the Amazon Resource Name (ARN) of the CMK. To get the key ID and key
     *        ARN for a CMK, use <a>ListKeys</a> or <a>DescribeKey</a>.
     */
    public void setKeyId(String keyId) {
        this.keyId = keyId;
    }

    /**
     * Returns the unique identifier for the customer master key (CMK).
     *
     * @return the key ID or key ARN previously set, or {@code null} if none was set.
     */
    public String getKeyId() {
        return this.keyId;
    }

    /**
     * Fluent variant of {@link #setKeyId(String)}.
     *
     * @param keyId
     *        The key ID or the Amazon Resource Name (ARN) of the CMK.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DisableKeyRotationRequest withKeyId(String keyId) {
        setKeyId(keyId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getKeyId() != null)
            sb.append("KeyId: ").append(getKeyId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // Idiomatic form of "obj instanceof X == false"; also covers obj == null,
        // making the original explicit null check redundant.
        if (!(obj instanceof DisableKeyRotationRequest))
            return false;
        DisableKeyRotationRequest other = (DisableKeyRotationRequest) obj;
        // Exactly one of the two keyIds null => not equal.
        if (other.getKeyId() == null ^ this.getKeyId() == null)
            return false;
        // Here either both are null (equal) or both are non-null (compare values).
        return other.getKeyId() == null || other.getKeyId().equals(this.getKeyId());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getKeyId() == null) ? 0 : getKeyId().hashCode());
        return hashCode;
    }

    @Override
    public DisableKeyRotationRequest clone() {
        return (DisableKeyRotationRequest) super.clone();
    }
}
|
apache-2.0
|
compomics/compomics-utilities
|
src/main/java/com/compomics/util/gui/spectrum/SpectrumPanel.java
|
90051
|
/**
* Created by IntelliJ IDEA. User: Lennart Date: 11-mei-2004 Time: 16:34:34
*/
package com.compomics.util.gui.spectrum;
import com.compomics.util.experiment.biology.aminoacids.sequence.AminoAcidSequence;
import com.compomics.util.experiment.biology.ions.Ion;
import com.compomics.util.experiment.biology.ions.NeutralLoss;
import com.compomics.util.experiment.biology.proteins.Peptide;
import com.compomics.util.experiment.biology.ions.impl.PeptideFragmentIon;
import com.compomics.util.experiment.biology.ions.impl.TagFragmentIon;
import com.compomics.util.experiment.identification.matches.IonMatch;
import com.compomics.util.experiment.identification.amino_acid_tags.Tag;
import com.compomics.util.experiment.identification.amino_acid_tags.TagComponent;
import com.compomics.util.experiment.identification.amino_acid_tags.MassGap;
import com.compomics.util.experiment.identification.utils.ModificationUtils;
import com.compomics.util.experiment.io.biology.protein.SequenceProvider;
import com.compomics.util.gui.interfaces.SpectrumAnnotation;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import com.compomics.util.interfaces.SpectrumFile;
import com.compomics.util.parameters.identification.advanced.SequenceMatchingParameters;
import com.compomics.util.parameters.identification.search.ModificationParameters;
import javax.swing.*;
import javax.swing.border.EtchedBorder;
import java.awt.*;
import java.util.*;
import java.util.ArrayList;
import java.util.stream.Collectors;
/*
* CVS information:
*
* $Revision: 1.9 $ $Date: 2009/08/17 15:15:28 $
*/
/**
* This class presents a JPanel that will hold and display a mass spectrum in
* centroid or profile mode.
*
* @author Lennart Martens
* @author Harald Barsnes
* @author Marc Vaudel
* @version $Id: SpectrumPanel.java,v 1.9 2009/08/17 15:15:28 lennart Exp $
*/
public class SpectrumPanel extends GraphicsPanel {
/**
* Class specific log4j logger for SpectrumPanel instances.
*/
static Logger logger = LogManager.getLogger(SpectrumPanel.class);
/**
* The color used for the peaks. Default to red.
*/
private Color spectrumPeakColor = Color.RED;
/**
* The color used for the profile mode spectra. Defaults to pink.
*/
private Color spectrumProfileModeLineColor = Color.PINK;
/**
 * Color map for the ion annotation, keyed by ion type, then an Integer, then a String key.
 * NOTE(review): the meaning of the inner Integer/String keys is not visible here -
 * presumably charge and neutral-loss label; confirm against the usages elsewhere in this class.
 */
// The file already uses java.util.stream (Java 8+), so the diamond operator replaces the
// verbose repeated type arguments of the original declaration.
private static HashMap<Ion.IonType, HashMap<Integer, HashMap<String, Color>>> colorMap = new HashMap<>(2);
/**
 * Builds an interactive, lines-style SpectrumPanel from the given spectrum file.
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here. Note that mass-sorting will take place in this step as well.
 */
public SpectrumPanel(SpectrumFile aSpecFile) {
    this(aSpecFile, DrawingStyle.LINES, true);
}
/**
 * Builds a lines-style SpectrumPanel from the given spectrum file, with interaction
 * optionally enabled.
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here. Note that mass-sorting will take place in this step as well.
 * @param aEnableInteraction whether user-derived events should be caught and dealt with.
 */
public SpectrumPanel(SpectrumFile aSpecFile, boolean aEnableInteraction) {
    this(aSpecFile, DrawingStyle.LINES, aEnableInteraction);
}
/**
 * Builds a SpectrumPanel from the given spectrum file with the specified drawing style
 * and default filename coloring.
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here. Note that mass-sorting will take place in this step as well.
 * @param aDrawStyle the drawing style to use.
 * @param aEnableInteraction whether user-derived events should be caught and dealt with.
 */
public SpectrumPanel(SpectrumFile aSpecFile, DrawingStyle aDrawStyle, boolean aEnableInteraction) {
    this(aSpecFile, aDrawStyle, aEnableInteraction, null);
}
/**
 * Builds a SpectrumPanel from the given spectrum file with the specified drawing style and
 * filename color, using the defaults: max padding 50, file name hidden, precursor details
 * and resolution shown.
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here. Note that mass-sorting will take place in this step as well.
 * @param aDrawStyle the drawing style to use.
 * @param aEnableInteraction whether user-derived events should be caught and dealt with.
 * @param aSpectrumFilenameColor color for the spectrum filename on the panel; may be
 * 'null' for default coloring.
 */
public SpectrumPanel(SpectrumFile aSpecFile, DrawingStyle aDrawStyle, boolean aEnableInteraction,
        Color aSpectrumFilenameColor) {
    this(aSpecFile, aDrawStyle, aEnableInteraction, aSpectrumFilenameColor, 50, false, true, true);
}
/**
 * Empty default constructor.
 *
 * <p>Performs no initialization of its own; all display-related fields keep their
 * declared defaults until another constructor or setter is used.
 */
public SpectrumPanel() {
}
/**
 * Builds a SpectrumPanel from the given spectrum file with the specified drawing style,
 * filename color, padding and file-name visibility; precursor details and resolution
 * default to shown.
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here. Note that mass-sorting will take place in this step as well.
 * @param aDrawStyle the drawing style to use.
 * @param aEnableInteraction whether user-derived events should be caught and dealt with.
 * @param aSpectrumFilenameColor color for the spectrum filename on the panel; may be
 * 'null' for default coloring.
 * @param aMaxPadding the maximum padding size.
 * @param aShowFileName whether the file name should be shown in the panel.
 */
public SpectrumPanel(SpectrumFile aSpecFile, DrawingStyle aDrawStyle, boolean aEnableInteraction,
        Color aSpectrumFilenameColor, int aMaxPadding, boolean aShowFileName) {
    this(aSpecFile, aDrawStyle, aEnableInteraction, aSpectrumFilenameColor, aMaxPadding,
            aShowFileName, true, true);
}
/**
 * Builds a SpectrumPanel from the given spectrum file with full control over the display
 * toggles; the MS level defaults to 0 (unknown).
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here. Note that mass-sorting will take place in this step as well.
 * @param aDrawStyle the drawing style to use.
 * @param aEnableInteraction whether user-derived events should be caught and dealt with.
 * @param aSpectrumFilenameColor color for the spectrum filename in the panel; may be
 * 'null' for default coloring.
 * @param aMaxPadding the maximum padding size.
 * @param aShowFileName whether the file name should be shown in the panel.
 * @param aShowPrecursorDetails whether the precursor details should be shown in the panel.
 * @param aShowResolution whether the resolution should be shown in the panel.
 */
public SpectrumPanel(SpectrumFile aSpecFile, DrawingStyle aDrawStyle, boolean aEnableInteraction,
        Color aSpectrumFilenameColor, int aMaxPadding, boolean aShowFileName,
        boolean aShowPrecursorDetails, boolean aShowResolution) {
    this(aSpecFile, aDrawStyle, aEnableInteraction, aSpectrumFilenameColor, aMaxPadding,
            aShowFileName, aShowPrecursorDetails, aShowResolution, 0);
}
/**
 * Builds a SpectrumPanel from the given spectrum file with an explicit MS level; the
 * profile-mode flag defaults to false (centroid).
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here. Note that mass-sorting will take place in this step as well.
 * @param aDrawStyle the drawing style to use.
 * @param aEnableInteraction whether user-derived events should be caught and dealt with.
 * @param aSpectrumFilenameColor color for the spectrum filename in the panel; may be
 * 'null' for default coloring.
 * @param aMaxPadding the maximum padding size.
 * @param aShowFileName whether the file name should be shown in the panel.
 * @param aShowPrecursorDetails whether the precursor details should be shown in the panel.
 * @param aShowResolution whether the resolution should be shown in the panel.
 * @param aMSLevel the ms level for the spectrum.
 */
public SpectrumPanel(SpectrumFile aSpecFile, DrawingStyle aDrawStyle, boolean aEnableInteraction,
        Color aSpectrumFilenameColor, int aMaxPadding, boolean aShowFileName,
        boolean aShowPrecursorDetails, boolean aShowResolution, int aMSLevel) {
    this(aSpecFile, aDrawStyle, aEnableInteraction, aSpectrumFilenameColor, aMaxPadding,
            aShowFileName, aShowPrecursorDetails, aShowResolution, aMSLevel, false);
}
/**
 * Master constructor: builds a SpectrumPanel from the given spectrum file with the specified
 * drawing style and all display options. All other SpectrumFile-based constructors delegate
 * here.
 *
 * @param aSpecFile SpectrumFile with the information about masses and intensities that will
 * be copied here; may be null, in which case no spectrum is processed. Note that
 * mass-sorting will take place in this step as well.
 * @param aDrawStyle the drawing style to use.
 * @param aEnableInteraction whether user-derived events should be caught and dealt with.
 * @param aSpectrumFilenameColor color for the spectrum filename in the panel; may be
 * 'null' for default coloring.
 * @param aMaxPadding the maximum padding size.
 * @param aShowFileName whether the file name should be shown in the panel.
 * @param aShowPrecursorDetails whether the precursor details should be shown in the panel.
 * @param aShowResolution whether the resolution should be shown in the panel.
 * @param aMSLevel the ms level for the spectrum; 0 if the ms level is unknown.
 * @param aProfileMode if true the spectrum will be drawn in profile mode, otherwise centroid.
 */
public SpectrumPanel(SpectrumFile aSpecFile, DrawingStyle aDrawStyle, boolean aEnableInteraction,
        Color aSpectrumFilenameColor, int aMaxPadding, boolean aShowFileName,
        boolean aShowPrecursorDetails, boolean aShowResolution, int aMSLevel,
        boolean aProfileMode) {
    this.iCurrentDrawStyle = aDrawStyle;
    // Fix: the original used a raw 'new ArrayList()'; use the diamond so the listener list
    // gets the declared element type.
    this.iSpecPanelListeners = new ArrayList<>();
    this.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED));
    this.setBackground(Color.WHITE);
    if (aSpecFile != null) {
        dataSetCounter = 0;
        this.processSpectrumFile(aSpecFile, spectrumPeakColor, spectrumProfileModeLineColor);
    }
    if (aEnableInteraction) {
        this.addListeners();
    }
    this.iFilenameColor = aSpectrumFilenameColor;
    this.maxPadding = aMaxPadding;
    this.showFileName = aShowFileName;
    this.showPrecursorDetails = aShowPrecursorDetails;
    this.showResolution = aShowResolution;
    this.iMSLevel = aMSLevel;
    this.currentGraphicsPanelType = aProfileMode
            ? GraphicsPanelType.profileSpectrum
            : GraphicsPanelType.centroidSpectrum;
}
/**
 * Builds a SpectrumPanel from raw axis data; used to annotate matched ions on the spectrum
 * panels. Defaults: max padding 50, file name hidden, precursor details and resolution shown.
 *
 * @param aXAxisData double[] with all the x-axis values.
 * @param aYAxisData double[] with all the y-axis values.
 * @param aPrecursorMZ double with the precursor mass.
 * @param aPrecursorCharge String with the precursor charge.
 * @param aFileName String with the title of the Query.
 */
public SpectrumPanel(double[] aXAxisData, double[] aYAxisData, double aPrecursorMZ,
        String aPrecursorCharge, String aFileName) {
    this(aXAxisData, aYAxisData, aPrecursorMZ, aPrecursorCharge, aFileName, 50, false, true, true);
}
/**
 * Builds a SpectrumPanel from raw axis data with an explicit file-name visibility flag;
 * used to annotate matched ions on the spectrum panels.
 *
 * @param aXAxisData double[] with all the x-axis values.
 * @param aYAxisData double[] with all the y-axis values.
 * @param aPrecursorMZ double with the precursor mass.
 * @param aPrecursorCharge String with the precursor charge.
 * @param aFileName String with the title of the Query.
 * @param aShowFileName whether the file name should be shown in the panel.
 */
public SpectrumPanel(double[] aXAxisData, double[] aYAxisData, double aPrecursorMZ,
        String aPrecursorCharge, String aFileName, boolean aShowFileName) {
    this(aXAxisData, aYAxisData, aPrecursorMZ, aPrecursorCharge, aFileName, 50, aShowFileName,
            true, true);
}
/**
 * Builds a SpectrumPanel from raw axis data with explicit padding and file-name visibility;
 * used to annotate matched ions on the spectrum panels.
 *
 * @param aXAxisData double[] with all the x-axis values.
 * @param aYAxisData double[] with all the y-axis values.
 * @param aPrecursorMZ double with the precursor mass.
 * @param aPrecursorCharge String with the precursor charge.
 * @param aFileName String with the title of the Query.
 * @param aMaxPadding the maximum padding size.
 * @param aShowFileName whether the file name should be shown in the panel.
 */
public SpectrumPanel(double[] aXAxisData, double[] aYAxisData, double aPrecursorMZ,
        String aPrecursorCharge, String aFileName, int aMaxPadding, boolean aShowFileName) {
    this(aXAxisData, aYAxisData, aPrecursorMZ, aPrecursorCharge, aFileName, aMaxPadding,
            aShowFileName, true, true);
}
/**
 * Builds a SpectrumPanel from raw axis data with full control over the display toggles;
 * the MS level defaults to 0 (unknown). Used to annotate matched ions on the spectrum panels.
 *
 * @param aXAxisData double[] with all the x-axis values.
 * @param aYAxisData double[] with all the y-axis values.
 * @param aPrecursorMZ double with the precursor mass.
 * @param aPrecursorCharge String with the precursor charge.
 * @param aFileName String with the title of the Query.
 * @param aMaxPadding the maximum padding size.
 * @param aShowFileName whether the file name should be shown in the panel.
 * @param aShowPrecursorDetails whether the precursor details should be shown in the panel.
 * @param aShowResolution whether the resolution should be shown in the panel.
 */
public SpectrumPanel(double[] aXAxisData, double[] aYAxisData, double aPrecursorMZ,
        String aPrecursorCharge, String aFileName, int aMaxPadding, boolean aShowFileName,
        boolean aShowPrecursorDetails, boolean aShowResolution) {
    this(aXAxisData, aYAxisData, aPrecursorMZ, aPrecursorCharge, aFileName, aMaxPadding,
            aShowFileName, aShowPrecursorDetails, aShowResolution, 0);
}
    /**
     * This constructor creates a SpectrumPanel based on the passed parameters.
     * This constructor will be used to annotate matched ions on the spectrum
     * panels.
     *
     * @param aXAxisData double[] with all the x-axis values.
     * @param aYAxisData double[] with all the y-axis values.
     * @param aPrecursorMZ double with the precursor mass.
     * @param aPrecursorCharge String with the precursor charge.
     * @param aFileName String with the title of the Query.
     * @param aMaxPadding int the sets the maximum padding size.
     * @param aShowFileName boolean that specifies if the file name should be
     * shown in the panel
     * @param aShowPrecursorDetails boolean that specifies if the precursor
     * details should be shown in the panel
     * @param aShowResolution boolean that specifies if the resolution should be
     * shown in the panel
     * @param aMSLevel int with the ms level for the spectrum, set to 0 if ms
     * level is unknown
     */
    public SpectrumPanel(
            double[] aXAxisData,
            double[] aYAxisData,
            double aPrecursorMZ,
            String aPrecursorCharge,
            String aFileName,
            int aMaxPadding,
            boolean aShowFileName,
            boolean aShowPrecursorDetails,
            boolean aShowResolution,
            int aMSLevel
    ) {
        // delegate to the fully parameterized constructor, defaulting to
        // centroid (non-profile) drawing mode
        this(
                aXAxisData,
                aYAxisData,
                aPrecursorMZ,
                aPrecursorCharge,
                aFileName,
                aMaxPadding,
                aShowFileName,
                aShowPrecursorDetails,
                aShowResolution,
                aMSLevel,
                false
        );
    }
    /**
     * This constructor creates a SpectrumPanel based on the passed parameters.
     * This constructor will be used to annotate matched ions on the spectrum
     * panels. This is the fully parameterized constructor that all the other
     * constructors in this class delegate to.
     *
     * @param aXAxisData double[] with all the x-axis values.
     * @param aYAxisData double[] with all the y-axis values.
     * @param aPrecursorMZ double with the precursor mass.
     * @param aPrecursorCharge String with the precursor charge.
     * @param aFileName String with the title of the Query.
     * @param aMaxPadding int the sets the maximum padding size.
     * @param aShowFileName boolean that specifies if the file name should be
     * shown in the panel
     * @param aShowPrecursorDetails boolean that specifies if the precursor
     * details should be shown in the panel
     * @param aShowResolution boolean that specifies if the resolution should be
     * shown in the panel
     * @param aMSLevel int with the ms level for the spectrum, set to 0 if ms
     * level is unknown
     * @param aProfileMode boolean if set to true the spectrum will be drawn in
     * profile mode
     */
    public SpectrumPanel(
            double[] aXAxisData,
            double[] aYAxisData,
            double aPrecursorMZ,
            String aPrecursorCharge,
            String aFileName,
            int aMaxPadding,
            boolean aShowFileName,
            boolean aShowPrecursorDetails,
            boolean aShowResolution,
            int aMSLevel,
            boolean aProfileMode
    ) {
        this.iCurrentDrawStyle = DrawingStyle.LINES;
        this.iSpecPanelListeners = new ArrayList();
        this.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED));
        this.setBackground(Color.WHITE);
        // first (main) dataset: reset the counter before processing the data
        dataSetCounter = 0;
        processXAndYData(
                aXAxisData,
                aYAxisData,
                spectrumPeakColor,
                spectrumProfileModeLineColor
        );
        iPrecursorMZ = aPrecursorMZ;
        iPrecursorCharge = aPrecursorCharge;
        iFilename = aFileName;
        this.maxPadding = aMaxPadding;
        this.showFileName = aShowFileName;
        this.showPrecursorDetails = aShowPrecursorDetails;
        this.showResolution = aShowResolution;
        this.iMSLevel = aMSLevel;
        // profile vs. centroid rendering mode
        this.currentGraphicsPanelType = aProfileMode ? GraphicsPanelType.profileSpectrum : GraphicsPanelType.centroidSpectrum;
        // register the panel's listeners
        this.addListeners();
    }
    /**
     * Add a mirrored spectrum (or chromatogram). Disables the file name,
     * precursor details and resolution display, and flags the y data as not
     * strictly positive.
     *
     * @param aXAxisData the x axis data
     * @param aYAxisData the y axis data
     * @param aPrecursorMZ the precursor m/z
     * @param aPrecursorCharge the precursor charge
     * @param aFileName the file name
     * @param aProfileMode if the spectrum is to be drawn in profile mode
     * @param aSpectrumPeakColor the spectrum peak color
     * @param aSpectrumProfileModeLineColor the spectrum profile mode line color
     */
    public void addMirroredSpectrum(
            double[] aXAxisData,
            double[] aYAxisData,
            double aPrecursorMZ,
            String aPrecursorCharge,
            String aFileName,
            boolean aProfileMode,
            Color aSpectrumPeakColor,
            Color aSpectrumProfileModeLineColor
    ) {
        // NOTE(review): the mirrored-spectrum field names carry historical
        // spelling variations (Mirorred/Mirrorred); kept for compatibility
        iPrecursorMZMirroredSpectrum = aPrecursorMZ;
        iPrecursorChargeMirorredSpectrum = aPrecursorCharge;
        iFilenameMirrorredSpectrum = aFileName;
        processMirroredXAndYData(
                aXAxisData,
                aYAxisData,
                aSpectrumPeakColor,
                aSpectrumProfileModeLineColor
        );
        this.currentGraphicsPanelType = aProfileMode ? GraphicsPanelType.profileSpectrum : GraphicsPanelType.centroidSpectrum;
        // the single-spectrum header details do not apply to a mirrored view
        this.showFileName = false;
        this.showPrecursorDetails = false;
        this.showResolution = false;
        // the mirrored data introduces non-positive y values
        this.yAxisZoomExcludesBackgroundPeaks = false;
        this.yDataIsPositive = false;
    }
    /**
     * Adds an additional spectrum dataset to be displayed in the same Spectrum
     * Panel. Remember to use different colors for the different datasets.
     *
     * @param aXAxisData double[] with all the x-axis values.
     * @param aYAxisData double[] with all the y-axis values
     * @param dataPointAndLineColor the color to use for the data points and
     * lines
     * @param areaUnderCurveColor the color to use for the area under the curve
     */
    public void addAdditionalDataset(
            double[] aXAxisData,
            double[] aYAxisData,
            Color dataPointAndLineColor,
            Color areaUnderCurveColor
    ) {
        processXAndYData(
                aXAxisData,
                aYAxisData,
                dataPointAndLineColor,
                areaUnderCurveColor
        );
        // hide the single-spectrum header details once multiple datasets are shown
        this.showFileName = false;
        this.showPrecursorDetails = false;
        this.showResolution = false;
    }
    /**
     * Adds an additional mirrored spectrum dataset to be displayed in the same
     * Spectrum Panel. Remember to use different colors for the different
     * datasets.
     *
     * @param aXAxisData double[] with all the x-axis values.
     * @param aYAxisData double[] with all the y-axis values
     * @param dataPointAndLineColor the color to use for the data points and
     * lines
     * @param areaUnderCurveColor the color to use for the area under the curve
     */
    public void addAdditionalMirroredDataset(
            double[] aXAxisData,
            double[] aYAxisData,
            Color dataPointAndLineColor,
            Color areaUnderCurveColor
    ) {
        processMirroredXAndYData(
                aXAxisData,
                aYAxisData,
                dataPointAndLineColor,
                areaUnderCurveColor
        );
        // hide the single-spectrum header details once multiple datasets are shown
        this.showFileName = false;
        this.showPrecursorDetails = false;
        this.showResolution = false;
    }
/**
* Change the drawing type of the spectrum. Profile or centroid mode.
*
* @param aProfileMode if true, the spectrum is drawn in profile mode
*/
public void setProfileMode(
boolean aProfileMode
) {
this.currentGraphicsPanelType = aProfileMode ? GraphicsPanelType.profileSpectrum : GraphicsPanelType.centroidSpectrum;
}
    /**
     * Set the default spectrum peak color. (Note that this only has an impact
     * on the first spectrum added. For additional spectra or mirrored spectra
     * set the color in the given constructor.)
     *
     * @param aSpectrumPeakColor the color to set
     */
    public void setSpectrumPeakColor(
            Color aSpectrumPeakColor
    ) {
        // stored as the default used when processing the first dataset
        this.spectrumPeakColor = aSpectrumPeakColor;
    }
    /**
     * Set the default spectrum profile mode color. (Note that this only has an
     * impact on the first spectrum added. For additional spectra or mirrored
     * spectra set the color in the given constructor.)
     *
     * @param aSpectrumProfileModeLineColor the color to set
     */
    public void setSpectrumProfileModeLineColor(
            Color aSpectrumProfileModeLineColor
    ) {
        // stored as the default used when processing the first dataset
        this.spectrumProfileModeLineColor = aSpectrumProfileModeLineColor;
    }
    /**
     * If true only the annotated peaks will be drawn. The default value is
     * false, and result in all peaks being drawn. Note that this setting is
     * ignored when in profile mode!
     *
     * @param aAnnotatedPeaks if true only the annotated peaks will be drawn
     */
    public void showAnnotatedPeaksOnly(
            boolean aAnnotatedPeaks
    ) {
        // the internal flag has inverted meaning: showAllPeaks == !annotatedOnly
        this.showAllPeaks = !aAnnotatedPeaks;
    }
    /**
     * This method initializes a SpectrumPanel based on the spectrum information
     * in the specified SpectrumFile.
     *
     * @param aSpecFile SpectrumFile with the information about masses and
     * intensities that will be copied here. Note that mass-sorting will take
     * place in this step as well.
     */
    public void setSpectrumFile(
            SpectrumFile aSpecFile
    ) {
        // delegate to processSpectrumFile using the panel's default colors
        this.processSpectrumFile(
                aSpecFile,
                spectrumPeakColor,
                spectrumProfileModeLineColor
        );
    }
/**
* This method reads the peaks and their intensities from the specified
* SpectrumFile and stores these internally for drawing. The masses are
* sorted in this step.
*
* @param aSpecFile SpectrumFile from which the peaks and intensities will
* be copied.
* @param dataPointAndLineColor the color to use for the data points and
* line
* @param areaUnderCurveColor the color to use for the area under the curve
*/
private void processSpectrumFile(
SpectrumFile aSpecFile,
Color dataPointAndLineColor,
Color areaUnderCurveColor
) {
if (dataSetCounter == 0) {
iXAxisData = new ArrayList<>();
iYAxisData = new ArrayList<>();
}
iDataPointAndLineColor.add(dataPointAndLineColor);
iAreaUnderCurveColor.add(areaUnderCurveColor);
HashMap<Double, Double> peaks = aSpecFile.getPeaks();
iXAxisData.add(new double[peaks.size()]);
iYAxisData.add(new double[peaks.size()]);
iFilename = aSpecFile.getFilename();
// Maximum intensity of the peaks.
double maxInt = 0.0;
// TreeSets are sorted.
TreeSet masses = new TreeSet(peaks.keySet());
Iterator<Double> iter = masses.iterator();
int count = 0;
while (iter.hasNext()) {
double mass = iter.next();
double intensity = peaks.get(mass);
if (intensity > maxInt) {
maxInt = intensity;
}
iXAxisData.get(dataSetCounter)[count] = mass;
iYAxisData.get(dataSetCounter)[count] = intensity;
count++;
}
if (iXAxisStartAtZero) {
this.rescale(0.0, getMaxXAxisValue());
} else {
this.rescale(getMinXAxisValue(), getMaxXAxisValue());
}
this.iPrecursorMZ = aSpecFile.getPrecursorMZ();
int liTemp = aSpecFile.getCharge();
if (liTemp == 0) {
iPrecursorCharge = "?";
} else {
iPrecursorCharge = Integer.toString(liTemp);
iPrecursorCharge += (liTemp > 0 ? "+" : "-");
}
dataSetCounter++;
}
/**
* Returns the peak color to be used for the given peak label. The colors
* used are based on the color coding used in MascotDatfile.
*
* @deprecated it is advised to use methods based on the ion type rather
* than on the peak label
* @param peakLabel the peak label
* @return the peak color
*/
public static Color determineColorOfPeak(
String peakLabel
) {
Color currentColor = Color.GRAY;
if (peakLabel.startsWith("a")) {
// turquoise
currentColor = new Color(153, 0, 0);
if (peakLabel.lastIndexOf("H2O") != -1 || peakLabel.lastIndexOf("H20") != -1) {
// light purple-blue
currentColor = new Color(171, 161, 255);
} else if (peakLabel.lastIndexOf("NH3") != -1) {
// ugly purple pink
currentColor = new Color(248, 151, 202);
}
} else if (peakLabel.startsWith("b")) {
// dark blue
currentColor = new Color(0, 0, 255);
if (peakLabel.lastIndexOf("H2O") != -1 || peakLabel.lastIndexOf("H20") != -1) {
// nice blue
currentColor = new Color(0, 125, 200);
} else if (peakLabel.lastIndexOf("NH3") != -1) {
// another purple
currentColor = new Color(153, 0, 255);
}
} else if (peakLabel.startsWith("c")) {
// purple blue
currentColor = new Color(188, 0, 255); // ToDo: no colors for H2O and NH3??
} else if (peakLabel.startsWith("x")) {
// green
currentColor = new Color(78, 200, 0); // ToDo: no colors for H2O and NH3??
} else if (peakLabel.startsWith("y")) {
// black
currentColor = new Color(0, 0, 0);
if (peakLabel.lastIndexOf("H2O") != -1 || peakLabel.lastIndexOf("H20") != -1) {
// navy blue
currentColor = new Color(0, 70, 135);
} else if (peakLabel.lastIndexOf("NH3") != -1) {
// another purple
currentColor = new Color(155, 0, 155);
}
} else if (peakLabel.startsWith("z")) {
// dark green
currentColor = new Color(64, 179, 0); // ToDo: no colors for H2O and NH3??
} else if (peakLabel.startsWith("Prec") || peakLabel.startsWith("MH")) { // precursor
// red
currentColor = Color.gray; // Color.red is used in MascotDatFile
} else if (peakLabel.startsWith("i")) { // immonimum ion
// grey
currentColor = Color.gray;
}
return currentColor;
}
/**
* Filters the annotations and returns the annotations matching the
* currently selected types.
*
* @deprecated used only in demo classes
*
* @param annotations the annotations to be filtered, the annotations are
* assumed to have the following form: ion type + [ion number] + [charge] +
* [neutral loss]
* @param iontypes the fragment ion types to include, assumed to be one of
* the Ion types, e.g, IonType.PeptideFragmentIon >
* PeptideFragmentIon.B_ION
* @param neutralLosses list of neutral losses to display
* @param singleChargeSelected if singly charged fragments are to be
* included
* @param doubleChargeSelected if double charged fragments are to be
* included
* @param moreThanTwoChargesSelected if fragments with more than two charges
* are to be included
* @return the filtered annotations
*/
public static Vector<SpectrumAnnotation> filterAnnotations(
Vector<SpectrumAnnotation> annotations,
HashMap<Ion.IonType, HashSet<Integer>> iontypes,
ArrayList<NeutralLoss> neutralLosses,
boolean singleChargeSelected,
boolean doubleChargeSelected,
boolean moreThanTwoChargesSelected
) {
Vector<SpectrumAnnotation> filteredAnnotations = new Vector();
for (SpectrumAnnotation annotation : annotations) {
String currentLabel = annotation.getLabel();
boolean useAnnotation = false;
// check ion type
if (currentLabel.startsWith("a")) {
if (iontypes.containsKey(Ion.IonType.PEPTIDE_FRAGMENT_ION)
&& iontypes.get(Ion.IonType.PEPTIDE_FRAGMENT_ION).contains(PeptideFragmentIon.A_ION)) {
useAnnotation = true;
}
} else if (currentLabel.startsWith("b")) {
if (iontypes.containsKey(Ion.IonType.PEPTIDE_FRAGMENT_ION)
&& iontypes.get(Ion.IonType.PEPTIDE_FRAGMENT_ION).contains(PeptideFragmentIon.B_ION)) {
useAnnotation = true;
}
} else if (currentLabel.startsWith("c")) {
if (iontypes.containsKey(Ion.IonType.PEPTIDE_FRAGMENT_ION)
&& iontypes.get(Ion.IonType.PEPTIDE_FRAGMENT_ION).contains(PeptideFragmentIon.C_ION)) {
useAnnotation = true;
}
} else if (currentLabel.startsWith("x")) {
if (iontypes.containsKey(Ion.IonType.PEPTIDE_FRAGMENT_ION)
&& iontypes.get(Ion.IonType.PEPTIDE_FRAGMENT_ION).contains(PeptideFragmentIon.X_ION)) {
useAnnotation = true;
}
} else if (currentLabel.startsWith("y")) {
if (iontypes.containsKey(Ion.IonType.PEPTIDE_FRAGMENT_ION)
&& iontypes.get(Ion.IonType.PEPTIDE_FRAGMENT_ION).contains(PeptideFragmentIon.Y_ION)) {
useAnnotation = true;
}
} else if (currentLabel.startsWith("z")) {
if (iontypes.containsKey(Ion.IonType.PEPTIDE_FRAGMENT_ION)
&& iontypes.get(Ion.IonType.PEPTIDE_FRAGMENT_ION).contains(PeptideFragmentIon.Z_ION)) {
useAnnotation = true;
}
} else { // other
if (iontypes.containsKey(Ion.IonType.IMMONIUM_ION)
|| iontypes.containsKey(Ion.IonType.PRECURSOR_ION)
|| iontypes.containsKey(Ion.IonType.REPORTER_ION)
|| iontypes.containsKey(Ion.IonType.RELATED_ION)) {
useAnnotation = true;
}
}
// check neutral losses
if (useAnnotation) {
boolean h2oLossSelected = false;
boolean nh3LossSelected = false;
boolean phosphoLossSelected = false;
boolean moxLossSelected = false;
for (NeutralLoss neutralLoss : neutralLosses) {
if (neutralLoss.isSameAs(NeutralLoss.H2O)) {
h2oLossSelected = true;
} else if (neutralLoss.isSameAs(NeutralLoss.NH3)) {
nh3LossSelected = true;
} else if (neutralLoss.isSameAs(NeutralLoss.H3PO4)
|| neutralLoss.isSameAs(NeutralLoss.HPO3)) {
phosphoLossSelected = true;
} else if (neutralLoss.isSameAs(NeutralLoss.CH4OS)) {
moxLossSelected = true;
}
}
if (currentLabel.lastIndexOf("-H2O") != -1 || currentLabel.lastIndexOf("-H20") != -1) {
if (!h2oLossSelected) {
useAnnotation = false;
}
}
if (currentLabel.lastIndexOf("-NH3") != -1) {
if (!nh3LossSelected) {
useAnnotation = false;
}
}
if (currentLabel.lastIndexOf("-H3PO4") != -1
|| currentLabel.lastIndexOf("-HPO3") != -1) {
if (!phosphoLossSelected) {
useAnnotation = false;
}
}
if (currentLabel.lastIndexOf("-CH4OS") != -1) {
if (!moxLossSelected) {
useAnnotation = false;
}
}
}
// check ion charge
if (useAnnotation) {
if (currentLabel.lastIndexOf("+") == -1) {
// test needed to be able to show ions in the "other" group
if (currentLabel.startsWith("a") || currentLabel.startsWith("b") || currentLabel.startsWith("c")
|| currentLabel.startsWith("x") || currentLabel.startsWith("y") || currentLabel.startsWith("z")) {
if (!singleChargeSelected) {
useAnnotation = false;
}
}
} else if (currentLabel.lastIndexOf("+++") != -1) {
if (!moreThanTwoChargesSelected) {
useAnnotation = false;
}
} else if (currentLabel.lastIndexOf("++") != -1) {
if (!doubleChargeSelected) {
useAnnotation = false;
}
}
}
if (useAnnotation) {
filteredAnnotations.add(annotation);
}
}
return filteredAnnotations;
}
    /**
     * Sets an annotation color for the given ion. The color is stored in the
     * static color map, keyed on ion type, ion subtype and neutral losses,
     * and is looked up by determineFragmentIonColor.
     *
     * @param ion the ion
     * @param color the new color
     */
    public static void setIonColor(
            Ion ion,
            Color color
    ) {
        // lazily create the nested maps for this ion type and subtype
        if (!colorMap.containsKey(ion.getType())) {
            colorMap.put(ion.getType(), new HashMap<>(1));
        }
        if (!colorMap.get(ion.getType()).containsKey(ion.getSubType())) {
            colorMap.get(ion.getType()).put(ion.getSubType(), new HashMap<>(1));
        }
        colorMap.get(ion.getType()).get(ion.getSubType()).put(ion.getNeutralLossesAsString(), color);
    }
/**
* Returns the peak color to be used for the given peak label according to
* the color map. If not implemented returns the default color.
*
* @param ion the ion
* @param isSpectrum if true, the special spectrum color is used for the
* y-ion
*
* @return the peak color
*/
public static Color determineFragmentIonColor(
Ion ion,
boolean isSpectrum
) {
if (colorMap.containsKey(ion.getType())
&& colorMap.get(ion.getType()).containsKey(ion.getSubType())
&& colorMap.get(ion.getType()).get(ion.getSubType()).containsKey(ion.getNeutralLossesAsString())) {
return colorMap.get(ion.getType()).get(ion.getSubType()).get(ion.getNeutralLossesAsString());
}
return determineDefaultFragmentIonColor(ion, isSpectrum);
}
/**
* Returns the peak color to be used for the given peak label. The colors
* used are based on the color coding used in MascotDatfile.
*
* @param ion the ion
* @param isSpectrum if true, the special spectrum color is used for the
* y-ion
*
* @return the peak color
*/
public static Color determineDefaultFragmentIonColor(
Ion ion,
boolean isSpectrum
) {
switch (ion.getType()) {
case PEPTIDE_FRAGMENT_ION:
case TAG_FRAGMENT_ION:
switch (ion.getSubType()) {
case PeptideFragmentIon.A_ION:
if (ion.hasNeutralLosses()) {
NeutralLoss[] neutralLosses = ion.getNeutralLosses();
if (neutralLosses.length == 1) {
NeutralLoss neutralLoss = neutralLosses[0];
if (neutralLoss.isSameAs(NeutralLoss.H2O)) {
// light purple-blue
return new Color(171, 161, 255);
} else if (neutralLoss.isSameAs(NeutralLoss.NH3)) {
// purple pink
return new Color(248, 151, 202);
} else if (neutralLoss.isSameAs(NeutralLoss.H3PO4)
|| neutralLoss.isSameAs(NeutralLoss.HPO3)) {
return Color.BLACK;
}
} else if (neutralLosses.length > 1) {
return Color.GRAY;
}
}
// turquoise
return new Color(153, 0, 0);
case PeptideFragmentIon.B_ION:
if (ion.hasNeutralLosses()) {
NeutralLoss[] neutralLosses = ion.getNeutralLosses();
if (neutralLosses.length == 1) {
NeutralLoss neutralLoss = neutralLosses[0];
if (neutralLoss.isSameAs(NeutralLoss.H2O)) {
// nice blue
return new Color(0, 125, 200);
} else if (neutralLoss.isSameAs(NeutralLoss.NH3)) {
// another purple
return new Color(153, 0, 255);
} else if (neutralLoss.isSameAs(NeutralLoss.H3PO4)
|| neutralLoss.isSameAs(NeutralLoss.HPO3)) {
return Color.PINK;
}
} else if (neutralLosses.length > 1) {
return Color.GRAY;
}
}
// dark blue
return new Color(0, 0, 255);
case PeptideFragmentIon.C_ION:
if (ion.hasNeutralLosses()) {
NeutralLoss[] neutralLosses = ion.getNeutralLosses();
if (neutralLosses.length == 1) {
NeutralLoss neutralLoss = neutralLosses[0];
if (neutralLoss.isSameAs(NeutralLoss.H2O)) {
// ??
return new Color(188, 150, 255);
} else if (neutralLoss.isSameAs(NeutralLoss.NH3)) {
// ??
return new Color(255, 0, 255);
} else if (neutralLoss.isSameAs(NeutralLoss.H3PO4)
|| neutralLoss.isSameAs(NeutralLoss.HPO3)) {
return Color.PINK;
}
} else if (neutralLosses.length > 1) {
return Color.GRAY;
}
}
// purple blue
return new Color(188, 0, 255);
case PeptideFragmentIon.X_ION:
if (ion.hasNeutralLosses()) {
NeutralLoss[] neutralLosses = ion.getNeutralLosses();
if (neutralLosses.length == 1) {
NeutralLoss neutralLoss = neutralLosses[0];
if (neutralLoss.isSameAs(NeutralLoss.H2O)) {
// ??
return new Color(78, 200, 150);
} else if (neutralLoss.isSameAs(NeutralLoss.NH3)) {
// ??
return new Color(255, 200, 255);
} else if (neutralLoss.isSameAs(NeutralLoss.H3PO4)
|| neutralLoss.isSameAs(NeutralLoss.HPO3)) {
return Color.PINK;
}
} else if (neutralLosses.length > 1) {
return Color.GRAY;
}
}
// green
return new Color(78, 200, 0);
case PeptideFragmentIon.Y_ION:
if (ion.hasNeutralLosses()) {
NeutralLoss[] neutralLosses = ion.getNeutralLosses();
if (neutralLosses.length == 1) {
NeutralLoss neutralLoss = neutralLosses[0];
if (neutralLoss.isSameAs(NeutralLoss.H2O)) {
if (isSpectrum) {
// navy blue
return new Color(0, 70, 135);
} else {
// orange
return new Color(255, 150, 0);
}
} else if (neutralLoss.isSameAs(NeutralLoss.NH3)) {
if (isSpectrum) {
// another purple
return new Color(155, 0, 155);
} else {
// pink
return new Color(255, 0, 150);
}
} else if (neutralLoss.isSameAs(NeutralLoss.H3PO4)
|| neutralLoss.isSameAs(NeutralLoss.HPO3)) {
return Color.PINK;
}
} else if (neutralLosses.length > 1) {
return Color.GRAY;
}
}
if (isSpectrum) {
// black
return Color.BLACK; // special case for spectra, as the default peak color is red...
} else {
// red
return new Color(255, 0, 0);
}
case PeptideFragmentIon.Z_ION:
if (ion.hasNeutralLosses()) {
NeutralLoss[] neutralLosses = ion.getNeutralLosses();
if (neutralLosses.length == 1) {
NeutralLoss neutralLoss = neutralLosses[0];
if (neutralLoss.isSameAs(NeutralLoss.H2O)) {
// ??
return new Color(64, 179, 150);
} else if (neutralLoss.isSameAs(NeutralLoss.NH3)) {
// ??
return new Color(255, 179, 150);
} else if (neutralLoss.isSameAs(NeutralLoss.H3PO4)
|| neutralLoss.isSameAs(NeutralLoss.HPO3)) {
return Color.PINK;
}
} else if (neutralLosses.length > 1) {
return Color.GRAY;
}
}
// dark green
return new Color(64, 179, 0);
default:
return Color.GRAY;
}
case PRECURSOR_ION:
return Color.GRAY;
case IMMONIUM_ION:
return Color.GRAY;
case REPORTER_ION:
return Color.ORANGE;
case RELATED_ION:
return Color.GRAY;
default:
return Color.GRAY;
}
}
/**
* Returns the color to use for the given fragment ion label.
*
* @deprecated use the method based on the Ion class instead
* @param seriesLabel the series label
* @return the fragment ion color
*/
public static Color determineFragmentIonColor(
String seriesLabel
) {
Color currentColor = Color.GRAY;
if (seriesLabel.startsWith("a")) {
// turquoise
currentColor = new Color(153, 0, 0);
if (seriesLabel.lastIndexOf("H2O") != -1 || seriesLabel.lastIndexOf("H20") != -1) {
// light purple-blue
currentColor = new Color(171, 161, 255);
} else if (seriesLabel.lastIndexOf("NH3") != -1) {
// purple pink
currentColor = new Color(248, 151, 202);
}
// change color slightly if a double charge is detected
if (seriesLabel.lastIndexOf("++") != -1) {
currentColor = new Color(currentColor.getRed() - 100, currentColor.getGreen(), currentColor.getBlue());
}
} else if (seriesLabel.startsWith("b")) {
// dark blue
currentColor = new Color(0, 0, 255);
// change color slightly if a neutral loss is detected
if (seriesLabel.lastIndexOf("H2O") != -1 || seriesLabel.lastIndexOf("H20") != -1) {
currentColor = new Color(0, 150, 255);
} else if (seriesLabel.lastIndexOf("NH3") != -1 || seriesLabel.equalsIgnoreCase("b ions - mod.")) {
currentColor = new Color(150, 0, 255);
}
// change color slightly if a double charge is detected
if (seriesLabel.lastIndexOf("++") != -1) {
currentColor = new Color(
currentColor.getRed(),
currentColor.getGreen(),
currentColor.getBlue() - 100
);
}
} else if (seriesLabel.startsWith("c")) {
// purple blue
currentColor = new Color(188, 0, 255);
// change color slightly if a neutral loss is detected
if (seriesLabel.lastIndexOf("H2O") != -1 || seriesLabel.lastIndexOf("H20") != -1) {
currentColor = new Color(188, 150, 255);
} else if (seriesLabel.lastIndexOf("NH3") != -1) {
currentColor = new Color(255, 0, 255);
}
// change color slightly if a double charge is detected
if (seriesLabel.lastIndexOf("++") != -1) {
currentColor = new Color(
currentColor.getRed(),
currentColor.getGreen(),
currentColor.getBlue() - 100
);
}
} else if (seriesLabel.startsWith("x")) {
// green
currentColor = new Color(78, 200, 0);
// change color slightly if a neutral loss is detected
if (seriesLabel.lastIndexOf("H2O") != -1 || seriesLabel.lastIndexOf("H20") != -1) {
currentColor = new Color(78, 200, 150);
} else if (seriesLabel.lastIndexOf("NH3") != -1) {
currentColor = new Color(255, 200, 255);
}
// change color slightly if a double charge is detected
if (seriesLabel.lastIndexOf("++") != -1) {
currentColor = new Color(
currentColor.getRed(),
currentColor.getGreen() - 100,
currentColor.getBlue()
);
}
} else if (seriesLabel.startsWith("y")) {
// red
currentColor = new Color(255, 0, 0);
// change color slightly if a neutral loss is detected
if (seriesLabel.lastIndexOf("H2O") != -1 || seriesLabel.lastIndexOf("H20") != -1) {
currentColor = new Color(255, 150, 0);
} else if (seriesLabel.lastIndexOf("NH3") != -1 || seriesLabel.equalsIgnoreCase("y ions - mod.")) {
currentColor = new Color(255, 0, 150);
}
// change color slightly if a double charge is detected
if (seriesLabel.lastIndexOf("++") != -1) {
currentColor = new Color(currentColor.getRed() - 100, currentColor.getGreen(), currentColor.getBlue());
}
} else if (seriesLabel.startsWith("z")) {
// dark green
currentColor = new Color(64, 179, 0);
// change color slightly if a neutral loss is detected
if (seriesLabel.lastIndexOf("H2O") != -1 || seriesLabel.lastIndexOf("H20") != -1) {
currentColor = new Color(64, 179, 150);
} else if (seriesLabel.lastIndexOf("NH3") != -1) {
currentColor = new Color(255, 179, 150);
}
// change color slightly if a double charge is detected
if (seriesLabel.lastIndexOf("++") != -1) {
currentColor = new Color(currentColor.getRed(), currentColor.getGreen() - 100, currentColor.getBlue());
}
} else if (seriesLabel.startsWith("iTRAQ") || seriesLabel.startsWith("TMT")) {
return Color.ORANGE;
}
return currentColor;
}
    /**
     * Add reference areas annotating the de novo tags, using default percent
     * height of 0.9 for the forward ions and 1.0 for the reverse ions and
     * default alpha levels of 0.2. Fixed modifications are not annotated.
     *
     * @param currentPeptide the current peptide sequence
     * @param annotations the current fragment ion annotations
     * @param aForwardIon the forward de novo sequencing fragment ion type,
     * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
     * PeptideFragmentIon.C_ION
     * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
     * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
     * PeptideFragmentIon.Z_ION
     * @param aDeNovoCharge the de novo sequencing charge
     * @param showForwardTags if true, the forward de novo sequencing tags are
     * displayed
     * @param showRewindTags if true, the reverse de novo sequencing tags are
     * displayed
     * @param mirrored if true the annotation is for the mirrored spectrum
     * @param modificationParameters the modification parameters
     * @param sequenceProvider a provider for the protein sequences
     * @param modificationSequenceMatchingParameters the sequence matching
     * preferences for modification to peptide mapping
     */
    public void addAutomaticDeNovoSequencing(
            Peptide currentPeptide,
            IonMatch[] annotations,
            int aForwardIon,
            int aRewindIon,
            int aDeNovoCharge,
            boolean showForwardTags,
            boolean showRewindTags,
            boolean mirrored,
            ModificationParameters modificationParameters,
            SequenceProvider sequenceProvider,
            SequenceMatchingParameters modificationSequenceMatchingParameters
    ) {
        // delegate to the fully parameterized method with the defaults
        // documented above
        addAutomaticDeNovoSequencing(
                currentPeptide,
                annotations,
                aForwardIon,
                aRewindIon,
                aDeNovoCharge,
                showForwardTags,
                showRewindTags,
                0.9,  // forward ion percent height
                1.0,  // rewind ion percent height
                0.2f, // forward alpha level
                0.2f, // rewind alpha level
                null, // presumably default colors — confirm with the full method
                true, // exclude fixed modifications
                mirrored,
                modificationParameters,
                sequenceProvider,
                modificationSequenceMatchingParameters
        );
    }
    /**
     * Add reference areas annotating the de novo tags, using default alpha
     * levels of 0.2. Fixed modifications are not annotated.
     *
     * @param currentPeptide the current peptide sequence
     * @param annotations the current fragment ion annotations
     * @param aForwardIon the forward de novo sequencing fragment ion type,
     * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
     * PeptideFragmentIon.C_ION
     * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
     * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
     * PeptideFragmentIon.Z_ION
     * @param aDeNovoCharge the de novo sequencing charge
     * @param showForwardTags if true, the forward de novo sequencing tags are
     * displayed
     * @param showRewindTags if true, the reverse de novo sequencing tags are
     * displayed
     * @param forwardIonPercentHeight the percent height of the forward ion
     * annotation [0-1]
     * @param rewindIonPercentHeight the percent height of the reverse ion
     * annotation [0-1]
     * @param mirrored if true the annotation is for the mirrored spectrum
     * @param modificationParameters the modification parameters
     * @param sequenceProvider a provider for the protein sequences
     * @param modificationSequenceMatchingParameters the sequence matching
     * preferences for modification to peptide mapping
     */
    public void addAutomaticDeNovoSequencing(
            Peptide currentPeptide,
            IonMatch[] annotations,
            int aForwardIon,
            int aRewindIon,
            int aDeNovoCharge,
            boolean showForwardTags,
            boolean showRewindTags,
            double forwardIonPercentHeight,
            double rewindIonPercentHeight,
            boolean mirrored,
            ModificationParameters modificationParameters,
            SequenceProvider sequenceProvider,
            SequenceMatchingParameters modificationSequenceMatchingParameters
    ) {
        // delegate to the fully parameterized method with the defaults
        // documented above
        addAutomaticDeNovoSequencing(
                currentPeptide,
                annotations,
                aForwardIon,
                aRewindIon,
                aDeNovoCharge,
                showForwardTags,
                showRewindTags,
                forwardIonPercentHeight,
                rewindIonPercentHeight,
                0.2f, // forward alpha level
                0.2f, // rewind alpha level
                null, // presumably default colors — confirm with the full method
                true, // exclude fixed modifications
                mirrored,
                modificationParameters,
                sequenceProvider,
                modificationSequenceMatchingParameters
        );
    }
    /**
     * Add reference areas annotating the de novo tags, using default alpha
     * levels of 0.2.
     *
     * @param currentPeptide the current peptide sequence
     * @param annotations the current fragment ion annotations
     * @param aForwardIon the forward de novo sequencing fragment ion type,
     * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
     * PeptideFragmentIon.C_ION
     * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
     * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
     * PeptideFragmentIon.Z_ION
     * @param aDeNovoCharge the de novo sequencing charge
     * @param showForwardTags if true, the forward de novo sequencing tags are
     * displayed
     * @param showRewindTags if true, the reverse de novo sequencing tags are
     * displayed
     * @param forwardIonPercentHeight the percent height of the forward ion
     * annotation [0-1]
     * @param rewindIonPercentHeight the percent height of the reverse ion
     * annotation [0-1]
     * @param excludeFixedModifications if true, fixed modifications are not
     * annotated
     * @param mirrored if true the annotation is for the mirrored spectrum
     * @param modificationParameters the modification parameters
     * @param sequenceProvider a provider for the protein sequences
     * @param modificationSequenceMatchingParameters the sequence matching
     * preferences for modification to peptide mapping
     */
    public void addAutomaticDeNovoSequencing(
            Peptide currentPeptide,
            IonMatch[] annotations,
            int aForwardIon,
            int aRewindIon,
            int aDeNovoCharge,
            boolean showForwardTags,
            boolean showRewindTags,
            double forwardIonPercentHeight,
            double rewindIonPercentHeight,
            boolean excludeFixedModifications,
            boolean mirrored,
            ModificationParameters modificationParameters,
            SequenceProvider sequenceProvider,
            SequenceMatchingParameters modificationSequenceMatchingParameters
    ) {
        // delegate to the fully parameterized method with the defaults
        // documented above
        addAutomaticDeNovoSequencing(
                currentPeptide,
                annotations,
                aForwardIon,
                aRewindIon,
                aDeNovoCharge,
                showForwardTags,
                showRewindTags,
                forwardIonPercentHeight,
                rewindIonPercentHeight,
                0.2f, // forward alpha level
                0.2f, // rewind alpha level
                null, // presumably default colors — confirm with the full method
                excludeFixedModifications,
                mirrored,
                modificationParameters,
                sequenceProvider,
                modificationSequenceMatchingParameters
        );
    }
/**
 * Adds reference areas annotating the de novo tags for the given tag
 * sequence, using the default percent heights (0.9 for the forward ions,
 * 1.0 for the reverse ions) and the default alpha level of 0.2. Fixed
 * modifications are not annotated.
 *
 * @param tag the current tag sequence
 * @param annotations the current fragment ion annotations
 * @param aForwardIon the forward de novo sequencing fragment ion type,
 * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
 * PeptideFragmentIon.C_ION
 * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
 * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
 * PeptideFragmentIon.Z_ION
 * @param aDeNovoCharge the de novo sequencing charge
 * @param showForwardTags if true, the forward de novo sequencing tags are
 * displayed
 * @param showRewindTags if true, the reverse de novo sequencing tags are
 * displayed
 * @param mirrored if true the annotation is for the mirrored spectrum
 */
public void addAutomaticDeNovoSequencing(
        Tag tag,
        IonMatch[] annotations,
        int aForwardIon,
        int aRewindIon,
        int aDeNovoCharge,
        boolean showForwardTags,
        boolean showRewindTags,
        boolean mirrored
) {

    // Default drawing parameters for this convenience overload.
    final double defaultForwardPercentHeight = 0.9;
    final double defaultRewindPercentHeight = 1.0;
    final float defaultAlphaLevel = 0.2f;

    addAutomaticDeNovoSequencing(
            tag, annotations,
            aForwardIon, aRewindIon, aDeNovoCharge,
            showForwardTags, showRewindTags,
            defaultForwardPercentHeight, defaultRewindPercentHeight,
            defaultAlphaLevel, defaultAlphaLevel,
            null,
            true, // exclude fixed modifications from the annotation
            mirrored
    );
}
/**
 * Adds reference areas annotating the de novo tags for the given tag
 * sequence, using the default alpha level of 0.2. Fixed modifications are
 * not annotated.
 *
 * @param tag the current tag sequence
 * @param annotations the current fragment ion annotations
 * @param aForwardIon the forward de novo sequencing fragment ion type,
 * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
 * PeptideFragmentIon.C_ION
 * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
 * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
 * PeptideFragmentIon.Z_ION
 * @param aDeNovoCharge the de novo sequencing charge
 * @param showForwardTags if true, the forward de novo sequencing tags are
 * displayed
 * @param showRewindTags if true, the reverse de novo sequencing tags are
 * displayed
 * @param forwardIonPercentHeight the percent height of the forward ion
 * annotation [0-1]
 * @param rewindIonPercentHeight the percent height of the reverse ion
 * annotation [0-1]
 * @param mirrored if true the annotation is for the mirrored spectrum
 */
public void addAutomaticDeNovoSequencing(
        Tag tag,
        IonMatch[] annotations,
        int aForwardIon,
        int aRewindIon,
        int aDeNovoCharge,
        boolean showForwardTags,
        boolean showRewindTags,
        double forwardIonPercentHeight,
        double rewindIonPercentHeight,
        boolean mirrored
) {

    // Default alpha level; no per-residue alpha overrides.
    final float defaultAlphaLevel = 0.2f;

    addAutomaticDeNovoSequencing(
            tag, annotations,
            aForwardIon, aRewindIon, aDeNovoCharge,
            showForwardTags, showRewindTags,
            forwardIonPercentHeight, rewindIonPercentHeight,
            defaultAlphaLevel, defaultAlphaLevel,
            null,
            true, // exclude fixed modifications from the annotation
            mirrored
    );
}
/**
 * Adds reference areas annotating the de novo tags for the given tag
 * sequence, using the default alpha level of 0.2 unless individual alpha
 * levels are provided.
 *
 * @param tag the current tag sequence
 * @param annotations the current fragment ion annotations
 * @param aForwardIon the forward de novo sequencing fragment ion type,
 * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
 * PeptideFragmentIon.C_ION
 * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
 * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
 * PeptideFragmentIon.Z_ION
 * @param aDeNovoCharge the de novo sequencing charge
 * @param showForwardTags if true, the forward de novo sequencing tags are
 * displayed
 * @param showRewindTags if true, the reverse de novo sequencing tags are
 * displayed
 * @param forwardIonPercentHeight the percent height of the forward ion
 * annotation [0-1]
 * @param rewindIonPercentHeight the percent height of the reverse ion
 * annotation [0-1]
 * @param alphaLevels the individual alpha levels, if set override
 * forwardIonAlphaLevel and rewindIonAlphaLevel
 * @param excludeFixedModifications are fixed modifications to be annotated?
 * @param mirrored if true the annotation is for the mirrored spectrum
 */
public void addAutomaticDeNovoSequencing(
        Tag tag,
        IonMatch[] annotations,
        int aForwardIon,
        int aRewindIon,
        int aDeNovoCharge,
        boolean showForwardTags,
        boolean showRewindTags,
        double forwardIonPercentHeight,
        double rewindIonPercentHeight,
        ArrayList<float[]> alphaLevels,
        boolean excludeFixedModifications,
        boolean mirrored
) {

    // Default alpha level, used only when alphaLevels is null.
    final float defaultAlphaLevel = 0.2f;

    addAutomaticDeNovoSequencing(
            tag, annotations,
            aForwardIon, aRewindIon, aDeNovoCharge,
            showForwardTags, showRewindTags,
            forwardIonPercentHeight, rewindIonPercentHeight,
            defaultAlphaLevel, defaultAlphaLevel,
            alphaLevels,
            excludeFixedModifications, mirrored
    );
}
/**
 * Adds reference areas annotating the de novo tags for the given peptide,
 * using the default alpha level of 0.2 unless individual alpha levels are
 * provided.
 *
 * @param currentPeptide the current peptide sequence
 * @param annotations the current fragment ion annotations
 * @param aForwardIon the forward de novo sequencing fragment ion type,
 * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
 * PeptideFragmentIon.C_ION
 * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
 * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
 * PeptideFragmentIon.Z_ION
 * @param aDeNovoCharge the de novo sequencing charge
 * @param showForwardTags if true, the forward de novo sequencing tags are
 * displayed
 * @param showRewindTags if true, the reverse de novo sequencing tags are
 * displayed
 * @param forwardIonPercentHeight the percent height of the forward ion
 * annotation [0-1]
 * @param rewindIonPercentHeight the percent height of the reverse ion
 * annotation [0-1]
 * @param alphaLevels the individual alpha levels, if set override
 * forwardIonAlphaLevel and rewindIonAlphaLevel
 * @param excludeFixedModifications are fixed modifications to be annotated?
 * @param mirrored if true the annotation is for the mirrored spectrum
 * @param modificationParameters the modification parameters
 * @param sequenceProvider a provider for the protein sequences
 * @param modificationSequenceMatchingParameters the sequence matching
 * preferences for modification to peptide mapping
 */
public void addAutomaticDeNovoSequencing(
        Peptide currentPeptide,
        IonMatch[] annotations,
        int aForwardIon,
        int aRewindIon,
        int aDeNovoCharge,
        boolean showForwardTags,
        boolean showRewindTags,
        double forwardIonPercentHeight,
        double rewindIonPercentHeight,
        ArrayList<float[]> alphaLevels,
        boolean excludeFixedModifications,
        boolean mirrored,
        ModificationParameters modificationParameters,
        SequenceProvider sequenceProvider,
        SequenceMatchingParameters modificationSequenceMatchingParameters
) {

    // Default alpha level, used only when alphaLevels is null.
    final float defaultAlphaLevel = 0.2f;

    addAutomaticDeNovoSequencing(
            currentPeptide, annotations,
            aForwardIon, aRewindIon, aDeNovoCharge,
            showForwardTags, showRewindTags,
            forwardIonPercentHeight, rewindIonPercentHeight,
            defaultAlphaLevel, defaultAlphaLevel,
            alphaLevels,
            excludeFixedModifications, mirrored,
            modificationParameters, sequenceProvider,
            modificationSequenceMatchingParameters
    );
}
/**
 * Add reference areas annotating the de novo tags.
 *
 * @param currentPeptide the current peptide sequence
 * @param annotations the current fragment ion annotations
 * @param aForwardIon the forward de novo sequencing fragment ion type,
 * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
 * PeptideFragmentIon.C_ION
 * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
 * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
 * PeptideFragmentIon.Z_ION
 * @param aDeNovoCharge the de novo sequencing charge
 * @param showForwardTags if true, the forward de novo sequencing tags are
 * displayed
 * @param showRewindTags if true, the reverse de novo sequencing tags are
 * displayed
 * @param forwardIonPercentHeight the percent height of the forward ion
 * annotation [0-1]
 * @param rewindIonPercentHeight the percent height of the reverse ion
 * annotation [0-1]
 * @param forwardIonAlphaLevel alpha level of the forward ions
 * @param rewindIonAlphaLevel alpha level of the reverse ions
 * @param alphaLevels the individual alpha levels, if set override
 * forwardIonAlphaLevel and rewindIonAlphaLevel
 * @param excludeFixedModifications are fixed modifications to be annotated?
 * @param mirrored if true the annotation is for the mirrored spectrum
 * @param modificationParameters the modification parameters
 * @param sequenceProvider a provider for the protein sequences
 * @param modificationSequenceMatchingParameters the sequence matching
 * preferences for modification to peptide mapping
 */
public void addAutomaticDeNovoSequencing(
        Peptide currentPeptide,
        IonMatch[] annotations,
        int aForwardIon,
        int aRewindIon,
        int aDeNovoCharge,
        boolean showForwardTags,
        boolean showRewindTags,
        double forwardIonPercentHeight,
        double rewindIonPercentHeight,
        float forwardIonAlphaLevel,
        float rewindIonAlphaLevel,
        ArrayList<float[]> alphaLevels,
        boolean excludeFixedModifications,
        boolean mirrored,
        ModificationParameters modificationParameters,
        SequenceProvider sequenceProvider,
        SequenceMatchingParameters modificationSequenceMatchingParameters
) {
    int forwardIon = aForwardIon;
    int reverseIon = aRewindIon;
    int deNovoCharge = aDeNovoCharge;
    // One slot per residue; slot n-1 holds the matched ion with fragment
    // number n (fragment numbers are 1-based, hence the -1 below).
    IonMatch[] forwardIons = new IonMatch[currentPeptide.getSequence().length()];
    IonMatch[] reverseIons = new IonMatch[currentPeptide.getSequence().length()];
    // iterate the annotations and find the de novo tags: keep only plain
    // peptide fragment ions (no neutral losses) at the requested charge,
    // then bucket them by fragment number into the forward/reverse arrays
    Arrays.stream(annotations)
            .filter(
                    tempMatch -> tempMatch.ion.getType() == Ion.IonType.PEPTIDE_FRAGMENT_ION
                    && !tempMatch.ion.hasNeutralLosses()
                    && tempMatch.charge == deNovoCharge
            )
            .forEach(
                    tempMatch -> {
                        PeptideFragmentIon fragmentIon = (PeptideFragmentIon) tempMatch.ion;
                        if (fragmentIon.getSubType() == forwardIon) {
                            forwardIons[fragmentIon.getNumber() - 1] = tempMatch;
                        } else if (fragmentIon.getSubType() == reverseIon) {
                            reverseIons[fragmentIon.getNumber() - 1] = tempMatch;
                        }
                    }
            );
    // Collect the sites carrying a variable modification; residues at these
    // sites get a '*' appended to their annotation label below.
    // NOTE(review): the site values appear to be 1-based after mapping
    // through ModificationUtils.getSite — confirm against that utility.
    HashSet<Integer> modifiedIndexes = Arrays.stream(currentPeptide.getVariableModifications())
            .map(
                    modificationMatch -> modificationMatch.getSite()
            )
            .map(
                    i -> ModificationUtils.getSite(i, currentPeptide.getSequence().length())
            )
            .collect(
                    Collectors.toCollection(HashSet::new)
            );
    // Optionally flag fixed modification sites as well (parameter name is
    // inverted: excludeFixedModifications == false means "annotate them").
    if (!excludeFixedModifications) {
        String[] fixedModifications = currentPeptide.getFixedModifications(
                modificationParameters,
                sequenceProvider,
                modificationSequenceMatchingParameters
        );
        for (int i = 0; i < fixedModifications.length; i++) {
            if (fixedModifications[i] != null) {
                modifiedIndexes.add(ModificationUtils.getSite(i, currentPeptide.getSequence().length()));
            }
        }
    }
    // add reverse ion de novo tags (x, y or z): draw an area between every
    // pair of consecutive reverse ions, labeled with the residue read from
    // the C-terminal end of the sequence
    if (showRewindTags) {
        Color annotationColor = SpectrumPanel.determineFragmentIonColor(Ion.getGenericIon(Ion.IonType.PEPTIDE_FRAGMENT_ION, reverseIon), false);
        for (int i = 1; i < reverseIons.length; i++) {
            // both flanking ions must have been observed for the tag to be drawn
            if (reverseIons[i] != null && reverseIons[i - 1] != null) {
                String mod = "";
                if (modifiedIndexes.contains(currentPeptide.getSequence().length() - i)) {
                    mod = "*";
                }
                float currentAlphaLevel = rewindIonAlphaLevel;
                if (alphaLevels != null) {
                    // per-residue override; index counts from the C-terminus
                    currentAlphaLevel = alphaLevels.get(0)[currentPeptide.getSequence().length() - i];
                }
                addReferenceAreaXAxis(
                        new ReferenceArea(
                                "r" + i + "_" + mirrored,
                                currentPeptide.getSequence()
                                        .substring(currentPeptide.getSequence().length() - i - 1, currentPeptide.getSequence().length() - i) + mod,
                                reverseIons[i - 1].peakMz,
                                reverseIons[i].peakMz,
                                annotationColor,
                                currentAlphaLevel,
                                false,
                                true,
                                annotationColor,
                                true,
                                Color.lightGray,
                                0.2f,
                                rewindIonPercentHeight,
                                !mirrored
                        )
                );
            }
        }
    }
    // add forward ion de novo tags (a, b or c): same scheme, but the residue
    // label is read from the N-terminal end of the sequence
    if (showForwardTags) {
        Color annotationColor = SpectrumPanel.determineFragmentIonColor(Ion.getGenericIon(Ion.IonType.PEPTIDE_FRAGMENT_ION, forwardIon), false);
        for (int i = 1; i < forwardIons.length; i++) {
            // both flanking ions must have been observed for the tag to be drawn
            if (forwardIons[i] != null && forwardIons[i - 1] != null) {
                String mod = "";
                if (modifiedIndexes.contains(i + 1)) {
                    mod = "*";
                }
                float currentAlphaLevel = forwardIonAlphaLevel;
                if (alphaLevels != null) {
                    currentAlphaLevel = alphaLevels.get(0)[i];
                }
                addReferenceAreaXAxis(
                        new ReferenceArea(
                                "f" + i + "_" + mirrored,
                                currentPeptide.getSequence().substring(i, i + 1) + mod,
                                forwardIons[i - 1].peakMz,
                                forwardIons[i].peakMz,
                                annotationColor,
                                currentAlphaLevel,
                                false,
                                true,
                                annotationColor,
                                true,
                                Color.lightGray,
                                0.2f,
                                forwardIonPercentHeight,
                                !mirrored
                        )
                );
            }
        }
    }
}
/**
 * Add reference areas annotating the de novo tags.
 *
 * @param tag the current tag sequence
 * @param annotations the current fragment ion annotations
 * @param aForwardIon the forward de novo sequencing fragment ion type,
 * i.e., PeptideFragmentIon.A_ION, PeptideFragmentIon.B_ION or
 * PeptideFragmentIon.C_ION
 * @param aRewindIon the reverse de novo sequencing fragment ion type, i.e.,
 * PeptideFragmentIon.X_ION, PeptideFragmentIon.Y_ION or
 * PeptideFragmentIon.Z_ION
 * @param aDeNovoCharge the de novo sequencing charge
 * @param showForwardTags if true, the forward de novo sequencing tags are
 * displayed
 * @param showReverseTags if true, the reverse de novo sequencing tags are
 * displayed
 * @param forwardIonPercentHeight the percent height of the forward ion
 * annotation [0-1]
 * @param rewindIonPercentHeight the percent height of the reverse ion
 * annotation [0-1]
 * @param forwardIonAlphaLevel alpha level of the forward ions
 * @param rewindIonAlphaLevel alpha level of the reverse ions
 * @param alphaLevels the individual alpha levels, if set override
 * forwardIonAlphaLevel and rewindIonAlphaLevel
 * @param excludeFixedModifications are fixed modifications to be annotated?
 * NOTE(review): this parameter is currently not read by this overload —
 * fixed modifications are never annotated for tags; confirm intent.
 * @param mirrored if true the annotation is for the mirrored spectrum
 */
public void addAutomaticDeNovoSequencing(
        Tag tag,
        IonMatch[] annotations,
        int aForwardIon,
        int aRewindIon,
        int aDeNovoCharge,
        boolean showForwardTags,
        boolean showReverseTags,
        double forwardIonPercentHeight,
        double rewindIonPercentHeight,
        float forwardIonAlphaLevel,
        float rewindIonAlphaLevel,
        ArrayList<float[]> alphaLevels,
        boolean excludeFixedModifications,
        boolean mirrored
) {
    int forwardIon = aForwardIon;
    int rewindIon = aRewindIon;
    int deNovoCharge = aDeNovoCharge;
    // @TODO: include multiple ions
    // Bucket the annotated tag fragment ions by their sub-number
    // (position within the tag component).
    HashMap<Integer, IonMatch> forwardMap = new HashMap<>(2);
    HashMap<Integer, IonMatch> rewindMap = new HashMap<>(2);
    for (IonMatch ionMatch : annotations) {
        // keep only plain tag fragment ions (no neutral losses) at the
        // requested de novo charge
        if (ionMatch.ion.getType() == Ion.IonType.TAG_FRAGMENT_ION
                && !ionMatch.ion.hasNeutralLosses()
                && ionMatch.charge == deNovoCharge) {
            TagFragmentIon fragmentIon = (TagFragmentIon) ionMatch.ion;
            if (fragmentIon.getSubType() == forwardIon) {
                forwardMap.put(fragmentIon.getSubNumber(), ionMatch);
            } else if (fragmentIon.getSubType() == rewindIon) {
                rewindMap.put(fragmentIon.getSubNumber(), ionMatch);
            }
        }
    }
    // add forward annotation: walk the tag components N-terminal first
    for (int tagCount = 0; tagCount < tag.getContent().size(); tagCount++) {
        TagComponent tagComponent = tag.getContent().get(tagCount);
        if (tagComponent instanceof AminoAcidSequence) {
            AminoAcidSequence aminoAcidSequence = (AminoAcidSequence) tagComponent;
            // add forward ion de novo tags (a, b or c)
            if (showForwardTags) {
                Color annotationColor = SpectrumPanel.determineFragmentIonColor(Ion.getGenericIon(Ion.IonType.PEPTIDE_FRAGMENT_ION, forwardIon), false);
                // NOTE(review): the array appears to be indexed 0 (N-term)
                // .. length+1 (C-term), inferred from the bounds used below.
                String[] variableModifications = aminoAcidSequence.getIndexedVariableModifications();
                for (int i = 0; i < aminoAcidSequence.length(); i++) {
                    // an area is drawn only when both flanking ions were
                    // observed. NOTE(review): sub-numbers look 1-based, so
                    // the i == 0 lookup is presumably always empty — confirm.
                    IonMatch ionMatch1 = forwardMap.get(i);
                    IonMatch ionMatch2 = forwardMap.get(i + 1);
                    if (ionMatch1 != null && ionMatch2 != null) {
                        // '*' marks a variable modification on this residue
                        // or on the adjacent terminus
                        String mod = variableModifications[i + 1] != null ? "*" : "";
                        if (i == 0 && variableModifications[0] != null) {
                            mod = "*";
                        } else if (i == aminoAcidSequence.length() - 1 && variableModifications[aminoAcidSequence.length() + 1] != null) {
                            mod = "*";
                        }
                        float currentAlphaLevel = forwardIonAlphaLevel;
                        if (alphaLevels != null) {
                            // per-component, per-residue alpha override
                            currentAlphaLevel = alphaLevels.get(tagCount)[i];
                        }
                        addReferenceAreaXAxis(
                                new ReferenceArea(
                                        "f" + i + "_" + mirrored,
                                        aminoAcidSequence.charAt(i) + mod,
                                        ionMatch1.peakMz,
                                        ionMatch2.peakMz,
                                        annotationColor,
                                        currentAlphaLevel,
                                        false,
                                        true,
                                        annotationColor,
                                        true,
                                        Color.lightGray,
                                        0.2f,
                                        forwardIonPercentHeight,
                                        !mirrored
                                )
                        );
                    }
                }
            }
        } else if (tagComponent instanceof MassGap) {
            // nothing to annotate here
        } else {
            throw new UnsupportedOperationException("Spectrum annotation not implemented for tag component " + tagComponent.getClass() + ".");
        }
    }
    // Reverse ions are numbered from the C-terminus, so walk the
    // components in reversed order.
    ArrayList<TagComponent> reversedTag = new ArrayList<>(tag.getContent());
    Collections.reverse(reversedTag);
    // add reverse annotation
    for (int tagCount = 0; tagCount < reversedTag.size(); tagCount++) {
        TagComponent tagComponent = reversedTag.get(tagCount);
        if (tagComponent instanceof AminoAcidSequence) {
            AminoAcidSequence aminoAcidSequence = (AminoAcidSequence) tagComponent;
            // add reverse ion de novo tags (x, y or z)
            if (showReverseTags) {
                Color annotationColor = SpectrumPanel.determineFragmentIonColor(Ion.getGenericIon(Ion.IonType.PEPTIDE_FRAGMENT_ION, rewindIon), false);
                String[] variableModifications = aminoAcidSequence.getIndexedVariableModifications();
                for (int i = 0; i < aminoAcidSequence.length(); i++) {
                    IonMatch ionMatch1 = rewindMap.get(i);
                    IonMatch ionMatch2 = rewindMap.get(i + 1);
                    if (ionMatch1 != null && ionMatch2 != null) {
                        // map the reverse ion index back to a sequence position
                        int sequenceIndex = aminoAcidSequence.length() - i - 1;
                        String mod = variableModifications[sequenceIndex + 1] != null ? "*" : "";
                        if (sequenceIndex == 0 && variableModifications[0] != null) {
                            mod = "*";
                        } else if (sequenceIndex == aminoAcidSequence.length() - 1 && variableModifications[aminoAcidSequence.length() + 1] != null) {
                            mod = "*";
                        }
                        float currentAlphaLevel = rewindIonAlphaLevel;
                        if (alphaLevels != null) {
                            currentAlphaLevel = alphaLevels.get(tagCount)[sequenceIndex];
                        }
                        addReferenceAreaXAxis(
                                new ReferenceArea(
                                        "r" + sequenceIndex + "_" + mirrored,
                                        aminoAcidSequence.charAt(sequenceIndex) + mod,
                                        ionMatch1.peakMz,
                                        ionMatch2.peakMz,
                                        annotationColor,
                                        currentAlphaLevel,
                                        false,
                                        true,
                                        annotationColor,
                                        true,
                                        Color.lightGray,
                                        0.2f,
                                        rewindIonPercentHeight,
                                        !mirrored
                                )
                        );
                    }
                }
            }
        } else if (tagComponent instanceof MassGap) {
            // nothing to annotate here
        } else {
            throw new UnsupportedOperationException("Spectrum annotation not implemented for tag component " + tagComponent.getClass() + ".");
        }
    }
}
}
|
apache-2.0
|
micorochio/SVN
|
workspace/MobilePlatform/src/com/ustcinfo/mobile/platform/view/AppAssetIntroductionActivity.java
|
13857
|
package com.ustcinfo.mobile.platform.view;
import java.util.List;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Button;
import android.widget.Gallery;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.ustcinfo.mobile.AppConstants;
import com.ustcinfo.mobile.ApplicationEx;
import com.ustcinfo.mobile.download.DownloadService;
import com.ustcinfo.R;
import com.ustcinfo.mobile.platform.adapter.GuideGalleryAdapter;
import com.ustcinfo.mobile.platform.data.AppInfo;
import com.ustcinfo.mobile.util.AppMgrUtil;
import com.ustcinfo.mobile.util.AsyncImageLoader;
/**
 * Activity showing the introduction/details page of a mobile application
 * asset: icon, version, size, screenshots, description, and context-dependent
 * action buttons (download / install / update / launch / uninstall). Download
 * progress is received via a broadcast from {@code DownloadService}.
 */
public class AppAssetIntroductionActivity extends Activity {

    private AppInfo appInfo = null;
    private LinearLayout loading;
    private RelativeLayout gallery;
    private LinearLayout download_status_container;
    private View buttons_spacer_left;
    private Button launchbutton;          // launch the installed app
    private Button downloadbutton;        // start download
    private Button updatebutton;          // update to a newer version
    private Button installbutton;         // install a downloaded package
    private Button dummybutton;
    private Button uninstallbutton;       // uninstall
    private Button installingbutton;      // shown while installing
    private Button canceldownloadbutton;  // pause/cancel download
    private View buttons_spacer_right;
    ProgressBar download_progress_bar;    // download progress bar
    TextView download_status;             // progress bar status label
    private String packageName;
    private Intent intent;
    private MyReceiver receiver;
    // NOTE(review): aliases download_status — both are resolved from
    // R.id.download_status. Kept for compatibility.
    private TextView text;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.asset_info_gallery);
        intent = this.getIntent();
        findView();
        packageName = intent.getStringExtra("packageName");
        DownloadProcess();
        // Load the app details off the UI thread.
        new AppListAsyncTask()
                .execute(this.getIntent().getStringExtra("appId"));
    }

    /** Resolves all view references from the layout. */
    private void findView() {
        gallery = (RelativeLayout) this.findViewById(R.id.asset_info_gallery_ds);
        loading = (LinearLayout) this
                .findViewById(R.id.fullscreen_loading_indicator);
        // bottom action bar widgets
        download_status_container = (LinearLayout) this
                .findViewById(R.id.download_status_container);
        buttons_spacer_left = this.findViewById(R.id.buttons_spacer_left);
        launchbutton = (Button) this.findViewById(R.id.launchbutton);
        downloadbutton = (Button) this.findViewById(R.id.downloadbutton);
        updatebutton = (Button) this.findViewById(R.id.updatebutton);
        installbutton = (Button) this.findViewById(R.id.installbutton);
        dummybutton = (Button) this.findViewById(R.id.dummybutton);
        uninstallbutton = (Button) this.findViewById(R.id.uninstallbutton);
        installingbutton = (Button) this.findViewById(R.id.installingbutton);
        canceldownloadbutton = (Button) this
                .findViewById(R.id.canceldownloadbutton);
        buttons_spacer_right = this.findViewById(R.id.buttons_spacer_right);
        download_progress_bar = (ProgressBar) this
                .findViewById(R.id.download_progress_bar);
        download_status = (TextView) this.findViewById(R.id.download_status);
    }

    /**
     * Renders the loaded introduction info, or a network-error message when
     * loading failed ({@code appInfo == null}).
     */
    private void showGalleryInfo() {
        loading.setVisibility(View.GONE);
        gallery.setVisibility(View.VISIBLE);
        if (appInfo == null) {
            TextView nameView = (TextView) this
                    .findViewById(R.id.asset_info_title);
            nameView.setText(R.string.prompt_exception_net);
            Toast.makeText(AppAssetIntroductionActivity.this,
                    R.string.prompt_exception_net, Toast.LENGTH_LONG).show();
            return;
        }
        showDetailInfo();
        showScreenShort();
        showDescription();
        showBottomBtns();
    }

    /** Shows icon, name, version, size and update time of the app. */
    private void showDetailInfo() {
        ImageView logoView = (ImageView) this.findViewById(R.id.thumbnail);
        TextView versionView = (TextView) this.findViewById(R.id.version);
        TextView nameView = (TextView) this.findViewById(R.id.asset_info_title);
        TextView sizeView = (TextView) this.findViewById(R.id.size);
        TextView time = (TextView) this.findViewById(R.id.last_update);
        // Load the icon asynchronously; the callback updates the view once
        // the image arrives.
        Drawable cachedImage = ApplicationEx.GetAsyncImageLoader()
                .loadDrawable(AppConstants.getImageUrl() + appInfo.getAppIconUrl(),
                        logoView, new AsyncImageLoader.ImageCallback() {
                            @Override
                            public void imageLoaded(Drawable imageDrawable,
                                    ImageView imageView, String imageUrl) {
                                imageView.setImageDrawable(imageDrawable);
                            }
                        });
        if (cachedImage == null) {
            // placeholder until the async load completes
            logoView.setImageResource(R.drawable.ic_launcher);
        } else {
            logoView.setImageDrawable(cachedImage);
        }
        versionView.setText("版本:" + appInfo.getVersion());
        sizeView.setText("大小:" + appInfo.getAppSize());
        time.setText("时间:" + appInfo.getUpdateTime());
        nameView.setText(appInfo.getAppName());
    }

    /** Shows the screenshot gallery with a "current/total" indicator. */
    private void showScreenShort() {
        final List<String> imgURL = appInfo.getImgURL();
        final TextView image_indicator = (TextView) this
                .findViewById(R.id.image_indicator);
        if (imgURL.size() != 0) {
            Gallery screenGallery = (Gallery) this
                    .findViewById(R.id.screen_gallery);
            screenGallery
                    .setAdapter(new GuideGalleryAdapter(this, imgURL, null));
            screenGallery
                    .setOnItemSelectedListener(new OnItemSelectedListener() {
                        @Override
                        public void onItemSelected(AdapterView<?> arg0,
                                View arg1, int position, long arg3) {
                            // wrap-around indicator: "k/n" with k in 1..n
                            image_indicator.setText(((position + 1)
                                    % imgURL.size() == 0 ? imgURL.size()
                                    : (position + 1) % imgURL.size())
                                    + "/"
                                    + imgURL.size());
                        }

                        @Override
                        public void onNothingSelected(AdapterView<?> arg0) {
                        }
                    });
        } else { // no screenshots available
            image_indicator.setText(R.string.no_screen_shorts);
        }
    }

    /** Shows the app description; the "more" button expands the full text. */
    private void showDescription() {
        String description = appInfo.getDescription();
        final TextView descriptionView = (TextView) this
                .findViewById(R.id.description);
        descriptionView.setText(description);
        final ImageButton more = (ImageButton) this
                .findViewById(R.id.detail_more);
        more.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                descriptionView.setMaxLines(10000);
                more.setVisibility(View.GONE);
            }
        });
    }

    /**
     * Shows the bottom action buttons depending on install state:
     * not installed → download/install; installed but outdated → update;
     * installed and current → launch/uninstall. Also wires all click
     * listeners.
     */
    private void showBottomBtns() {
        LinearLayout layout = (LinearLayout) this
                .findViewById(R.id.info_buttons_bar);
        layout.setVisibility(View.VISIBLE);
        Context context = this.getApplicationContext();
        Intent intent = this.getIntent();
        final String pakage = intent.getStringExtra("packageName");
        // NOTE(review): throws NumberFormatException/NPE if the "version"
        // extra is missing or non-numeric — callers must supply it.
        int newVersion = Integer.valueOf(intent.getStringExtra("version"));
        final String downLoadUrl = intent.getStringExtra("downLoadUrl");
        final String appName = intent.getStringExtra("appName");
        if (!AppMgrUtil.isInstall(context, pakage)) { // not installed yet
            buttons_spacer_left.setVisibility(View.VISIBLE);
            buttons_spacer_right.setVisibility(View.VISIBLE);
            if (AppMgrUtil.isHaveDownloadedAPK(context, appName)) {
                // APK already downloaded — offer install
                // TODO verify the downloaded package is complete
                installbutton.setVisibility(View.VISIBLE);
            } else {
                downloadbutton.setVisibility(View.VISIBLE);
            }
        } else if (AppMgrUtil.isNeedUpdate(context, pakage, newVersion)) {
            // installed but an update is available
            buttons_spacer_left.setVisibility(View.VISIBLE);
            updatebutton.setVisibility(View.VISIBLE);
            uninstallbutton.setVisibility(View.VISIBLE);
            dummybutton.setVisibility(View.VISIBLE);
            buttons_spacer_right.setVisibility(View.VISIBLE);
        } else { // installed and up to date
            buttons_spacer_left.setVisibility(View.VISIBLE);
            launchbutton.setVisibility(View.VISIBLE);
            uninstallbutton.setVisibility(View.VISIBLE);
            buttons_spacer_right.setVisibility(View.VISIBLE);
            dummybutton.setVisibility(View.VISIBLE);
        }
        downloadbutton.setOnClickListener(new OnClickListener() { // download
            @Override
            public void onClick(View v) {
                // show the progress bar and hand the work to DownloadService
                download_status_container.setVisibility(View.VISIBLE);
                downloadbutton.setVisibility(View.GONE);
                canceldownloadbutton.setVisibility(View.VISIBLE);
                Intent intent = AppAssetIntroductionActivity.this
                        .getIntent();
                synchronized (intent) {
                    intent.setClass(AppAssetIntroductionActivity.this,
                            DownloadService.class);
                    // startService is used instead of bindService
                    Bundle bundle = new Bundle();
                    bundle.putString("appName",
                            intent.getStringExtra("appName"));
                    bundle.putString("downLoadUrl",
                            intent.getStringExtra("downLoadUrl"));
                    bundle.putString("appId",
                            intent.getStringExtra("appId"));
                    bundle.putString("packageName",
                            intent.getStringExtra("packageName"));
                    bundle.putString("appNameCn",
                            intent.getStringExtra("appNameCn"));
                    intent.putExtras(bundle);
                    startService(intent);
                }
            }
        });
        canceldownloadbutton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // cancel is not implemented yet — see cancel(View) below
            }
        });
        updatebutton.setOnClickListener(new OnClickListener() { // update
            @Override
            public void onClick(View v) {
                AppMgrUtil.updateSys(AppAssetIntroductionActivity.this
                        .getApplicationContext(),
                        DownloadService.DOWNLOAD_SD_PATH + downLoadUrl);
                finish();
            }
        });
        launchbutton.setOnClickListener(new OnClickListener() { // launch
            @Override
            public void onClick(View v) {
                AppMgrUtil.openSys(AppAssetIntroductionActivity.this
                        .getApplicationContext(), pakage);
                finish();
            }
        });
        installbutton.setOnClickListener(new OnClickListener() { // install
            @Override
            public void onClick(View v) {
                AppMgrUtil.updateSys(AppAssetIntroductionActivity.this
                        .getApplicationContext(),
                        DownloadService.DOWNLOAD_SD_PATH + downLoadUrl);
                finish();
            }
        });
        uninstallbutton.setOnClickListener(new OnClickListener() { // uninstall
            @Override
            public void onClick(View v) {
                AppMgrUtil.unInstallApp(
                        AppAssetIntroductionActivity.this
                                .getApplicationContext(), pakage);
                finish();
            }
        });
    }

    /**
     * Registers the broadcast receiver that tracks download progress.
     * NOTE(review): method name violates lowerCamelCase; kept because it is
     * package-visible and may have external callers.
     */
    private void DownloadProcess() {
        receiver = new MyReceiver();
        IntentFilter filter = new IntentFilter();
        filter.addAction(AppConstants.ACTION_DOWNLOADING);
        registerReceiver(receiver, filter);
        text = (TextView) this.findViewById(R.id.download_status);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // must unregister to avoid leaking the receiver
        unregisterReceiver(receiver);
    }

    /** Cancel-download hook referenced from the layout; currently disabled. */
    public void cancel(View view) {
    }

    /**
     * Receives download-progress broadcasts for this app and updates the
     * progress bar and action buttons accordingly.
     */
    private class MyReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            Bundle bundle = intent.getExtras();
            // only react to broadcasts for the app shown by this activity
            if (packageName.equals(bundle.getString("packageName"))) {
                download_status_container.setVisibility(View.VISIBLE);
                buttons_spacer_left.setVisibility(View.VISIBLE);
                canceldownloadbutton.setVisibility(View.VISIBLE);
                buttons_spacer_right.setVisibility(View.VISIBLE);
                downloadbutton.setVisibility(View.GONE);
                int progress = bundle.getInt("progress");
                if (progress >= 100) { // finished
                    download_progress_bar.setProgress(100);
                    text.setText("下载完成!");
                    canceldownloadbutton.setVisibility(View.GONE);
                    launchbutton.setVisibility(View.VISIBLE);
                    uninstallbutton.setVisibility(View.VISIBLE);
                    download_status_container.setVisibility(View.GONE);
                    dummybutton.setVisibility(View.VISIBLE);
                } else if (progress == -1) { // failed/cancelled
                    download_status_container.setVisibility(View.GONE);
                    canceldownloadbutton.setVisibility(View.GONE);
                    buttons_spacer_left.setVisibility(View.VISIBLE);
                    downloadbutton.setVisibility(View.VISIBLE);
                    buttons_spacer_right.setVisibility(View.VISIBLE);
                } else { // in progress
                    download_progress_bar.setProgress(progress);
                    text.setText("下载中:" + progress + "%");
                }
            }
        }
    }

    /** Loads the app details in the background, then renders them. */
    class AppListAsyncTask extends AsyncTask<String, Integer, AppInfo> {

        @Override
        protected void onPreExecute() {
            // BUGFIX: these view mutations previously lived inside
            // doInBackground(), which runs off the UI thread; touching views
            // there can throw CalledFromWrongThreadException. onPreExecute()
            // runs on the UI thread before the background work starts.
            loading.setVisibility(View.VISIBLE);
            gallery.setVisibility(View.GONE);
        }

        @Override
        protected AppInfo doInBackground(String... params) {
            try {
                appInfo = ApplicationEx.data.getAppInfo(params[0]);
            } catch (Exception e) {
                // best-effort: showGalleryInfo() renders a network-error
                // message when appInfo stays null
                e.printStackTrace();
            }
            return appInfo;
        }

        @Override
        protected void onPostExecute(AppInfo result) {
            showGalleryInfo();
        }
    }
}
|
apache-2.0
|
DavidWhitlock/PortlandStateJava
|
grader/src/main/java/edu/pdx/cs410J/grader/poa/GradeBookFileManager.java
|
1046
|
package edu.pdx.cs410J.grader.poa;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import com.google.inject.Inject;
import edu.pdx.cs410J.ParserException;
import edu.pdx.cs410J.grader.GradeBook;
import edu.pdx.cs410J.grader.XmlDumper;
import edu.pdx.cs410J.grader.XmlGradeBookParser;
import java.io.File;
import java.io.IOException;
/**
 * Bridges grade-book file I/O and the application event bus: loads a grade
 * book from XML when a {@link LoadGradeBook} event is posted, and writes it
 * back to the same file on a {@link SaveGradeBook} event.
 */
public class GradeBookFileManager {

  private final EventBus bus;

  /** File the current grade book was loaded from; saves go back here. */
  private File file;

  /**
   * Creates a manager that registers itself on the given event bus.
   *
   * @param bus the application-wide event bus
   */
  @Inject
  public GradeBookFileManager(EventBus bus) {
    this.bus = bus;
    bus.register(this);
  }

  /**
   * Parses the grade book from the file named by the event, remembers the
   * file for later saves, and announces the loaded book on the bus.
   *
   * @param event carries the file to load
   * @throws IOException if the file cannot be read
   * @throws ParserException if the XML is malformed
   */
  @Subscribe
  public void loadGradeBookFromFile(LoadGradeBook event) throws IOException, ParserException {
    this.file = event.getFile();
    GradeBook book = new XmlGradeBookParser(this.file).parse();
    this.bus.post(new GradeBookLoaded(book));
  }

  /**
   * Dumps the event's grade book to the file it was loaded from.
   *
   * @param event carries the grade book to save
   * @throws IOException if the file cannot be written
   */
  @Subscribe
  public void writeGradeBookToFile(SaveGradeBook event) throws IOException {
    new XmlDumper(this.file).dump(event.getGradeBook());
  }
}
|
apache-2.0
|
dan-zx/zekke-api
|
src/main/java/com/github/danzx/zekke/message/impl/package-info.java
|
690
|
/*
* Copyright 2017 Daniel Pedraza-Arcega
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Message source implementations. */
package com.github.danzx.zekke.message.impl;
|
apache-2.0
|
tanglei528/glance
|
glance/common/location_strategy/__init__.py
|
4010
|
# Copyright 2014 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo.config import cfg
import stevedore
import glance.openstack.common.log as logging
# Configuration knob selecting which registered strategy orders image
# locations; its value must match a loaded strategy's get_strategy_name().
location_strategy_opts = [
    cfg.StrOpt('location_strategy', default='location_order',
               help=_("This value sets what strategy will be used to "
                      "determine the image location order. Currently "
                      "two strategies are packaged with Glance "
                      "'location_order' and 'store_type'."))
]
CONF = cfg.CONF
CONF.register_opts(location_strategy_opts)
LOG = logging.getLogger(__name__)
def _load_strategies():
    """Load all strategy modules.

    Discovers every plugin registered under the
    'glance.common.image_location_strategy.modules' entry-point namespace.

    :return: dict mapping strategy name -> initialized strategy driver module
    """
    modules = {}
    namespace = "glance.common.image_location_strategy.modules"
    ex = stevedore.extension.ExtensionManager(namespace)
    for module_name in ex.names():
        try:
            # invoke_on_load=False: the driver is a module exposing the
            # strategy API as module-level functions, not a class to
            # instantiate.
            mgr = stevedore.driver.DriverManager(
                namespace=namespace,
                name=module_name,
                invoke_on_load=False)
            # Obtain module name
            strategy_name = str(mgr.driver.get_strategy_name())
            if strategy_name in modules:
                # First registration wins; duplicates are ignored with a
                # warning rather than overwriting.
                msg = (_('%(strategy)s is registered as a module twice. '
                         '%(module)s is not being used.') %
                       {'strategy': strategy_name, 'module': module_name})
                LOG.warn(msg)
            else:
                # Initialize strategy module
                mgr.driver.init()
                modules[strategy_name] = mgr.driver
        except Exception as e:
            # Best-effort loading: a broken plugin is logged and skipped so
            # the remaining strategies can still be registered.
            LOG.error(_("Failed to load location strategy module "
                        "%(module)s: %(e)s") % {'module': module_name, 'e': e})
    return modules
# Strategy registry, populated once at import time.
_available_strategies = _load_strategies()
# TODO(kadachi): Not used but don't remove this until glance.store
# development/migration stage.
def verify_location_strategy(conf=None, strategies=_available_strategies):
    """Validate user configured 'location_strategy' option value.

    :param conf: strategy name to check; defaults to CONF.location_strategy
    :param strategies: registry of loaded strategies to validate against
    :raises RuntimeError: if the name does not match any loaded strategy
    """
    name = conf or CONF.location_strategy
    if name in strategies:
        return
    msg = (_('Invalid location_strategy option: %(name)s. '
             'The valid strategy option(s) is(are): %(strategies)s') %
           {'name': name, 'strategies': ", ".join(strategies.keys())})
    LOG.error(msg)
    raise RuntimeError(msg)
def get_ordered_locations(locations, **kwargs):
    """
    Order image location list by configured strategy.

    :param locations: The original image location list.
    :param kwargs: Strategy-specific arguments for under layer strategy module.
    :return: The image location list with strategy-specific order.
    """
    if not locations:
        return []
    strategy = _available_strategies[CONF.location_strategy]
    # Deep-copy so the strategy cannot mutate the caller's list in place.
    return strategy.get_ordered_locations(copy.deepcopy(locations), **kwargs)
def choose_best_location(locations, **kwargs):
    """
    Choose best location from image location list by configured strategy.

    :param locations: The original image location list.
    :param kwargs: Strategy-specific arguments for under layer strategy module.
    :return: The best location from image location list, or None if empty.
    """
    ordered = get_ordered_locations(locations, **kwargs)
    return ordered[0] if ordered else None
|
apache-2.0
|
artspb/jdk2trove
|
jdk2trove-plugin/testSrc/me/artspb/idea/jdk2trove/hashset/TLinkedHashSetQuickFixTest.java
|
1110
|
package me.artspb.idea.jdk2trove.hashset;
import com.intellij.codeInspection.BaseJavaLocalInspectionTool;
import me.artspb.idea.jdk2trove.TroveTestCase;
/**
* @author Artem Khvastunov
*/
/**
 * Verifies that the THashSet-creation quick fix also handles {@code TLinkedHashSet}.
 *
 * @author Artem Khvastunov
 */
public class TLinkedHashSetQuickFixTest extends TroveTestCase {

    /** Runs the quick-fix test data located under quickFix/hashset/tLinkedHashSet. */
    public void testTLinkedHashSet() throws Exception {
        doAllTests("quickFix/hashset/tLinkedHashSet");
    }

    // The redundant setUp() override that only delegated to super.setUp() was
    // removed; the superclass implementation is invoked automatically.

    /** The inspection under test; it supplies the quick fix being exercised. */
    @Override
    protected BaseJavaLocalInspectionTool[] getInspections() {
        return new BaseJavaLocalInspectionTool[] {new THashSetCreationInspection()};
    }

    /**
     * Minimal Trove class stubs compiled into the test fixture so the
     * inspection can resolve {@code TLinkedHashSet} and related types.
     */
    @Override
    protected String[] getUserClasses() {
        return new String[] {
                "package gnu.trove.set;" +
                "public interface TIntSet {}",
                "package gnu.trove.set.hash;" +
                "import gnu.trove.set.TIntSet;" +
                "public class TIntHashSet implements TIntSet {}",
                "package gnu.trove.set.hash;" +
                "public class TLinkedHashSet<E> extends THashSet<E> {}"
        };
    }
}
|
apache-2.0
|
amymalia/solar-power-app
|
app/imports/startup/client/router.js
|
822
|
import { FlowRouter } from 'meteor/kadira:flow-router';
import { BlazeLayout } from 'meteor/kadira:blaze-layout';
// Map each route path to its route name and the Blaze template rendered
// into the `main` region of App_Body.
const routeTable = [
  { path: '/', name: 'Home_Page', template: 'Home_Page' },
  { path: '/add_contact', name: 'Add_Contact_Page', template: 'Add_Contact_Page' },
  { path: '/edit_contact/:_id', name: 'Edit_Contact_Page', template: 'Edit_Contact_Page' },
  { path: '/copy_contact/:_id', name: 'Copy_Contact_Page', template: 'Copy_Contact_Page' },
];

routeTable.forEach(({ path, name, template }) => {
  FlowRouter.route(path, {
    name,
    action() {
      BlazeLayout.render('App_Body', { main: template });
    },
  });
});

// Any unmatched URL falls through to the not-found template.
FlowRouter.notFound = {
  action() {
    BlazeLayout.render('App_Body', { main: 'App_Not_Found' });
  },
};
|
apache-2.0
|
tfmorris/Names
|
service/src/main/java/org/folg/names/service/ScoreService.java
|
2047
|
/*
* Copyright 2011 Foundation for On-Line Genealogy, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.folg.names.service;
import org.folg.names.score.Scorer;
import org.folg.names.search.Normalizer;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import java.util.List;
/**
* Return the score of two name pieces
*/
@Path("/score")
public class ScoreService {

    /**
     * Scores the similarity of two name pieces of the given type.
     *
     * @param type either "surname" or "givenname"; any other value yields the sentinel score
     * @param name1 first name piece to compare
     * @param name2 second name piece to compare
     * @return the best (highest) pairwise score over all normalized variants of
     *         both names; {@code Double.NEGATIVE_INFINITY} when the type is
     *         unrecognized or no variant pair exists
     */
    @GET
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
    @Path("{type}/{name1}/{name2}")
    public Score get(@PathParam("type") String type, @PathParam("name1") String name1, @PathParam("name2") String name2) {
        double bestScore = Double.NEGATIVE_INFINITY;
        boolean isSurname = "surname".equals(type);
        if (isSurname || "givenname".equals(type)) {
            Normalizer normalizer = Normalizer.getInstance();
            Scorer scorer = isSurname ? Scorer.getSurnameInstance() : Scorer.getGivennameInstance();
            // Compare every normalized variant of name1 against every variant of name2.
            for (String piece1 : normalizer.normalize(name1, isSurname)) {
                for (String piece2 : normalizer.normalize(name2, isSurname)) {
                    bestScore = Math.max(bestScore, scorer.scoreNamePair(piece1, piece2));
                }
            }
        }
        return new Score(bestScore);
    }
}
|
apache-2.0
|
baozoumanhua/elk-rtf
|
logstash/vendor/bundle/jruby/1.9/gems/manticore-0.4.1-java/lib/manticore/client.rb
|
29478
|
require 'thread'
require 'base64'
module Manticore
# General Timeout exception thrown for various Manticore timeouts
class Timeout < ManticoreException; end
# @!macro [new] http_method_shared
# @param url [String] URL to request
# @param options [Hash]
# @option options [Hash] params Hash of options to pass as request parameters
# @option options [Hash] headers Hash of options to pass as additional request headers
# @option options [String] proxy Proxy host in form: http://proxy.org:1234
# @option options [Hash] proxy Proxy host in form: {host: 'proxy.org'[, port: 80[, scheme: 'http']]}
# @option options [URI] proxy Proxy host as a URI object
# @option options [Integer] connect_timeout Request-specific connect timeout
# @option options [Integer] socket_timeout Request-specific socket timeout
# @option options [Integer] request_timeout Request-specific request timeout
# @option options [Integer] max_redirects Request-specific maximum redirect limit
# @option options [Boolean] follow_redirects Specify whether this request should follow redirects
#
# @!macro [new] http_request_exceptions
# @raise [Manticore::Timeout] on socket, connection, or response timeout
# @raise [Manticore::SocketException] on internal socket exception (ie, unexpected socket closure)
# @raise [Manticore::ClientProtocolException] on protocol errors such as an SSL handshake failure or connection exception
# @raise [Manticore::ResolutionFailure] on DNS resolution failure
# @return [Response]
#
# @!macro [new] http_method_shared_async
# @example Simple usage
# client.$0("http://example.com/some/resource", params: {foo: "bar"}, headers: {"X-Custom-Header" => "whee"}).
# on_success {|response|
# # Do something with response.body, response.code, etc
# }.on_failure {|exception|
# # Handle request exception
# }
# client.execute!
#
# @!macro [new] http_method_shared_sync
# @example Simple usage
# body = client.$0("http://example.com/some/resource", params: {foo: "bar"}, headers: {"X-Custom-Header" => "whee"}).body
# @example Passing a block as the success handler:
# body = client.$0("http://example.com/some/resource", params: {foo: "bar"}, headers: {"X-Custom-Header" => "whee"}) {|response| response.body }
# @example Explicit success handler:
# body = client.$0("http://example.com/some/resource", params: {foo: "bar"}, headers: {"X-Custom-Header" => "whee"}).
# on_success {|response| response.body }
# @macro http_method_shared
# @macro http_request_exceptions
#
# @!macro [new] http_method_shared_async_with_body
# @macro http_method_shared_async
# @option options [String] body Body to pass with the request
#
# @!macro [new] http_method_shared_sync_with_body
# @macro http_method_shared_sync
# @option options [String] body Body to pass with the request
# Core Manticore client, with a backing {http://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/impl/conn/PoolingHttpClientConnectionManager.html PoolingHttpClientConnectionManager}
class Client
include_package "org.apache.http.client.methods"
include_package "org.apache.http.client.entity"
include_package "org.apache.http.client.config"
include_package "org.apache.http.config"
include_package "org.apache.http.conn.socket"
include_package "org.apache.http.impl"
include_package "org.apache.http.impl.client"
include_package "org.apache.http.impl.conn"
include_package "org.apache.http.entity"
include_package "org.apache.http.message"
include_package "org.apache.http.params"
include_package "org.apache.http.protocol"
include_package "org.apache.http.auth"
include_package "java.util.concurrent"
include_package "org.apache.http.client.protocol"
include_package 'org.apache.http.conn.ssl'
include_package "java.security.cert"
include_package "java.security.spec"
include_package "java.security"
java_import "org.apache.http.HttpHost"
java_import "javax.net.ssl.SSLContext"
java_import "org.manticore.HttpGetWithEntity"
java_import "org.apache.http.auth.UsernamePasswordCredentials"
include ProxiesInterface
# The default maximum pool size for requests
DEFAULT_MAX_POOL_SIZE = 50
DEFAULT_REQUEST_TIMEOUT = 60
DEFAULT_SOCKET_TIMEOUT = 10
DEFAULT_CONNECT_TIMEOUT = 10
DEFAULT_MAX_REDIRECTS = 5
DEFAULT_EXPECT_CONTINUE = false
DEFAULT_STALE_CHECK = false
# Create a new HTTP client with a backing request pool. if you pass a block to the initializer, the underlying
# {http://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/impl/client/HttpClientBuilder.html HttpClientBuilder}
# and {http://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/client/config/RequestConfig.Builder.html RequestConfig.Builder}
# will be yielded so that you can operate on them directly.
#
# @see http://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/impl/client/HttpClientBuilder.html HttpClientBuilder
# @see http://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/client/config/RequestConfig.Builder.html RequestConfig.Builder
# @example Simple instantiation and usage
# client = Manticore::Client.new
# client.get("http://www.google.com")
#
# @example Instantiation with a block
# client = Manticore::Client.new(socket_timeout: 5) do |http_client_builder, request_builder|
# http_client_builder.disable_redirect_handling
# end
#
# @param options [Hash] Client pool options
# @option options [String] user_agent The user agent used in requests.
# @option options [Integer] pool_max (50) The maximum number of active connections in the pool
# @option options [integer] pool_max_per_route (pool_max) Sets the maximum number of active connections for a given target endpoint
# @option options [boolean] cookies (true) enable or disable automatic cookie management between requests
# @option options [boolean] compression (true) enable or disable transparent gzip/deflate support
# @option options [integer] request_timeout (60) Sets the timeout for requests. Raises {Manticore::Timeout} on failure.
# @option options [integer] connect_timeout (10) Sets the timeout for connections. Raises Manticore::Timeout on failure.
# @option options [integer] socket_timeout (10) Sets SO_TIMEOUT for open connections. A value of 0 is an infinite timeout. Raises Manticore::Timeout on failure.
# @option options [boolean] tcp_no_delay (true) Enable or disable Nagle's algorithm
# @option options [integer] request_timeout (60) Sets the timeout for a given request. Raises Manticore::Timeout on failure.
# @option options [integer] max_redirects (5) Sets the maximum number of redirects to follow.
# @option options [integer] automatic_retries (3) Sets the number of times the client will automatically retry failed requests.
# @option options [boolean] expect_continue (false) Enable support for HTTP 100
# @option options [boolean] stale_check (false) Enable support for stale connection checking. Adds overhead.
# @option options [String] proxy Proxy host in form: http://proxy.org:1234
# @option options [Hash] proxy Proxy host in form: {host: 'proxy.org'[, port: 80[, scheme: 'http'[, user: 'username@host', password: 'password']]]}
# @option options [Hash] proxy Proxy host in form: {url: 'http://proxy.org:1234'[, user: 'username@host', password: 'password']]]}
# @option options [URI] proxy Proxy host as a URI object
# @option options [Boolean/Fixnum] keepalive (true) Whether to allow connections to be reused. Defaults to true. If an integer,
# then connections will be kept alive for this long when Connection: keep-alive
# is sent, but no Keep-Alive header is sent.
# @option options [Hash] ssl Hash of options for configuring SSL
# @option options [Array<String>] ssl[:protocols] (nil) A list of protocols that Manticore should accept
# @option options [Array<String>] ssl[:cipher_suites] (nil) A list of cipher suites that Manticore should accept
# @option options [Symbol] ssl[:verify] (:strict) Hostname verification setting. Set to `:disable` to turn off hostname verification. Setting to `:browser` will
# cause Manticore to accept a certificate for *.foo.com for all subdomains and sub-subdomains (eg a.b.foo.com).
# The default `:strict` is like `:browser` except it'll only accept a single level of subdomains for wildcards,
# eg `b.foo.com` will be accepted for a `*.foo.com` certificate, but `a.b.foo.com` will not be.
# @option options [String] ssl[:truststore] (nil) Path to a custom trust store to use the verifying SSL connections
# @option options [String] ssl[:truststore_password] (nil) Password used for decrypting the server trust store
# @option options [String] ssl[:truststore_type] (nil) Format of the trust store, ie "JKS" or "PKCS12". If left nil, the type will be inferred from the truststore filename.
# @option options [String] ssl[:keystore] (nil) Path to a custom key store to use for client certificate authentication
# @option options [String] ssl[:keystore_password] (nil) Password used for decrypting the client auth key store
# @option options [String] ssl[:keystore_type] (nil) Format of the key store, ie "JKS" or "PKCS12". If left nil, the type will be inferred from the keystore filename.
# @option options [String] ssl[:ca_file] (nil) OpenSSL-style path to an X.509 certificate to use to validate SSL certificates
# @option options [String] ssl[:client_cert] (nil) OpenSSL-style path to an X.509 certificate to use for client authentication
# @option options [String] ssl[:client_key] (nil) OpenSSL-style path to an RSA key to use for client authentication
# @option options [boolean] ssl[:track_state] (false) Turn on or off connection state tracking. This helps prevent SSL information from leaking across threads, but means that connections
# can't be shared across those threads. This should generally be left off unless you know what you're doing.
def initialize(options = {})
  builder = client_builder
  builder.set_user_agent options.fetch(:user_agent, "Manticore #{VERSION}")
  @options = options
  # NOTE(review): the class documentation above says cookies default to true,
  # but this fetch defaults to false -- confirm which is intended.
  @use_cookies = options.fetch(:cookies, false)
  builder.disable_cookie_management unless @use_cookies
  builder.disable_content_compression if options.fetch(:compression, true) == false
  builder.set_proxy get_proxy_host(options[:proxy]) if options.key?(:proxy)
  # Retry only low-level transport failures (no response / socket reset),
  # up to :automatic_retries attempts; any other exception fails immediately.
  builder.set_retry_handler do |exception, executionCount, context|
    if (executionCount > options.fetch(:automatic_retries, 3))
      false
    else
      case exception
      when Java::OrgApacheHttp::NoHttpResponseException, Java::JavaNet::SocketException
        context.setAttribute "retryCount", executionCount
        true
      else
        false
      end
    end
  end
  # http://hc.apache.org/httpcomponents-client-ga/tutorial/html/advanced.html#stateful_conn
  # By default this is used to prevent different contexts from accessing SSL data
  # Since we're running this for JRuby which does not have context separation within the JVM
  # We can disable this for connection reuse.
  builder.disable_connection_state unless options.fetch(:ssl, {}).fetch(:track_state, false)
  keepalive = options.fetch(:keepalive, true)
  if keepalive == false
    # Force a fresh connection for every request.
    builder.set_connection_reuse_strategy {|response, context| false }
  else
    builder.set_connection_reuse_strategy DefaultConnectionReuseStrategy.new
  end
  # Socket-level defaults; Apache HC expects milliseconds, our options are seconds.
  socket_config_builder = SocketConfig.custom
  socket_config_builder.set_so_timeout( options.fetch(:socket_timeout, DEFAULT_SOCKET_TIMEOUT) * 1000 )
  socket_config_builder.set_tcp_no_delay( options.fetch(:tcp_no_delay, true) )
  builder.set_default_socket_config socket_config_builder.build
  builder.set_connection_manager pool(options)
  # Client-wide request defaults; individual requests may override these via
  # request_from_options.
  request_config = RequestConfig.custom
  request_config.set_connection_request_timeout options.fetch(:request_timeout, DEFAULT_REQUEST_TIMEOUT) * 1000
  request_config.set_connect_timeout options.fetch(:connect_timeout, DEFAULT_CONNECT_TIMEOUT) * 1000
  request_config.set_socket_timeout options.fetch(:socket_timeout, DEFAULT_SOCKET_TIMEOUT) * 1000
  request_config.set_max_redirects options.fetch(:max_redirects, DEFAULT_MAX_REDIRECTS)
  request_config.set_expect_continue_enabled options.fetch(:expect_continue, DEFAULT_EXPECT_CONTINUE)
  request_config.set_stale_connection_check_enabled options.fetch(:stale_check, DEFAULT_STALE_CHECK)
  request_config.set_circular_redirects_allowed false
  # Let callers customize the underlying builders directly (documented above).
  yield builder, request_config if block_given?
  builder.set_default_request_config request_config.build
  @client = builder.build
  @options = options
  @async_requests = []
  @stubs = {}
end
# Return a hash of statistics about this client's HTTP connection pool
# Snapshot of the connection pool's totals as a plain Ruby hash.
def pool_stats
  totals = @pool.get_total_stats
  {
    max:       totals.get_max,
    leased:    totals.get_leased,
    pending:   totals.get_pending,
    available: totals.get_available
  }
end
### Sync methods
# Synchronous request methods (get, put, head, post, delete, options, patch).
# Each verb maps directly onto its Apache HttpComponents request class; the
# shared behavior lives in #request. See the http_method_shared_sync macros
# above for parameter documentation.
{
  get:     HttpGetWithEntity,
  put:     HttpPut,
  head:    HttpHead,
  post:    HttpPost,
  delete:  HttpDelete,
  options: HttpOptions,
  patch:   HttpPatch
}.each do |verb, klass|
  define_method(verb) do |url, options = {}, &block|
    request klass, url, options, &block
  end
end

# Bang variants (get!, post!, ...) realize the response immediately instead
# of returning a lazy one.
%w(get put head post delete options patch).each do |func|
  define_method "#{func}!" do |url, options, &block|
    send(func, url, options, &block).call
  end
end
# Perform an HTTP request, passing the method as a parameter
# @param method [String, Symbol] Method to call (get put head post options patch)
# @macro http_method_shared
# @macro http_request_exceptions
def http(method, url, options = {}, &block)
  verb = method.to_s.downcase
  # Dispatch to the matching verb method; reject anything unrecognized.
  if %w(get put head post delete options patch).include?(verb)
    send verb, url, options, &block
  else
    raise "Invalid method: #{method}"
  end
end
# Register a stubbed response for a URL (string URLs are matched whole).
# @param url [String] URL to stub for
# @param stubs [Hash] Hash of options to return for the stubbed response
def stub(url, stubs)
  @stubs[url_as_regex(url)] = stubs
end

# Remove a previously-registered stub for the given URL.
def unstub(url)
  @stubs.delete url_as_regex(url)
end

# Drop every registered stub.
def clear_stubs!
  @stubs.clear
end

# Discard all queued asynchronous requests.
#
# @return nil
def clear_pending
  @async_requests.clear
  nil
end
# Execute all queued async requests
#
# @return [Array] An array of the responses from the requests executed.
def execute!
  # Resolve the Callable overload of ExecutorService#submit explicitly so
  # JRuby does not pick the Runnable variant.
  submit = executor.java_method(:submit, [java.util.concurrent.Callable.java_class])
  futures = @async_requests.map {|req| submit.call req }
  @async_requests.clear
  futures.map do |future|
    begin
      future.get
    rescue Java::JavaUtilConcurrent::ExecutionException => e
      # These exceptions should be handled in on_failure blocks; a failed
      # request yields nil in the result array.
    end
  end
end
# Get at the underlying ExecutorService used to invoke asynchronous calls.
# Created lazily on first access.
def executor
  create_executor_if_needed
  @executor
end
protected
# Normalize a stub key: strings become whole-string anchored regexes,
# regexes pass through untouched.
def url_as_regex(url)
  return url unless url.is_a?(String)
  %r{^#{Regexp.escape url}$}
end

# Entry point for building the underlying Apache HttpClient builder.
def client_builder
  HttpClientBuilder.create
end
# Build the pooling connection manager with plain-HTTP and SSL socket
# factories registered for their respective schemes.
def pool_builder(options)
  if options[:ignore_ssl_validation]
    $stderr.puts 'The options[:ignore_ssl_validation] setting is deprecated in favor of options[:ssl][:verify]'
    options[:ssl] ||= {}
    options[:ssl] = {:verify => !options.delete(:ignore_ssl_validation)}.merge(options[:ssl])
  end
  registry = RegistryBuilder.create.
    register("http", PlainConnectionSocketFactory.new).
    register("https", ssl_socket_factory_from_options(options.fetch(:ssl, {}))).
    build
  PoolingHttpClientConnectionManager.new(registry)
end
# Lazily create the shared connection pool (memoized in @pool) and start a
# background thread that evicts expired connections.
def pool(options = {})
  @pool ||= begin
    @max_pool_size = options.fetch(:pool_max, DEFAULT_MAX_POOL_SIZE)
    cm = pool_builder options
    cm.set_default_max_per_route options.fetch(:pool_max_per_route, @max_pool_size)
    cm.set_max_total @max_pool_size
    # NOTE(review): Ruby's Kernel#sleep takes seconds, so this sweeper runs
    # roughly every 83 minutes. If a 5-second cadence was intended (a Java
    # millisecond habit), this should be `sleep 5` -- confirm.
    Thread.new {
      loop {
        cm.closeExpiredConnections
        sleep 5000
      }
    }
    cm
  end
end
# Lazily create the cached thread pool used for async requests; registers an
# at_exit hook so the JVM can shut down cleanly.
def create_executor_if_needed
  return @executor if @executor
  @executor = Executors.new_cached_thread_pool
  at_exit { @executor.shutdown }
end
# Build a request of the given class and either queue it (async: true) or
# prepare it for synchronous execution.
def request(klass, url, options, &block)
  req, context = request_from_options(klass, url, options)
  async = options.delete(:async)
  async ? async_request(req, context) : sync_request(req, context, &block)
end

# Queue the request for a later #execute! call; returns the pending response.
def async_request(request, context)
  create_executor_if_needed
  pending = response_object_for(@client, request, context)
  @async_requests << pending
  pending
end

# Return a lazy response, or realize it immediately when a handler block
# was supplied.
def sync_request(request, context, &block)
  response = response_object_for(@client, request, context, &block)
  block_given? ? response.call : response
end
# Wrap the request in a Response, or a StubbedResponse when a registered
# stub pattern matches the request URI.
def response_object_for(client, request, context, &block)
  uri = request.getURI.to_s
  stub_key = @stubs.keys.find {|pattern| uri.match(pattern) }
  return Response.new(client, request, context, &block) unless stub_key
  StubbedResponse.new(client, request, context, &block).stub(@stubs[stub_key])
end

# Parse the URL and merge any :query option (hash or raw query string) into
# its existing query parameters.
def uri_from_url_and_options(url, options)
  uri = Addressable::URI.parse url
  query = options[:query]
  if query
    existing = uri.query_values || {}
    case query
    when Hash   then uri.query_values = existing.merge(query)
    when String then uri.query_values = existing.merge(CGI.parse(query))
    else raise "Queries must be hashes or strings"
    end
  end
  uri
end
# Construct the concrete Apache HC request object plus its HttpClientContext
# from a URL and per-request options: entity/body, per-request config
# overrides, headers, auth, and cookie handling.
def request_from_options(klass, url, options)
  req = klass.new uri_from_url_and_options(url, options).to_s
  # Attach an entity only for request types that can carry one; :params wins
  # over :body, which wins over a raw :entity.
  if ( options[:params] || options[:body] || options[:entity]) &&
     ( req.instance_of?(HttpPost) || req.instance_of?(HttpPatch) || req.instance_of?(HttpPut) || req.instance_of?(HttpGetWithEntity))
    if options[:params]
      req.set_entity hash_to_entity(options[:params])
    elsif options[:body]
      req.set_entity StringEntity.new(options[:body], minimum_encoding_for(options[:body]))
    elsif options[:entity]
      req.set_entity options[:entity]
    end
  end
  # Per-request options layered over the client-wide defaults.
  req_options = @options.merge(options)
  # Any per-request override replaces the client-level RequestConfig wholesale.
  if options.key?(:proxy) || options.key?(:connect_timeout) || options.key?(:socket_timeout) || options.key?(:max_redirects) || options.key?(:follow_redirects)
    config = RequestConfig.custom()
    if req_options[:proxy]
      config.set_proxy get_proxy_host(req_options[:proxy])
    end
    config.set_max_redirects req_options[:max_redirects] if req_options[:max_redirects]
    config.set_redirects_enabled !!req_options[:follow_redirects] if req_options.fetch(:follow_redirects, nil) != nil
    # Timeout options are in seconds; Apache HC takes milliseconds.
    config.set_connect_timeout req_options[:connect_timeout] * 1000 if req_options[:connect_timeout]
    config.set_socket_timeout req_options[:socket_timeout] * 1000 if req_options[:socket_timeout]
    config.set_connection_request_timeout req_options[:request_timeout] * 1000 if req_options[:request_timeout]
    req.set_config config.build
  end
  if options[:headers]
    options[:headers].each {|k, v| req.set_header k, v }
  end
  context = HttpClientContext.new
  proxy_user = req_options[:proxy].is_a?(Hash) && (req_options[:proxy][:user] || req_options[:proxy][:username])
  auth_from_options(req_options, context) if req_options.key?(:auth) || proxy_user
  # :per_request cookie mode gets a fresh, request-local cookie store.
  if @use_cookies == :per_request
    store = BasicCookieStore.new
    context.setAttribute(ClientContext.COOKIE_STORE, store)
  end
  return req, context
end
# Convert any supported proxy specification (String, Hash, or URI) into an
# Apache HttpHost. Returns nil for unrecognized input or an incomplete hash.
def get_proxy_host(opt)
  case opt
  when String
    parsed = URI.parse(opt)
    # Bare "host:port" strings parse without a host; retry with a scheme.
    parsed = URI.parse("http://#{opt}") unless parsed.host
    get_proxy_host parsed
  when Hash
    if opt.key?(:url)
      get_proxy_host URI.parse(opt[:url])
    elsif opt.key?(:host)
      HttpHost.new(opt[:host], (opt[:port] || 80).to_i, opt[:scheme] || "http")
    end
  when URI
    opt.scheme ||= "http"
    opt.port ||= 80
    HttpHost.new(opt.host, opt.port, opt.scheme)
  end
end
# Populate the context's credentials provider from the :auth option and/or
# proxy credentials.
def auth_from_options(options, context)
  proxy = options.fetch(:proxy, {})
  proxy_user = proxy[:user] || proxy[:username]
  return unless options[:auth] || proxy_user

  provider = BasicCredentialsProvider.new
  if options[:auth]
    user = options[:auth][:user] || options[:auth][:username]
    pass = options[:auth][:pass] || options[:auth][:password]
    provider.set_credentials AuthScope::ANY, UsernamePasswordCredentials.new(user, pass)
  end
  if proxy_user
    pass = proxy[:pass] || proxy[:password]
    # Scope the proxy credentials to the proxy host only.
    provider.set_credentials AuthScope.new(get_proxy_host(proxy)), UsernamePasswordCredentials.new(proxy_user, pass)
  end
  context.set_credentials_provider(provider)
end

# Convert a params hash into a UrlEncodedFormEntity, using the narrowest
# encoding that can represent every key and value.
def hash_to_entity(hash)
  # This is a really stupid way to get the "lowest common denominator" encoding for the options hash
  # Is there a better way?
  encoding = minimum_encoding_for hash.to_a.flatten.join
  pairs = hash.map {|key, val| BasicNameValuePair.new(key, val) }
  UrlEncodedFormEntity.new(pairs, encoding)
end
# Apache HTTP assumes ISO-8859-1 for StringEntities; pass that whenever the
# string is pure ASCII so the Java side doesn't have to do any multibyte work.
ISO_8859_1 = "ISO-8859-1".freeze

# Pick ISO-8859-1 for pure-ASCII strings, otherwise the string's own encoding.
def minimum_encoding_for(string)
  string.ascii_only? ? ISO_8859_1 : string.encoding.to_s
end
# Configure the SSL Context.
# Builds the SSLConnectionSocketFactory used for the "https" scheme from the
# :ssl options hash: hostname-verification mode, trust material, and client
# key material.
def ssl_socket_factory_from_options(ssl_options)
  trust_strategy = nil
  verifier = SSLConnectionSocketFactory::STRICT_HOSTNAME_VERIFIER
  case ssl_options.fetch(:verify, :strict)
  when false, :disable, :none
    # Trust self-signed certs and skip hostname verification entirely.
    trust_strategy = TrustSelfSignedStrategy.new
    verifier = SSLConnectionSocketFactory::ALLOW_ALL_HOSTNAME_VERIFIER
  when :browser
    verifier = SSLConnectionSocketFactory::BROWSER_COMPATIBLE_HOSTNAME_VERIFIER
  when true, :strict, :default
    verifier = SSLConnectionSocketFactory::STRICT_HOSTNAME_VERIFIER
  else
    # Fixed: this message previously listed values (:all, :browser, :default)
    # that did not match the set actually accepted above.
    raise "Invalid value for :verify. Valid values are (true, false, :strict, :default, :browser, :disable, :none)"
  end
  context = SSLContexts.custom
  setup_trust_store ssl_options, context, trust_strategy
  setup_key_store ssl_options, context
  SSLConnectionSocketFactory.new context.build, ssl_options[:protocols].to_java(:string), ssl_options[:cipher_suites].to_java(:string), verifier
end
# Load trust material into the SSL context: an explicit :truststore and/or
# certificates read from an OpenSSL-style :ca_file.
def setup_trust_store(ssl_options, context, trust_strategy)
  trust_store = get_store(:truststore, ssl_options) if ssl_options.key?(:truststore)
  # Support OpenSSL-style ca_file. We don't support ca_path for now.
  if ssl_options[:ca_file]
    trust_store ||= blank_keystore
    open(ssl_options[:ca_file]) do |fp|
      cert_collection = CertificateFactory.get_instance("X509").generate_certificates(fp.to_inputstream).to_a
      cert_collection.each do |cert|
        # Key each cert by its subject DN so multiple CAs can coexist.
        trust_store.set_certificate_entry(cert.getSubjectX500Principal.name, cert)
      end
    end
  end
  context.load_trust_material(trust_store, trust_strategy)
end
# Captures the key type (e.g. "RSA ", or just " ") and the base64 body of a
# PEM-encoded private key.
KEY_EXTRACTION_REGEXP = /(?:^-----BEGIN(.* )PRIVATE KEY-----\n)(.*?)(?:-----END\1PRIVATE KEY.*$)/m

# Load client-auth key material into the SSL context from a :keystore and/or
# an OpenSSL-style :client_cert/:client_key pair.
def setup_key_store(ssl_options, context)
  key_store = get_store(:keystore, ssl_options) if ssl_options.key?(:keystore)
  # Support OpenSSL-style bare X.509 certs with an RSA key
  # This is really dumb - we have to b64-decode the key ourselves.
  if ssl_options[:client_cert] && ssl_options[:client_key]
    key_store ||= blank_keystore
    certs, key = nil, nil
    open(ssl_options[:client_cert]) do |fp|
      certs = CertificateFactory.get_instance("X509").generate_certificates(fp.to_inputstream).to_array([].to_java(Certificate))
    end
    keystore_password = ssl_options.fetch(:keystore_password, "").to_java.toCharArray
    # Add each of the keys in the given keyfile into the keystore.
    open(ssl_options[:client_key]) do |fp|
      key_parts = fp.read.scan(KEY_EXTRACTION_REGEXP)
      key_parts.each do |type, b64key|
        body = Base64.decode64 b64key
        spec = PKCS8EncodedKeySpec.new(body.to_java_bytes)
        type = type.strip
        # PEM headers with no explicit algorithm are assumed to be RSA.
        type = "RSA" if type == ""
        key = KeyFactory.getInstance(type).generatePrivate(spec)
        key_store.set_key_entry("key-#{Digest::SHA1.hexdigest(body)}", key, keystore_password, certs)
      end
    end
  end
  # NOTE(review): keystore_password is only assigned inside the client_cert
  # branch above; when key_store comes solely from :keystore, this passes nil
  # to load_key_material -- confirm whether the password should instead be
  # derived from ssl_options[:keystore_password] here.
  context.load_key_material(key_store, keystore_password) if key_store
end
def get_trust_store(options)
get_store :truststore, options
end
def get_key_store(options)
get_store :keystore, options
end
def get_store(prefix, options)
KeyStore.get_instance(options[:"#{prefix}_type"] || guess_store_type(options[prefix])).tap do |store|
instream = open(options[prefix], "rb").to_inputstream
store.load(instream, options.fetch(:"#{prefix}_password", nil).to_java.toCharArray)
end
end
def blank_keystore
KeyStore.get_instance(KeyStore.get_default_type).tap {|k| k.load(nil, nil) }
end
# Infers the keystore format from the file name: ".p12" means PKCS#12,
# anything else falls back to the JVM's default store type (usually JKS).
def guess_store_type(filename)
  filename.end_with?(".p12") ? "pkcs12" : KeyStore.get_default_type
end
end
end
|
apache-2.0
|
Gilandel/utils-assertor
|
src/test/java/fr/landel/utils/assertor/predicate/PredicateAssertorMapTest.java
|
36900
|
/*-
* #%L
* utils-assertor
* %%
* Copyright (C) 2016 - 2018 Gilles Landel
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package fr.landel.utils.assertor.predicate;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.function.Predicate;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.junit.Test;
import fr.landel.utils.assertor.AbstractTest;
import fr.landel.utils.assertor.Assertor;
import fr.landel.utils.assertor.StepAssertor;
import fr.landel.utils.assertor.enums.EnumAnalysisMode;
import fr.landel.utils.assertor.utils.AssertorMap;
import fr.landel.utils.commons.MapUtils2;
/**
 * Check {@link AssertorMap}
 *
 * @since Jun 5, 2016
 * @author Gilles
 *
 */
public class PredicateAssertorMapTest extends AbstractTest {

    /** Failure message used when an expected exception was not thrown.
     * Declared {@code static final}: it is a compile-time constant shared by
     * all test instances (was previously a per-instance field). */
    private static final String ERROR = "error expected";

    /**
     * Test method for {@link AssertorMap} .
     */
    @Test
    public void testPredicateGet() {
        Map<String, Integer> map = new HashMap<>();
        map.put("key", 1);

        assertFalse(Assertor.<String, Integer> ofMap().hasHashCode(0).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().hasHashCode(Objects.hashCode(map)).that(map).isOK());

        // combination operators: and/or succeed, xor/nand/nor fail when both operands are true
        assertTrue(Assertor.<String, Integer> ofMap().contains("key").and().hasHashCode(Objects.hashCode(map)).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().contains("key").or().hasHashCode(Objects.hashCode(map)).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().contains("key").xor().hasHashCode(Objects.hashCode(map)).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().contains("key").nand().hasHashCode(Objects.hashCode(map)).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().contains("key").nor().hasHashCode(Objects.hashCode(map)).that(map).isOK());
    }

    /**
     * Test method for {@link AssertorMap#isEmpty}.
     *
     * @throws IOException
     *             On error
     */
    @Test
    public void testIsEmpty() throws IOException {
        final String el = "element";

        final Map<String, Integer> map = new HashMap<>();

        final PredicateAssertorStepMap<String, Integer> assertMap = Assertor.<String, Integer> ofMap();

        assertMap.isEmpty().that(map).orElseThrow();

        Assertor.<String, Integer> ofMap().isEmpty().and(assertMap.isEmpty()).that(map).isOK();

        // once the map is populated, isEmpty must fail for every orElseThrow variant
        map.put(el, 1);

        assertException(() -> {
            assertMap.isEmpty().that(map).orElseThrow();
            fail(ERROR);
        }, IllegalArgumentException.class);

        assertException(() -> {
            assertMap.isEmpty().that(map).orElseThrow("map is not empty");
            fail(ERROR);
        }, IllegalArgumentException.class, "map is not empty");

        assertException(() -> {
            assertMap.isEmpty().that(map).orElseThrow(new IOException(), true);
            fail(ERROR);
        }, IOException.class);

        // a null map is considered empty, so this does not throw
        Assertor.<String, Integer> ofMap().isEmpty().that((Map<String, Integer>) null)
                .orElseThrow("this argument is required; it must not be null");
    }

    /**
     * Test method for {@link AssertorMap#isNotEmpty}.
     *
     * @throws IOException
     *             On error
     */
    @Test
    public void testIsNotEmpty() throws IOException {
        final String el = "element";

        final Map<String, Integer> map = new HashMap<>();
        map.put(el, 1);

        final PredicateAssertorStepMap<String, Integer> assertMap = Assertor.<String, Integer> ofMap();

        assertMap.isNotEmpty().that(map).orElseThrow();

        assertException(() -> {
            assertMap.not().isNotEmpty().that(map).orElseThrow();
            fail(ERROR);
        }, IllegalArgumentException.class);

        // once cleared, isNotEmpty must fail for every orElseThrow variant
        map.clear();

        assertException(() -> {
            assertMap.isNotEmpty().that(map).orElseThrow();
            fail(ERROR);
        }, IllegalArgumentException.class);

        assertException(() -> {
            assertMap.isNotEmpty().that(map).orElseThrow("map is empty");
            fail(ERROR);
        }, IllegalArgumentException.class, "map is empty");

        assertException(() -> {
            assertMap.isNotEmpty().that(map).orElseThrow(new IOException(), true);
            fail(ERROR);
        }, IOException.class);

        assertFalse(Assertor.<String, Integer> ofMap().isNotEmpty().that((Map<String, Integer>) null).isOK());
    }

    /**
     * Test method for {@link AssertorMap#contains}.
     *
     * @throws IOException
     *             On error
     */
    @Test
    public void testContains() throws IOException {
        final String key1 = "element1";
        final String key2 = "element2";
        final Integer val1 = 1;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);
        map.put(key2, null);

        final List<String> keys = Arrays.asList(key1);

        Map<String, Integer> map1 = new HashMap<>();
        map1.put("element3", 2);

        // exercise every analysis mode with the same fixture
        this.checkContains(Assertor.ofMap(), key1, key2, val1, keys, map, map1);
        this.checkContains(Assertor.ofMap(EnumAnalysisMode.STREAM), key1, key2, val1, keys, map, map1);
        this.checkContains(Assertor.ofMap(EnumAnalysisMode.PARALLEL), key1, key2, val1, keys, map, map1);

        // a null map never contains anything
        assertFalse(Assertor.<String, Integer> ofMap().contains(key1).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().contains(key1, val1).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().containsAll(keys).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().containsAll(map).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().containsAny(keys).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().containsAny(map).that((Map<String, Integer>) null).isOK());
    }

    /**
     * Shared assertions for {@link AssertorMap#contains} variants, run once
     * per analysis mode by {@link #testContains()}.
     */
    private void checkContains(final PredicateAssertorStepMap<String, Integer> assertMap, final String key1, final String key2,
            final Integer val1, final List<String> keys, final Map<String, Integer> map, final Map<String, Integer> map1)
            throws IOException {

        assertMap.isNotNull().and().contains(key1).that(map).orElseThrow();

        assertMap.contains(key1).that(map).orElseThrow();
        assertMap.contains(key1).that(map).orElseThrow("map doesn't contain the element %2$s*");
        assertMap.contains(key1).that(map).orElseThrow(new IOException(), true);
        assertMap.contains(key1, val1).that(map).orElseThrow();
        assertMap.contains(key1, val1).that(map).orElseThrow("map doesn't contain the element %3$s*");
        assertMap.contains(key1, val1).that(map).orElseThrow(new IOException(), true);
        // key2 is mapped to null, so (key2, null) matches but (key2, 3) does not
        assertMap.contains(key2, null).that(map).orElseThrow();
        assertFalse(assertMap.contains(key2, 3).that(map).isOK());

        assertMap.containsAll(keys).that(map).orElseThrow();
        assertMap.containsAll(keys).that(map).orElseThrow("map doesn't contain the element %2$s*");
        assertMap.containsAll(keys).that(map).orElseThrow(new IOException(), true);
        assertMap.containsAll(map).that(map).orElseThrow();
        assertMap.containsAll(map).that(map).orElseThrow("map doesn't contain the element %2$s*");
        assertMap.containsAll(map).that(map).orElseThrow(new IOException(), true);

        assertMap.containsAny(keys).that(map).orElseThrow();
        assertMap.containsAny(keys).that(map).orElseThrow("map doesn't contain the element %2$s*");
        assertMap.containsAny(keys).that(map).orElseThrow(new IOException(), true);
        assertMap.containsAny(map).that(map).orElseThrow();
        assertMap.not().containsAny(Collections.singletonMap("k", 1)).that(map).orElseThrow();
        assertFalse(assertMap.not().containsAny(map).that(map).isOK());
        assertMap.containsAny(map).that(map).orElseThrow("map doesn't contain the element %2$s*");
        assertMap.containsAny(map).that(map).orElseThrow(new IOException(), true);

        assertTrue(assertMap.contains(key1).and().isNotEmpty().that(map).isOK());
        assertTrue(assertMap.contains(key1).or().isEmpty().that(map).isOK());
        assertTrue(assertMap.contains(key1).xor().isEmpty().that(map).isOK());
        assertFalse(assertMap.contains(key1).nand().isEmpty().that(map).isOK());
        assertTrue(assertMap.contains(key1).nor().isEmpty().that(map).isOK());

        assertFalse(assertMap.contains(key1, (Integer) null).that(map).isOK());
        assertTrue(assertMap.contains(key2, (Integer) null).that(map).isOK());
        assertFalse(assertMap.contains(key2, 1).that(map).isOK());

        assertFalse(assertMap.containsAll(Arrays.asList("element3")).that(map).isOK());
        assertFalse(assertMap.containsAll(map1).that(map).isOK());

        assertFalse(assertMap.containsAny(Arrays.asList("element3")).that(map).isOK());
        assertFalse(assertMap.containsAny(map1).that(map).isOK());

        // null arguments are never contained / never match
        assertFalse(assertMap.contains((String) null).that(map).isOK());
        assertFalse(assertMap.contains((String) null, (Integer) null).that(map).isOK());
        assertFalse(assertMap.containsAll((List<String>) null).that(map).isOK());
        assertFalse(assertMap.containsAll((Map<String, Integer>) null).that(map).isOK());
        assertFalse(assertMap.containsAny((List<String>) null).that(map).isOK());
        assertFalse(assertMap.containsAny((Map<String, Integer>) null).that(map).isOK());
    }

    /**
     * Test method for {@link AssertorMap#doesNotContain}.
     *
     * @throws IOException
     *             On error
     */
    @Test
    public void testDoesNotContain() throws IOException {
        final String key1 = "element1";
        final Integer val1 = 1;
        final String key2 = "element2";
        final Integer val2 = 2;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);

        final Map<String, Integer> map1 = new HashMap<>();
        map1.put("element3", 2);

        final List<String> keys = Arrays.asList("element3");

        // exercise every analysis mode with the same fixture
        this.checkDoesNotContain(Assertor.ofMap(), key1, key2, val1, val2, keys, map, map1);
        this.checkDoesNotContain(Assertor.ofMap(EnumAnalysisMode.STREAM), key1, key2, val1, val2, keys, map, map1);
        this.checkDoesNotContain(Assertor.ofMap(EnumAnalysisMode.PARALLEL), key1, key2, val1, val2, keys, map, map1);

        // negated checks on a null map still fail
        assertFalse(Assertor.<String, Integer> ofMap().not().contains(key1).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().not().contains(key1, val1).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().not().containsAll(keys).that((Map<String, Integer>) null).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().not().containsAll(map1).that((Map<String, Integer>) null).isOK());
    }

    /**
     * Shared assertions for negated {@link AssertorMap#contains} variants, run
     * once per analysis mode by {@link #testDoesNotContain()}.
     */
    private void checkDoesNotContain(final PredicateAssertorStepMap<String, Integer> assertMap, final String key1, final String key2,
            final Integer val1, final Integer val2, final List<String> keys, final Map<String, Integer> map,
            final Map<String, Integer> map1) throws IOException {

        assertMap.isNotNull().and().contains(key1).that(map).orElseThrow();

        assertMap.not().contains(key2).that(map).orElseThrow();
        assertMap.not().contains(key2).that(map).orElseThrow("map contains the element %2$s*");
        assertMap.not().contains(key2).that(map).orElseThrow(new IOException(), true);
        assertMap.not().contains(key2, val2).that(map).orElseThrow();
        assertMap.not().contains(key2, val2).that(map).orElseThrow("map contains the element %3$s*");
        assertMap.not().contains(key2, val2).that(map).orElseThrow(new IOException(), true);

        // not().containsAll: fails unless NONE of the entries are present;
        // not().containsAny: succeeds when none are present
        assertFalse(assertMap.not().containsAll(keys).that(map).isOK());
        assertFalse(assertMap.not().containsAll(map1).that(map).isOK());

        assertTrue(assertMap.not().containsAny(keys).that(map).isOK());
        assertTrue(assertMap.not().containsAny(map1).that(map).isOK());

        assertEquals("the map '[element1=1]' should NOT contain the key 'element1'",
                assertMap.not().contains(key1).that(map).getErrors().get());
        assertFalse(assertMap.not().contains(key1).that(map).isOK());
        assertFalse(assertMap.not().contains(key1, val1).that(map).isOK());
        assertFalse(assertMap.not().containsAll(map.keySet()).that(map).isOK());
        assertFalse(assertMap.not().containsAll(map).that(map).isOK());

        assertTrue(assertMap.not().contains(key1, (Integer) null).that(map).isOK());

        assertFalse(assertMap.not().containsAll(Arrays.asList("element3")).that(map).isOK());
        assertFalse(assertMap.not().containsAll(map1).that(map).isOK());

        assertTrue(assertMap.not().contains((String) null).that(map).isOK());
        assertTrue(assertMap.not().contains(key1, 3).that(map).isOK());
        assertTrue(assertMap.not().contains((String) null, (Integer) null).that(map).isOK());
        assertFalse(assertMap.not().containsAll((List<String>) null).that(map).isOK());
        assertFalse(assertMap.not().containsAll((Map<String, Integer>) null).that(map).isOK());
    }

    /**
     * Test method for {@link AssertorMap#isNotEmpty(Map, String, Object...)} .
     */
    @Test
    public void testIsNotEmptyOKMapOfQQString() {
        try {
            Map<String, String> map = new HashMap<>();
            map.put("f", "f");
            Assertor.<String, String> ofMap().isNotEmpty().that(map).orElseThrow();
        } catch (IllegalArgumentException e) {
            fail("The test isn't correct");
        }
    }

    /**
     * Test method for {@link AssertorMap#isNotEmpty(Map, String, Object...)} .
     */
    @Test(expected = IllegalArgumentException.class)
    public void testIsNotEmptyKOMapOfQQString() {
        Assertor.<String, String> ofMap().isNotEmpty().that(new HashMap<String, String>()).orElseThrow();
    }

    /**
     * Test method for {@link AssertorMap#isNotEmpty(java.util.Map)} .
     */
    @Test
    public void testIsNotEmptyOKMapOfQQ() {
        try {
            Map<String, String> map = new HashMap<>();
            map.put("fg", "fg");
            Assertor.<String, String> ofMap().isNotEmpty().that(map).orElseThrow();
        } catch (IllegalArgumentException e) {
            fail("The test isn't correct");
        }
    }

    /**
     * Test method for {@link AssertorMap#isNotEmpty(java.util.Map)} .
     */
    @Test(expected = IllegalArgumentException.class)
    public void testIsNotEmptyKOMapOfQQ() {
        Assertor.<String, String> ofMap().isNotEmpty().that(new HashMap<String, String>()).orElseThrow();
    }

    /**
     * Test method for {@link AssertorMap#hasSizeGT}.
     */
    @Test
    public void testHasSizeGT() {
        final String key1 = "element1";
        final String key2 = "element2";
        final Integer val1 = 1;
        final Integer val2 = 2;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);
        map.put(key2, val2);

        // map size is 2: strictly greater than 0 and 1, not 2 or more
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeGT(1).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeGT(0).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeGT(2).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeGT(3).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeGT(-1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeGT(1).that((Map<String, Integer>) null).isOK());

        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeGT(-1).that(map).orElseThrow(), IllegalArgumentException.class,
                "the size has to be greater than or equal to 0 and the map cannot be null");
        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeGT(2).that(map).orElseThrow(), IllegalArgumentException.class,
                "the map '[element1=1, element2=2]' size should be greater than: 2");
        assertException(() -> Assertor.<String, Integer> ofMap().not().hasSizeGT(1).that(map).orElseThrow(), IllegalArgumentException.class,
                "the map '[element1=1, element2=2]' size should NOT be greater than: 1");
    }

    /**
     * Test method for {@link AssertorMap#hasSizeGTE}.
     */
    @Test
    public void testHasSizeGTE() {
        final String key1 = "element1";
        final String key2 = "element2";
        final Integer val1 = 1;
        final Integer val2 = 2;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);
        map.put(key2, val2);

        // map size is 2: greater than or equal to 0, 1 and 2, not 3
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeGTE(1).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeGTE(0).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeGTE(2).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeGTE(3).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeGTE(-1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeGTE(1).that((Map<String, Integer>) null).isOK());

        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeGTE(-1).that(map).orElseThrow(), IllegalArgumentException.class,
                "the size has to be greater than or equal to 0 and the map cannot be null");
        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeGTE(3).that(map).orElseThrow(), IllegalArgumentException.class,
                "the map '[element1=1, element2=2]' size should be greater than or equal to: 3");
        assertException(() -> Assertor.<String, Integer> ofMap().not().hasSizeGTE(1).that(map).orElseThrow(),
                IllegalArgumentException.class, "the map '[element1=1, element2=2]' size should NOT be greater than or equal to: 1");
    }

    /**
     * Test method for {@link AssertorMap#hasSizeLT}.
     */
    @Test
    public void testHasSizeLT() {
        final String key1 = "element1";
        final String key2 = "element2";
        final Integer val1 = 1;
        final Integer val2 = 2;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);
        map.put(key2, val2);

        // map size is 2: strictly lower than 3 only
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLT(1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLT(0).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLT(2).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeLT(3).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLT(-1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLT(1).that((Map<String, Integer>) null).isOK());

        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeLT(-1).that(map).orElseThrow(), IllegalArgumentException.class,
                "the size has to be greater than or equal to 0 and the map cannot be null");
        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeLT(1).that(map).orElseThrow(), IllegalArgumentException.class,
                "the map '[element1=1, element2=2]' size should be lower than: 1");
        assertException(() -> Assertor.<String, Integer> ofMap().not().hasSizeLT(3).that(map).orElseThrow(), IllegalArgumentException.class,
                "the map '[element1=1, element2=2]' size should NOT be lower than: 3");
    }

    /**
     * Test method for {@link AssertorMap#hasSizeLTE}.
     */
    @Test
    public void testHasSizeLTE() {
        final String key1 = "element1";
        final String key2 = "element2";
        final Integer val1 = 1;
        final Integer val2 = 2;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);
        map.put(key2, val2);

        // map size is 2: lower than or equal to 2 and 3 only
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLTE(1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLTE(0).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeLTE(2).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().hasSizeLTE(3).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLTE(-1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSizeLTE(1).that((Map<String, Integer>) null).isOK());

        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeLTE(-1).that(map).orElseThrow(), IllegalArgumentException.class,
                "the size has to be greater than or equal to 0 and the map cannot be null");
        assertException(() -> Assertor.<String, Integer> ofMap().hasSizeLTE(1).that(map).orElseThrow(), IllegalArgumentException.class,
                "the map '[element1=1, element2=2]' size should be lower than or equal to: 1");
        assertException(() -> Assertor.<String, Integer> ofMap().not().hasSizeLTE(3).that(map).orElseThrow(),
                IllegalArgumentException.class, "the map '[element1=1, element2=2]' size should NOT be lower than or equal to: 3");
    }

    /**
     * Test method for {@link AssertorMap#hasSize}.
     */
    @Test
    public void testHasSize() {
        final String key1 = "element1";
        final Integer val1 = 1;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);

        assertTrue(Assertor.<String, Integer> ofMap().hasSize(1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSize(0).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSize(2).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSize(-1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().hasSize(1).that((Map<String, Integer>) null).isOK());
    }

    /**
     * Test method for {@link AssertorMap#hasNotSize}.
     */
    @Test
    public void testHasNotSize() {
        final String key1 = "element1";
        final Integer val1 = 1;

        final Map<String, Integer> map = new HashMap<>();
        map.put(key1, val1);

        // negative size or null map fail even when negated
        assertFalse(Assertor.<String, Integer> ofMap().not().hasSize(1).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().not().hasSize(0).that(map).isOK());
        assertTrue(Assertor.<String, Integer> ofMap().not().hasSize(2).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().not().hasSize(-1).that(map).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().not().hasSize(1).that((Map<String, Integer>) null).isOK());
    }

    /**
     * Check
     * {@link AssertorMap#containsInOrder(StepAssertor, Map, fr.landel.utils.assertor.commons.MessageAssertor)}
     */
    @Test
    public void testContainsInOrder() {
        // LinkedHashMap keeps insertion order, which is what is under test here
        Map<String, Integer> mapTU = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of("u", 2));
        Map<String, Integer> mapTU2 = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 2), Pair.of("u", 3));
        Map<String, Integer> mapTUClone = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of("u", 2));
        Map<String, Integer> mapUT = MapUtils2.newMap(LinkedHashMap::new, Pair.of("u", 2), Pair.of("t", 1));
        Map<String, Integer> mapU = Collections.singletonMap("u", 2);
        Map<String, Integer> mapTVU = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of("v", 3), Pair.of("t", 1),
                Pair.of("u", 2), Pair.of("v", 3));
        // t, v, u (old keys are replaced)
        Map<String, Integer> mapXTUV = MapUtils2.newMap(LinkedHashMap::new, Pair.of("x", 4), Pair.of("t", 1), Pair.of("u", 2),
                Pair.of("t", 1), Pair.of("u", 2), Pair.of("v", 3));
        // x, t, u , v (old keys are replaced)
        Map<String, Integer> mapTNull = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of(null, 2));
        Map<String, Integer> mapZ = MapUtils2.newMap(LinkedHashMap::new, Pair.of("z", 5));
        Map<String, Integer> mapUV = MapUtils2.newMap(LinkedHashMap::new, Pair.of("u", 2), Pair.of("v", 3));
        Map<String, Integer> mapTUV = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of("u", 2), Pair.of("v", 3));

        for (EnumAnalysisMode mode : EnumAnalysisMode.values()) {
            PredicateAssertorStepMap<String, Integer> predicate = Assertor.<String, Integer> ofMap(mode);

            assertTrue(predicate.containsInOrder(mapTUClone).that(mapTU).isOK());
            assertFalse(predicate.containsInOrder(mapUT).that(mapTU).isOK());
            assertTrue(predicate.containsInOrder(mapU).that(mapTU).isOK());
            assertFalse(predicate.containsInOrder(mapTU).that(mapTVU).isOK());
            assertTrue(predicate.containsInOrder(mapTU).that(mapXTUV).isOK());
            assertTrue(predicate.containsInOrder(mapTUClone).that(mapTU).isOK());
            assertTrue(predicate.containsInOrder(mapTNull).that(mapTNull).isOK());
            assertTrue(predicate.containsInOrder(mapTU).that(mapTUV).isOK());
            assertTrue(predicate.containsInOrder(mapUV).that(mapTUV).isOK());
            assertFalse(predicate.containsInOrder(mapTUV).that(mapTU).isOK());
            assertFalse(predicate.containsInOrder(mapUT).that(mapTU).isOK());
            assertFalse(predicate.containsInOrder(mapZ).that(mapTU).isOK());

            // NOT
            assertFalse(predicate.not().containsInOrder(mapTUClone).that(mapTU).isOK());
            assertTrue(predicate.not().containsInOrder(mapUT).that(mapTU).isOK());
            assertFalse(predicate.not().containsInOrder(mapU).that(mapTU).isOK());
            assertTrue(predicate.not().containsInOrder(mapTU).that(mapTVU).isOK());
            assertFalse(predicate.not().containsInOrder(mapTU).that(mapXTUV).isOK());
            assertFalse(predicate.not().containsInOrder(mapTUClone).that(mapTU).isOK());
            assertFalse(predicate.not().containsInOrder(mapTNull).that(mapTNull).isOK());
            assertFalse(predicate.not().containsInOrder(mapTU).that(mapTUV).isOK());
            assertFalse(predicate.not().containsInOrder(mapUV).that(mapTUV).isOK());
            assertTrue(predicate.not().containsInOrder(mapTUV).that(mapTU).isOK());
            assertTrue(predicate.not().containsInOrder(mapUT).that(mapTU).isOK());
            assertTrue(predicate.not().containsInOrder(mapZ).that(mapTU).isOK());

            // Keys and values
            assertFalse(predicate.containsInOrder(mapZ.keySet()).that(mapTU).isOK());
            assertTrue(predicate.not().containsInOrder(mapZ.keySet()).that(mapTU).isOK());
            assertFalse(predicate.containsValuesInOrder(mapZ.values()).that(mapTU).isOK());
            assertTrue(predicate.not().containsValuesInOrder(mapZ.values()).that(mapTU).isOK());

            // mapTU2 shares keys with mapTU but not values
            assertFalse(predicate.containsInOrder(mapTU2).that(mapTU).isOK());
            assertFalse(predicate.containsValuesInOrder(mapTU2.values()).that(mapTU).isOK());
            assertTrue(predicate.containsInOrder(mapTU2.keySet()).that(mapTU).isOK());
            assertTrue(predicate.not().containsInOrder(mapTU2).that(mapTU).isOK());
            assertTrue(predicate.not().containsValuesInOrder(mapTU2.values()).that(mapTU).isOK());
            assertFalse(predicate.not().containsInOrder(mapTU2.keySet()).that(mapTU).isOK());

            // mapUV shares values with mapTU2 but not keys
            assertFalse(predicate.containsInOrder(mapUV).that(mapTU2).isOK());
            assertTrue(predicate.containsValuesInOrder(mapUV.values()).that(mapTU2).isOK());
            assertFalse(predicate.containsInOrder(mapUV.keySet()).that(mapTU2).isOK());
            assertTrue(predicate.not().containsInOrder(mapUV).that(mapTU2).isOK());
            assertFalse(predicate.not().containsValuesInOrder(mapUV.values()).that(mapTU2).isOK());
            assertTrue(predicate.not().containsInOrder(mapUV.keySet()).that(mapTU2).isOK());

            assertTrue(predicate.containsInOrder(mapTU2.keySet()).that(mapTU).isOK());
            assertFalse(predicate.not().containsInOrder(mapTU2.keySet()).that(mapTU).isOK());
        }
    }

    /**
     * Check {@link AssertorMap#containsValues}
     */
    @Test
    public void testContainsValue() {
        Map<String, Integer> mapTU = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of("u", 2));

        assertTrue(Assertor.<String, Integer> ofMap().containsValue(1).that(mapTU).isOK());
        assertFalse(Assertor.<String, Integer> ofMap().containsValue(0).that(mapTU).isOK());
    }

    /**
     * Check {@link AssertorMap#containsAnyValues}
     */
    @Test
    public void testContainsAnyValues() {
        Map<String, Integer> mapTU = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of("u", 2));

        for (EnumAnalysisMode mode : EnumAnalysisMode.values()) {
            PredicateAssertorStepMap<String, Integer> predicate = Assertor.<String, Integer> ofMap(mode);

            // any: at least one of the provided values must be present
            assertTrue(predicate.containsAnyValues(Arrays.asList(1, 2)).that(mapTU).isOK());
            assertTrue(predicate.containsAnyValues(Arrays.asList(1, 2, 3)).that(mapTU).isOK());
            assertTrue(predicate.containsAnyValues(Arrays.asList(0, 1)).that(mapTU).isOK());
            assertFalse(predicate.containsAnyValues(Arrays.asList(0)).that(mapTU).isOK());
            assertFalse(predicate.containsAnyValues(Arrays.asList(0)).that((Map<String, Integer>) null).isOK());
            assertFalse(predicate.containsAnyValues((List<Integer>) null).that(mapTU).isOK());
        }
    }

    /**
     * Check {@link AssertorMap#containsAllValues}
     */
    @Test
    public void testContainsAllValues() {
        Map<String, Integer> mapTU = MapUtils2.newMap(LinkedHashMap::new, Pair.of("t", 1), Pair.of("u", 2));

        for (EnumAnalysisMode mode : EnumAnalysisMode.values()) {
            PredicateAssertorStepMap<String, Integer> predicate = Assertor.<String, Integer> ofMap(mode);

            // all: every provided value must be present
            assertTrue(predicate.containsAllValues(Arrays.asList(1, 2)).that(mapTU).isOK());
            assertFalse(predicate.containsAllValues(Arrays.asList(1, 2, 3)).that(mapTU).isOK());
            assertFalse(predicate.containsAllValues(Arrays.asList(0, 1)).that(mapTU).isOK());
            assertFalse(predicate.containsAllValues(Arrays.asList(0)).that(mapTU).isOK());
            assertFalse(predicate.containsAllValues(Arrays.asList(0)).that((Map<String, Integer>) null).isOK());
            assertFalse(predicate.containsAllValues((List<Integer>) null).that(mapTU).isOK());
        }
    }

    /**
     * Check {@link AssertorMap#anyMatch}
     */
    @Test
    public void testAnyMatch() {
        Map<String, Integer> maptu = MapUtils2.newHashMap(Pair.of("t", 2), Pair.of("u", 3));
        Map<String, Integer> mapTu = MapUtils2.newHashMap(Pair.of("T", 2), Pair.of("u", 2));
        Map<String, Integer> mapTU = MapUtils2.newHashMap(Pair.of("T", 1), Pair.of("U", 2));
        Map<String, Integer> maptNull = MapUtils2.newHashMap(Pair.of("t", 1), Pair.of(null, null));
        Map<String, Integer> maptUNull = MapUtils2.newHashMap(Pair.of("t", 1), Pair.of("U", null));

        // entry matches when its key is lower case AND its value is > 1
        Predicate<Entry<String, Integer>> predicate = e -> Objects.equals(e.getKey(), StringUtils.lowerCase(e.getKey()))
                && e.getValue() > 1;

        assertTrue(Assertor.<String, Integer> ofMap().anyMatch(predicate).that(maptu).isOK());

        for (EnumAnalysisMode mode : EnumAnalysisMode.values()) {
            PredicateAssertorStepMap<String, Integer> predicateAssertor = Assertor.<String, Integer> ofMap(mode);
            PredicateStepMap<String, Integer> predicateStep = predicateAssertor.anyMatch(predicate);

            assertTrue(predicateStep.that(maptu).isOK());
            assertTrue(predicateStep.that(mapTu).isOK());
            assertFalse(predicateStep.that(mapTU).isOK());
            // null keys/values make the predicate throw internally -> not OK
            assertFalse(predicateStep.that(maptNull).isOK());
            assertFalse(predicateStep.that(maptUNull).isOK());

            assertException(() -> predicateStep.that(Collections.<String, Integer> emptyMap()).orElseThrow(),
                    IllegalArgumentException.class, "the map cannot be null or empty and predicate cannot be null");
            assertException(() -> predicateStep.that((Map<String, Integer>) null).orElseThrow(), IllegalArgumentException.class,
                    "the map cannot be null or empty and predicate cannot be null");
            assertException(() -> predicateAssertor.anyMatch(null).that(mapTu).orElseThrow(), IllegalArgumentException.class,
                    "the map cannot be null or empty and predicate cannot be null");
            assertException(() -> predicateStep.that(mapTU).orElseThrow(), IllegalArgumentException.class,
                    "any map entry '[T=1, U=2]' should match the predicate");
        }
    }

    /**
     * Check {@link AssertorMap#allMatch}
     */
    @Test
    public void testAllMatch() {
        Map<String, Integer> maptu = MapUtils2.newHashMap(Pair.of("t", 2), Pair.of("u", 3));
        Map<String, Integer> mapTu = MapUtils2.newHashMap(Pair.of("T", 2), Pair.of("u", 2));
        Map<String, Integer> mapTU = MapUtils2.newHashMap(Pair.of("T", 1), Pair.of("U", 2));
        Map<String, Integer> maptNull = MapUtils2.newHashMap(Pair.of("t", 1), Pair.of(null, null));
        Map<String, Integer> maptUNull = MapUtils2.newHashMap(Pair.of("t", 1), Pair.of("U", null));

        // entry matches when its key is lower case AND its value is > 1
        Predicate<Entry<String, Integer>> predicate = e -> Objects.equals(e.getKey(), StringUtils.lowerCase(e.getKey()))
                && e.getValue() > 1;

        assertTrue(Assertor.<String, Integer> ofMap().allMatch(predicate).that(maptu).isOK());

        for (EnumAnalysisMode mode : EnumAnalysisMode.values()) {
            PredicateAssertorStepMap<String, Integer> predicateAssertor = Assertor.<String, Integer> ofMap(mode);
            PredicateStepMap<String, Integer> predicateStep = predicateAssertor.allMatch(predicate);

            assertTrue(predicateStep.that(maptu).isOK());
            assertFalse(predicateStep.that(mapTu).isOK());
            assertFalse(predicateStep.that(mapTU).isOK());
            assertFalse(predicateStep.that(maptNull).isOK());
            assertFalse(predicateStep.that(maptUNull).isOK());

            assertException(() -> predicateStep.that(Collections.<String, Integer> emptyMap()).orElseThrow(),
                    IllegalArgumentException.class, "the map cannot be null or empty and predicate cannot be null");
            assertException(() -> predicateStep.that((Map<String, Integer>) null).orElseThrow(), IllegalArgumentException.class,
                    "the map cannot be null or empty and predicate cannot be null");
            assertException(() -> predicateAssertor.allMatch(null).that(mapTu).orElseThrow(), IllegalArgumentException.class,
                    "the map cannot be null or empty and predicate cannot be null");
            assertException(() -> predicateStep.that(mapTU).orElseThrow(), IllegalArgumentException.class,
                    "all the map entries '[T=1, U=2]' should match the predicate");
        }
    }
}
|
apache-2.0
|
nathanbjenx/cairis
|
cairis/gui/TaskDialogParameters.py
|
1189
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import DialogClassParameters
__author__ = 'Shamal Faily'
class TaskDialogParameters(DialogClassParameters.DialogClassParameters):
  """Dialog parameters for task dialogs.

  Extends the generic DialogClassParameters with an extra proxy object
  (dp) that the task dialog uses to reach the model layer.
  """

  def __init__(self,winId,winLabel,dClass,createId,setterFn,creationFlag,dp):
    """Initialise the parameters.

    winId / winLabel / dClass / createId / setterFn / creationFlag are
    forwarded unchanged to DialogClassParameters; dp is the proxy
    retained for later retrieval via proxy().
    """
    DialogClassParameters.DialogClassParameters.__init__(self,winId,winLabel,dClass,createId,setterFn,creationFlag)
    # Keep the proxy so dialog code can look it up after construction.
    self.theProxy = dp

  def proxy(self): return self.theProxy
|
apache-2.0
|
davkean/roslyn
|
src/EditorFeatures/CSharpTest/Completion/CompletionProviders/DeclarationNameCompletionProviderTests.cs
|
54141
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Completion;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Completion.Providers;
using Microsoft.CodeAnalysis.Diagnostics.Analyzers.NamingStyles;
using Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Completion.CompletionProviders;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.NamingStyles;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Simplification;
using Microsoft.CodeAnalysis.Test.Utilities;
using Roslyn.Test.Utilities;
using Xunit;
using static Microsoft.CodeAnalysis.Diagnostics.Analyzers.NamingStyles.SymbolSpecification;
namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Completion.CompletionSetSources
{
public class DeclarationNameCompletionProviderTests : AbstractCSharpCompletionProviderTests
{
// Wires the shared workspace fixture into the base completion-provider test harness.
public DeclarationNameCompletionProviderTests(CSharpTestWorkspaceFixture workspaceFixture) : base(workspaceFixture)
{
}
// Tells the base harness which completion provider these tests exercise.
internal override Type GetCompletionProviderType()
=> typeof(DeclarationNameCompletionProvider);
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NameWithOnlyType1()
{
    // With only the type written inside a class body, the provider should
    // offer a camelCase field name, a PascalCase property name, and a
    // Get-prefixed method name, each with the matching glyph.
    var source = @"
public class MyClass
{
    MyClass $$
}
";
    await VerifyItemExistsAsync(source, "myClass", glyph: (int)Glyph.FieldPublic);
    await VerifyItemExistsAsync(source, "MyClass", glyph: (int)Glyph.PropertyPublic);
    await VerifyItemExistsAsync(source, "GetMyClass", glyph: (int)Glyph.MethodPublic);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task AsyncTaskOfT()
{
var markup = @"
using System.Threading.Tasks;
public class C
{
async Task<C> $$
}
";
await VerifyItemExistsAsync(markup, "GetCAsync");
}
[Fact(Skip = "not yet implemented"), Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NonAsyncTaskOfT()
{
var markup = @"
public class C
{
Task<C> $$
}
";
await VerifyItemExistsAsync(markup, "GetCAsync");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task MethodDeclaration1()
{
var markup = @"
public class C
{
virtual C $$
}
";
await VerifyItemExistsAsync(markup, "GetC");
await VerifyItemIsAbsentAsync(markup, "C");
await VerifyItemIsAbsentAsync(markup, "c");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task WordBreaking1()
{
var markup = @"
using System.Threading;
public class C
{
CancellationToken $$
}
";
await VerifyItemExistsAsync(markup, "cancellationToken");
await VerifyItemExistsAsync(markup, "cancellation");
await VerifyItemExistsAsync(markup, "token");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task WordBreaking2()
{
var markup = @"
interface I {}
public class C
{
I $$
}
";
await VerifyItemExistsAsync(markup, "GetI");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task WordBreaking3()
{
var markup = @"
interface II {}
public class C
{
II $$
}
";
await VerifyItemExistsAsync(markup, "GetI");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task WordBreaking4()
{
var markup = @"
interface IGoo {}
public class C
{
IGoo $$
}
";
await VerifyItemExistsAsync(markup, "Goo");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task WordBreaking5()
{
var markup = @"
class SomeWonderfullyLongClassName {}
public class C
{
SomeWonderfullyLongClassName $$
}
";
await VerifyItemExistsAsync(markup, "Some");
await VerifyItemExistsAsync(markup, "SomeWonderfully");
await VerifyItemExistsAsync(markup, "SomeWonderfullyLong");
await VerifyItemExistsAsync(markup, "SomeWonderfullyLongClass");
await VerifyItemExistsAsync(markup, "Name");
await VerifyItemExistsAsync(markup, "ClassName");
await VerifyItemExistsAsync(markup, "LongClassName");
await VerifyItemExistsAsync(markup, "WonderfullyLongClassName");
await VerifyItemExistsAsync(markup, "SomeWonderfullyLongClassName");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task Parameter1()
{
var markup = @"
using System.Threading;
public class C
{
void Goo(CancellationToken $$
}
";
await VerifyItemExistsAsync(markup, "cancellationToken", glyph: (int)Glyph.Parameter);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task Parameter2()
{
var markup = @"
using System.Threading;
public class C
{
void Goo(int x, CancellationToken c$$
}
";
await VerifyItemExistsAsync(markup, "cancellationToken", glyph: (int)Glyph.Parameter);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task Parameter3()
{
var markup = @"
using System.Threading;
public class C
{
void Goo(CancellationToken c$$) {}
}
";
await VerifyItemExistsAsync(markup, "cancellationToken", glyph: (int)Glyph.Parameter);
}
[WorkItem(19260, "https://github.com/dotnet/roslyn/issues/19260")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task EscapeKeywords1()
{
var markup = @"
using System.Text;
public class C
{
void Goo(StringBuilder $$) {}
}
";
await VerifyItemExistsAsync(markup, "stringBuilder", glyph: (int)Glyph.Parameter);
await VerifyItemExistsAsync(markup, "@string", glyph: (int)Glyph.Parameter);
await VerifyItemExistsAsync(markup, "builder", glyph: (int)Glyph.Parameter);
}
[WorkItem(19260, "https://github.com/dotnet/roslyn/issues/19260")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task EscapeKeywords2()
{
var markup = @"
class For { }
public class C
{
void Goo(For $$) {}
}
";
await VerifyItemExistsAsync(markup, "@for", glyph: (int)Glyph.Parameter);
}
[WorkItem(19260, "https://github.com/dotnet/roslyn/issues/19260")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task EscapeKeywords3()
{
var markup = @"
class For { }
public class C
{
void goo()
{
For $$
}
}
";
await VerifyItemExistsAsync(markup, "@for");
}
[WorkItem(19260, "https://github.com/dotnet/roslyn/issues/19260")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task EscapeKeywords4()
{
var markup = @"
using System.Text;
public class C
{
void goo()
{
StringBuilder $$
}
}
";
await VerifyItemExistsAsync(markup, "stringBuilder");
await VerifyItemExistsAsync(markup, "@string");
await VerifyItemExistsAsync(markup, "builder");
}
[WorkItem(25214, "https://github.com/dotnet/roslyn/issues/25214")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsLazyOfType1()
{
var markup = @"
using System;
using System.Collections.Generic;
internal class Example
{
public Lazy<Item> $$
}
public class Item { }
";
await VerifyItemExistsAsync(markup, "item");
await VerifyItemExistsAsync(markup, "Item");
await VerifyItemExistsAsync(markup, "GetItem");
}
[WorkItem(25214, "https://github.com/dotnet/roslyn/issues/25214")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsLazyOfType2()
{
var markup = @"
using System;
using System.Collections.Generic;
internal class Example
{
public List<Lazy<Item>> $$
}
public class Item { }
";
await VerifyItemExistsAsync(markup, "items");
await VerifyItemExistsAsync(markup, "Items");
await VerifyItemExistsAsync(markup, "GetItems");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForInt()
{
// Built-in keyword types like int carry no meaningful name to derive
// suggestions from, so the provider should stay silent.
var markup = @"
using System.Threading;
public class C
{
    int $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForLong()
{
var markup = @"
using System.Threading;
public class C
{
long $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForDouble()
{
var markup = @"
using System.Threading;
public class C
{
double $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForFloat()
{
var markup = @"
using System.Threading;
public class C
{
float $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForSbyte()
{
var markup = @"
using System.Threading;
public class C
{
sbyte $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForShort()
{
var markup = @"
using System.Threading;
public class C
{
short $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForUint()
{
var markup = @"
using System.Threading;
public class C
{
uint $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForUlong()
{
var markup = @"
using System.Threading;
public class C
{
ulong $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForUShort()
{
    // ushort is a built-in keyword type, so no name suggestions are expected.
    // Renamed from SuggestionsForUShort: the body asserts that NO items exist,
    // matching the other NoSuggestionsFor* tests in this file; the old name
    // claimed the opposite of what the test verifies.
    var markup = @"
using System.Threading;
public class C
{
    ushort $$
}
";
    await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForBool()
{
var markup = @"
using System.Threading;
public class C
{
bool $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForByte()
{
var markup = @"
using System.Threading;
public class C
{
byte $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForChar()
{
var markup = @"
using System.Threading;
public class C
{
char $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSuggestionsForString()
{
var markup = @"
public class C
{
string $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NoSingleLetterClassNameSuggested()
{
var markup = @"
public class C
{
C $$
}
";
await VerifyItemIsAbsentAsync(markup, "C");
await VerifyItemIsAbsentAsync(markup, "c");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task ArrayElementTypeSuggested()
{
var markup = @"
using System.Threading;
public class MyClass
{
MyClass[] $$
}
";
await VerifyItemExistsAsync(markup, "MyClasses");
await VerifyItemIsAbsentAsync(markup, "Array");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NotTriggeredByVar()
{
var markup = @"
public class C
{
var $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NotAfterVoid()
{
var markup = @"
public class C
{
void $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task AfterGeneric()
{
var markup = @"
public class C
{
System.Collections.Generic.IEnumerable<C> $$
}
";
await VerifyItemExistsAsync(markup, "GetCs");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NothingAfterVar()
{
var markup = @"
public class C
{
void goo()
{
var $$
}
}
";
await VerifyNoItemsExistAsync(markup);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCorrectOrder()
{
    // Suggestions must come back in a stable order: camelCase names first,
    // then PascalCase, then the Get-prefixed method names.
    var source = @"
public class MyClass
{
    MyClass $$
}
";
    var completions = await GetCompletionItemsAsync(source, SourceCodeKind.Regular);
    var displayTexts = completions.Select(c => c.DisplayText);
    Assert.Equal(
        new[] { "myClass", "my", "@class", "MyClass", "My", "Class", "GetMyClass", "GetMy", "GetClass" },
        displayTexts);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestDescriptionInsideClass()
{
var markup = @"
public class MyClass
{
MyClass $$
}
";
await VerifyItemExistsAsync(markup, "myClass", glyph: (int)Glyph.FieldPublic, expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemExistsAsync(markup, "MyClass", glyph: (int)Glyph.PropertyPublic, expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemExistsAsync(markup, "GetMyClass", glyph: (int)Glyph.MethodPublic, expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestDescriptionInsideMethod()
{
var markup = @"
public class MyClass
{
void M()
{
MyClass $$
}
}
";
await VerifyItemExistsAsync(markup, "myClass", glyph: (int)Glyph.Local, expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemIsAbsentAsync(markup, "MyClass");
await VerifyItemIsAbsentAsync(markup, "GetMyClass");
}
[WorkItem(20273, "https://github.com/dotnet/roslyn/issues/20273")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task Alias1()
{
var markup = @"
using MyType = System.String;
public class C
{
MyType $$
}
";
await VerifyItemExistsAsync(markup, "my");
await VerifyItemExistsAsync(markup, "type");
await VerifyItemExistsAsync(markup, "myType");
}
[WorkItem(20273, "https://github.com/dotnet/roslyn/issues/20273")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task AliasWithInterfacePattern()
{
var markup = @"
using IMyType = System.String;
public class C
{
MyType $$
}
";
await VerifyItemExistsAsync(markup, "my");
await VerifyItemExistsAsync(markup, "type");
await VerifyItemExistsAsync(markup, "myType");
}
[WorkItem(20016, "https://github.com/dotnet/roslyn/issues/20016")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NotAfterExistingName1()
{
var markup = @"
using IMyType = System.String;
public class C
{
MyType myType $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[WorkItem(20016, "https://github.com/dotnet/roslyn/issues/20016")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task NotAfterExistingName2()
{
var markup = @"
using IMyType = System.String;
public class C
{
MyType myType, MyType $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[WorkItem(19409, "https://github.com/dotnet/roslyn/issues/19409")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task OutVarArgument()
{
var markup = @"
class Test
{
void Do(out Test goo)
{
Do(out var $$
}
}
";
await VerifyItemExistsAsync(markup, "test");
}
[WorkItem(19409, "https://github.com/dotnet/roslyn/issues/19409")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task OutArgument()
{
var markup = @"
class Test
{
void Do(out Test goo)
{
Do(out Test $$
}
}
";
await VerifyItemExistsAsync(markup, "test");
}
[WorkItem(19409, "https://github.com/dotnet/roslyn/issues/19409")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task OutGenericArgument()
{
var markup = @"
class Test
{
void Do<T>(out T goo)
{
Do(out Test $$
}
}
";
await VerifyItemExistsAsync(markup, "test");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleExpressionDeclaration1()
{
var markup = @"
class Test
{
void Do()
{
(System.Array array, System.Action $$
}
}
";
await VerifyItemExistsAsync(markup, "action");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleExpressionDeclaration2()
{
var markup = @"
class Test
{
void Do()
{
(array, action $$
}
}
";
await VerifyItemIsAbsentAsync(markup, "action");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleExpressionDeclaration_NestedTuples()
{
var markup = @"
class Test
{
void Do()
{
((int i1, int i2), (System.Array array, System.Action $$
}
}
";
await VerifyItemExistsAsync(markup, "action");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleExpressionDeclaration_NestedTuples_CompletionInTheMiddle()
{
var markup = @"
class Test
{
void Do()
{
((System.Array array, System.Action $$), (int i1, int i2))
}
}
";
await VerifyItemExistsAsync(markup, "action");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementDefinition1()
{
var markup = @"
class Test
{
void Do()
{
(System.Array $$
}
}
";
await VerifyItemExistsAsync(markup, "array");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementDefinition2()
{
var markup = @"
class Test
{
(System.Array $$) Test() => default;
}
";
await VerifyItemExistsAsync(markup, "array");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementDefinition3()
{
var markup = @"
class Test
{
(System.Array array, System.Action $$) Test() => default;
}
";
await VerifyItemExistsAsync(markup, "action");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementDefinition4()
{
var markup = @"
class Test
{
(System.Array $$
}
";
await VerifyItemExistsAsync(markup, "array");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementDefinition5()
{
var markup = @"
class Test
{
void M((System.Array $$
}
";
await VerifyItemExistsAsync(markup, "array");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementDefinition_NestedTuples()
{
var markup = @"
class Test
{
void M(((int, int), (int, System.Array $$
}
";
await VerifyItemExistsAsync(markup, "array");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementDefinition_InMiddleOfTuple()
{
var markup = @"
class Test
{
void M((int, System.Array $$),int)
}
";
await VerifyItemExistsAsync(markup, "array");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementTypeInference()
{
var markup = @"
class Test
{
    void Do()
    {
        (var accessViolationException, var $$) = (new AccessViolationException(), new Action(() => { }));
    }
}
";
// Name inference for deconstruction targets is currently not supported,
// so no suggestion is offered here:
await VerifyItemIsAbsentAsync(markup, "action");
// See https://github.com/dotnet/roslyn/issues/27138.
// After that issue is fixed we expect this to work:
// await VerifyItemExistsAsync(markup, "action");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact(Skip = "Not yet supported"), Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementInGenericTypeArgument()
{
var markup = @"
class Test
{
void Do()
{
System.Func<(System.Action $$
}
}
";
await VerifyItemExistsAsync(markup, "action");
}
[WorkItem(22342, "https://github.com/dotnet/roslyn/issues/22342")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TupleElementInvocationInsideTuple()
{
var markup = @"
class Test
{
void Do()
{
int M(int i1, int i2) => i1;
var t=(e1: 1, e2: M(1, $$));
}
}
";
await VerifyNoItemsExistAsync(markup);
}
[WorkItem(17987, "https://github.com/dotnet/roslyn/issues/17987")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task Pluralize1()
{
var markup = @"
using System.Collections.Generic;
class Index
{
IEnumerable<Index> $$
}
";
await VerifyItemExistsAsync(markup, "Indices");
}
[WorkItem(17987, "https://github.com/dotnet/roslyn/issues/17987")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task Pluralize2()
{
var markup = @"
using System.Collections.Generic;
class Test
{
IEnumerable<IEnumerable<Test>> $$
}
";
await VerifyItemExistsAsync(markup, "tests");
}
[WorkItem(17987, "https://github.com/dotnet/roslyn/issues/17987")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task Pluralize3()
{
var markup = @"
using System.Collections.Generic;
using System.Threading;
class Test
{
IEnumerable<CancellationToken> $$
}
";
await VerifyItemExistsAsync(markup, "cancellationTokens");
await VerifyItemExistsAsync(markup, "cancellations");
await VerifyItemExistsAsync(markup, "tokens");
}
[WorkItem(17987, "https://github.com/dotnet/roslyn/issues/17987")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task PluralizeList()
{
var markup = @"
using System.Collections.Generic;
using System.Threading;
class Test
{
List<CancellationToken> $$
}
";
await VerifyItemExistsAsync(markup, "cancellationTokens");
await VerifyItemExistsAsync(markup, "cancellations");
await VerifyItemExistsAsync(markup, "tokens");
}
[WorkItem(17987, "https://github.com/dotnet/roslyn/issues/17987")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task PluralizeArray()
{
var markup = @"
using System.Collections.Generic;
using System.Threading;
class Test
{
CancellationToken[] $$
}
";
await VerifyItemExistsAsync(markup, "cancellationTokens");
await VerifyItemExistsAsync(markup, "cancellations");
await VerifyItemExistsAsync(markup, "tokens");
}
[WorkItem(23497, "https://github.com/dotnet/roslyn/issues/23497")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InPatternMatching1()
{
var markup = @"
using System.Threading;
public class C
{
public static void Main()
{
object obj = null;
if (obj is CancellationToken $$) { }
}
}
";
await VerifyItemExistsAsync(markup, "cancellationToken");
await VerifyItemExistsAsync(markup, "cancellation");
await VerifyItemExistsAsync(markup, "token");
}
[WorkItem(23497, "https://github.com/dotnet/roslyn/issues/23497")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InPatternMatching2()
{
var markup = @"
using System.Threading;
public class C
{
public static bool Foo()
{
object obj = null;
return obj is CancellationToken $$
}
}
";
await VerifyItemExistsAsync(markup, "cancellationToken");
await VerifyItemExistsAsync(markup, "cancellation");
await VerifyItemExistsAsync(markup, "token");
}
[WorkItem(23497, "https://github.com/dotnet/roslyn/issues/23497")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InPatternMatching3()
{
var markup = @"
using System.Threading;
public class C
{
public static void Main()
{
object obj = null;
switch(obj)
{
case CancellationToken $$
}
}
}
";
await VerifyItemExistsAsync(markup, "cancellationToken");
await VerifyItemExistsAsync(markup, "cancellation");
await VerifyItemExistsAsync(markup, "token");
}
[WorkItem(23497, "https://github.com/dotnet/roslyn/issues/23497")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InPatternMatching4()
{
var markup = @"
using System.Threading;
public class C
{
public static void Main()
{
object obj = null;
if (obj is CancellationToken ca$$) { }
}
}
";
await VerifyItemExistsAsync(markup, "cancellationToken");
await VerifyItemExistsAsync(markup, "cancellation");
}
[WorkItem(23497, "https://github.com/dotnet/roslyn/issues/23497")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InPatternMatching5()
{
var markup = @"
using System.Threading;
public class C
{
public static bool Foo()
{
object obj = null;
return obj is CancellationToken to$$
}
}
";
await VerifyItemExistsAsync(markup, "cancellationToken");
await VerifyItemExistsAsync(markup, "token");
}
[WorkItem(23497, "https://github.com/dotnet/roslyn/issues/23497")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InPatternMatching6()
{
var markup = @"
using System.Threading;
public class C
{
public static void Main()
{
object obj = null;
switch(obj)
{
case CancellationToken to$$
}
}
}
";
await VerifyItemExistsAsync(markup, "cancellationToken");
await VerifyItemExistsAsync(markup, "token");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InUsingStatement1()
{
var markup = @"
using System.IO;
class C
{
void M()
{
using (StreamReader s$$
}
}
";
await VerifyItemExistsAsync(markup, "streamReader");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InUsingStatement2()
{
var markup = @"
using System.IO;
class C
{
void M()
{
using (StreamReader s1, $$
}
}
";
await VerifyItemExistsAsync(markup, "streamReader");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InUsingStatement_Var()
{
var markup = @"
using System.IO;
class C
{
void M()
{
using (var m$$ = new MemoryStream())
}
}
";
await VerifyItemExistsAsync(markup, "memoryStream");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InForStatement1()
{
var markup = @"
using System.IO;
class C
{
void M()
{
for (StreamReader s$$
}
}
";
await VerifyItemExistsAsync(markup, "streamReader");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InForStatement2()
{
var markup = @"
using System.IO;
class C
{
void M()
{
for (StreamReader s1, $$
}
}
";
await VerifyItemExistsAsync(markup, "streamReader");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InForStatement_Var()
{
var markup = @"
using System.IO;
class C
{
void M()
{
for (var m$$ = new MemoryStream();
}
}
";
await VerifyItemExistsAsync(markup, "memoryStream");
}
[WorkItem(26021, "https://github.com/dotnet/roslyn/issues/26021")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InForEachStatement()
{
var markup = @"
using System.IO;
class C
{
void M()
{
foreach (StreamReader $$
}
}
";
await VerifyItemExistsAsync(markup, "streamReader");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task InForEachStatement_Var()
{
var markup = @"
using System.IO;
class C
{
void M()
{
foreach (var m$$ in new[] { new MemoryStream() })
}
}
";
await VerifyItemExistsAsync(markup, "memoryStream");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task DisabledByOption()
{
// Turning off CompletionOptions.ShowNameSuggestions for C# must suppress
// all declaration-name suggestions, even where they would normally appear.
var workspace = WorkspaceFixture.GetWorkspace(ExportProvider);
workspace.TryApplyChanges(workspace.CurrentSolution.WithOptions(workspace.Options.
WithChangedOption(CompletionOptions.ShowNameSuggestions, LanguageNames.CSharp, false)));
var markup = @"
class Test
{
    Test $$
}
";
await VerifyNoItemsExistAsync(markup);
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsIEnumerableOfType()
{
var markup = @"
using System.Collections.Generic;
public class Class1
{
public void Method()
{
Container $$
}
}
public class Container : ContainerBase { }
public class ContainerBase : IEnumerable<ContainerBase> { }
";
await VerifyItemExistsAsync(markup, "container");
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsIEnumerableOfType2()
{
var markup = @"
using System.Collections.Generic;
public class Class1
{
public void Method()
{
Container $$
}
}
public class ContainerBase : IEnumerable<Container> { }
public class Container : ContainerBase { }
";
await VerifyItemExistsAsync(markup, "container");
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsIEnumerableOfType3()
{
var markup = @"
using System.Collections.Generic;
public class Class1
{
public void Method()
{
Container $$
}
}
public class Container : IEnumerable<Container> { }
";
await VerifyItemExistsAsync(markup, "container");
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsIEnumerableOfType4()
{
var markup = @"
using System.Collections.Generic;
using System.Threading.Tasks;
public class Class1
{
public void Method()
{
TaskType $$
}
}
public class ContainerBase : IEnumerable<Container> { }
public class Container : ContainerBase { }
public class TaskType : Task<Container> { }
";
await VerifyItemExistsAsync(markup, "taskType");
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsTaskOfType()
{
var markup = @"
using System.Threading.Tasks;
public class Class1
{
public void Method()
{
Container $$
}
}
public class Container : ContainerBase { }
public class ContainerBase : Task<ContainerBase> { }
";
await VerifyItemExistsAsync(markup, "container");
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsTaskOfType2()
{
var markup = @"
using System.Threading.Tasks;
public class Class1
{
public void Method()
{
Container $$
}
}
public class Container : Task<ContainerBase> { }
public class ContainerBase : Container { }
";
await VerifyItemExistsAsync(markup, "container");
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeImplementsTaskOfType3()
{
var markup = @"
using System.Collections.Generic;
using System.Threading.Tasks;
public class Class1
{
public void Method()
{
EnumerableType $$
}
}
public class TaskType : TaskTypeBase { }
public class TaskTypeBase : Task<TaskTypeBase> { }
public class EnumerableType : IEnumerable<TaskType> { }
";
await VerifyItemExistsAsync(markup, "taskTypes");
}
[WorkItem(23590, "https://github.com/dotnet/roslyn/issues/23590")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TypeIsNullableOfNullable()
{
var markup = @"
using System.Collections.Generic;
public class Class1
{
public void Method()
{
// This code isn't legal, but we want to ensure we don't crash in this broken code scenario
IEnumerable<Nullable<int?>> $$
}
}
";
await VerifyItemExistsAsync(markup, "nullables");
}
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task CustomNamingStyleInsideClass()
{
var workspace = WorkspaceFixture.GetWorkspace(ExportProvider);
workspace.TryApplyChanges(workspace.CurrentSolution.WithOptions(workspace.Options.WithChangedOption(
new OptionKey2(NamingStyleOptions.NamingPreferences, LanguageNames.CSharp),
NamesEndWithSuffixPreferences())));
var markup = @"
class Configuration
{
Configuration $$
}
";
await VerifyItemExistsAsync(markup, "ConfigurationField", glyph: (int)Glyph.FieldPublic,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemExistsAsync(markup, "ConfigurationProperty", glyph: (int)Glyph.PropertyPublic,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemExistsAsync(markup, "ConfigurationMethod", glyph: (int)Glyph.MethodPublic,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemIsAbsentAsync(markup, "ConfigurationLocal");
await VerifyItemIsAbsentAsync(markup, "ConfigurationLocalFunction");
}
// Mirror of CustomNamingStyleInsideClass: inside a method body only the local and
// local-function suffix rules should fire; member-level rules must be absent.
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task CustomNamingStyleInsideMethod()
{
var workspace = WorkspaceFixture.GetWorkspace(ExportProvider);
workspace.TryApplyChanges(workspace.CurrentSolution.WithOptions(workspace.Options.WithChangedOption(
new OptionKey2(NamingStyleOptions.NamingPreferences, LanguageNames.CSharp),
NamesEndWithSuffixPreferences())));
var markup = @"
class Configuration
{
void M()
{
Configuration $$
}
}
";
await VerifyItemExistsAsync(markup, "ConfigurationLocal", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemExistsAsync(markup, "ConfigurationLocalFunction", glyph: (int)Glyph.MethodPublic,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
await VerifyItemIsAbsentAsync(markup, "ConfigurationField");
await VerifyItemIsAbsentAsync(markup, "ConfigurationMethod");
await VerifyItemIsAbsentAsync(markup, "ConfigurationProperty");
}
// A name already bound by an enclosing foreach iteration variable must not be suggested
// again; the suggester should disambiguate with a numeric suffix ("classB1").
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseForeachVariableName()
{
var markup = @"
class ClassA
{
class ClassB {}
readonly List<ClassB> classBList;
void M()
{
foreach (var classB in classBList)
{
ClassB $$
}
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
await VerifyItemExistsAsync(markup, "classB1", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A name already taken by a method parameter in scope must not be suggested for a new
// local; a numbered alternative ("classB1") is offered instead.
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseParameterName()
{
var markup = @"
class ClassA
{
class ClassB { }
void M(ClassB classB)
{
ClassB $$
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
await VerifyItemExistsAsync(markup, "classB1", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A local may shadow a property of the enclosing type, so the property's name ("classB")
// is still a valid suggestion — unlike parameters/locals, which share the local scope.
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUsePropertyName()
{
var markup = @"
class ClassA
{
class ClassB { }
ClassB classB { get; set; }
void M()
{
ClassB $$
}
}
";
await VerifyItemExistsAsync(markup, "classB", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// Same shadowing rule as for properties: a field name of the enclosing type remains a
// valid suggestion for a new local.
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUseFieldName()
{
var markup = @"
class ClassA
{
class ClassB { }
ClassB classB;
void M()
{
ClassB $$
}
}
";
await VerifyItemExistsAsync(markup, "classB", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A name used by an earlier local in the same scope must not be suggested again; the
// numbered variant ("classB1") is offered instead.
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseLocalName()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
ClassB classB = new ClassB();
ClassB $$
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
await VerifyItemExistsAsync(markup, "classB1", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// When both the base name and its first numbered variant are taken by earlier locals,
// the suffix keeps incrementing until a free name is found ("classB2").
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseLocalNameMultiple()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
ClassB classB = new ClassB();
ClassB classB1 = new ClassB();
ClassB $$
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
await VerifyItemIsAbsentAsync(markup, "classB1");
await VerifyItemExistsAsync(markup, "classB2", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A local declared later in a nested block still conflicts (a local introduced before
// it would collide once the nested declaration is reached), so "classB" is excluded
// even though the declaration is textually below the completion point.
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseLocalInsideIf()
{
var markup = @"
class ClassA
{
class ClassB { }
void M(bool flag)
{
ClassB $$
if (flag)
{
ClassB classB = new ClassB();
}
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
await VerifyItemExistsAsync(markup, "classB1", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// The enclosing type's own name does not block the suggestion: a local named "classA"
// can shadow the containing class.
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUseClassName()
{
var markup = @"
class classA
{
void M()
{
classA $$
}
}
";
await VerifyItemExistsAsync(markup, "classA", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A local declared in a sibling method is in a different scope and therefore does not
// block the same name from being suggested here.
[WorkItem(31304, "https://github.com/dotnet/roslyn/issues/31304")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUseLocalInDifferentScope()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
ClassB classB = new ClassB();
}
void M2()
{
ClassB $$
}
}
";
await VerifyItemExistsAsync(markup, "classB", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A local of the enclosing method is visible inside a local function, so it must not be
// suggested as a parameter name for that local function.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseLocalAsLocalFunctionParameter()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
ClassB classB = new ClassB();
void LocalM1(ClassB $$) { }
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
}
// Likewise, an enclosing-method local is in scope within the local function's body, so
// the same name must not be suggested for a new local there.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseLocalAsLocalFunctionVariable()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
ClassB classB = new ClassB();
void LocalM1()
{
ClassB $$
}
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
}
// The scope check goes through every level of local-function nesting: an outer-method
// local still blocks the name two local functions deep.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseLocalInNestedLocalFunction()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
ClassB classB = new ClassB();
void LocalM1()
{
void LocalM2()
{
ClassB $$
}
}
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
}
// A parameter of an enclosing local function is visible in a nested local function and
// therefore blocks the same name there.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionDoesNotUseLocalFunctionParameterInNestedLocalFunction()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
void LocalM1(ClassB classB)
{
void LocalM2()
{
ClassB $$
}
}
}
}
";
await VerifyItemIsAbsentAsync(markup, "classB");
}
// Sibling local functions have independent scopes: a parameter name used by LocalM1
// is still a valid parameter suggestion for LocalM2.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUseLocalFunctionParameterAsParameter()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
void LocalM1(ClassB classB) { }
void LocalM2(ClassB $$) { }
}
}
";
await VerifyItemExistsAsync(markup, "classB", glyph: (int)Glyph.Parameter,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A local declared inside one local function does not leak into a sibling local
// function, so the same name can be suggested as the sibling's parameter.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUseLocalFunctionVariableAsParameter()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
void LocalM1()
{
ClassB classB
}
void LocalM2(ClassB $$) { }
}
}
";
await VerifyItemExistsAsync(markup, "classB", glyph: (int)Glyph.Parameter,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A sibling local function's parameter does not occupy the name inside this local
// function's body, so "classB" is offered as a local here.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUseLocalFunctionParameterAsVariable()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
void LocalM1(ClassB classB) { }
void LocalM2()
{
ClassB $$
}
}
}
";
await VerifyItemExistsAsync(markup, "classB", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// A local declared in a sibling local function is out of scope here, so the same name
// is a valid suggestion for a new local.
[WorkItem(35891, "https://github.com/dotnet/roslyn/issues/35891")]
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
public async Task TestCompletionCanUseLocalFunctionVariableAsVariable()
{
var markup = @"
class ClassA
{
class ClassB { }
void M()
{
void LocalM1()
{
ClassB classB
}
void LocalM2()
{
ClassB $$
}
}
}
";
await VerifyItemExistsAsync(markup, "classB", glyph: (int)Glyph.Local,
expectedDescriptionOrNull: CSharpFeaturesResources.Suggested_name);
}
// With two naming rules that both produce the same camelCase local name (see
// MultipleCamelCaseLocalRules), an existing "myClass" local forces the numbered
// fallback "myClass1" rather than a duplicate suggestion.
[Fact, Trait(Traits.Feature, Traits.Features.Completion)]
[WorkItem(43816, "https://github.com/dotnet/roslyn/pull/43816")]
public async Task ConflictingLocalVariable()
{
var workspace = WorkspaceFixture.GetWorkspace(ExportProvider);
workspace.TryApplyChanges(workspace.CurrentSolution.WithOptions(workspace.Options.WithChangedOption(
new OptionKey2(NamingStyleOptions.NamingPreferences, LanguageNames.CSharp),
MultipleCamelCaseLocalRules())));
var markup = @"
public class MyClass
{
void M()
{
MyClass myClass;
MyClass $$
}
}
";
await VerifyItemExistsAsync(markup, "myClass1", glyph: (int)Glyph.Local);
}
// Builds naming preferences containing two deliberately duplicated camelCase rules for
// locals (both named "Local1"), used to exercise conflicting-rule handling.
private static NamingStylePreferences MultipleCamelCaseLocalRules()
{
    var ruleSet = new[]
    {
        MakeLocalRule(new SymbolKindOrTypeKind(SymbolKind.Local), "Local1"),
        MakeLocalRule(new SymbolKindOrTypeKind(SymbolKind.Local), "Local1"),
    };

    var specifications = ruleSet.Select(t => t.specification).ToImmutableArray();
    var namingStyles = ruleSet.Select(t => t.style).ToImmutableArray();
    var serializableRules = ruleSet.Select(t => CreateRule(t.specification, t.style)).ToImmutableArray();
    return new NamingStylePreferences(specifications, namingStyles, serializableRules);

    // Local function: pairs a symbol specification for the given kind with a camelCase style.
    static (SymbolSpecification specification, NamingStyle style) MakeLocalRule(SymbolKindOrTypeKind kind, string name)
    {
        var symbolSpecification = new SymbolSpecification(
            id: null,
            symbolSpecName: name,
            ImmutableArray.Create(kind));
        var namingStyle = new NamingStyle(
            Guid.NewGuid(),
            name,
            capitalizationScheme: Capitalization.CamelCase);
        return (symbolSpecification, namingStyle);
    }
}
// Builds naming preferences where each symbol kind gets a PascalCase style that appends
// the kind's name as a suffix (e.g. fields end in "Field", locals in "Local").
private static NamingStylePreferences NamesEndWithSuffixPreferences()
{
    var suffixRules = new[]
    {
        MakeSuffixRule(new SymbolKindOrTypeKind(SymbolKind.Field), "Field"),
        MakeSuffixRule(new SymbolKindOrTypeKind(SymbolKind.Property), "Property"),
        MakeSuffixRule(new SymbolKindOrTypeKind(MethodKind.Ordinary), "Method"),
        MakeSuffixRule(new SymbolKindOrTypeKind(SymbolKind.Local), "Local"),
        MakeSuffixRule(new SymbolKindOrTypeKind(MethodKind.LocalFunction), "LocalFunction"),
    };

    var specifications = suffixRules.Select(t => t.specification).ToImmutableArray();
    var namingStyles = suffixRules.Select(t => t.style).ToImmutableArray();
    var serializableRules = suffixRules.Select(t => CreateRule(t.specification, t.style)).ToImmutableArray();
    return new NamingStylePreferences(specifications, namingStyles, serializableRules);

    // Local function: builds a (specification, style) pair whose style appends the suffix.
    static (SymbolSpecification specification, NamingStyle style) MakeSuffixRule(SymbolKindOrTypeKind kind, string suffix)
    {
        var symbolSpecification = new SymbolSpecification(
            id: null,
            symbolSpecName: suffix,
            ImmutableArray.Create(kind),
            accessibilityList: default,
            modifiers: default);
        var namingStyle = new NamingStyle(
            Guid.NewGuid(),
            name: suffix,
            capitalizationScheme: Capitalization.PascalCase,
            prefix: "",
            suffix: suffix,
            wordSeparator: "");
        return (symbolSpecification, namingStyle);
    }
}
// Links the given symbol specification to the given naming style as a serializable
// rule, always enforced at Error severity so violations fail the suggestion tests.
private static SerializableNamingRule CreateRule(SymbolSpecification specification, NamingStyle style)
{
    var rule = new SerializableNamingRule()
    {
        SymbolSpecificationID = specification.ID,
        NamingStyleID = style.ID,
        EnforcementLevel = ReportDiagnostic.Error,
    };
    return rule;
}
}
}
|
apache-2.0
|
cheng-li/pyramid
|
core/src/main/java/edu/neu/ccs/pyramid/calibration/Bucketer.java
|
3000
|
package edu.neu.ccs.pyramid.calibration;
import edu.neu.ccs.pyramid.util.Pair;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
public class Bucketer {

    /**
     * Groups (x, y) pairs by x into buckets of roughly equal size and averages each bucket.
     *
     * @param pairs                 the (x, y) points; x is the grouping key
     * @param numPointsInEachBucket desired number of points per bucket; must be positive
     * @return per-bucket average x, average y, and point count
     */
    public static Result groupWithEqualSize(List<Pair<Double,Double>> pairs, int numPointsInEachBucket){
        double[] x = pairs.stream().mapToDouble(p->p.getFirst()).toArray();
        double[] y = pairs.stream().mapToDouble(p->p.getSecond()).toArray();
        return groupWithEqualSize(x,y,numPointsInEachBucket);
    }

    /**
     * Groups points by x (ascending) into buckets of numPointsInEachBucket points each.
     * The last bucket absorbs the remainder and may contain more points.
     *
     * @param x group by x
     * @param y values paired element-wise with x
     * @param numPointsInEachBucket desired number of points per bucket; must be positive
     * @return per-bucket average x, average y, and point count
     * @throws IllegalArgumentException if numPointsInEachBucket is not positive
     */
    public static Result groupWithEqualSize(double[] x, double[] y, int numPointsInEachBucket){
        if (numPointsInEachBucket <= 0){
            // Previously this fell through to an ArithmeticException (divide by zero);
            // fail fast with a clear message instead.
            throw new IllegalArgumentException(
                    "numPointsInEachBucket must be positive: " + numPointsInEachBucket);
        }
        // Bug fix: when 0 < x.length < numPointsInEachBucket the integer division yielded
        // 0 buckets and the loop below then clamped bucketIndex to -1, throwing
        // ArrayIndexOutOfBoundsException. Clamp to a single bucket for any non-empty input.
        int numBuckets = x.length == 0 ? 0 : Math.max(1, x.length / numPointsInEachBucket);
        List<Pair<Double,Double>> pairs = new ArrayList<>();
        for (int i=0;i<x.length;i++){
            pairs.add(new Pair<>(x[i],y[i]));
        }
        Comparator<Pair<Double,Double>> comparator = Comparator.comparing(Pair::getFirst);
        List<Pair<Double,Double>> sortedPairs = pairs.stream().sorted(comparator).collect(Collectors.toList());
        double[] averageX = new double[numBuckets];
        double[] averageY = new double[numBuckets];
        double[] count = new double[numBuckets];
        for (int i=0;i<sortedPairs.size();i++){
            int bucketIndex = i/numPointsInEachBucket;
            if (bucketIndex>=numBuckets){
                // remainder points fold into the last bucket
                bucketIndex = numBuckets - 1;
            }
            averageX[bucketIndex] += sortedPairs.get(i).getFirst();
            averageY[bucketIndex] += sortedPairs.get(i).getSecond();
            count[bucketIndex] += 1;
        }
        for (int a=0;a<averageX.length;a++){
            averageX[a] /= count[a];
            averageY[a] /= count[a];
        }
        Result result = new Result();
        result.averageX = averageX;
        result.averageY = averageY;
        result.count = count;
        return result;
    }

    /**
     * Holder for bucketing output; the three arrays are parallel, indexed by bucket.
     */
    public static class Result implements Serializable {
        private static final long serialVersionUID = 1L;
        double[] averageX; // mean x per bucket
        double[] averageY; // mean y per bucket
        double[] count;    // number of points per bucket

        public double[] getAverageX() {
            return averageX;
        }

        public double[] getAverageY() {
            return averageY;
        }

        public double[] getCount() {
            return count;
        }

        @Override
        public String toString() {
            final StringBuilder sb = new StringBuilder("Result{");
            sb.append("averageX=").append(Arrays.toString(averageX)).append("\n");
            sb.append("averageY=").append(Arrays.toString(averageY)).append("\n");
            sb.append("count=").append(Arrays.toString(count)).append("\n");
            sb.append('}');
            return sb.toString();
        }
    }
}
|
apache-2.0
|
yukixz/qqbot
|
twitter/src/config.template.js
|
167
|
// Twitter OAuth 1.0a credential placeholders (consumer key/secret identify the app;
// access key/token authorize the acting account). Fill these in per deployment —
// this is a template file, presumably copied to the real config before use (confirm
// the expected filename against whatever imports these constants).
export const TWITTER_CONSUMER_KEY = ''
export const TWITTER_CONSUMER_SECRET = ''
export const TWITTER_ACCESS_KEY = ''
export const TWITTER_ACCESS_TOKEN = ''
|
apache-2.0
|
googleapis/java-dialogflow
|
proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/NotificationConfigOrBuilder.java
|
3148
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/conversation_profile.proto
package com.google.cloud.dialogflow.v2;
/**
 * Read accessors for the {@code google.cloud.dialogflow.v2.NotificationConfig} message,
 * implemented by both the message and its builder. This file is generated by protoc
 * from conversation_profile.proto — do not hand-edit; regenerate instead.
 */
public interface NotificationConfigOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.dialogflow.v2.NotificationConfig)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Name of the Pub/Sub topic to publish conversation
* events like
* [CONVERSATION_STARTED][google.cloud.dialogflow.v2.ConversationEvent.Type.CONVERSATION_STARTED] as
* serialized [ConversationEvent][google.cloud.dialogflow.v2.ConversationEvent] protos.
* Notification works for phone calls, if this topic either is in the same
* project as the conversation or you grant `service-<Conversation Project
* Number>@gcp-sa-dialogflow.iam.gserviceaccount.com` the `Dialogflow Service
* Agent` role in the topic project.
* Format: `projects/<Project ID>/locations/<Location ID>/topics/<Topic ID>`.
* </pre>
*
* <code>string topic = 1;</code>
*
* @return The topic.
*/
java.lang.String getTopic();
/**
*
*
* <pre>
* Name of the Pub/Sub topic to publish conversation
* events like
* [CONVERSATION_STARTED][google.cloud.dialogflow.v2.ConversationEvent.Type.CONVERSATION_STARTED] as
* serialized [ConversationEvent][google.cloud.dialogflow.v2.ConversationEvent] protos.
* Notification works for phone calls, if this topic either is in the same
* project as the conversation or you grant `service-<Conversation Project
* Number>@gcp-sa-dialogflow.iam.gserviceaccount.com` the `Dialogflow Service
* Agent` role in the topic project.
* Format: `projects/<Project ID>/locations/<Location ID>/topics/<Topic ID>`.
* </pre>
*
* <code>string topic = 1;</code>
*
* @return The bytes for topic.
*/
com.google.protobuf.ByteString getTopicBytes();
/**
*
*
* <pre>
* Format of message.
* </pre>
*
* <code>.google.cloud.dialogflow.v2.NotificationConfig.MessageFormat message_format = 2;</code>
*
* @return The enum numeric value on the wire for messageFormat.
*/
int getMessageFormatValue();
/**
*
*
* <pre>
* Format of message.
* </pre>
*
* <code>.google.cloud.dialogflow.v2.NotificationConfig.MessageFormat message_format = 2;</code>
*
* @return The messageFormat.
*/
com.google.cloud.dialogflow.v2.NotificationConfig.MessageFormat getMessageFormat();
}
|
apache-2.0
|
THESLA/Saber_Prototipe
|
scripts/controllers/registrarController.js
|
312
|
(function () {
    'use strict';

    // Registers 'registrarController' on the application's shared Angular module.
    // NOTE(review): assumes a global `app` module object exists — confirm it is created
    // before this script loads. The return value of app.controller() was previously
    // stored in an unused variable; the dead assignment and commented-out module code
    // have been removed.
    app.controller('registrarController', registrarController);

    // Explicit DI annotation so minification does not break injection.
    registrarController.$inject = [ '$scope'];

    /**
     * Controller for the "registrar" (registration) view.
     * Currently empty — no scope state or handlers are defined yet.
     * @param {Object} $scope Angular scope injected by the framework.
     */
    function registrarController($scope) {
    }
})();
|
apache-2.0
|
NationalSecurityAgency/ghidra
|
Ghidra/Features/PDB/src/main/java/ghidra/app/util/bin/format/pdb2/pdbreader/PdbReaderOptions.java
|
6360
|
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.bin.format.pdb2.pdbreader;
import java.nio.charset.Charset;
import java.util.List;
import ghidra.framework.options.Options;
import ghidra.program.model.data.CharsetInfo;
import ghidra.util.HelpLocation;
/**
* Options used while reading a PDB ({@link AbstractPdb}) that control various aspects. These
* can be optional values used during our development of this PdbReader. Currently included are
* a field to control debug logging and {@link Charset} values used for String interpretation.
*/
// NOTE(review): an options holder that `extends Exception` is highly unusual — it makes
// this class throwable and Serializable (with no serialVersionUID declared). This looks
// accidental, but removing the superclass would change the public type hierarchy, so it
// is only flagged here; confirm with the maintainers before changing it.
public class PdbReaderOptions extends Exception {

	// Developer turn on/off options that are in still in development.
	private static final boolean developerMode = false;

	// Sets the one-byte Charset to be used for PDB processing.
	// NOTE: This "Option" is not intended as a permanent part of this analyzer.  Should be
	// replaced by target-specific Charset.
	private static final String OPTION_NAME_ONE_BYTE_CHARSET_NAME = "PDB One-Byte Charset Name";
	private static final String OPTION_DESCRIPTION_ONE_BYTE_CHARSET_NAME =
		"Charset used for processing of one-byte (or multi) encoded Strings: " +
			PdbReaderOptions.getOneByteCharsetNames();
	private static final String DEFAULT_ONE_BYTE_CHARSET_NAME = CharsetInfo.UTF8;
	private String oneByteCharsetName;

	// Sets the wchar_t Charset to be used for PDB processing.
	// NOTE: This "Option" is not intended as a permanent part of this analyzer.  Should be
	// replaced by target-program-specific Charset.
	private static final String OPTION_NAME_WCHAR_CHARSET_NAME = "PDB Wchar_t Charset Name";
	private static final String OPTION_DESCRIPTION_WCHAR_CHARSET_NAME =
		"Charset used for processing of wchar_t encoded Strings: " +
			PdbReaderOptions.getTwoByteCharsetNames();
	private static final String DEFAULT_TWO_BYTE_CHARSET_NAME = CharsetInfo.UTF16;
	private String wideCharCharsetName;

	//==============================================================================================
	// Candidate charset names, partitioned by character width (1 byte vs. 2 bytes).
	private static List<String> oneByteCharsetNames =
		CharsetInfo.getInstance().getCharsetNamesWithCharSize(1);
	private static List<String> twoByteCharsetNames =
		CharsetInfo.getInstance().getCharsetNamesWithCharSize(2);

	// Resolved Charset objects corresponding to the selected names above.
	private Charset oneByteCharset;
	private Charset wideCharset;

	/**
	 * Constructor.
	 */
	public PdbReaderOptions() {
		setDefaults();
	}

	/**
	 * Registers the developer-mode charset options on the given options container.
	 * No-op unless {@code developerMode} is enabled at compile time.
	 * @param options the options container to register with.
	 */
	public void registerOptions(Options options) {
		HelpLocation help = null;
		if (developerMode) {
			options.registerOption(OPTION_NAME_ONE_BYTE_CHARSET_NAME, oneByteCharsetName, help,
				OPTION_DESCRIPTION_ONE_BYTE_CHARSET_NAME);
			options.registerOption(OPTION_NAME_WCHAR_CHARSET_NAME, wideCharCharsetName, help,
				OPTION_DESCRIPTION_WCHAR_CHARSET_NAME);
		}
	}

	/**
	 * Reads the developer-mode charset options back from the given options container and
	 * applies them. No-op unless {@code developerMode} is enabled at compile time.
	 * @param options the options container to read from.
	 */
	public void loadOptions(Options options) {
		if (developerMode) {
			oneByteCharsetName =
				options.getString(OPTION_NAME_ONE_BYTE_CHARSET_NAME, oneByteCharsetName);
			setOneByteCharsetForName(oneByteCharsetName);
			wideCharCharsetName =
				options.getString(OPTION_NAME_WCHAR_CHARSET_NAME, wideCharCharsetName);
			setWideCharCharsetForName(wideCharCharsetName);
		}
	}

	/**
	 * Set the options to their default values
	 */
	public void setDefaults() {
		oneByteCharsetName = DEFAULT_ONE_BYTE_CHARSET_NAME;
		wideCharCharsetName = DEFAULT_TWO_BYTE_CHARSET_NAME;
		setOneByteCharsetForName(oneByteCharsetName);
		setWideCharCharsetForName(wideCharCharsetName);
	}

	/**
	 * Returns list of Charsets names that encode one byte characters.
	 * @return Charsets that encode one byte characters.
	 */
	public static List<String> getOneByteCharsetNames() {
		return oneByteCharsetNames;
	}

	/**
	 * Returns list of Charsets names that encode two byte characters.
	 * @return Charsets that encode two byte characters.
	 */
	public static List<String> getTwoByteCharsetNames() {
		return twoByteCharsetNames;
	}

	/**
	 * Sets the one-byte Charset to use for PDB processing.
	 * @param name Name of the Charset to use.
	 * @return this, so options can be daisy-chained.
	 */
	public PdbReaderOptions setOneByteCharsetForName(String name) {
		if (!oneByteCharsetNames.contains(name)) {
			throw new IllegalArgumentException("Unknown OneByteCharset: " + name);
		}
		oneByteCharset = Charset.forName(name);
		oneByteCharsetName = name;
		return this;
	}

	/**
	 * Sets the Wchar Charset to use for PDB processing.
	 * @param name Name of the Charset to use.
	 * @return this, so options can be daisy-chained.
	 */
	public PdbReaderOptions setWideCharCharsetForName(String name) {
		if (!twoByteCharsetNames.contains(name)) {
			throw new IllegalArgumentException("Unknown TwoByteCharset: " + name);
		}
		wideCharset = Charset.forName(name);
		wideCharCharsetName = name;
		return this;
	}

	/**
	 * Returns the name of the one-byte Charset in use for PDB processing.
	 * @return the name of the Charset.
	 */
	public String getOneByteCharsetName() {
		return oneByteCharsetName;
	}

	/**
	 * Returns the name of the two-byte Charset in use for PDB processing.
	 * Alias of {@link #getWideCharCharsetName()} — both return the same field.
	 * @return the name of the Charset.
	 */
	public String getTwoByteCharsetName() {
		return wideCharCharsetName;
	}

	/**
	 * Returns the name of the Wchar Charset in use for PDB processing.
	 * @return the name of the Wchar Charset.
	 */
	public String getWideCharCharsetName() {
		return wideCharCharsetName;
	}

	/**
	 * Returns the one-byte Charset in use for PDB processing.
	 * @return the Charset.
	 */
	public Charset getOneByteCharset() {
		return oneByteCharset;
	}

	/**
	 * Returns the two-byte Charset in use for PDB processing.
	 * Alias of {@link #getWideCharCharset()} — both return the same field.
	 * @return the Charset.
	 */
	public Charset getTwoByteCharset() {
		return wideCharset;
	}

	/**
	 * Returns the Wchar Charset in use for PDB processing.
	 * @return the Wchar Charset.
	 */
	public Charset getWideCharCharset() {
		return wideCharset;
	}
}
|
apache-2.0
|
IanLewis/dashboard
|
src/app/backend/resource/workload/workload_test.go
|
10956
|
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package workload
import (
"reflect"
"testing"
"github.com/kubernetes/dashboard/src/app/backend/resource/common"
"github.com/kubernetes/dashboard/src/app/backend/resource/daemonset"
"github.com/kubernetes/dashboard/src/app/backend/resource/dataselect"
"github.com/kubernetes/dashboard/src/app/backend/resource/deployment"
"github.com/kubernetes/dashboard/src/app/backend/resource/job"
"github.com/kubernetes/dashboard/src/app/backend/resource/metric"
"github.com/kubernetes/dashboard/src/app/backend/resource/pod"
"github.com/kubernetes/dashboard/src/app/backend/resource/replicaset"
"github.com/kubernetes/dashboard/src/app/backend/resource/replicationcontroller"
"github.com/kubernetes/dashboard/src/app/backend/resource/statefulset"
metaV1 "k8s.io/apimachinery/pkg/apis/meta/v1"
api "k8s.io/client-go/pkg/api/v1"
apps "k8s.io/client-go/pkg/apis/apps/v1beta1"
batch "k8s.io/client-go/pkg/apis/batch/v1"
extensions "k8s.io/client-go/pkg/apis/extensions/v1beta1"
)
// TestGetWorkloadsFromChannels checks that GetWorkloadsFromChannels assembles a
// Workloads value from pre-filled resource channels, converting each raw Kubernetes
// API list into the dashboard's own resource representations.
func TestGetWorkloadsFromChannels(t *testing.T) {
	replicas := int32(0)
	var jobCompletions int32
	// Each case pairs raw Kubernetes API lists (inputs pushed onto the channels)
	// with the expected dashboard resource slices (outputs).
	cases := []struct {
		k8sRs          extensions.ReplicaSetList
		k8sJobs        batch.JobList
		k8sDaemonSet   extensions.DaemonSetList
		k8sDeployment  extensions.DeploymentList
		k8sRc          api.ReplicationControllerList
		k8sPod         api.PodList
		k8sStatefulSet apps.StatefulSetList
		rcs            []replicationcontroller.ReplicationController
		rs             []replicaset.ReplicaSet
		jobs           []job.Job
		daemonset      []daemonset.DaemonSet
		deployment     []deployment.Deployment
		pod            []pod.Pod
		statefulSet    []statefulset.StatefulSet
	}{
		// Case 1: all lists empty -> all output slices empty.
		{
			extensions.ReplicaSetList{},
			batch.JobList{},
			extensions.DaemonSetList{},
			extensions.DeploymentList{},
			api.ReplicationControllerList{},
			api.PodList{},
			apps.StatefulSetList{},
			[]replicationcontroller.ReplicationController{},
			[]replicaset.ReplicaSet{},
			[]job.Job{},
			[]daemonset.DaemonSet{},
			[]deployment.Deployment{},
			[]pod.Pod{},
			[]statefulset.StatefulSet{},
		},
		// Case 2: one object of each workload kind -> one converted item per kind.
		{
			extensions.ReplicaSetList{
				Items: []extensions.ReplicaSet{
					{
						ObjectMeta: metaV1.ObjectMeta{Name: "rs-name"},
						Spec: extensions.ReplicaSetSpec{
							Replicas: &replicas,
							Selector: &metaV1.LabelSelector{},
						},
					}},
			},
			batch.JobList{
				Items: []batch.Job{
					{
						ObjectMeta: metaV1.ObjectMeta{Name: "job-name"},
						Spec: batch.JobSpec{
							Selector:    &metaV1.LabelSelector{},
							Completions: &jobCompletions,
						},
					}},
			},
			extensions.DaemonSetList{
				Items: []extensions.DaemonSet{
					{
						ObjectMeta: metaV1.ObjectMeta{Name: "ds-name"},
						Spec:       extensions.DaemonSetSpec{Selector: &metaV1.LabelSelector{}},
					}},
			},
			extensions.DeploymentList{
				Items: []extensions.Deployment{
					{
						ObjectMeta: metaV1.ObjectMeta{Name: "deployment-name"},
						Spec: extensions.DeploymentSpec{
							Selector: &metaV1.LabelSelector{},
							Replicas: &replicas,
						},
					}},
			},
			api.ReplicationControllerList{
				Items: []api.ReplicationController{{
					ObjectMeta: metaV1.ObjectMeta{Name: "rc-name"},
					Spec: api.ReplicationControllerSpec{
						Replicas: &replicas,
						Template: &api.PodTemplateSpec{},
					},
				}},
			},
			api.PodList{},
			apps.StatefulSetList{},
			[]replicationcontroller.ReplicationController{{
				ObjectMeta: common.ObjectMeta{
					Name: "rc-name",
				},
				TypeMeta: common.TypeMeta{Kind: common.ResourceKindReplicationController},
				Pods: common.PodInfo{
					Warnings: []common.Event{},
				},
			}},
			[]replicaset.ReplicaSet{{
				ObjectMeta: common.ObjectMeta{
					Name: "rs-name",
				},
				TypeMeta: common.TypeMeta{Kind: common.ResourceKindReplicaSet},
				Pods: common.PodInfo{
					Warnings: []common.Event{},
				},
			}},
			[]job.Job{{
				ObjectMeta: common.ObjectMeta{
					Name: "job-name",
				},
				TypeMeta: common.TypeMeta{Kind: common.ResourceKindJob},
				Pods: common.PodInfo{
					Warnings: []common.Event{},
				},
			}},
			[]daemonset.DaemonSet{{
				ObjectMeta: common.ObjectMeta{
					Name: "ds-name",
				},
				TypeMeta: common.TypeMeta{Kind: common.ResourceKindDaemonSet},
				Pods: common.PodInfo{
					Warnings: []common.Event{},
				},
			}},
			[]deployment.Deployment{{
				ObjectMeta: common.ObjectMeta{
					Name: "deployment-name",
				},
				TypeMeta: common.TypeMeta{Kind: common.ResourceKindDeployment},
				Pods: common.PodInfo{
					Warnings: []common.Event{},
				},
			}},
			[]pod.Pod{},
			[]statefulset.StatefulSet{},
		},
	}
	for _, c := range cases {
		// Build the expected Workloads value from the case's expected slices.
		expected := &Workloads{
			ReplicationControllerList: replicationcontroller.ReplicationControllerList{
				ListMeta:               common.ListMeta{TotalItems: len(c.rcs)},
				CumulativeMetrics:      make([]metric.Metric, 0),
				ReplicationControllers: c.rcs,
			},
			ReplicaSetList: replicaset.ReplicaSetList{
				ListMeta:          common.ListMeta{TotalItems: len(c.rs)},
				CumulativeMetrics: make([]metric.Metric, 0),
				ReplicaSets:       c.rs,
			},
			JobList: job.JobList{
				ListMeta:          common.ListMeta{TotalItems: len(c.jobs)},
				CumulativeMetrics: make([]metric.Metric, 0),
				Jobs:              c.jobs,
			},
			DaemonSetList: daemonset.DaemonSetList{
				ListMeta:          common.ListMeta{TotalItems: len(c.daemonset)},
				CumulativeMetrics: make([]metric.Metric, 0),
				DaemonSets:        c.daemonset,
			},
			DeploymentList: deployment.DeploymentList{
				ListMeta:          common.ListMeta{TotalItems: len(c.deployment)},
				CumulativeMetrics: make([]metric.Metric, 0),
				Deployments:       c.deployment,
			},
			PodList: pod.PodList{
				ListMeta:          common.ListMeta{TotalItems: len(c.pod)},
				CumulativeMetrics: make([]metric.Metric, 0),
				Pods:              c.pod,
			},
			StatefulSetList: statefulset.StatefulSetList{
				ListMeta:          common.ListMeta{TotalItems: len(c.statefulSet)},
				CumulativeMetrics: make([]metric.Metric, 0),
				StatefulSets:      c.statefulSet,
			},
		}
		var expectedErr error
		// Buffered channels so all sends below complete before the function under test
		// starts reading. Node/service/pod/event channels are buffered 6 or 7 —
		// presumably one slot per consuming resource type; confirm against the
		// channel consumers in GetWorkloadsFromChannels if these counts change.
		channels := &common.ResourceChannels{
			ReplicaSetList: common.ReplicaSetListChannel{
				List:  make(chan *extensions.ReplicaSetList, 1),
				Error: make(chan error, 1),
			},
			JobList: common.JobListChannel{
				List:  make(chan *batch.JobList, 1),
				Error: make(chan error, 1),
			},
			ReplicationControllerList: common.ReplicationControllerListChannel{
				List:  make(chan *api.ReplicationControllerList, 1),
				Error: make(chan error, 1),
			},
			DaemonSetList: common.DaemonSetListChannel{
				List:  make(chan *extensions.DaemonSetList, 1),
				Error: make(chan error, 1),
			},
			DeploymentList: common.DeploymentListChannel{
				List:  make(chan *extensions.DeploymentList, 1),
				Error: make(chan error, 1),
			},
			StatefulSetList: common.StatefulSetListChannel{
				List:  make(chan *apps.StatefulSetList, 1),
				Error: make(chan error, 1),
			},
			NodeList: common.NodeListChannel{
				List:  make(chan *api.NodeList, 6),
				Error: make(chan error, 6),
			},
			ServiceList: common.ServiceListChannel{
				List:  make(chan *api.ServiceList, 6),
				Error: make(chan error, 6),
			},
			PodList: common.PodListChannel{
				List:  make(chan *api.PodList, 7),
				Error: make(chan error, 7),
			},
			EventList: common.EventListChannel{
				List:  make(chan *api.EventList, 7),
				Error: make(chan error, 7),
			},
		}
		// Pre-fill every channel; the send count must match each channel's buffer size.
		channels.ReplicaSetList.Error <- nil
		channels.ReplicaSetList.List <- &c.k8sRs
		channels.JobList.Error <- nil
		channels.JobList.List <- &c.k8sJobs
		channels.DaemonSetList.Error <- nil
		channels.DaemonSetList.List <- &c.k8sDaemonSet
		channels.DeploymentList.Error <- nil
		channels.DeploymentList.List <- &c.k8sDeployment
		channels.ReplicationControllerList.List <- &c.k8sRc
		channels.ReplicationControllerList.Error <- nil
		channels.StatefulSetList.List <- &c.k8sStatefulSet
		channels.StatefulSetList.Error <- nil
		nodeList := &api.NodeList{}
		channels.NodeList.List <- nodeList
		channels.NodeList.Error <- nil
		channels.NodeList.List <- nodeList
		channels.NodeList.Error <- nil
		channels.NodeList.List <- nodeList
		channels.NodeList.Error <- nil
		channels.NodeList.List <- nodeList
		channels.NodeList.Error <- nil
		channels.NodeList.List <- nodeList
		channels.NodeList.Error <- nil
		channels.NodeList.List <- nodeList
		channels.NodeList.Error <- nil
		serviceList := &api.ServiceList{}
		channels.ServiceList.List <- serviceList
		channels.ServiceList.Error <- nil
		channels.ServiceList.List <- serviceList
		channels.ServiceList.Error <- nil
		channels.ServiceList.List <- serviceList
		channels.ServiceList.Error <- nil
		channels.ServiceList.List <- serviceList
		channels.ServiceList.Error <- nil
		channels.ServiceList.List <- serviceList
		channels.ServiceList.Error <- nil
		channels.ServiceList.List <- serviceList
		channels.ServiceList.Error <- nil
		podList := &c.k8sPod
		channels.PodList.List <- podList
		channels.PodList.Error <- nil
		channels.PodList.List <- podList
		channels.PodList.Error <- nil
		channels.PodList.List <- podList
		channels.PodList.Error <- nil
		channels.PodList.List <- podList
		channels.PodList.Error <- nil
		channels.PodList.List <- podList
		channels.PodList.Error <- nil
		channels.PodList.List <- podList
		channels.PodList.Error <- nil
		channels.PodList.List <- podList
		channels.PodList.Error <- nil
		eventList := &api.EventList{}
		channels.EventList.List <- eventList
		channels.EventList.Error <- nil
		channels.EventList.List <- eventList
		channels.EventList.Error <- nil
		channels.EventList.List <- eventList
		channels.EventList.Error <- nil
		channels.EventList.List <- eventList
		channels.EventList.Error <- nil
		channels.EventList.List <- eventList
		channels.EventList.Error <- nil
		channels.EventList.List <- eventList
		channels.EventList.Error <- nil
		channels.EventList.List <- eventList
		channels.EventList.Error <- nil
		actual, err := GetWorkloadsFromChannels(channels, nil, dataselect.NoMetrics)
		if !reflect.DeepEqual(actual, expected) {
			t.Errorf("GetWorkloadsFromChannels() ==\n %#v\nExpected: %#v", actual, expected)
		}
		if !reflect.DeepEqual(err, expectedErr) {
			t.Errorf("error from GetWorkloadsFromChannels() == %#v, expected %#v", err, expectedErr)
		}
	}
}
|
apache-2.0
|
fconFGDCA/DetailCADA
|
resources/sap/ui/commons/RowRepeaterFilter-dbg.js
|
1856
|
/*!
 * SAP UI development toolkit for HTML5 (SAPUI5/OpenUI5)
 * (c) Copyright 2009-2015 SAP SE or an SAP affiliate company.
 * Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
 */

// Provides control sap.ui.commons.RowRepeaterFilter.
sap.ui.define(['jquery.sap.global', './library', 'sap/ui/core/Element'],
	function(jQuery, library, Element) {
	"use strict";

	/**
	 * Constructor for a new RowRepeaterFilter.
	 *
	 * @param {string} [sId] id for the new control, generated automatically if no id is given
	 * @param {object} [mSettings] initial settings for the new control
	 *
	 * @class
	 * This element is used by the RowRepeater and allows defining a filter in this context along with the related data such as a text and an icon.
	 * @extends sap.ui.core.Element
	 *
	 * @author SAP SE
	 * @version 1.30.4-SNAPSHOT
	 *
	 * @constructor
	 * @public
	 * @alias sap.ui.commons.RowRepeaterFilter
	 * @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
	 */
	// Pure metadata element: it carries no behavior of its own, the
	// RowRepeater reads these properties when applying the filter.
	var RowRepeaterFilter = Element.extend("sap.ui.commons.RowRepeaterFilter", /** @lends sap.ui.commons.RowRepeaterFilter.prototype */ { metadata : {

		library : "sap.ui.commons",
		properties : {

			/**
			 * The filter title if needed for display.
			 */
			text : {type : "string", group : "Appearance", defaultValue : null},

			/**
			 * The filter icon if needed for display.
			 */
			icon : {type : "string", group : "Appearance", defaultValue : null},

			/**
			 * The set of filter objects.
			 */
			filters : {type : "object", group : "Data", defaultValue : null}
		}
	}});

	// Template scaffolding intentionally left commented out: the element
	// currently needs no init behavior.
	///**
	// * This file defines behavior for the control,
	// */
	//sap.ui.commons.RowRepeaterFilter.prototype.init = function(){
	//   // do something for initialization...
	//};

	return RowRepeaterFilter;

}, /* bExport= */ true);
|
apache-2.0
|
ydubreuil/zendesk-java-client
|
src/main/java/org/zendesk/client/v2/model/Metric.java
|
6822
|
package org.zendesk.client.v2.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import java.util.Date;
/**
* @author jyrij
*/
/**
 * Ticket metric record as returned by the Zendesk Ticket Metrics API.
 *
 * <p>Plain data holder deserialized by Jackson; all timestamps are reported
 * by the API and stored as-is, all counters may be {@code null} when the API
 * omits them.</p>
 *
 * @author jyrij
 */
public class Metric implements Serializable {

    private static final long serialVersionUID = 1L;

    @JsonProperty("id")
    protected Long id;
    @JsonProperty("ticket_id")
    protected Long ticketId;
    @JsonProperty("group_stations")
    protected Long groupStations;
    @JsonProperty("assignee_stations")
    protected Long assigneeStations;
    @JsonProperty("reopens")
    protected Long reopens;
    @JsonProperty("replies")
    protected Long replies;
    @JsonProperty("assignee_updated_at")
    protected Date assigneeUpdatedAt;
    @JsonProperty("requester_updated_at")
    protected Date requesterUpdatedAt;
    @JsonProperty("status_updated_at")
    protected Date lastUpdatedAt;
    @JsonProperty("initially_assigned_at")
    protected Date initiallyUpdatedAt;
    @JsonProperty("assigned_at")
    protected Date assignedAt;
    @JsonProperty("solved_at")
    protected Date solvedAt;
    // NOTE(review): no @JsonProperty here, so Jackson binds this from the
    // literal key "lastCommentAddedAt"; the Zendesk API presumably uses a
    // snake_case key — confirm against the API before adding an annotation.
    protected Date lastCommentAddedAt;
    // NOTE(review): field is named "reply..." but is bound from
    // "first_resolution_time_in_minutes" — confirm this mapping is intended.
    @JsonProperty("first_resolution_time_in_minutes")
    protected ZendeskComboMinutes replyTimeMinutes;
    @JsonProperty("full_resolution_time_in_minutes")
    protected ZendeskComboMinutes fullResolutionTimeMinutes;
    @JsonProperty("agent_wait_time_in_minutes")
    protected ZendeskComboMinutes agentWaitTimeMinutes;
    @JsonProperty("requester_wait_time_in_minutes")
    protected ZendeskComboMinutes requesterWaitTimeMinutes;
    @JsonProperty("created_at")
    protected Date createdAt;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public Date getSolvedAt() {
        return solvedAt;
    }

    public void setSolvedAt(Date solvedAt) {
        this.solvedAt = solvedAt;
    }

    public Long getTicketId() {
        return ticketId;
    }

    public void setTicketId(Long ticketId) {
        this.ticketId = ticketId;
    }

    public Date getCreatedAt() {
        return createdAt;
    }

    public void setCreatedAt(Date createdAt) {
        this.createdAt = createdAt;
    }

    public Long getGroupStations() {
        return groupStations;
    }

    public void setGroupStations(Long groupStations) {
        this.groupStations = groupStations;
    }

    public Long getAssigneeStations() {
        return assigneeStations;
    }

    public void setAssigneeStations(Long assigneeStations) {
        this.assigneeStations = assigneeStations;
    }

    public Long getReopens() {
        return reopens;
    }

    public void setReopens(Long reopens) {
        this.reopens = reopens;
    }

    public Long getReplies() {
        return replies;
    }

    public void setReplies(Long replies) {
        this.replies = replies;
    }

    public Date getAssigneeUpdatedAt() {
        return assigneeUpdatedAt;
    }

    public void setAssigneeUpdatedAt(Date assigneeUpdatedAt) {
        this.assigneeUpdatedAt = assigneeUpdatedAt;
    }

    public Date getRequesterUpdatedAt() {
        return requesterUpdatedAt;
    }

    public void setRequesterUpdatedAt(Date requesterUpdatedAt) {
        this.requesterUpdatedAt = requesterUpdatedAt;
    }

    public Date getLastUpdatedAt() {
        return lastUpdatedAt;
    }

    public void setLastUpdatedAt(Date lastUpdatedAt) {
        this.lastUpdatedAt = lastUpdatedAt;
    }

    public Date getInitiallyUpdatedAt() {
        return initiallyUpdatedAt;
    }

    public void setInitiallyUpdatedAt(Date initiallyUpdatedAt) {
        this.initiallyUpdatedAt = initiallyUpdatedAt;
    }

    public Date getAssignedAt() {
        return assignedAt;
    }

    public void setAssignedAt(Date assignedAt) {
        this.assignedAt = assignedAt;
    }

    public Date getLastCommentAddedAt() {
        return lastCommentAddedAt;
    }

    public void setLastCommentAddedAt(Date lastCommentAddedAt) {
        this.lastCommentAddedAt = lastCommentAddedAt;
    }

    public ZendeskComboMinutes getReplyTimeMinutes() {
        return replyTimeMinutes;
    }

    public void setReplyTimeMinutes(ZendeskComboMinutes replyTimeMinutes) {
        this.replyTimeMinutes = replyTimeMinutes;
    }

    public ZendeskComboMinutes getFullResolutionTimeMinutes() {
        return fullResolutionTimeMinutes;
    }

    public void setFullResolutionTimeMinutes(ZendeskComboMinutes fullResolutionTimeMinutes) {
        this.fullResolutionTimeMinutes = fullResolutionTimeMinutes;
    }

    public ZendeskComboMinutes getAgentWaitTimeMinutes() {
        return agentWaitTimeMinutes;
    }

    public void setAgentWaitTimeMinutes(ZendeskComboMinutes agentWaitTimeMinutes) {
        this.agentWaitTimeMinutes = agentWaitTimeMinutes;
    }

    public ZendeskComboMinutes getRequesterWaitTimeMinutes() {
        return requesterWaitTimeMinutes;
    }

    public void setRequesterWaitTimeMinutes(ZendeskComboMinutes requesterWaitTimeMinutes) {
        this.requesterWaitTimeMinutes = requesterWaitTimeMinutes;
    }

    @Override
    public String toString() {
        return "Metric{" +
                "id=" + id +
                ", ticketId=" + ticketId +
                ", groupStations=" + groupStations +
                ", assigneeStations=" + assigneeStations +
                ", reopens=" + reopens +
                ", replies=" + replies +
                ", assigneeUpdatedAt=" + assigneeUpdatedAt +
                ", requesterUpdatedAt=" + requesterUpdatedAt +
                ", lastUpdatedAt=" + lastUpdatedAt +
                ", initiallyUpdatedAt=" + initiallyUpdatedAt +
                ", assignedAt=" + assignedAt +
                ", solvedAt=" + solvedAt +
                ", lastCommentAddedAt=" + lastCommentAddedAt +
                ", replyTimeMinutes=" + replyTimeMinutes +
                ", fullResolutionTimeMinutes=" + fullResolutionTimeMinutes +
                ", agentWaitTimeMinutes=" + agentWaitTimeMinutes +
                ", requesterWaitTimeMinutes=" + requesterWaitTimeMinutes +
                ", createdAt=" + createdAt +
                '}';
    }

    /**
     * Calendar/business pair of minute counters used by the *_in_minutes
     * metrics.
     *
     * <p>Fix: declared {@code static}. The original non-static inner class
     * cannot be instantiated by Jackson during deserialization (its implicit
     * constructor requires an enclosing {@code Metric} instance), so binding
     * any of the {@code *_in_minutes} fields would fail at runtime.</p>
     */
    protected static class ZendeskComboMinutes {
        @JsonProperty("calendar")
        protected Long calendarMinutes;
        @JsonProperty("business")
        protected Long businessMinutes;

        public ZendeskComboMinutes() {}

        public Long getCalendarMinutes() {
            return calendarMinutes;
        }

        public void setCalendarMinutes(Long calendarMinutes) {
            this.calendarMinutes = calendarMinutes;
        }

        public Long getBusinessMinutes() {
            return businessMinutes;
        }

        public void setBusinessMinutes(Long businessMinutes) {
            this.businessMinutes = businessMinutes;
        }
    }
}
|
apache-2.0
|
cxxjava/cxxLog4j
|
src/EConfigurator.cpp
|
6467
|
/*
 * EConfigurator.cpp
 *
 * Created on: 2015-7-27
 * Author: cxxjava@163.com
 */

#include "./EConfigurator.hh"
#include "./EOptionConverter.hh"
#include "./ELoggerImp.hh"
#include "./ELayoutFactory.hh"
#include "./EAppenderFactory.hh"
#include "./ESimpleLayout.hh"
#include "./EFileAppender.hh"

namespace efc {
namespace log {

// Property-key prefixes recognized in a log4j-style configuration file.
static const char* CATEGORY_PREFIX = "log4j.category.";
static const char* LOGGER_PREFIX = "log4j.logger.";
static const char* FACTORY_PREFIX = "log4j.factory";
static const char* ADDITIVITY_PREFIX = "log4j.additivity.";
static const char* ROOT_CATEGORY_PREFIX = "log4j.rootCategory";
static const char* ROOT_LOGGER_PREFIX = "log4j.rootLogger";
static const char* APPENDER_PREFIX = "log4j.appender.";
static const char* RENDERER_PREFIX = "log4j.renderer.";
static const char* THRESHOLD_PREFIX = "log4j.threshold";

/**
Special level value signifying inherited behaviour. The current
value of this string constant is <b>inherited</b>. {@link #NULL}
is a synonym. */
static const char* LEVEL_INHERITED = "inherited";

/**
Special level signifying inherited behaviour, same as {@link
#INHERITED}. The current value of this string constant is
<b>null</b>. */
static const char* LEVEL_NULL = "null";

EConfigurator::~EConfigurator() {
}

// Default global threshold is the most permissive level (TRACE).
EConfigurator::EConfigurator() : threshold(ELogger::LEVEL_TRACE) {
}

// Builds a complete EConfiguration from a parsed properties object:
// reads the optional global threshold, collects all declared logger
// names, then parses the root logger followed by every sub-logger.
sp<EConfiguration> EConfigurator::doConfigure(EConfig* properties) {
	EString thresholdStr = EOptionConverter::findAndSubst(THRESHOLD_PREFIX, properties);
	if (thresholdStr.length() > 0) {
		threshold = EOptionConverter::toLevel(thresholdStr.c_str());
	}

	/**
	 * New configure.
	 */
	EArray<EString*> loggerNames;
	findAllLoggers(loggerNames, properties);
	sp<EConfiguration> configure = new EConfiguration(loggerNames, threshold);
	parseRootLogger(configure, properties);
	parseSubLoggers(configure, properties);

	return configure;
}

// Scans every property key and collects the logger names declared with
// either the "log4j.category." or the "log4j.logger." prefix (the name
// is whatever follows the prefix). Ownership of the new EString objects
// is handed to the loggerNames array.
void EConfigurator::findAllLoggers(EArray<EString*>& loggerNames, EConfig* properties) {
	EArray<EString*> keys = properties->keyNames();
	for (int i = 0; i < keys.size(); i++) {
		EString* key = keys[i];
		if (key->startsWith(CATEGORY_PREFIX)
				|| key->startsWith(LOGGER_PREFIX)) {
			EString loggerName;
			if (key->startsWith(CATEGORY_PREFIX)) {
				loggerName = key->substring(eso_strlen(CATEGORY_PREFIX));
			} else if (key->startsWith(LOGGER_PREFIX)) {
				loggerName = key->substring(eso_strlen(LOGGER_PREFIX));
			}
			//printf("loggerName=%s\n", loggerName.c_str());
			loggerNames.add(new EString(loggerName));
		}
	}
}

// Configures the root logger from "log4j.rootLogger", falling back to the
// legacy "log4j.rootCategory" key. When neither is present, a default root
// config with level TRACE and additivity enabled is installed.
void EConfigurator::parseRootLogger(sp<EConfiguration>& configure, EConfig* props) {
	EString value = EOptionConverter::findAndSubst(ROOT_LOGGER_PREFIX, props);
	if (value.length() == 0) {
		value = EOptionConverter::findAndSubst(ROOT_CATEGORY_PREFIX, props);
	}
	if (value.length() > 0) {
		ELoggerConfig* newConfig = parseCategory(configure, props, ROOT_LOGGER_NAME, value.c_str());
		configure->addLoggerConfig(newConfig);
	}
	else {
		configure->addLoggerConfig(new ELoggerConfig(configure.get(), ROOT_LOGGER_NAME, ELogger::LEVEL_TRACE, true));
	}
}

// Configures every non-root logger, in sorted-name order so that parents
// are processed before children. A logger without an explicit definition
// inherits level and additivity from its (already configured) parent.
// NOTE(review): EString::concat appears to mutate in place, which is why
// `prefix` is reassigned before the CATEGORY_PREFIX fallback — confirm.
void EConfigurator::parseSubLoggers(sp<EConfiguration>& configure, EConfig* props) {
	EA<EString*> sortedNames = configure->getSortedLoggerNames();
	for (int i = 0; i < sortedNames.length(); i++) {
		const char* loggerName = sortedNames[i]->c_str();
		//printf("loggerName=%s\n", loggerName);
		EString prefix(LOGGER_PREFIX);
		EString value = EOptionConverter::findAndSubst(prefix.concat(loggerName).c_str(), props);
		if (value.length() == 0) {
			prefix = CATEGORY_PREFIX;
			value = EOptionConverter::findAndSubst(prefix.concat(loggerName).c_str(), props);
		}
		if (value.length() > 0) {
			ELoggerConfig* newConfig = parseCategory(configure, props, loggerName, value.c_str());
			configure->addLoggerConfig(newConfig);
		}
		else {
			ELoggerConfig* parent = configure->getLoggerConfig(loggerName);
			configure->addLoggerConfig(new ELoggerConfig(configure.get(), loggerName, parent->getLevel(), parent->getAdditivity()));
		}
	}
}

// Parses one logger definition line and returns a newly allocated
// ELoggerConfig (caller takes ownership).
ELoggerConfig* EConfigurator::parseCategory(sp<EConfiguration>& configure, EConfig* props,
		const char* loggerName, const char* value) {
	//value = [level|INHERITED|NULL], appenderName, appenderName, ...
	EArray<EString*> vs = EPattern::split(",", value);
	ELogger::Level level = ELogger::LEVEL_TRACE;
	boolean additive = true;

	// parse additivity
	EString additivity(ADDITIVITY_PREFIX);
	additivity.concat(loggerName); //ADDITIVITY_PREFIX + loggerName
	EString v = EOptionConverter::findAndSubst(additivity.c_str(), props);
	// touch additivity only if necessary
	if (v.trim().length() > 0) {
		additive = EBoolean::parseBoolean(v.c_str());
	}

	// parse level
	if (vs.size() > 0) {
		// "inherited"/"null" means: take the level of the parent config.
		if (vs[0]->equalsIgnoreCase(LEVEL_INHERITED) || vs[0]->equalsIgnoreCase(LEVEL_NULL)) {
			level = configure->getLoggerConfig(loggerName)->getLevel();
		}
		else {
			level = EOptionConverter::toLevel(vs[0]->c_str());
		}
	}

	// new logger config.
	ELoggerConfig* lc = new ELoggerConfig(configure.get(), loggerName, level, additive);

	// parse all appender
	// (index 0 holds the level token, so appender names start at index 1)
	for (int i = 1; i < vs.length(); i++) {
		if (vs[i]->trim().length() == 0)
			continue;
		const char* appenderName = vs[i]->c_str();
		sp<EAppender> appender = parseAppender(configure, props, appenderName);
		if (appender != null) {
			lc->addAppender(appender);
		}
	}

	return lc;
}

// Resolves an appender by name, creating and caching it on first use.
// Returns null when the appender cannot be instantiated.
// NOTE(review): the cache stores heap-allocated sp<EAppender> wrappers;
// assumed appenderCache takes ownership of both key and value — confirm.
sp<EAppender> EConfigurator::parseAppender(sp<EConfiguration>& configure, EConfig* props, const char* an) {
	sp<EAppender>* appender = null;
	EString appenderName(an);
	if ((appender = (sp<EAppender>*)appenderCache.get(&appenderName)) != null) {
		return *appender;
	}
	// Appender was not previously initialized.
	EString prefix(APPENDER_PREFIX); //APPENDER_PREFIX + appenderName;
	prefix.concat(appenderName);
	EAppender* newAppender = EAppenderFactory::newInstance(configure.get(), props, prefix, configure);
	if (newAppender) {
		// Attach a layout: the configured one if present, otherwise a
		// simple default when the appender requires a layout at all.
		EString layoutPrefix = prefix + ".layout";
		ELayout* newLayout = ELayoutFactory::newInstance(props, layoutPrefix);
		if (!newLayout && newAppender->requiresLayout()) {
			newLayout = new ESimpleLayout(props, layoutPrefix);
		}
		newAppender->setLayout(newLayout);

		// File-based appenders are additionally registered by file name.
		EFileAppender* fa = dynamic_cast<EFileAppender*>(newAppender);
		if (fa) {
			configure->addLoggerFile(fa->getFile(), fa);
		}

		appender = new sp<EAppender>(newAppender);
		appenderCache.put(new EString(an), appender);
		return *appender;
	}
	return null;
}

} /* namespace log */
} /* namespace efc */
|
apache-2.0
|
lukaszbudnik/migrator
|
common/common_test.go
|
3097
|
package common
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
)
// newTestContext returns a context carrying the fixed request ID "123"
// and an empty log level (empty means the default level, INFO).
func newTestContext() context.Context {
	ctx := context.TODO()
	ctx = context.WithValue(ctx, RequestIDKey{}, "123")
	// log level empty = default log level = INFO
	ctx = context.WithValue(ctx, LogLevelKey{}, "")
	return ctx
}

// newTestContextWithDebugLogLevel is newTestContext with the log level
// overridden to DEBUG.
func newTestContextWithDebugLogLevel() context.Context {
	ctx := newTestContext()
	ctx = context.WithValue(ctx, LogLevelKey{}, debugLevel)
	return ctx
}

// Log* helpers return the formatted message they logged (or "" when the
// message was filtered out), which is what these tests assert on.

func TestLogDebugSkip(t *testing.T) {
	// DEBUG message will be skipped, as the default log level is INFO
	message := LogDebug(newTestContext(), "success")
	assert.Empty(t, message)
}

func TestLogDebug(t *testing.T) {
	// DEBUG message will be returned, as the log level is set to DEBUG
	message := LogDebug(newTestContextWithDebugLogLevel(), "success")
	assert.Equal(t, "success", message)
}

func TestLogInfo(t *testing.T) {
	message := LogInfo(newTestContext(), "success")
	assert.Equal(t, "success", message)
}

func TestLogError(t *testing.T) {
	message := LogError(newTestContext(), "param=%v", 123)
	assert.Equal(t, "param=123", message)
}

func TestLogPanic(t *testing.T) {
	message := LogPanic(newTestContext(), "param=%v", 123456)
	assert.Equal(t, "param=123456", message)
}

func TestLog(t *testing.T) {
	message := Log("INFO", "param=%v", 456)
	assert.Equal(t, "param=456", message)
}

func TestFindNthIndex(t *testing.T) {
	// Index of the 4th '/' (0-based result).
	indx := FindNthIndex("https://lukaszbudniktest.blob.core.windows.net/mycontainer/prod/artefacts", '/', 4)
	assert.Equal(t, 58, indx)
}

func TestFindNthIndexNotFound(t *testing.T) {
	indx := FindNthIndex("https://lukaszbudniktest.blob.core.windows.net/mycontainer", '/', 4)
	assert.Equal(t, -1, indx)
}

// TestShouldLogMessage exercises the full (configured level, message level)
// matrix: a message is logged when its level is at or above the configured one.
func TestShouldLogMessage(t *testing.T) {
	// default logLevel is info, should log all except of debug
	assert.False(t, shouldLogMessage("", debugLevel))
	assert.True(t, shouldLogMessage("", infoLevel))
	assert.True(t, shouldLogMessage("", errorLevel))
	assert.True(t, shouldLogMessage("", panicLevel))
	// debug logLevel logs all
	assert.True(t, shouldLogMessage(debugLevel, debugLevel))
	assert.True(t, shouldLogMessage(debugLevel, infoLevel))
	assert.True(t, shouldLogMessage(debugLevel, errorLevel))
	assert.True(t, shouldLogMessage(debugLevel, panicLevel))
	// info logLevel logs all except of debug
	assert.False(t, shouldLogMessage(infoLevel, debugLevel))
	assert.True(t, shouldLogMessage(infoLevel, infoLevel))
	assert.True(t, shouldLogMessage(infoLevel, errorLevel))
	assert.True(t, shouldLogMessage(infoLevel, panicLevel))
	// error logLevel logs only error or panic
	assert.False(t, shouldLogMessage(errorLevel, debugLevel))
	assert.False(t, shouldLogMessage(errorLevel, infoLevel))
	assert.True(t, shouldLogMessage(errorLevel, errorLevel))
	assert.True(t, shouldLogMessage(errorLevel, panicLevel))
	// panic logLevel logs only panic
	assert.False(t, shouldLogMessage(panicLevel, debugLevel))
	assert.False(t, shouldLogMessage(panicLevel, infoLevel))
	assert.False(t, shouldLogMessage(panicLevel, errorLevel))
	assert.True(t, shouldLogMessage(panicLevel, panicLevel))
}
|
apache-2.0
|
eichhoff/funky
|
de.eich.funky.core/src/main/java/de/eich/decomposer/optimization/Selector.java
|
1739
|
/**
* @author Julian Eichhoff
*
* Copyright 2014 Julian Eichhoff
*/
package de.eich.decomposer.optimization;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.opt4j.core.Individual;
import org.opt4j.core.common.random.Rand;
import org.opt4j.optimizers.ea.SelectorDefault;
import com.google.inject.Inject;
import de.eich.rewriter.AbstractRewriteSystem;
import de.eich.rewriter.Rewriter;
import de.eich.rewriter.derivation.DerivationResult;
public class Selector implements org.opt4j.optimizers.ea.Selector{
@Inject
private Problem problem;
private SelectorDefault selector;
@Inject
public Selector(Rand random){
selector = new SelectorDefault(random);
}
public Collection<Individual> getLames(int arg0, Collection<Individual> arg1) {
Collection<Individual> lames = selector.getLames(arg0, arg1);
return lames;
}
public Collection<Individual> getParents(int arg0, Collection<Individual> all) {
Collection<Individual> parents = selector.getParents(arg0, all);
for(AbstractRewriteSystem rewriter : problem.rewriters){
if(rewriter instanceof Rewriter){
Set<DerivationResult> derivationResultsParents = new HashSet<DerivationResult>();
Set<DerivationResult> derivationResultsAll = new HashSet<DerivationResult>();
for(Individual individual : parents){
derivationResultsParents.add((DerivationResult) individual.getPhenotype());
}
for(Individual individual : all){
derivationResultsAll.add((DerivationResult) individual.getPhenotype());
}
((Rewriter) rewriter).postProcessEvolutionary(derivationResultsParents, derivationResultsAll);
}
}
return parents;
}
public void init(int maxsize) {
selector.init(maxsize);
}
}
|
apache-2.0
|
CircleBinder/Android-CommonLibrary
|
Library/src/main/java/circlebinder/common/event/GenreBuilder.java
|
359
|
package circlebinder.common.event;
/**
 * Fluent builder for {@link Genre} instances.
 */
public final class GenreBuilder {

    // Package-private on purpose: Genre(GenreBuilder) reads these directly.
    int id;
    String name;

    /**
     * Sets the genre identifier.
     *
     * @param id the identifier
     * @return this builder, for chaining
     */
    public GenreBuilder setId(int id) {
        this.id = id;
        return this;
    }

    /**
     * Sets the genre display name.
     *
     * @param name the display name
     * @return this builder, for chaining
     */
    public GenreBuilder setName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Builds a {@link Genre} from the current builder state.
     *
     * @return a new {@code Genre}
     */
    public Genre build() {
        return new Genre(this);
    }
}
|
apache-2.0
|
silly-wacky-3-town-toon/SOURCE-COD
|
toontown/golf/DistributedGolfCourse.py
|
14404
|
from direct.interval.IntervalGlobal import Sequence, Func, Wait, LerpColorScaleInterval, Parallel
from direct.distributed import DistributedObject
from direct.directnotify import DirectNotifyGlobal
from direct.task.Task import Task
from direct.showbase import PythonUtil
from toontown.distributed import DelayDelete
from toontown.distributed.DelayDeletable import DelayDeletable
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from panda3d.core import *
from panda3d.direct import *
from direct.gui.DirectGui import *
from direct.distributed.ClockDelta import *
from direct.fsm.FSM import FSM
from toontown.golf import GolfGlobals
from toontown.golf import GolfScoreBoard
from toontown.golf import GolfRewardDialog
from toontown.toon import ToonHeadFrame
class DistributedGolfCourse(DistributedObject.DistributedObject, FSM, DelayDeletable):
    """Client-side distributed object for a golf course minigame.

    Drives the course-level FSM (Join -> WaitStartHole -> PlayHole ->
    WaitReward -> WaitFinishCourse -> Cleanup), and owns the scoreboard,
    the remote players' head panels, the background music and the reward
    movie shown at the end of the course.
    """
    # Fix: the decompiled source referenced a bare `directNotify` name that
    # is not defined at module scope; the notifier lives in DirectNotifyGlobal.
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedGolfCourse')
    defaultTransitions = {'Off': ['Join'],
     'Join': ['WaitStartHole', 'Cleanup'],
     'WaitStartHole': ['PlayHole', 'Cleanup', 'WaitReward'],
     'PlayHole': ['WaitFinishCourse',
                  'WaitStartHole',
                  'WaitReward',
                  'Cleanup'],
     'WaitReward': ['WaitFinishCourse', 'Cleanup'],
     'WaitFinishCourse': ['Cleanup'],
     'Cleanup': ['Off']}
    id = 0

    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, base.cr)
        FSM.__init__(self, 'Golf_%s_FSM' % self.id)
        self.waitingStartLabel = DirectLabel(text=TTLocalizer.MinigameWaitingForOtherPlayers, text_fg=VBase4(1, 1, 1, 1), relief=None, pos=(-0.6, 0, -0.75), scale=0.075)
        self.waitingStartLabel.hide()
        self.avIdList = []
        self.remoteAvIdList = []
        self.exitedAvIdList = []
        self.toonPanels = []
        self.exitedPanels = []
        self.exitedToonsWithPanels = []
        self.localAvId = base.localAvatar.doId
        self.hasLocalToon = 0
        self.modelCount = 500
        self.cleanupActions = []
        self.courseId = None
        self.scores = {}
        self.curHoleIndex = 0
        self.golfRewardDialog = None
        self.rewardIval = None
        self.scoreBoard = None
        self.exit = False
        self.drivingToons = []
        # Robustness fix: music is only created in load(), which runs solely
        # for the local toon; initialize it so delete() can guard on it.
        self.music = None
        return

    def generate(self):
        self.notify.debug('GOLF COURSE: generate, %s' % self.getTitle())
        DistributedObject.DistributedObject.generate(self)

    def announceGenerate(self):
        """Join the course once the object is fully generated (local toon only)."""
        DistributedObject.DistributedObject.announceGenerate(self)
        if not self.hasLocalToon:
            return
        self.notify.debug('BASE: handleAnnounceGenerate: send setAvatarJoined')
        self.__delayDelete = DelayDelete.DelayDelete(self, 'GolfCourse.self')
        self.request('Join')
        self.normalExit = 1
        count = self.modelCount
        loader.beginBulkLoad('minigame', TTLocalizer.HeadingToMinigameTitle % self.getTitle(), count, 1, TTLocalizer.TIP_GOLF, 0)
        self.load()
        globalClock.syncFrameTime()
        self.onstage()
        self.accept('clientCleanup', self._handleClientCleanup)

    def _handleClientCleanup(self):
        self._destroyDelayDelete()

    def _destroyDelayDelete(self):
        if self.__delayDelete:
            self.__delayDelete.destroy()
            self.__delayDelete = None
        return

    def delete(self):
        """Tear down UI, panels and music when the object is deleted."""
        self.ignore('clientCleanup')
        if self.scoreBoard:
            self.scoreBoard.delete()
        DistributedObject.DistributedObject.delete(self)
        if self.golfRewardDialog:
            self.golfRewardDialog.delete()
        self.cleanUpReward()
        if self.toonPanels:
            for x in xrange(len(self.toonPanels)):
                self.toonPanels[x].destroy()
        self.toonPanels = None
        self.scores = None
        # Robustness fix: load() never ran for non-local toons, so the music
        # may be None; the original called self.music.stop() unconditionally.
        if self.music:
            self.music.stop()
        self.music = None
        for avId in self.avIdList:
            av = base.cr.doId2do.get(avId)
            if av:
                av.show()
        return

    def load(self):
        self.music = base.loadMusic('phase_6/audio/bgm/GZ_PlayGolf.ogg')

    def setCourseReady(self, numHoles, holeIds, coursePar):
        """Distributed update: the AI announces the course layout.

        Initializes per-player score lists, shows all golfers, builds the
        scoreboard and the remote players' head panels.
        """
        self.notify.debug('GOLF COURSE: received setCourseReady')
        if self.state == 'Cleanup':
            return
        self.numHoles = numHoles
        self.holeIds = holeIds
        self.coursePar = coursePar
        for avId in self.avIdList:
            blankScoreList = [0] * self.numHoles
            self.scores[avId] = blankScoreList

        self.request('WaitStartHole')
        for avId in self.avIdList:
            av = base.cr.doId2do.get(avId)
            if av:
                av.show()
                av.reparentTo(render)
                # Parked far below the terrain until the hole places them.
                av.setPos(0, 0, -100)
            else:
                # Fix: the format argument was never applied to the message.
                self.notify.warning('avId =%d does not exist' % avId)

        self.scoreBoard = GolfScoreBoard.GolfScoreBoard(self)
        toonPanelsStart = 0.3
        whichToon = 0
        color = 0
        tpDiff = -0.45
        headPanel = loader.loadModel('phase_6/models/golf/headPanel')
        if self.numPlayers > 0:
            for avId in self.avIdList:
                if not self.localAvId == avId:
                    av = base.cr.doId2do.get(avId)
                    if av:
                        tPanels = ToonHeadFrame.ToonHeadFrame(av, GolfGlobals.PlayerColors[color], headPanel)
                        tPanels.setPos(-1.17, 0, toonPanelsStart + whichToon * tpDiff)
                        tPanels.setScale(0.3, 1, 0.7)
                        tPanels.head.setPos(0, 10, 0.18)
                        tPanels.head.setScale(0.47, 0.2, 0.2)
                        tPanels.tag1.setPos(0.3, 10, 0.18)
                        tPanels.tag1.setScale(0.1283, 0.055, 0.055)
                        tPanels.tag2.setPos(0, 10, 0.43)
                        tPanels.tag2.setScale(0.117, 0.05, 0.05)
                        self.toonPanels.append(tPanels)
                        whichToon = whichToon + 1
                        color += 1
                else:
                    # The local toon gets no panel but still consumes a color
                    # so remote panels keep their assigned player colors.
                    color += 1

        else:
            self.toonPanels = None
        for avId in self.exitedAvIdList:
            if avId not in self.exitedToonsWithPanels:
                self.exitMessageForToon(avId)

        return

    def setPlayHole(self):
        self.notify.debug('GOLF COURSE: received setPlayHole')
        if self.state not in ['PlayHole', 'Cleanup']:
            self.request('PlayHole')

    def getTitle(self):
        return GolfGlobals.getCourseName(self.courseId)

    def getInstructions(self):
        return 'You should not be seeing this'

    def setGolferIds(self, avIds):
        """Distributed update: record the participating golfers."""
        self.avIdList = avIds
        self.numPlayers = len(self.avIdList)
        self.hasLocalToon = self.localAvId in self.avIdList
        if not self.hasLocalToon:
            self.notify.warning('localToon (%s) not in list of golfers: %s' % (self.localAvId, self.avIdList))
            return
        self.notify.info('GOLF COURSE: setParticipants: %s' % self.avIdList)
        self.remoteAvIdList = []
        for avId in self.avIdList:
            if avId != self.localAvId:
                self.remoteAvIdList.append(avId)

    def setCourseAbort(self, avId):
        """Abort the course (avId == 0 means a server-initiated abort)."""
        if avId == self.localAvId or avId == 0:
            if not self.hasLocalToon:
                return
            self.notify.warning('GOLF COURSE: setGameAbort: Aborting game')
            self.normalExit = 0
            if not self.state == 'Cleanup':
                self.request('Cleanup')
            else:
                self.notify.warning('GOLF COURSE: Attempting to clean up twice')

    def onstage(self):
        self.notify.debug('GOLF COURSE: onstage')
        base.playMusic(self.music, looping=1, volume=0.9)

    def avExited(self, avId):
        """Distributed update: a golfer left the course early."""
        self.exitedAvIdList.append(avId)
        # NOTE(review): curHoleDoId is only set via setCurHoleDoId; assumed
        # to have arrived before any avExited update — confirm ordering.
        hole = base.cr.doId2do.get(self.curHoleDoId)
        if hole:
            hole.avExited(avId)
        if self.localAvId == avId:
            self.notify.debug('forcing setCourseAbort')
            if self.state == 'Join':
                loader.endBulkLoad('minigame')
            self.setCourseAbort(0)
        self.exitMessageForToon(avId)

    def exitMessageForToon(self, avId):
        """Replace a departed remote golfer's head panel with an exit label."""
        if self.toonPanels and self.localAvId != avId:
            y = 0
            for x in xrange(len(self.avIdList)):
                if avId == self.avIdList[x] and y < len(self.toonPanels):
                    toonPanel = self.toonPanels[y]
                    toonPanel.headModel.hide()
                    toonPanel.tag1.hide()
                    toonPanel.tag2.hide()
                    exitedToon = DirectLabel(parent=self.toonPanels[y], relief=None, pos=(0, 0, 0.4), color=(1, 1, 1, 1), text_align=TextNode.ACenter, text=TTLocalizer.GolferExited % toonPanel.av.getName(), text_scale=0.07, text_wordwrap=6)
                    exitedToon.setScale(2, 1, 1)
                    self.exitedPanels.append(exitedToon)
                    self.exitedToonsWithPanels.append(avId)
                    toonPanel.removeAvKeep()
                elif not self.avIdList[x] == self.localAvId:
                    # Panels exist only for remote toons, so only advance the
                    # panel index when skipping a remote entry.
                    y += 1

        return

    def enterJoin(self):
        self.sendUpdate('setAvatarJoined', [])

    def handleFallingAsleepGolf(self, task):
        base.localAvatar.stopSleepWatch()
        base.localAvatar.forceGotoSleep()
        self.sendUpdate('setAvatarExited', [])

    def exitJoin(self):
        pass

    def enterWaitStartHole(self):
        self.sendUpdate('setAvatarReadyCourse', [])

    def exitWaitStartHole(self):
        pass

    def enterPlayHole(self):
        loader.endBulkLoad('minigame')

    def exitPlayHole(self):
        pass

    def enterCleanup(self):
        """Run deferred cleanup actions and restore all golfers' avatars."""
        base.localAvatar.stopSleepWatch()
        for action in self.cleanupActions:
            action()

        self.cleanupActions = []
        if not self.scoreBoard == None:
            self.scoreBoard.delete()
        if self.toonPanels:
            for x in xrange(len(self.toonPanels)):
                self.toonPanels[x].destroy()
        self.toonPanels = None
        for avId in self.avIdList:
            av = base.cr.doId2do.get(avId)
            if av:
                av.show()
                av.resetLOD()

        self.ignoreAll()
        if self.hasLocalToon:
            messenger.send('leavingGolf')
            self._destroyDelayDelete()
        return

    def exitCleanup(self):
        pass

    def setCourseId(self, courseId):
        self.courseId = courseId

    def calcHolesToUse(self):
        """Cycle through the course's hole ids until numHoles are chosen."""
        # NOTE(review): self.courseInfo is not assigned anywhere in this
        # class; presumably set externally or dead code — verify callers.
        retval = []
        while len(retval) < self.numHoles:
            for holeId in self.courseInfo['holeIds']:
                retval.append(holeId)
                if len(retval) >= self.numHoles:
                    break

        return retval

    def calcCoursePar(self):
        """Sum the par of every hole on the course."""
        retval = 0
        for holeId in self.holeIds:
            holeInfo = GolfGlobals.HoleInfo[holeId]
            retval += holeInfo['par']

        return retval

    def setScores(self, scoreList):
        """Distributed update: flat score list, numHoles entries per golfer."""
        scoreList.reverse()
        for avId in self.avIdList:
            avScores = []
            for holeIndex in xrange(self.numHoles):
                avScores.append(scoreList.pop())

            self.scores[avId] = avScores

        self.notify.debug('self.scores=%s' % self.scores)

    def setCurHoleIndex(self, holeIndex):
        self.curHoleIndex = holeIndex

    def setCurHoleDoId(self, holeDoId):
        self.curHoleDoId = holeDoId

    def getCurGolfer(self):
        if self.curHoleDoId != 0:
            av = base.cr.doId2do.get(self.curHoleDoId)
            if av:
                return av.currentGolfer
            else:
                return None
        return None

    def getStrokesForCurHole(self, avId):
        retval = 0
        if avId in self.scores:
            retval = self.scores[avId][self.curHoleIndex]
        return retval

    def isGameDone(self):
        retval = False
        self.notify.debug('Self state is: %s' % self.state)
        if self.getCurrentOrNextState() == 'WaitReward' or self.getCurrentOrNextState() == 'WaitFinishCourse':
            retval = True
        return retval

    def setReward(self, trophiesList, rankingsList, holeBestList, courseBestList, cupList, tieBreakWinner, aim0, aim1, aim2, aim3):
        """Distributed update: end-of-course rewards; transitions to WaitReward."""
        self.trophiesList = trophiesList
        self.rankingsList = rankingsList
        self.holeBestList = holeBestList
        self.courseBestList = courseBestList
        self.cupList = cupList
        self.tieBreakWinner = tieBreakWinner
        self.aimTimesList = [aim0,
         aim1,
         aim2,
         aim3]
        if self.state not in ['Cleanup']:
            self.demand('WaitReward')

    def enterWaitReward(self):
        """Show the final scoreboard and play the reward movie."""
        self.scoreBoard.showBoardFinal()
        if self.curHoleDoId != 0:
            av = base.cr.doId2do.get(self.curHoleDoId)
            av.cleanupPowerBar()

        def doneWithRewardMovie():
            # Guarded by self.exit so the done-update is only sent once.
            if self.exit == False:
                self.notify.debug('doneWithRewardMovie')
                self.sendUpdate('setDoneReward', [])
                self._destroyDelayDelete()
                self.exit = True

        self.golfRewardDialog = GolfRewardDialog.GolfRewardDialog(self.avIdList, self.trophiesList, self.rankingsList, self.holeBestList, self.courseBestList, self.cupList, self.localAvId, self.tieBreakWinner, self.aimTimesList)
        self.rewardIval = Sequence(Parallel(Wait(5), self.golfRewardDialog.getMovie()), Func(doneWithRewardMovie))
        self.rewardIval.start()

    def exitEarly(self):
        if self.exit == False:
            self.notify.debug('doneWithRewardMovie')
            self.sendUpdate('setDoneReward', [])
            self._destroyDelayDelete()
            self.exit = True

    def exitReward(self):
        # NOTE(review): the FSM state is named 'WaitReward', so the FSM would
        # call exitWaitReward, not exitReward — this method appears never to
        # be invoked by the FSM. Kept as-is to preserve the public interface;
        # enterCleanup performs equivalent cleanup regardless.
        self.cleanUpReward()

    def cleanUpReward(self):
        if self.rewardIval:
            self.rewardIval.pause()
            self.rewardIval = None
        return

    def updateScoreBoard(self):
        if self.scoreBoard:
            self.scoreBoard.update()

    def changeDrivePermission(self, avId, canDrive):
        """Grant or revoke cart-driving permission for a golfer."""
        if canDrive:
            if avId not in self.drivingToons:
                self.drivingToons.append(avId)
        elif avId in self.drivingToons:
            self.drivingToons.remove(avId)

    def canDrive(self, avId):
        retval = avId in self.drivingToons
        return retval
|
apache-2.0
|
carcer/Castle.Components.Validator
|
src/Castle.Components.Validator/IBrowserValidationGenerator.cs
|
10469
|
// Copyright 2004-2009 Castle Project - http://www.castleproject.org/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Castle.Components.Validator
{
using System;
/// <summary>
/// Abstracts a JS validation library implementation.
/// Each implementation should map the calls to their
/// own approach to enforce validation.
/// </summary>
public interface IBrowserValidationGenerator
{
/// <summary>
/// Set that a field should only accept digits.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="violationMessage">The violation message.</param>
void SetDigitsOnly(string target, string violationMessage);
/// <summary>
/// Set that a field should only accept numbers.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="violationMessage">The violation message.</param>
void SetNumberOnly(string target, string violationMessage);
/// <summary>
/// Sets that a field is required.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="violationMessage">The violation message.</param>
void SetAsRequired(string target, string violationMessage);
/// <summary>
/// Sets that a field value must match the specified regular expression.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="regExp">The regular expression the value must match.</param>
/// <param name="violationMessage">The violation message.</param>
void SetRegExp(string target, string regExp, string violationMessage);
/// <summary>
/// Sets that a field value must be a valid email address.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="violationMessage">The violation message.</param>
void SetEmail(string target, string violationMessage);
/// <summary>
/// Sets that a field must have an exact length.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="length">The length.</param>
void SetExactLength(string target, int length);
/// <summary>
/// Sets that a field must have an exact length.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="length">The length.</param>
/// <param name="violationMessage">The violation message.</param>
void SetExactLength(string target, int length, string violationMessage);
/// <summary>
/// Sets that a field must have a minimum length.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minLength">The minimum length.</param>
void SetMinLength(string target, int minLength);
/// <summary>
/// Sets that a field must have a minimum length.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minLength">The minimum length.</param>
/// <param name="violationMessage">The violation message.</param>
void SetMinLength(string target, int minLength, string violationMessage);
/// <summary>
/// Sets that a field must have a maximum length.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="maxLength">The maximum length.</param>
void SetMaxLength(string target, int maxLength);
/// <summary>
/// Sets that a field must have a maximum length.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="maxLength">The maximum length.</param>
/// <param name="violationMessage">The violation message.</param>
void SetMaxLength(string target, int maxLength, string violationMessage);
/// <summary>
/// Sets that a field's length must fall within a length range.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minLength">The minimum length.</param>
/// <param name="maxLength">The maximum length.</param>
void SetLengthRange(string target, int minLength, int maxLength);
/// <summary>
/// Sets that a field's length must fall within a length range.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minLength">The minimum length.</param>
/// <param name="maxLength">The maximum length.</param>
/// <param name="violationMessage">The violation message.</param>
void SetLengthRange(string target, int minLength, int maxLength, string violationMessage);
/// <summary>
/// Sets that a field value must fall within an integer value range.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minValue">Minimum value.</param>
/// <param name="maxValue">Maximum value.</param>
/// <param name="violationMessage">The violation message.</param>
void SetValueRange(string target, int minValue, int maxValue, string violationMessage);
/// <summary>
/// Sets that a field value must fall within a decimal value range.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minValue">Minimum value.</param>
/// <param name="maxValue">Maximum value.</param>
/// <param name="violationMessage">The violation message.</param>
void SetValueRange(string target, decimal minValue, decimal maxValue, string violationMessage);
/// <summary>
/// Sets that a field value must fall within a date value range.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minValue">Minimum value.</param>
/// <param name="maxValue">Maximum value.</param>
/// <param name="violationMessage">The violation message.</param>
void SetValueRange(string target, DateTime minValue, DateTime maxValue, string violationMessage);
/// <summary>
/// Sets that a field value must fall within a string value range.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="minValue">Minimum value.</param>
/// <param name="maxValue">Maximum value.</param>
/// <param name="violationMessage">The violation message.</param>
void SetValueRange(string target, string minValue, string maxValue, string violationMessage);
/// <summary>
/// Set that a field value must be the same as another field's value.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="comparisonFieldName">The name of the field to compare with.</param>
/// <param name="violationMessage">The violation message.</param>
void SetAsSameAs(string target, string comparisonFieldName, string violationMessage);
/// <summary>
/// Set that a field value must _not_ be the same as another field's value.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="comparisonFieldName">The name of the field to compare with.</param>
/// <param name="violationMessage">The violation message.</param>
void SetAsNotSameAs(string target, string comparisonFieldName, string violationMessage);
/// <summary>
/// Set that a field value must be a valid date.
/// </summary>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="violationMessage">The violation message.</param>
void SetDate(string target, string violationMessage);
/// <summary>
/// Sets that a field's value must be greater than another field's value.
/// </summary>
/// <remarks>Not implemented by the JQuery validate plugin. Done via a custom rule.</remarks>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="comparisonFieldName">The name of the field to compare with.</param>
/// <param name="validationType">The type of data to compare.</param>
/// <param name="violationMessage">The violation message.</param>
void SetAsGreaterThan(string target, string comparisonFieldName, IsGreaterValidationType validationType,string violationMessage);
/// <summary>
/// Sets that a field's value must be lesser than another field's value.
/// </summary>
/// <remarks>Not implemented by the JQuery validate plugin. Done via a custom rule.</remarks>
/// <param name="target">The target name (ie, a hint about the controller being validated)</param>
/// <param name="comparisonFieldName">The name of the field to compare with.</param>
/// <param name="validationType">The type of data to compare.</param>
/// <param name="violationMessage">The violation message.</param>
void SetAsLesserThan( string target, string comparisonFieldName, IsLesserValidationType validationType, string violationMessage );
/// <summary>
/// Sets that a field is part of a group validation.
/// </summary>
/// <remarks>Not implemented by the JQuery validate plugin. Done via a custom rule.</remarks>
/// <param name="target">The target.</param>
/// <param name="groupName">Name of the group.</param>
/// <param name="violationMessage">The violation message.</param>
void SetAsGroupValidation(string target, string groupName, string violationMessage);
}
}
|
apache-2.0
|
codycollier/ocelog
|
ocelog/__init__.py
|
16
|
__all__ = []  # package intentionally re-exports no public names yet
|
apache-2.0
|
shivangi1015/incubator-carbondata
|
core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
|
2207
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.datastore.chunk.reader;
import java.io.IOException;
import org.apache.carbondata.core.datastore.FileHolder;
import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
import org.apache.carbondata.core.memory.MemoryException;
/**
* Reader interface for reading the measure blocks from file
*/
public interface MeasureColumnChunkReader {
/**
* Method to read the blocks data based on block indexes
*
* @param fileReader file reader to read the blocks
* @param blockIndexes blocks to be read
* @return raw measure data chunks, one per requested block
* @throws IOException if reading from the underlying file fails
*/
MeasureRawColumnChunk[] readRawMeasureChunks(FileHolder fileReader, int[][] blockIndexes)
throws IOException;
/**
* Method to read the blocks data based on block index
*
* @param fileReader file reader to read the blocks
* @param blockIndex block to be read
* @return raw measure data chunk for the requested block
* @throws IOException if reading from the underlying file fails
*/
MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader, int blockIndex)
throws IOException;
/**
* Convert raw data to measure chunk
* @param measureRawColumnChunk raw chunk previously read from disk
* @param pageNumber page within the raw chunk to decode
* @return decoded measure column data chunk
* @throws IOException if reading from the underlying file fails
* @throws MemoryException if working memory cannot be acquired
*/
MeasureColumnDataChunk convertToMeasureChunk(MeasureRawColumnChunk measureRawColumnChunk,
int pageNumber) throws IOException, MemoryException;
}
|
apache-2.0
|
KarloKnezevic/Ferko
|
src/java/hr/fer/zemris/jcms/web/actions/UserAction.java
|
3533
|
package hr.fer.zemris.jcms.web.actions;
import hr.fer.zemris.jcms.beans.UserBean;
import hr.fer.zemris.jcms.service.BasicBrowsing;
import hr.fer.zemris.jcms.web.actions.data.UserActionData;
import hr.fer.zemris.jcms.web.actions.data.support.AbstractActionData;
import hr.fer.zemris.jcms.web.actions.data.support.MessageLoggerFactory;
/**
 * Struts action exposing user search / creation / update operations.
 * Every operation shares the same preamble (login check, data-bean setup,
 * delegation to {@link BasicBrowsing}); that preamble is factored into
 * {@link #runBrowsing(String)} so each action method only handles its
 * operation-specific result mapping.
 */
public class UserAction extends ExtendedActionSupport {

    private static final long serialVersionUID = 2L;

    /** Form-backing bean populated by the framework. */
    private UserBean bean = new UserBean(true);
    /** Per-request action data built by {@link #runBrowsing(String)}. */
    private UserActionData data;

    public String execute() throws Exception {
        return fillSearch();
    }

    /**
     * Shared preamble for all operations: ensures the user is logged in,
     * creates a fresh {@link UserActionData} and delegates to
     * {@link BasicBrowsing#getUserActionData}.
     *
     * @param operation operation name forwarded to {@code BasicBrowsing}
     * @return a result string to return immediately (login redirect or
     *         fatal-message page), or {@code null} when the caller may
     *         continue with operation-specific result handling
     */
    private String runBrowsing(String operation) throws Exception {
        // If the user is not logged in - out! (was: "Ako korisnik nije logiran - van!")
        String check = checkUser(null, true);
        if (check != null) {
            return check;
        }
        data = new UserActionData(MessageLoggerFactory.createMessageLogger(this, true));
        BasicBrowsing.getUserActionData(data, getCurrentUser().getUserID(), bean, operation);
        if (data.getResult().equals(AbstractActionData.RESULT_FATAL)) {
            return SHOW_FATAL_MESSAGE;
        }
        return null;
    }

    /** Prepares the (empty) search form. */
    public String fillSearch() throws Exception {
        String outcome = runBrowsing("fillSearch");
        if (outcome != null) {
            return outcome;
        }
        return "searchForm";
    }

    /** Executes a search; returns to the search form on input errors. */
    public String find() throws Exception {
        String outcome = runBrowsing("find");
        if (outcome != null) {
            return outcome;
        }
        if (data.getResult().equals(AbstractActionData.RESULT_INPUT)) {
            return "searchForm";
        }
        return INPUT;
    }

    /** Prepares the form for creating a new user. */
    public String fillNew() throws Exception {
        String outcome = runBrowsing("fillNew");
        if (outcome != null) {
            return outcome;
        }
        return INPUT;
    }

    /** Persists changes; on success the logged messages are delayed to the next view. */
    public String update() throws Exception {
        String outcome = runBrowsing("update");
        if (outcome != null) {
            return outcome;
        }
        if (data.getResult().equals(AbstractActionData.RESULT_INPUT)) {
            return INPUT;
        }
        data.getMessageLogger().registerAsDelayed();
        return SUCCESS;
    }

    /** Resets the user's external ID; returns to the search form on input errors. */
    public String resetExternalID() throws Exception {
        String outcome = runBrowsing("resetExternalID");
        if (outcome != null) {
            return outcome;
        }
        if (data.getResult().equals(AbstractActionData.RESULT_INPUT)) {
            return "searchForm";
        }
        return INPUT;
    }

    public UserActionData getData() {
        return data;
    }

    public void setData(UserActionData data) {
        this.data = data;
    }

    public UserBean getBean() {
        return bean;
    }

    public void setBean(UserBean bean) {
        this.bean = bean;
    }
}
|
apache-2.0
|
Aloomaio/androidsdk
|
src/main/java/com/github/aloomaio/androidsdk/viewcrawler/ViewSnapshot.java
|
15608
|
package com.github.aloomaio.androidsdk.viewcrawler;
import android.annotation.TargetApi;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.util.Base64;
import android.util.Base64OutputStream;
import android.util.DisplayMetrics;
import android.util.JsonWriter;
import android.util.Log;
import android.util.LruCache;
import android.view.View;
import android.view.ViewGroup;
import com.github.aloomaio.androidsdk.aloomametrics.AConfig;
import com.github.aloomaio.androidsdk.aloomametrics.ResourceIds;
import org.json.JSONObject;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
@TargetApi(AConfig.UI_FEATURES_MIN_API)
// Serializes a JSON snapshot (view hierarchy + screenshot) of live activities.
// Screenshots are captured on the UI thread via RootViewFinder; serialization
// happens on the calling thread.
/* package */ class ViewSnapshot {
public ViewSnapshot(List<PropertyDescription> properties, ResourceIds resourceIds) {
mProperties = properties;
mResourceIds = resourceIds;
mMainThreadHandler = new Handler(Looper.getMainLooper());
mRootViewFinder = new RootViewFinder();
mClassnameCache = new ClassNameCache(MAX_CLASS_NAME_CACHE_SIZE);
}
/**
* Take a snapshot of each activity in liveActivities. The given UIThreadSet will be accessed
* on the main UI thread, and should contain a set with elements for every activity to be
* snapshotted. Given stream out will be written on the calling thread.
*/
public void snapshots(UIThreadSet<Activity> liveActivities, OutputStream out) throws IOException {
mRootViewFinder.findInActivities(liveActivities);
// Screenshots must be taken on the UI thread; post the finder there and wait below.
final FutureTask<List<RootViewInfo>> infoFuture = new FutureTask<List<RootViewInfo>>(mRootViewFinder);
mMainThreadHandler.post(infoFuture);
// NOTE: writer wraps out with the platform default charset; JSON text and the raw
// base64 screenshot bytes are interleaved on the same stream, hence the flush() calls.
final OutputStreamWriter writer = new OutputStreamWriter(out);
try {
// Give the UI thread at most 1 second to produce the screenshots.
final List<RootViewInfo> infoList = infoFuture.get(1, TimeUnit.SECONDS);
final int infoCount = infoList.size();
writer.write("[");
for (int i = 0; i < infoCount; i++) {
if (i > 0) {
writer.write(",");
}
final RootViewInfo info = infoList.get(i);
writer.write("{");
writer.write("\"activity\":");
writer.write(JSONObject.quote(info.activityName));
writer.write(",");
writer.write("\"scale\":");
writer.write(String.format("%s", info.scale));
writer.write(",");
writer.write("\"serialized_objects\":");
{
final JsonWriter j = new JsonWriter(writer);
j.beginObject();
j.name("rootObject").value(info.rootView.hashCode());
j.name("objects");
snapshotViewHierarchy(j, info.rootView);
j.endObject();
j.flush();
}
writer.write(",");
writer.write("\"screenshot\":");
// Flush buffered JSON before writing raw bytes directly to the stream.
writer.flush();
info.screenshot.writeBitmapJSON(Bitmap.CompressFormat.PNG, 100, out);
writer.write("}");
}
writer.write("]");
writer.flush();
} catch (final InterruptedException e) {
if (AConfig.DEBUG) {
Log.d(LOGTAG, "Screenshot interrupted, no screenshot will be sent.", e);
}
} catch (final TimeoutException e) {
if (AConfig.DEBUG) {
Log.i(LOGTAG, "Screenshot took more than 1 second to be scheduled and executed. No screenshot will be sent.", e);
}
} catch (final ExecutionException e) {
if (AConfig.DEBUG) {
Log.e(LOGTAG, "Exception thrown during screenshot attempt", e);
}
}
}
// For testing only
/* package */ List<PropertyDescription> getProperties() {
return mProperties;
}
// Writes the whole hierarchy rooted at rootView as a flat JSON array of view objects.
/* package */ void snapshotViewHierarchy(JsonWriter j, View rootView)
throws IOException {
j.beginArray();
snapshotView(j, rootView);
j.endArray();
}
// Serializes one view as a JSON object (children referenced by hashCode),
// then recurses into the children so the array stays flat.
private void snapshotView(JsonWriter j, View view)
throws IOException {
final int viewId = view.getId();
final String viewIdName;
if (-1 == viewId) {
viewIdName = null;
} else {
viewIdName = mResourceIds.nameForId(viewId);
}
j.beginObject();
j.name("hashCode").value(view.hashCode());
j.name("id").value(viewId);
j.name("mp_id_name").value(viewIdName);
final CharSequence description = view.getContentDescription();
if (null == description) {
j.name("contentDescription").nullValue();
} else {
j.name("contentDescription").value(description.toString());
}
final Object tag = view.getTag();
if (null == tag) {
j.name("tag").nullValue();
} else if (tag instanceof CharSequence) {
// NOTE(review): non-CharSequence tags are silently omitted (no "tag" key at all).
j.name("tag").value(tag.toString());
}
j.name("top").value(view.getTop());
j.name("left").value(view.getLeft());
j.name("width").value(view.getWidth());
j.name("height").value(view.getHeight());
j.name("scrollX").value(view.getScrollX());
j.name("scrollY").value(view.getScrollY());
j.name("visibility").value(view.getVisibility());
float translationX = 0;
float translationY = 0;
// getTranslationX/Y only exist from API 11 (Honeycomb) onward.
if (Build.VERSION.SDK_INT >= 11) {
translationX = view.getTranslationX();
translationY = view.getTranslationY();
}
j.name("translationX").value(translationX);
j.name("translationY").value(translationY);
// Full class ancestry up to (but excluding) Object, via the LRU name cache.
j.name("classes");
j.beginArray();
Class<?> klass = view.getClass();
do {
j.value(mClassnameCache.get(klass));
klass = klass.getSuperclass();
} while (klass != Object.class && klass != null);
j.endArray();
addProperties(j, view);
j.name("subviews");
j.beginArray();
if (view instanceof ViewGroup) {
final ViewGroup group = (ViewGroup) view;
final int childCount = group.getChildCount();
for (int i = 0; i < childCount; i++) {
final View child = group.getChildAt(i);
// child can be null when views are getting disposed.
if (null != child) {
j.value(child.hashCode());
}
}
}
j.endArray();
j.endObject();
if (view instanceof ViewGroup) {
final ViewGroup group = (ViewGroup) view;
final int childCount = group.getChildCount();
for (int i = 0; i < childCount; i++) {
final View child = group.getChildAt(i);
// child can be null when views are getting disposed.
if (null != child) {
snapshotView(j, child);
}
}
}
}
// Emits any configured property whose target class matches the view,
// choosing the JSON value type from the runtime type of the result.
private void addProperties(JsonWriter j, View v)
throws IOException {
final Class<?> viewClass = v.getClass();
for (final PropertyDescription desc : mProperties) {
if (desc.targetClass.isAssignableFrom(viewClass) && null != desc.accessor) {
final Object value = desc.accessor.applyMethod(v);
if (null == value) {
// Don't produce anything in this case
} else if (value instanceof Number) {
j.name(desc.name).value((Number) value);
} else if (value instanceof Boolean) {
j.name(desc.name).value((Boolean) value);
} else {
j.name(desc.name).value(value.toString());
}
}
}
}
// LRU cache of canonical class names, since getCanonicalName() is repeatedly
// needed for the same classes while walking hierarchies.
private static class ClassNameCache extends LruCache<Class<?>, String> {
public ClassNameCache(int maxSize) {
super(maxSize);
}
@Override
protected String create(Class<?> klass) {
return klass.getCanonicalName();
}
}
// Runs on the UI thread (via FutureTask): collects each activity's root view
// and captures a scaled screenshot into a single reused CachedBitmap.
private static class RootViewFinder implements Callable<List<RootViewInfo>> {
public RootViewFinder() {
mDisplayMetrics = new DisplayMetrics();
mRootViews = new ArrayList<RootViewInfo>();
mCachedBitmap = new CachedBitmap();
}
public void findInActivities(UIThreadSet<Activity> liveActivities) {
mLiveActivities = liveActivities;
}
@Override
public List<RootViewInfo> call() throws Exception {
mRootViews.clear();
final Set<Activity> liveActivities = mLiveActivities.getAll();
for (final Activity a : liveActivities) {
final String activityName = a.getClass().getCanonicalName();
final View rootView = a.getWindow().getDecorView().getRootView();
a.getWindowManager().getDefaultDisplay().getMetrics(mDisplayMetrics);
final RootViewInfo info = new RootViewInfo(activityName, rootView);
mRootViews.add(info);
}
final int viewCount = mRootViews.size();
for (int i = 0; i < viewCount; i++) {
final RootViewInfo info = mRootViews.get(i);
takeScreenshot(info);
}
return mRootViews;
}
// Tries the hidden View#createSnapshot via reflection first; falls back to
// the drawing cache. Failures leave info.screenshot pointing at whatever the
// shared CachedBitmap last held.
private void takeScreenshot(final RootViewInfo info) {
final View rootView = info.rootView;
Bitmap rawBitmap = null;
try {
final Method createSnapshot = View.class.getDeclaredMethod("createSnapshot", Bitmap.Config.class, Integer.TYPE, Boolean.TYPE);
createSnapshot.setAccessible(true);
rawBitmap = (Bitmap) createSnapshot.invoke(rootView, Bitmap.Config.RGB_565, Color.WHITE, false);
} catch (final NoSuchMethodException e) {
if (AConfig.DEBUG) {
Log.v(LOGTAG, "Can't call createSnapshot, will use drawCache", e);
}
} catch (final IllegalArgumentException e) {
Log.d(LOGTAG, "Can't call createSnapshot with arguments", e);
} catch (final InvocationTargetException e) {
Log.e(LOGTAG, "Exception when calling createSnapshot", e);
} catch (final IllegalAccessException e) {
Log.e(LOGTAG, "Can't access createSnapshot, using drawCache", e);
} catch (final ClassCastException e) {
Log.e(LOGTAG, "createSnapshot didn't return a bitmap?", e);
}
Boolean originalCacheState = null;
try {
if (null == rawBitmap) {
// Fallback path: remember the cache flag so it can be restored below.
originalCacheState = rootView.isDrawingCacheEnabled();
rootView.setDrawingCacheEnabled(true);
rootView.buildDrawingCache(true);
rawBitmap = rootView.getDrawingCache();
}
} catch (final RuntimeException e) {
if (AConfig.DEBUG) {
Log.v(LOGTAG, "Can't take a bitmap snapshot of view " + rootView + ", skipping for now.", e);
}
}
float scale = 1.0f;
if (null != rawBitmap) {
final int rawDensity = rawBitmap.getDensity();
if (rawDensity != Bitmap.DENSITY_NONE) {
scale = ((float) mClientDensity) / rawDensity;
}
final int rawWidth = rawBitmap.getWidth();
final int rawHeight = rawBitmap.getHeight();
final int destWidth = (int) ((rawBitmap.getWidth() * scale) + 0.5);
final int destHeight = (int) ((rawBitmap.getHeight() * scale) + 0.5);
if (rawWidth > 0 && rawHeight > 0 && destWidth > 0 && destHeight > 0) {
mCachedBitmap.recreate(destWidth, destHeight, mClientDensity, rawBitmap);
}
}
if (null != originalCacheState && !originalCacheState) {
rootView.setDrawingCacheEnabled(false);
}
info.scale = scale;
info.screenshot = mCachedBitmap;
}
private UIThreadSet<Activity> mLiveActivities;
private final List<RootViewInfo> mRootViews;
private final DisplayMetrics mDisplayMetrics;
// Single bitmap shared by all RootViewInfo instances of one pass.
private final CachedBitmap mCachedBitmap;
private final int mClientDensity = DisplayMetrics.DENSITY_DEFAULT;
}
// Reusable bitmap holder; recreate() only reallocates when dimensions change.
private static class CachedBitmap {
public CachedBitmap() {
mPaint = new Paint(Paint.FILTER_BITMAP_FLAG);
mCached = null;
}
public synchronized void recreate(int width, int height, int destDensity, Bitmap source) {
if (null == mCached || mCached.getWidth() != width || mCached.getHeight() != height) {
try {
mCached = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
} catch (final OutOfMemoryError e) {
// Out of memory: leave mCached null; writeBitmapJSON will emit "null".
mCached = null;
}
if (null != mCached) {
mCached.setDensity(destDensity);
}
}
if (null != mCached) {
final Canvas scaledCanvas = new Canvas(mCached);
scaledCanvas.drawBitmap(source, 0, 0, mPaint);
}
}
// Writes a QUOTED base64 string (or the string null) to the output stream
// NOTE(review): the format/quality parameters are ignored; PNG at quality 100
// is hard-coded on the compress() call below.
public synchronized void writeBitmapJSON(Bitmap.CompressFormat format, int quality, OutputStream out)
throws IOException {
if (null == mCached || mCached.getWidth() == 0 || mCached.getHeight() == 0) {
out.write("null".getBytes())
;
} else {
out.write('"');
final Base64OutputStream imageOut = new Base64OutputStream(out, Base64.NO_WRAP);
mCached.compress(Bitmap.CompressFormat.PNG, 100, imageOut);
imageOut.flush();
out.write('"');
}
}
private Bitmap mCached;
private final Paint mPaint;
}
// Value holder for one activity's snapshot: name, root view, screenshot, scale.
private static class RootViewInfo {
public RootViewInfo(String activityName, View rootView) {
this.activityName = activityName;
this.rootView = rootView;
this.screenshot = null;
this.scale = 1.0f;
}
public final String activityName;
public final View rootView;
public CachedBitmap screenshot;
public float scale;
}
private final RootViewFinder mRootViewFinder;
private final List<PropertyDescription> mProperties;
private final ClassNameCache mClassnameCache;
private final Handler mMainThreadHandler;
private final ResourceIds mResourceIds;
private static final int MAX_CLASS_NAME_CACHE_SIZE = 255;
@SuppressWarnings("unused")
private static final String LOGTAG = "AloomaAPI.ViewSnapshot";
}
|
apache-2.0
|
sega4revenge/Sega
|
app/src/main/java/com/sega/vimarket/activity/FullScreenImageActivity.java
|
3500
|
package com.sega.vimarket.activity;
import android.app.ProgressDialog;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.MenuItem;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.request.animation.GlideAnimation;
import com.bumptech.glide.request.target.SimpleTarget;
import com.sega.vimarket.R;
import com.sega.vimarket.view.CircleTransform;
import com.sega.vimarket.color.CActivity;
import com.sega.vimarket.widget.TouchImageView;
/**
* Created by Sega on 04/01/2017.
*/
// Shows a single photo full screen in a zoomable TouchImageView, with the
// sender's name and avatar in the toolbar. Image URLs and the user name are
// passed in via Intent extras ("nameUser", "urlPhotoUser", "urlPhotoClick").
public class FullScreenImageActivity extends CActivity {
private TouchImageView mImageView;
private ImageView ivUser;
private TextView tvUser;
private ProgressDialog progressDialog;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_full_screen);
bindViews();
}
@Override
protected void onResume() {
super.onResume();
// (Re)load the image on every resume so the extras are re-read.
setValues();
}
@Override
public void onBackPressed() {
super.onBackPressed();
// NOTE(review): explicit System.gc() + finish() after back press — presumably
// to release bitmap memory promptly; confirm this is still needed.
System.gc();
finish();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
// Toolbar "up" arrow behaves exactly like the hardware back button.
if (id == android.R.id.home) {
onBackPressed();
}
return super.onOptionsItemSelected(item);
}
// Wires up the toolbar (with up navigation, no title) and the view references.
private void bindViews() {
progressDialog = new ProgressDialog(this);
mImageView = (TouchImageView) findViewById(R.id.imageView);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayShowTitleEnabled(false);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
ivUser = (ImageView) toolbar.findViewById(R.id.avatar);
tvUser = (TextView) toolbar.findViewById(R.id.title);
}
// Reads the Intent extras and loads both the avatar (40x40, circle-cropped)
// and the full image (640x640, fit-centered) with Glide, showing a progress
// dialog while the full image loads.
private void setValues() {
String nameUser, urlPhotoUser, urlPhotoClick;
nameUser = getIntent().getStringExtra("nameUser");
urlPhotoUser = getIntent().getStringExtra("urlPhotoUser");
urlPhotoClick = getIntent().getStringExtra("urlPhotoClick");
Log.i("TAG", "imagem recebida " + urlPhotoClick);
tvUser.setText(nameUser); // Name
Glide.with(this).load(urlPhotoUser).centerCrop().transform(new CircleTransform(this)).override(40, 40).into(ivUser);
Glide.with(this).load(urlPhotoClick).asBitmap().override(640, 640).fitCenter().into(new SimpleTarget<Bitmap>() {
@Override
public void onLoadStarted(Drawable placeholder) {
progressDialog.setMessage("Carregando Imagem...");
progressDialog.show();
}
@Override
public void onResourceReady(Bitmap resource,
GlideAnimation<? super Bitmap> glideAnimation) {
progressDialog.dismiss();
mImageView.setImageBitmap(resource);
}
@Override
public void onLoadFailed(Exception e, Drawable errorDrawable) {
Toast.makeText(FullScreenImageActivity.this, "Erro, tente novamente", Toast.LENGTH_LONG).show();
progressDialog.dismiss();
}
});
}
}
|
apache-2.0
|
SoffidIAM/jxpath
|
src/java/es/caib/zkib/jxpath/util/ReverseComparator.java
|
1498
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.caib.zkib.jxpath.util;
import java.io.Serializable;
import java.util.Comparator;
/**
* Reverse comparator.
*
* @author Dmitri Plotnikov
* @version $Revision: 1.1 $ $Date: 2009-04-03 08:13:15 $
*/
/**
 * Reverse comparator: orders elements in the reverse of their natural order
 * by delegating to {@link Comparable#compareTo} with the arguments swapped.
 * Stateless and therefore thread-safe.
 *
 * @author Dmitri Plotnikov
 * @version $Revision: 1.1 $ $Date: 2009-04-03 08:13:15 $
 */
public final class ReverseComparator implements Comparator, Serializable {
    private static final long serialVersionUID = -2795475743948616649L;

    /**
     * Singleton reverse comparator instance.
     */
    public static final Comparator INSTANCE = new ReverseComparator();

    /**
     * Create a new ReverseComparator.
     */
    private ReverseComparator() {
    }

    /**
     * Compares in reverse natural order.
     *
     * @param o1 first object; must implement {@link Comparable}
     * @param o2 second object; compared against {@code o1}
     * @return negative/zero/positive per the reversed natural ordering
     * @throws ClassCastException if {@code o2} is not {@link Comparable}
     *         or not mutually comparable with {@code o1}
     */
    public int compare(Object o1, Object o2) {
        return ((Comparable) o2).compareTo(o1);
    }

    /**
     * Preserves the singleton guarantee across serialization: deserializing
     * always resolves to {@link #INSTANCE} instead of a new instance.
     *
     * @return the canonical {@link #INSTANCE}
     */
    private Object readResolve() {
        return INSTANCE;
    }
}
|
apache-2.0
|
dwdyer/ipdframework
|
src/java/main/au/edu/uwa/csse/dyerd01/ipd/strategies/PeriodicCCD.java
|
399
|
// $Header: $
package au.edu.uwa.csse.dyerd01.ipd.strategies;
import au.edu.uwa.csse.dyerd01.ipd.framework.Action;
/**
* @author Daniel Dyer
*/
/**
 * Iterated-prisoner's-dilemma strategy that repeats the fixed three-move
 * cycle cooperate, cooperate, defect.
 */
public class PeriodicCCD extends AbstractPeriodicPlayer
{
/** Configures the superclass with the repeating C, C, D action cycle. */
public PeriodicCCD()
{
super(new Action[]{Action.COOPERATE, Action.COOPERATE, Action.DEFECT});
}
/** @return the display name of this strategy */
public String getName()
{
return "PeriodicCCD";
}
}
|
apache-2.0
|
angeloc/sysdig
|
userspace/libsinsp/tracers.cpp
|
23634
|
/*
Copyright (C) 2013-2018 Draios Inc dba Sysdig.
This file is part of sysdig.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include <time.h>
#include "sinsp.h"
#include "sinsp_int.h"
#include "tracers.h"
// Constructor: binds the parser to its inspector and puts every member into a
// known empty state (no storage allocated, no pending fragment, no enter event).
sinsp_tracerparser::sinsp_tracerparser(sinsp* inspector) :
	m_inspector(inspector),
	m_storage_size(0),
	m_storage(NULL),
	m_res(sinsp_tracerparser::RES_OK),
	m_fragment_size(0),
	m_enter_pae(NULL)
{
}
// Destructor: releases the parse buffer.
// free(NULL) is defined as a no-op by the C standard, so no null check is needed.
sinsp_tracerparser::~sinsp_tracerparser()
{
	free(m_storage);
}
void sinsp_tracerparser::set_storage_size(uint32_t newsize)
{
m_storage = (char*)realloc(m_storage, newsize);
if(m_storage == NULL)
{
throw sinsp_exception("memory allocation error in sinsp_tracerparser::process_event_data.");
}
m_storage_size = newsize;
}
//
// Reassemble (possibly fragmented) tracer event data into m_storage and parse
// it. data/datalen is the newly arrived chunk; ts is the event timestamp used
// to stamp enter/exit events. Returns RES_OK, RES_FAILED (invalid syntax) or
// RES_TRUNCATED (valid so far, buffered while waiting for more fragments).
//
sinsp_tracerparser::parse_result sinsp_tracerparser::process_event_data(char *data, uint32_t datalen, uint64_t ts)
{
	ASSERT(data != NULL);
	m_storlen = m_fragment_size + datalen;
	//
	// Make sure we have enough space in the buffer and copy the data into it
	//
	if(m_storage_size < m_storlen + 1)
	{
		set_storage_size(m_storlen + 1);
	}
	memcpy(m_storage + m_fragment_size, data, datalen);
	m_storage[m_storlen] = 0;
	if(m_fragment_size != 0)
	{
		// Keep a pristine copy of the reassembled string: the parsers below
		// write NUL terminators into m_storage, so this copy is needed to
		// re-buffer the data if parsing reports another truncation.
		m_fullfragment_storage_str = m_storage;
	}
	//
	// Do the parsing
	//
	if(m_storlen > 0)
	{
		//
		// Reset the content
		//
		m_res = sinsp_tracerparser::RES_OK;
		m_tags.clear();
		m_argnames.clear();
		m_argvals.clear();
		m_taglens.clear();
		m_argnamelens.clear();
		m_argvallens.clear();
		m_tot_taglens = 0;
		m_tot_argnamelens = 0;
		m_tot_argvallens = 0;
		// '>'/'<' as the first character selects the compact text format;
		// anything else is treated as the JSON format.
		if(m_storage[0] == '>' || m_storage[0] == '<')
		{
			parse_simple(m_storage);
		}
		else
		{
			parse_json(m_storage);
		}
	}
	else
	{
		m_res = sinsp_tracerparser::RES_FAILED;
	}
	if(m_res == sinsp_tracerparser::RES_FAILED)
	{
		//
		// Invalid syntax
		//
		m_fragment_size = 0;
		m_fullfragment_storage_str.clear();
		return m_res;
	}
	else if(m_res == sinsp_tracerparser::RES_TRUNCATED)
	{
		//
		// Valid syntax, but the message is incomplete. Buffer it and wait for
		// more fragments.
		//
		if(m_fragment_size > MAX_USER_EVT_BUFFER)
		{
			//
			// Maximum buffering size reached, drop the event
			//
			m_fragment_size = 0;
			return m_res;
		}
		if(m_fullfragment_storage_str.length() == 0)
		{
			// First fragment: re-copy the raw chunk because parsing mutated
			// m_storage in place.
			memcpy(m_storage,
				data,
				datalen);
			m_storage[datalen] = 0;
			m_fragment_size += datalen;
		}
		else
		{
			// Later fragments: restore the full unparsed string saved above.
			uint32_t tlen = (uint32_t)m_fullfragment_storage_str.length();
			memcpy(m_storage,
				m_fullfragment_storage_str.c_str(),
				tlen);
			m_fragment_size = tlen;
		}
		return m_res;
	}
	// Parse succeeded: discard any buffered fragment state.
	m_fragment_size = 0;
	m_fullfragment_storage_str.clear();
	//
	// Parser tests stop here
	//
	if(m_inspector == NULL)
	{
		return sinsp_tracerparser::RES_OK;
	}
	//
	// Event decoding done. We do state tracking only if explicitly requested
	// by one or more filters.
	//
	if(m_inspector->m_track_tracers_state == false)
	{
		return sinsp_tracerparser::RES_OK;
	}
	//
	// If this is an enter event, allocate a sinsp_partial_tracer object and
	// push it to the list
	//
	if(m_type_str[0] == '>')
	{
		sinsp_partial_tracer* pae = m_inspector->m_partial_tracers_pool->pop();
		if(pae == NULL)
		{
			//
			// The list is completely used. This likely means that there have been drops and
			// the entries will be stuck there forever. Better clean the list, miss the 128
			// events it contains, and start fresh.
			//
			list<sinsp_partial_tracer*>* partial_tracers_list = &m_inspector->m_partial_tracers_list;
			list<sinsp_partial_tracer*>::iterator it;
			for(it = partial_tracers_list->begin(); it != partial_tracers_list->end(); ++it)
			{
				m_inspector->m_partial_tracers_pool->push(*it);
			}
			partial_tracers_list->clear();
			return sinsp_tracerparser::RES_OK;
		}
		init_partial_tracer(pae);
		pae->m_time = ts;
		m_inspector->m_partial_tracers_list.push_front(pae);
		m_enter_pae = pae;
	}
	else
	{
		// Exit event: find and remove the matching enter event.
		list<sinsp_partial_tracer*>* partial_tracers_list = &m_inspector->m_partial_tracers_list;
		list<sinsp_partial_tracer*>::iterator it;
		init_partial_tracer(&m_exit_pae);
		for(it = partial_tracers_list->begin(); it != partial_tracers_list->end(); ++it)
		{
			if(m_exit_pae.compare(*it) == true)
			{
				m_exit_pae.m_time = ts;
				//
				// This is a bit tricky and deserves some explanation:
				// despite removing the pae and returning it to the available pool,
				// we link to it so that the filters will use it. We do that as an
				// optimization (it avoids making a copy or implementing logic for
				// delayed list removal), and we base it on the assumption that,
				// since the processing is strictly sequential and single thread,
				// nobody will modify the pae until the event is fully processed.
				//
				m_enter_pae = *it;
				m_inspector->m_partial_tracers_pool->push(*it);
				partial_tracers_list->erase(it);
				return sinsp_tracerparser::RES_OK;
			}
		}
		// No matching enter event found.
		m_enter_pae = NULL;
	}
	return sinsp_tracerparser::RES_OK;
}
//
// Look for the enter event whose encoded tag list is the parent of the
// current enter event's tags (i.e. everything up to, but not including, the
// last tag). Used to reconstruct tracer nesting. Returns NULL if no parent
// enter event is in the list.
//
sinsp_partial_tracer* sinsp_tracerparser::find_parent_enter_pae()
{
	list<sinsp_partial_tracer*>* partial_tracers_list = &m_inspector->m_partial_tracers_list;
	list<sinsp_partial_tracer*>::iterator it;
	// Start at the end of the packed, NUL-separated tag storage.
	char* tse = m_enter_pae->m_tags_storage + m_tot_taglens;
	// Step back over a trailing terminator, if present.
	if(*tse == 0 && tse > m_enter_pae->m_tags_storage)
	{
		--tse;
	}
	uint32_t len = 0;
	// Walk backwards to the separator before the last tag: the parent's
	// encoded tag list is everything up to and including that terminator.
	// If no separator is found (single tag), len stays 0.
	while(tse != m_enter_pae->m_tags_storage)
	{
		if(*tse == 0)
		{
			len = tse - m_enter_pae->m_tags_storage + 1; // 1 is for the trailing zero
			break;
		}
		--tse;
	}
	// Find a buffered enter event whose first len tag bytes match.
	for(it = partial_tracers_list->begin(); it != partial_tracers_list->end(); ++it)
	{
		if(m_enter_pae->compare(*it, len) == true)
		{
			return *it;
		}
	}
	return NULL;
}
//
// Parse a JSON-formatted tracer event of the shape
//   [ "type", id, [tags...], [{"name":"val"}, ...] ]
// The parse is destructive: string terminators are written into m_storage and
// the extracted tag/arg pointers reference that buffer. The outcome is
// reported through m_res (RES_OK / RES_FAILED / RES_TRUNCATED).
//
inline void sinsp_tracerparser::parse_json(char* evtstr)
{
	// NOTE(review): parsing always starts from m_storage; the evtstr
	// parameter is unused here (callers pass m_storage anyway).
	char* p = m_storage;
	uint32_t delta;
	char* tstr;
	//
	// Skip the initial bracket
	//
	m_res = skip_spaces(p, &delta);
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	if(*(p++) != '[')
	{
		m_res = sinsp_tracerparser::RES_FAILED;
		return;
	}
	//
	// type
	//
	m_res = parsestr(p, &m_type_str, &delta);
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	//
	// ID: either a quoted scope shorthand ("t"=thread, "p"=process,
	// "pp"=parent process, ":"=global-zero, "g"=global) or a literal number.
	//
	m_res = skip_spaces_and_commas(p, &delta, 1);
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	if(*p == '"')
	{
		switch(*(++p))
		{
		case 't':
			m_id = m_tinfo->m_tid;
			delta = 2;
			break;
		case 'p':
			m_id = m_tinfo->m_pid;
			if(*(p + 1) == 'p')
			{
				m_id = m_tinfo->m_ptid;
				p++;
			}
			delta = 2;
			break;
		case ':':
			m_id = 0;
			delta = 1;
			break;
		case 'g':
			m_id = 0;
			delta = 2;
			break;
		default:
			m_res = sinsp_tracerparser::RES_FAILED;
			break;
		}
	}
	else
	{
		m_res = parsenumber(p, &m_id, &delta);
		if(m_res > sinsp_tracerparser::RES_COMMA)
		{
			return;
		}
	}
	p += delta;
	// parsenumber may have already consumed the comma after the ID
	// (RES_COMMA); in that case only spaces remain to skip.
	if(m_res == sinsp_tracerparser::RES_COMMA)
	{
		m_res = skip_spaces(p, &delta);
	}
	else
	{
		m_res = skip_spaces_and_commas(p, &delta, 1);
	}
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	//
	// First tag
	//
	m_res = skip_spaces_and_char(p, &delta, '[');
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	// The tag list may be empty, so a missing string is tolerated here.
	m_res = parsestr_not_enforce(p, &tstr, &delta);
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	if(tstr != NULL)
	{
		// delta includes the two quote characters; lengths exclude them.
		m_tags.push_back(tstr);
		m_taglens.push_back(delta - 2);
		m_tot_taglens += delta - 2;
		//
		// Remaining tags
		//
		while(true)
		{
			m_res = skip_spaces_and_commas(p, &delta, 0);
			if(m_res != sinsp_tracerparser::RES_OK)
			{
				return;
			}
			p += delta;
			if(*p == ']')
			{
				break;
			}
			m_res = parsestr(p, &tstr, &delta);
			if(m_res != sinsp_tracerparser::RES_OK)
			{
				return;
			}
			p += delta;
			m_tags.push_back(tstr);
			m_taglens.push_back(delta - 2);
			m_tot_taglens += delta - 2;
		}
	}
	//
	// First argument
	//
	m_res = skip_spaces_and_commas_and_all_brakets(p, &delta);
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	// The argument list may be empty as well.
	m_res = parsestr_not_enforce(p, &tstr, &delta);
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	if(tstr != NULL)
	{
		m_argnames.push_back(tstr);
		m_argnamelens.push_back(delta - 2);
		m_tot_argnamelens += delta - 2;
		// Each argument is a single-pair object: "name":"value".
		m_res = skip_spaces_and_char(p, &delta, ':');
		if(m_res != sinsp_tracerparser::RES_OK)
		{
			return;
		}
		p += delta;
		m_res = parsestr(p, &tstr, &delta);
		if(m_res != sinsp_tracerparser::RES_OK)
		{
			return;
		}
		p += delta;
		m_argvals.push_back(tstr);
		m_argvallens.push_back(delta - 2);
		m_tot_argvallens += delta - 2;
		//
		// Remaining arguments
		//
		while(true)
		{
			m_res = skip_spaces_and_commas_and_cr_brakets(p, &delta);
			if(m_res != sinsp_tracerparser::RES_OK)
			{
				return;
			}
			p += delta;
			if(*p == ']')
			{
				p++;
				break;
			}
			m_res = parsestr(p, &tstr, &delta);
			if(m_res != sinsp_tracerparser::RES_OK)
			{
				return;
			}
			p += delta;
			m_argnames.push_back(tstr);
			m_argnamelens.push_back(delta - 2);
			m_tot_argnamelens += delta - 2;
			m_res = skip_spaces_and_char(p, &delta, ':');
			if(m_res != sinsp_tracerparser::RES_OK)
			{
				return;
			}
			p += delta;
			m_res = parsestr(p, &tstr, &delta);
			if(m_res != sinsp_tracerparser::RES_OK)
			{
				return;
			}
			p += delta;
			m_argvals.push_back(tstr);
			m_argvallens.push_back(delta - 2);
			m_tot_argvallens += delta - 2;
		}
	}
	//
	// Terminating ]
	//
	m_res = skip_spaces(p, &delta);
	if(m_res != sinsp_tracerparser::RES_OK)
	{
		return;
	}
	p += delta;
	if(*p != ']')
	{
		// Hitting the end of the buffer here means the event is incomplete
		// rather than malformed.
		if(*p == 0)
		{
			m_res = sinsp_tracerparser::RES_TRUNCATED;
		}
		else
		{
			m_res = sinsp_tracerparser::RES_FAILED;
		}
		return;
	}
	m_res = sinsp_tracerparser::RES_OK;
	return;
}
//
// Remove the character at p from its NUL-terminated string by shifting
// everything after it (including the terminator) one position to the left.
//
inline void sinsp_tracerparser::delete_char(char* p)
{
	for(; *p != 0; ++p)
	{
		*p = *(p + 1);
	}
}
//
// Parse a tracer event in the compact text format:
//   {>|<}:scope:tag1.tag2...:name1=val1,name2=val2...:
// Backslash escapes are honored (the backslash is removed in place and the
// next character is taken literally). The parse is destructive: separators in
// evtstr are overwritten with NULs and the extracted pointers reference that
// buffer. The outcome is reported through m_res.
//
inline void sinsp_tracerparser::parse_simple(char* evtstr)
{
	char* p = evtstr;
	uint32_t delta;
	//
	// Extract the type ('>' enter / '<' exit), left as a one-char string.
	//
	m_type_str = p++;
	//
	// Skip to the scope/id
	//
	if(*p != ':')
	{
		if(*p == 0)
		{
			m_res = sinsp_tracerparser::RES_TRUNCATED;
		}
		else
		{
			m_res = sinsp_tracerparser::RES_FAILED;
		}
		return;
	}
	*p = 0;
	p++;
	//
	// Extract the scope: t=thread, p=process, pp=parent process, g=global,
	// ':'=zero, or a literal number terminated by ':'.
	//
	if(*p == '0')
	{
		// NOTE(review): a literal '0' here is treated as truncation — TODO
		// confirm this matches the producer's encoding of incomplete events.
		m_res = sinsp_tracerparser::RES_TRUNCATED;
		return;
	}
	switch(*p)
	{
	case 't':
		m_id = m_tinfo->m_tid;
		delta = 2;
		break;
	case 'p':
		m_id = m_tinfo->m_pid;
		if(*(p + 1) == 'p')
		{
			m_id = m_tinfo->m_ptid;
			p++;
		}
		delta = 2;
		break;
	case ':':
		m_id = 0;
		delta = 1;
		break;
	case 'g':
		m_id = 0;
		delta = 2;
		break;
	default:
		m_res = parsenumber_colend(p, &m_id, &delta);
		if(m_res > sinsp_tracerparser::RES_COMMA)
		{
			return;
		}
		break;
	}
	p += delta;
	//
	// Extract the tags ('.'-separated; terminated by ':')
	//
	if(*p == '0')
	{
		m_res = sinsp_tracerparser::RES_TRUNCATED;
		return;
	}
	if(*p != ':')
	{
		bool dont_interpret_next_char = false;
		while(true)
		{
			char* start = p;
			m_tags.push_back(p);
			while(*p != 0)
			{
				if(dont_interpret_next_char)
				{
					// Previous char was a backslash: take this one literally.
					dont_interpret_next_char = false;
					++p;
					continue;
				}
				if(*p == '\\')
				{
					ASSERT(dont_interpret_next_char == false);
					dont_interpret_next_char = true;
					// Remove the backslash in place; the escaped char slides
					// into the current position.
					delete_char(p);
					continue;
				}
				if(*p == '.' || *p == ':')
				{
					break;
				}
				// Characters that can never appear unescaped in a tag.
				if(*p == '>' || *p == '<' || *p == '=' || *p == '\n')
				{
					m_res = sinsp_tracerparser::RES_FAILED;
					return;
				}
				++p;
			}
			m_taglens.push_back((uint32_t)(p - start));
			m_tot_taglens += (uint32_t)(p - start);
			if(*p == ':')
			{
				// End of the tag section.
				*p = 0;
				break;
			}
			else if(*p == 0)
			{
				m_res = sinsp_tracerparser::RES_TRUNCATED;
				return;
			}
			else
			{
				// '.' separator: terminate this tag and continue.
				*p = 0;
				++p;
			}
		}
	}
	++p;
	//
	// Extract the arguments (','-separated name=value pairs; terminated by ':')
	//
	if(*p == 0)
	{
		m_res = sinsp_tracerparser::RES_TRUNCATED;
		return;
	}
	if(*p != ':')
	{
		bool dont_interpret_next_char = false;
		while(true)
		{
			char* start = p;
			//
			// Arg name
			//
			m_argnames.push_back(p);
			while(*p != 0)
			{
				if(dont_interpret_next_char)
				{
					dont_interpret_next_char = false;
					++p;
					continue;
				}
				if(*p == '\\')
				{
					ASSERT(dont_interpret_next_char == false);
					dont_interpret_next_char = true;
					delete_char(p);
					continue;
				}
				if(*p == '=')
				{
					break;
				}
				if(*p == '>' || *p == '<' || *p == '\n')
				{
					m_res = sinsp_tracerparser::RES_FAILED;
					return;
				}
				++p;
			}
			m_argnamelens.push_back((uint32_t)(p - start));
			m_tot_argnamelens += (uint32_t)(p - start);
			if(*p == 0)
			{
				if(*(p - 1) == ':')
				{
					//
					// This means there was an argument without value,
					// which we don't support
					//
					m_res = sinsp_tracerparser::RES_FAILED;
				}
				else
				{
					m_res = sinsp_tracerparser::RES_TRUNCATED;
				}
				break;
			}
			else
			{
				// Terminate the name over the '=' and move to the value.
				*p = 0;
				++p;
			}
			//
			// Arg vals
			//
			start = p;
			m_argvals.push_back(p);
			dont_interpret_next_char = false;
			while(*p != 0)
			{
				if(dont_interpret_next_char)
				{
					dont_interpret_next_char = false;
					++p;
					continue;
				}
				if(*p == '\\')
				{
					ASSERT(dont_interpret_next_char == false);
					dont_interpret_next_char = true;
					delete_char(p);
					continue;
				}
				if(*p == ',' || *p == ':' || *p == '=')
				{
					break;
				}
				++p;
			}
			m_argvallens.push_back((uint32_t)(p - start));
			m_tot_argvallens += (uint32_t)(p - start);
			if(*p == ':')
			{
				// End of the argument section: event complete.
				*p = 0;
				m_res = sinsp_tracerparser::RES_OK;
				break;
			}
			else if(*p == 0)
			{
				m_res = sinsp_tracerparser::RES_TRUNCATED;
				break;
			}
			else
			{
				// ',' separator: terminate this value and parse the next pair.
				*p = 0;
				++p;
			}
		}
	}
	//
	// All done
	//
	return;
}
//
// Advance past a run of spaces starting at p and report the number of skipped
// characters through *delta. Always returns RES_OK; whatever terminates the
// run (including the NUL terminator) is left for the caller to examine.
//
// NOTE(review): the original body contained an unreachable truncation check
// inside the loop (*p cannot be both ' ' and 0 at once); it has been removed
// with no behavior change — callers already handle a string that ends right
// after the spaces.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::skip_spaces(char* p, uint32_t* delta)
{
	char* start = p;
	while(*p == ' ')
	{
		p++;
	}
	*delta = (uint32_t)(p - start);
	return sinsp_tracerparser::RES_OK;
}
//
// Skip a run of spaces and commas. Fails (RES_FAILED) if fewer than
// n_expected_commas commas were seen; returns RES_TRUNCATED if the string
// ends inside the run. *delta receives the number of characters consumed.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::skip_spaces_and_commas(char* p, uint32_t* delta, uint32_t n_expected_commas)
{
	char* start = p;
	uint32_t nc = 0;	// commas encountered
	while(true)
	{
		if(*p == ' ')
		{
			p++;
			continue;
		}
		else if(*p == ',')
		{
			nc++;
		}
		else if(*p == 0)
		{
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		else
		{
			// First character that is neither space nor comma ends the run.
			break;
		}
		p++;
	}
	if(nc < n_expected_commas)
	{
		return sinsp_tracerparser::RES_FAILED;
	}
	*delta = (uint32_t)(p - start);
	return sinsp_tracerparser::RES_OK;
}
//
// Skip a run of spaces that must contain exactly one occurrence of
// char_to_skip. Returns RES_TRUNCATED at end of string, RES_FAILED if the
// expected character appears zero or multiple times. *delta receives the
// number of characters consumed.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::skip_spaces_and_char(char* p, uint32_t* delta, char char_to_skip)
{
	char* start = p;
	uint32_t nc = 0;	// occurrences of char_to_skip
	while(*p == ' ' || *p == char_to_skip || *p == 0)
	{
		if(*p == 0)
		{
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		else if(*p == char_to_skip)
		{
			nc++;
		}
		p++;
	}
	if(nc != 1)
	{
		return sinsp_tracerparser::RES_FAILED;
	}
	*delta = (uint32_t)(p - start);
	return sinsp_tracerparser::RES_OK;
}
//
// Skip spaces, commas and square brackets, expecting exactly one comma and
// one '['. A ']' stops the scan early once a '[' has been seen (empty list).
// Returns RES_TRUNCATED at end of string, RES_FAILED if the counts don't
// match. *delta receives the number of characters consumed.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::skip_spaces_and_commas_and_sq_brakets(char* p, uint32_t* delta)
{
	char* start = p;
	uint32_t nc = 0;	// commas
	uint32_t nosb = 0;	// opening square brackets
	while(*p == ' ' || *p == ',' || *p == '[' || *p == ']' || *p == 0)
	{
		if(*p == 0)
		{
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		else if(*p == ',')
		{
			nc++;
		}
		else if(*p == '[')
		{
			nosb++;
		}
		else if(*p == ']')
		{
			// Only stop on ']' after the opening '[' has been consumed.
			if(nosb != 0)
			{
				break;
			}
		}
		p++;
	}
	if(nc != 1 || nosb != 1)
	{
		return sinsp_tracerparser::RES_FAILED;
	}
	*delta = (uint32_t)(p - start);
	return sinsp_tracerparser::RES_OK;
}
//
// Skip spaces, commas and curly brackets between JSON argument objects.
// Accepts: comma + '{' (next argument follows), comma + '}' (object closed
// before the comma), or '}' followed by ']' (end of the argument list).
// Returns RES_TRUNCATED at end of string, RES_FAILED on any other shape.
// *delta receives the number of characters consumed.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::skip_spaces_and_commas_and_cr_brakets(char* p, uint32_t* delta)
{
	char* start = p;
	uint32_t nc = 0;	// commas
	uint32_t nocb = 0;	// opening curly brackets
	uint32_t nccb = 0;	// closing curly brackets
	while(*p == ' ' || *p == ',' || *p == '{' || *p == '}' || *p == 0)
	{
		if(*p == 0)
		{
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		else if(*p == ',')
		{
			nc++;
		}
		else if(*p == '{')
		{
			nocb++;
		}
		else if(*p == '}')
		{
			nccb++;
		}
		p++;
	}
	if(!((nc == 1 && nocb == 1) || (nc == 1 && nccb == 1) || (nccb == 1 && *p == ']')))
	{
		return sinsp_tracerparser::RES_FAILED;
	}
	*delta = (uint32_t)(p - start);
	return sinsp_tracerparser::RES_OK;
}
//
// Skip the separator between the tag list and the argument list: spaces,
// exactly one comma, exactly one '[', and normally one '{' opening the first
// argument object. A ']' right after the '[' (empty argument list) is also
// accepted. Returns RES_TRUNCATED at end of string, RES_FAILED otherwise.
// *delta receives the number of characters consumed.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::skip_spaces_and_commas_and_all_brakets(char* p, uint32_t* delta)
{
	char* start = p;
	uint32_t nc = 0;	// commas
	uint32_t nosb = 0;	// opening square brackets
	uint32_t nocb = 0;	// opening curly brackets
	while(*p == ' ' || *p == ',' || *p == '[' || *p == ']' || *p == '{' || *p == '}' || (*p == 0))
	{
		if(*p == 0)
		{
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		else if(*p == ',')
		{
			nc++;
		}
		else if(*p == '[')
		{
			nosb++;
		}
		else if(*p == ']')
		{
			// Stop on ']' once the opening '[' has been consumed.
			if(nosb != 0)
			{
				break;
			}
		}
		else if(*p == '{')
		{
			nocb++;
		}
		p++;
	}
	if(nc != 1 || nosb != 1)
	{
		return sinsp_tracerparser::RES_FAILED;
	}
	else if(nocb != 1)
	{
		// No '{' is only valid for an empty argument list, i.e. we stopped
		// on the closing ']'.
		if(*p != ']')
		{
			return sinsp_tracerparser::RES_FAILED;
		}
	}
	*delta = (uint32_t)(p - start);
	return sinsp_tracerparser::RES_OK;
}
//
// Parse a double-quoted string in place. On success *res points just past the
// opening quote inside the buffer, the closing quote is overwritten with a
// NUL, and *delta is the total number of characters consumed (quotes
// included). Escaped quotes (\") do not terminate the string. Returns
// RES_FAILED if p doesn't start with '"', RES_TRUNCATED if the string ends
// before the closing quote.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::parsestr(char* p, char** res, uint32_t* delta)
{
	char* initial = p;
	*res = NULL;
	//
	// Make sure that we start with a \"
	//
	if(*p != '"')
	{
		*delta = (uint32_t)(p - initial + 1);
		if(*p == 0)
		{
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		else
		{
			return sinsp_tracerparser::RES_FAILED;
		}
	}
	*res = p + 1;
	p++;
	//
	// Navigate to the end of the string (an unescaped closing quote)
	//
	while(!(*p == '\"' && *(p - 1) != '\\'))
	{
		if(*p == 0)
		{
			*delta = (uint32_t)(p - initial + 1);
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		p++;
	}
	// Terminate the extracted string over the closing quote.
	*p = 0;
	*delta = (uint32_t)(p - initial + 1);
	return sinsp_tracerparser::RES_OK;
}
//
// Like parsestr(), but tolerates a missing string: if no quoted string starts
// at p and the offending character is ']' (empty list), *res is set to NULL
// and RES_OK is returned. Truncation is propagated unchanged.
//
// NOTE(review): when parsestr() fails on a character other than ']' this
// still falls through to RES_OK with *res left NULL — callers appear to
// detect the error on the following token; confirm this is intentional.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::parsestr_not_enforce(char* p, char** res, uint32_t* delta)
{
	sinsp_tracerparser::parse_result psres = parsestr(p, res, delta);
	if(psres == sinsp_tracerparser::RES_FAILED)
	{
		if(*(p + *delta) == ']')
		{
			*res = NULL;
			return sinsp_tracerparser::RES_OK;
		}
	}
	else if(psres == sinsp_tracerparser::RES_TRUNCATED)
	{
		return psres;
	}
	return sinsp_tracerparser::RES_OK;
}
//
// Parse a (possibly negative) decimal integer in place. The terminator must
// be ',' (returns RES_COMMA), ' ' (returns RES_OK) or NUL (RES_TRUNCATED);
// any other character fails. On success the terminator is overwritten with a
// NUL, *res receives the value and *delta the characters consumed including
// the terminator.
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::parsenumber(char* p, int64_t* res, uint32_t* delta)
{
	char* start = p;
	sinsp_tracerparser::parse_result retval = sinsp_tracerparser::RES_OK;
	int64_t val = 0;
	bool negative = false;
	if(*p == '-')
	{
		negative = true;
		p++;
	}
	while(*p >= '0' && *p <= '9')
	{
		val = val * 10 + (*p - '0');
		p++;
	}
	if(*p == ',')
	{
		// Tell the caller the comma was consumed here.
		retval = sinsp_tracerparser::RES_COMMA;
	}
	else if(*p != 0 && *p != ' ')
	{
		return sinsp_tracerparser::RES_FAILED;
	}
	else if(*p == 0)
	{
		return sinsp_tracerparser::RES_TRUNCATED;
	}
	*p = 0;
	if(negative)
	{
		*res = -val;
	}
	else
	{
		*res = val;
	}
	*delta = (uint32_t)(p - start + 1);
	return retval;
}
//
// Parse a (possibly negative) decimal integer that must be terminated by a
// ':'. Returns RES_TRUNCATED at end of string, RES_FAILED on any other
// terminator. On success *res receives the value and *delta the characters
// consumed including the colon (which is left in place, not NUL-ed).
//
inline sinsp_tracerparser::parse_result sinsp_tracerparser::parsenumber_colend(char* p, int64_t* res, uint32_t* delta)
{
	char* start = p;
	int64_t val = 0;
	bool negative = false;
	if(*p == '-')
	{
		negative = true;
		p++;
	}
	while(*p >= '0' && *p <= '9')
	{
		val = val * 10 + (*p - '0');
		p++;
	}
	if(*p != ':')
	{
		if(*p == 0)
		{
			return sinsp_tracerparser::RES_TRUNCATED;
		}
		else
		{
			return sinsp_tracerparser::RES_FAILED;
		}
	}
	else
	{
		*delta = (uint32_t)(p - start + 1);
		if(negative)
		{
			*res = -val;
		}
		else
		{
			*res = val;
		}
		return sinsp_tracerparser::RES_OK;
	}
}
//
// Copy the parser's current event (id, tags, argument names/values) into the
// given sinsp_partial_tracer. Each string list is packed into the pae's own
// contiguous storage as NUL-separated entries with an extra trailing NUL, so
// the pae remains valid after m_storage is reused for the next event.
//
inline void sinsp_tracerparser::init_partial_tracer(sinsp_partial_tracer* pae)
{
	vector<char*>::iterator it;
	vector<uint32_t>::iterator sit;
	ASSERT(m_tinfo != NULL);
	pae->m_tid = m_tinfo->m_tid;
	//
	// Store the ID
	//
	pae->m_id = m_id;
	ASSERT(m_tags.size() == m_taglens.size());
	ASSERT(m_argnames.size() == m_argnamelens.size());
	ASSERT(m_argvals.size() == m_argvallens.size());
	//
	// Pack the tags
	//
	pae->m_tags.clear();
	pae->m_taglens.clear();
	pae->m_ntags = (uint32_t)m_tags.size();
	// Total payload + one NUL per entry + one trailing NUL.
	uint32_t encoded_tags_len = m_tot_taglens + pae->m_ntags + 1;
	if(pae->m_tags_storage_size < encoded_tags_len)
	{
		pae->m_tags_storage = (char*)realloc(pae->m_tags_storage, encoded_tags_len);
		pae->m_tags_storage_size = encoded_tags_len;
	}
	char* p = pae->m_tags_storage;
	for(it = m_tags.begin(), sit = m_taglens.begin();
		it != m_tags.end(); ++it, ++sit)
	{
		// +1 copies each string's NUL terminator along with its payload.
		memcpy(p, *it, (*sit) + 1);
		pae->m_tags.push_back(p);
		pae->m_taglens.push_back(*sit);
		p += (*sit) + 1;
	}
	*p++ = 0;
	pae->m_tags_len = (uint32_t)(p - pae->m_tags_storage);
	//
	// Pack the argnames
	//
	pae->m_argnames.clear();
	pae->m_argnamelens.clear();
	pae->m_nargs = (uint32_t)m_argnames.size();
	uint32_t encoded_argnames_len = m_tot_argnamelens + pae->m_nargs + 1;
	if(pae->m_argnames_storage_size < encoded_argnames_len)
	{
		pae->m_argnames_storage = (char*)realloc(pae->m_argnames_storage, encoded_argnames_len);
		pae->m_argnames_storage_size = encoded_argnames_len;
	}
	p = pae->m_argnames_storage;
	for(it = m_argnames.begin(), sit = m_argnamelens.begin();
		it != m_argnames.end(); ++it, ++sit)
	{
		memcpy(p, *it, (*sit) + 1);
		pae->m_argnames.push_back(p);
		pae->m_argnamelens.push_back(*sit);
		p += (*sit) + 1;
	}
	*p++ = 0;
	pae->m_argnames_len = (uint32_t)(p - pae->m_argnames_storage);
	//
	// Pack the argvals
	//
	pae->m_argvals.clear();
	pae->m_argvallens.clear();
	uint32_t encoded_argvals_len = m_tot_argvallens + pae->m_nargs + 1;
	if(pae->m_argvals_storage_size < encoded_argvals_len)
	{
		pae->m_argvals_storage = (char*)realloc(pae->m_argvals_storage, encoded_argvals_len);
		pae->m_argvals_storage_size = encoded_argvals_len;
	}
	p = pae->m_argvals_storage;
	for(it = m_argvals.begin(), sit = m_argvallens.begin();
		it != m_argvals.end(); ++it, ++sit)
	{
		memcpy(p, *it, (*sit) + 1);
		pae->m_argvals.push_back(p);
		pae->m_argvallens.push_back(*sit);
		p += (*sit) + 1;
	}
	*p++ = 0;
	pae->m_argvals_len = (uint32_t)(p - pae->m_argvals_storage);
}
//
// Ad-hoc micro-benchmark / smoke test: feeds a canned JSON tracer event
// through process_event_data() 60M times (alternating timestamps) and prints
// the CPU time. Not part of the normal event path.
//
void sinsp_tracerparser::test()
{
	// char doc[] = "[\">\\\"\", 12435, [\"mysql\", \"query\", \"init\"], [{\"argname1\":\"argval1\"}, {\"argname2\":\"argval2\"}, {\"argname3\":\"argval3\"}]]";
	// char doc1[] = "[\"<t\", 12435, [\"mysql\", \"query\", \"init\"], []]";
	char doc1[] = "[\">\", 12345, [\"mysql\", \"query\", \"init\"], [{\"argname1\":\"argval1\"}, {\"argname2\":\"argval2\"}, {\"argname3\":\"argval3\"}]]";
	// char doc1[] = ">:1111:u\\:\\=a.u\\:\\>.aaa.33.aa\\::a=b\\:\\=,c=d\\:\\=a:";
	// Use a synthetic thread info so the scope shorthands resolve.
	sinsp_threadinfo tinfo;
	m_tinfo = &tinfo;
	tinfo.m_ptid = 11;
	tinfo.m_pid = 22;
	tinfo.m_tid = 33;
	printf("1\n");
	float cpu_time = ((float)clock ()) / CLOCKS_PER_SEC;
	for(uint64_t j = 0; j < 30000000; j++)
	{
		process_event_data(doc1, sizeof(doc1) - 1, 10);
		if(m_res != sinsp_tracerparser::RES_OK)
		{
			printf("ERROR\n");
		}
		process_event_data(doc1, sizeof(doc1) - 1, 20);
		if(m_res != sinsp_tracerparser::RES_OK)
		{
			printf("ERROR\n");
		}
	}
	cpu_time = ((float)clock()/ CLOCKS_PER_SEC) - cpu_time;
	printf ("time: %5.2f\n", cpu_time);
}
|
apache-2.0
|
dcarbone/php-fhir-generated
|
src/DCarbone/PHPFHIRGenerated/STU3/FHIRCodePrimitive/FHIRGuidePageKindList.php
|
8147
|
<?php
namespace DCarbone\PHPFHIRGenerated\STU3\FHIRCodePrimitive;
/*!
* This class was generated with the PHPFHIR library (https://github.com/dcarbone/php-fhir) using
* class definitions from HL7 FHIR (https://www.hl7.org/fhir/)
*
* Class creation date: December 26th, 2019 15:43+0000
*
* PHPFHIR Copyright:
*
* Copyright 2016-2019 Daniel Carbone (daniel.p.carbone@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* FHIR Copyright Notice:
*
* Copyright (c) 2011+, HL7, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of HL7 nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* Generated on Wed, Apr 19, 2017 07:44+1000 for FHIR v3.0.1
*
* Note: the schemas & schematrons do not contain all of the rules about what makes resources
* valid. Implementers will still need to be familiar with the content of the specification and with
* any profiles that apply to the resources in order to make a conformant implementation.
*
*/
use DCarbone\PHPFHIRGenerated\STU3\FHIRCodePrimitive;
use DCarbone\PHPFHIRGenerated\STU3\PHPFHIRConstants;
use DCarbone\PHPFHIRGenerated\STU3\PHPFHIRTypeInterface;
/**
* Class FHIRGuidePageKindList
* @package \DCarbone\PHPFHIRGenerated\STU3\FHIRCodePrimitive
*/
/**
 * Generated FHIR STU3 primitive wrapper for the GuidePageKind-list code type.
 * Restricts the value to the GuidePageKind enumeration via $_validationRules.
 */
class FHIRGuidePageKindList extends FHIRCodePrimitive
{
    // name of FHIR type this class describes
    const FHIR_TYPE_NAME = PHPFHIRConstants::TYPE_NAME_GUIDE_PAGE_KIND_HYPHEN_LIST;

    /** @var string */
    private $_xmlns = 'http://hl7.org/fhir';

    /**
     * Validation map for fields in type GuidePageKind-list
     * @var array
     */
    private static $_validationRules = [
        self::FIELD_VALUE => [
            PHPFHIRConstants::VALIDATE_ENUM => ['page','example','list','include','directory','dictionary','toc','resource',],
        ],
    ];

    /**
     * FHIRGuidePageKindList Constructor
     * @param null|string $value
     */
    public function __construct($value = null)
    {
        parent::__construct($value);
    }

    /**
     * @return string
     */
    public function _getFHIRTypeName()
    {
        return self::FHIR_TYPE_NAME;
    }

    /**
     * Builds the empty XML element (with namespace attribute, if set) used as
     * the serialization root for this type.
     * @return string
     */
    public function _getFHIRXMLElementDefinition()
    {
        $xmlns = $this->_getFHIRXMLNamespace();
        if (null !== $xmlns) {
            $xmlns = " xmlns=\"{$xmlns}\"";
        }
        return "<GuidePageKind_list{$xmlns}></GuidePageKind_list>";
    }

    /**
     * Returns the validation rules that this type's fields must comply with to be considered "valid"
     * The returned array is in ["fieldname[.offset]" => ["rule" => {constraint}]]
     *
     * @return array
     */
    public function _getValidationRules()
    {
        return self::$_validationRules;
    }

    /**
     * Validates that this type conforms to the specifications set forth for it by FHIR. An empty array must be seen as
     * passing.
     *
     * @return array
     */
    public function _getValidationErrors()
    {
        $errs = parent::_getValidationErrors();
        $validationRules = $this->_getValidationRules();
        if (isset($validationRules[self::FIELD_VALUE])) {
            $v = $this->getValue();
            foreach($validationRules[self::FIELD_VALUE] as $rule => $constraint) {
                $err = $this->_performValidation(PHPFHIRConstants::TYPE_NAME_CODE_HYPHEN_PRIMITIVE, self::FIELD_VALUE, $rule, $constraint, $v);
                if (null !== $err) {
                    if (!isset($errs[self::FIELD_VALUE])) {
                        $errs[self::FIELD_VALUE] = [];
                    }
                    $errs[self::FIELD_VALUE][$rule] = $err;
                }
            }
        }
        return $errs;
    }

    /**
     * @param \SimpleXMLElement|string|null $sxe
     * @param null|\DCarbone\PHPFHIRGenerated\STU3\FHIRCodePrimitive\FHIRGuidePageKindList $type
     * @param null|int $libxmlOpts
     * @return null|\DCarbone\PHPFHIRGenerated\STU3\FHIRCodePrimitive\FHIRGuidePageKindList
     */
    public static function xmlUnserialize($sxe = null, PHPFHIRTypeInterface $type = null, $libxmlOpts = 591872)
    {
        if (null === $sxe) {
            return null;
        }
        if (is_string($sxe)) {
            libxml_use_internal_errors(true);
            $sxe = new \SimpleXMLElement($sxe, $libxmlOpts, false);
            // NOTE(review): the SimpleXMLElement constructor throws on invalid
            // XML rather than returning false, so this branch appears
            // unreachable — confirm against the generator template.
            if ($sxe === false) {
                throw new \DomainException(sprintf('FHIRGuidePageKindList::xmlUnserialize - String provided is not parseable as XML: %s', implode(', ', array_map(function(\libXMLError $err) { return $err->message; }, libxml_get_errors()))));
            }
            libxml_use_internal_errors(false);
        }
        if (!($sxe instanceof \SimpleXMLElement)) {
            throw new \InvalidArgumentException(sprintf('FHIRGuidePageKindList::xmlUnserialize - $sxe value must be null, \\SimpleXMLElement, or valid XML string, %s seen', gettype($sxe)));
        }
        if (null === $type) {
            $type = new FHIRGuidePageKindList;
        } elseif (!is_object($type) || !($type instanceof FHIRGuidePageKindList)) {
            throw new \RuntimeException(sprintf(
                'FHIRGuidePageKindList::xmlUnserialize - $type must be instance of \DCarbone\PHPFHIRGenerated\STU3\FHIRCodePrimitive\FHIRGuidePageKindList or null, %s seen.',
                is_object($type) ? get_class($type) : gettype($type)
            ));
        }
        FHIRCodePrimitive::xmlUnserialize($sxe, $type);
        // Adopt the document's default namespace, if one is declared.
        $xmlNamespaces = $sxe->getDocNamespaces(false, false);
        if ([] !== $xmlNamespaces) {
            $ns = reset($xmlNamespaces);
            if (false !== $ns && '' !== $ns) {
                $type->_xmlns = $ns;
            }
        }
        return $type;
    }

    /**
     * @param null|\SimpleXMLElement $sxe
     * @param null|int $libxmlOpts
     * @return \SimpleXMLElement
     */
    public function xmlSerialize(\SimpleXMLElement $sxe = null, $libxmlOpts = 591872)
    {
        if (null === $sxe) {
            $sxe = new \SimpleXMLElement($this->_getFHIRXMLElementDefinition(), $libxmlOpts, false);
        }
        parent::xmlSerialize($sxe);
        return $sxe;
    }
}
|
apache-2.0
|
liuyuanyuan/dbeaver
|
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/tools/project/ProjectExportData.java
|
2174
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.tools.project;
import org.jkiss.dbeaver.model.app.DBPProject;
import org.jkiss.dbeaver.model.app.DBPWorkspace;
import org.jkiss.dbeaver.model.connection.DBPDriver;
import org.jkiss.utils.xml.XMLBuilder;
import java.io.File;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipOutputStream;
/**
 * Value holder describing a project export operation: which projects to
 * export, where to write the archive, and (once the export starts) the
 * workspace, metadata builder and archive stream in use.
 */
class ProjectExportData {

    private List<DBPProject> projects;
    private File outputFolder;
    private boolean exportDrivers;
    private String archiveFileName;

    // Export-time state, populated by initExport(); package-visible so the
    // exporter can access it directly.
    DBPWorkspace workspace;
    XMLBuilder meta;
    ZipOutputStream archiveStream;
    // Drivers referenced by exported connections, collected during the export.
    Set<DBPDriver> usedDrivers = new HashSet<>();

    public ProjectExportData(List<DBPProject> projects, File outputFolder, boolean exportDrivers, String archiveFileName)
    {
        this.projects = projects;
        this.outputFolder = outputFolder;
        this.exportDrivers = exportDrivers;
        this.archiveFileName = archiveFileName;
    }

    /**
     * Attaches the runtime objects needed while the export is in progress.
     */
    void initExport(DBPWorkspace workspace, XMLBuilder meta, ZipOutputStream archiveStream)
    {
        this.workspace = workspace;
        this.meta = meta;
        this.archiveStream = archiveStream;
    }

    public List<DBPProject> getProjectsToExport()
    {
        return projects;
    }

    public File getOutputFolder()
    {
        return outputFolder;
    }

    public boolean isExportDrivers()
    {
        return exportDrivers;
    }

    public String getArchiveFileName()
    {
        return archiveFileName;
    }
}
|
apache-2.0
|
yamamoto-febc/usacloud
|
vendor/github.com/sacloud/libsacloud/v2/helper/service/cdrom/read_service.go
|
1057
|
// Copyright 2016-2021 The Libsacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cdrom
import (
"context"
"github.com/sacloud/libsacloud/v2/sacloud"
)
// Read fetches the CD-ROM resource described by req using a background
// context. It is a convenience wrapper around ReadWithContext.
func (s *Service) Read(req *ReadRequest) (*sacloud.CDROM, error) {
	return s.ReadWithContext(context.Background(), req)
}
// ReadWithContext validates req and then reads the CD-ROM identified by the
// request's Zone and ID through the sacloud CD-ROM API. Validation errors are
// returned before any API call is made.
func (s *Service) ReadWithContext(ctx context.Context, req *ReadRequest) (*sacloud.CDROM, error) {
	if err := req.Validate(); err != nil {
		return nil, err
	}
	client := sacloud.NewCDROMOp(s.caller)
	return client.Read(ctx, req.Zone, req.ID)
}
|
apache-2.0
|
jdcasey/pnc
|
build-coordinator/src/test/java/org/jboss/pnc/coordinator/test/configuration/ConfigurationTest.java
|
2645
|
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2018 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.coordinator.test.configuration;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.pnc.common.Configuration;
import org.jboss.pnc.common.json.ConfigurationParseException;
import org.jboss.pnc.common.json.moduleconfig.OpenshiftEnvironmentDriverModuleConfig;
import org.jboss.pnc.common.json.moduleprovider.PncConfigProvider;
import org.jboss.pnc.spi.exception.CoreException;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.lang.invoke.MethodHandles;
/**
* @author <a href="mailto:matejonnet@gmail.com">Matej Lazar</a>
*/
@RunWith(Arquillian.class)
public class ConfigurationTest {

    public static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    /**
     * Builds the in-container test archive: the configuration classes under
     * test plus an empty beans.xml so CDI injection works, and the test
     * logback configuration.
     */
    @Deployment
    public static JavaArchive createDeployment() {
        JavaArchive jar = ShrinkWrap.create(JavaArchive.class)
                .addClass(Configuration.class)
                .addClass(OpenshiftEnvironmentDriverModuleConfig.class)
                .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml")
                .addAsResource("logback-test.xml", "logback.xml");
        logger.debug(jar.toString(true));
        return jar;
    }

    @Inject
    private Configuration configuration;

    /**
     * Verifies that the OpenShift environment driver module configuration can
     * be parsed and that the driver is not disabled.
     */
    @Test
    public void isEnvDriverEnabled() throws CoreException, ConfigurationParseException {
        OpenshiftEnvironmentDriverModuleConfig openShiftConfig =
                configuration.getModuleConfig(new PncConfigProvider<>(OpenshiftEnvironmentDriverModuleConfig.class));
        // assertFalse reads more directly than assertTrue with a negated condition.
        Assert.assertFalse("Environment driver disabled.", openShiftConfig.isDisabled());
    }
}
|
apache-2.0
|
william-taylor/world-cup-manager
|
app/app/src/main/java/application/drawables/MatchesObject.java
|
6764
|
package application.drawables;
import android.view.MotionEvent;
import framework.IRenderable;
import framework.core.*;
import application.events.*;
import framework.graphics.Button;
import framework.graphics.Font;
import framework.graphics.Image;
/**
 * Draws and manages the interactive fixture list for one group: six matches,
 * each with two team flags and two score buttons, plus an "Auto-Complete"
 * button (its click event is currently commented out).
 */
public class MatchesObject implements IRenderable {
    // 12 slots = 6 fixtures x 2 sides (home/away).
    private final static Integer NUMBER_OF_TEAMS = 12;
    private final static Integer EVENT_COUNT = 6;
    // Pixel offsets relative to the owning GroupObject.
    private final static Integer Y_MARGIN = 130;
    private final static Integer X_MARGIN = 30;

    // One click handler per button slot; one MatchEvent per fixture, shared
    // by both of that fixture's buttons.
    private ClickEvent[] events = new ClickEvent[NUMBER_OF_TEAMS];
    private MatchEvent[] event = new MatchEvent[EVENT_COUNT];
    private Button[] buttons = new Button[NUMBER_OF_TEAMS];
    private Image[] flags = new Image[NUMBER_OF_TEAMS];
    // Team names supplied by the owning group.
    private String[] names;

    //private ClickEvent completedEvent;
    private Button completeButton;

    /**
     * Builds flags, score buttons and match events laid out around the given
     * group panel.
     *
     * NOTE(review): the switch tables below hard-code the fixture schedule
     * (which team index appears in which slot) - confirm against the intended
     * group-stage pairings before changing any of them.
     */
    public MatchesObject(GroupObject group) {
        Font font = Font.get("tiny");
        // Start to the right of the group panel.
        Integer x = group.getX() + group.getWidth() + X_MARGIN;
        Integer y = (int)(group.getY()) + Y_MARGIN;
        names = group.getTeamNames();
        // First pass: the "home" side of each of the 6 fixtures.
        for(int i = 0; i < 6; i++) {
            int number = i;
            // Fixture slot -> index into names[] (home team of fixture i).
            switch(number){
                case 0: number = 1; break;
                case 1: number = 3; break;
                case 2: number = 2; break;
                case 3: number = 1; break;
                case 5: number = 2; break;
                default: number = 0; break;
            }
            // load the flag of the team
            flags[i] = new Image("sprites/" + names[number] + ".bmp");
            flags[i].setPosition((int)(x + 70), (int) (y), 50, 50);
            // load the button that stores the score
            buttons[i] = new Button(font);
            buttons[i].setSprite("sprites/button.png", x+15, y, 50, 50);
            // at certain points increment either x or y
            // (two fixtures per column, then move one column right)
            if(i == 1 || i == 3 || i == 5) {
                y = (int) (group.getY()) + 130;
                x += 240;
            } else {
                y -= 60;
            }
        }
        // reset position to the second load of matches
        x = group.getX() + group.getWidth() - 70;
        y = (int) (group.getY() - 70.0F);

        completeButton = new Button(font);
        completeButton.setSprite("sprites/button2.png", 780, group.getY() - 25, 220, 60);
        completeButton.setText("Auto-Complete", 890, group.getY() - 10, 200, 50);
        completeButton.setTextColour(1f, 1f, 0f, 1f);

        //completedEvent = new ClickEvent(completeButton);
        //completedEvent.eventType(new AutoCompleteEvent(event));

        // finish loading all the flags and button
        // Second pass: the "away" side of each fixture (slots 6..11).
        for(int i = 6; i < 12; i++) {
            int number = i - 6;
            // Fixture slot -> index into names[] (away team of fixture i-6).
            switch(number){
                case 1: number = 2; break;
                case 3: number = 3; break;
                case 4: number = 3; break;
                case 5: number = 1; break;
                default: number = 0; break;
            }
            flags[i] = new Image("sprites/" + names[number] + ".bmp");
            flags[i].setPosition(x+5, (int) (y + 200), 50, 50);
            buttons[i] = new Button(font);
            buttons[i].setSprite("sprites/button.png", (int)(x + 60), y + 200, 50, 50);
            if(i == 7 || i == 9 || i == 11) {
                y = (int) (group.getY() - 70.0F);
                x += 240;
            } else {
                y -= 60;
            }
        }
        // Third pass: wire each fixture's MatchEvent to both of its buttons.
        // `team`/`opponent` repeat the home/away mappings used above.
        for(int i = 0; i < 6; i++) {
            int opponent = i;
            int team = i;
            switch(opponent){
                case 1: opponent = 2; break;
                case 3: opponent = 3; break;
                case 4: opponent = 3; break;
                case 5: opponent = 1; break;
                default: opponent = 0; break;
            }
            switch(team){
                case 0: team = 1; break;
                case 1: team = 3; break;
                case 2: team = 2; break;
                case 3: team = 1; break;
                case 5: team = 2; break;
                default: team = 0; break;
            }
            // Match day for the fixture: fixtures 0-1 day 1, 2-3 day 2, rest day 3.
            Integer day = 3;
            switch(i) {
                case 0: day = 1; break;
                case 1: day = 1; break;
                case 2: day = 2; break;
                case 3: day = 2; break;
                default: break;
            }
            event[i] = new MatchEvent(buttons[i], buttons[i+6], group.getTeam(team), group.getTeam(opponent), day);
            events[i+6] = new ClickEvent(buttons[i+6]);
            events[i+6].eventType(event[i]);
            events[i] = new ClickEvent(buttons[i]);
            events[i].eventType(event[i]);
        }
    }

    /** Clears all match results and hides the score text on every button. */
    public void restart() {
        for(int i = 0; i < 6; i++) {
            event[i].Reset();
        }
        for(int i = 0; i < 12; i++) {
            buttons[i].hideText();
        }
    }

    /** Resets the visual state of every button and flag (not match results). */
    public void reset() {
        completeButton.reset();
        for(int i = 0; i < 12; i++) {
            buttons[i].reset();
            flags[i].reset();
        }
    }

    /** Forwards touch-down events to every score-button click handler. */
    public void onTouch(MotionEvent e, float x, float y) {
        if(e.getAction() == MotionEvent.ACTION_DOWN) {
            //completedEvent.OnTouch(e, x, y);
            for(int i = 0; i < events.length; i++) {
                events[i].OnTouch(e, x, y);
            }
        }
    }

    /** Scrolls all buttons and flags vertically by {@code y} pixels. */
    public void update(Integer y) {
        for(int i = 0; i < 12; i++) {
            buttons[i].translate(0, y);
            buttons[i].update();
            flags[i].translate(0, y);
            flags[i].update();
        }
        //completeButton.translate(0, y);
        //completeButton.update();
    }

    /** Forwards long-press (down) events to every score-button click handler. */
    public void onLongPress(MotionEvent e, int x, int y) {
        if(e.getAction() == MotionEvent.ACTION_DOWN) {
            //completedEvent.onLongPress(e, x, y);
            for(int i = 0; i < events.length; i++) {
                events[i].onLongPress(e, x, y);
            }
        }
    }

    /** Registers all click handlers when this screen becomes active. */
    public void onEnter() {
        EventManager.get().addListeners(events);
        //EventManager.get().addListener(completedEvent);
    }

    /** Unregisters all click handlers when this screen is left. */
    public void onExit() {
        EventManager eventsMgr = EventManager.get();
        //eventsMgr.removeListener(completedEvent);
        for(int i = 0; i < events.length; i++) {
            eventsMgr.removeListener(events[i]);
        }
    }

    public Button[] getButtons() {
        return this.buttons;
    }

    public Image[] getFlags() {
        return this.flags;
    }

    /** Draws every score button and team flag. */
    @Override
    public void render() {
        for(int i = 0; i < 12; i++) {
            buttons[i].render();
            flags[i].render();
        }
        //completeButton.render();
    }

    public Object getCompleteButton() {
        return completeButton;
    }
}
|
apache-2.0
|
macisamuele/GoogleCloudMessaging
|
Demo/GoogleCloudMessaging-Android/app/src/main/java/it/macisamuele/googlecloudmessaging/GCMIntentService.java
|
1859
|
package it.macisamuele.googlecloudmessaging;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.support.v4.app.NotificationCompat;
import com.android.google.gcm.GCMBaseIntentService;
/**
 * Receives GCM callbacks and surfaces each event as a status-bar notification
 * that opens {@link MainActivity} when tapped.
 */
public class GCMIntentService extends GCMBaseIntentService {

    public static final int NOTIFICATION_ID = 1;

    public GCMIntentService() {
        super(GCMIntentService.class.getName());
    }

    @Override
    protected void onSendError() {
        sendNotification("Send error: ");
    }

    @Override
    protected void onMessageDeleted(int total) {
        sendNotification("Deleted messages on server: " + total);
    }

    @Override
    protected void onMessageReceived(Intent intent) {
        sendNotification("Received: " + intent.getExtras().toString());
    }

    /**
     * Posts {@code msg} as a big-text notification under {@link #NOTIFICATION_ID}.
     */
    private void sendNotification(String msg) {
        PendingIntent openMain = PendingIntent.getActivity(
                this, 0, new Intent(this, MainActivity.class), 0);

        NotificationCompat.Builder builder = new NotificationCompat.Builder(this);
        builder.setSmallIcon(R.mipmap.ic_launcher);
        builder.setContentTitle("GCM Notification");
        builder.setStyle(new NotificationCompat.BigTextStyle().bigText(msg));
        builder.setContentText(msg);
        builder.setContentIntent(openMain);

        NotificationManager notificationManager =
                (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE);
        notificationManager.notify(NOTIFICATION_ID, builder.build());
    }
}
|
apache-2.0
|
tupunco/Tup.Cobar4Net
|
Tup.Cobar4Net/Parser/Ast/Stmt/Mts/MTSReleaseStatement.cs
|
1316
|
/*
* Copyright 1999-2012 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary;
using Tup.Cobar4Net.Parser.Visitor;
namespace Tup.Cobar4Net.Parser.Ast.Stmt.Mts
{
/// <author>
/// <a href="mailto:shuo.qius@alibaba-inc.com">QIU Shuo</a>
/// </author>
public class MTSReleaseStatement : ISqlStatement
{
public MTSReleaseStatement(Identifier savepoint)
{
if (savepoint == null)
{
throw new ArgumentException("savepoint is null");
}
Savepoint = savepoint;
}
public virtual Identifier Savepoint { get; }
public virtual void Accept(ISqlAstVisitor visitor)
{
visitor.Visit(this);
}
}
}
|
apache-2.0
|
PurelyApplied/geode
|
geode-core/src/integrationTest/java/org/apache/geode/internal/logging/LoggingWithLocatorLauncherIntegrationTest.java
|
2328
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.logging;
import static org.apache.geode.internal.logging.Banner.BannerHeader.displayValues;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.distributed.LocatorLauncher;
import org.apache.geode.distributed.LocatorLauncherIntegrationTestCase;
import org.apache.geode.internal.process.ProcessControllerFactory;
import org.apache.geode.internal.process.ProcessType;
import org.apache.geode.test.assertj.LogFileAssert;
import org.apache.geode.test.junit.categories.LoggingTest;
/**
 * Integration tests of logging with {@link LocatorLauncher}.
 */
@Category(LoggingTest.class)
public class LoggingWithLocatorLauncherIntegrationTest extends LocatorLauncherIntegrationTestCase {

    @Before
    public void setUp() throws Exception {
        // NOTE(review): the unique prefix presumably isolates this test's
        // process-control files from concurrent tests - confirm.
        System.setProperty(ProcessType.PROPERTY_TEST_PREFIX, getUniqueName() + "-");
        // The attach API must be available before launching the locator.
        assertThat(new ProcessControllerFactory().isAttachAPIFound()).isTrue();
        givenRunningLocator();
    }

    @After
    public void tearDown() throws Exception {
        disconnectFromDS();
    }

    @Test
    public void logFileExists() {
        assertThat(getLogFile()).exists();
    }

    @Test
    public void logFileContainsBanner() {
        LogFileAssert.assertThat(getLogFile()).contains(displayValues());
    }

    @Test
    public void logFileContainsBannerOnlyOnce() {
        // Guards against the banner being logged twice during startup.
        LogFileAssert.assertThat(getLogFile()).containsOnlyOnce(displayValues());
    }
}
|
apache-2.0
|
jcmoraisjr/haproxy-ingress
|
pkg/haproxy/types/backends.go
|
8274
|
/*
Copyright 2020 The HAProxy Ingress Controller Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package types
import (
	"crypto/md5"
	"encoding/binary"
	"fmt"
	"reflect"
	"sort"
	"strconv"
	"strings"
)
// CreateBackends builds an empty Backends collection with shardCount
// pre-allocated (possibly zero) shard maps.
func CreateBackends(shardCount int) *Backends {
	backends := &Backends{
		items:         map[string]*Backend{},
		itemsAdd:      map[string]*Backend{},
		itemsDel:      map[string]*Backend{},
		authBackends:  map[string]*Backend{},
		shards:        make([]map[string]*Backend, shardCount),
		changedShards: map[int]bool{},
	}
	for i := 0; i < shardCount; i++ {
		backends.shards[i] = map[string]*Backend{}
	}
	return backends
}
// Items returns the current set of backends, keyed by backend ID.
func (b *Backends) Items() map[string]*Backend {
	return b.items
}

// ItemsAdd returns the backends added since the last Commit, keyed by ID.
func (b *Backends) ItemsAdd() map[string]*Backend {
	return b.itemsAdd
}

// ItemsDel returns the backends removed since the last Commit, keyed by ID.
func (b *Backends) ItemsDel() map[string]*Backend {
	return b.itemsDel
}
// Shrink compares deleted and added backends with the same name - ie changed
// objects - and remove both from the changing hashmap tracker when they match.
// A backend present in both itemsDel and itemsAdd with equivalent content
// (see backendsMatch) did not effectively change and need not be tracked.
func (b *Backends) Shrink() {
	changed := false
	for name, del := range b.itemsDel {
		if add, found := b.itemsAdd[name]; found {
			// NOTE(review): the length guard restricts the match to backends
			// whose endpoint count did not grow - confirm the intent.
			if len(add.Endpoints) <= len(del.Endpoints) && backendsMatch(add, del) {
				// Such changed backend, when removed from the tracking, need to
				// be reincluded into the current state hashmap `items` and also
				// into its shard hashmap when backend sharding is enabled.
				if len(b.shards) > 0 {
					b.shards[del.shard][del.ID] = del
				}
				b.items[name] = del
				delete(b.itemsAdd, name)
				delete(b.itemsDel, name)
				changed = true
			}
		}
	}
	// Backends removed from the changing tracker might clean a shard state if it
	// was the only one changed into the shard. Recalc changedShards if anything
	// was changed.
	if changed {
		b.changedShards = map[int]bool{}
		for _, back := range b.itemsAdd {
			b.BackendChanged(back)
		}
		for _, back := range b.itemsDel {
			b.BackendChanged(back)
		}
	}
}
// backendsMatch returns true if two backends match. This comparison
// ignores empty endpoints and its order and it's cheaper than leave
// the backend dirty.
func backendsMatch(back1, back2 *Backend) bool {
	// Fast path: fully identical backends.
	if reflect.DeepEqual(back1, back2) {
		return true
	}
	// Compare all fields except paths config and endpoints by overwriting
	// those fields on a shallow copy of back1 with back2's values.
	b1copy := *back1
	b1copy.PathsMap = back2.PathsMap
	b1copy.pathConfig = back2.pathConfig
	b1copy.Endpoints = back2.Endpoints
	if !reflect.DeepEqual(&b1copy, back2) {
		return false
	}
	// Endpoints are compared as an unordered set, skipping empty ones.
	// The map value records whether the endpoint was also seen in back2.
	epmap := make(map[Endpoint]bool, len(back1.Endpoints))
	for _, ep := range back1.Endpoints {
		if !ep.IsEmpty() {
			epmap[*ep] = false
		}
	}
	for _, ep := range back2.Endpoints {
		if !ep.IsEmpty() {
			// An endpoint present only in back2 is a mismatch.
			if _, found := epmap[*ep]; !found {
				return false
			}
			epmap[*ep] = true
		}
	}
	// Any endpoint of back1 not matched by back2 is a mismatch.
	for _, found := range epmap {
		if !found {
			return false
		}
	}
	return true
}
// Commit clears the add/del/changed trackers, making the current state the
// baseline for the next round of updates.
func (b *Backends) Commit() {
	b.itemsAdd = map[string]*Backend{}
	b.itemsDel = map[string]*Backend{}
	b.changedShards = map[int]bool{}
}

// Changed reports whether any backend was added or removed since last Commit.
func (b *Backends) Changed() bool {
	return len(b.itemsAdd) > 0 || len(b.itemsDel) > 0
}

// BackendChanged flags the shard owning the backend as changed.
func (b *Backends) BackendChanged(backend *Backend) {
	b.changedShards[backend.shard] = true
}

// ChangedShards returns the sorted list of shard indices flagged as changed.
func (b *Backends) ChangedShards() []int {
	changed := []int{}
	for i, c := range b.changedShards {
		if c {
			changed = append(changed, i)
		}
	}
	sort.Ints(changed)
	return changed
}
// FillSourceIPs fills the source IPs of every backend added since last Commit.
func (b *Backends) FillSourceIPs() {
	for _, backend := range b.itemsAdd {
		backend.fillSourceIPs()
	}
}

// SortChangedEndpoints sorts the endpoints of every backend added since last
// Commit, according to the given criteria.
func (b *Backends) SortChangedEndpoints(sortBy string) {
	for _, backend := range b.itemsAdd {
		backend.sortEndpoints(sortBy)
	}
}

// ShuffleAllEndpoints shuffles the endpoints of every backend, changed or not.
func (b *Backends) ShuffleAllEndpoints() {
	for _, backend := range b.items {
		backend.shuffleEndpoints()
	}
}
// BuildSortedItems returns every backend sorted by ID when sharding is
// disabled, or nil when sharding is enabled (use BuildSortedShard instead).
func (b *Backends) BuildSortedItems() []*Backend {
	// TODO BuildSortedItems() is currently used only by the backend template.
	// The main cfg template doesn't care if there are backend shards or not,
	// so the logic is here, but this doesn't seem to be a good place.
	if len(b.shards) == 0 {
		return b.buildSortedItems(b.items)
	}
	return nil
}

// BuildSortedShard returns the backends of one shard, sorted by ID.
func (b *Backends) BuildSortedShard(shardRef int) []*Backend {
	return b.buildSortedItems(b.shards[shardRef])
}
// buildSortedItems returns the backends of the given map as a slice ordered
// by backend ID.
func (b *Backends) buildSortedItems(backendItems map[string]*Backend) []*Backend {
	sorted := make([]*Backend, 0, len(backendItems))
	for _, item := range backendItems {
		sorted = append(sorted, item)
	}
	sort.Slice(sorted, func(i, j int) bool {
		return sorted[i].ID < sorted[j].ID
	})
	return sorted
}
// BuildUsedAuthBackends returns the set of backend names currently referenced
// as external auth backends by any path of any backend.
func (b *Backends) BuildUsedAuthBackends() map[string]bool {
	usedNames := map[string]bool{}
	for _, backend := range b.items {
		for _, path := range backend.Paths {
			name := path.AuthExternal.AuthBackendName
			if name != "" {
				usedNames[name] = true
			}
		}
	}
	return usedNames
}
// AcquireBackend returns the backend for namespace/name/port, creating,
// registering and change-tracking a new one when it does not exist yet.
func (b *Backends) AcquireBackend(namespace, name, port string) *Backend {
	if backend := b.FindBackend(namespace, name, port); backend != nil {
		return backend
	}
	shardCount := len(b.shards)
	backend := createBackend(shardCount, namespace, name, port)
	b.items[backend.ID] = backend
	b.itemsAdd[backend.ID] = backend
	// Register into the owning shard only when sharding is enabled.
	if shardCount > 0 {
		b.shards[backend.shard][backend.ID] = backend
	}
	b.BackendChanged(backend)
	return backend
}
// AcquireAuthBackend returns a backend used for external authentication,
// reusing a cached one when the same IP list, port and hostname were already
// requested. New auth backends are named sequentially under the `_auth`
// namespace.
func (b *Backends) AcquireAuthBackend(ipList []string, port int, hostname string) *Backend {
	// Sort the IPs so the cache key is independent of the input order.
	// Note: sorts the caller's slice in place.
	sort.Strings(ipList)
	key := fmt.Sprintf("%s:%d:%s", strings.Join(ipList, ","), port, hostname)
	backend := b.authBackends[key]
	if backend == nil {
		name := fmt.Sprintf("backend%03d", len(b.authBackends)+1)
		backend = b.AcquireBackend("_auth", name, strconv.Itoa(port))
		// Force the Host header when a hostname was provided.
		if hostname != "" {
			backend.CustomConfig = []string{"http-request set-header Host " + hostname}
		}
		for _, ip := range ipList {
			_ = backend.AcquireEndpoint(ip, port, "")
		}
		b.authBackends[key] = backend
	}
	return backend
}
// FindBackend returns the backend for namespace/name/port, or nil if missing.
func (b *Backends) FindBackend(namespace, name, port string) *Backend {
	return b.items[buildID(namespace, name, port)]
}

// FindBackendID returns the backend for the given BackendID, or nil if missing.
func (b *Backends) FindBackendID(backendID BackendID) *Backend {
	return b.items[backendID.String()]
}
// RemoveAll removes the listed backend IDs from the current state, tracking
// each removal for the next reload; unknown IDs are silently ignored.
func (b *Backends) RemoveAll(backendID []string) {
	for _, id := range backendID {
		if item, found := b.items[id]; found {
			if len(b.shards) > 0 {
				delete(b.shards[item.shard], id)
			}
			b.BackendChanged(item)
			b.itemsDel[id] = item
			// Drop the default backend reference when it is being removed.
			if item == b.DefaultBackend {
				b.DefaultBackend = nil
			}
			delete(b.items, id)
		}
	}
}
// IsEmpty reports whether the BackendID was left unconfigured.
func (b BackendID) IsEmpty() bool {
	return b.Name == ""
}

// String returns the canonical "<namespace>_<name>_<port>" representation.
// NOTE(review): the receiver is a value, so the computed id is stored only on
// the local copy and recomputed on every call - confirm if persistent caching
// was intended.
func (b BackendID) String() string {
	if b.id == "" {
		b.id = buildID(b.Namespace, b.Name, b.Port)
	}
	return b.id
}
// createBackend builds a new Backend for namespace/name/port, deriving a
// stable 64-bit hash from the backend ID and, when sharding is enabled,
// the shard that owns it.
func createBackend(shards int, namespace, name, port string) *Backend {
	id := buildID(namespace, name, port)
	hash := md5.Sum([]byte(id))
	// Fold the 128-bit MD5 digest into 64 bits by xor-ing its two big-endian
	// halves; binary.BigEndian.Uint64 replaces the two manual shift chains
	// and produces the exact same value.
	hash64 := binary.BigEndian.Uint64(hash[0:8]) ^ binary.BigEndian.Uint64(hash[8:16])
	// Plain modulus keeps the same backend on the same shard across reloads.
	var shard int
	if shards > 0 {
		shard = int(hash64 % uint64(shards))
	}
	return &Backend{
		hash64:    hash64,
		shard:     shard,
		ID:        id,
		Namespace: namespace,
		Name:      name,
		Port:      port,
		Server:    ServerConfig{InitialWeight: 1},
	}
}
// buildID composes the canonical backend ID: "<namespace>_<name>_<port>".
func buildID(namespace, name, port string) string {
	return fmt.Sprintf("%s_%s_%s", namespace, name, port)
}
|
apache-2.0
|
aradchykov/playframework
|
framework/src/play/src/main/scala/play/api/mvc/BodyParsers.scala
|
32117
|
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.mvc
import java.io._
import java.nio.channels.{ ByteChannel, Channels }
import java.util.Locale
import javax.inject.{ Inject, Provider }
import akka.actor.ActorSystem
import akka.stream._
import akka.stream.scaladsl.{ Flow, Sink, StreamConverters }
import akka.stream.stage._
import akka.util.ByteString
import play.api._
import play.api.data.Form
import play.api.http.Status._
import play.api.http._
import play.api.libs.Files.{ SingletonTemporaryFileCreator, TemporaryFile, TemporaryFileCreator }
import play.api.libs.json._
import play.api.libs.streams.Accumulator
import play.api.mvc.MultipartFormData._
import play.core.parsers.Multipart
import play.utils.PlayIO
import scala.concurrent.{ ExecutionContext, Future, Promise }
import scala.util.control.NonFatal
import scala.xml._
/**
 * A request body that adapts automatically according the request Content-Type.
 *
 * Each `asXxx` accessor returns `Some` only when this instance is the
 * corresponding concrete subtype, and `None` otherwise.
 */
sealed trait AnyContent {

  /**
   * application/x-www-form-urlencoded
   */
  def asFormUrlEncoded: Option[Map[String, Seq[String]]] = this match {
    case AnyContentAsFormUrlEncoded(data) => Some(data)
    case _ => None
  }

  /**
   * text/plain
   */
  def asText: Option[String] = this match {
    case AnyContentAsText(txt) => Some(txt)
    case _ => None
  }

  /**
   * application/xml
   */
  def asXml: Option[NodeSeq] = this match {
    case AnyContentAsXml(xml) => Some(xml)
    case _ => None
  }

  /**
   * text/json or application/json
   */
  def asJson: Option[JsValue] = this match {
    case AnyContentAsJson(json) => Some(json)
    case _ => None
  }

  /**
   * multipart/form-data
   */
  def asMultipartFormData: Option[MultipartFormData[TemporaryFile]] = this match {
    case AnyContentAsMultipartFormData(mfd) => Some(mfd)
    case _ => None
  }

  /**
   * Used when no Content-Type matches
   */
  def asRaw: Option[RawBuffer] = this match {
    case AnyContentAsRaw(raw) => Some(raw)
    case _ => None
  }
}
/**
 * Factory object for creating an AnyContent instance. Useful for unit testing.
 *
 * Each overload wraps its argument in the matching AnyContent subtype; the
 * no-argument overload yields the empty body.
 */
object AnyContent {

  def apply(): AnyContent = {
    AnyContentAsEmpty
  }

  def apply(contentText: String): AnyContent = {
    AnyContentAsText(contentText)
  }

  def apply(json: JsValue): AnyContent = {
    AnyContentAsJson(json)
  }

  def apply(xml: NodeSeq): AnyContent = {
    AnyContentAsXml(xml)
  }

  def apply(formUrlEncoded: Map[String, Seq[String]]): AnyContent = {
    AnyContentAsFormUrlEncoded(formUrlEncoded)
  }

  def apply(formData: MultipartFormData[TemporaryFile]): AnyContent = {
    AnyContentAsMultipartFormData(formData)
  }

  def apply(raw: RawBuffer): AnyContent = {
    AnyContentAsRaw(raw)
  }
}
/**
 * AnyContent - Empty request body
 */
case object AnyContentAsEmpty extends AnyContent

/**
 * AnyContent - Text body
 */
case class AnyContentAsText(txt: String) extends AnyContent

/**
 * AnyContent - Form url encoded body
 */
case class AnyContentAsFormUrlEncoded(data: Map[String, Seq[String]]) extends AnyContent

/**
 * AnyContent - Raw body (give access to the raw data as bytes).
 */
case class AnyContentAsRaw(raw: RawBuffer) extends AnyContent

/**
 * AnyContent - XML body
 */
case class AnyContentAsXml(xml: NodeSeq) extends AnyContent

/**
 * AnyContent - Json body
 */
case class AnyContentAsJson(json: JsValue) extends AnyContent

/**
 * AnyContent - Multipart form data body
 */
case class AnyContentAsMultipartFormData(mfd: MultipartFormData[TemporaryFile]) extends AnyContent
/**
 * Multipart form data body.
 *
 * @param dataParts the simple key/value form fields
 * @param files the uploaded file parts
 * @param badParts the parts that could not be parsed
 */
case class MultipartFormData[A](dataParts: Map[String, Seq[String]], files: Seq[FilePart[A]], badParts: Seq[BadPart]) {

  /**
   * Extract the data parts as Form url encoded.
   */
  def asFormUrlEncoded: Map[String, Seq[String]] = dataParts

  /**
   * Access a file part.
   */
  def file(key: String): Option[FilePart[A]] = files.find(_.key == key)
}
/**
 * Defines parts handled by Multipart form data.
 */
object MultipartFormData {

  /**
   * A part.
   *
   * @tparam A the type that file parts are exposed as.
   */
  sealed trait Part[+A]

  /**
   * A data part: a simple key/value form field.
   */
  case class DataPart(key: String, value: String) extends Part[Nothing]

  /**
   * A file part: an uploaded file exposed as `ref`.
   */
  case class FilePart[A](key: String, filename: String, contentType: Option[String], ref: A) extends Part[A]

  /**
   * A part that has not been properly parsed.
   */
  case class BadPart(headers: Map[String, String]) extends Part[Nothing]

  /**
   * Emitted when the multipart stream can't be parsed for some reason.
   */
  case class ParseError(message: String) extends Part[Nothing]

  /**
   * The multipart/form-data parser buffers many things in memory, including data parts, headers, file names etc.
   *
   * Some buffer limits apply to each element, eg, there is a buffer for headers before they are parsed. Other buffer
   * limits apply to all in memory data in aggregate, this includes data parts, file names, part names.
   *
   * If any of these buffers are exceeded, this will be emitted.
   */
  case class MaxMemoryBufferExceeded(message: String) extends Part[Nothing]
}
/**
 * Handle the request body a raw bytes data.
 *
 * Data is kept in memory until it grows past `memoryThreshold`, at which point
 * it is spilled to a temporary file and all further chunks go to that file.
 *
 * @param memoryThreshold If the content size is bigger than this limit, the content is stored as file.
 * @param temporaryFileCreator the temporary file creator to store the content as file.
 * @param initialData the initial data, ByteString.empty by default.
 */
case class RawBuffer(memoryThreshold: Int, temporaryFileCreator: TemporaryFileCreator, initialData: ByteString = ByteString.empty) {

  import play.api.libs.Files._

  // In-memory buffer; becomes null once the content has spilled to a file.
  @volatile private var inMemory: ByteString = initialData
  // Backing temporary file and its open stream, set on first spill.
  @volatile private var backedByTemporaryFile: TemporaryFile = _
  @volatile private var outStream: FileOutputStream = _

  // Appends a chunk, spilling to the temporary file once the in-memory size
  // would exceed memoryThreshold.
  // NOTE(review): fields are volatile but push/close are not synchronized;
  // presumably a single writer thread feeds the buffer - confirm.
  private[play] def push(chunk: ByteString) {
    if (inMemory != null) {
      if (chunk.length + inMemory.size > memoryThreshold) {
        backToTemporaryFile()
        outStream.write(chunk.toArray)
      } else {
        inMemory = inMemory ++ chunk
      }
    } else {
      outStream.write(chunk.toArray)
    }
  }

  // Closes the backing file stream, if one was opened.
  private[play] def close() {
    if (outStream != null) {
      outStream.close()
    }
  }

  // Spills the current in-memory content to a fresh temporary file and
  // switches the buffer to file-backed mode (inMemory = null).
  private[play] def backToTemporaryFile() {
    backedByTemporaryFile = temporaryFileCreator.create("requestBody", "asRaw")
    outStream = new FileOutputStream(backedByTemporaryFile)
    outStream.write(inMemory.toArray)
    inMemory = null
  }

  /**
   * Buffer size.
   */
  def size: Long = {
    if (inMemory != null) inMemory.size else backedByTemporaryFile.length
  }

  /**
   * Returns the buffer content as a bytes array.
   *
   * This operation will cause the internal collection of byte arrays to be copied into a new byte array on each
   * invocation, no caching is done. If the buffer has been written out to a file, it will read the contents of the
   * file.
   *
   * @param maxLength The max length allowed to be stored in memory. If this is smaller than memoryThreshold, and the
   * buffer is already in memory then None will still be returned.
   * @return None if the content is greater than maxLength, otherwise, the data as bytes.
   */
  def asBytes(maxLength: Long = memoryThreshold): Option[ByteString] = {
    if (size <= maxLength) {
      Some(if (inMemory != null) {
        inMemory
      } else {
        ByteString(PlayIO.readFile(backedByTemporaryFile.path))
      })
    } else {
      None
    }
  }

  /**
   * Returns the buffer content as File.
   *
   * Forces an in-memory buffer out to a temporary file first.
   */
  def asFile: File = {
    if (inMemory != null) {
      backToTemporaryFile()
      close()
    }
    backedByTemporaryFile
  }

  override def toString = {
    "RawBuffer(inMemory=" + Option(inMemory).map(_.size).orNull + ", backedByTemporaryFile=" + backedByTemporaryFile + ")"
  }
}
/**
 * Legacy body parsers trait. Basically all this does is define a "parse" member with a PlayBodyParsers instance
 * constructed from the running app's settings. If no app is running, we create parsers using default settings and an
 * internally-created materializer. This is done to support legacy behavior. Instead of using this trait, we suggest
 * injecting an instance of PlayBodyParsers (either directly or through AbstractController).
 */
trait BodyParsers {

  // Resolves the running application, if any, without forcing one to exist.
  @inline private def maybeApp = Play.privateMaybeApplication

  private val hcCache = Application.instanceCache[HttpConfiguration]

  // Fallback materializer, created lazily (with its own ActorSystem) only
  // when no application is running.
  private lazy val mat: Materializer = ActorMaterializer()(ActorSystem("play-body-parsers"))

  // Each collaborator comes from the running app when available, otherwise
  // from the legacy defaults above.
  private def parserConfig: ParserConfiguration = maybeApp.fold(ParserConfiguration())(hcCache(_).parser)
  private def parserErrorHandler: HttpErrorHandler = maybeApp.fold[HttpErrorHandler](DefaultHttpErrorHandler)(_.errorHandler)
  private def parserMaterializer: Materializer = maybeApp.fold[Materializer](mat)(_.materializer)
  private def parserTemporaryFileCreator: TemporaryFileCreator = maybeApp.fold[TemporaryFileCreator](SingletonTemporaryFileCreator)(_.injector.instanceOf[TemporaryFileCreator])

  @deprecated("Inject PlayBodyParsers or use AbstractController instead", "2.6.0")
  lazy val parse: PlayBodyParsers = new PlayBodyParsers {
    override implicit def materializer = parserMaterializer
    override def errorHandler = parserErrorHandler
    override def config = parserConfig
    override def temporaryFileCreator = parserTemporaryFileCreator
  }
}
/**
 * A set of reusable body parsers and utilities that do not require configuration.
 */
trait BodyParserUtils {

  /**
   * Don't parse the body content.
   */
  // `ignore(())` passes the unit value explicitly; the previous `ignore(Unit)`
  // passed the `Unit` companion object and only compiled via the compiler's
  // value-discard adaptation (with a warning). Runtime behavior is unchanged.
  def empty: BodyParser[Unit] = ignore(())

  /**
   * Ignore the body and immediately succeed with the given (already available) value.
   */
  def ignore[A](body: A): BodyParser[A] = BodyParser("ignore") { request =>
    Accumulator.done(Right(body))
  }

  /**
   * A body parser that always returns an error.
   */
  def error[A](result: Future[Result]): BodyParser[A] = BodyParser("error") { request =>
    import play.core.Execution.Implicits.trampoline
    Accumulator.done(result.map(Left.apply))
  }

  /**
   * Allows to choose the right BodyParser parser to use by examining the request headers.
   */
  def using[A](f: RequestHeader => BodyParser[A]) = BodyParser { request =>
    f(request)(request)
  }

  /**
   * A body parser that flattens a future BodyParser.
   */
  def flatten[A](underlying: Future[BodyParser[A]])(implicit ec: ExecutionContext, mat: Materializer): BodyParser[A] =
    BodyParser { request =>
      Accumulator.flatten(underlying.map(_(request)))
    }

  /**
   * Creates a conditional BodyParser: `parser` handles the body when
   * `predicate` holds, otherwise the request fails with `badResult`.
   */
  def when[A](predicate: RequestHeader => Boolean, parser: BodyParser[A], badResult: RequestHeader => Future[Result]): BodyParser[A] = {
    BodyParser(s"conditional, wrapping=$parser") { request =>
      if (predicate(request)) {
        parser(request)
      } else {
        import play.core.Execution.Implicits.trampoline
        Accumulator.done(badResult(request).map(Left.apply))
      }
    }
  }

  /**
   * Wrap an existing BodyParser with a maxLength constraints.
   *
   * @param maxLength The max length allowed
   * @param parser The BodyParser to wrap
   */
  def maxLength[A](maxLength: Long, parser: BodyParser[A])(implicit mat: Materializer): BodyParser[Either[MaxSizeExceeded, A]] =
    BodyParser(s"maxLength=$maxLength, wrapping=$parser") { request =>
      import play.core.Execution.Implicits.trampoline
      val takeUpToFlow = Flow.fromGraph(new BodyParsers.TakeUpTo(maxLength))

      // Apply the request
      val parserSink = parser.apply(request).toSink

      Accumulator(takeUpToFlow.toMat(parserSink) { (statusFuture, resultFuture) =>
        // When the stream was truncated, report MaxSizeExceeded instead of
        // whatever the wrapped parser produced from the partial body.
        statusFuture.flatMap {
          case exceeded: MaxSizeExceeded => Future.successful(Right(Left(exceeded)))
          case _ => resultFuture.map {
            case Left(result) => Left(result)
            case Right(a) => Right(Right(a))
          }
        }
      })
    }
}
/**
 * Default injectable implementation of [[PlayBodyParsers]]; every collaborator
 * is supplied via constructor injection.
 */
class DefaultPlayBodyParsers @Inject() (
    val config: ParserConfiguration,
    val errorHandler: HttpErrorHandler,
    val materializer: Materializer,
    val temporaryFileCreator: TemporaryFileCreator) extends PlayBodyParsers

/**
 * Factory for ad-hoc [[PlayBodyParsers]] instances (e.g. in tests).
 */
object PlayBodyParsers {
  def apply(conf: ParserConfiguration, eh: HttpErrorHandler, mat: Materializer, tfc: TemporaryFileCreator): PlayBodyParsers = {
    new DefaultPlayBodyParsers(conf, eh, mat, tfc)
  }
}
/**
 * Body parsers officially supported by Play (i.e. built-in to Play)
 */
trait PlayBodyParsers extends BodyParserUtils {

  // Used to trace which concrete parser `anyContent` delegates to.
  private val logger = Logger(classOf[PlayBodyParsers])

  // Runs the Akka Streams sinks built by the parsers below.
  private[play] implicit def materializer: Materializer

  // Provides the configured parser limits (max in-memory / on-disk buffer sizes).
  private[play] def config: ParserConfiguration

  // Renders client-error responses (400 / 413 / 415) for parse failures.
  private[play] def errorHandler: HttpErrorHandler

  // Creates the temporary files used by the raw / file / multipart parsers.
  private[play] def temporaryFileCreator: TemporaryFileCreator

  /**
   * Unlimited size.
   */
  val UNLIMITED: Long = Long.MaxValue

  // Matches "application/*+xml"-style media types (e.g. application/atom+xml).
  private[play] val ApplicationXmlMatcher = """application/.*\+xml.*""".r

  /**
   * Default max length allowed for text based body.
   *
   * You can configure it in application.conf:
   *
   * {{{
   * play.http.parser.maxMemoryBuffer = 512k
   * }}}
   */
  def DefaultMaxTextLength: Int = config.maxMemoryBuffer

  /**
   * Default max length allowed for disk based body.
   *
   * You can configure it in application.conf:
   *
   * {{{
   * play.http.parser.maxDiskBuffer = 512k
   * }}}
   */
  def DefaultMaxDiskLength: Long = config.maxDiskBuffer

  // -- Text parser

  /**
   * Parse the body as text without checking the Content-Type.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def tolerantText(maxLength: Long): BodyParser[String] = {
    tolerantBodyParser("text", maxLength, "Error decoding text body") { (request, bytes) =>
      // Encoding notes: RFC-2616 section 3.7.1 mandates ISO-8859-1 as the default charset if none is specified.
      bytes.decodeString(request.charset.getOrElse("ISO-8859-1"))
    }
  }

  /**
   * Parse the body as text without checking the Content-Type.
   */
  def tolerantText: BodyParser[String] = tolerantText(DefaultMaxTextLength)

  /**
   * Parse the body as text if the Content-Type is text/plain.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def text(maxLength: Int): BodyParser[String] = when(
    _.contentType.exists(_.equalsIgnoreCase("text/plain")),
    tolerantText(maxLength),
    createBadResult("Expecting text/plain body", UNSUPPORTED_MEDIA_TYPE)
  )

  /**
   * Parse the body as text if the Content-Type is text/plain.
   */
  def text: BodyParser[String] = text(DefaultMaxTextLength)

  // -- Raw parser

  /**
   * Store the body content in a RawBuffer.
   *
   * @param memoryThreshold If the content size is bigger than this limit, the content is stored as file.
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def raw(memoryThreshold: Int = DefaultMaxTextLength, maxLength: Long = DefaultMaxDiskLength): BodyParser[RawBuffer] =
    BodyParser("raw, memoryThreshold=" + memoryThreshold) { request =>
      import play.core.Execution.Implicits.trampoline
      enforceMaxLength(request, maxLength, Accumulator {
        val buffer = RawBuffer(memoryThreshold, temporaryFileCreator)
        val sink = Sink.fold[RawBuffer, ByteString](buffer) { (bf, bs) => bf.push(bs); bf }
        // Ensure the buffer (and any backing temp file) is closed once the
        // stream terminates, whether it succeeded or failed.
        sink.mapMaterializedValue { future =>
          future andThen { case _ => buffer.close() }
        }
      } map (buffer => Right(buffer)))
    }

  /**
   * Store the body content in a RawBuffer.
   */
  def raw: BodyParser[RawBuffer] = raw()

  // -- JSON parser

  /**
   * Parse the body as Json without checking the Content-Type.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def tolerantJson(maxLength: Int): BodyParser[JsValue] =
    tolerantBodyParser[JsValue]("json", maxLength, "Invalid Json") { (request, bytes) =>
      // Encoding notes: RFC 4627 requires that JSON be encoded in Unicode, and states that whether that's
      // UTF-8, UTF-16 or UTF-32 can be auto detected by reading the first two bytes. So we ignore the declared
      // charset and don't decode, we passing the byte array as is because Jackson supports auto detection.
      Json.parse(bytes.iterator.asInputStream)
    }

  /**
   * Parse the body as Json without checking the Content-Type.
   */
  def tolerantJson: BodyParser[JsValue] = tolerantJson(DefaultMaxTextLength)

  /**
   * Parse the body as Json if the Content-Type is text/json or application/json.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def json(maxLength: Int): BodyParser[JsValue] = when(
    _.contentType.exists(m => m.equalsIgnoreCase("text/json") || m.equalsIgnoreCase("application/json")),
    tolerantJson(maxLength),
    createBadResult("Expecting text/json or application/json body", UNSUPPORTED_MEDIA_TYPE)
  )

  /**
   * Parse the body as Json if the Content-Type is text/json or application/json.
   */
  def json: BodyParser[JsValue] = json(DefaultMaxTextLength)

  /**
   * Parse the body as Json if the Content-Type is text/json or application/json,
   * validating the result with the Json reader.
   *
   * @tparam A the type to read and validate from the body.
   * @param reader a Json reader for type A.
   */
  def json[A](implicit reader: Reads[A]): BodyParser[A] =
    BodyParser("json reader") { request =>
      import play.core.Execution.Implicits.trampoline
      json(request) mapFuture {
        case Left(simpleResult) =>
          // The underlying JSON parse already failed; propagate its error result.
          Future.successful(Left(simpleResult))
        case Right(jsValue) =>
          jsValue.validate(reader) map { a =>
            Future.successful(Right(a))
          } recoverTotal { jsError =>
            // Valid JSON, but it does not satisfy the reader: answer 400 via the error handler.
            val msg = s"Json validation error ${JsError.toFlatForm(jsError)}"
            createBadResult(msg)(request) map Left.apply
          }
      }
    }

  // -- Form parser

  /**
   * Parse the body and binds it to a given form model.
   *
   * {{{
   * case class User(name: String)
   *
   * val userForm: Form[User] = Form(mapping("name" -> nonEmptyText)(User.apply)(User.unapply))
   *
   * Action(parse.form(userForm)) { request =>
   *   Ok(s"Hello, \${request.body.name}!")
   * }
   * }}}
   *
   * @param form Form model
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response. If `None`, the default `play.http.parser.maxMemoryBuffer` configuration value is used.
   * @param onErrors The result to reply in case of errors during the form binding process
   */
  def form[A](form: Form[A], maxLength: Option[Long] = None, onErrors: Form[A] => Result = (formErrors: Form[A]) => Results.BadRequest): BodyParser[A] =
    BodyParser { requestHeader =>
      import play.core.Execution.Implicits.trampoline
      // First parse the body generically, then bind the form against it.
      val parser = anyContent(maxLength)
      parser(requestHeader).map { resultOrBody =>
        resultOrBody.right.flatMap { body =>
          form
            .bindFromRequest()(Request[AnyContent](requestHeader, body))
            .fold(formErrors => Left(onErrors(formErrors)), a => Right(a))
        }
      }
    }

  // -- XML parser

  /**
   * Parse the body as Xml without checking the Content-Type.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def tolerantXml(maxLength: Int): BodyParser[NodeSeq] =
    tolerantBodyParser[NodeSeq]("xml", maxLength, "Invalid XML") { (request, bytes) =>
      val inputSource = new InputSource(bytes.iterator.asInputStream)
      // Encoding notes: RFC 3023 is the RFC for XML content types. Comments below reflect what it says.
      // An externally declared charset takes precedence
      request.charset.orElse(
        // If omitted, maybe select a default charset, based on the media type.
        request.mediaType.collect {
          // According to RFC 3023, the default encoding for text/xml is us-ascii. This contradicts RFC 2616, which
          // states that the default for text/* is ISO-8859-1. An RFC 3023 conforming client will send US-ASCII,
          // in that case it is safe for us to use US-ASCII or ISO-8859-1. But a client that knows nothing about
          // XML, and therefore nothing about RFC 3023, but rather conforms to RFC 2616, will send ISO-8859-1.
          // Since decoding as ISO-8859-1 works for both clients that conform to RFC 3023, and clients that conform
          // to RFC 2616, we use that.
          case mt if mt.mediaType == "text" => "iso-8859-1"
          // Otherwise, there should be no default, it will be detected by the XML parser.
        }
      ).foreach { charset =>
        inputSource.setEncoding(charset)
      }
      Play.XML.load(inputSource)
    }

  /**
   * Parse the body as Xml without checking the Content-Type.
   */
  def tolerantXml: BodyParser[NodeSeq] = tolerantXml(DefaultMaxTextLength)

  /**
   * Parse the body as Xml if the Content-Type is application/xml, text/xml or application/XXX+xml.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def xml(maxLength: Int): BodyParser[NodeSeq] = when(
    _.contentType.exists { t =>
      val tl = t.toLowerCase(Locale.ENGLISH)
      tl.startsWith("text/xml") || tl.startsWith("application/xml") || ApplicationXmlMatcher.pattern.matcher(tl).matches()
    },
    tolerantXml(maxLength),
    createBadResult("Expecting xml body", UNSUPPORTED_MEDIA_TYPE)
  )

  /**
   * Parse the body as Xml if the Content-Type is application/xml, text/xml or application/XXX+xml.
   */
  def xml: BodyParser[NodeSeq] = xml(DefaultMaxTextLength)

  // -- File parsers

  /**
   * Store the body content into a file.
   *
   * Note: no max-length is enforced here; the whole body is streamed to disk.
   *
   * @param to The file used to store the content.
   */
  def file(to: File): BodyParser[File] = BodyParser("file, to=" + to) { request =>
    import play.core.Execution.Implicits.trampoline
    Accumulator(StreamConverters.fromOutputStream(() => new FileOutputStream(to))).map(_ => Right(to))
  }

  /**
   * Store the body content into a temporary file.
   */
  def temporaryFile: BodyParser[TemporaryFile] = BodyParser("temporaryFile") { request =>
    val tempFile = temporaryFileCreator.create("requestBody", "asTemporaryFile")
    file(tempFile)(request).map(_ => Right(tempFile))(play.core.Execution.Implicits.trampoline)
  }

  // -- FormUrlEncoded

  /**
   * Parse the body as Form url encoded without checking the Content-Type.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def tolerantFormUrlEncoded(maxLength: Int): BodyParser[Map[String, Seq[String]]] =
    tolerantBodyParser("formUrlEncoded", maxLength, "Error parsing application/x-www-form-urlencoded") { (request, bytes) =>
      import play.core.parsers._
      val charset = request.charset.getOrElse("UTF-8")
      // NOTE(review): the raw bytes are decoded as UTF-8 while the request's
      // declared charset is applied by FormUrlEncodedParser during
      // percent-decoding — confirm this is intentional for non-UTF-8 forms.
      val urlEncodedString = bytes.decodeString("UTF-8")
      FormUrlEncodedParser.parse(urlEncodedString, charset)
    }

  /**
   * Parse the body as form url encoded without checking the Content-Type.
   */
  def tolerantFormUrlEncoded: BodyParser[Map[String, Seq[String]]] =
    tolerantFormUrlEncoded(DefaultMaxTextLength)

  @deprecated("Use formUrlEncoded", "2.6.0")
  def urlFormEncoded(maxLength: Int): BodyParser[Map[String, Seq[String]]] = formUrlEncoded(maxLength)

  @deprecated("Use formUrlEncoded", "2.6.0")
  def urlFormEncoded: BodyParser[Map[String, Seq[String]]] = formUrlEncoded

  /**
   * Parse the body as form url encoded if the Content-Type is application/x-www-form-urlencoded.
   *
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def formUrlEncoded(maxLength: Int): BodyParser[Map[String, Seq[String]]] = when(
    _.contentType.exists(_.equalsIgnoreCase("application/x-www-form-urlencoded")),
    tolerantFormUrlEncoded(maxLength),
    createBadResult("Expecting application/x-www-form-urlencoded body", UNSUPPORTED_MEDIA_TYPE)
  )

  /**
   * Parse the body as form url encoded if the Content-Type is application/x-www-form-urlencoded.
   */
  def formUrlEncoded: BodyParser[Map[String, Seq[String]]] =
    formUrlEncoded(DefaultMaxTextLength)

  // -- Magic any content

  /**
   * If the request has a body, parse the body content by checking the Content-Type header.
   */
  def default: BodyParser[AnyContent] = default(None)

  // this is an alias method since "default" is a Java reserved word
  def defaultBodyParser: BodyParser[AnyContent] = default

  /**
   * If the request has a body, parse the body content by checking the Content-Type header.
   *
   * @param maxLength Max length allowed, or `None` to use the configured defaults.
   */
  def default(maxLength: Option[Long]): BodyParser[AnyContent] = using { request =>
    if (request.hasBody) {
      anyContent(maxLength)
    } else {
      ignore(AnyContentAsEmpty)
    }
  }

  /**
   * Guess the body content by checking the Content-Type header.
   */
  def anyContent: BodyParser[AnyContent] = anyContent(None)

  /**
   * Guess the body content by checking the Content-Type header.
   *
   * @param maxLength Max length allowed, or `None` to use the configured defaults.
   */
  def anyContent(maxLength: Option[Long]): BodyParser[AnyContent] = BodyParser("anyContent") { request =>
    import play.core.Execution.Implicits.trampoline

    // In-memory parsers use an Int limit; disk-backed parsers a Long limit.
    def maxLengthOrDefault = maxLength.fold(DefaultMaxTextLength)(_.toInt)
    def maxLengthOrDefaultLarge = maxLength.getOrElse(DefaultMaxDiskLength)

    val contentType: Option[String] = request.contentType.map(_.toLowerCase(Locale.ENGLISH))
    contentType match {
      case Some("text/plain") =>
        logger.trace("Parsing AnyContent as text")
        text(maxLengthOrDefault)(request).map(_.right.map(s => AnyContentAsText(s)))
      case Some("text/xml") | Some("application/xml") | Some(ApplicationXmlMatcher()) =>
        logger.trace("Parsing AnyContent as xml")
        xml(maxLengthOrDefault)(request).map(_.right.map(x => AnyContentAsXml(x)))
      case Some("text/json") | Some("application/json") =>
        logger.trace("Parsing AnyContent as json")
        json(maxLengthOrDefault)(request).map(_.right.map(j => AnyContentAsJson(j)))
      case Some("application/x-www-form-urlencoded") =>
        logger.trace("Parsing AnyContent as urlFormEncoded")
        formUrlEncoded(maxLengthOrDefault)(request).map(_.right.map(d => AnyContentAsFormUrlEncoded(d)))
      case Some("multipart/form-data") =>
        logger.trace("Parsing AnyContent as multipartFormData")
        multipartFormData(Multipart.handleFilePartAsTemporaryFile(temporaryFileCreator), maxLengthOrDefaultLarge).apply(request)
          .map(_.right.map(m => AnyContentAsMultipartFormData(m)))
      case _ =>
        // Unknown or missing Content-Type: fall back to buffering the raw bytes.
        logger.trace("Parsing AnyContent as raw")
        raw(DefaultMaxTextLength, maxLengthOrDefaultLarge)(request).map(_.right.map(r => AnyContentAsRaw(r)))
    }
  }

  // -- Multipart

  /**
   * Parse the content as multipart/form-data
   */
  def multipartFormData: BodyParser[MultipartFormData[TemporaryFile]] =
    multipartFormData(Multipart.handleFilePartAsTemporaryFile(temporaryFileCreator))

  /**
   * Parse the content as multipart/form-data
   *
   * @param filePartHandler Handles file parts.
   * @param maxLength Max length allowed or returns EntityTooLarge HTTP response.
   */
  def multipartFormData[A](filePartHandler: Multipart.FilePartHandler[A], maxLength: Long = DefaultMaxDiskLength): BodyParser[MultipartFormData[A]] = {
    BodyParser("multipartFormData") { request =>
      val bodyAccumulator = Multipart.multipartParser(DefaultMaxTextLength, filePartHandler, errorHandler).apply(request)
      enforceMaxLength(request, maxLength, bodyAccumulator)
    }
  }

  // Builds a client-error response via the configured error handler.
  protected def createBadResult(msg: String, statusCode: Int = BAD_REQUEST): RequestHeader => Future[Result] = { request =>
    errorHandler.onClientError(request, statusCode, msg)
  }

  /**
   * Enforce the max length on the stream consumed by the given accumulator.
   */
  private[play] def enforceMaxLength[A](request: RequestHeader, maxLength: Long, accumulator: Accumulator[ByteString, Either[Result, A]]): Accumulator[ByteString, Either[Result, A]] = {
    val takeUpToFlow = Flow.fromGraph(new BodyParsers.TakeUpTo(maxLength))
    Accumulator(takeUpToFlow.toMat(accumulator.toSink) { (statusFuture, resultFuture) =>
      import play.core.Execution.Implicits.trampoline
      val defaultCtx = materializer.executionContext
      statusFuture.flatMap {
        case MaxSizeExceeded(_) =>
          // Run the (potentially expensive) error-handler call on the
          // materializer's execution context rather than the trampoline.
          val badResult = Future.successful(()).flatMap(_ => createBadResult("Request Entity Too Large", REQUEST_ENTITY_TOO_LARGE)(request))(defaultCtx)
          badResult.map(Left(_))
        case MaxSizeNotExceeded => resultFuture
      }
    })
  }

  /**
   * Create a body parser that uses the given parser and enforces the given max length.
   *
   * @param name The name of the body parser.
   * @param maxLength The maximum length of the body to buffer.
   * @param errorMessage The error message to prepend to the exception message if an error was encountered.
   * @param parser The parser.
   */
  protected def tolerantBodyParser[A](name: String, maxLength: Long, errorMessage: String)(parser: (RequestHeader, ByteString) => A): BodyParser[A] =
    BodyParser(name + ", maxLength=" + maxLength) { request =>
      import play.core.Execution.Implicits.trampoline
      // Buffer the whole body in memory, then hand it to the parser function.
      enforceMaxLength(request, maxLength, Accumulator(
        Sink.fold[ByteString, ByteString](ByteString.empty)((state, bs) => state ++ bs)
      ) mapFuture { bytes =>
        try {
          Future.successful(Right(parser(request, bytes)))
        } catch {
          case NonFatal(e) =>
            logger.debug(errorMessage, e)
            createBadResult(errorMessage + ": " + e.getMessage)(request).map(Left(_))
        }
      })
    }
}
/**
 * Default BodyParsers.
 */
object BodyParsers extends BodyParsers {

  /**
   * The default body parser provided by Play
   */
  class Default @Inject() (parse: PlayBodyParsers) extends BodyParser[AnyContent] {
    // Convenience constructor for use outside dependency-injection contexts.
    def this(config: ParserConfiguration, eh: HttpErrorHandler, mat: Materializer, tfc: TemporaryFileCreator) =
      this(PlayBodyParsers(config, eh, mat, tfc))
    override def apply(rh: RequestHeader) = parse.default(None)(rh)
  }

  object utils extends BodyParserUtils

  private[play] def takeUpTo(maxLength: Long): Graph[FlowShape[ByteString, ByteString], Future[MaxSizeStatus]] = new TakeUpTo(maxLength)

  /**
   * Flow stage that forwards body chunks downstream until more than `maxLength`
   * bytes have been pushed. Its materialized `Future[MaxSizeStatus]` completes
   * with `MaxSizeExceeded` when the limit is crossed (the stage then fails with
   * [[MaxLengthLimitAttained]] so downstream parsers stop), or with
   * `MaxSizeNotExceeded` when the stream finishes within the limit.
   */
  private[play] class TakeUpTo(maxLength: Long) extends GraphStageWithMaterializedValue[FlowShape[ByteString, ByteString], Future[MaxSizeStatus]] {

    private val in = Inlet[ByteString]("TakeUpTo.in")
    private val out = Outlet[ByteString]("TakeUpTo.out")

    override def shape: FlowShape[ByteString, ByteString] = FlowShape.of(in, out)

    override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Future[MaxSizeStatus]) = {
      val status = Promise[MaxSizeStatus]()
      var pushedBytes: Long = 0
      val logic = new GraphStageLogic(shape) {
        setHandler(out, new OutHandler {
          override def onPull(): Unit = {
            pull(in)
          }
          override def onDownstreamFinish(): Unit = {
            // Downstream stopped consuming before the limit was hit.
            status.success(MaxSizeNotExceeded)
            completeStage()
          }
        })
        setHandler(in, new InHandler {
          override def onPush(): Unit = {
            val chunk = grab(in)
            pushedBytes += chunk.size
            if (pushedBytes > maxLength) {
              status.success(MaxSizeExceeded(maxLength))
              // Make sure we fail the stream, this will ensure downstream body parsers don't try to parse it
              failStage(new MaxLengthLimitAttained)
            } else {
              push(out, chunk)
            }
          }
          override def onUpstreamFinish(): Unit = {
            status.success(MaxSizeNotExceeded)
            completeStage()
          }
          override def onUpstreamFailure(ex: Throwable): Unit = {
            status.failure(ex)
            failStage(ex)
          }
        })
      }
      (logic, status.future)
    }
  }

  // Lightweight marker exception: no message, no suppression, no stack trace
  // (it is thrown on every over-sized body, so filling a stack trace would be wasted work).
  private[play] class MaxLengthLimitAttained extends RuntimeException(null, null, false, false)
}
/**
 * The status of a max size flow.
 */
sealed trait MaxSizeStatus

/**
 * Signal a max content size exceeded.
 *
 * @param length the configured maximum number of bytes that was exceeded
 */
case class MaxSizeExceeded(length: Long) extends MaxSizeStatus

/**
 * Signal max size is not exceeded.
 */
case object MaxSizeNotExceeded extends MaxSizeStatus
|
apache-2.0
|
ibnelazzouzi/bpm
|
activiti-testing/activiti-testing-needle/src/main/java/org/activiti/testing/needle/engine/test/ProcessEngineTestRule.java
|
794
|
package org.activiti.testing.needle.engine.test;
import java.util.Date;
import org.activiti.engine.ProcessEngine;
import org.junit.rules.TestRule;
/**
 * Combined interface of {@link org.activiti.testing.needle.engine.ProcessEngineServices}
 * and {@link TestRule}.
 */
public interface ProcessEngineTestRule extends TestRule, ProcessEngine {

  /**
   * Sets the current time of the in-memory engine. Use to test timers etc.
   *
   * @param currentTime
   *          time to set
   */
  void setCurrentTime(Date currentTime);

  /**
   * Provides the deployment id after deploying with the {@code @Deployment} annotation.
   *
   * @return current deployment id
   */
  String getDeploymentId();

  /**
   * Gets the process engine under test.
   *
   * @return the process engine
   */
  ProcessEngine getProcessEngine();
}
|
apache-2.0
|
Singleton06/terra-core
|
packages/terra-i18n-plugin/src/I18nAggregatorPlugin.js
|
4428
|
import fs from 'fs';
import path from 'path';
let supportedLocales;
/**
 * Renders the source code of an ES module for one locale's aggregated
 * translations. The generated module registers react-intl locale data for the
 * base language (the part of the tag before the first '-') and exports
 * `messages`, `locale` and `areTranslationsLoaded`.
 *
 * @param {string} language full locale tag, e.g. 'en-US'
 * @param {Object} messages aggregated message-id -> translation map
 * @returns {string} JavaScript source for the locale module
 */
function generateTranslationFile(language, messages) {
  return `import { addLocaleData } from 'react-intl';
import localeData from 'react-intl/locale-data/${language.split('-')[0]}';
addLocaleData(localeData);
const messages = ${JSON.stringify(messages, null, 2)};
const areTranslationsLoaded = true;
const locale = '${language}';
export {
areTranslationsLoaded,
locale,
messages
};`;
}
/**
 * Lists the names of the immediate sub-directories of `srcPath`.
 *
 * @param {string} srcPath directory to scan
 * @param {Object} inputFileSystem fs-like object providing readdirSync/statSync
 * @returns {string[]} entry names that are directories
 */
function getDirectories(srcPath, inputFileSystem) {
  const entries = inputFileSystem.readdirSync(srcPath);
  const isDirectory = entry => inputFileSystem.statSync(path.join(srcPath, entry)).isDirectory();
  return entries.filter(isDirectory);
}
/**
 * Recursively merges translation messages found under `currentDirectory`
 * (its `translations/<locale>.json` files, plus every package inside its
 * `node_modules`) into `languageMessages`.
 *
 * @param {Object} languageMessages locale -> messages map, mutated in place
 * @param {string} currentDirectory directory to scan
 * @param {Object} inputFileSystem fs-like object used for all reads
 * @returns {Object} the same `languageMessages` object, for chaining
 */
function aggregateDirectory(languageMessages, currentDirectory, inputFileSystem) {
  const translationsDirectory = path.resolve(currentDirectory, 'translations');
  try {
    // Probe the translations directory; readdirSync throws when it is absent.
    inputFileSystem.readdirSync(translationsDirectory);
    for (const language of supportedLocales) {
      const translationFile = path.resolve(translationsDirectory, `${language}.json`);
      try {
        const parsed = JSON.parse(inputFileSystem.readFileSync(translationFile, 'utf8'));
        Object.assign(languageMessages[language], parsed);
      } catch (e) {
        console.warn(`Translation file ${language}.json not found for ${translationsDirectory}`);
      }
    }
  } catch (e) {
    // A missing translations directory is expected here, not an error.
  }

  const nodeModulesPath = path.resolve(currentDirectory, 'node_modules');
  try {
    for (const module of getDirectories(nodeModulesPath, inputFileSystem)) {
      aggregateDirectory(languageMessages, path.resolve(nodeModulesPath, module), inputFileSystem);
    }
  } catch (e) {
    // A missing node_modules directory is expected here, not an error.
  }

  return languageMessages;
}
/**
 * Validates the plugin options, initialises the module-level
 * `supportedLocales`, and aggregates translation messages for
 * `options.baseDirectory` (including its node_modules tree).
 *
 * @param {Object} options plugin options; requires `baseDirectory` and `supportedLocales`
 * @param {Object} inputFileSystem fs-like object used for all reads
 * @returns {Object} locale -> aggregated messages map
 * @throws {Error} when `baseDirectory` or `supportedLocales` is missing
 */
function aggregateTranslationMessages(options, inputFileSystem) {
  // Fixed grammar in the user-facing messages ("Please included" -> "Please include").
  if (!options.baseDirectory) {
    throw new Error('Please include the base directory path in the plugin options.');
  }
  if (!options.supportedLocales) {
    throw new Error('Please include the supported locales in the plugin options.');
  }
  supportedLocales = options.supportedLocales;
  const languageMessages = {};
  supportedLocales.forEach((language) => { languageMessages[language] = {}; });
  // aggregateDirectory mutates and returns the same map.
  return aggregateDirectory(languageMessages, options.baseDirectory, inputFileSystem);
}
/**
 * Registers an 'after-environment' hook on the webpack compiler that writes
 * one aggregated translation module per supported locale into
 * `<baseDirectory>/aggregated-translations/<locale>.js`.
 *
 * @param {Object} options plugin options (baseDirectory, supportedLocales,
 *   optional inputFileSystem/outputFileSystem overrides)
 * @param {Object} compiler the webpack compiler
 */
function aggregateTranslations(options, compiler) {
  compiler.plugin('after-environment', () => {
    let inputFileSystem = options.inputFileSystem;
    if (!inputFileSystem) {
      // Fall back to webpack's own input file system.
      inputFileSystem = compiler.inputFileSystem;
    }
    // Aggregate translation messages for the directory
    const languageMessages = aggregateTranslationMessages(options, inputFileSystem);
    const directoryPath = path.resolve(options.baseDirectory, 'aggregated-translations');
    let outputFileSystem = options.outputFileSystem;
    // Create the aggregated-translations directory
    if (outputFileSystem) {
      // Caller-supplied file system (e.g. an in-memory fs) is assumed to
      // expose mkdirpSync — TODO confirm this contract with callers.
      outputFileSystem.mkdirpSync(directoryPath);
    } else {
      // No override: write through the real fs, creating the directory only
      // when it does not already exist (fs.mkdirSync throws otherwise).
      outputFileSystem = fs;
      if (!outputFileSystem.existsSync(directoryPath)) {
        outputFileSystem.mkdirSync(directoryPath);
      }
    }
    // Create a file for each language for the aggregated messages
    supportedLocales.forEach((language) => {
      if (language in languageMessages) {
        outputFileSystem.writeFileSync(path.resolve(directoryPath, `${language}.js`),
          generateTranslationFile(language, languageMessages[language]));
      } else {
        throw new Error(`Translation file found for ${language}.json, but translations were not loaded correctly. Please check that your translated modules were installed correctly.`);
      }
    });
  });
}
module.exports = (options) => {
let updatedOptions = options;
if (updatedOptions instanceof Array) {
updatedOptions = {
include: updatedOptions,
};
}
if (!Array.isArray(updatedOptions.include)) {
updatedOptions.include = [updatedOptions.include];
}
return {
apply: aggregateTranslations.bind(this, updatedOptions),
};
};
|
apache-2.0
|
h-crisis/assistant
|
Common/src/files/FileManagement.java
|
519
|
package files;
import java.io.File;
/**
 * Utility methods for managing files on disk.
 */
public class FileManagement {
    /**
     * Recursively deletes every regular file inside the given directory.
     * Sub-directories themselves are kept (only their contents are removed),
     * matching the original behaviour of this helper.
     *
     * @param dir directory whose contents should be removed; if it is not a
     *            directory or cannot be listed, the call is a no-op
     */
    public static void removeFiles(File dir) {
        File[] files = dir.listFiles();
        if (files == null) {
            // listFiles() returns null when 'dir' is not a directory or an
            // I/O error occurs; the original code threw a NullPointerException here.
            return;
        }
        for (File file : files) {
            if (file.isDirectory()) {
                removeFiles(file);
            } else {
                file.delete();
            }
        }
    }
}
|
apache-2.0
|
rlocus/SPAccess
|
LinqToSP/LinqToSP/Attributes/ContentTypeAttribute.cs
|
531
|
using Microsoft.SharePoint.Client;
using System;
namespace SP.Client.Linq.Attributes
{
  /// <summary>
  /// Marks a class or interface as mapping to a SharePoint content type,
  /// carrying the content type's identifying metadata.
  /// </summary>
  [AttributeUsage(AttributeTargets.Class | AttributeTargets.Interface, Inherited = true, AllowMultiple = false)]
  public class ContentTypeAttribute : Attribute
  {
    public ContentTypeAttribute()
    {
    }

    /// <summary>Content type id.</summary>
    public virtual string Id { get; set; }

    /// <summary>Content type display name.</summary>
    public virtual string Name { get; set; }

    /// <summary>Group the content type is organized under.</summary>
    public virtual string Group { get; set; }

    /// <summary>Id of the parent content type.</summary>
    public virtual string ParentId { get; set; }
  }
}
|
apache-2.0
|
sschepens/pulsar
|
pulsar-functions/utils/src/test/java/org/apache/pulsar/functions/utils/functioncache/FunctionCacheManagerImplTest.java
|
5762
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.functions.utils.functioncache;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertSame;
import static org.testng.Assert.assertTrue;
import com.google.common.collect.Lists;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
 * Unit test of {@link FunctionCacheManagerImpl}.
 *
 * <p>Fix: TestNG's {@code assertEquals} takes {@code (actual, expected)}; the
 * original calls passed the expected value first, producing swapped (and
 * therefore misleading) failure messages. Pass/fail behaviour is unchanged.
 */
public class FunctionCacheManagerImplTest {

    private URL jarUrl;
    private List<String> jarFiles;
    private List<URL> classpaths;
    private FunctionCacheManagerImpl cacheManager;

    @BeforeMethod
    public void setUp() {
        this.jarUrl = getClass().getClassLoader().getResource("multifunction.jar");
        this.jarFiles = Lists.newArrayList(jarUrl.getPath());
        this.classpaths = Collections.emptyList();
        this.cacheManager = new FunctionCacheManagerImpl();
    }

    @AfterMethod
    public void tearDown() {
        this.cacheManager.close();
    }

    /**
     * Loads the test AddFunction through the given class loader and verifies
     * it behaves as expected (2 -> 4).
     */
    @SuppressWarnings("unchecked")
    void verifyClassLoader(ClassLoader clsLoader) throws Exception {
        assertNotNull(clsLoader);
        Class<? extends Function<Integer, Integer>> cls =
            (Class<? extends Function<Integer, Integer>>)
                clsLoader.loadClass("org.apache.pulsar.functions.runtime.functioncache.AddFunction");
        Function<Integer, Integer> func = cls.newInstance();
        assertEquals(func.apply(2).intValue(), 4);
    }

    @Test(expectedExceptions = NullPointerException.class)
    public void testGetClassLoaderNullFunctionID() {
        this.cacheManager.getClassLoader(null);
    }

    @Test(expectedExceptions = IllegalStateException.class)
    public void testGetClassLoaderNotFound() {
        this.cacheManager.getClassLoader(java.util.UUID.randomUUID().toString());
    }

    @Test(expectedExceptions = NullPointerException.class)
    public void testRegisterNullFunctionID() throws Exception {
        this.cacheManager.registerFunctionInstance(
            null,
            java.util.UUID.randomUUID().toString(),
            Collections.emptyList(),
            Collections.emptyList());
    }

    @Test
    public void testRegister() throws Exception {
        String fid = java.util.UUID.randomUUID().toString();
        String eid = java.util.UUID.randomUUID().toString();
        this.cacheManager.registerFunctionInstance(fid, eid,
            jarFiles,
            classpaths);
        assertEquals(cacheManager.getCacheFunctions().size(), 1);
        FunctionCacheEntry entry = cacheManager.getCacheFunctions().get(fid);
        assertNotNull(entry);
        assertTrue(entry.isInstanceRegistered(eid));
        verifyClassLoader(cacheManager.getClassLoader(fid));
    }

    @Test
    public void testRegisterTwoInstances() throws Exception {
        String fid = java.util.UUID.randomUUID().toString();
        String iid1 = java.util.UUID.randomUUID().toString();
        String iid2 = java.util.UUID.randomUUID().toString();
        this.cacheManager.registerFunctionInstance(
            fid,
            iid1,
            jarFiles,
            classpaths);
        assertEquals(cacheManager.getCacheFunctions().size(), 1);
        FunctionCacheEntry entry1 = cacheManager.getCacheFunctions().get(fid);
        assertNotNull(entry1);
        assertTrue(entry1.isInstanceRegistered(iid1));
        verifyClassLoader(cacheManager.getClassLoader(fid));
        // Registering a second instance of the same function must reuse the
        // existing cache entry rather than creating a new one.
        this.cacheManager.registerFunctionInstance(
            fid,
            iid2,
            jarFiles,
            classpaths);
        assertEquals(cacheManager.getCacheFunctions().size(), 1);
        FunctionCacheEntry entry2 = cacheManager.getCacheFunctions().get(fid);
        assertNotNull(entry2);
        assertSame(entry1, entry2);
        assertTrue(entry1.isInstanceRegistered(iid2));
    }

    @Test
    public void testUnregister() throws Exception {
        String fid = java.util.UUID.randomUUID().toString();
        String iid = java.util.UUID.randomUUID().toString();
        this.cacheManager.registerFunctionInstance(
            fid,
            iid,
            jarFiles,
            classpaths);
        assertEquals(cacheManager.getCacheFunctions().size(), 1);
        FunctionCacheEntry entry = cacheManager.getCacheFunctions().get(fid);
        assertNotNull(entry);
        assertTrue(entry.isInstanceRegistered(iid));
        verifyClassLoader(cacheManager.getClassLoader(fid));
        // Unregistering the last instance must drop the whole cache entry.
        this.cacheManager.unregisterFunctionInstance(
            fid,
            iid);
        assertEquals(cacheManager.getCacheFunctions().size(), 0);
        assertNull(cacheManager.getCacheFunctions().get(fid));
        assertFalse(entry.isInstanceRegistered(iid));
    }
}
|
apache-2.0
|
phatboyg/Machete
|
src/Machete.HL7Schema/Generated/V26/Groups/OML_O33_ORDER_PRIOR.cs
|
1073
|
// This file was automatically generated and may be regenerated at any
// time. To ensure any changes are retained, modify the tool with any segment/component/group/field name
// or type changes.
namespace Machete.HL7Schema.V26
{
using HL7;
  /// <summary>
  /// OML_O33_ORDER_PRIOR (Group) - prior-order group of the HL7 V2.6 OML_O33
  /// message: the order (ORC/OBR) with its notes, roles, timing and
  /// observation sub-groups.
  /// </summary>
  public interface OML_O33_ORDER_PRIOR :
    HL7V26Layout
  {
    /// <summary>
    /// ORC - Common Order segment
    /// </summary>
    Segment<ORC> ORC { get; }

    /// <summary>
    /// OBR - Observation Request segment
    /// </summary>
    Segment<OBR> OBR { get; }

    /// <summary>
    /// NTE - Notes and Comments segments
    /// </summary>
    SegmentList<NTE> NTE { get; }

    /// <summary>
    /// ROL - Role segments
    /// </summary>
    SegmentList<ROL> ROL { get; }

    /// <summary>
    /// TIMING_PRIOR group list
    /// </summary>
    LayoutList<OML_O33_TIMING_PRIOR> TimingPrior { get; }

    /// <summary>
    /// OBSERVATION_PRIOR group list
    /// </summary>
    LayoutList<OML_O33_OBSERVATION_PRIOR> ObservationPrior { get; }
  }
}
|
apache-2.0
|
reo7sp/SBF-Bukkit-Plugin
|
src/net/sbfmc/logging/DebugLog.java
|
2263
|
/*
Copyright 2014 Reo_SP
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.sbfmc.logging;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Level;
import net.sbfmc.def.SBFPlugin;
import net.sbfmc.module.conf.DefaultConf;
/**
 * Append-only debug log backed by a {@code debugLog.txt} file in the plugin's
 * data folder. Messages are queued via {@link #writeToLog(String, Level)} and
 * flushed immediately through the {@code DefaultConf} save mechanism.
 */
public class DebugLog extends DefaultConf {

    private static final DebugLog INSTANCE = new DebugLog();

    // Message queued by writeToLog() and flushed (then cleared) by saveConf().
    // NOTE(review): static mutable state shared without synchronization;
    // concurrent writeToLog() calls may drop or interleave messages — confirm
    // whether this is only ever called from the server main thread.
    private static String lastLogMessage;

    private DebugLog() {
        try {
            initConf();
        } catch (IOException err) {
            err.printStackTrace();
        }
    }

    /**
     * Appends an INFO-level message to the debug log.
     *
     * @param message text to log
     */
    public static void writeToLog(String message) {
        writeToLog(message, Level.INFO);
    }

    /**
     * Appends a message with the given level, prefixed with a timestamp.
     *
     * @param message text to log
     * @param level severity recorded in the log line
     */
    public static void writeToLog(String message, Level level) {
        lastLogMessage = "[" + new SimpleDateFormat("dd-MM-yy HH:mm").format(new Date()) + "] [" + level + "] " + message;
        try {
            INSTANCE.saveConf();
        } catch (IOException err) {
            err.printStackTrace();
        }
    }

    @Override
    public void loadConf() throws IOException {
        // Nothing to load: the log file is append-only output.
    }

    @Override
    public void saveConf() throws IOException {
        if (lastLogMessage == null) {
            return;
        }
        // try-with-resources guarantees the writer is closed even when write()
        // throws (the original leaked the writer on failure and contained a
        // pointless catch-and-rethrow).
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(confFile, true))) {
            writer.write(lastLogMessage + "\n");
        } finally {
            lastLogMessage = null;
        }
    }

    @Override
    public void initConf() throws IOException {
        confFile = new File(SBFPlugin.getPlugin().getDataFolder(), "debugLog.txt");
        // Crude rotation: delete the log once it grows past 64 MiB.
        if (confFile.exists() && confFile.length() / 1024 / 1024 > 64) {
            confFile.delete();
        }
        createConf();
    }

    @Override
    public void deinitConf() throws IOException {
        confFile = null;
    }

    @Override
    public int getModuleID() {
        return 3;
    }
}
|
apache-2.0
|
qubole/quark
|
server/src/main/java/com/qubole/quark/server/ServerConfig.java
|
1053
|
/*
* Copyright (c) 2015. Qubole Inc
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.qubole.quark.server;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * POJO for server specific configurations, deserialized from JSON by Jackson.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ServerConfig {
  /** TCP port the server listens on, read from the {@code port} JSON property. */
  public final int port;

  /**
   * Creates the server configuration.
   *
   * @param port value of the {@code port} JSON property
   */
  @JsonCreator
  public ServerConfig(@JsonProperty("port") int port) {
    this.port = port;
  }
}
|
apache-2.0
|
EtaliaSA/jalia
|
src/main/java/net/etalia/jalia/spring/JaliaHttpMessageConverter.java
|
1543
|
package net.etalia.jalia.spring;
import java.io.IOException;
import java.nio.charset.Charset;
import net.etalia.jalia.ObjectMapper;
import net.etalia.jalia.TypeUtil;
import org.springframework.http.HttpInputMessage;
import org.springframework.http.HttpOutputMessage;
import org.springframework.http.MediaType;
import org.springframework.http.converter.AbstractHttpMessageConverter;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.http.converter.HttpMessageNotWritableException;
/**
 * Spring {@link AbstractHttpMessageConverter} that (de)serializes
 * {@code application/json;charset=UTF-8} request/response bodies with the
 * Jalia {@link ObjectMapper}.
 */
public class JaliaHttpMessageConverter extends AbstractHttpMessageConverter<Object> {
    /** Charset advertised in the supported media type. */
    public static final Charset DEFAULT_CHARSET = Charset.forName("UTF-8");

    private ObjectMapper mapper = new ObjectMapper();

    public JaliaHttpMessageConverter() {
        super(new MediaType("application", "json", DEFAULT_CHARSET));
    }

    /** Replaces the default mapper, e.g. with one configured by the application. */
    public void setObjectMapper(ObjectMapper mapper) {
        this.mapper = mapper;
    }

    @Override
    protected boolean supports(Class<?> clazz) {
        // Deliberately unrestricted: every type is handed to the Jalia mapper.
        // (An earlier revision limited this to Persistent subclasses.)
        return true;
    }

    @Override
    protected Object readInternal(Class<?> clazz, HttpInputMessage inputMessage) throws IOException, HttpMessageNotReadableException {
        // Delegate straight to Jalia, resolving the runtime class to a TypeUtil handle.
        return mapper.readValue(inputMessage.getBody(), TypeUtil.get(clazz));
    }

    @Override
    protected void writeInternal(Object t, HttpOutputMessage outputMessage) throws IOException, HttpMessageNotWritableException {
        // JaliaParametersFilter supplies the per-request field selection.
        mapper.writeValue(outputMessage.getBody(), JaliaParametersFilter.getFields(), t);
    }
}
|
apache-2.0
|
xiaomozhang/druid
|
druid-1.0.9/src/test/java/com/alibaba/druid/bvt/filter/wall/MySqlWallTest6.java
|
1096
|
/*
* Copyright 1999-2011 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.bvt.filter.wall;
import junit.framework.TestCase;
import org.junit.Assert;
import com.alibaba.druid.wall.WallUtils;
/**
* SQLServerWallTest
*
* @author RaymondXiu
* @version 1.0, 2012-3-18
* @see
*/
public class MySqlWallTest6 extends TestCase {

    /**
     * A statement containing the OR'ed tautology {@code 1=1} must be
     * rejected by the MySQL wall filter.
     */
    public void test_stuff() throws Exception {
        Assert.assertFalse(WallUtils.isValidateMySql(//
            "SELECT a.* FROM vote_info a where a.id<10 or (id <5 or 1=1) limit 1,10"));
    }
}
|
apache-2.0
|
ruixuekaifa/android--Demos
|
传感器/计步器/And001/src/com/cn/Main.java
|
2052
|
package com.cn;
import android.app.Activity;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Bundle;
import android.telephony.TelephonyManager;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
public class Main extends Activity implements SensorEventListener,
OnClickListener
{
private TextView textView;
private float lastPoint;
private int count = 0;
private boolean flag = true;
private SensorManager sm;
public void onClick(View view)
{
String msg = "";
switch (view.getId())
{
case R.id.btnStart:
sm = (SensorManager) getSystemService(SENSOR_SERVICE);
sm.registerListener(this, sm
.getDefaultSensor(Sensor.TYPE_ORIENTATION),
SensorManager.SENSOR_DELAY_FASTEST);
msg = "ÒѾ¿ªÊ¼¼Æ²½Æ÷.";
break;
case R.id.btnReset:
count = 0;
msg = "ÒÑ¾ÖØÖüƲ½Æ÷.";
break;
case R.id.btnStop:
sm.unregisterListener(this);
count = 0;
msg = "ÒѾֹͣ¼Æ²½Æ÷.";
break;
}
textView.setText(String.valueOf(count));
Toast.makeText(this, msg, Toast.LENGTH_SHORT).show();
}
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
Button btnStart = (Button) findViewById(R.id.btnStart);
Button btnReset = (Button) findViewById(R.id.btnReset);
Button btnStop = (Button) findViewById(R.id.btnStop);
btnStart.setOnClickListener(this);
btnReset.setOnClickListener(this);
btnStop.setOnClickListener(this);
textView = (TextView) findViewById(R.id.textview);
textView.setText(String.valueOf(count));
}
public void onAccuracyChanged(Sensor sensor, int accuracy)
{
}
public void onSensorChanged(SensorEvent event)
{
if (flag)
{
lastPoint = event.values[1];
flag = false;
}
if (Math.abs(event.values[1] - lastPoint) > 8)
{
lastPoint = event.values[1];
textView.setText(String.valueOf(++count));
}
}
}
|
apache-2.0
|
rickkosa/pynet_test
|
yaml_json/yaml_json_write.py
|
378
|
#!/usr/bin/python
"""Build a small mixed-type list and dump it to both YAML and JSON files.

Fixed for Python 3: ``range()`` no longer returns a list, so results are
materialized with ``list()`` before mutation. A bare ``my_list[-1]``
expression statement (a no-op) was removed.
"""
import yaml
import json

my_list = list(range(8))
my_list.append('whatever')
my_list.append('hello')
my_list.append({})
# Populate the trailing dict element in place.
my_list[-1]['ip_addr'] = '10.10.10.239'
my_list[-1]['attribs'] = list(range(7))

with open("ex6.yml", "w") as f:
    f.write(yaml.dump(my_list, default_flow_style=False))
with open("ex6.json", "w") as f:
    f.write(json.dumps(my_list))
|
apache-2.0
|
sambalmueslie/herold
|
src/main/java/de/sambalmueslie/herold/BaseDataModelElement.java
|
1065
|
package de.sambalmueslie.herold;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import de.sambalmueslie.herold.annotations.Key;
/**
* Base implementation for the {@link DataModelElement}.
*/
/**
 * Base implementation for the {@link DataModelElement}: identity
 * (equals/hashCode) is derived solely from the {@code id} field.
 *
 * NOTE(review): {@link #setId(long)} makes the identity mutable; changing the
 * id of an element already stored in a hash-based collection will corrupt
 * lookups.
 */
public abstract class BaseDataModelElement implements DataModelElement {

    @Override
    public final boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        final BaseDataModelElement other = (BaseDataModelElement) obj;
        // Identity is the id alone.
        return id == other.id;
    }

    @Override
    public final long getId() {
        return id;
    }

    @Override
    public final int hashCode() {
        // Value-identical to the previous hand-rolled mix:
        // 31 * 1 + (int) (id ^ id >>> 32) == 31 + Long.hashCode(id).
        return 31 + Long.hashCode(id);
    }

    public final void setId(long id) {
        this.id = id;
    }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }

    /** The identifying key of this element. */
    @Key
    private long id;
}
|
apache-2.0
|
AlbertoMonteiro/boleto2net
|
Boleto2.Net.Testes/Utils.cs
|
10961
|
using System;
using System.IO;
using System.Text;
using NReco.PdfGenerator;
using NUnit.Framework;
namespace Boleto2Net.Testes
{
/// <summary>
/// Fixture factory for the Boleto2Net test suite: builds Cedente/Sacado/Boleto
/// test data and drives the remessa/PDF homologation round-trip.
/// </summary>
internal sealed class Utils
{
    // Counters that vary successive fixtures (PF/PJ alternation, nosso número).
    private static int _contador = 1;
    private static int _proximoNossoNumero = 1;

    /// <summary>Builds a beneficiary (cedente) fixture with fixed identity data.
    /// <paramref name="codigoTransmissao"/> is currently unused but kept for
    /// signature compatibility with existing callers.</summary>
    internal static Cedente GerarCedente(string codigoCedente, string digitoCodigoCedente, string codigoTransmissao, ContaBancaria contaBancaria)
    {
        return new Cedente
        {
            CPFCNPJ = "86.875.666/0001-09",
            Nome = "Cedente Teste",
            Codigo = codigoCedente,
            CodigoDV = digitoCodigoCedente,
            Endereco = new Endereco
            {
                LogradouroEndereco = "Rua Teste do Cedente",
                LogradouroNumero = "789",
                LogradouroComplemento = "Cj 333",
                Bairro = "Bairro",
                Cidade = "Cidade",
                UF = "SP",
                CEP = "65432987"
            },
            ContaBancaria = contaBancaria
        };
    }

    /// <summary>Builds a payer (sacado) fixture, alternating between a person
    /// (even calls) and a company (odd calls) via <c>_contador</c>.</summary>
    internal static Sacado GerarSacado()
    {
        if (_contador % 2 == 0)
            return new Sacado
            {
                CPFCNPJ = "443.316.101-28",
                Nome = "Sacado Teste PF",
                Observacoes = "Matricula 678/9",
                Endereco = new Endereco
                {
                    LogradouroEndereco = "Rua Testando",
                    LogradouroNumero = "456",
                    Bairro = "Bairro",
                    Cidade = "Cidade",
                    UF = "SP",
                    CEP = "56789012"
                }
            };
        return new Sacado
        {
            CPFCNPJ = "71.738.978/0001-01",
            Nome = "Sacado Teste PJ",
            Observacoes = "Matricula 123/4",
            Endereco = new Endereco
            {
                LogradouroEndereco = "Avenida Testando",
                LogradouroNumero = "123",
                Bairro = "Bairro",
                Cidade = "Cidade",
                UF = "SP",
                CEP = "12345678"
            }
        };
    }

    /// <summary>Builds <paramref name="quantidadeBoletos"/> boletos for the bank.</summary>
    internal static Boletos GerarBoletos(IBanco banco, int quantidadeBoletos, string aceite)
    {
        var boletos = new Boletos
        {
            Banco = banco
        };
        for (var i = 1; i <= quantidadeBoletos; i++)
            boletos.Add(GerarBoleto(banco, i, aceite));
        return boletos;
    }

    /// <summary>Builds one boleto; amounts/dates scale with <paramref name="i"/>,
    /// "?" aceite alternates N/A, and every third call gets an avalista.</summary>
    internal static Boleto GerarBoleto(IBanco banco, int i, string aceite)
    {
        if (aceite == "?")
            aceite = _contador % 2 == 0 ? "N" : "A";
        var boleto = new Boleto(banco)
        {
            Sacado = GerarSacado(),
            DataEmissao = DateTime.Now.AddDays(-3),
            DataProcessamento = DateTime.Now,
            DataVencimento = DateTime.Now.AddMonths(i),
            ValorTitulo = (decimal)100 * i,
            NossoNumero = (223344 + _proximoNossoNumero).ToString(),
            NumeroDocumento = "BB" + _proximoNossoNumero.ToString("D6") + (char)(64 + i),
            EspecieDocumento = TipoEspecieDocumento.DM,
            Aceite = aceite,
            CodigoInstrucao1 = "11",
            CodigoInstrucao2 = "22",
            DataDesconto = DateTime.Now.AddMonths(i),
            ValorDesconto = (decimal)(100 * i * 0.10),
            DataMulta = DateTime.Now.AddMonths(i),
            PercentualMulta = (decimal)2.00,
            ValorMulta = (decimal)(100 * i * (2.00 / 100)),
            DataJuros = DateTime.Now.AddMonths(i),
            PercentualJurosDia = (decimal)0.2,
            ValorJurosDia = (decimal)(100 * i * (0.2 / 100)),
            MensagemArquivoRemessa = "Mensagem para o arquivo remessa",
            NumeroControleParticipante = "CHAVEPRIMARIA=" + _proximoNossoNumero
        };
        // Teller instructions, built from whichever charges are present.
        StringBuilder msgCaixa = new StringBuilder();
        if (boleto.ValorDesconto > 0)
            msgCaixa.AppendLine($"Conceder desconto de {boleto.ValorDesconto.ToString("R$ ##,##0.00")} até {boleto.DataDesconto.ToString("dd/MM/yyyy")}. ");
        if (boleto.ValorMulta > 0)
            msgCaixa.AppendLine($"Cobrar multa de {boleto.ValorMulta.ToString("R$ ##,##0.00")} após o vencimento. ");
        if (boleto.ValorJurosDia > 0)
            msgCaixa.AppendLine($"Cobrar juros de {boleto.ValorJurosDia.ToString("R$ ##,##0.00")} por dia de atraso. ");
        boleto.MensagemInstrucoesCaixa = msgCaixa.ToString();
        // Guarantor (avalista) on every third fixture.
        if (_contador % 3 == 0)
        {
            boleto.Avalista = GerarSacado();
            boleto.Avalista.Nome = boleto.Avalista.Nome.Replace("Sacado", "Avalista");
        }
        // Demonstrativo groups exercising multi-group, multi-item rendering.
        var grupoDemonstrativo = new GrupoDemonstrativo { Descricao = "GRUPO 1" };
        grupoDemonstrativo.Itens.Add(new ItemDemonstrativo { Descricao = "Grupo 1, Item 1", Referencia = boleto.DataEmissao.AddMonths(-1).Month + "/" + boleto.DataEmissao.AddMonths(-1).Year, Valor = boleto.ValorTitulo * (decimal)0.15 });
        grupoDemonstrativo.Itens.Add(new ItemDemonstrativo { Descricao = "Grupo 1, Item 2", Referencia = boleto.DataEmissao.AddMonths(-1).Month + "/" + boleto.DataEmissao.AddMonths(-1).Year, Valor = boleto.ValorTitulo * (decimal)0.05 });
        boleto.Demonstrativos.Add(grupoDemonstrativo);
        grupoDemonstrativo = new GrupoDemonstrativo { Descricao = "GRUPO 2" };
        grupoDemonstrativo.Itens.Add(new ItemDemonstrativo { Descricao = "Grupo 2, Item 1", Referencia = boleto.DataEmissao.Month + "/" + boleto.DataEmissao.Year, Valor = boleto.ValorTitulo * (decimal)0.20 });
        boleto.Demonstrativos.Add(grupoDemonstrativo);
        grupoDemonstrativo = new GrupoDemonstrativo { Descricao = "GRUPO 3" };
        grupoDemonstrativo.Itens.Add(new ItemDemonstrativo { Descricao = "Grupo 3, Item 1", Referencia = boleto.DataEmissao.AddMonths(-1).Month + "/" + boleto.DataEmissao.AddMonths(-1).Year, Valor = boleto.ValorTitulo * (decimal)0.37 });
        grupoDemonstrativo.Itens.Add(new ItemDemonstrativo { Descricao = "Grupo 3, Item 2", Referencia = boleto.DataEmissao.Month + "/" + boleto.DataEmissao.Year, Valor = boleto.ValorTitulo * (decimal)0.03 });
        grupoDemonstrativo.Itens.Add(new ItemDemonstrativo { Descricao = "Grupo 3, Item 3", Referencia = boleto.DataEmissao.Month + "/" + boleto.DataEmissao.Year, Valor = boleto.ValorTitulo * (decimal)0.12 });
        grupoDemonstrativo.Itens.Add(new ItemDemonstrativo { Descricao = "Grupo 3, Item 4", Referencia = boleto.DataEmissao.AddMonths(+1).Month + "/" + boleto.DataEmissao.AddMonths(+1).Year, Valor = boleto.ValorTitulo * (decimal)0.08 });
        boleto.Demonstrativos.Add(grupoDemonstrativo);
        boleto.ValidarDados();
        _contador++;
        _proximoNossoNumero++;
        return boleto;
    }

    /// <summary>
    /// End-to-end homologation helper: generates boletos, writes the remessa
    /// file and (optionally) a consolidated PDF, failing the NUnit test on any
    /// missing output.
    /// </summary>
    internal static void TestarHomologacao(IBanco banco, TipoArquivo tipoArquivo, string nomeCarteira, int quantidadeBoletos, bool gerarPDF, string aceite)
    {
        var boletos = GerarBoletos(banco, quantidadeBoletos, aceite);
        Assert.AreEqual(quantidadeBoletos, boletos.Count, "Quantidade de boletos diferente de " + quantidadeBoletos);
        // Output file names; create the folder and remove stale outputs.
        var nomeArquivoREM = Path.Combine(Path.GetTempPath(), "Boleto2Net", $"{nomeCarteira}_{tipoArquivo}.REM");
        var nomeArquivoPDF = Path.Combine(Path.GetTempPath(), "Boleto2Net", $"{nomeCarteira}_{tipoArquivo}.PDF");
        if (!Directory.Exists(Path.GetDirectoryName(nomeArquivoREM)))
            Directory.CreateDirectory(Path.GetDirectoryName(nomeArquivoREM));
        if (File.Exists(nomeArquivoREM))
        {
            File.Delete(nomeArquivoREM);
            if (File.Exists(nomeArquivoREM))
                Assert.Fail("Arquivo Remessa não foi excluído: " + nomeArquivoREM);
        }
        if (File.Exists(nomeArquivoPDF))
        {
            File.Delete(nomeArquivoPDF);
            if (File.Exists(nomeArquivoPDF))
                Assert.Fail("Arquivo Boletos (PDF) não foi excluído: " + nomeArquivoPDF);
        }
        // Remittance (remessa) file.
        try
        {
            var arquivoRemessa = new ArquivoRemessa(boletos.Banco, tipoArquivo, 1);
            using (var fileStream = new FileStream(nomeArquivoREM, FileMode.Create))
                arquivoRemessa.GerarArquivoRemessa(boletos, fileStream);
            if (!File.Exists(nomeArquivoREM))
                Assert.Fail("Arquivo Remessa não encontrado: " + nomeArquivoREM);
        }
        catch (Exception e)
        {
            if (File.Exists(nomeArquivoREM))
                File.Delete(nomeArquivoREM);
            Assert.Fail(e.InnerException.ToString());
        }
        if (gerarPDF)
        {
            // PDF with every boleto.
            try
            {
                var html = new StringBuilder();
                foreach (var boletoTmp in boletos)
                {
                    using (var boletoParaImpressao = new BoletoBancario
                    {
                        Boleto = boletoTmp,
                        OcultarInstrucoes = false,
                        MostrarComprovanteEntrega = false,
                        MostrarEnderecoCedente = true,
                        ExibirDemonstrativo = true
                    })
                    {
                        html.Append("<div style=\"page-break-after: always;\">");
                        html.Append(boletoParaImpressao.MontaHtml());
                        html.Append("</div>");
                    }
                }
                // Convert and write the PDF once, after accumulating the HTML of
                // all boletos (previously this ran inside the foreach, converting
                // and overwriting the file on every iteration for the same final
                // result).
                var pdf = new HtmlToPdfConverter().GeneratePdf(html.ToString());
                using (var fs = new FileStream(nomeArquivoPDF, FileMode.Create))
                    fs.Write(pdf, 0, pdf.Length);
                if (!File.Exists(nomeArquivoPDF))
                    Assert.Fail("Arquivo Boletos (PDF) não encontrado: " + nomeArquivoPDF);
            }
            catch (Exception e)
            {
                if (File.Exists(nomeArquivoPDF))
                    File.Delete(nomeArquivoPDF);
                Assert.Fail(e.InnerException.ToString());
            }
        }
    }
}
}
|
apache-2.0
|
kevoree-modeling/experiments
|
org.mwg.experiments.smartgridprofiling/src/main/java/org/mwg/experiments/reccurent/datasets/TextGeneration.java
|
9209
|
package org.mwg.experiments.reccurent.datasets;
import java.io.File;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.mwg.experiments.reccurent.autodiff.Graph;
import org.mwg.experiments.reccurent.datastructs.DataSequence;
import org.mwg.experiments.reccurent.datastructs.DataSet;
import org.mwg.experiments.reccurent.datastructs.DataStep;
import org.mwg.experiments.reccurent.util.Util;
import org.mwg.experiments.reccurent.loss.LossSoftmax;
import org.mwg.experiments.reccurent.matrix.Matrix;
import org.mwg.experiments.reccurent.model.LinearUnit;
import org.mwg.experiments.reccurent.model.Model;
import org.mwg.experiments.reccurent.model.Nonlinearity;
public class TextGeneration extends DataSet {
public static int reportSequenceLength = 100;
public static boolean singleWordAutocorrect = false;
public static boolean reportPerplexity = true;
private static Map<String, Integer> charToIndex = new HashMap<>();
private static Map<Integer, String> indexToChar = new HashMap<>();
private static int dimension;
private static double[] vecStartEnd;
private static final int START_END_TOKEN_INDEX = 0;
private static Set<String> words = new HashSet<>();
/**
 * Samples text from the trained model one character at a time for
 * {@code steps} steps, splitting the output into lines whenever the
 * [START/END] token is produced.
 *
 * @param model       trained character model
 * @param steps       number of characters to sample
 * @param argmax      true = greedy decoding, false = sample from distribution
 * @param temperature softmax temperature used when sampling
 * @param rng         randomness source for stochastic decoding
 * @return the generated lines (the last one may be truncated mid-line)
 */
public static List<String> generateText(Model model, int steps, boolean argmax, double temperature, Random rng) throws Exception {
    List<String> lines = new ArrayList<>();
    // One-hot vector of the [START/END] token, used to (re)seed the model.
    Matrix start = new Matrix(dimension);
    start.w[START_END_TOKEN_INDEX] = 1.0;
    model.resetState();
    Graph g = new Graph(false);
    Matrix input = start.clone();
    String line = "";
    for (int s = 0; s < steps; s++) {
        Matrix logprobs = model.forward(input, g);
        Matrix probs = LossSoftmax.getSoftmaxProbs(logprobs, temperature);
        if (singleWordAutocorrect) {
            // Constrain the next character to continuations of known words.
            Matrix possible = Matrix.ones(dimension, 1);
            try {
                possible = singleWordAutocorrect(line);
            }
            catch (Exception e) {
                //TODO: still may be some lingering bugs, so don't constrain by possible if a problem occurs. Fix later..
            }
            double tot = 0;
            //remove impossible transitions
            for (int i = 0; i < probs.w.length; i++) {
                probs.w[i] *= possible.w[i];
                tot += probs.w[i];
            }
            //normalize to sum of 1.0 again
            for (int i = 0; i < probs.w.length; i++) {
                probs.w[i] /= tot;
            }
            // Sanity check: masking must leave no forbidden probability mass.
            for (int i = 0; i < probs.w.length; i++) {
                if (probs.w[i] > 0 && possible.w[i] == 0) {
                    throw new Exception("Illegal transition");
                }
            }
        }
        int indxChosen = -1;
        if (argmax) {
            // Greedy decoding: take the most probable character.
            double high = Double.NEGATIVE_INFINITY;
            for (int i = 0; i < probs.w.length; i++) {
                if (probs.w[i] > high) {
                    high = probs.w[i];
                    indxChosen = i;
                }
            }
        }
        else {
            // Stochastic decoding: sample from the tempered distribution.
            indxChosen = Util.pickIndexFromRandomVector(probs, rng);
        }
        if (indxChosen == START_END_TOKEN_INDEX) {
            // End-of-line token: flush the line and fully reset the model.
            lines.add(line);
            line = "";
            input = start.clone();
            g = new Graph(false);
            model.resetState();
            input = start.clone(); // NOTE(review): duplicate of the assignment two lines up; kept as-is.
        }
        else {
            String ch = indexToChar.get(indxChosen);
            line += ch;
            // Re-encode the chosen character as the next one-hot input.
            for (int i = 0; i < input.w.length; i++) {
                input.w[i] = 0;
            }
            input.w[indxChosen] = 1.0;
        }
    }
    // Flush a trailing, unterminated line.
    if (line.equals("") == false) {
        lines.add(line);
    }
    return lines;
}
/**
 * Builds a 0/1 mask over the character vocabulary that restricts the next
 * character to continuations of words observed in the training corpus.
 * Throws when the partial word is malformed or matches nothing.
 */
private static Matrix singleWordAutocorrect(String sequence) throws Exception {
    /*
     * This restricts the output of the RNN to being composed of words found in the source text.
     * It makes no attempts to account for probabilities in any way.
     */
    sequence = sequence.replace("\"\n\"", " ");
    if (sequence.equals("") || sequence.endsWith(" ")) { //anything is possible after a space
        return Matrix.ones(dimension, 1);
    }
    String[] parts = sequence.split(" ");
    String lastPartialWord = parts[parts.length-1].trim();
    if (lastPartialWord.equals(" ") || lastPartialWord.contains(" ")) {
        throw new Exception("unexpected");
    }
    // Candidate completions: every vocabulary word extending the partial word.
    List<String> matches = new ArrayList<>();
    for (String word : words) {
        if (word.startsWith(lastPartialWord)) {
            matches.add(word);
        }
    }
    if (matches.size() == 0) {
        throw new Exception("unexpected, no matches for '"+lastPartialWord+"'");
    }
    Matrix result = new Matrix(dimension);
    boolean hit = false;
    for (String match : matches) {
        if (match.length() < lastPartialWord.length()) {
            throw new Exception("How is match shorter than partial word?");
        }
        if (lastPartialWord.equals(match)) {
            // Exact word: allow a space or end-of-line next.
            result.w[charToIndex.get(" ")] = 1.0;
            result.w[START_END_TOKEN_INDEX] = 1.0;
            continue;
        }
        // Allow the next character of this candidate completion.
        String nextChar = match.charAt(lastPartialWord.length()) + "";
        result.w[charToIndex.get(nextChar)] = 1.0;
        hit = true;
    }
    if (hit == false) {
        // Only exact matches existed: the word must terminate here.
        result.w[charToIndex.get(" ")] = 1.0;
        result.w[START_END_TOKEN_INDEX] = 1.0;
    }
    return result;
}
/**
 * Renders a one-hot target sequence back into a quoted sentence using the
 * static index-to-character table (the final END step is skipped).
 */
public static String sequenceToSentence(DataSequence sequence) {
    // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
    StringBuilder result = new StringBuilder("\"");
    for (int s = 0; s < sequence.steps.size() - 1; s++) {
        DataStep step = sequence.steps.get(s);
        int index = -1;
        // Locate the single "hot" position of this step's target vector.
        for (int i = 0; i < step.targetOutput.w.length; i++) {
            if (step.targetOutput.w[i] == 1) {
                index = i;
                break;
            }
        }
        result.append(indexToChar.get(index));
    }
    return result.append("\"\n").toString();
}
/**
 * Loads a text corpus and builds the character-level training set: each line
 * becomes a sequence of one-hot vectors bracketed by [START/END] tokens,
 * with each step predicting the following character.
 *
 * @param path corpus file, read with the platform default charset
 */
public TextGeneration(String path) throws Exception {
    System.out.println("Text generation task");
    System.out.println("loading " + path + "...");
    File file = new File(path);
    List<String> lines = Files.readAllLines(file.toPath(), Charset.defaultCharset());
    Set<String> chars = new HashSet<>();
    // Index 0 is reserved for the [START/END] sentinel token.
    int id = 0;
    charToIndex.put("[START/END]", id);
    indexToChar.put(id, "[START/END]");
    id++;
    System.out.println("Characters:");
    System.out.print("\t");
    for (String line : lines) {
        // Collect vocabulary words once per line. The original re-split the
        // line inside the per-character loop, making this O(len^2) per line;
        // the empty-line guard preserves the exact contents of `words`.
        if (line.length() > 0) {
            String[] parts = line.split(" ");
            for (String part : parts) {
                words.add(part.trim());
            }
        }
        for (int i = 0; i < line.length(); i++) {
            String ch = line.charAt(i) + "";
            if (chars.contains(ch) == false) {
                System.out.print(ch);
                chars.add(ch);
                charToIndex.put(ch, id);
                indexToChar.put(id, ch);
                id++;
            }
        }
    }
    dimension = chars.size() + 1;
    vecStartEnd = new double[dimension];
    vecStartEnd[START_END_TOKEN_INDEX] = 1.0;
    List<DataSequence> sequences = new ArrayList<>();
    int size = 0;
    for (String line : lines) {
        // One-hot encode the line, bracketed by START/END vectors.
        List<double[]> vecs = new ArrayList<>();
        vecs.add(vecStartEnd);
        for (int i = 0; i < line.length(); i++) {
            String ch = line.charAt(i) + "";
            int index = charToIndex.get(ch);
            double[] vec = new double[dimension];
            vec[index] = 1.0;
            vecs.add(vec);
        }
        vecs.add(vecStartEnd);
        DataSequence sequence = new DataSequence();
        // Each step maps a vector to its successor (next-character prediction).
        for (int i = 0; i < vecs.size() - 1; i++) {
            sequence.steps.add(new DataStep(vecs.get(i), vecs.get(i+1)));
            size++;
        }
        sequences.add(sequence);
    }
    System.out.println("Total unique chars = " + chars.size());
    System.out.println(size + " steps in training set.");
    training = sequences;
    lossTraining = new LossSoftmax();
    lossReporting = new LossSoftmax();
    inputDimension = sequences.get(0).steps.get(0).input.w.length;
    // Derive the output dimension from the first step with a target vector.
    int loc = 0;
    while (sequences.get(0).steps.get(loc).targetOutput == null) {
        loc++;
    }
    outputDimension = sequences.get(0).steps.get(loc).targetOutput.w.length;
}
/**
 * Prints a generation report: optional median perplexity over the training
 * set, sampled text at several temperatures, and a final argmax sample.
 */
@Override
public void DisplayReport(Model model, Random rng) throws Exception {
    System.out.println("========================================");
    System.out.println("REPORT:");
    if (reportPerplexity) {
        System.out.println("\ncalculating perplexity over entire data set...");
        double perplexity = LossSoftmax.calculateMedianPerplexity(model, training);
        System.out.println("\nMedian Perplexity = " + String.format("%.4f", perplexity));
    }
    double[] temperatures = {1, 0.75, 0.5, 0.25, 0.1};
    for (double temperature : temperatures) {
        if (TextGeneration.singleWordAutocorrect) {
            System.out.println("\nTemperature "+temperature+" prediction (with single word autocorrect):");
        }
        else {
            System.out.println("\nTemperature "+temperature+" prediction:");
        }
        printGuesses(TextGeneration.generateText(model, reportSequenceLength, false, temperature, rng));
    }
    if (TextGeneration.singleWordAutocorrect) {
        System.out.println("\nArgmax prediction (with single word autocorrect):");
    }
    else {
        System.out.println("\nArgmax prediction:");
    }
    printGuesses(TextGeneration.generateText(model, reportSequenceLength, true, 1.0, rng));
    System.out.println("========================================");
}

/** Prints sampled lines, suffixing the last (possibly truncated) one with "...". */
private static void printGuesses(List<String> guess) {
    for (int i = 0; i < guess.size(); i++) {
        if (i == guess.size()-1) {
            System.out.println("\t\"" + guess.get(i) + "...\"");
        }
        else {
            System.out.println("\t\"" + guess.get(i) + "\"");
        }
    }
}
/**
 * Uses a linear output unit; probabilities are produced downstream via
 * LossSoftmax (see generateText and the lossTraining/lossReporting fields).
 */
@Override
public Nonlinearity getModelOutputUnitToUse() {
    return new LinearUnit();
}
}
|
apache-2.0
|
bolkedebruin/airflow
|
airflow/contrib/hooks/segment_hook.py
|
1174
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.segment.hooks.segment`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.segment.hooks.segment import SegmentHook, analytics # noqa
# Warn at import time so code importing the legacy path is pointed at the new
# provider package; stacklevel=2 attributes the warning to the importing module.
warnings.warn(
    "This module is deprecated. Please use `airflow.providers.segment.hooks.segment`.",
    DeprecationWarning, stacklevel=2
)
|
apache-2.0
|
hanhlh/hadoop-0.20.2_FatBTree
|
src/mapred/org/apache/hadoop/mapred/CompletedJobStatusStore.java
|
10491
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.server.namenodeFBT.msg.MessageException;
import org.apache.hadoop.hdfs.server.namenodeFBT.service.ServiceException;
/**
* Persists and retrieves the Job info of a job into/from DFS.
* <p/>
* If the retain time is zero jobs are not persisted.
* <p/>
* A daemon thread cleans up job info files older than the retain time
* <p/>
* The retain time can be set with the 'persist.jobstatus.hours'
* configuration variable (it is in hours).
*/
class CompletedJobStatusStore implements Runnable {
private boolean active;
private String jobInfoDir;
private long retainTime;
private FileSystem fs;
private static final String JOB_INFO_STORE_DIR = "/jobtracker/jobsInfo";
public static final Log LOG =
LogFactory.getLog(CompletedJobStatusStore.class);
private static long HOUR = 1000 * 60 * 60;
private static long SLEEP_TIME = 1 * HOUR;
/**
 * Reads the persistence settings and, when active, prepares the DFS
 * directory that completed-job info files are written to.
 */
CompletedJobStatusStore(Configuration conf) throws IOException, MessageException {
    active =
        conf.getBoolean("mapred.job.tracker.persist.jobstatus.active", false);
    if (active) {
        // Retain time is configured in hours; 0 means "do not retain".
        retainTime =
            conf.getInt("mapred.job.tracker.persist.jobstatus.hours", 0) * HOUR;
        jobInfoDir =
            conf.get("mapred.job.tracker.persist.jobstatus.dir", JOB_INFO_STORE_DIR);
        Path path = new Path(jobInfoDir);
        // set the fs
        this.fs = path.getFileSystem(conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
        }
        if (retainTime == 0) {
            // as retain time is zero, all stored jobstatuses are deleted.
            deleteJobStatusDirs();
        }
        LOG.info("Completed job store activated/configured with retain-time : "
            + retainTime + " , job-info-dir : " + jobInfoDir);
    } else {
        LOG.info("Completed job store is inactive");
    }
}
/**
 * Indicates if job status persistency is active or not
 * (config key "mapred.job.tracker.persist.jobstatus.active").
 *
 * @return TRUE if active, FALSE otherwise.
 */
public boolean isActive() {
    return active;
}
/**
 * Housekeeping loop: while a positive retain time is configured,
 * periodically deletes job-info files older than retainTime.
 * An interrupt during the sleep terminates the loop.
 */
public void run() {
    if (retainTime > 0) {
        while (true) {
            try {
                deleteJobStatusDirs();
            } catch (MessageException e) {
                // TODO(review): auto-generated catch block (original comment
                // was mojibake); decide on real error handling.
                e.printStackTrace();
            }
            try {
                Thread.sleep(SLEEP_TIME);
            }
            catch (InterruptedException ex) {
                // Interrupt is the shutdown signal for this daemon loop.
                break;
            }
        }
    }
}
/**
 * Deletes every job-info file whose modification time is older than
 * retainTime. IO problems are logged and swallowed (housekeeping is
 * best-effort).
 */
private void deleteJobStatusDirs() throws MessageException {
    try {
        long currentTime = System.currentTimeMillis();
        FileStatus[] jobInfoFiles = fs.listStatus(
            new Path[]{new Path(jobInfoDir)});
        //noinspection ForLoopReplaceableByForEach
        for (FileStatus jobInfo : jobInfoFiles) {
            try {
                if ((currentTime - jobInfo.getModificationTime()) > retainTime) {
                    fs.delete(jobInfo.getPath(), true);
                }
            }
            catch (IOException ie) {
                // Keep going: one bad file must not stop the sweep.
                LOG.warn("Could not do housekeeping for [ " +
                    jobInfo.getPath() + "] job info : " + ie.getMessage(), ie);
            }
        }
    }
    catch (IOException ie) {
        LOG.warn("Could not obtain job info files : " + ie.getMessage(), ie);
    }
}
/** Path of the persisted info file for a job: "&lt;jobInfoDir&gt;/&lt;jobId&gt;.info". */
private Path getInfoFilePath(JobID jobId) {
    return new Path(jobInfoDir, jobId + ".info");
}
/**
 * Persists a job in DFS.
 * <p/>
 * Writes status, profile, counters and task-completion events sequentially
 * to the job's info file. On a write failure the partial file is deleted
 * and the error logged (persistence is best-effort).
 *
 * @param job the job about to be 'retired'
 * @throws ServiceException
 * @throws MessageException
 */
public void store(JobInProgress job) throws MessageException, ServiceException {
    if (active && retainTime > 0) {
        JobID jobId = job.getStatus().getJobID();
        Path jobStatusFile = getInfoFilePath(jobId);
        FSDataOutputStream dataOut = null;
        try {
            dataOut = fs.create(jobStatusFile);
            job.getStatus().write(dataOut);
            job.getProfile().write(dataOut);
            job.getCounters().write(dataOut);
            TaskCompletionEvent[] events =
                job.getTaskCompletionEvents(0, Integer.MAX_VALUE);
            dataOut.writeInt(events.length);
            for (TaskCompletionEvent event : events) {
                event.write(dataOut);
            }
            dataOut.close();
            dataOut = null; // fully written and closed
        } catch (IOException ex) {
            LOG.warn("Could not store [" + jobId + "] job info : " +
                ex.getMessage(), ex);
            // Close the stream before deleting the partial file (the original
            // leaked the stream when a write threw before close()).
            if (dataOut != null) {
                try {
                    dataOut.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
                dataOut = null;
            }
            try {
                fs.delete(jobStatusFile, true);
            }
            catch (IOException ex1) {
                //ignore
            }
        } finally {
            // Safety net for non-IOException failures.
            if (dataOut != null) {
                try {
                    dataOut.close();
                } catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
    }
}
/** Opens the persisted info file for the job, or returns null if it does not exist. */
private FSDataInputStream getJobInfoFile(JobID jobId) throws IOException, MessageException {
    Path jobStatusFile = getInfoFilePath(jobId);
    return (fs.exists(jobStatusFile)) ? fs.open(jobStatusFile) : null;
}
/** Reads the JobStatus record at the stream's current position. */
private JobStatus readJobStatus(FSDataInputStream dataIn) throws IOException {
    JobStatus jobStatus = new JobStatus();
    jobStatus.readFields(dataIn);
    return jobStatus;
}
/** Reads the JobProfile record at the stream's current position. */
private JobProfile readJobProfile(FSDataInputStream dataIn)
    throws IOException {
    JobProfile jobProfile = new JobProfile();
    jobProfile.readFields(dataIn);
    return jobProfile;
}
/** Reads the Counters record at the stream's current position. */
private Counters readCounters(FSDataInputStream dataIn) throws IOException {
    Counters counters = new Counters();
    counters.readFields(dataIn);
    return counters;
}
/**
 * Reads the persisted completion-event array and returns the window
 * [offset, offset + len); the stored length prefix is consumed first.
 * Events before the window are still read (to advance the stream) but
 * discarded.
 */
private TaskCompletionEvent[] readEvents(FSDataInputStream dataIn,
                                         int offset, int len)
    throws IOException {
    int size = dataIn.readInt();
    if (offset > size) {
        // Window starts past the end: nothing to return.
        return TaskCompletionEvent.EMPTY_ARRAY;
    }
    if (offset + len > size) {
        // Clamp the window to the stored event count.
        len = size - offset;
    }
    TaskCompletionEvent[] events = new TaskCompletionEvent[len];
    for (int i = 0; i < (offset + len); i++) {
        TaskCompletionEvent event = new TaskCompletionEvent();
        event.readFields(dataIn);
        if (i >= offset) {
            events[i - offset] = event;
        }
    }
    return events;
}
/**
 * This method retrieves JobStatus information from DFS stored using
 * store method.
 *
 * @param jobId the jobId for which jobStatus is queried
 * @return JobStatus object, null if not able to retrieve
 * @throws MessageException
 */
public JobStatus readJobStatus(JobID jobId) throws MessageException {
    JobStatus jobStatus = null;
    if (null == jobId) {
        LOG.warn("Could not read job status for null jobId");
        return null;
    }
    if (active) {
        try {
            FSDataInputStream dataIn = getJobInfoFile(jobId);
            if (dataIn != null) {
                // The status record is the first entry in the info file.
                jobStatus = readJobStatus(dataIn);
                dataIn.close();
            }
        } catch (IOException ex) {
            LOG.warn("Could not read [" + jobId + "] job status : " + ex, ex);
        }
    }
    return jobStatus;
}
/**
 * This method retrieves JobProfile information from DFS stored using
 * store method.
 *
 * @param jobId the jobId for which jobProfile is queried
 * @return JobProfile object, null if not able to retrieve
 * @throws MessageException
 */
public JobProfile readJobProfile(JobID jobId) throws MessageException {
    JobProfile jobProfile = null;
    if (active) {
        try {
            FSDataInputStream dataIn = getJobInfoFile(jobId);
            if (dataIn != null) {
                // Records are stored in order status, profile, counters,
                // events; skip the status record to reach the profile.
                readJobStatus(dataIn);
                jobProfile = readJobProfile(dataIn);
                dataIn.close();
            }
        } catch (IOException ex) {
            LOG.warn("Could not read [" + jobId + "] job profile : " + ex, ex);
        }
    }
    return jobProfile;
}
/**
 * This method retrieves Counters information from DFS stored using
 * store method.
 *
 * @param jobId the jobId for which Counters is queried
 * @return Counters object, null if not able to retrieve
 * @throws MessageException
 */
public Counters readCounters(JobID jobId) throws MessageException {
    Counters counters = null;
    if (active) {
        try {
            FSDataInputStream dataIn = getJobInfoFile(jobId);
            if (dataIn != null) {
                // Skip the status and profile records preceding the counters.
                readJobStatus(dataIn);
                readJobProfile(dataIn);
                counters = readCounters(dataIn);
                dataIn.close();
            }
        } catch (IOException ex) {
            LOG.warn("Could not read [" + jobId + "] job counters : " + ex, ex);
        }
    }
    return counters;
}
/**
 * This method retrieves TaskCompletionEvents information from DFS stored
 * using store method.
 *
 * @param jobId the jobId for which TaskCompletionEvents is queried
 * @param fromEventId events offset
 * @param maxEvents max number of events
 * @return TaskCompletionEvent[], empty array if not able to retrieve
 * @throws MessageException
 */
public TaskCompletionEvent[] readJobTaskCompletionEvents(JobID jobId,
                                                         int fromEventId,
                                                         int maxEvents) throws MessageException {
    TaskCompletionEvent[] events = TaskCompletionEvent.EMPTY_ARRAY;
    if (active) {
        try {
            FSDataInputStream dataIn = getJobInfoFile(jobId);
            if (dataIn != null) {
                // Skip the status, profile and counters records preceding
                // the event list.
                readJobStatus(dataIn);
                readJobProfile(dataIn);
                readCounters(dataIn);
                events = readEvents(dataIn, fromEventId, maxEvents);
                dataIn.close();
            }
        } catch (IOException ex) {
            LOG.warn("Could not read [" + jobId + "] job events : " + ex, ex);
        }
    }
    return events;
}
}
|
apache-2.0
|
sckm/CallbackAttacher
|
library/src/androidTest/java/com/github/scache/callbackattacher/ApplicationTest.java
|
365
|
package com.github.scache.callbackattacher;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
    // Standard Android instrumentation scaffold: delegates to the framework's
    // ApplicationTestCase with the default Application class. No custom test
    // methods are defined here.
    public ApplicationTest() {
        super(Application.class);
    }
}
|
apache-2.0
|
JeremyAiYt/MyApp
|
app/src/main/java/com/shaojun/myapp/base/mvp/BaseMvpFragment.java
|
1106
|
package com.shaojun.myapp.base.mvp;
import android.annotation.SuppressLint;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.View;
import com.shaojun.myapp.base.BaseFragment;
import butterknife.ButterKnife;
import butterknife.Unbinder;
@SuppressLint("NewApi")
public abstract class BaseMvpFragment<T extends IPresenter> extends BaseFragment implements IView {
    public String title;
    // Subclasses are expected to assign the presenter before the view is created.
    protected T mPresenter;
    // Assigned in onViewCreated; null until then (and after onDestroyView).
    private Unbinder mUnBinder;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        // FIX: guard against a subclass that failed to assign mPresenter —
        // the original threw an NPE here. onDestroy already null-checks, so
        // this makes the lifecycle handling consistent.
        if (mPresenter != null) {
            mPresenter.attachView(this);
        }
        mUnBinder = ButterKnife.bind(this, view);
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // FIX: mUnBinder is null if onViewCreated never ran (e.g. view
        // inflation failed); the original would NPE here in that case.
        if (mUnBinder != null) {
            mUnBinder.unbind();
            mUnBinder = null;
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mPresenter != null) mPresenter.detachView();
    }
}
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-frauddetector/src/main/java/com/amazonaws/services/frauddetector/model/GetEventTypesResult.java
|
6035
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.frauddetector.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/frauddetector-2019-11-15/GetEventTypes" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetEventTypesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The event types returned for this page of results. */
    private java.util.List<EventType> eventTypes;

    /** The pagination token for the next page of results, or null. */
    private String nextToken;

    /**
     * <p>
     * An array of event types.
     * </p>
     *
     * @return An array of event types.
     */
    public java.util.List<EventType> getEventTypes() {
        return this.eventTypes;
    }

    /**
     * <p>
     * An array of event types.
     * </p>
     * A defensive copy of the supplied collection is stored, so later changes
     * to the argument do not affect this object.
     *
     * @param eventTypes
     *        An array of event types.
     */
    public void setEventTypes(java.util.Collection<EventType> eventTypes) {
        this.eventTypes = (eventTypes == null) ? null : new java.util.ArrayList<EventType>(eventTypes);
    }

    /**
     * <p>
     * An array of event types.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEventTypes(java.util.Collection)} or {@link #withEventTypes(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param eventTypes
     *        An array of event types.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetEventTypesResult withEventTypes(EventType... eventTypes) {
        if (this.eventTypes == null) {
            setEventTypes(new java.util.ArrayList<EventType>(eventTypes.length));
        }
        java.util.Collections.addAll(this.eventTypes, eventTypes);
        return this;
    }

    /**
     * <p>
     * An array of event types.
     * </p>
     *
     * @param eventTypes
     *        An array of event types.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetEventTypesResult withEventTypes(java.util.Collection<EventType> eventTypes) {
        setEventTypes(eventTypes);
        return this;
    }

    /**
     * <p>
     * The next page token.
     * </p>
     *
     * @param nextToken
     *        The next page token.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The next page token.
     * </p>
     *
     * @return The next page token.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The next page token.
     * </p>
     *
     * @param nextToken
     *        The next page token.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetEventTypesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getEventTypes() != null) {
            buf.append("EventTypes: ").append("***Sensitive Data Redacted***").append(",");
        }
        if (getNextToken() != null) {
            buf.append("NextToken: ").append(getNextToken());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the original null check.
        if (!(obj instanceof GetEventTypesResult)) {
            return false;
        }
        GetEventTypesResult other = (GetEventTypesResult) obj;
        // Null-safe field comparison; equal iff both fields match.
        return java.util.Objects.equals(other.getEventTypes(), this.getEventTypes())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // Same 31-based combination (with null -> 0) as the hand-rolled version.
        return java.util.Objects.hash(getEventTypes(), getNextToken());
    }

    @Override
    public GetEventTypesResult clone() {
        try {
            return (GetEventTypesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
|
apache-2.0
|
davebarnes97/geode
|
geode-core/src/main/java/org/apache/geode/internal/cache/partitioned/PersistentBucketRecoverer.java
|
16430
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.partitioned;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.logging.log4j.Logger;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.BucketPersistenceAdvisor;
import org.apache.geode.internal.cache.ColocationHelper;
import org.apache.geode.internal.cache.DiskStoreImpl;
import org.apache.geode.internal.cache.PRHARedundancyProvider;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.ProxyBucketRegion;
import org.apache.geode.internal.cache.persistence.PersistentMemberID;
import org.apache.geode.internal.cache.persistence.PersistentStateListener;
import org.apache.geode.internal.inet.LocalHostUtil;
import org.apache.geode.internal.process.StartupStatus;
import org.apache.geode.internal.util.TransformUtils;
import org.apache.geode.logging.internal.executors.LoggingThread;
import org.apache.geode.logging.internal.log4j.api.LogService;
/**
* Consolidates logging during the recovery of ProxyRegionBuckets that are not hosted by this
* member. The logger is meant to run in its own thread.
* It uses a count down latch to determine whether the recovery is finished.
*/
public class PersistentBucketRecoverer extends RecoveryRunnable implements PersistentStateListener {

  private static final Logger logger = LogService.getLogger();

  /**
   * True when one or more buckets have reported a change in status.
   * Volatile: written from membership callbacks, read by the logging thread.
   */
  private volatile boolean membershipChanged = true;

  /**
   * Sleep period between posting log entries.
   */
  private static final int SLEEP_PERIOD = 15000;

  /**
   * Used to determine when all proxy buckets have been recovered.
   * Counted down once per recovered bucket; run2() exits when it hits zero.
   */
  private final CountDownLatch allBucketsRecoveredFromDisk;

  // Immutable snapshot of the persistent regions (leader + colocated children)
  // whose bucket recovery this recoverer tracks and logs.
  private final List<RegionStatus> regions;

  private final StartupStatus startupStatus;

  /**
   * Creates a new PersistentBucketRecoverer.
   */
  public PersistentBucketRecoverer(PRHARedundancyProvider prhaRedundancyProvider,
      int proxyBuckets) {
    this(prhaRedundancyProvider, proxyBuckets, new StartupStatus());
  }

  private PersistentBucketRecoverer(PRHARedundancyProvider prhaRedundancyProvider, int proxyBuckets,
      StartupStatus startupStatus) {
    super(prhaRedundancyProvider);
    this.startupStatus = startupStatus;

    // Recovery is tracked from the colocation leader down: collect the leader
    // plus all colocated children, keeping only the persistent ones.
    PartitionedRegion baseRegion =
        ColocationHelper.getLeaderRegion(redundancyProvider.getPartitionedRegion());
    List<PartitionedRegion> colocatedRegions =
        getColocatedChildRegions(baseRegion);

    List<RegionStatus> allRegions = new ArrayList<>(colocatedRegions.size() + 1);

    if (baseRegion.getDataPolicy().withPersistence()) {
      allRegions.add(new RegionStatus(baseRegion));
    }

    for (PartitionedRegion region : colocatedRegions) {
      if (region.getDataPolicy().withPersistence()) {
        allRegions.add(new RegionStatus(region));
      }
    }
    regions = Collections.unmodifiableList(allRegions);

    allBucketsRecoveredFromDisk = new CountDownLatch(proxyBuckets);
    membershipChanged = true;
    addListeners();
  }

  // Extracted for test overrides; delegates to ColocationHelper.
  List<PartitionedRegion> getColocatedChildRegions(PartitionedRegion baseRegion) {
    return ColocationHelper.getColocatedChildRegions(baseRegion);
  }

  // Starts the (non-daemon=false, i.e. daemon) logging thread that runs run2()
  // until all buckets are recovered.
  public void startLoggingThread() {
    Thread loggingThread = new LoggingThread(
        "PersistentBucketRecoverer for region "
            + redundancyProvider.getPartitionedRegion().getName(),
        false,
        this);
    loggingThread.start();
  }

  /**
   * Called when a member comes online for a bucket.
   */
  @Override
  public void memberOnline(InternalDistributedMember member, PersistentMemberID persistentID) {
    membershipChanged = true;
  }

  /**
   * Called when a member goes offline for a bucket.
   */
  @Override
  public void memberOffline(InternalDistributedMember member, PersistentMemberID persistentID) {
    membershipChanged = true;
  }

  /**
   * Called when a member is removed for a bucket.
   */
  @Override
  public void memberRemoved(PersistentMemberID persistentID, boolean revoked) {
    membershipChanged = true;
  }

  /**
   * Add this PersistentBucketRecoverer as a persistence listener to all the region's bucket
   * advisors.
   */
  private void addListeners() {
    for (RegionStatus region : regions) {
      region.addListeners();
    }
  }

  /**
   * Removes this PersistentBucketRecoverer as a persistence listener from all the region's bucket
   * advisors.
   */
  private void removeListeners() {
    for (RegionStatus region : regions) {
      region.removeListeners();
    }
  }

  /**
   * Writes a consolidated log entry every SLEEP_PERIOD that summarizes which buckets are still
   * waiting on persistent members for the region.
   */
  @Override
  public void run2() {
    try {
      boolean warningLogged = false;
      while (getLatchCount() > 0) {

        int sleepMillis = SLEEP_PERIOD;
        // reduce the first log time from 15secs so that higher layers can
        // report sooner to user
        if (!warningLogged) {
          sleepMillis = SLEEP_PERIOD / 2;
        }

        Thread.sleep(sleepMillis);

        // Only log when a membership callback has fired since the last pass,
        // to avoid repeating identical "waiting for members" entries.
        if (membershipChanged) {
          membershipChanged = false;
          for (RegionStatus region : regions) {
            region.logWaitingForMembers();
          }
          warningLogged = true;
        }
      }
    } catch (InterruptedException e) {
      // Log and bail
      logger.error(e.getMessage(), e);
    } finally {
      /*
       * Our job is done. Stop listening to the bucket advisors.
       */
      removeListeners();

      /*
       * Make sure the recovery completion message was printed to the log.
       */
      for (RegionStatus region : regions) {
        if (!region.loggedDoneMessage) {
          region.logDoneMessage();
        }
      }
    }
  }

  /**
   * Keeps track of logging a message for a single partitioned region and logging a separate message
   * when the waiting is done for the same region
   *
   */
  private class RegionStatus {
    /**
     * The persistent identifier of the member running this PersistentBucketRecoverer.
     */
    private final PersistentMemberID thisMember;

    /**
     * The region that the proxy buckets belong to.
     */
    private final String region;

    /**
     * An array of ProxyBucketRegions that comprise this partitioned region.
     */
    private final ProxyBucketRegion[] bucketRegions;

    /**
     * Indicates that a completion message has been logged.
     * Starts true so no "done" message is logged unless a waiting message was.
     */
    private volatile boolean loggedDoneMessage = true;

    public RegionStatus(PartitionedRegion region) {
      thisMember = createPersistentMemberID(region);
      this.region = region.getFullPath();
      bucketRegions = region.getRegionAdvisor().getProxyBucketArray();
    }

    public void removeListeners() {
      for (ProxyBucketRegion proxyBucket : bucketRegions) {
        proxyBucket.getPersistenceAdvisor().removeListener(PersistentBucketRecoverer.this);
      }
    }

    public void addListeners() {
      for (ProxyBucketRegion proxyBucket : bucketRegions) {
        proxyBucket.getPersistenceAdvisor().addListener(PersistentBucketRecoverer.this);
      }
    }

    /**
     * Creates a temporary (and somewhat fake) PersistentMemberID for this member if there is no
     * DiskStore available for our region (which can happen in some colocated scenarios).
     */
    private PersistentMemberID createPersistentMemberID(PartitionedRegion region) {
      DiskStoreImpl diskStore = null;

      /*
       * A non-persistent colocated region will not have a disk store so check the leader region if
       * this region does not have one.
       */
      if (region.getAttributes().getDataPolicy().withPersistence()) {
        diskStore = region.getDiskStore();
      } else if (ColocationHelper.getLeaderRegion(region).getAttributes().getDataPolicy()
          .withPersistence()) {
        diskStore = ColocationHelper.getLeaderRegion(region).getDiskStore();
      }

      /*
       * We have a DiskStore? Great! Simply have it generate the id.
       */
      if (null != diskStore) {
        return diskStore.generatePersistentID();
      }

      /*
       * Bummer. No DiskStore. Put together a fake one (for logging only).
       */
      {
        String name = "No name for this member";
        String diskDir = System.getProperty("user.dir");

        InetAddress localHost = null;
        try {
          localHost = LocalHostUtil.getLocalHost();
        } catch (UnknownHostException e) {
          logger.error("Could not determine my own host", e);
        }

        return (new PersistentMemberID(null, localHost, diskDir, name,
            redundancyProvider.getPartitionedRegion().getCache().cacheTimeMillis(), (short) 0));
      }
    }

    /**
     * Returns a map from each persistent member being waited for to the set of bucket ids
     * waiting on it, aggregated across all the ProxyBucketRegions.
     *
     * @param offlineOnly true if only the members which are not currently running should be
     *        returned, false to return all members that this member is waiting for, including
     *        members which are running but not fully initialized.
     */
    private Map<PersistentMemberID, Set<Integer>> getMembersToWaitFor(boolean offlineOnly) {
      Map<PersistentMemberID, Set<Integer>> waitingForMembers =
          new HashMap<PersistentMemberID, Set<Integer>>();

      for (ProxyBucketRegion proxyBucket : bucketRegions) {
        Integer bucketId = proxyBucket.getBucketId();

        // Get the set of missing members from the persistence advisor
        Set<PersistentMemberID> missingMembers;
        BucketPersistenceAdvisor persistenceAdvisor = proxyBucket.getPersistenceAdvisor();
        if (offlineOnly) {
          missingMembers = persistenceAdvisor.getMissingMembers();
        } else {
          missingMembers = persistenceAdvisor.getAllMembersToWaitFor();
        }

        if (missingMembers != null) {
          for (PersistentMemberID missingMember : missingMembers) {
            Set<Integer> buckets = waitingForMembers.get(missingMember);
            if (buckets == null) {
              // TreeSet keeps bucket ids sorted for readable log output.
              buckets = new TreeSet<Integer>();
              waitingForMembers.put(missingMember, buckets);
            }
            buckets.add(bucketId);
          }
        }
      }
      return waitingForMembers;
    }

    /**
     * Prints a recovery completion message to the log.
     */
    private void logDoneMessage() {
      loggedDoneMessage = true;
      startupStatus.startup(
          String.format(
              "Region %s has successfully completed waiting for other members to recover the latest data.My persistent member information:%s",
              region,
              TransformUtils.persistentMemberIdToLogEntryTransformer.transform(thisMember)));
    }

    /**
     * Logs a consolidated log entry for all ProxyBucketRegions waiting for persistent members.
     * Offline members take precedence over online-but-uninitialized members; if neither
     * remains, the completion message is logged (once).
     */
    private void logWaitingForMembers() {
      Map<PersistentMemberID, Set<Integer>> offlineMembers = getMembersToWaitFor(true);
      Map<PersistentMemberID, Set<Integer>> allMembersToWaitFor = getMembersToWaitFor(false);

      boolean thereAreBucketsToBeRecovered = (getLatchCount() > 0);

      /*
       * Log any offline members the region is waiting for.
       */
      if (thereAreBucketsToBeRecovered && !offlineMembers.isEmpty()) {
        Set<String> membersToWaitForLogEntries = new HashSet<>();

        TransformUtils.transform(offlineMembers.entrySet(), membersToWaitForLogEntries,
            TransformUtils.persistentMemberEntryToLogEntryTransformer);

        Set<Integer> missingBuckets = getAllWaitingBuckets(offlineMembers);
        startupStatus.startup(
            String.format(
                "Region %s (and any colocated sub-regions) has potentially stale data. Buckets %s are waiting for another offline member to recover the latest data.My persistent id is:%sOffline members with potentially new data:%sUse the gfsh show missing-disk-stores command to see all disk stores that are being waited on by other members.",
                region, missingBuckets,
                TransformUtils.persistentMemberIdToLogEntryTransformer.transform(thisMember),
                membersToWaitForLogEntries));
        loggedDoneMessage = false;
      }
      /*
       * No offline? Then log any online members the region is waiting for.
       */
      else if (thereAreBucketsToBeRecovered && !allMembersToWaitFor.isEmpty()) {
        Set<String> membersToWaitForLogEntries = new HashSet<>();
        Set<Integer> missingBuckets = getAllWaitingBuckets(allMembersToWaitFor);

        TransformUtils.transform(allMembersToWaitFor.entrySet(), membersToWaitForLogEntries,
            TransformUtils.persistentMemberEntryToLogEntryTransformer);

        startupStatus.startup(
            String.format(
                "Region %s (and any colocated sub-regions) has potentially stale data. Buckets %s are waiting for another online member to recover the latest data.My persistent id is:%sOnline members with potentially new data:%sUse the gfsh show missing-disk-stores command to see all disk stores that are being waited on by other members.",
                region, missingBuckets,
                TransformUtils.persistentMemberIdToLogEntryTransformer.transform(thisMember),
                membersToWaitForLogEntries));
        loggedDoneMessage = false;
      }
      /*
       * No online? Then log that we are done.
       */
      else if (!this.loggedDoneMessage) {
        logDoneMessage();
      }
    }

    /**
     * Get a consolidated set of all buckets that are waiting.
     */
    private Set<Integer> getAllWaitingBuckets(
        Map<PersistentMemberID, Set<Integer>> offlineMembers) {
      Set<Integer> allWaitingBuckets = new TreeSet<Integer>();
      for (Set<Integer> missingPerMember : offlineMembers.values()) {
        allWaitingBuckets.addAll(missingPerMember);
      }
      return allWaitingBuckets;
    }
  }

  // Waits (uninterruptibly, re-asserting interrupt at the end) until all
  // buckets are recovered, periodically re-checking the cancel criterion so a
  // cache shutdown aborts the wait.
  public void await(long timeout, TimeUnit unit) {
    boolean interrupted = false;
    while (true) {
      try {
        redundancyProvider.getPartitionedRegion().getCancelCriterion().checkCancelInProgress(null);
        boolean done = allBucketsRecoveredFromDisk.await(timeout, unit);
        if (done) {
          break;
        }
      } catch (InterruptedException e) {
        interrupted = true;
      }
    }
    if (interrupted) {
      Thread.currentThread().interrupt();
    }
  }

  // Uninterruptible, unbounded wait for full recovery; note this variant does
  // NOT check the cancel criterion.
  public void await() {
    boolean interrupted = false;
    while (true) {
      try {
        getAllBucketsRecoveredFromDiskLatch().await();
        break;
      } catch (InterruptedException e) {
        interrupted = true;
      }
    }
    if (interrupted) {
      Thread.currentThread().interrupt();
    }
  }

  // Marks one bucket as recovered.
  public void countDown() {
    allBucketsRecoveredFromDisk.countDown();
  }

  // Marks `size` buckets as recovered in one call.
  public void countDown(int size) {
    while (size > 0) {
      allBucketsRecoveredFromDisk.countDown();
      --size;
    }
  }

  // True once every bucket has been recovered (latch exhausted).
  public boolean hasRecoveryCompleted() {
    if (getLatchCount() > 0) {
      return false;
    }
    return true;
  }

  long getLatchCount() {
    return allBucketsRecoveredFromDisk.getCount();
  }

  CountDownLatch getAllBucketsRecoveredFromDiskLatch() {
    return allBucketsRecoveredFromDisk;
  }
}
|
apache-2.0
|
corsc/go-tools
|
depends/internal/get_dep_list.go
|
1979
|
// Copyright 2017 Corey Scott http://www.sage42.org/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
"bytes"
"encoding/json"
"log"
"os/exec"
"strings"
)
// GetDependantsList returns the list packages that depend on a given package (directory).
// It terminates the process via log.Fatalf if the output cannot be parsed.
func GetDependantsList(searchDir string) *MasterList {
	// FIX: renamed the local from `bytes` to `raw` — the original shadowed the
	// imported `bytes` package within this function.
	raw := goList(searchDir)

	out := &MasterList{}
	if err := json.Unmarshal(raw, out); err != nil {
		log.Fatalf("failed to parse go list data with err %s", err)
	}
	return out
}
// goList runs `go list --json ./...` in searchDir and returns the combined
// stdout wrapped so it parses as a single JSON document: {"pkgs":[...]}.
// It terminates the process via log.Fatalf on any failure.
func goList(searchDir string) []byte {
	cmd := exec.Command("go", "list", "--json", "./...")
	cmd.Dir = searchDir

	output := &bytes.Buffer{}
	_, _ = output.WriteString(`{"pkgs":[`)

	catchErr := &bytes.Buffer{}
	cmd.Stdout = output
	cmd.Stderr = catchErr

	err := cmd.Run()
	if err != nil {
		log.Fatalf("failed to get deps from go list with err %s", err)
	}

	if catchErr.Len() > 0 {
		// FIX: the original logged `err` here, but err is always nil on this
		// path (Run succeeded) — report the captured stderr instead.
		log.Fatalf("failed to get deps from go list with stderr %s", catchErr.String())
	}

	_, _ = output.WriteString(`]}`)

	// `go list --json` emits one JSON object per package, back to back; insert
	// commas between them so the whole thing is a valid JSON array.
	// TODO: this is terrible, needs fixing
	outString := output.String()
	return []byte(strings.Replace(outString, "}\n{", "},\n{", -1))
}
// MasterList is the hack around the `go list --json` format: goList wraps the
// raw output in {"pkgs":[...]} so it can be unmarshalled into this struct.
type MasterList struct {
	Pkgs []*Deps `json:"pkgs"`
}

// Deps is the JSON format returned by `go list --json` (only the fields this
// tool needs are mapped).
type Deps struct {
	// BasePath is the package's import path.
	BasePath string `json:"ImportPath"`
	// DirectImports are the packages imported by non-test sources.
	DirectImports []string `json:"Imports"`
	// TestImports are the packages imported only by test sources.
	TestImports []string `json:"TestImports"`
}
|
apache-2.0
|
schmittjoh/php-stubs
|
res/php/filesystem/functions/fopen.php
|
331
|
<?php
/**
 * Opens file or URL.
 *
 * Stub signature for the built-in fopen(); the body is intentionally empty
 * because this file only provides type information for static analysis.
 *
 * @phpstub
 *
 * @param string   $filename         name of the file or URL to open
 * @param string   $mode             access mode (e.g. 'r', 'w', 'a+')
 * @param bool     $use_include_path whether to also search the include path
 * @param resource $context          optional stream context
 *
 * @return resource Returns a file pointer resource on success, or false on error.
 */
function fopen($filename, $mode, $use_include_path = false, $context = NULL)
{
    // Intentionally empty: stub only.
}
|
apache-2.0
|
Thangiee/Personal-Health-Monitoring-System
|
PHMS/src/main/java/com/cse3310/phms/ui/fragments/SlideMenuListFragment.java
|
8227
|
/*
* Copyright (c) 2014 Personal-Health-Monitoring-System
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cse3310.phms.ui.fragments;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v4.app.FragmentTransaction;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockFragment;
import com.actionbarsherlock.app.SherlockListFragment;
import com.ami.fundapter.BindDictionary;
import com.ami.fundapter.FunDapter;
import com.ami.fundapter.extractors.StringExtractor;
import com.ami.fundapter.interfaces.StaticImageLoader;
import com.cse3310.phms.R;
import com.cse3310.phms.model.User;
import com.cse3310.phms.ui.activities.LoginActivity_;
import com.cse3310.phms.ui.utils.DrawerItem;
import com.cse3310.phms.ui.utils.UserSingleton;
import com.jeremyfeinstein.slidingmenu.lib.app.SlidingFragmentActivity;
import java.util.ArrayList;
import java.util.List;
public class SlideMenuListFragment extends SherlockListFragment {
    // Sentinel layoutId for the "Log out" entry, which opens a dialog instead
    // of switching to a fragment.
    private static final int LOG_OUT = 0;

    private List<DrawerItem> drawerItems = new ArrayList<DrawerItem>() {{ // list of items to be display in the sliding menu
        add(new DrawerItem(R.layout.home_screen, "Home", R.drawable.ic_action_home));
        add(new DrawerItem(R.layout.diet_screen, "Diet", R.drawable.ic_action_restaurant));
        add(new DrawerItem(R.layout.weight_log_screen, "Weight Logs", R.drawable.ic_action_line_chart));
        add(new DrawerItem(R.layout.appointment_screen, "Appointment", R.drawable.ic_action_calendar_day));
        add(new DrawerItem(R.layout.contact_screen, "Contacts", R.drawable.ic_action_users));
        add(new DrawerItem(R.layout.medication_screen, "Medication", R.drawable.ic_action_pill));
        add(new DrawerItem(R.layout.vitals_screen, "Vital Signs", R.drawable.ic_action_warning));
        add(new DrawerItem(R.layout.estorage_screen, "eStorage", R.drawable.ic_action_database));
        add(new DrawerItem(R.layout.reminder_screen, "Reminders", R.drawable.ic_action_alarm));
        add(new DrawerItem(LOG_OUT, "Log out", R.drawable.ic_action_key));
    }};

    // Index of the previously selected menu item; used to avoid recreating the
    // fragment when the same item is clicked again.
    private int lastPosition = 0;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        View view = inflater.inflate(R.layout.list_sliding_menu, container, false);

        // set the sliding menu header to show the current user's username and email.
        User user = UserSingleton.INSTANCE.getCurrentUser();
        TextView usernameHeader = (TextView) view.findViewById(R.id.frag_list_sliding_menu_tv_header_username);
        usernameHeader.setText(Character.toUpperCase(user.getUsername().charAt(0)) + user.getUsername().substring(1)); // first char to upper case
        TextView emailHeader = (TextView) view.findViewById(R.id.frag_list_sliding_menu_tv_header_email);
        emailHeader.setText(user.getPersonalInfo().getEmail());

        return view;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        // see FunDapter library at https://github.com/amigold/FunDapter
        BindDictionary<DrawerItem> dict = new BindDictionary<DrawerItem>();

        // setup the text for the items in the sliding menu
        dict.addStringField(R.id.frag_list_item_tv_title,
                new StringExtractor<DrawerItem>() {
                    @Override
                    public String getStringValue(DrawerItem drawerItem, int i) {
                        return drawerItem.title;
                    }
                });

        // setup the icon for the items in the sliding menu
        dict.addStaticImageField(R.id.list_item_icon, new StaticImageLoader<DrawerItem>() {
            @Override
            public void loadImage(DrawerItem item, ImageView imageView, int position) {
                // fall back to the launcher icon when no icon was supplied
                if (item.imageId == DrawerItem.DEFAULT) {
                    imageView.setImageResource(R.drawable.ic_launcher);
                } else {
                    imageView.setImageResource(item.imageId);
                }
            }
        });

        FunDapter<DrawerItem> adapter = new FunDapter<DrawerItem>(getActivity(), drawerItems, R.layout.list_item, dict);
        setListAdapter(adapter);
        getListView().setItemChecked(0, true); // set home in sliding menu as default on start up
    }

    @Override
    public void onListItemClick(ListView l, View v, int position, long id) {
        selectItem(position);
        // Record the selection AFTER selectItem so its same-item check compares
        // against the previous selection.
        lastPosition = position;
        ((SlidingFragmentActivity) getActivity()).getSlidingMenu().toggle(); // close sliding menu after clicking an item
    }

    // Maps the clicked drawer item to its screen fragment and swaps it in.
    // LOG_OUT opens a confirmation dialog instead of switching screens.
    private void selectItem(int position) {
        SherlockFragment fragment = null;
        FragmentTransaction fragTran = getActivity().getSupportFragmentManager().beginTransaction();

        // decide which screen to be switch to base on drawer item the user clicked
        switch (drawerItems.get(position).layoutId) {
            case R.layout.home_screen:
                fragment = new HomeScreenFragment_();
                break;
            case R.layout.diet_screen:
                fragment = new DietScreenFragment_();
                break;
            case R.layout.weight_log_screen:
                fragment = new WeightLogScreenFragment_();
                break;
            case R.layout.contact_screen:
                fragment = new ContactScreenFragment_();
                break;
            case R.layout.estorage_screen:
                fragment = new EStorageFragment_();
                break;
            case R.layout.medication_screen:
                fragment = new MedicationScreenFragment_();
                break;
            case R.layout.appointment_screen:
                fragment = new AppointmentScreenFragment_();
                break;
            case R.layout.reminder_screen:
                fragment = new ReminderScreenFragment_();
                break;
            case R.layout.vitals_screen:
                fragment = new VitalsScreenFragment_();
                break;
            case LOG_OUT:
                openLogOutDialog();
                break;
        }

        // if the screen to switch to is the same as the current screen,
        // do nothing/don't recreate that screen.
        if (lastPosition == position) {
            return;
        }

        if (fragment != null) {
            // switch the screen!
            fragTran.replace(R.id.frag_front_container, fragment);
            fragTran.commit();
        }
    }

    // Shows a confirmation dialog; on confirm, returns to the login screen and
    // finishes the hosting activity.
    private void openLogOutDialog() {
        new AlertDialog.Builder(getActivity()).setTitle("Log out?")
                .setMessage("Are you sure you want to log out?")
                .setIcon(android.R.drawable.ic_dialog_alert)
                .setPositiveButton("Log out", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        LoginActivity_.intent(SlideMenuListFragment.this).start();
                        getActivity().finish();
                    }
                })
                .setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                }).show();
    }
}
|
apache-2.0
|
AfzalivE/AutoGrid
|
AutoGrid/Properties/AssemblyInfo.cs
|
2375
|
// Assembly-level metadata for the AutoGrid WPF library. This file contains
// only attributes; there is no executable code here.
using System.Reflection;
using System.Resources;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Windows;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("AutoGrid")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("AutoGrid")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

//In order to begin building localizable applications, set
//<UICulture>CultureYouAreCodingWith</UICulture> in your .csproj file
//inside a <PropertyGroup>. For example, if you are using US english
//in your source files, set the <UICulture> to en-US. Then uncomment
//the NeutralResourceLanguage attribute below. Update the "en-US" in
//the line below to match the UICulture setting in the project file.
//[assembly: NeutralResourcesLanguage("en-US", UltimateResourceFallbackLocation.Satellite)]

[assembly: ThemeInfo(
    ResourceDictionaryLocation.None, //where theme specific resource dictionaries are located
                                     //(used if a resource is not found in the page,
                                     // or application resource dictionaries)
    ResourceDictionaryLocation.SourceAssembly //where the generic resource dictionary is located
                                              //(used if a resource is not found in the page,
                                              // app, or any theme specific resource dictionaries)
)]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
apache-2.0
|
stripe/stripe-dotnet
|
src/StripeTests/Infrastructure/JsonUtilsTest.cs
|
3189
|
namespace StripeTests
{
    using Newtonsoft.Json;
    using Stripe.Infrastructure;
    using Xunit;

    /// <summary>
    /// Verifies that the <c>JsonUtils</c> serialization helpers are insulated
    /// from any global <c>JsonConvert.DefaultSettings</c> the host application
    /// may have installed. Each test installs hostile DefaultSettings, shows
    /// that plain <c>JsonConvert</c> is affected by them, and shows that
    /// <c>JsonUtils</c> is not. The original settings are restored in a
    /// <c>finally</c> block so other tests are not polluted.
    /// </summary>
    public class JsonUtilsTest : BaseStripeTest
    {
        /// <summary>
        /// Deserialization via <c>JsonUtils</c> must ignore
        /// <c>JsonConvert.DefaultSettings</c> (here: MissingMemberHandling.Error).
        /// </summary>
        [Fact]
        public void DeserializeObjectIgnoresDefaultSettings()
        {
            var origDefaultSettings = JsonConvert.DefaultSettings;
            try
            {
                JsonConvert.DefaultSettings = () => new JsonSerializerSettings
                {
                    MissingMemberHandling = MissingMemberHandling.Error,
                };
                var s = "{\"int\":234,\"string\":\"Hello!\",\"foo\":\"bar\"}";
                // Deserialization throws an exception because of the extra `foo` property that is
                // missing in the TestObject class.
                Assert.Throws<JsonSerializationException>(() =>
                    JsonConvert.DeserializeObject<TestObject>(s));
                // Deserialization succeeds because we're not using DefaultSettings, so
                // MissingMemberHandling is set to its default value Ignore instead of Error.
                var objStripe = JsonUtils.DeserializeObject<TestObject>(s);
                Assert.NotNull(objStripe);
                Assert.Equal(234, objStripe.Int);
                Assert.Equal("Hello!", objStripe.String);
            }
            finally
            {
                // Always restore the application-wide settings.
                JsonConvert.DefaultSettings = origDefaultSettings;
            }
        }

        /// <summary>
        /// Serialization via <c>JsonUtils</c> must ignore
        /// <c>JsonConvert.DefaultSettings</c> (here: indented formatting and
        /// PreserveReferencesHandling.All, which injects `$id` keys).
        /// </summary>
        [Fact]
        public void SerializeObjectIgnoresDefaultSettings()
        {
            var origDefaultSettings = JsonConvert.DefaultSettings;
            try
            {
                JsonConvert.DefaultSettings = () => new JsonSerializerSettings
                {
                    Formatting = Formatting.Indented,
                    PreserveReferencesHandling = PreserveReferencesHandling.All,
                };
                var o = new TestObject { Int = 234, String = "Hello!" };
                // Serialized string is formatted with newlines and indentation because of
                // Formatting.Indented, and includes `$id` keys because of
                // PreserveReferencesHandling.All.
                var jsonDefault = JsonConvert.SerializeObject(o);
                // Normalize line endings so the assertion passes on Windows too.
                jsonDefault = jsonDefault.Replace("\r\n", "\n");
                Assert.Equal(
                    "{\n  \"$id\": \"1\",\n  \"int\": 234,\n  \"string\": \"Hello!\"\n}",
                    jsonDefault);
                // Serialized string is not formatted and doesn't include `$id` keys because
                // we're not using DefaultSettings.
                var jsonStripe = JsonUtils.SerializeObject(o);
                jsonStripe = jsonStripe.Replace("\r\n", "\n");
                Assert.Equal("{\"int\":234,\"string\":\"Hello!\"}", jsonStripe);
            }
            finally
            {
                JsonConvert.DefaultSettings = origDefaultSettings;
            }
        }

        // Minimal fixture type: one extra JSON property in the input is enough
        // to trigger MissingMemberHandling.Error.
        [JsonObject]
        private class TestObject
        {
            [JsonProperty("int")]
            public int Int { get; set; }

            [JsonProperty("string")]
            public string String { get; set; }
        }
    }
}
|
apache-2.0
|
Kerbores/spring-thunder
|
spring-thunder/src/main/java/club/zhcs/thunder/aop/OperationLogAop.java
|
4519
|
package club.zhcs.thunder.aop;
import java.lang.reflect.Method;
import java.util.List;
import javax.annotation.Resource;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.shiro.SecurityUtils;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.nutz.castor.Castors;
import org.nutz.json.Json;
import org.nutz.json.JsonFormat;
import org.nutz.lang.ContinueLoop;
import org.nutz.lang.Each;
import org.nutz.lang.ExitLoop;
import org.nutz.lang.Lang;
import org.nutz.lang.LoopException;
import org.nutz.lang.Stopwatch;
import org.nutz.lang.Strings;
import org.springframework.stereotype.Component;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ResponseBody;
import com.google.common.collect.Lists;
import club.zhcs.thunder.biz.log.OperationLogService;
import club.zhcs.thunder.domain.log.OperationLog;
import club.zhcs.thunder.ext.spring.SpringBeans;
import club.zhcs.titans.utils.db.Result;
/**
* @author kerbores
*
* @email kerbores@gmail.com
*
*/
@Aspect
@Component
public class OperationLogAop {
@Resource
OperationLogService operationLogService;
@Pointcut("@annotation(club.zhcs.thunder.aop.SystemLog)")
public void cutSystemLog() {
}
public SystemLog getSystemLog(JoinPoint joinPoint) throws Exception {
String targetName = joinPoint.getTarget().getClass().getName();
String methodName = joinPoint.getSignature().getName();
Object[] arguments = joinPoint.getArgs();
Class targetClass = Class.forName(targetName);
Method[] methods = targetClass.getMethods();
SystemLog target = null;
for (Method method : methods) {
if (method.getName().equals(methodName)) {
Class[] clazzs = method.getParameterTypes();
if (clazzs.length == arguments.length) {
target = method.getAnnotation(SystemLog.class);
break;
}
}
}
return target;
}
@Around("cutSystemLog()")
public Object recordSysLog(ProceedingJoinPoint point) throws Throwable {
OperationLog operationLog = new OperationLog();
String ip = Lang.getIP(SpringBeans.getRequest());
String user = SecurityUtils.getSubject().getPrincipal().toString();
SystemLog log = getSystemLog(point);
operationLog.setAccount(user);
operationLog.setMethodMeta(point.getSignature().getName());
operationLog.setParameters(getParameter(point));
operationLog.setAction(log.methods());
operationLog.setIp(ip);
operationLog.setModule(log.module());
Stopwatch stopwatch = Stopwatch.begin();
Object obj = point.proceed();
stopwatch.stop();
Object rObj = getMethodReturnObject(point, obj);// 把业务的返回值取回来
operationLog.setMethodReturn(Json.toJson(rObj, JsonFormat.compact()));
if (rObj instanceof Result) {
operationLog.setDescription(Castors.me().castTo(obj, Result.class).isSuccess() ? "操作成功" : "操作失败");
}
if (Strings.isBlank(operationLog.getDescription())) {
operationLog.setDescription(log.description());
}
operationLog.setOperationTime(stopwatch.getDuration());
operationLogService.save(operationLog);
return obj;
}
/**
* @param point
* @return
*/
private Object getMethodReturnObject(ProceedingJoinPoint point, Object obj) {
MethodSignature signature = (MethodSignature) point.getSignature();
// 如果是 ajax 请求,返回方法的返回值
if (signature.getMethod().getAnnotation(ResponseBody.class) != null) {
return obj;
}
// 如果不是获取 Model 中的属性
for (Object o : point.getArgs()) {
if (o instanceof Model) {
Model m = (Model) o;
return m.asMap();
}
}
return null;// 其他情况
}
/**
* @param point
* @return
*/
private String getParameter(ProceedingJoinPoint point) {
List<Object> target = Lists.newArrayList();
Lang.each(point.getArgs(), new Each<Object>() {
@Override
public void invoke(int arg0, Object obj, int arg2) throws ExitLoop, ContinueLoop, LoopException {
if (obj instanceof ServletRequest) {
target.add(((ServletRequest) obj).getParameterMap());
} else if (obj instanceof ServletResponse || obj instanceof HttpSession || obj instanceof Model) { // response/session/model
} else {
target.add(obj);
}
}
});
return Json.toJson(target, JsonFormat.compact());
}
}
|
apache-2.0
|
SYSTRAN/multimodal-api-java-client
|
src/main/java/net/systran/platform/multimodal/client/model/FileExtractTextResponse.java
|
1837
|
/*
* Copyright © 2015 SYSTRAN Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.systran.platform.multimodal.client.model;
import io.swagger.annotations.*;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Response for a File Extract Text request.
 *
 * <p>Carries the text extracted from the submitted file together with the
 * detected format of the input.
 **/
@ApiModel(description = "Response for a File Extract Text request")
public class FileExtractTextResponse {

  /** Text extracted from the input file. */
  private String text = null;

  /** Format of the input file. */
  private String format = null;

  /**
   * Text extracted from the input
   **/
  @ApiModelProperty(required = true, value = "Text extracted from the input")
  @JsonProperty("text")
  public String getText() {
    return text;
  }

  public void setText(String text) {
    this.text = text;
  }

  /**
   * Format of the input
   **/
  @ApiModelProperty(required = true, value = "Format of the input")
  @JsonProperty("format")
  public String getFormat() {
    return format;
  }

  public void setFormat(String format) {
    this.format = format;
  }

  /** Renders the object in the generated-client debug format. */
  @Override
  public String toString() {
    return "class FileExtractTextResponse {\n"
        + "  text: " + text + "\n"
        + "  format: " + format + "\n"
        + "}\n";
  }
}
|
apache-2.0
|
ZhaoYanZy/coolweather
|
app/src/main/java/com/zy/coolweather/WeatherActivity.java
|
9868
|
package com.zy.coolweather;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.zy.coolweather.gson.Forecast;
import com.zy.coolweather.gson.Weather;
import com.zy.coolweather.service.AutoUpdateService;
import com.zy.coolweather.util.HttpUtil;
import com.zy.coolweather.util.Utility;
import com.zy.coolweather.util.Utils;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
/**
 * Main weather screen: shows current conditions, forecast, AQI and lifestyle
 * suggestions for one city. Weather JSON and the daily background image are
 * cached in default SharedPreferences; pull-to-refresh re-queries the server,
 * and a drawer hosts the city chooser.
 */
public class WeatherActivity extends AppCompatActivity {

    private ScrollView weatherLayout;
    private TextView titleCity;
    private TextView titleUpdateTime;
    private TextView degreeText;
    private TextView weatherInfoText;
    private LinearLayout forecastLayout;
    private TextView aqiText;
    private TextView pm25Text;
    private TextView comfortText;
    private TextView carWashText;
    private TextView sportText;
    // Daily Bing background image
    private ImageView bingPicImg;
    // Pull-to-refresh container (public: toggled from the drawer fragment)
    public SwipeRefreshLayout swipeRefreshLayout;
    // id of the weather/city currently displayed
    private String weatherId;
    // Slide-out navigation drawer (public: opened/closed from other classes)
    public DrawerLayout drawerLayout;
    private Button navButton;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // On Lollipop+ draw the layout behind a transparent status bar.
        if (Build.VERSION.SDK_INT >= 21) {
            View decorView = getWindow().getDecorView();
            decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN |
                    View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
            getWindow().setStatusBarColor(Color.TRANSPARENT);
        }
        setContentView(R.layout.activity_weather);
        // Bind views
        weatherLayout = (ScrollView) findViewById(R.id.weather_layout);
        titleCity = (TextView) findViewById(R.id.title_city);
        titleUpdateTime = (TextView) findViewById(R.id.title_update_time);
        degreeText = (TextView) findViewById(R.id.degree_text);
        weatherInfoText = (TextView) findViewById(R.id.weather_info_text);
        forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);
        aqiText = (TextView) findViewById(R.id.aqi_text);
        pm25Text = (TextView) findViewById(R.id.pm25_text);
        comfortText = (TextView) findViewById(R.id.comfort_text);
        carWashText = (TextView) findViewById(R.id.car_wash_text);
        sportText = (TextView) findViewById(R.id.sport_text);
        bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);
        drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        navButton = (Button) findViewById(R.id.nav_button);
        swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipe_refresh);
        swipeRefreshLayout.setColorSchemeResources(R.color.colorPrimary);
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        // Background image: use the cached URL if present, otherwise fetch it.
        String bingPic = prefs.getString("bing_pic", null);
        if (bingPic != null) {
            Glide.with(this).load(bingPic).into(bingPicImg);
        } else {
            loadBingPic();
        }
        String weatherString = prefs.getString("weather", null);
        if (weatherString != null) {
            // Cache hit: parse the stored weather JSON directly.
            Weather weather = Utility.handleWeatherResponse(weatherString);
            weatherId = weather.basic.weatherId;
            showWeatherInfo(weather);
        } else {
            // No cache: query the server with the id passed by the caller.
            weatherId = getIntent().getStringExtra("weather_id");
            // NOTE(review): the layout is made visible before any data arrives;
            // the stock coolweather sample uses INVISIBLE here — confirm this
            // is intentional.
            weatherLayout.setVisibility(View.VISIBLE);
            requestWeather(weatherId);
        }
        // Pull-to-refresh re-requests the currently shown city.
        swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                requestWeather(weatherId);
            }
        });
        navButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                drawerLayout.openDrawer(GravityCompat.START);
            }
        });
    }

    /**
     * Fetches the URL of the daily Bing picture, caches it in
     * SharedPreferences, and loads it into the background ImageView.
     * Runs on an OkHttp worker thread; UI work is posted via runOnUiThread.
     */
    private void loadBingPic() {
        String requestBingPic = "http://guolin.tech/api/bing_pic";
        HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {
            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String bingPic = response.body().string();
                SharedPreferences.Editor editor = PreferenceManager.
                        getDefaultSharedPreferences(WeatherActivity.this).edit();
                editor.putString("bing_pic", bingPic);
                editor.apply();
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);
                    }
                });
            }

            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
            }
        });
    }

    /**
     * Requests weather for the given id from the server, caches the raw JSON
     * on success, and updates the UI. Also refreshes the background image.
     *
     * @param weatherId1 the weather/city id to query
     */
    public void requestWeather(final String weatherId1) {
        String weatherUrl = "http://guolin.tech/api/weather?cityid=" +
                weatherId1 + "&key=6a4bafe7d28a4dfd97332e5747bc9799";
        HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String responseText = response.body().string();
                final Weather weather = Utility.handleWeatherResponse(responseText);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (weather != null && "ok".equals(weather.status)) {
                            // Cache the raw JSON so the next launch skips the network.
                            SharedPreferences.Editor editor = PreferenceManager.
                                    getDefaultSharedPreferences(WeatherActivity.this).edit();
                            editor.putString("weather", responseText);
                            editor.apply();
                            weatherId = weather.basic.weatherId;
                            showWeatherInfo(weather);
                        } else {
                            Utils.showToast(WeatherActivity.this, "获取天气信息失败!");
                        }
                        // Stop the pull-to-refresh spinner either way.
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
                // Refresh the daily picture alongside the weather data.
                loadBingPic();
            }

            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Utils.showToast(WeatherActivity.this, "获取天气信息失败!");
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
            }
        });
    }

    /**
     * Renders a parsed {@link Weather} entity: title, current conditions,
     * per-day forecast rows, AQI (if present) and lifestyle suggestions.
     * On success also starts {@link AutoUpdateService} for background refresh.
     * Must be called on the UI thread.
     */
    private void showWeatherInfo(Weather weather) {
        if (weather != null && "ok".equals(weather.status)) {
            String cityName = weather.basic.cityName;
            // Server sends "date time"; keep only the time portion.
            String updateTime = weather.basic.update.updateTime.split(" ")[1];
            String degree = weather.now.temperature + "℃";
            String weatherInfo = weather.now.more.info;
            titleCity.setText(cityName);
            titleUpdateTime.setText(updateTime);
            degreeText.setText(degree);
            weatherInfoText.setText(weatherInfo);
            // Rebuild the forecast list from scratch, one inflated row per day.
            forecastLayout.removeAllViews();
            for (Forecast forecast : weather.forecastList) {
                View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);
                TextView dataText = (TextView) view.findViewById(R.id.data_text);
                TextView infoText = (TextView) view.findViewById(R.id.info_text);
                TextView maxText = (TextView) view.findViewById(R.id.max_text);
                TextView minText = (TextView) view.findViewById(R.id.min_text);
                dataText.setText(forecast.date);
                infoText.setText(forecast.more.info);
                maxText.setText(forecast.temperature.max);
                minText.setText(forecast.temperature.min);
                forecastLayout.addView(view);
            }
            // AQI block is optional in the payload.
            if (weather.aqi != null) {
                aqiText.setText(weather.aqi.city.aqi);
                pm25Text.setText(weather.aqi.city.pm25);
            }
            String comfort = "舒适度:" + weather.suggestion.comfort.info;
            String carWash = "汽车指数:" + weather.suggestion.carWash.info;
            String sport = "运动建议:" + weather.suggestion.sport.info;
            comfortText.setText(comfort);
            carWashText.setText(carWash);
            sportText.setText(sport);
            weatherLayout.setVisibility(View.VISIBLE);
            // Background service keeps weather and picture up to date.
            Intent intent = new Intent(this, AutoUpdateService.class);
            startService(intent);
        } else {
            Utils.showToast(WeatherActivity.this, "获取天气信息失败!");
        }
    }
}
|
apache-2.0
|
kcompher/geowave
|
extensions/datastores/accumulo/src/main/java/mil/nga/giat/geowave/datastore/accumulo/AccumuloCommandLineOptions.java
|
4119
|
package mil.nga.giat.geowave.datastore.accumulo;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This class encapsulates all of the options and parsed values specific to
 * setting up GeoWave to appropriately connect to Accumulo.
 *
 */
public class AccumuloCommandLineOptions
{
	private final static Logger LOGGER = LoggerFactory.getLogger(AccumuloCommandLineOptions.class);
	// Connection parameters; all are required except namespace.
	private final String zookeepers;
	private final String instanceId;
	private final String user;
	private final String password;
	private final String namespace;
	// Lazily created, cached connection wrapper (see getAccumuloOperations()).
	private AccumuloOperations operations;

	/**
	 * @param zookeepers
	 *            comma-separated zookeeper servers for the Accumulo instance
	 * @param instanceId
	 *            the Accumulo instance ID
	 * @param user
	 *            a valid Accumulo user ID
	 * @param password
	 *            the user's password
	 * @param namespace
	 *            table namespace (may be empty for no namespace)
	 * @throws AccumuloException
	 *             declared for connection-related failures
	 * @throws AccumuloSecurityException
	 *             declared for authentication failures
	 */
	public AccumuloCommandLineOptions(
			final String zookeepers,
			final String instanceId,
			final String user,
			final String password,
			final String namespace )
			throws AccumuloException,
			AccumuloSecurityException {
		this.zookeepers = zookeepers;
		this.instanceId = instanceId;
		this.user = user;
		this.password = password;
		this.namespace = namespace;
	}

	public String getZookeepers() {
		return zookeepers;
	}

	public String getInstanceId() {
		return instanceId;
	}

	public String getUser() {
		return user;
	}

	public String getPassword() {
		return password;
	}

	public String getNamespace() {
		return namespace;
	}

	/**
	 * Lazily creates and caches the {@link AccumuloOperations} for these
	 * options; synchronized so concurrent callers share one connection.
	 *
	 * @return the (possibly newly created) operations instance
	 * @throws AccumuloException
	 *             if the connection cannot be established
	 * @throws AccumuloSecurityException
	 *             if authentication fails
	 */
	public synchronized AccumuloOperations getAccumuloOperations()
			throws AccumuloException,
			AccumuloSecurityException {
		if (operations == null) {
			operations = new BasicAccumuloOperations(
					zookeepers,
					instanceId,
					user,
					password,
					namespace);
		}
		return operations;
	}

	/**
	 * Parses the Accumulo connection options from a parsed command line.
	 * All missing required options are logged before failing, so the user
	 * sees every problem at once.
	 *
	 * @param commandLine
	 *            the parsed command line
	 * @return the options, or {@code null} if the Accumulo connection could
	 *         not be established (the error is logged)
	 * @throws ParseException
	 *             if any required option is missing
	 */
	public static AccumuloCommandLineOptions parseOptions(
			final CommandLine commandLine )
			throws ParseException {
		boolean success = true;
		final String zookeepers = commandLine.getOptionValue("z");
		final String instanceId = commandLine.getOptionValue("i");
		final String user = commandLine.getOptionValue("u");
		final String password = commandLine.getOptionValue("p");
		// namespace is optional and defaults to the empty string
		final String namespace = commandLine.getOptionValue(
				"n",
				"");
		if (zookeepers == null) {
			success = false;
			LOGGER.error("Zookeeper URL not set");
		}
		if (instanceId == null) {
			success = false;
			LOGGER.error("Accumulo instance ID not set");
		}
		if (user == null) {
			success = false;
			LOGGER.error("Accumulo user ID not set");
		}
		if (password == null) {
			success = false;
			LOGGER.error("Accumulo password not set");
		}
		if (!success) {
			throw new ParseException(
					"Required option is missing");
		}
		try {
			return new AccumuloCommandLineOptions(
					zookeepers,
					instanceId,
					user,
					password,
					namespace);
		}
		catch (AccumuloException | AccumuloSecurityException e) {
			// Connection/auth failure is logged and reported as null rather
			// than propagated — callers must null-check.
			LOGGER.error(
					"Unable to connect to Accumulo with the specified options",
					e);
		}
		return null;
	}

	/**
	 * Registers the Accumulo-specific command-line options (-z, -i, -u, -p,
	 * -v, -n) on the given option set.
	 *
	 * @param allOptions
	 *            the option set to add to
	 */
	public static void applyOptions(
			final Options allOptions ) {
		final Option zookeeperUrl = new Option(
				"z",
				"zookeepers",
				true,
				"A comma-separated list of zookeeper servers that an Accumulo instance is using");
		allOptions.addOption(zookeeperUrl);
		final Option instanceId = new Option(
				"i",
				"instance-id",
				true,
				"The Accumulo instance ID");
		allOptions.addOption(instanceId);
		final Option user = new Option(
				"u",
				"user",
				true,
				"A valid Accumulo user ID");
		allOptions.addOption(user);
		final Option password = new Option(
				"p",
				"password",
				true,
				"The password for the user");
		allOptions.addOption(password);
		// NOTE(review): "-v" is registered here but never read in
		// parseOptions — presumably consumed elsewhere; confirm.
		final Option visibility = new Option(
				"v",
				"visibility",
				true,
				"The visibility of the data ingested (optional; default is 'public')");
		allOptions.addOption(visibility);
		final Option namespace = new Option(
				"n",
				"namespace",
				true,
				"The table namespace (optional; default is no namespace)");
		allOptions.addOption(namespace);
	}
}
|
apache-2.0
|
mirkosertic/Bytecoder
|
core/src/main/java/de/mirkosertic/bytecoder/backend/wasm/ast/WASMExpression.java
|
703
|
/*
* Copyright 2018 Mirko Sertic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.mirkosertic.bytecoder.backend.wasm.ast;
/**
 * Marker interface for a WebAssembly expression node in the AST.
 *
 * <p>Extends {@link WASMValue} and declares no members of its own; it exists
 * only so expression nodes can be distinguished by type.
 */
public interface WASMExpression extends WASMValue {
}
|
apache-2.0
|
bowenli86/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/factories/utils/TestCollectionTableFactory.scala
|
8615
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.factories.utils
import org.apache.flink.api.common.ExecutionConfig
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.common.typeutils.TypeSerializer
import org.apache.flink.api.java.io.{CollectionInputFormat, LocalCollectionOutputFormat}
import org.apache.flink.api.java.operators.DataSink
import org.apache.flink.api.java.{DataSet, ExecutionEnvironment}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.datastream.{DataStream, DataStreamSink, DataStreamSource}
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction
import org.apache.flink.table.api.TableSchema
import org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR
import org.apache.flink.table.factories.{TableSinkFactory, TableSourceFactory}
import org.apache.flink.table.functions.{AsyncTableFunction, TableFunction}
import org.apache.flink.table.planner.factories.utils.TestCollectionTableFactory.{getCollectionSink, getCollectionSource}
import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter
import org.apache.flink.table.sinks.{AppendStreamTableSink, BatchTableSink, StreamTableSink, TableSink}
import org.apache.flink.table.sources.{BatchTableSource, LookupableTableSource, StreamTableSource}
import org.apache.flink.table.types.DataType
import org.apache.flink.types.Row
import java.io.IOException
import java.util
import java.util.{ArrayList => JArrayList, LinkedList => JLinkedList, List => JList, Map => JMap}
import scala.collection.JavaConversions._
/**
  * Table factory used in planner tests: discovered via the
  * `connector = COLLECTION` property, it produces in-memory collection-backed
  * sources and sinks (see the companion object for the shared data).
  */
class TestCollectionTableFactory extends TableSourceFactory[Row] with TableSinkFactory[Row] {

  override def createTableSource(
      context: TableSourceFactory.Context): StreamTableSource[Row] =
    getCollectionSource(context)

  override def createTableSink(
      context: TableSinkFactory.Context): StreamTableSink[Row] =
    getCollectionSink(context)

  // Factory discovery: matches tables declared with connector = COLLECTION.
  override def requiredContext(): JMap[String, String] = {
    val requiredCtx = new util.HashMap[String, String]()
    requiredCtx.put(CONNECTOR, "COLLECTION")
    requiredCtx
  }

  // Accept every property; this is a permissive test-only factory.
  override def supportedProperties(): JList[String] = {
    val props = new JArrayList[String]()
    props.add("*")
    props
  }
}
/**
  * Companion holding the shared, mutable test fixtures (source rows, dimension
  * rows, collected results) plus the concrete source/sink implementations.
  * State is static and shared across tests — call [[reset]] between tests.
  */
object TestCollectionTableFactory {
  // Table property controlling whether the source reports itself as bounded.
  val IS_BOUNDED = "is-bounded"

  // Shared fixture state (mutable, static — see reset()).
  val SOURCE_DATA = new JLinkedList[Row]()
  val DIM_DATA = new JLinkedList[Row]()
  val RESULT = new JLinkedList[Row]()
  // Per-record emission delay in ms; <= 0 means emit as fast as possible.
  private var emitIntervalMS = -1L

  def initData(sourceData: JList[Row]): Unit ={
    initData(sourceData, List(), -1L)
  }

  // Appends (does not replace) source and dimension rows and sets the
  // emission interval used by subsequently created sources.
  def initData(sourceData: JList[Row],
    dimData: JList[Row] = List(),
    emitInterval: Long = -1L): Unit ={
    SOURCE_DATA.addAll(sourceData)
    DIM_DATA.addAll(dimData)
    emitIntervalMS = emitInterval
  }

  // Clears all shared state; call between tests to avoid cross-test leakage.
  def reset(): Unit ={
    RESULT.clear()
    SOURCE_DATA.clear()
    DIM_DATA.clear()
    emitIntervalMS = -1L
  }

  def getResult: util.List[Row] = RESULT

  // Builds a source from the table's schema; boundedness defaults to true
  // unless the table sets the "is-bounded" property.
  def getCollectionSource(context: TableSourceFactory.Context): CollectionTableSource = {
    val schema = context.getTable.getSchema
    val isBounded = context.getTable.getProperties.getOrDefault(IS_BOUNDED, "true").toBoolean
    new CollectionTableSource(emitIntervalMS, physicalSchema(schema), isBounded)
  }

  def getCollectionSink(context: TableSinkFactory.Context): CollectionTableSink = {
    val schema = context.getTable.getSchema
    new CollectionTableSink(physicalSchema(schema))
  }

  // Strips computed/generated columns, keeping only physical columns.
  def physicalSchema(schema: TableSchema): TableSchema = {
    val builder = TableSchema.builder()
    schema.getTableColumns.filter(c => !c.isGenerated)
      .foreach(c => builder.field(c.getName, c.getType))
    builder.build()
  }

  /**
    * Table source of collection.
    * Reads SOURCE_DATA for scans and DIM_DATA for lookup joins; async lookup
    * is not supported.
    */
  class CollectionTableSource(
      val emitIntervalMs: Long,
      val schema: TableSchema,
      val bounded: Boolean)
    extends BatchTableSource[Row]
    with StreamTableSource[Row]
    with LookupableTableSource[Row] {

    private val rowType: TypeInformation[Row] = schema.toRowType

    override def isBounded: Boolean = bounded

    def getDataSet(execEnv: ExecutionEnvironment): DataSet[Row] = {
      execEnv.createInput(new TestCollectionInputFormat[Row](emitIntervalMs,
        SOURCE_DATA,
        rowType.createSerializer(new ExecutionConfig)),
        rowType)
    }

    override def getDataStream(streamEnv: StreamExecutionEnvironment): DataStreamSource[Row] = {
      streamEnv.createInput(new TestCollectionInputFormat[Row](emitIntervalMs,
        SOURCE_DATA,
        rowType.createSerializer(new ExecutionConfig)),
        rowType)
    }

    override def getProducedDataType: DataType = schema.toRowDataType

    override def getTableSchema: TableSchema = {
      schema
    }

    // Lookup keys arrive as field names; translate them to field indices.
    override def getLookupFunction(lookupKeys: Array[String]): TemporalTableFetcher = {
      new TemporalTableFetcher(DIM_DATA, lookupKeys.map(schema.getFieldNames.indexOf(_)))
    }

    override def getAsyncLookupFunction(lookupKeys: Array[String]): AsyncTableFunction[Row] = null

    override def isAsyncEnabled: Boolean = false
  }

  /**
    * Table sink of collection.
    * Appends all emitted rows into the shared RESULT list (parallelism 1 so
    * ordering is deterministic).
    */
  class CollectionTableSink(val schema: TableSchema)
    extends BatchTableSink[Row]
    with AppendStreamTableSink[Row] {

    override def consumeDataSet(dataSet: DataSet[Row]): DataSink[_] = {
      dataSet.output(new LocalCollectionOutputFormat[Row](RESULT)).setParallelism(1)
    }

    override def getConsumedDataType: DataType = schema.toRowDataType

    override def getTableSchema: TableSchema = schema

    override def consumeDataStream(dataStream: DataStream[Row]): DataStreamSink[_] = {
      dataStream.addSink(new UnsafeMemorySinkFunction(
        TypeInfoDataTypeConverter.fromDataTypeToTypeInfo(schema.toRowDataType)
          .asInstanceOf[TypeInformation[Row]])).setParallelism(1)
    }

    override def configure(fieldNames: Array[String],
      fieldTypes: Array[TypeInformation[_]]): TableSink[Row] = this
  }

  /**
    * Sink function of unsafe memory.
    * Deep-copies each row via the serializer before storing, so later
    * mutation/reuse of the input row cannot corrupt RESULT.
    */
  class UnsafeMemorySinkFunction(outputType: TypeInformation[Row]) extends RichSinkFunction[Row] {
    private var serializer: TypeSerializer[Row] = _

    override def open(param: Configuration): Unit = {
      serializer = outputType.createSerializer(new ExecutionConfig)
    }

    @throws[Exception]
    override def invoke(row: Row): Unit = {
      RESULT.add(serializer.copy(row))
    }
  }

  /**
    * Collection inputFormat for testing.
    * Optionally sleeps emitIntervalMs before each record to simulate a slow,
    * streaming-like source.
    */
  class TestCollectionInputFormat[T](
      val emitIntervalMs: Long,
      val dataSet: java.util.Collection[T],
      val serializer: TypeSerializer[T])
    extends CollectionInputFormat[T](dataSet, serializer) {

    @throws[IOException]
    override def reachedEnd: Boolean = {
      if (emitIntervalMs > 0) {
        try
          Thread.sleep(emitIntervalMs)
        catch {
          // Interruption just ends the sleep early; end-of-input is still
          // decided by the parent implementation.
          case _: InterruptedException =>
        }
      }
      super.reachedEnd
    }
  }

  /**
    * Dimension table source fetcher.
    * Linearly scans dimData and collects a copy of every row whose key fields
    * (by index) equal the lookup values.
    */
  class TemporalTableFetcher(
      val dimData: JLinkedList[Row],
      val keys: Array[Int]) extends TableFunction[Row] {

    @throws[Exception]
    def eval(values: Any*): Unit = {
      for (data <- dimData) {
        var matched = true
        var idx = 0
        while (matched && idx < keys.length) {
          val dimField = data.getField(keys(idx))
          val inputField = values(idx)
          matched = dimField.equals(inputField)
          idx += 1
        }
        if (matched) {
          // copy the row data so downstream mutation cannot affect dimData
          val ret = new Row(data.getArity)
          0 until data.getArity foreach { idx =>
            ret.setField(idx, data.getField(idx))
          }
          collect(ret)
        }
      }
    }
  }
}
|
apache-2.0
|
ResearchWorx/Cresco-Agent-Dashboard-Plugin
|
src/main/resources/js/popovers.js
|
411
|
/**
* --------------------------------------------------------------------------
* CoreUI Free Boostrap Admin Template (v2.0.0-rc.1): popovers.js
* Licensed under MIT (https://coreui.io/license)
* --------------------------------------------------------------------------
*/
// Initialize Bootstrap popovers on every element that opted in via the
// data-toggle="popover" attribute.
$('[data-toggle="popover"]').popover();
// Elements with .popover-dismiss close their popover when focus leaves the
// trigger (the "dismiss on next click" pattern).
$('.popover-dismiss').popover({
trigger: 'focus'
});
//# sourceMappingURL=popovers.js.map
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-wellarchitected/src/main/java/com/amazonaws/services/wellarchitected/model/ChoiceContent.java
|
5345
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.wellarchitected.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * The choice content: a display text plus an optional URL.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/wellarchitected-2020-03-31/ChoiceContent" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ChoiceContent implements Serializable, Cloneable, StructuredPojo {

    /** The display text for the choice content. */
    private String displayText;

    /** The URL for the choice content. */
    private String url;

    /**
     * Sets the display text for the choice content.
     *
     * @param displayText
     *        The display text for the choice content.
     */
    public void setDisplayText(String displayText) {
        this.displayText = displayText;
    }

    /**
     * Returns the display text for the choice content.
     *
     * @return The display text for the choice content.
     */
    public String getDisplayText() {
        return displayText;
    }

    /**
     * Fluent setter for the display text.
     *
     * @param displayText
     *        The display text for the choice content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ChoiceContent withDisplayText(String displayText) {
        setDisplayText(displayText);
        return this;
    }

    /**
     * Sets the URL for the choice content.
     *
     * @param url
     *        The URL for the choice content.
     */
    public void setUrl(String url) {
        this.url = url;
    }

    /**
     * Returns the URL for the choice content.
     *
     * @return The URL for the choice content.
     */
    public String getUrl() {
        return url;
    }

    /**
     * Fluent setter for the URL.
     *
     * @param url
     *        The URL for the choice content.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ChoiceContent withUrl(String url) {
        setUrl(url);
        return this;
    }

    /**
     * Returns a string representation of this object. Useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getDisplayText() != null) {
            buf.append("DisplayText: ").append(getDisplayText()).append(",");
        }
        if (getUrl() != null) {
            buf.append("Url: ").append(getUrl());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is null-safe, so no separate null check is needed.
        if (!(obj instanceof ChoiceContent)) {
            return false;
        }
        ChoiceContent that = (ChoiceContent) obj;
        return java.util.Objects.equals(getDisplayText(), that.getDisplayText())
                && java.util.Objects.equals(getUrl(), that.getUrl());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the same value as the classic prime-31
        // accumulation over (displayText, url), so the hash is unchanged.
        return java.util.Objects.hash(getDisplayText(), getUrl());
    }

    @Override
    public ChoiceContent clone() {
        try {
            return (ChoiceContent) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.wellarchitected.model.transform.ChoiceContentMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
apache-2.0
|
liuyuanyuan/dbeaver
|
plugins/org.jkiss.dbeaver.data.transfer/src/org/jkiss/dbeaver/tools/transfer/database/DatabaseTransferConsumer.java
|
27167
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.tools.transfer.database;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.data.DBDAttributeBinding;
import org.jkiss.dbeaver.model.data.DBDAttributeBindingCustom;
import org.jkiss.dbeaver.model.data.DBDValueHandler;
import org.jkiss.dbeaver.model.exec.*;
import org.jkiss.dbeaver.model.impl.AbstractExecutionSource;
import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor;
import org.jkiss.dbeaver.model.sql.SQLDataSource;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.model.struct.rdb.DBSCatalog;
import org.jkiss.dbeaver.model.struct.rdb.DBSManipulationType;
import org.jkiss.dbeaver.model.struct.rdb.DBSSchema;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.tools.transfer.IDataTransferConsumer;
import org.jkiss.dbeaver.tools.transfer.IDataTransferProcessor;
import org.jkiss.utils.CommonUtils;
import java.util.*;
/**
 * Stream transfer consumer: loads rows produced by a data transfer producer
 * into a target database table via {@link DBSDataManipulator} batch inserts.
 * <p>
 * Lifecycle as driven by the transfer framework: {@link #initTransfer} →
 * {@link #startTransfer} (creates target table/columns for "create" mappings)
 * → per result set: {@link #fetchStart}, {@link #fetchRow}…, {@link #fetchEnd}
 * → {@link #close}.
 */
public class DatabaseTransferConsumer implements IDataTransferConsumer<DatabaseConsumerSettings, IDataTransferProcessor> {
    private static final Log log = Log.getLog(DatabaseTransferConsumer.class);
    // Source container the rows are read from (may be null until initTransfer).
    private DBSDataContainer sourceObject;
    // Target table; may also be resolved from the mapping in startTransfer().
    private DBSDataManipulator targetObject;
    private DatabaseConsumerSettings settings;
    // Mapping for sourceObject, or null when the producer supplies the mapping.
    private DatabaseMappingContainer containerMapping;
    // Parallel to sourceBindings; entries are null for skipped columns.
    private ColumnMapping[] columnMappings;
    private DBDAttributeBinding[] sourceBindings;
    // Execution context/session on the TARGET database (isolated if configured).
    private DBCExecutionContext targetContext;
    private DBCSession targetSession;
    private DBSDataManipulator.ExecuteBatch executeBatch;
    private long rowsExported = 0;
    // Set once the user picks IGNORE_ALL on an insert error.
    private boolean ignoreErrors = false;
    private List<DBSEntityAttribute> targetAttributes;
    private boolean useIsolatedConnection;
    /** Pairs a source attribute with its mapped target attribute and the value handlers for both sides. */
    private static class ColumnMapping {
        DBDAttributeBinding sourceAttr;
        DatabaseMappingAttribute targetAttr;
        DBDValueHandler sourceValueHandler;
        DBDValueHandler targetValueHandler;
        // Index into targetAttributes; -1 until resolved in fetchStart().
        int targetIndex = -1;
        private ColumnMapping(DBDAttributeBinding sourceAttr) {
            this.sourceAttr = sourceAttr;
        }
    }
    public DatabaseTransferConsumer() {
    }
    public DatabaseTransferConsumer(DBSDataManipulator targetObject) {
        this.targetObject = targetObject;
    }
    @Override
    public DBSObject getDatabaseObject() {
        return targetObject;
    }
    /**
     * Prepares the consumer for a result set: opens the target session,
     * optionally truncates the target table, resolves the source→target
     * column mappings, and opens the insert batch.
     *
     * @throws DBCException if a target attribute cannot be resolved or the
     *                      target connection cannot be prepared
     */
    @Override
    public void fetchStart(DBCSession session, DBCResultSet resultSet, long offset, long maxRows) throws DBCException {
        initExporter(session.getProgressMonitor());
        AbstractExecutionSource executionSource = new AbstractExecutionSource(sourceObject, targetContext, this);
        // Truncate only on the first chunk (offset <= 0) and only for tables
        // that already exist (newly created tables are empty anyway).
        if (offset <= 0 && settings.isTruncateBeforeLoad() && (containerMapping == null || containerMapping.getMappingType() == DatabaseMappingType.existing)) {
            // Truncate target tables
            if ((targetObject.getSupportedFeatures() & DBSDataManipulator.DATA_TRUNCATE) != 0) {
                targetObject.truncateData(
                    targetSession,
                    executionSource);
            } else {
                log.error("Table '" + targetObject.getName() + "' doesn't support truncate operation");
            }
        }
        DBDAttributeBinding[] rsAttributes = DBUtils.makeLeafAttributeBindings(session, sourceObject, resultSet);
        columnMappings = new ColumnMapping[rsAttributes.length];
        sourceBindings = rsAttributes;
        targetAttributes = new ArrayList<>(columnMappings.length);
        for (int i = 0; i < rsAttributes.length; i++) {
            if (isSkipColumn(rsAttributes[i])) {
                continue;
            }
            ColumnMapping columnMapping = new ColumnMapping(rsAttributes[i]);
            if (containerMapping == null) {
                // No explicit mappings. Mapping must be provided by data producer
                // Map all attributes directly.
                if (targetObject instanceof DBSEntity) {
                    try {
                        // Match target attribute by source attribute name.
                        DBSEntityAttribute attribute = ((DBSEntity) targetObject).getAttribute(session.getProgressMonitor(), columnMapping.sourceAttr.getName());
                        if (attribute != null) {
                            columnMapping.targetAttr = new DatabaseMappingAttribute(null, columnMapping.sourceAttr);
                            columnMapping.targetAttr.setTarget(attribute);
                            columnMapping.targetAttr.setMappingType(DatabaseMappingType.existing);
                        }
                    } catch (DBException e) {
                        log.error("Error getting target attribute");
                    }
                }
                if (columnMapping.targetAttr == null) {
                    throw new DBCException("Can't resolve target attribute for [" + columnMapping.sourceAttr.getName() + "]");
                }
            } else {
                columnMapping.targetAttr = containerMapping.getAttributeMapping(columnMapping.sourceAttr);
                if (columnMapping.targetAttr == null) {
                    throw new DBCException("Can't find target attribute [" + columnMapping.sourceAttr.getName() + "]");
                }
            }
            if (columnMapping.targetAttr.getMappingType() == DatabaseMappingType.skip) {
                continue;
            }
            DBSEntityAttribute targetAttr = columnMapping.targetAttr.getTarget();
            if (targetAttr == null) {
                if (columnMapping.targetAttr.getSource() instanceof DBSEntityAttribute) {
                    // Use source attr. Some datasource (e.g. document oriented do not have strict set of attributes)
                    targetAttr = (DBSEntityAttribute) columnMapping.targetAttr.getSource();
                } else {
                    throw new DBCException("Target attribute for [" + columnMapping.sourceAttr.getName() + "] wasn't resolved");
                }
            }
            columnMapping.sourceValueHandler = columnMapping.sourceAttr.getValueHandler();
            columnMapping.targetValueHandler = DBUtils.findValueHandler(targetSession.getDataSource(), targetAttr);
            columnMapping.targetIndex = targetAttributes.size();
            // Keep columnMappings parallel to rsAttributes: fetchRow() relies on it.
            columnMappings[i] = columnMapping;
            targetAttributes.add(targetAttr);
        }
        DBSAttributeBase[] attributes = targetAttributes.toArray(new DBSAttributeBase[0]);
        if (targetObject instanceof DBSDataManipulatorExt) {
            ((DBSDataManipulatorExt) targetObject).beforeDataChange(session, DBSManipulationType.INSERT, attributes, executionSource);
        }
        executeBatch = targetObject.insertData(
            targetSession,
            attributes,
            null,
            executionSource);
    }
    // A column is skipped when it is a pseudo attribute, or auto-generated
    // while auto-generated column transfer is disabled in settings.
    private boolean isSkipColumn(DBDAttributeBinding attr) {
        return attr.isPseudoAttribute() || (!settings.isTransferAutoGeneratedColumns() && attr.isAutoGenerated());
    }
    /**
     * Converts one source row into target-side values and appends it to the
     * insert batch, flushing the batch when the commit threshold is reached.
     */
    @Override
    public void fetchRow(DBCSession session, DBCResultSet resultSet) throws DBCException {
        Object[] rowValues = new Object[targetAttributes.size()];
        for (int i = 0; i < columnMappings.length; i++) {
            ColumnMapping column = columnMappings[i];
            if (column == null || column.targetIndex < 0) {
                continue;
            }
            final Object attrValue;
            if (column.sourceValueHandler != null) {
                if (column.sourceAttr instanceof DBDAttributeBindingCustom) {
                    attrValue = DBUtils.getAttributeValue(column.sourceAttr, sourceBindings, rowValues);
                } else {
                    // 'i' is the result-set ordinal: columnMappings was filled
                    // parallel to the result-set attributes in fetchStart().
                    attrValue = column.sourceValueHandler.fetchValueObject(session, resultSet, column.sourceAttr, i);
                }
            } else {
                // No value handler - get raw value
                attrValue = resultSet.getAttributeValue(i);
            }
            DatabaseMappingAttribute targetAttr = column.targetAttr;
            rowValues[column.targetIndex] = column.targetValueHandler.getValueFromObject(
                targetSession,
                targetAttr.getTarget() == null ? targetAttr.getSource() : targetAttr.getTarget(),
                attrValue,
                false);
        }
        executeBatch.add(rowValues);
        rowsExported++;
        // No need. Monitor is incremented in data reader
        //session.getProgressMonitor().worked(1);
        insertBatch(false);
    }
    /**
     * Executes the pending batch when forced or when the row counter hits the
     * configured commit interval; optionally commits the target transaction.
     * On error, asks the user to stop / retry / ignore (unless IGNORE_ALL was
     * chosen earlier).
     */
    private void insertBatch(boolean force) throws DBCException {
        // NOTE(review): assumes settings.getCommitAfterRows() > 0; a zero value
        // would throw ArithmeticException here — confirm it is validated upstream.
        boolean needCommit = force || ((rowsExported % settings.getCommitAfterRows()) == 0);
        if (needCommit && executeBatch != null) {
            boolean retryInsert;
            do {
                retryInsert = false;
                try {
                    executeBatch.execute(targetSession);
                } catch (Throwable e) {
                    log.error("Error inserting row", e);
                    if (!ignoreErrors) {
                        switch (DBWorkbench.getPlatformUI().showErrorStopRetryIgnore(
                            "Error occurred during data load", e, true)) {
                            case STOP:
                                // just stop execution
                                throw new DBCException("Can't insert row", e);
                            case RETRY:
                                // do it again
                                retryInsert = true;
                                break;
                            case IGNORE:
                                // Just do nothing and go to the next row
                                retryInsert = false;
                                break;
                            case IGNORE_ALL:
                                // Suppress the dialog for all subsequent errors.
                                ignoreErrors = true;
                                retryInsert = false;
                                break;
                        }
                    }
                }
            } while (retryInsert);
        }
        if (settings.isUseTransactions() && needCommit) {
            DBCTransactionManager txnManager = DBUtils.getTransactionManager(targetSession.getExecutionContext());
            if (txnManager != null && !txnManager.isAutoCommit()) {
                txnManager.commit(targetSession);
            }
        }
    }
    /**
     * Flushes the remaining rows, closes the batch, and notifies the target
     * (if it supports {@link DBSDataManipulatorExt}) that the insert finished.
     */
    @Override
    public void fetchEnd(DBCSession session, DBCResultSet resultSet) throws DBCException {
        try {
            if (rowsExported > 0) {
                insertBatch(true);
            }
            if (executeBatch != null) {
                executeBatch.close();
                executeBatch = null;
            }
        } finally {
            // Fire afterDataChange even if the final flush failed.
            if (targetObject instanceof DBSDataManipulatorExt) {
                ((DBSDataManipulatorExt) targetObject).afterDataChange(
                    session,
                    DBSManipulationType.INSERT,
                    targetAttributes.toArray(new DBSAttributeBase[0]),
                    new AbstractExecutionSource(sourceObject, targetContext, this));
            }
        }
    }
    @Override
    public void close() {
        closeExporter();
    }
    /**
     * Opens the target-side session (isolated connection if configured),
     * checks import permission, and disables auto-commit when transactions
     * are requested.
     */
    private void initExporter(DBRProgressMonitor monitor) throws DBCException {
        DBSObject targetDB = checkTargetContainer();
        DBPDataSourceContainer dataSourceContainer = targetDB.getDataSource().getContainer();
        if (!dataSourceContainer.hasModifyPermission(DBPDataSourcePermission.PERMISSION_IMPORT_DATA)) {
            throw new DBCException("Data transfer to database [" + dataSourceContainer.getName() + "] restricted by connection configuration");
        }
        try {
            // Embedded drivers can't open a second isolated connection.
            useIsolatedConnection = settings.isOpenNewConnections() && !dataSourceContainer.getDriver().isEmbedded();
            targetContext = useIsolatedConnection ?
                DBUtils.getObjectOwnerInstance(targetDB).openIsolatedContext(monitor, "Data transfer consumer") : DBUtils.getDefaultContext(targetDB, false);
        } catch (DBException e) {
            throw new DBCException("Error opening new connection", e);
        }
        targetSession = targetContext.openSession(monitor, DBCExecutionPurpose.UTIL, "Data load");
        targetSession.enableLogging(false);
        if (settings.isUseTransactions()) {
            DBCTransactionManager txnManager = DBUtils.getTransactionManager(targetSession.getExecutionContext());
            if (txnManager != null) {
                txnManager.setAutoCommit(monitor, false);
            }
        }
    }
    // Validates that either an explicit target object or a target container is
    // configured, refreshes containerMapping, and returns the effective target.
    // NOTE(review): "taregt" typo in the error message below (runtime string,
    // left unchanged here).
    private DBSObject checkTargetContainer() throws DBCException {
        if (targetObject == null && settings.getContainer() == null) {
            throw new DBCException("Can't initialize database consumer. No target object and no taregt container");
        }
        containerMapping = sourceObject == null ? null : settings.getDataMapping(sourceObject);
        return targetObject == null ? settings.getContainer() : targetObject;
    }
    // Closes the target session and, for isolated connections, the context too.
    private void closeExporter() {
        try {
            if (targetSession != null) {
                targetSession.close();
                targetSession = null;
            }
        } catch (Throwable e) {
            log.debug(e);
        }
        if (targetContext != null && useIsolatedConnection) {
            targetContext.close();
            targetContext = null;
        }
    }
    @Override
    public void initTransfer(DBSObject sourceObject, DatabaseConsumerSettings settings, TransferParameters parameters, IDataTransferProcessor processor, Map<Object, Object> processorProperties) {
        this.sourceObject = (DBSDataContainer) sourceObject;
        this.settings = settings;
    }
    /**
     * Creates target database objects (table and/or columns) for mappings of
     * type "create", then refreshes the navigator model and re-resolves the
     * mappings against the freshly created objects.
     */
    @Override
    public void startTransfer(DBRProgressMonitor monitor) throws DBException {
        // Create all necessary database objects
        monitor.beginTask("Create necessary database objects", 1);
        try {
            DBSObject dbObject = checkTargetContainer();
            boolean hasNewObjects = false;
            if (containerMapping != null) {
                DBSObjectContainer container = settings.getContainer();
                if (container == null) {
                    throw new DBException("No target datasource - can't create target objects");
                }
                targetObject = containerMapping.getTarget();
                try (DBCSession session = DBUtils.openMetaSession(monitor, dbObject, "Create target metadata")) {
                    {
                        switch (containerMapping.getMappingType()) {
                            case create:
                                createTargetTable(session, containerMapping);
                                hasNewObjects = true;
                                break;
                            case existing:
                                // Existing table: only add the newly mapped columns.
                                for (DatabaseMappingAttribute attr : containerMapping.getAttributeMappings(monitor)) {
                                    if (attr.getMappingType() == DatabaseMappingType.create) {
                                        createTargetAttribute(session, attr);
                                        hasNewObjects = true;
                                    }
                                }
                                break;
                        }
                    }
                }
                if (hasNewObjects) {
                    // Refresh node
                    monitor.subTask("Refresh navigator model");
                    settings.getContainerNode().refreshNode(monitor, this);
                    // Reflect database changes in mappings
                    {
                        switch (containerMapping.getMappingType()) {
                            case create:
                                // Re-read the just-created table from the container.
                                DBSObject newTarget = container.getChild(monitor, containerMapping.getTargetName());
                                if (newTarget == null) {
                                    throw new DBCException("New table " + containerMapping.getTargetName() + " not found in container " + DBUtils.getObjectFullName(container, DBPEvaluationContext.UI));
                                } else if (!(newTarget instanceof DBSDataManipulator)) {
                                    throw new DBCException("New table " + DBUtils.getObjectFullName(newTarget, DBPEvaluationContext.UI) + " doesn't support data manipulation");
                                }
                                containerMapping.setTarget((DBSDataManipulator) newTarget);
                                containerMapping.setMappingType(DatabaseMappingType.existing);
                                targetObject = (DBSDataManipulator) newTarget;
                                // ! Fall-through to 'existing' is intentional here
                            case existing:
                                for (DatabaseMappingAttribute attr : containerMapping.getAttributeMappings(monitor)) {
                                    if (attr.getMappingType() == DatabaseMappingType.create) {
                                        attr.updateMappingType(monitor);
                                        if (attr.getTarget() == null) {
                                            log.debug("Can't find target attribute '" + attr.getTargetName() + "' in '" + containerMapping.getTargetName() + "'");
                                        }
                                    }
                                }
                                break;
                        }
                    }
                }
            }
        } finally {
            monitor.done();
        }
    }
    // Generates and executes the CREATE TABLE DDL for a "create" mapping.
    private void createTargetTable(DBCSession session, DatabaseMappingContainer containerMapping) throws DBException {
        DBSObjectContainer schema = settings.getContainer();
        if (schema == null) {
            throw new DBException("No target container selected");
        }
        String sql = generateTargetTableDDL(session.getProgressMonitor(), session.getDataSource(), schema, containerMapping);
        try {
            executeDDL(session, sql);
        } catch (DBCException e) {
            throw new DBCException("Can't create target table:\n" + sql, e);
        }
    }
    /**
     * Builds the DDL for the target mapping: a CREATE TABLE statement for
     * "create" mappings (including a PRIMARY KEY clause when every identifier
     * column of the source entity is mapped), or a sequence of ALTER TABLE …
     * ADD statements for newly mapped columns of an existing table. Returns
     * an empty string for "skip" mappings.
     */
    public static String generateTargetTableDDL(DBRProgressMonitor monitor, DBPDataSource dataSource, DBSObjectContainer schema, DatabaseMappingContainer containerMapping) throws DBException {
        if (containerMapping.getMappingType() == DatabaseMappingType.skip) {
            return "";
        }
        monitor.subTask("Create table " + containerMapping.getTargetName());
        StringBuilder sql = new StringBuilder(500);
        if (!(dataSource instanceof SQLDataSource)) {
            throw new DBException("Data source doesn't support SQL");
        }
        SQLDataSource targetDataSource = (SQLDataSource) dataSource;
        // Normalize the table name to the target database's identifier case.
        String tableName = DBObjectNameCaseTransformer.transformName(targetDataSource, containerMapping.getTargetName());
        containerMapping.setTargetName(tableName);
        if (containerMapping.getMappingType() == DatabaseMappingType.create) {
            sql.append("CREATE TABLE ");
            if (schema instanceof DBSSchema || schema instanceof DBSCatalog) {
                sql.append(DBUtils.getQuotedIdentifier(schema));
                sql.append(targetDataSource.getSQLDialect().getCatalogSeparator());
            }
            sql.append(DBUtils.getQuotedIdentifier(targetDataSource, tableName)).append("(\n");
            Map<DBSAttributeBase, DatabaseMappingAttribute> mappedAttrs = new HashMap<>();
            for (DatabaseMappingAttribute attr : containerMapping.getAttributeMappings(monitor)) {
                if (attr.getMappingType() != DatabaseMappingType.create) {
                    continue;
                }
                if (!mappedAttrs.isEmpty()) sql.append(",\n");
                sql.append("\t");
                appendAttributeClause(dataSource, sql, attr);
                mappedAttrs.put(attr.getSource(), attr);
            }
            if (containerMapping.getSource() instanceof DBSEntity) {
                // Make primary key
                Collection<? extends DBSEntityAttribute> identifier = DBUtils.getBestTableIdentifier(monitor, (DBSEntity) containerMapping.getSource());
                if (!CommonUtils.isEmpty(identifier)) {
                    // Only emit PRIMARY KEY if every identifier column is mapped.
                    boolean idMapped = true;
                    for (DBSEntityAttribute idAttr : identifier) {
                        if (!mappedAttrs.containsKey(idAttr)) {
                            idMapped = false;
                            break;
                        }
                    }
                    if (idMapped) {
                        sql.append(",\n\tPRIMARY KEY (");
                        boolean hasAttr = false;
                        for (DBSEntityAttribute idAttr : identifier) {
                            DatabaseMappingAttribute mappedAttr = mappedAttrs.get(idAttr);
                            if (hasAttr) sql.append(",");
                            sql.append(DBUtils.getQuotedIdentifier(dataSource, mappedAttr.getTargetName()));
                            hasAttr = true;
                        }
                        sql.append(")\n");
                    }
                }
            }
            sql.append(")");
        } else {
            for (DatabaseMappingAttribute attr : containerMapping.getAttributeMappings(monitor)) {
                if (attr.getMappingType() == DatabaseMappingType.create) {
                    sql.append(generateTargetAttributeDDL(dataSource, attr)).append(";\n");
                }
            }
        }
        return sql.toString();
    }
    // Appends one "name type [NOT NULL]" column clause to the DDL buffer.
    private static void appendAttributeClause(DBPDataSource dataSource, StringBuilder sql, DatabaseMappingAttribute attr) {
        sql.append(DBUtils.getQuotedIdentifier(dataSource, attr.getTargetName())).append(" ").append(attr.getTargetType(dataSource));
        if (SQLUtils.getDialectFromDataSource(dataSource).supportsNullability()) {
            if (attr.getSource().isRequired()) sql.append(" NOT NULL");
        }
    }
    // Generates and executes ALTER TABLE … ADD for one new column.
    private void createTargetAttribute(DBCSession session, DatabaseMappingAttribute attribute) throws DBCException {
        session.getProgressMonitor().subTask("Create column " + DBUtils.getObjectFullName(attribute.getParent().getTarget(), DBPEvaluationContext.DDL) + "." + attribute.getTargetName());
        String sql = generateTargetAttributeDDL(session.getDataSource(), attribute);
        try {
            executeDDL(session, sql);
        } catch (DBCException e) {
            throw new DBCException("Can't create target column:\n" + sql, e);
        }
    }
    @NotNull
    private static String generateTargetAttributeDDL(DBPDataSource dataSource, DatabaseMappingAttribute attribute) {
        StringBuilder sql = new StringBuilder(500);
        sql.append("ALTER TABLE ").append(DBUtils.getObjectFullName(attribute.getParent().getTarget(), DBPEvaluationContext.DDL))
            .append(" ADD ");
        appendAttributeClause(dataSource, sql, attribute);
        return sql.toString();
    }
    // Runs one DDL statement and commits it if the session is transactional.
    private void executeDDL(DBCSession session, String sql)
        throws DBCException {
        try (DBCStatement dbStat = DBUtils.makeStatement(session, sql, false)) {
            dbStat.executeStatement();
        }
        DBCTransactionManager txnManager = DBUtils.getTransactionManager(session.getExecutionContext());
        if (txnManager != null && !txnManager.isAutoCommit()) {
            // Commit DDL changes
            txnManager.commit(session);
        }
    }
    @Override
    public void finishTransfer(DBRProgressMonitor monitor, boolean last) {
        // Open an editor on the target table when configured (not on the final call).
        if (!last && settings.isOpenTableOnFinish()) {
            if (containerMapping != null && containerMapping.getTarget() != null) {
                DBWorkbench.getPlatformUI().openEntityEditor(containerMapping.getTarget());
            }
        }
    }
    public DBSDataManipulator getTargetObject() {
        return targetObject;
    }
    /**
     * Human-readable target name for UI display, annotated with the pending
     * action ("[Create]", "[Alter]", "[Skip]") when a mapping is involved.
     */
    @Override
    public String getObjectName() {
        String targetName = null;
        if (targetObject != null) {
            targetName = DBUtils.getObjectFullName(targetObject, DBPEvaluationContext.UI);
        }
        if (settings == null) {
            return targetName;
        }
        if (targetName != null) {
            return targetName;
        }
        DatabaseMappingContainer dataMapping = settings.getDataMapping(sourceObject);
        if (dataMapping == null) {
            return "?";
        }
        targetName = dataMapping.getTargetName();
        switch (dataMapping.getMappingType()) {
            case create:
                return targetName + " [Create]";
            case existing:
                // "[Alter]" only when at least one column still has to be created.
                for (DatabaseMappingAttribute attr : dataMapping.getAttributeMappings(new VoidProgressMonitor())) {
                    if (attr.getMappingType() == DatabaseMappingType.create) {
                        return targetName + " [Alter]";
                    }
                }
                return targetName;// + " [No changes]";
            case skip:
                return "[Skip]";
            default:
                return "?";
        }
    }
    @Override
    public DBPImage getObjectIcon() {
        if (targetObject instanceof DBPImageProvider) {
            return DBValueFormatting.getObjectImage(targetObject);
        }
        return DBIcon.TREE_TABLE;
    }
    @Override
    public String getObjectContainerName() {
        DBPDataSourceContainer container = getDataSourceContainer();
        return container != null ? container.getName() : "?";
    }
    @Override
    public DBPImage getObjectContainerIcon() {
        DBPDataSourceContainer container = getDataSourceContainer();
        return container != null ? container.getDriver().getIcon() : null;
    }
    // Resolves the data source container from the target object or settings.
    private DBPDataSourceContainer getDataSourceContainer() {
        if (targetObject != null) {
            return targetObject.getDataSource().getContainer();
        }
        DBSObjectContainer container = settings.getContainer();
        if (container != null) {
            return container.getDataSource().getContainer();
        }
        return null;
    }
}
|
apache-2.0
|
chadqueen/angular
|
modules/angular2/test/platform/browser/debug/debug_element_view_listener_spec.ts
|
2384
|
import {
AsyncTestCompleter,
beforeEach,
ddescribe,
xdescribe,
describe,
dispatchEvent,
expect,
iit,
inject,
beforeEachProviders,
it,
xit,
TestComponentBuilder,
} from 'angular2/testing_internal';
import {global} from 'angular2/src/facade/lang';
import {APP_VIEW_POOL_CAPACITY} from 'angular2/src/core/linker/view_pool';
import {provide, Component, Directive, Injectable, View} from 'angular2/core';
import {inspectNativeElement} from 'angular2/platform/browser';
import {IS_DART} from 'angular2/src/facade/lang';
// Minimal fixture component used by the element-probe specs below.
@Component({selector: 'my-comp'})
@View({directives: []})
@Injectable()
class MyComp {
  ctxProp: string;
}
// Specs for the element-probe debug hooks (inspectNativeElement / ng.probe).
export function main() {
  describe('element probe', function() {
    // Disable view pooling so destroyed views are actually released,
    // letting the cleanup spec observe a null probe result.
    beforeEachProviders(() => [provide(APP_VIEW_POOL_CAPACITY, {useValue: 0})]);

    it('should return a TestElement from a dom element',
       inject([TestComponentBuilder, AsyncTestCompleter], (tcb: TestComponentBuilder, async) => {
         tcb.overrideTemplate(MyComp, '<div some-dir></div>')
             .createAsync(MyComp)
             .then((componentFixture) => {
               expect(inspectNativeElement(componentFixture.debugElement.nativeElement)
                          .componentInstance)
                   .toBeAnInstanceOf(MyComp);
               async.done();
             });
       }));

    // Fixed spec title typo: "whent" -> "when".
    it('should clean up when the view is destroyed',
       inject([TestComponentBuilder, AsyncTestCompleter], (tcb: TestComponentBuilder, async) => {
         tcb.overrideTemplate(MyComp, '')
             .createAsync(MyComp)
             .then((componentFixture) => {
               componentFixture.destroy();
               // After destroy, probing the detached element must yield null.
               expect(inspectNativeElement(componentFixture.debugElement.nativeElement)).toBe(null);
               async.done();
             });
       }));

    // The global ng.probe hook is only installed on the JS side.
    if (!IS_DART) {
      it('should provide a global function to inspect elements',
         inject([TestComponentBuilder, AsyncTestCompleter], (tcb: TestComponentBuilder, async) => {
           tcb.overrideTemplate(MyComp, '')
               .createAsync(MyComp)
               .then((componentFixture) => {
                 expect(global['ng']['probe'](componentFixture.debugElement.nativeElement)
                            .componentInstance)
                     .toBeAnInstanceOf(MyComp);
                 async.done();
               });
         }));
    }
  });
}
|
apache-2.0
|
equella/Equella
|
Source/Plugins/Core/com.equella.core/src/com/tle/web/sections/jquery/JQuerySelector.java
|
4234
|
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.web.sections.jquery;
import com.tle.web.sections.SectionUtils;
import com.tle.web.sections.events.PreRenderContext;
import com.tle.web.sections.events.RenderContext;
import com.tle.web.sections.jquery.libraries.JQueryCore;
import com.tle.web.sections.js.ElementId;
import com.tle.web.sections.js.JSCallable;
import com.tle.web.sections.js.JSExpression;
import com.tle.web.sections.js.JSUtils;
import com.tle.web.sections.js.generic.Js;
import com.tle.web.sections.js.generic.expression.AbstractExpression;
import com.tle.web.sections.js.generic.expression.CombinedPropertyExpression;
import com.tle.web.sections.js.generic.expression.FunctionCallExpression;
import com.tle.web.sections.js.generic.expression.PropertyExpression;
import com.tle.web.sections.js.generic.expression.StringExpression;
/**
 * JS expression that renders as a jQuery selector call: either over a
 * lazily-resolved {@link ElementId}, a raw expression, or a typed selector
 * string (id / class / name / raw), optionally scoped to a context expression.
 */
public class JQuerySelector extends AbstractExpression {
    private ElementId elementId;
    private JSExpression expr;
    private JSExpression contextExpr;

    /** Kind of selector string to generate. */
    public enum Type {
        ID,
        CLASS,
        RAW,
        NAME
    }

    // Builds the jQuery selector string for the given type and identifier.
    private String getSelectString(Type type, String id) {
        String escaped = JSUtils.escape(id, false);
        if (type == Type.NAME) {
            // Attribute selector form: [name="..."]
            return "[name=\"" + escaped + "\"]";
        }
        String prefix = "";
        if (type == Type.ID) {
            prefix = "#";
        } else if (type == Type.CLASS) {
            prefix = ".";
        }
        // RAW keeps the empty prefix.
        return prefix + escaped;
    }

    public JQuerySelector(Object expr) {
        this.expr = JSUtils.convertExpression(expr);
    }

    public JQuerySelector(ElementId id) {
        elementId = id;
        id.registerUse();
    }

    public JQuerySelector(Type type, String id) {
        expr = new StringExpression(getSelectString(type, id));
    }

    public void setContextExpr(JSExpression contextExpr) {
        this.contextExpr = contextExpr;
    }

    @Override
    public String getExpression(RenderContext info) {
        if (expr == null) {
            // Element-id based selector: resolve the id lazily, once a render
            // context is available.
            expr = new StringExpression(getSelectString(Type.ID, elementId.getElementId(info)));
        }
        JSExpression[] args = contextExpr == null
            ? new JSExpression[] {expr}
            : new JSExpression[] {expr, contextExpr};
        return JQueryCore.JQUERY.getExpressionForCall(info, args);
    }

    @Override
    public void preRender(PreRenderContext info) {
        SectionUtils.preRender(info, JQueryCore.JQUERY, expr, contextExpr);
    }

    /** Expression of the form {@code $(element).val(value)}. */
    public static JSExpression valueSetExpression(ElementId element, Object value) {
        PropertyExpression valCall = new PropertyExpression(new FunctionCallExpression("val", value));
        return new CombinedPropertyExpression(new JQuerySelector(element), valCall);
    }

    /** Expression of the form {@code $(element).val()}. */
    public static JSExpression valueGetExpression(ElementId element) {
        PropertyExpression valCall = new PropertyExpression(new FunctionCallExpression("val"));
        return new CombinedPropertyExpression(new JQuerySelector(element), valCall);
    }

    /** Expression of the form {@code $(element).method(params...)}. */
    public static JSExpression methodCallExpression(
        ElementId element, JSCallable method, Object... params) {
        return methodCallExpression(new JQuerySelector(element), method, params);
    }

    /** Expression of the form {@code $(selector).method(params...)}. */
    public static JSExpression methodCallExpression(
        Type type, String id, JSCallable method, Object... params) {
        return methodCallExpression(new JQuerySelector(type, id), method, params);
    }

    private static JSExpression methodCallExpression(
        JQuerySelector selector, JSCallable method, Object... params) {
        return new CombinedPropertyExpression(selector, new PropertyExpression(Js.call(method, params)));
    }
}
|
apache-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.