Dataset schema (one record per repository file):
  repo_name : string (length 4 to 116)
  path      : string (length 4 to 379)
  size      : string (length 1 to 7)
  content   : string (length 3 to 1.05M)
  license   : string, one of 15 license classes
tgianos/genie
genie-agent/src/test/java/com/netflix/genie/agent/spring/package-info.java
861
/*
 *
 * Copyright 2018 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
/**
 * Tests for configuration classes in package.
 *
 * @author tgianos
 * @since 4.0.0
 */
@ParametersAreNonnullByDefault
package com.netflix.genie.agent.spring;

import javax.annotation.ParametersAreNonnullByDefault;
apache-2.0
apache/chukwa
core/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
17421
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.chukwa.extraction.demux; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.text.SimpleDateFormat; import java.util.Date; import org.apache.hadoop.chukwa.conf.ChukwaConfiguration; import org.apache.hadoop.chukwa.extraction.CHUKWA_CONSTANT; import org.apache.hadoop.chukwa.util.NagiosHelper; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.util.ToolRunner; import org.apache.log4j.Logger; public class DemuxManager implements CHUKWA_CONSTANT { static Logger log = Logger.getLogger(DemuxManager.class); int globalErrorcounter = 0; Date firstErrorTime = null; protected int ERROR_SLEEP_TIME = 60; protected int NO_DATASINK_SLEEP_TIME = 20; protected int DEFAULT_MAX_ERROR_COUNT = 6; protected int DEFAULT_MAX_FILES_PER_DEMUX = 500; protected int DEFAULT_REDUCER_COUNT = 8; protected int maxPermittedErrorCount = DEFAULT_MAX_ERROR_COUNT; protected int demuxReducerCount = 0; protected ChukwaConfiguration conf = null; protected FileSystem fs = null; protected int reprocess = 0; protected boolean sendAlert = true; protected SimpleDateFormat dayTextFormat = new java.text.SimpleDateFormat("yyyyMMdd"); protected volatile boolean isRunning = true; final private static PathFilter DATA_SINK_FILTER = new PathFilter() { public boolean accept(Path file) { return file.getName().endsWith(".done"); } }; public static void main(String[] args) throws Exception { DemuxManager manager = new DemuxManager(); manager.start(); } public DemuxManager() throws Exception { this.conf = new ChukwaConfiguration(); init(); } public DemuxManager(ChukwaConfiguration conf) throws Exception { this.conf = conf; init(); } protected void init() throws IOException, URISyntaxException { String fsName = conf.get(HDFS_DEFAULT_NAME_FIELD); fs = FileSystem.get(new URI(fsName), conf); } public void shutdown() { this.isRunning = false; } public int getReprocess() { return reprocess; } /** * Start the Demux Manager daemon * @throws Exception if error in processing data */ public void start() throws Exception { String chukwaRootDir = conf.get(CHUKWA_ROOT_DIR_FIELD, DEFAULT_CHUKWA_ROOT_DIR_NAME); if ( ! 
chukwaRootDir.endsWith("/") ) { chukwaRootDir += "/"; } log.info("chukwaRootDir:" + chukwaRootDir); String demuxRootDir = chukwaRootDir + DEFAULT_DEMUX_PROCESSING_DIR_NAME; String demuxErrorDir = demuxRootDir + DEFAULT_DEMUX_IN_ERROR_DIR_NAME; String demuxInputDir = demuxRootDir + DEFAULT_DEMUX_MR_INPUT_DIR_NAME; String demuxOutputDir = demuxRootDir + DEFAULT_DEMUX_MR_OUTPUT_DIR_NAME; String dataSinkDir = conf.get(CHUKWA_DATA_SINK_DIR_FIELD, chukwaRootDir +DEFAULT_CHUKWA_LOGS_DIR_NAME); if ( ! dataSinkDir.endsWith("/") ) { dataSinkDir += "/"; } log.info("dataSinkDir:" + dataSinkDir); String postProcessDir = conf.get(CHUKWA_POST_PROCESS_DIR_FIELD, chukwaRootDir +DEFAULT_CHUKWA_POSTPROCESS_DIR_NAME); if ( ! postProcessDir.endsWith("/") ) { postProcessDir += "/"; } log.info("postProcessDir:" + postProcessDir); String archiveRootDir = conf.get(CHUKWA_ARCHIVE_DIR_FIELD, chukwaRootDir +DEFAULT_CHUKWA_DATASINK_DIR_NAME); if ( ! archiveRootDir.endsWith("/") ) { archiveRootDir += "/"; } log.info("archiveRootDir:" + archiveRootDir); maxPermittedErrorCount = conf.getInt(CHUKWA_DEMUX_MAX_ERROR_COUNT_FIELD, DEFAULT_MAX_ERROR_COUNT); demuxReducerCount = conf.getInt(CHUKWA_DEMUX_REDUCER_COUNT_FIELD, DEFAULT_REDUCER_COUNT); log.info("demuxReducerCount:" + demuxReducerCount); String nagiosHost = conf.get(CHUKWA_NAGIOS_HOST_FIELD); int nagiosPort = conf.getInt(CHUKWA_NAGIOS_PORT_FIELD,0); String reportingHost = conf.get(CHUKWA_REPORTING_HOST_FIELD); log.info("Nagios information: nagiosHost:" + nagiosHost + ", nagiosPort:" + nagiosPort + ", reportingHost:" + reportingHost); if (nagiosHost == null || nagiosHost.length() == 0 || nagiosPort == 0 || reportingHost == null || reportingHost.length() == 0) { sendAlert = false; log.warn("Alerting is OFF"); } boolean demuxReady = false; while (isRunning) { try { demuxReady = false; if (maxPermittedErrorCount != -1 && globalErrorcounter >= maxPermittedErrorCount) { log.warn("==================\nToo many errors (" + globalErrorcounter + "), Bail out!\n=================="); break; } // Check for anomalies if (checkDemuxOutputDir(demuxOutputDir) == true) { // delete current demux output dir if ( deleteDemuxOutputDir(demuxOutputDir) == false ) { log.warn("Cannot delete an existing demux output directory!"); throw new IOException("Cannot move demuxOutput to postProcess!"); } continue; } else if (checkDemuxInputDir(demuxInputDir) == true) { // dataSink already there reprocess++; // Data has been processed more than 3 times ... move to InError directory if (reprocess > 3) { if (moveDataSinkFilesToDemuxErrorDirectory(demuxInputDir,demuxErrorDir) == false) { log.warn("Cannot move dataSink files to DemuxErrorDir!"); throw new IOException("Cannot move dataSink files to DemuxErrorDir!"); } reprocess = 0; continue; } log.error("Demux inputDir aready contains some dataSink files," + " going to reprocess, reprocessCount=" + reprocess); demuxReady = true; } else { // standard code path reprocess = 0; // Move new dataSink Files if (moveDataSinkFilesToDemuxInputDirectory(dataSinkDir, demuxInputDir) == true) { demuxReady = true; // if any are available } else { demuxReady = false; // if none } } // start a new demux ? if (demuxReady == true) { boolean demuxStatus = processData(dataSinkDir, demuxInputDir, demuxOutputDir, postProcessDir, archiveRootDir); sendDemuxStatusToNagios(nagiosHost,nagiosPort,reportingHost,demuxErrorDir,demuxStatus,null); // if demux suceeds, then we reset these. 
if (demuxStatus) { globalErrorcounter = 0; firstErrorTime = null; } } else { log.info("Demux not ready so going to sleep ..."); Thread.sleep(NO_DATASINK_SLEEP_TIME * 1000); } }catch(Throwable e) { globalErrorcounter ++; if (firstErrorTime == null) firstErrorTime = new Date(); log.warn("Consecutive error number " + globalErrorcounter + " encountered since " + firstErrorTime, e); sendDemuxStatusToNagios(nagiosHost,nagiosPort,reportingHost,demuxErrorDir,false, e.getMessage()); try { Thread.sleep(ERROR_SLEEP_TIME * 1000); } catch (InterruptedException e1) {/*do nothing*/ } init(); } } } /** * Send NSCA status to Nagios * @param nagiosHost * @param nagiosPort * @param reportingHost * @param demuxInErrorDir * @param demuxStatus * @param exception */ protected void sendDemuxStatusToNagios(String nagiosHost,int nagiosPort,String reportingHost, String demuxInErrorDir,boolean demuxStatus,String demuxException) { if (sendAlert == false) { return; } boolean demuxInErrorStatus = true; String demuxInErrorMsg = ""; try { Path pDemuxInErrorDir = new Path(demuxInErrorDir); if ( fs.exists(pDemuxInErrorDir)) { FileStatus[] demuxInErrorDirs = fs.listStatus(pDemuxInErrorDir); if (demuxInErrorDirs.length == 0) { demuxInErrorStatus = false; } } } catch (Throwable e) { demuxInErrorMsg = e.getMessage(); log.warn(e); } // send Demux status if (demuxStatus == true) { NagiosHelper.sendNsca("Demux OK",NagiosHelper.NAGIOS_OK); } else { NagiosHelper.sendNsca("Demux failed. " + demuxException,NagiosHelper.NAGIOS_CRITICAL); } // send DemuxInErrorStatus if (demuxInErrorStatus == false) { NagiosHelper.sendNsca("DemuxInError OK",NagiosHelper.NAGIOS_OK); } else { NagiosHelper.sendNsca("DemuxInError not empty -" + demuxInErrorMsg,NagiosHelper.NAGIOS_CRITICAL); } } /** * Process Data, i.e. * - run demux * - move demux output to postProcessDir * - move dataSink file to archiveDir * * @param dataSinkDir * @param demuxInputDir * @param demuxOutputDir * @param postProcessDir * @param archiveDir * @return True iff succeed * @throws IOException */ protected boolean processData(String dataSinkDir, String demuxInputDir, String demuxOutputDir, String postProcessDir, String archiveDir) throws IOException { boolean demuxStatus = false; long startTime = System.currentTimeMillis(); demuxStatus = runDemux(demuxInputDir, demuxOutputDir); log.info("Demux Duration: " + (System.currentTimeMillis() - startTime)); if (demuxStatus == false) { log.warn("Demux failed!"); } else { // Move demux output to postProcessDir if (checkDemuxOutputDir(demuxOutputDir)) { if (moveDemuxOutputDirToPostProcessDirectory(demuxOutputDir, postProcessDir) == false) { log.warn("Cannot move demuxOutput to postProcess! bail out!"); throw new IOException("Cannot move demuxOutput to postProcess! bail out!"); } } else { log.warn("Demux processing OK but no output"); } // Move DataSink Files to archiveDir if (moveDataSinkFilesToArchiveDirectory(demuxInputDir, archiveDir) == false) { log.warn("Cannot move datasinkFile to archive! bail out!"); throw new IOException("Cannot move datasinkFile to archive! 
bail out!"); } } return demuxStatus; } /** * Submit and Run demux Job * @param demuxInputDir * @param demuxOutputDir * @return true id Demux succeed */ protected boolean runDemux(String demuxInputDir, String demuxOutputDir) { // to reload the configuration, and demux's reduce number Configuration tempConf = new Configuration(conf); tempConf.reloadConfiguration(); demuxReducerCount = tempConf.getInt(CHUKWA_DEMUX_REDUCER_COUNT_FIELD, DEFAULT_REDUCER_COUNT); String[] demuxParams; int i=0; Demux.addParsers(tempConf); demuxParams = new String[4]; demuxParams[i++] = "-r"; demuxParams[i++] = "" + demuxReducerCount; demuxParams[i++] = demuxInputDir; demuxParams[i++] = demuxOutputDir; try { return ( 0 == ToolRunner.run(tempConf,new Demux(), demuxParams) ); } catch (Throwable e) { e.printStackTrace(); globalErrorcounter ++; if (firstErrorTime == null) firstErrorTime = new Date(); log.error("Failed to run demux. Consecutive error number " + globalErrorcounter + " encountered since " + firstErrorTime, e); } return false; } /** * Move dataSink files to Demux input directory * @param dataSinkDir * @param demuxInputDir * @return true if there's any dataSink files ready to be processed * @throws IOException */ protected boolean moveDataSinkFilesToDemuxInputDirectory( String dataSinkDir, String demuxInputDir) throws IOException { Path pDataSinkDir = new Path(dataSinkDir); Path pDemuxInputDir = new Path(demuxInputDir); log.info("dataSinkDir: " + dataSinkDir); log.info("demuxInputDir: " + demuxInputDir); boolean containsFile = false; FileStatus[] dataSinkFiles = fs.listStatus(pDataSinkDir,DATA_SINK_FILTER); if (dataSinkFiles.length > 0) { setup(pDemuxInputDir); } int maxFilesPerDemux = 0; for (FileStatus fstatus : dataSinkFiles) { boolean rename = fs.rename(fstatus.getPath(),pDemuxInputDir); log.info("Moving " + fstatus.getPath() + " to " + pDemuxInputDir +", status is:" + rename); maxFilesPerDemux ++; containsFile = true; if (maxFilesPerDemux >= DEFAULT_MAX_FILES_PER_DEMUX) { log.info("Max File per Demux reached:" + maxFilesPerDemux); break; } } return containsFile; } /** * Move sourceFolder inside destFolder * @param dataSinkDir : ex chukwa/demux/inputDir * @param demuxErrorDir : ex /chukwa/demux/inError * @return true if able to move chukwa/demux/inputDir to /chukwa/demux/inError/<YYYYMMDD>/demuxInputDirXXX * @throws IOException */ protected boolean moveDataSinkFilesToDemuxErrorDirectory( String dataSinkDir, String demuxErrorDir) throws IOException { demuxErrorDir += "/" + dayTextFormat.format(System.currentTimeMillis()); return moveFolder(dataSinkDir,demuxErrorDir,"demuxInputDir"); } /** * Move sourceFolder inside destFolder * @param demuxInputDir: ex chukwa/demux/inputDir * @param archiveDirectory: ex /chukwa/archives * @return true if able to move chukwa/demux/inputDir to /chukwa/archives/raw/<YYYYMMDD>/dataSinkDirXXX * @throws IOException */ protected boolean moveDataSinkFilesToArchiveDirectory( String demuxInputDir, String archiveDirectory) throws IOException { archiveDirectory += "/" + dayTextFormat.format(System.currentTimeMillis()); return moveFolder(demuxInputDir,archiveDirectory,"dataSinkDir"); } /** * Move sourceFolder inside destFolder * @param demuxOutputDir: ex chukwa/demux/outputDir * @param postProcessDirectory: ex /chukwa/postProcess * @return true if able to move chukwa/demux/outputDir to /chukwa/postProcess/demuxOutputDirXXX * @throws IOException */ protected boolean moveDemuxOutputDirToPostProcessDirectory( String demuxOutputDir, String postProcessDirectory) throws IOException { return 
moveFolder(demuxOutputDir,postProcessDirectory,"demuxOutputDir"); } /** * Test if demuxInputDir exists * @param demuxInputDir * @return true if demuxInputDir exists * @throws IOException */ protected boolean checkDemuxInputDir(String demuxInputDir) throws IOException { return dirExists(demuxInputDir); } /** * Test if demuxOutputDir exists * @param demuxOutputDir * @return true if demuxOutputDir exists * @throws IOException */ protected boolean checkDemuxOutputDir(String demuxOutputDir) throws IOException { return dirExists(demuxOutputDir); } /** * Delete DemuxOutput directory * @param demuxOutputDir * @return true if succeed * @throws IOException */ protected boolean deleteDemuxOutputDir(String demuxOutputDir) throws IOException { return fs.delete(new Path(demuxOutputDir), true); } /** * Create directory if !exists * @param directory * @throws IOException */ protected void setup(Path directory) throws IOException { if ( ! fs.exists(directory)) { fs.mkdirs(directory); } } /** * Check if source exists and if source is a directory * @param f source file */ protected boolean dirExists(String directory) throws IOException { Path pDirectory = new Path(directory); return (fs.exists(pDirectory) && fs.getFileStatus(pDirectory).isDir()); } /** * Move sourceFolder inside destFolder * @param srcDir * @param destDir * @return * @throws IOException */ protected boolean moveFolder(String srcDir,String destDir, String prefix) throws IOException { if (!destDir.endsWith("/")) { destDir +="/"; } Path pSrcDir = new Path(srcDir); Path pDestDir = new Path(destDir ); setup(pDestDir); destDir += prefix +"_" +System.currentTimeMillis(); Path pFinalDestDir = new Path(destDir ); return fs.rename(pSrcDir, pFinalDestDir); } }
apache-2.0
code-orchestra/metaas-fork
src/test/java/uk/co/badgersinfoil/metaas/LiteralTests.java
6042
package uk.co.badgersinfoil.metaas; import java.io.IOException; import uk.co.badgersinfoil.metaas.dom.ASArrayLiteral; import uk.co.badgersinfoil.metaas.dom.ASAssignmentExpression; import uk.co.badgersinfoil.metaas.dom.ASBooleanLiteral; import uk.co.badgersinfoil.metaas.dom.ASClassType; import uk.co.badgersinfoil.metaas.dom.ASCompilationUnit; import uk.co.badgersinfoil.metaas.dom.Literal; import uk.co.badgersinfoil.metaas.dom.ASMethod; import uk.co.badgersinfoil.metaas.dom.ASNullLiteral; import uk.co.badgersinfoil.metaas.dom.ASObjectLiteral; import uk.co.badgersinfoil.metaas.dom.ASRegexpLiteral; import uk.co.badgersinfoil.metaas.dom.ASStringLiteral; import uk.co.badgersinfoil.metaas.dom.ASIntegerLiteral; import uk.co.badgersinfoil.metaas.dom.ASUndefinedLiteral; import uk.co.badgersinfoil.metaas.dom.ASXMLLiteral; import uk.co.badgersinfoil.metaas.dom.Visibility; import junit.framework.TestCase; public class LiteralTests extends TestCase { private ActionScriptFactory fact = new ActionScriptFactory(); private ASCompilationUnit unit; private ASCompilationUnit reflect; private Literal literal = null; protected void setUp() { unit = fact.newClass("Test"); reflect = null; literal = null; } protected void tearDown() throws IOException { if (literal != null && reflect == null) { ASClassType clazz = (ASClassType)unit.getType(); ASMethod meth = clazz.newMethod("test", Visibility.PUBLIC, null); meth.newExprStmt(fact.newAssignExpression(fact.newExpression(getName()), literal)); reflect = assertReflection(); } } private ASCompilationUnit assertReflection() throws IOException { return CodeMirror.assertReflection(fact, unit); } public void testStringLiteral() { ASStringLiteral lit = fact.newStringLiteral(""); assertEquals("", lit.getValue()); lit.setValue("\""); assertEquals("\"", lit.getValue()); literal = lit; } public void testNumberLiteral() { ASIntegerLiteral lit = fact.newIntegerLiteral(123); assertEquals(123, lit.getValue()); lit.setValue(0); assertEquals(0, lit.getValue()); literal = lit; } public void testNullLiteral() { ASNullLiteral lit = fact.newNullLiteral(); assertNotNull(lit); literal = lit; } public void testParseNullLiteral() { ASNullLiteral lit = (ASNullLiteral)fact.newExpression("null"); assertNotNull(lit); literal = lit; } public void testBoolLiteral() { ASBooleanLiteral lit = fact.newBooleanLiteral(true); assertTrue(lit.getValue()); lit.setValue(false); assertFalse(lit.getValue()); lit = fact.newBooleanLiteral(false); assertFalse(lit.getValue()); lit.setValue(true); assertTrue(lit.getValue()); ExtraAssertions.assertInstanceof(fact.newExpression("true"), ASBooleanLiteral.class); literal = lit; } public void testUndefinedLiteral() { ASUndefinedLiteral lit = fact.newUndefinedLiteral(); assertNotNull(lit); literal = lit; } public void testParseUndefinedLiteral() { ASUndefinedLiteral lit = (ASUndefinedLiteral)fact.newExpression("undefined"); assertNotNull(lit); literal = lit; } public void testEmptyArrayLiteral() { ASArrayLiteral lit = fact.newArrayLiteral(); assertNotNull(lit); ExtraAssertions.assertSize(0, lit.getEntries()); literal = lit; } public void testArrayLiteral() { ASArrayLiteral lit = fact.newArrayLiteral(); lit.add(fact.newIntegerLiteral(1)); ExtraAssertions.assertSize(1, lit.getEntries()); ExtraAssertions.assertInstanceof(lit.getEntries().get(0), ASIntegerLiteral.class); lit.add(fact.newStringLiteral("foo")); ExtraAssertions.assertSize(2, lit.getEntries()); ExtraAssertions.assertInstanceof(lit.getEntries().get(1), ASStringLiteral.class); literal = lit; } public void 
testArrayLiteralRemoveFirst() { ASArrayLiteral lit = fact.newArrayLiteral(); lit.add(fact.newIntegerLiteral(1)); lit.add(fact.newStringLiteral("foo")); lit.remove(0); ExtraAssertions.assertSize(1, lit.getEntries()); literal = lit; } public void testArrayLiteralRemoveLast() { ASArrayLiteral lit = fact.newArrayLiteral(); lit.add(fact.newIntegerLiteral(1)); lit.add(fact.newStringLiteral("foo")); lit.remove(1); ExtraAssertions.assertSize(1, lit.getEntries()); literal = lit; } public void testArrayLiteralRemoveMiddle() { ASArrayLiteral lit = fact.newArrayLiteral(); lit.add(fact.newIntegerLiteral(1)); lit.add(fact.newStringLiteral("foo")); lit.add(fact.newBooleanLiteral(false)); lit.remove(1); ExtraAssertions.assertSize(2, lit.getEntries()); literal = lit; } public void testParseArrayLiteral() { literal = (ASArrayLiteral)fact.newExpression("['foo', [1]]"); assertNotNull(literal); } public void testEmptyObjectLiteral() { ASObjectLiteral lit = fact.newObjectLiteral(); assertNotNull(lit); ExtraAssertions.assertSize(0, lit.getFields()); literal = lit; } public void testObjectLiteral() { ASObjectLiteral lit = fact.newObjectLiteral(); ASObjectLiteral.Field field = lit.newField("foo", fact.newIntegerLiteral(33)); assertEquals("foo", field.getName()); ExtraAssertions.assertInstanceof(field.getValue(), ASIntegerLiteral.class); field = lit.newField("bar", fact.newStringLiteral("hello")); literal = lit; } public void testParseObjectLiteral() { ASAssignmentExpression expr = (ASAssignmentExpression)fact.newExpression("test = {foo:2}"); literal = (Literal)expr.getRightSubexpression(); } public void testXMLLiteral() { ASXMLLiteral lit = fact.newXMLLiteral("<hello>world</hello>"); assertNotNull(lit); literal = lit; } public void testParseXMLLiteral() { ASXMLLiteral lit = (ASXMLLiteral)fact.newExpression("<hello>world</hello>"); assertNotNull(lit); literal = lit; } public void testRegexpLiteral() { ASRegexpLiteral lit = fact.newRegexpLiteral("\\d+", ASRegexpLiteral.FLAG_NONE); assertNotNull(lit); literal = lit; } public void testParseRegexpLiteral() { ASRegexpLiteral lit = (ASRegexpLiteral)fact.newExpression("/[a-z]+-\\d+/i"); assertNotNull(lit); literal = lit; } }
apache-2.0
tarikgwa/test
html/var/generation/Magento/Framework/Model/ActionValidator/RemoveAction/Proxy.php
2336
<?php namespace Magento\Framework\Model\ActionValidator\RemoveAction; /** * Proxy class for @see \Magento\Framework\Model\ActionValidator\RemoveAction */ class Proxy extends \Magento\Framework\Model\ActionValidator\RemoveAction { /** * Object Manager instance * * @var \Magento\Framework\ObjectManagerInterface */ protected $_objectManager = null; /** * Proxied instance name * * @var string */ protected $_instanceName = null; /** * Proxied instance * * @var \Magento\Framework\Model\ActionValidator\RemoveAction */ protected $_subject = null; /** * Instance shareability flag * * @var bool */ protected $_isShared = null; /** * Proxy constructor * * @param \Magento\Framework\ObjectManagerInterface $objectManager * @param string $instanceName * @param bool $shared */ public function __construct(\Magento\Framework\ObjectManagerInterface $objectManager, $instanceName = '\\Magento\\Framework\\Model\\ActionValidator\\RemoveAction', $shared = true) { $this->_objectManager = $objectManager; $this->_instanceName = $instanceName; $this->_isShared = $shared; } /** * @return array */ public function __sleep() { return array('_subject', '_isShared'); } /** * Retrieve ObjectManager from global scope */ public function __wakeup() { $this->_objectManager = \Magento\Framework\App\ObjectManager::getInstance(); } /** * Clone proxied instance */ public function __clone() { $this->_subject = clone $this->_getSubject(); } /** * Get proxied instance * * @return \Magento\Framework\Model\ActionValidator\RemoveAction */ protected function _getSubject() { if (!$this->_subject) { $this->_subject = true === $this->_isShared ? $this->_objectManager->get($this->_instanceName) : $this->_objectManager->create($this->_instanceName); } return $this->_subject; } /** * {@inheritdoc} */ public function isAllowed(\Magento\Framework\Model\AbstractModel $model) { return $this->_getSubject()->isAllowed($model); } }
apache-2.0
fhg-fokus-nubomedia/signaling-plane
modules/lib-sip/src/main/java/de/fhg/fokus/ims/core/matching/Simple.java
144
package de.fhg.fokus.ims.core.matching;

public class Simple {
    private Operation operation;
    private String name;
    private Object value;
}
apache-2.0
brmeyer/s-ramp
ui/src/main/java/org/artificer/ui/client/shared/beans/OntologyBean.java
2613
/* * Copyright 2013 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.artificer.ui.client.shared.beans; import java.util.ArrayList; import java.util.List; import org.jboss.errai.common.client.api.annotations.Portable; /** * Models an S-RAMP Ontology, including the entire tree of classes. * @author eric.wittmann@redhat.com */ @Portable public class OntologyBean extends OntologySummaryBean { private static final long serialVersionUID = 9164017316249330169L; private String lastModifiedBy; private List<OntologyClassBean> rootClasses = new ArrayList<OntologyClassBean>(); /** * Constructor. */ public OntologyBean() { } /** * @return the lastModifiedBy */ public String getLastModifiedBy() { return lastModifiedBy; } /** * @param lastModifiedBy the lastModifiedBy to set */ public void setLastModifiedBy(String lastModifiedBy) { this.lastModifiedBy = lastModifiedBy; } /** * @return the rootClasses */ public List<OntologyClassBean> getRootClasses() { return rootClasses; } /** * Creates a class (and indexes it). * @param id */ public OntologyClassBean createClass(String id) { OntologyClassBean c = new OntologyClassBean(); c.setId(id); String uri = getBase() + "#" + id; c.setUri(uri); return c; } /** * Make a copy of the ontology. */ public OntologyBean copy() { OntologyBean c = new OntologyBean(); c.setBase(this.getBase()); c.setComment(this.getComment()); c.setCreatedBy(this.getCreatedBy()); c.setCreatedOn(this.getCreatedOn()); c.setId(this.getId()); c.setLabel(this.getLabel()); c.setLastModifiedBy(this.getLastModifiedBy()); c.setLastModifiedOn(this.getLastModifiedOn()); c.setUuid(this.getUuid()); for (OntologyClassBean bean : getRootClasses()) { c.getRootClasses().add(bean.copy()); } return c; } }
apache-2.0
grchanan/kite
kite-tools/src/test/java/org/kitesdk/cli/commands/TestBaseCommand.java
6409
/* * Copyright 2013 Cloudera Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kitesdk.cli.commands; import com.beust.jcommander.internal.Lists; import java.io.IOException; import java.util.List; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.kitesdk.data.TestHelpers; import org.slf4j.Logger; import static org.mockito.Mockito.*; /** * This tests that the base command correctly builds a repository URI from the * given options. */ public class TestBaseCommand { public static class TestCommand extends BaseDatasetCommand { public TestCommand(Logger console) { super(console); } @Override public int run() throws IOException { return 0; } @Override public List<String> getExamples() { return null; } } private Logger console = null; private BaseDatasetCommand command = null; @Before public void createCommand() { this.console = mock(Logger.class); this.command = new TestCommand(console); } @Test public void testDefaults() { Assert.assertEquals("repo:hive", command.buildRepoURI()); verify(console).trace(contains("repo:hive")); } @Test public void testManagedHiveRepo() { command.hive = true; command.directory = null; Assert.assertEquals("repo:hive", command.buildRepoURI()); verify(console).trace(contains("repo:hive")); } @Test public void testExternalHiveRepo() { command.hive = true; command.directory = "/tmp/data"; Assert.assertEquals("repo:hive:/tmp/data", command.buildRepoURI()); verify(console).trace(contains("repo:hive:/tmp/data")); } @Test public void testRelativeExternalHiveRepo() { command.hive = true; command.directory = "data"; Assert.assertEquals("repo:hive:data", command.buildRepoURI()); verify(console).trace(contains("repo:hive:data")); } @Test public void testHDFSRepo() { command.hdfs = true; command.directory = "/tmp/data"; Assert.assertEquals("repo:hdfs:/tmp/data", command.buildRepoURI()); verify(console).trace(contains("repo:hdfs:/tmp/data")); } @Test public void testHDFSRepoRejectsNullPath() { command.hdfs = true; command.directory = null; TestHelpers.assertThrows( "Should reject null directory for HDFS", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } } ); verifyZeroInteractions(console); } @Test public void testLocalRepo() { command.local = true; command.directory = "/tmp/data"; Assert.assertEquals("repo:file:/tmp/data", command.buildRepoURI()); verify(console).trace(contains("repo:file:/tmp/data")); } @Test public void testLocalRepoRejectsNullPath() { command.hive = false; command.local = true; command.directory = null; TestHelpers.assertThrows( "Should reject null directory for local", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } } ); verifyZeroInteractions(console); } @Test public void testHBaseRepo() { command.hbase = true; command.zookeeper = Lists.newArrayList("zk1:1234", "zk2"); Assert.assertEquals("repo:hbase:zk1:1234,zk2", command.buildRepoURI()); verify(console).trace(contains("repo:hbase:zk1:1234,zk2")); } @Test 
public void testHbaseRepoRejectsNullZooKeeper() { command.hive = false; command.local = true; command.directory = null; TestHelpers.assertThrows( "Should reject null ZooKeeper for local, non-Hive", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } }); verifyZeroInteractions(console); } @Test public void testRejectsMultipleStorageSchemes() { command.hive = true; command.local = true; TestHelpers.assertThrows( "Should reject multiple storage: Hive and local", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } }); command.local = false; command.hdfs = true; TestHelpers.assertThrows( "Should reject multiple storage: Hive and HDFS", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } }); command.hdfs = false; command.hbase = true; TestHelpers.assertThrows( "Should reject multiple storage: Hive and HBase", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } }); command.hive = false; command.local = true; TestHelpers.assertThrows( "Should reject multiple storage: HBase and local", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } }); command.local = false; command.hdfs = true; TestHelpers.assertThrows( "Should reject multiple storage: HBase and HDFS", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } }); command.hbase = false; command.local = true; TestHelpers.assertThrows( "Should reject multiple storage: HDFS and local", IllegalArgumentException.class, new Runnable() { @Override public void run() { command.buildRepoURI(); } }); } }
apache-2.0
fx19880617/pinot-1
pinot-core/src/test/java/com/linkedin/pinot/segments/v1/creator/SegmentTestUtils.java
9569
/** * Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.linkedin.pinot.segments.v1.creator; import com.google.common.base.Preconditions; import com.linkedin.pinot.common.data.DimensionFieldSpec; import com.linkedin.pinot.common.data.FieldSpec; import com.linkedin.pinot.common.data.FieldSpec.DataType; import com.linkedin.pinot.common.data.FieldSpec.FieldType; import com.linkedin.pinot.common.data.MetricFieldSpec; import com.linkedin.pinot.common.data.Schema; import com.linkedin.pinot.common.data.TimeFieldSpec; import com.linkedin.pinot.common.data.TimeGranularitySpec; import com.linkedin.pinot.core.data.readers.FileFormat; import com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig; import com.linkedin.pinot.core.indexsegment.generator.SegmentVersion; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Nonnull; import org.apache.avro.Schema.Field; import org.apache.avro.Schema.Type; import org.apache.avro.file.DataFileStream; import org.apache.avro.generic.GenericData.Array; import org.apache.avro.generic.GenericData.Record; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericRecord; import org.apache.avro.util.Utf8; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.Predicate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class SegmentTestUtils { private static final Logger LOGGER = LoggerFactory.getLogger(SegmentTestUtils.class); @Nonnull public static SegmentGeneratorConfig getSegmentGeneratorConfigWithoutTimeColumn(@Nonnull File avroFile, @Nonnull File outputDir, @Nonnull String tableName) throws IOException { SegmentGeneratorConfig segmentGeneratorConfig = new SegmentGeneratorConfig(extractSchemaFromAvroWithoutTime(avroFile)); segmentGeneratorConfig.setInputFilePath(avroFile.getAbsolutePath()); segmentGeneratorConfig.setOutDir(outputDir.getAbsolutePath()); segmentGeneratorConfig.setTableName(tableName); return segmentGeneratorConfig; } public static SegmentGeneratorConfig getSegmentGenSpecWithSchemAndProjectedColumns(File inputAvro, File outputDir, String timeColumn, TimeUnit timeUnit, String tableName) throws IOException { final SegmentGeneratorConfig segmentGenSpec = new SegmentGeneratorConfig(extractSchemaFromAvroWithoutTime(inputAvro)); segmentGenSpec.setInputFilePath(inputAvro.getAbsolutePath()); segmentGenSpec.setTimeColumnName(timeColumn); segmentGenSpec.setSegmentTimeUnit(timeUnit); segmentGenSpec.setFormat(FileFormat.AVRO); segmentGenSpec.setSegmentVersion(SegmentVersion.v1); segmentGenSpec.setTableName(tableName); segmentGenSpec.setOutDir(outputDir.getAbsolutePath()); segmentGenSpec.createInvertedIndexForAllColumns(); return segmentGenSpec; } public static SegmentGeneratorConfig 
getSegmentGeneratorConfigWithSchema(File inputAvro, File outputDir, String tableName, Schema schema) { SegmentGeneratorConfig segmentGeneratorConfig = new SegmentGeneratorConfig(schema); segmentGeneratorConfig.setInputFilePath(inputAvro.getAbsolutePath()); segmentGeneratorConfig.setOutDir(outputDir.getAbsolutePath()); segmentGeneratorConfig.setFormat(FileFormat.AVRO); segmentGeneratorConfig.setSegmentVersion(SegmentVersion.v1); segmentGeneratorConfig.setTableName(tableName); segmentGeneratorConfig.setTimeColumnName(schema.getTimeColumnName()); segmentGeneratorConfig.setSegmentTimeUnit(schema.getOutgoingTimeUnit()); return segmentGeneratorConfig; } public static List<String> getColumnNamesFromAvro(File avro) throws IOException { List<String> ret = new ArrayList<String>(); DataFileStream<GenericRecord> dataStream = new DataFileStream<GenericRecord>(new FileInputStream(avro), new GenericDatumReader<GenericRecord>()); for (final Field field : dataStream.getSchema().getFields()) { ret.add(field.name()); } return ret; } public static Schema extractSchemaFromAvro(File avroFile, Map<String, FieldType> fieldTypeMap, TimeUnit granularity) throws IOException { DataFileStream<GenericRecord> dataStream = new DataFileStream<>(new FileInputStream(avroFile), new GenericDatumReader<GenericRecord>()); Schema schema = new Schema(); for (final Field field : dataStream.getSchema().getFields()) { final String columnName = field.name(); FieldType fieldType = fieldTypeMap.get(columnName); Preconditions.checkNotNull(fieldType); switch (fieldType) { case TIME: final TimeGranularitySpec gSpec = new TimeGranularitySpec(getColumnType(field), granularity, columnName); final TimeFieldSpec fSpec = new TimeFieldSpec(gSpec); schema.addField(fSpec); continue; case DIMENSION: final FieldSpec dimensionFieldSpec = new DimensionFieldSpec(columnName, getColumnType(field), isSingleValueField(field)); schema.addField(dimensionFieldSpec); continue; case METRIC: final FieldSpec metricFieldSpec = new MetricFieldSpec(columnName, getColumnType(field)); schema.addField(metricFieldSpec); continue; default: throw new UnsupportedOperationException("Unsupported field type: " + fieldType); } } dataStream.close(); return schema; } public static Schema extractSchemaFromAvroWithoutTime(File avroFile) throws IOException { DataFileStream<GenericRecord> dataStream = new DataFileStream<GenericRecord>(new FileInputStream(avroFile), new GenericDatumReader<GenericRecord>()); Schema schema = new Schema(); for (final Field field : dataStream.getSchema().getFields()) { try { getColumnType(field); } catch (Exception e) { LOGGER.warn("Caught exception while converting Avro field {} of type {}, field will not be in schema.", field.name(), field.schema().getType()); continue; } final String columnName = field.name(); final String pinotType = field.getProp("pinotType"); final FieldSpec fieldSpec; if (pinotType != null && "METRIC".equals(pinotType)) { fieldSpec = new MetricFieldSpec(); } else { fieldSpec = new DimensionFieldSpec(); } fieldSpec.setName(columnName); fieldSpec.setDataType(getColumnType(dataStream.getSchema().getField(columnName))); fieldSpec.setSingleValueField(isSingleValueField(dataStream.getSchema().getField(columnName))); schema.addField(fieldSpec); } dataStream.close(); return schema; } private static boolean isSingleValueField(Field field) { org.apache.avro.Schema fieldSchema = field.schema(); fieldSchema = extractSchemaFromUnionIfNeeded(fieldSchema); final Type type = fieldSchema.getType(); if (type == Type.ARRAY) { return false; } return 
true; } public static DataType getColumnType(Field field) { org.apache.avro.Schema fieldSchema = field.schema(); fieldSchema = extractSchemaFromUnionIfNeeded(fieldSchema); final Type type = fieldSchema.getType(); if (type == Type.ARRAY) { org.apache.avro.Schema elementSchema = extractSchemaFromUnionIfNeeded(fieldSchema.getElementType()); if (elementSchema.getType() == Type.RECORD) { if (elementSchema.getFields().size() == 1) { elementSchema = elementSchema.getFields().get(0).schema(); } else { throw new RuntimeException("More than one schema in Multi-value column!"); } elementSchema = extractSchemaFromUnionIfNeeded(elementSchema); } return DataType.valueOf(elementSchema.getType()); } else { return DataType.valueOf(type); } } private static org.apache.avro.Schema extractSchemaFromUnionIfNeeded(org.apache.avro.Schema fieldSchema) { if ((fieldSchema).getType() == Type.UNION) { fieldSchema = ((org.apache.avro.Schema) CollectionUtils.find(fieldSchema.getTypes(), new Predicate() { @Override public boolean evaluate(Object object) { return ((org.apache.avro.Schema) object).getType() != Type.NULL; } })); } return fieldSchema; } private static Object[] transformAvroArrayToObjectArray(Array arr) { if (arr == null) { return new Object[0]; } final Object[] ret = new Object[arr.size()]; final Iterator iterator = arr.iterator(); int i = 0; while (iterator.hasNext()) { Object value = iterator.next(); if (value instanceof Record) { value = ((Record) value).get(0); } if (value instanceof Utf8) { value = ((Utf8) value).toString(); } ret[i++] = value; } return ret; } }
apache-2.0
erikssonorjan/ecommerce-framework
framework/ecommerce-framework-odata-service/src/main/java/com/sdl/ecommerce/odata/function/ProductVariantFunctionImport.java
344
package com.sdl.ecommerce.odata.function;

import com.sdl.odata.api.edm.annotations.EdmFunctionImport;

/**
 * ProductVariantFunctionImport
 *
 * @author nic
 */
@EdmFunctionImport(
        name = "ProductVariant",
        function = "ProductVariantFunction",
        namespace = "SDL.ECommerce"
)
public class ProductVariantFunctionImport {
}
apache-2.0
spartanncoin/Spartancoinj
core/src/main/java/com/google/spartancoin/core/ChildMessage.java
3124
/** * Copyright 2011 Steve Coughlan. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.spartancoin.core; import javax.annotation.Nullable; /** * Represents a Message type that can be contained within another Message. ChildMessages that have a cached * backing byte array need to invalidate their parent's caches as well as their own if they are modified. * * @author git */ public abstract class ChildMessage extends Message { private static final long serialVersionUID = -7657113383624517931L; @Nullable private Message parent; protected ChildMessage() { } public ChildMessage(NetworkParameters params) { super(params); } public ChildMessage(NetworkParameters params, byte[] msg, int offset, int protocolVersion) throws ProtocolException { super(params, msg, offset, protocolVersion); } public ChildMessage(NetworkParameters params, byte[] msg, int offset, int protocolVersion, Message parent, boolean parseLazy, boolean parseRetain, int length) throws ProtocolException { super(params, msg, offset, protocolVersion, parseLazy, parseRetain, length); this.parent = parent; } public ChildMessage(NetworkParameters params, byte[] msg, int offset) throws ProtocolException { super(params, msg, offset); } public ChildMessage(NetworkParameters params, byte[] msg, int offset, @Nullable Message parent, boolean parseLazy, boolean parseRetain, int length) throws ProtocolException { super(params, msg, offset, parseLazy, parseRetain, length); this.parent = parent; } public void setParent(@Nullable Message parent) { if (this.parent != null && this.parent != parent && parent != null) { // After old parent is unlinked it won't be able to receive notice if this ChildMessage // changes internally. To be safe we invalidate the parent cache to ensure it rebuilds // manually on serialization. this.parent.unCache(); } this.parent = parent; } /* (non-Javadoc) * @see Message#unCache() */ @Override protected void unCache() { super.unCache(); if (parent != null) parent.unCache(); } protected void adjustLength(int adjustment) { adjustLength(0, adjustment); } protected void adjustLength(int newArraySize, int adjustment) { super.adjustLength(newArraySize, adjustment); if (parent != null) parent.adjustLength(newArraySize, adjustment); } }
apache-2.0
noslenfa/tdjangorest
uw/lib/python2.7/site-packages/IPython/kernel/zmq/displayhook.py
2119
import __builtin__ import sys from IPython.core.displayhook import DisplayHook from IPython.kernel.inprocess.socket import SocketABC from IPython.utils.jsonutil import encode_images from IPython.utils.traitlets import Instance, Dict from session import extract_header, Session class ZMQDisplayHook(object): """A simple displayhook that publishes the object's repr over a ZeroMQ socket.""" topic=b'pyout' def __init__(self, session, pub_socket): self.session = session self.pub_socket = pub_socket self.parent_header = {} def __call__(self, obj): if obj is None: return __builtin__._ = obj sys.stdout.flush() sys.stderr.flush() msg = self.session.send(self.pub_socket, u'pyout', {u'data':repr(obj)}, parent=self.parent_header, ident=self.topic) def set_parent(self, parent): self.parent_header = extract_header(parent) class ZMQShellDisplayHook(DisplayHook): """A displayhook subclass that publishes data using ZeroMQ. This is intended to work with an InteractiveShell instance. It sends a dict of different representations of the object.""" topic=None session = Instance(Session) pub_socket = Instance(SocketABC) parent_header = Dict({}) def set_parent(self, parent): """Set the parent for outbound messages.""" self.parent_header = extract_header(parent) def start_displayhook(self): self.msg = self.session.msg(u'pyout', {}, parent=self.parent_header) def write_output_prompt(self): """Write the output prompt.""" self.msg['content']['execution_count'] = self.prompt_count def write_format_data(self, format_dict, md_dict=None): self.msg['content']['data'] = encode_images(format_dict) self.msg['content']['metadata'] = md_dict def finish_displayhook(self): """Finish up all displayhook activities.""" sys.stdout.flush() sys.stderr.flush() self.session.send(self.pub_socket, self.msg, ident=self.topic) self.msg = None
apache-2.0
Fabryprog/camel
components/camel-saxon/src/test/java/org/apache/camel/component/xquery/XQueryNullHeaderTest.java
2525
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.xquery; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.spring.CamelSpringTestSupport; import org.junit.Test; import org.springframework.context.support.ClassPathXmlApplicationContext; public class XQueryNullHeaderTest extends CamelSpringTestSupport { @Test public void testHeader() throws Exception { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("<employee id=\"James\"><name><firstName>James</firstName>" + "<lastName>Strachan</lastName></name><location><city>London</city></location></employee>"); template.sendBodyAndHeader("direct:start", "<person user='James'><firstName>James</firstName>" + "<lastName>Strachan</lastName><city>London</city></person>", "foo", "123"); assertMockEndpointsSatisfied(); } @Test public void testHeaderWithNull() throws Exception { MockEndpoint mock = getMockEndpoint("mock:result"); mock.expectedBodiesReceived("<employee id=\"James\"><name><firstName>James</firstName>" + "<lastName>Strachan</lastName></name><location><city>London</city></location></employee>"); template.sendBodyAndHeader("direct:start", "<person user='James'><firstName>James</firstName>" + "<lastName>Strachan</lastName><city>London</city></person>", "foo", null); assertMockEndpointsSatisfied(); } protected ClassPathXmlApplicationContext createApplicationContext() { return new ClassPathXmlApplicationContext("org/apache/camel/component/xquery/xqueryExampleTest.xml"); } }
apache-2.0
gaohoward/activemq-artemis
artemis-server/src/main/java/org/apache/activemq/artemis/core/server/ServerSession.java
15771
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.server; import javax.json.JsonArrayBuilder; import javax.transaction.xa.Xid; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; import org.apache.activemq.artemis.Closeable; import org.apache.activemq.artemis.api.core.Message; import org.apache.activemq.artemis.api.core.Pair; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.persistence.OperationContext; import org.apache.activemq.artemis.core.postoffice.RoutingStatus; import org.apache.activemq.artemis.core.security.SecurityAuth; import org.apache.activemq.artemis.core.server.impl.AddressInfo; import org.apache.activemq.artemis.core.transaction.Transaction; import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection; public interface ServerSession extends SecurityAuth { String getName(); int getMinLargeMessageSize(); Object getConnectionID(); Executor getSessionExecutor(); /** * Certain protocols may create an internal session that shouldn't go through security checks. * make sure you don't expose this property through any protocol layer as that would be a security breach */ void enableSecurity(); void disableSecurity(); @Override RemotingConnection getRemotingConnection(); Transaction newTransaction(); boolean removeConsumer(long consumerID) throws Exception; void acknowledge(long consumerID, long messageID) throws Exception; void individualAcknowledge(long consumerID, long messageID) throws Exception; void individualCancel(long consumerID, long messageID, boolean failed) throws Exception; void expire(long consumerID, long messageID) throws Exception; void rollback(boolean considerLastMessageAsDelivered) throws Exception; void commit() throws Exception; void xaCommit(Xid xid, boolean onePhase) throws Exception; void xaEnd(Xid xid) throws Exception; void xaForget(Xid xid) throws Exception; void xaJoin(Xid xid) throws Exception; void xaPrepare(Xid xid) throws Exception; void xaResume(Xid xid) throws Exception; void xaRollback(Xid xid) throws Exception; void xaStart(Xid xid) throws Exception; void xaFailed(Xid xid) throws Exception; void xaSuspend() throws Exception; void markTXFailed(Throwable e); List<Xid> xaGetInDoubtXids(); int xaGetTimeout(); void xaSetTimeout(int timeout); void start(); void stop(); void addCloseable(Closeable closeable); ServerConsumer createConsumer(long consumerID, SimpleString queueName, SimpleString filterString, int priority, boolean browseOnly, boolean supportLargeMessage, Integer credits) throws Exception; /** * To be used by protocol heads that needs to control the transaction outside the session context. 
*/ void resetTX(Transaction transaction); Queue createQueue(SimpleString address, SimpleString name, RoutingType routingType, SimpleString filterString, boolean temporary, boolean durable) throws Exception; Queue createQueue(AddressInfo address, SimpleString name, SimpleString filterString, boolean temporary, boolean durable) throws Exception; /** * Create queue with default delivery mode * * @param address * @param name * @param filterString * @param temporary * @param durable * @return * @throws Exception */ Queue createQueue(SimpleString address, SimpleString name, SimpleString filterString, boolean temporary, boolean durable) throws Exception; Queue createQueue(SimpleString address, SimpleString name, RoutingType routingType, SimpleString filterString, boolean temporary, boolean durable, int maxConsumers, boolean purgeOnNoConsumers, boolean autoCreated) throws Exception; Queue createQueue(SimpleString address, SimpleString name, RoutingType routingType, SimpleString filterString, boolean temporary, boolean durable, int maxConsumers, boolean purgeOnNoConsumers, Boolean exclusive, Boolean lastValue, boolean autoCreated) throws Exception; Queue createQueue(SimpleString address, SimpleString name, RoutingType routingType, SimpleString filterString, boolean temporary, boolean durable, int maxConsumers, boolean purgeOnNoConsumers, Boolean exclusive, Boolean groupRebalance, Integer groupBuckets, Boolean lastValue, SimpleString lastValueKey, Boolean nonDestructive, Integer consumersBeforeDispatch, Long delayBeforeDispatch, Boolean autoDelete, Long autoDeleteDelay, Long autoDeleteMessageCount, boolean autoCreated) throws Exception; Queue createQueue(SimpleString address, SimpleString name, RoutingType routingType, SimpleString filterString, boolean temporary, boolean durable, boolean autoCreated) throws Exception; Queue createQueue(AddressInfo addressInfo, SimpleString name, SimpleString filterString, boolean temporary, boolean durable, boolean autoCreated) throws Exception; Queue createQueue(AddressInfo addressInfo, SimpleString name, SimpleString filterString, boolean temporary, boolean durable, Boolean exclusive, Boolean lastValue, boolean autoCreated) throws Exception; AddressInfo createAddress(SimpleString address, EnumSet<RoutingType> routingTypes, boolean autoCreated) throws Exception; AddressInfo createAddress(SimpleString address, RoutingType routingType, boolean autoCreated) throws Exception; AddressInfo createAddress(AddressInfo addressInfo, boolean autoCreated) throws Exception; void deleteQueue(SimpleString name) throws Exception; ServerConsumer createConsumer(long consumerID, SimpleString queueName, SimpleString filterString, boolean browseOnly) throws Exception; ServerConsumer createConsumer(long consumerID, SimpleString queueName, SimpleString filterString, boolean browseOnly, boolean supportLargeMessage, Integer credits) throws Exception; QueueQueryResult executeQueueQuery(SimpleString name) throws Exception; AddressQueryResult executeAddressQuery(SimpleString name) throws Exception; BindingQueryResult executeBindingQuery(SimpleString address) throws Exception; void closeConsumer(long consumerID) throws Exception; void receiveConsumerCredits(long consumerID, int credits) throws Exception; RoutingStatus send(Transaction tx, Message message, boolean direct, boolean noAutoCreateQueue) throws Exception; RoutingStatus send(Transaction tx, Message message, boolean direct, boolean noAutoCreateQueue, RoutingContext routingContext) throws Exception; RoutingStatus doSend(Transaction tx, 
Message msg, SimpleString originalAddress, boolean direct, boolean noAutoCreateQueue) throws Exception; RoutingStatus doSend(Transaction tx, Message msg, SimpleString originalAddress, boolean direct, boolean noAutoCreateQueue, RoutingContext routingContext) throws Exception; RoutingStatus send(Message message, boolean direct, boolean noAutoCreateQueue) throws Exception; RoutingStatus send(Message message, boolean direct) throws Exception; void forceConsumerDelivery(long consumerID, long sequence) throws Exception; void requestProducerCredits(SimpleString address, int credits) throws Exception; void close(boolean failed) throws Exception; void setTransferring(boolean transferring); Set<ServerConsumer> getServerConsumers(); void addMetaData(String key, String data) throws Exception; boolean addUniqueMetaData(String key, String data) throws Exception; String getMetaData(String key); Map<String, String> getMetaData(); String[] getTargetAddresses(); /** * Add all the producers detail to the JSONArray object. * This is a method to be used by the management layer. * * @param objs * @throws Exception */ void describeProducersInfo(JsonArrayBuilder objs) throws Exception; String getLastSentMessageID(String address); long getCreationTime(); OperationContext getSessionContext(); Transaction getCurrentTransaction(); ServerConsumer locateConsumer(long consumerID) throws Exception; boolean isClosed(); void createSharedQueue(SimpleString address, SimpleString name, RoutingType routingType, SimpleString filterString, boolean durable, Integer maxConsumers, Boolean purgeOnNoConsumers, Boolean exclusive, Boolean lastValue) throws Exception; void createSharedQueue(SimpleString address, SimpleString name, RoutingType routingType, SimpleString filterString, boolean durable, Integer maxConsumers, Boolean purgeOnNoConsumers, Boolean exclusive, Boolean groupRebalance, Integer groupBuckets, Boolean lastValue, SimpleString lastValueKey, Boolean nonDestructive, Integer consumersBeforeDispatch, Long delayBeforeDispatch, Boolean autoDelete, Long autoDeleteDelay, Long autoDeleteMessageCount) throws Exception; void createSharedQueue(SimpleString address, SimpleString name, RoutingType routingType, boolean durable, SimpleString filterString) throws Exception; void createSharedQueue(SimpleString address, SimpleString name, boolean durable, SimpleString filterString) throws Exception; List<MessageReference> getInTXMessagesForConsumer(long consumerId); String getValidatedUser(); SimpleString getMatchingQueue(SimpleString address, RoutingType routingType) throws Exception; SimpleString getMatchingQueue(SimpleString address, SimpleString queueName, RoutingType routingType) throws Exception; AddressInfo getAddress(SimpleString address); /** * Strip the prefix (if it exists) from the address based on the prefixes provided to the ServerSession constructor. * * @param address the address to inspect * @return the canonical (i.e. non-prefixed) address name */ SimpleString removePrefix(SimpleString address); /** * Get the prefix (if it exists) from the address based on the prefixes provided to the ServerSession constructor. * * @param address the address to inspect * @return the canonical (i.e. non-prefixed) address name */ SimpleString getPrefix(SimpleString address); /** * Get the canonical (i.e. non-prefixed) address and the corresponding routing-type. * * @param addressInfo the address to inspect * @return a {@code org.apache.activemq.artemis.api.core.Pair} representing the canonical (i.e. 
non-prefixed) address * name and the {@code org.apache.activemq.artemis.api.core.RoutingType} corresponding to that prefix. */ AddressInfo getAddressAndRoutingType(AddressInfo addressInfo); /** * Get the canonical (i.e. non-prefixed) address and the corresponding routing-type. * * @param address the address to inspect * @param defaultRoutingTypes the {@code java.util.Set} of {@code org.apache.activemq.artemis.api.core.RoutingType} * objects to return if no prefix match is found. * @return a {@code org.apache.activemq.artemis.api.core.Pair} representing the canonical (i.e. non-prefixed) address * name and the {@code java.util.Set} of {@code org.apache.activemq.artemis.api.core.RoutingType} objects * corresponding to that prefix. */ Pair<SimpleString, EnumSet<RoutingType>> getAddressAndRoutingTypes(SimpleString address, EnumSet<RoutingType> defaultRoutingTypes); void addProducer(ServerProducer serverProducer); void removeProducer(String ID); Map<String, ServerProducer> getServerProducers(); String getDefaultAddress(); int getConsumerCount(); int getProducerCount(); int getDefaultConsumerWindowSize(SimpleString address); }
apache-2.0
cloudfoundry/php-buildpack
fixtures/cake_local_deps/vendor/cakephp/migrations/src/Util/UtilTrait.php
2302
<?php /** * Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) * * Licensed under The MIT License * Redistributions of files must retain the above copyright notice. * * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) * @link http://cakephp.org CakePHP(tm) Project * @license http://www.opensource.org/licenses/mit-license.php MIT License */ namespace Migrations\Util; use Cake\Core\Plugin as CorePlugin; use Cake\Utility\Inflector; use Symfony\Component\Console\Input\InputInterface; /** * Trait gathering useful methods needed in various places of the plugin */ trait UtilTrait { /** * Get the plugin name based on the current InputInterface * * @param \Symfony\Component\Console\Input\InputInterface $input Input of the current command. * @return string|null */ protected function getPlugin(InputInterface $input) { $plugin = $input->getOption('plugin') ?: null; return $plugin; } /** * Get the phinx table name used to store migrations data * * @param string $plugin Plugin name * @return string */ protected function getPhinxTable($plugin = null) { $table = 'phinxlog'; if (empty($plugin)) { return $table; } $plugin = Inflector::underscore($plugin) . '_'; $plugin = str_replace(['\\', '/', '.'], '_', $plugin); return $plugin . $table; } /** * Get the migrations or seeds files path based on the current InputInterface * * @param \Symfony\Component\Console\Input\InputInterface $input Input of the current command. * @param string $default Default folder to set if no source option is found in the $input param * @return string */ protected function getOperationsPath(InputInterface $input, $default = 'Migrations') { $folder = $input->getOption('source') ?: $default; $dir = ROOT . DS . 'config' . DS . $folder; if (defined('CONFIG')) { $dir = CONFIG . $folder; } $plugin = $this->getPlugin($input); if ($plugin !== null) { $dir = CorePlugin::path($plugin) . 'config' . DS . $folder; } return $dir; } }
apache-2.0
kl0u/flink
flink-python/pyflink/table/tests/test_row_based_operation.py
16354
################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ from pandas.util.testing import assert_frame_equal from pyflink.common import Row from pyflink.table import expressions as expr, ListView from pyflink.table.types import DataTypes from pyflink.table.udf import udf, udtf, udaf, AggregateFunction, TableAggregateFunction, udtaf from pyflink.testing import source_sink_utils from pyflink.testing.test_case_utils import PyFlinkBlinkBatchTableTestCase, \ PyFlinkBlinkStreamTableTestCase class RowBasedOperationTests(object): def test_map(self): t = self.t_env.from_elements( [(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)], DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.TINYINT()), DataTypes.FIELD("b", DataTypes.SMALLINT()), DataTypes.FIELD("c", DataTypes.INT())])) table_sink = source_sink_utils.TestAppendSink( ['a', 'b'], [DataTypes.BIGINT(), DataTypes.BIGINT()]) self.t_env.register_table_sink("Results", table_sink) func = udf(lambda x: Row(x + 1, x * x), result_type=DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.BIGINT()), DataTypes.FIELD("b", DataTypes.BIGINT())])) t.map(func(t.b)).alias("a", "b") \ .map(func(t.a)).alias("a", "b") \ .execute_insert("Results") \ .wait() actual = source_sink_utils.results() self.assert_equals( actual, ["+I[4, 9]", "+I[3, 4]", "+I[7, 36]", "+I[10, 81]", "+I[5, 16]"]) def test_map_with_pandas_udf(self): t = self.t_env.from_elements( [(1, Row(2, 3)), (2, Row(1, 3)), (1, Row(5, 4)), (1, Row(8, 6)), (2, Row(3, 4))], DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.TINYINT()), DataTypes.FIELD("b", DataTypes.ROW([DataTypes.FIELD("c", DataTypes.INT()), DataTypes.FIELD("d", DataTypes.INT())]))])) table_sink = source_sink_utils.TestAppendSink( ['a', 'b'], [DataTypes.BIGINT(), DataTypes.BIGINT()]) self.t_env.register_table_sink("Results", table_sink) def func(x): import pandas as pd res = pd.concat([x.a, x.c + x.d], axis=1) return res def func2(x): return x * 2 pandas_udf = udf(func, result_type=DataTypes.ROW( [DataTypes.FIELD("c", DataTypes.BIGINT()), DataTypes.FIELD("d", DataTypes.BIGINT())]), func_type='pandas') pandas_udf_2 = udf(func2, result_type=DataTypes.ROW( [DataTypes.FIELD("c", DataTypes.BIGINT()), DataTypes.FIELD("d", DataTypes.BIGINT())]), func_type='pandas') t.map(pandas_udf).map(pandas_udf_2).execute_insert("Results").wait() actual = source_sink_utils.results() self.assert_equals( actual, ["+I[4, 8]", "+I[2, 10]", "+I[2, 28]", "+I[2, 18]", "+I[4, 14]"]) def test_flat_map(self): t = self.t_env.from_elements( [(1, "2,3"), (2, "1"), (1, "5,6,7")], DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.TINYINT()), DataTypes.FIELD("b", DataTypes.STRING())])) table_sink = 
source_sink_utils.TestAppendSink( ['a', 'b', 'c', 'd', 'e', 'f'], [DataTypes.BIGINT(), DataTypes.STRING(), DataTypes.BIGINT(), DataTypes.STRING(), DataTypes.BIGINT(), DataTypes.STRING()]) self.t_env.register_table_sink("Results", table_sink) @udtf(result_types=[DataTypes.INT(), DataTypes.STRING()]) def split(x): for s in x[1].split(","): yield x[0], s t.flat_map(split) \ .flat_map(split) \ .join_lateral(split.alias("a", "b")) \ .left_outer_join_lateral(split.alias("c", "d")) \ .execute_insert("Results") \ .wait() actual = source_sink_utils.results() self.assert_equals( actual, ["+I[1, 2, 1, 2, 1, 2]", "+I[1, 3, 1, 3, 1, 3]", "+I[2, 1, 2, 1, 2, 1]", "+I[1, 5, 1, 5, 1, 5]", "+I[1, 6, 1, 6, 1, 6]", "+I[1, 7, 1, 7, 1, 7]"]) class BatchRowBasedOperationITTests(RowBasedOperationTests, PyFlinkBlinkBatchTableTestCase): def test_aggregate_with_pandas_udaf(self): t = self.t_env.from_elements( [(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)], DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.TINYINT()), DataTypes.FIELD("b", DataTypes.SMALLINT()), DataTypes.FIELD("c", DataTypes.INT())])) table_sink = source_sink_utils.TestAppendSink( ['a', 'b', 'c'], [DataTypes.TINYINT(), DataTypes.FLOAT(), DataTypes.INT()]) self.t_env.register_table_sink("Results", table_sink) pandas_udaf = udaf(lambda pd: (pd.b.mean(), pd.a.max()), result_type=DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.FLOAT()), DataTypes.FIELD("b", DataTypes.INT())]), func_type="pandas") t.select(t.a, t.b) \ .group_by(t.a) \ .aggregate(pandas_udaf) \ .select("*") \ .execute_insert("Results") \ .wait() actual = source_sink_utils.results() self.assert_equals(actual, ["+I[1, 5.0, 1]", "+I[2, 2.0, 2]"]) def test_aggregate_with_pandas_udaf_without_keys(self): t = self.t_env.from_elements( [(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)], DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.TINYINT()), DataTypes.FIELD("b", DataTypes.SMALLINT()), DataTypes.FIELD("c", DataTypes.INT())])) table_sink = source_sink_utils.TestAppendSink( ['a', 'b'], [DataTypes.FLOAT(), DataTypes.INT()]) self.t_env.register_table_sink("Results", table_sink) pandas_udaf = udaf(lambda pd: Row(pd.b.mean(), pd.b.max()), result_type=DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.FLOAT()), DataTypes.FIELD("b", DataTypes.INT())]), func_type="pandas") t.select(t.b) \ .aggregate(pandas_udaf.alias("a", "b")) \ .select("a, b") \ .execute_insert("Results") \ .wait() actual = source_sink_utils.results() self.assert_equals(actual, ["+I[3.8, 8]"]) def test_window_aggregate_with_pandas_udaf(self): import datetime from pyflink.table.window import Tumble t = self.t_env.from_elements( [ (1, 2, 3, datetime.datetime(2018, 3, 11, 3, 10, 0, 0)), (3, 2, 4, datetime.datetime(2018, 3, 11, 3, 10, 0, 0)), (2, 1, 2, datetime.datetime(2018, 3, 11, 3, 10, 0, 0)), (1, 3, 1, datetime.datetime(2018, 3, 11, 3, 40, 0, 0)), (1, 8, 5, datetime.datetime(2018, 3, 11, 4, 20, 0, 0)), (2, 3, 6, datetime.datetime(2018, 3, 11, 3, 30, 0, 0)) ], DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.TINYINT()), DataTypes.FIELD("b", DataTypes.SMALLINT()), DataTypes.FIELD("c", DataTypes.INT()), DataTypes.FIELD("rowtime", DataTypes.TIMESTAMP(3))])) table_sink = source_sink_utils.TestAppendSink( ['a', 'b', 'c'], [ DataTypes.TIMESTAMP(3), DataTypes.FLOAT(), DataTypes.INT() ]) self.t_env.register_table_sink("Results", table_sink) pandas_udaf = udaf(lambda pd: (pd.b.mean(), pd.b.max()), result_type=DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.FLOAT()), DataTypes.FIELD("b", DataTypes.INT())]), func_type="pandas") tumble_window = 
Tumble.over(expr.lit(1).hours) \ .on(expr.col("rowtime")) \ .alias("w") t.select(t.b, t.rowtime) \ .window(tumble_window) \ .group_by("w") \ .aggregate(pandas_udaf.alias("d", "e")) \ .select("w.rowtime, d, e") \ .execute_insert("Results") \ .wait() actual = source_sink_utils.results() self.assert_equals(actual, ["+I[2018-03-11 03:59:59.999, 2.2, 3]", "+I[2018-03-11 04:59:59.999, 8.0, 8]"]) class StreamRowBasedOperationITTests(RowBasedOperationTests, PyFlinkBlinkStreamTableTestCase): def test_aggregate(self): import pandas as pd t = self.t_env.from_elements( [(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)], DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.BIGINT()), DataTypes.FIELD("b", DataTypes.SMALLINT()), DataTypes.FIELD("c", DataTypes.INT())])) function = CountAndSumAggregateFunction() agg = udaf(function, result_type=function.get_result_type(), accumulator_type=function.get_accumulator_type(), name=str(function.__class__.__name__)) result = t.group_by(t.a) \ .aggregate(agg.alias("c", "d")) \ .select("a, c, d") \ .to_pandas() assert_frame_equal(result.sort_values('a').reset_index(drop=True), pd.DataFrame([[1, 3, 15], [2, 2, 4]], columns=['a', 'c', 'd'])) def test_flat_aggregate(self): import pandas as pd mytop = udtaf(Top2()) t = self.t_env.from_elements([(1, 'Hi', 'Hello'), (3, 'Hi', 'hi'), (5, 'Hi2', 'hi'), (7, 'Hi', 'Hello'), (2, 'Hi', 'Hello')], ['a', 'b', 'c']) result = t.select(t.a, t.c) \ .group_by(t.c) \ .flat_aggregate(mytop) \ .select(t.a) \ .flat_aggregate(mytop.alias("b")) \ .select("b") \ .to_pandas() assert_frame_equal(result, pd.DataFrame([[7], [5]], columns=['b'])) def test_flat_aggregate_list_view(self): import pandas as pd my_concat = udtaf(ListViewConcatTableAggregateFunction()) self.t_env.get_config().get_configuration().set_string( "python.fn-execution.bundle.size", "2") # trigger the cache eviction in a bundle. 
self.t_env.get_config().get_configuration().set_string( "python.state.cache-size", "2") t = self.t_env.from_elements([(1, 'Hi', 'Hello'), (3, 'Hi', 'hi'), (3, 'Hi2', 'hi'), (3, 'Hi', 'hi'), (2, 'Hi', 'Hello'), (1, 'Hi2', 'Hello'), (3, 'Hi3', 'hi'), (3, 'Hi2', 'Hello'), (3, 'Hi3', 'hi'), (2, 'Hi3', 'Hello')], ['a', 'b', 'c']) result = t.group_by(t.c) \ .flat_aggregate(my_concat(t.b, ',').alias("b")) \ .select(t.b, t.c) \ .alias("a, c") assert_frame_equal(result.to_pandas().sort_values('c').reset_index(drop=True), pd.DataFrame([["Hi,Hi,Hi2,Hi2,Hi3", "Hello"], ["Hi,Hi,Hi2,Hi2,Hi3", "Hello"], ["Hi,Hi2,Hi,Hi3,Hi3", "hi"], ["Hi,Hi2,Hi,Hi3,Hi3", "hi"]], columns=['a', 'c'])) class CountAndSumAggregateFunction(AggregateFunction): def get_value(self, accumulator): from pyflink.common import Row return Row(accumulator[0], accumulator[1]) def create_accumulator(self): from pyflink.common import Row return Row(0, 0) def accumulate(self, accumulator, *args): accumulator[0] += 1 accumulator[1] += args[0][1] def retract(self, accumulator, *args): accumulator[0] -= 1 accumulator[1] -= args[0][1] def merge(self, accumulator, accumulators): for other_acc in accumulators: accumulator[0] += other_acc[0] accumulator[1] += other_acc[1] def get_accumulator_type(self): return DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.BIGINT()), DataTypes.FIELD("b", DataTypes.BIGINT())]) def get_result_type(self): return DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.BIGINT()), DataTypes.FIELD("b", DataTypes.BIGINT())]) class Top2(TableAggregateFunction): def emit_value(self, accumulator): yield Row(accumulator[0]) yield Row(accumulator[1]) def create_accumulator(self): return [None, None] def accumulate(self, accumulator, *args): if args[0][0] is not None: if accumulator[0] is None or args[0][0] > accumulator[0]: accumulator[1] = accumulator[0] accumulator[0] = args[0][0] elif accumulator[1] is None or args[0][0] > accumulator[1]: accumulator[1] = args[0][0] def retract(self, accumulator, *args): accumulator[0] = accumulator[0] - 1 def merge(self, accumulator, accumulators): for other_acc in accumulators: self.accumulate(accumulator, other_acc[0]) self.accumulate(accumulator, other_acc[1]) def get_accumulator_type(self): return DataTypes.ARRAY(DataTypes.BIGINT()) def get_result_type(self): return DataTypes.ROW( [DataTypes.FIELD("a", DataTypes.BIGINT())]) class ListViewConcatTableAggregateFunction(TableAggregateFunction): def emit_value(self, accumulator): result = accumulator[1].join(accumulator[0]) yield Row(result) yield Row(result) def create_accumulator(self): return Row(ListView(), '') def accumulate(self, accumulator, *args): accumulator[1] = args[1] accumulator[0].add(args[0]) def retract(self, accumulator, *args): raise NotImplementedError def get_accumulator_type(self): return DataTypes.ROW([ DataTypes.FIELD("f0", DataTypes.LIST_VIEW(DataTypes.STRING())), DataTypes.FIELD("f1", DataTypes.BIGINT())]) def get_result_type(self): return DataTypes.ROW([DataTypes.FIELD("a", DataTypes.STRING())]) if __name__ == '__main__': import unittest try: import xmlrunner testRunner = xmlrunner.XMLTestRunner(output='target/test-reports') except ImportError: testRunner = None unittest.main(testRunner=testRunner, verbosity=2)
apache-2.0
kahing/minio
pkg/crypto/md5/md5_test.go
501
package md5_test import ( "bytes" "encoding/hex" "testing" "github.com/minio/minio/pkg/crypto/md5" . "gopkg.in/check.v1" ) func Test(t *testing.T) { TestingT(t) } type MySuite struct{} var _ = Suite(&MySuite{}) func (s *MySuite) TestMd5sum(c *C) { testString := []byte("Test string") expectedHash, _ := hex.DecodeString("0fd3dbec9730101bff92acc820befc34") hash, err := md5.Sum(bytes.NewBuffer(testString)) c.Assert(err, IsNil) c.Assert(bytes.Equal(expectedHash, hash), Equals, true) }
apache-2.0
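The md5 test above exercises Sum against an in-memory bytes.Buffer. The argument appears to be a plain io.Reader, so the same helper should also work for streaming a file; the sketch below assumes that reader-based signature, and the input path is hypothetical.

```go
package main

import (
	"encoding/hex"
	"fmt"
	"log"
	"os"

	"github.com/minio/minio/pkg/crypto/md5"
)

func main() {
	// Hypothetical input file; any io.Reader should do if md5.Sum takes a
	// reader, as the bytes.Buffer usage in the test suggests.
	f, err := os.Open("testdata/payload.bin")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	sum, err := md5.Sum(f)
	if err != nil {
		log.Fatal(err)
	}
	// Print the digest as hex, mirroring the expected-hash comparison in the test.
	fmt.Println(hex.EncodeToString(sum))
}
```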
wbengine/TRF
src/base/wb-linux.cpp
1235
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // Copyright 2014-2015 Tsinghua University // Author: wb.th08@gmail.com (Bin Wang), ozj@tsinghua.edu.cn (Zhijian Ou) // // All h, cpp, cc, and script files (e.g. bat, sh, pl, py) should include the above // license declaration. Different coding languages may use different comment styles. #include "wb-linux.h" #ifdef __linux namespace wb { const char *wday[] = {"Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"}; void _strdate(char *str) { time_t timep; struct tm *p; time(&timep); p = localtime(&timep); sprintf(str, "%d%d%d %s", (1900+p->tm_year) /* tm_year is years since 1900 */, (1+p->tm_mon), p->tm_mday, wday[p->tm_wday]); } void _strtime(char *str) { time_t timep; struct tm *p; time(&timep); p = localtime(&timep); sprintf(str, "%d:%d:%d", p->tm_hour, p->tm_min, p->tm_sec); } void getch() { } } #endif
apache-2.0
akash1808/python-barbicanclient
functionaltests/cli/v1/smoke/test_help.py
1424
# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from functionaltests.cli.base import CmdLineTestCase from functionaltests.cli.v1.behaviors import base_behaviors from functionaltests import utils from testtools import testcase @utils.parameterized_test_case class HelpTestCase(CmdLineTestCase): def setUp(self): super(HelpTestCase, self).setUp() self.help_behaviors = base_behaviors.BaseBehaviors() def tearDown(self): super(HelpTestCase, self).tearDown() @utils.parameterized_dataset({ 'dash_h': [['-h']], 'doubledash_help': [['--help']] }) @testcase.attr('positive') def test_help(self, argv): stdout, stderr = self.help_behaviors.issue_barbican_command(argv) self.assertIsNotNone(stdout, "{0} returned None".format(argv)) self.assertGreater(len(stdout), 0, "{0} invalid length".format(argv))
apache-2.0
baldimir/drools
kie-dmn/kie-dmn-legacy-tests/src/test/java/org/kie/dmn/legacy/tests/validation/v1_1/ValidatorBusinessKnowledgeModelTest.java
6328
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.dmn.legacy.tests.validation.v1_1; import java.io.IOException; import java.io.Reader; import java.util.List; import org.junit.Test; import org.kie.dmn.api.core.DMNMessage; import org.kie.dmn.api.core.DMNMessageType; import org.kie.dmn.validation.AbstractValidatorTest; import org.kie.dmn.validation.ValidatorUtil; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.kie.dmn.validation.DMNValidator.Validation.VALIDATE_COMPILATION; import static org.kie.dmn.validation.DMNValidator.Validation.VALIDATE_MODEL; import static org.kie.dmn.validation.DMNValidator.Validation.VALIDATE_SCHEMA; public class ValidatorBusinessKnowledgeModelTest extends AbstractValidatorTest { @Test public void testBKM_MISSING_VAR_ReaderInput() throws IOException { try (final Reader reader = getReader("businessknowledgemodel/BKM_MISSING_VAR.dmn")) { final List<DMNMessage> validate = validator.validate( reader, VALIDATE_SCHEMA, VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertTrue(validate.stream().anyMatch(p -> p.getMessageType().equals(DMNMessageType.MISSING_VARIABLE))); } } @Test public void testBKM_MISSING_VAR_FileInput() { final List<DMNMessage> validate = validator.validate( getFile("businessknowledgemodel/BKM_MISSING_VAR.dmn"), VALIDATE_SCHEMA, VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertTrue(validate.stream().anyMatch(p -> p.getMessageType().equals(DMNMessageType.MISSING_VARIABLE))); } @Test public void testBKM_MISSING_VAR_DefinitionsInput() { final List<DMNMessage> validate = validator.validate( getDefinitions("businessknowledgemodel/BKM_MISSING_VAR.dmn", "https://github.com/kiegroup/kie-dmn", "BKM_MISSING_VAR"), VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertTrue(validate.stream().anyMatch(p -> p.getMessageType().equals(DMNMessageType.MISSING_VARIABLE))); } @Test public void testBKM_MISMATCH_VAR_ReaderInput() throws IOException { try (final Reader reader = getReader("businessknowledgemodel/BKM_MISMATCH_VAR.dmn")) { final List<DMNMessage> validate = validator.validate( reader, VALIDATE_SCHEMA, VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertTrue(validate.stream().anyMatch(p -> p.getMessageType().equals(DMNMessageType.VARIABLE_NAME_MISMATCH))); } } @Test public void testBKM_MISMATCH_VAR_FileInput() { final List<DMNMessage> validate = validator.validate( getFile("businessknowledgemodel/BKM_MISMATCH_VAR.dmn"), VALIDATE_SCHEMA, VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertTrue(validate.stream().anyMatch(p -> 
p.getMessageType().equals(DMNMessageType.VARIABLE_NAME_MISMATCH))); } @Test public void testBKM_MISMATCH_VAR_DefinitionsInput() { final List<DMNMessage> validate = validator.validate( getDefinitions("businessknowledgemodel/BKM_MISMATCH_VAR.dmn", "https://github.com/kiegroup/kie-dmn", "BKM_MISSING_VAR"), VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertTrue(validate.stream().anyMatch(p -> p.getMessageType().equals(DMNMessageType.VARIABLE_NAME_MISMATCH))); } @Test public void testBKM_MISSING_EXPR_ReaderInput() throws IOException { try (final Reader reader = getReader("businessknowledgemodel/BKM_MISSING_EXPR.dmn")) { final List<DMNMessage> validate = validator.validate( reader, VALIDATE_SCHEMA, VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertThat(validate.get(0).toString(), validate.get(0).getMessageType(), is(DMNMessageType.MISSING_EXPRESSION)); } } @Test public void testBKM_MISSING_EXPR_FileInput() { final List<DMNMessage> validate = validator.validate( getFile("businessknowledgemodel/BKM_MISSING_EXPR.dmn"), VALIDATE_SCHEMA, VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertThat(validate.get(0).toString(), validate.get(0).getMessageType(), is(DMNMessageType.MISSING_EXPRESSION)); } @Test public void testBKM_MISSING_EXPR_DefinitionsInput() { final List<DMNMessage> validate = validator.validate( getDefinitions("businessknowledgemodel/BKM_MISSING_EXPR.dmn", "https://github.com/kiegroup/kie-dmn", "BKM_MISSING_EXPR"), VALIDATE_MODEL, VALIDATE_COMPILATION); assertThat(ValidatorUtil.formatMessages(validate), validate.size(), is(1)); assertThat(validate.get(0).toString(), validate.get(0).getMessageType(), is(DMNMessageType.MISSING_EXPRESSION)); } }
apache-2.0
philips/shortbread
Godeps/_workspace/src/github.com/libgit2/git2go/index.go
6728
package git /* #include <git2.h> #include <git2/errors.h> */ import "C" import ( "fmt" "runtime" "time" "unsafe" ) type Index struct { ptr *C.git_index } type IndexEntry struct { Ctime time.Time Mtime time.Time Mode uint Uid uint Gid uint Size uint Id *Oid Path string } func newIndexEntryFromC(entry *C.git_index_entry) *IndexEntry { if entry == nil { return nil } return &IndexEntry{ time.Unix(int64(entry.ctime.seconds), int64(entry.ctime.nanoseconds)), time.Unix(int64(entry.mtime.seconds), int64(entry.mtime.nanoseconds)), uint(entry.mode), uint(entry.uid), uint(entry.gid), uint(entry.file_size), newOidFromC(&entry.id), C.GoString(entry.path), } } func populateCIndexEntry(source *IndexEntry, dest *C.git_index_entry) { dest.ctime.seconds = C.git_time_t(source.Ctime.Unix()) dest.ctime.nanoseconds = C.uint(source.Ctime.UnixNano()) dest.mtime.seconds = C.git_time_t(source.Mtime.Unix()) dest.mtime.nanoseconds = C.uint(source.Mtime.UnixNano()) dest.mode = C.uint(source.Mode) dest.uid = C.uint(source.Uid) dest.gid = C.uint(source.Gid) dest.file_size = C.git_off_t(source.Size) dest.id = *source.Id.toC() dest.path = C.CString(source.Path) } func freeCIndexEntry(entry *C.git_index_entry) { C.free(unsafe.Pointer(entry.path)) } func newIndexFromC(ptr *C.git_index) *Index { idx := &Index{ptr} runtime.SetFinalizer(idx, (*Index).Free) return idx } // NewIndex allocates a new index. It won't be associated with any // file on the filesystem or repository func NewIndex() (*Index, error) { var ptr *C.git_index runtime.LockOSThread() defer runtime.UnlockOSThread() if err := C.git_index_new(&ptr); err < 0 { return nil, MakeGitError(err) } return &Index{ptr: ptr}, nil } // Add adds or replaces the given entry to the index, making a copy of // the data func (v *Index) Add(entry *IndexEntry) error { var centry C.git_index_entry populateCIndexEntry(entry, &centry) defer freeCIndexEntry(&centry) runtime.LockOSThread() defer runtime.UnlockOSThread() if err := C.git_index_add(v.ptr, &centry); err < 0 { return MakeGitError(err) } return nil } func (v *Index) AddByPath(path string) error { cstr := C.CString(path) defer C.free(unsafe.Pointer(cstr)) runtime.LockOSThread() defer runtime.UnlockOSThread() ret := C.git_index_add_bypath(v.ptr, cstr) if ret < 0 { return MakeGitError(ret) } return nil } func (v *Index) WriteTreeTo(repo *Repository) (*Oid, error) { oid := new(Oid) runtime.LockOSThread() defer runtime.UnlockOSThread() ret := C.git_index_write_tree_to(oid.toC(), v.ptr, repo.ptr) if ret < 0 { return nil, MakeGitError(ret) } return oid, nil } func (v *Index) WriteTree() (*Oid, error) { oid := new(Oid) runtime.LockOSThread() defer runtime.UnlockOSThread() ret := C.git_index_write_tree(oid.toC(), v.ptr) if ret < 0 { return nil, MakeGitError(ret) } return oid, nil } func (v *Index) Write() error { runtime.LockOSThread() defer runtime.UnlockOSThread() ret := C.git_index_write(v.ptr) if ret < 0 { return MakeGitError(ret) } return nil } func (v *Index) Free() { runtime.SetFinalizer(v, nil) C.git_index_free(v.ptr) } func (v *Index) EntryCount() uint { return uint(C.git_index_entrycount(v.ptr)) } func (v *Index) EntryByIndex(index uint) (*IndexEntry, error) { centry := C.git_index_get_byindex(v.ptr, C.size_t(index)) if centry == nil { return nil, fmt.Errorf("Index out of Bounds") } return newIndexEntryFromC(centry), nil } func (v *Index) HasConflicts() bool { return C.git_index_has_conflicts(v.ptr) != 0 } func (v *Index) CleanupConflicts() { C.git_index_conflict_cleanup(v.ptr) } func (v *Index) AddConflict(ancestor 
*IndexEntry, our *IndexEntry, their *IndexEntry) error { var cancestor *C.git_index_entry var cour *C.git_index_entry var ctheir *C.git_index_entry if ancestor != nil { cancestor = &C.git_index_entry{} populateCIndexEntry(ancestor, cancestor) defer freeCIndexEntry(cancestor) } if our != nil { cour = &C.git_index_entry{} populateCIndexEntry(our, cour) defer freeCIndexEntry(cour) } if their != nil { ctheir = &C.git_index_entry{} populateCIndexEntry(their, ctheir) defer freeCIndexEntry(ctheir) } runtime.LockOSThread() defer runtime.UnlockOSThread() ecode := C.git_index_conflict_add(v.ptr, cancestor, cour, ctheir) if ecode < 0 { return MakeGitError(ecode) } return nil } type IndexConflict struct { Ancestor *IndexEntry Our *IndexEntry Their *IndexEntry } func (v *Index) GetConflict(path string) (IndexConflict, error) { var cancestor *C.git_index_entry var cour *C.git_index_entry var ctheir *C.git_index_entry cpath := C.CString(path) defer C.free(unsafe.Pointer(cpath)) runtime.LockOSThread() defer runtime.UnlockOSThread() ecode := C.git_index_conflict_get(&cancestor, &cour, &ctheir, v.ptr, cpath) if ecode < 0 { return IndexConflict{}, MakeGitError(ecode) } return IndexConflict{ Ancestor: newIndexEntryFromC(cancestor), Our: newIndexEntryFromC(cour), Their: newIndexEntryFromC(ctheir), }, nil } func (v *Index) RemoveConflict(path string) error { cpath := C.CString(path) defer C.free(unsafe.Pointer(cpath)) runtime.LockOSThread() defer runtime.UnlockOSThread() ecode := C.git_index_conflict_remove(v.ptr, cpath) if ecode < 0 { return MakeGitError(ecode) } return nil } type IndexConflictIterator struct { ptr *C.git_index_conflict_iterator index *Index } func newIndexConflictIteratorFromC(index *Index, ptr *C.git_index_conflict_iterator) *IndexConflictIterator { i := &IndexConflictIterator{ptr: ptr, index: index} runtime.SetFinalizer(i, (*IndexConflictIterator).Free) return i } func (v *IndexConflictIterator) Index() *Index { return v.index } func (v *IndexConflictIterator) Free() { runtime.SetFinalizer(v, nil) C.git_index_conflict_iterator_free(v.ptr) } func (v *Index) ConflictIterator() (*IndexConflictIterator, error) { var i *C.git_index_conflict_iterator runtime.LockOSThread() defer runtime.UnlockOSThread() ecode := C.git_index_conflict_iterator_new(&i, v.ptr) if ecode < 0 { return nil, MakeGitError(ecode) } return newIndexConflictIteratorFromC(v, i), nil } func (v *IndexConflictIterator) Next() (IndexConflict, error) { var cancestor *C.git_index_entry var cour *C.git_index_entry var ctheir *C.git_index_entry runtime.LockOSThread() defer runtime.UnlockOSThread() ecode := C.git_index_conflict_next(&cancestor, &cour, &ctheir, v.ptr) if ecode < 0 { return IndexConflict{}, MakeGitError(ecode) } return IndexConflict{ Ancestor: newIndexEntryFromC(cancestor), Our: newIndexEntryFromC(cour), Their: newIndexEntryFromC(ctheir), }, nil }
apache-2.0
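The index.go bindings above cover staging (Add, AddByPath), tree writing (Write, WriteTree, WriteTreeTo) and entry inspection (EntryCount, EntryByIndex). A minimal staging sketch follows; git.OpenRepository and repo.Index() are assumed to exist in this vintage of git2go since they are not part of the excerpt, and the repository path and file name are hypothetical.

```go
package main

import (
	"fmt"
	"log"

	git "github.com/libgit2/git2go"
)

func main() {
	// Hypothetical repository; OpenRepository is assumed, not shown above.
	repo, err := git.OpenRepository("/tmp/example-repo")
	if err != nil {
		log.Fatal(err)
	}

	// repo.Index() is likewise assumed; the excerpt only defines the Index type.
	idx, err := repo.Index()
	if err != nil {
		log.Fatal(err)
	}
	defer idx.Free()

	// Stage a file by path, as AddByPath wraps git_index_add_bypath.
	if err := idx.AddByPath("README.md"); err != nil {
		log.Fatal(err)
	}

	// Walk the staged entries via EntryCount/EntryByIndex.
	for i := uint(0); i < idx.EntryCount(); i++ {
		entry, err := idx.EntryByIndex(i)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(entry.Path)
	}

	// Persist the index and turn it into a tree object.
	if err := idx.Write(); err != nil {
		log.Fatal(err)
	}
	treeOid, err := idx.WriteTree()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("tree oid:", treeOid)
}
```

Conflict handling would follow the same shape, iterating ConflictIterator().Next() until it reports the end of the conflict list.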
google/binnavi
src/main/java/com/google/security/zynamics/zylib/yfileswrap/gui/zygraph/CRegisterHotKeys.java
6946
// Copyright 2011-2016 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.security.zynamics.zylib.yfileswrap.gui.zygraph; import com.google.security.zynamics.zylib.general.ClipboardHelpers; import com.google.security.zynamics.zylib.gui.zygraph.functions.NodeFunctions; import com.google.security.zynamics.zylib.gui.zygraph.helpers.GraphHelpers; import com.google.security.zynamics.zylib.gui.zygraph.helpers.SelectedVisibleFilter; import com.google.security.zynamics.zylib.gui.zygraph.nodes.IViewNode; import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.functions.MoveFunctions; import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.functions.ZoomFunctions; import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.nodes.ZyGraphNode; import y.view.Graph2DView; import y.view.Graph2DViewActions; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.util.Collection; import javax.swing.AbstractAction; import javax.swing.ActionMap; import javax.swing.InputMap; import javax.swing.JComponent; import javax.swing.KeyStroke; public class CRegisterHotKeys { public static <NodeType extends ZyGraphNode<?>> void register( final AbstractZyGraph<NodeType, ?> graph) { final Graph2DView view = graph.getView(); final Graph2DViewActions actions = new Graph2DViewActions(view); final ActionMap amap = actions.createActionMap(); final InputMap imap = actions.createDefaultInputMap(amap); view.setActionMap(amap); view.setInputMap(JComponent.WHEN_FOCUSED, imap); view.getCanvasComponent().setActionMap(amap); view.getCanvasComponent().setInputMap(JComponent.WHEN_FOCUSED, imap); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_DOWN, 0), "DOWN"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_UP, 0), "UP"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, 0), "LEFT"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, 0), "RIGHT"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_PLUS, 0), "+"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_MINUS, 0), "-"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_M, 0), "m"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_S, 0), "s"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_LESS, 0), "<"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_A, KeyEvent.CTRL_DOWN_MASK), "SELECT_VISIBLE_NODES"); imap.put(KeyStroke.getKeyStroke(KeyEvent.VK_C, KeyEvent.CTRL_DOWN_MASK), "COPY_CONTENT_FROM_SELECTED_NODES"); amap.remove(Graph2DViewActions.DELETE_SELECTION); amap.remove(Graph2DViewActions.EDIT_LABEL); registerActions(graph); } public static <NodeType extends ZyGraphNode<?>> void registerActions( final AbstractZyGraph<NodeType, ?> graph) { final ActionMap amap = graph.getView().getCanvasComponent().getActionMap(); amap.put("DOWN", new CActionHotKey<NodeType>("DOWN", graph)); amap.put("UP", new CActionHotKey<NodeType>("UP", graph)); amap.put("LEFT", new CActionHotKey<NodeType>("LEFT", graph)); amap.put("RIGHT", new CActionHotKey<NodeType>("RIGHT", graph)); amap.put("+", new CActionHotKey<NodeType>("+", graph)); amap.put("-", new 
CActionHotKey<NodeType>("-", graph)); amap.put("m", new CActionHotKey<NodeType>("m", graph)); amap.put("s", new CActionHotKey<NodeType>("s", graph)); amap.put("<", new CActionHotKey<NodeType>("<", graph)); amap.put("SELECT_VISIBLE_NODES", new CActionHotKey<NodeType>("SELECT_VISIBLE_NODES", graph)); amap.put("COPY_CONTENT_FROM_SELECTED_NODES", new CActionHotKey<NodeType>( "COPY_CONTENT_FROM_SELECTED_NODES", graph)); graph.getView().setActionMap(amap); graph.getView().getCanvasComponent().setActionMap(amap); } public static <NodeType extends ZyGraphNode<?>> void unregisterActions( final AbstractZyGraph<NodeType, ?> graph) { final ActionMap amap1 = graph.getView().getCanvasComponent().getActionMap(); final ActionMap amap2 = graph.getView().getActionMap(); amap1.remove("F2"); amap1.remove("DOWN"); amap1.remove("UP"); amap1.remove("LEFT"); amap1.remove("RIGHT"); amap1.remove("+"); amap1.remove("-"); amap1.remove("m"); amap1.remove("s"); amap1.remove("<"); amap1.remove("SELECT_VISIBLE_NODES"); amap1.remove("COPY_CONTENT_FROM_SELECTED_NODES"); amap2.remove("DOWN"); amap2.remove("UP"); amap2.remove("LEFT"); amap2.remove("RIGHT"); amap2.remove("+"); amap2.remove("-"); amap2.remove("m"); amap2.remove("s"); amap2.remove("<"); amap2.remove("SELECT_VISIBLE_NODES"); amap2.remove("COPY_CONTENT_FROM_SELECTED_NODES"); } private static class CActionHotKey<NodeType extends ZyGraphNode<? extends IViewNode<?>>> extends AbstractAction { private static final long serialVersionUID = 4029488848855226091L; private final String m_action; private final AbstractZyGraph<NodeType, ?> m_graph; public CActionHotKey(final String action, final AbstractZyGraph<NodeType, ?> graph) { super(action); m_action = action; m_graph = graph; } @Override public void actionPerformed(final ActionEvent event) { if (m_action.equals("UP")) { MoveFunctions.pan(m_graph, 0, -1); } else if (m_action.equals("DOWN")) { MoveFunctions.pan(m_graph, 0, 1); } else if (m_action.equals("LEFT")) { MoveFunctions.pan(m_graph, -1, 0); } else if (m_action.equals("RIGHT")) { MoveFunctions.pan(m_graph, 1, 0); } else if (m_action.equals("+")) { m_graph.zoomIn(); } else if (m_action.equals("-")) { m_graph.zoomOut(); } else if (m_action.equals("m")) { m_graph.getView().fitContent(true); } else if (m_action.equals("s")) { ZoomFunctions .zoomToNodes(m_graph, SelectedVisibleFilter.filter(m_graph.getSelectedNodes())); } else if (m_action.equals("SELECT_VISIBLE_NODES")) { // Use a temporary variable to work around OpenJDK build problem. Original code is: // m_graph.selectNodes(NodeFunctions.getVisibleNodes(m_graph), true); final Collection<NodeType> nodes = NodeFunctions.getVisibleNodes(m_graph); m_graph.selectNodes(nodes, true); } else if (m_action.equals("COPY_CONTENT_FROM_SELECTED_NODES")) { ClipboardHelpers.copyToClipboard(GraphHelpers.getSelectedContent(m_graph)); } } } }
apache-2.0
redspread/localkube
vendor/github.com/coreos/etcd/client/client.go
13681
// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package client import ( "errors" "fmt" "io/ioutil" "math/rand" "net" "net/http" "net/url" "reflect" "sort" "strconv" "sync" "time" "github.com/coreos/etcd/Godeps/_workspace/src/golang.org/x/net/context" ) var ( ErrNoEndpoints = errors.New("client: no endpoints available") ErrTooManyRedirects = errors.New("client: too many redirects") ErrClusterUnavailable = errors.New("client: etcd cluster is unavailable or misconfigured") errTooManyRedirectChecks = errors.New("client: too many redirect checks") ) var DefaultRequestTimeout = 5 * time.Second var DefaultTransport CancelableTransport = &http.Transport{ Proxy: http.ProxyFromEnvironment, Dial: (&net.Dialer{ Timeout: 30 * time.Second, KeepAlive: 30 * time.Second, }).Dial, TLSHandshakeTimeout: 10 * time.Second, } type Config struct { // Endpoints defines a set of URLs (schemes, hosts and ports only) // that can be used to communicate with a logical etcd cluster. For // example, a three-node cluster could be provided like so: // // Endpoints: []string{ // "http://node1.example.com:2379", // "http://node2.example.com:2379", // "http://node3.example.com:2379", // } // // If multiple endpoints are provided, the Client will attempt to // use them all in the event that one or more of them are unusable. // // If Client.Sync is ever called, the Client may cache an alternate // set of endpoints to continue operation. Endpoints []string // Transport is used by the Client to drive HTTP requests. If not // provided, DefaultTransport will be used. Transport CancelableTransport // CheckRedirect specifies the policy for handling HTTP redirects. // If CheckRedirect is not nil, the Client calls it before // following an HTTP redirect. The sole argument is the number of // requests that have already been made. If CheckRedirect returns // an error, Client.Do will not make any further requests and return // the error back to the caller. // // If CheckRedirect is nil, the Client uses its default policy, // which is to stop after 10 consecutive requests. CheckRedirect CheckRedirectFunc // Username specifies the user credential to add as an authorization header Username string // Password is the password for the specified user to add as an authorization header // to the request. Password string // HeaderTimeoutPerRequest specifies the time limit to wait for response // header in a single request made by the Client. The timeout includes // connection time, any redirects, and header wait time. // // For non-watch GET request, server returns the response body immediately. // For PUT/POST/DELETE request, server will attempt to commit request // before responding, which is expected to take `100ms + 2 * RTT`. // For watch request, server returns the header immediately to notify Client // watch start. But if server is behind some kind of proxy, the response // header may be cached at proxy, and Client cannot rely on this behavior. // // In particular, wait requests ignore this timeout.
// // One API call may send multiple requests to different etcd servers until it // succeeds. Use context of the API to specify the overall timeout. // // A HeaderTimeoutPerRequest of zero means no timeout. HeaderTimeoutPerRequest time.Duration } func (cfg *Config) transport() CancelableTransport { if cfg.Transport == nil { return DefaultTransport } return cfg.Transport } func (cfg *Config) checkRedirect() CheckRedirectFunc { if cfg.CheckRedirect == nil { return DefaultCheckRedirect } return cfg.CheckRedirect } // CancelableTransport mimics net/http.Transport, but requires that // the object also support request cancellation. type CancelableTransport interface { http.RoundTripper CancelRequest(req *http.Request) } type CheckRedirectFunc func(via int) error // DefaultCheckRedirect follows up to 10 redirects, but no more. var DefaultCheckRedirect CheckRedirectFunc = func(via int) error { if via > 10 { return ErrTooManyRedirects } return nil } type Client interface { // Sync updates the internal cache of the etcd cluster's membership. Sync(context.Context) error // AutoSync periodically calls Sync() every given interval. // The recommended sync interval is 10 seconds to 1 minute, which does // not bring too much overhead to server and makes client catch up the // cluster change in time. // // The example to use it: // // for { // err := client.AutoSync(ctx, 10*time.Second) // if err == context.DeadlineExceeded || err == context.Canceled { // break // } // log.Print(err) // } AutoSync(context.Context, time.Duration) error // Endpoints returns a copy of the current set of API endpoints used // by Client to resolve HTTP requests. If Sync has ever been called, // this may differ from the initial Endpoints provided in the Config. Endpoints() []string // SetEndpoints sets the set of API endpoints used by Client to resolve // HTTP requests. 
If the given endpoints are not valid, an error will be // returned SetEndpoints(eps []string) error httpClient } func New(cfg Config) (Client, error) { c := &httpClusterClient{ clientFactory: newHTTPClientFactory(cfg.transport(), cfg.checkRedirect(), cfg.HeaderTimeoutPerRequest), rand: rand.New(rand.NewSource(int64(time.Now().Nanosecond()))), } if cfg.Username != "" { c.credentials = &credentials{ username: cfg.Username, password: cfg.Password, } } if err := c.SetEndpoints(cfg.Endpoints); err != nil { return nil, err } return c, nil } type httpClient interface { Do(context.Context, httpAction) (*http.Response, []byte, error) } func newHTTPClientFactory(tr CancelableTransport, cr CheckRedirectFunc, headerTimeout time.Duration) httpClientFactory { return func(ep url.URL) httpClient { return &redirectFollowingHTTPClient{ checkRedirect: cr, client: &simpleHTTPClient{ transport: tr, endpoint: ep, headerTimeout: headerTimeout, }, } } } type credentials struct { username string password string } type httpClientFactory func(url.URL) httpClient type httpAction interface { HTTPRequest(url.URL) *http.Request } type httpClusterClient struct { clientFactory httpClientFactory endpoints []url.URL pinned int credentials *credentials sync.RWMutex rand *rand.Rand } func (c *httpClusterClient) SetEndpoints(eps []string) error { if len(eps) == 0 { return ErrNoEndpoints } neps := make([]url.URL, len(eps)) for i, ep := range eps { u, err := url.Parse(ep) if err != nil { return err } neps[i] = *u } c.endpoints = shuffleEndpoints(c.rand, neps) // TODO: pin old endpoint if possible, and rebalance when new endpoint appears c.pinned = 0 return nil } func (c *httpClusterClient) Do(ctx context.Context, act httpAction) (*http.Response, []byte, error) { action := act c.RLock() leps := len(c.endpoints) eps := make([]url.URL, leps) n := copy(eps, c.endpoints) pinned := c.pinned if c.credentials != nil { action = &authedAction{ act: act, credentials: *c.credentials, } } c.RUnlock() if leps == 0 { return nil, nil, ErrNoEndpoints } if leps != n { return nil, nil, errors.New("unable to pick endpoint: copy failed") } var resp *http.Response var body []byte var err error cerr := &ClusterError{} for i := pinned; i < leps+pinned; i++ { k := i % leps hc := c.clientFactory(eps[k]) resp, body, err = hc.Do(ctx, action) if err != nil { cerr.Errors = append(cerr.Errors, err) // mask previous errors with context error, which is controlled by user if err == context.Canceled || err == context.DeadlineExceeded { return nil, nil, err } continue } if resp.StatusCode/100 == 5 { switch resp.StatusCode { case http.StatusInternalServerError, http.StatusServiceUnavailable: // TODO: make sure this is a no leader response cerr.Errors = append(cerr.Errors, fmt.Errorf("client: etcd member %s has no leader", eps[k].String())) default: cerr.Errors = append(cerr.Errors, fmt.Errorf("client: etcd member %s returns server error [%s]", eps[k].String(), http.StatusText(resp.StatusCode))) } continue } if k != pinned { c.Lock() c.pinned = k c.Unlock() } return resp, body, nil } return nil, nil, cerr } func (c *httpClusterClient) Endpoints() []string { c.RLock() defer c.RUnlock() eps := make([]string, len(c.endpoints)) for i, ep := range c.endpoints { eps[i] = ep.String() } return eps } func (c *httpClusterClient) Sync(ctx context.Context) error { mAPI := NewMembersAPI(c) ms, err := mAPI.List(ctx) if err != nil { return err } c.Lock() defer c.Unlock() eps := make([]string, 0) for _, m := range ms { eps = append(eps, m.ClientURLs...) 
} sort.Sort(sort.StringSlice(eps)) ceps := make([]string, len(c.endpoints)) for i, cep := range c.endpoints { ceps[i] = cep.String() } sort.Sort(sort.StringSlice(ceps)) // fast path if no change happens // this helps client to pin the endpoint when no cluster change if reflect.DeepEqual(eps, ceps) { return nil } return c.SetEndpoints(eps) } func (c *httpClusterClient) AutoSync(ctx context.Context, interval time.Duration) error { ticker := time.NewTicker(interval) defer ticker.Stop() for { err := c.Sync(ctx) if err != nil { return err } select { case <-ctx.Done(): return ctx.Err() case <-ticker.C: } } } type roundTripResponse struct { resp *http.Response err error } type simpleHTTPClient struct { transport CancelableTransport endpoint url.URL headerTimeout time.Duration } func (c *simpleHTTPClient) Do(ctx context.Context, act httpAction) (*http.Response, []byte, error) { req := act.HTTPRequest(c.endpoint) if err := printcURL(req); err != nil { return nil, nil, err } isWait := false if req != nil && req.URL != nil { ws := req.URL.Query().Get("wait") if len(ws) != 0 { var err error isWait, err = strconv.ParseBool(ws) if err != nil { return nil, nil, fmt.Errorf("wrong wait value %s (%v for %+v)", ws, err, req) } } } var hctx context.Context var hcancel context.CancelFunc if !isWait && c.headerTimeout > 0 { hctx, hcancel = context.WithTimeout(ctx, c.headerTimeout) } else { hctx, hcancel = context.WithCancel(ctx) } defer hcancel() reqcancel := requestCanceler(c.transport, req) rtchan := make(chan roundTripResponse, 1) go func() { resp, err := c.transport.RoundTrip(req) rtchan <- roundTripResponse{resp: resp, err: err} close(rtchan) }() var resp *http.Response var err error select { case rtresp := <-rtchan: resp, err = rtresp.resp, rtresp.err case <-hctx.Done(): // cancel and wait for request to actually exit before continuing reqcancel() rtresp := <-rtchan resp = rtresp.resp switch { case ctx.Err() != nil: err = ctx.Err() case hctx.Err() != nil: err = fmt.Errorf("client: endpoint %s exceeded header timeout", c.endpoint.String()) default: panic("failed to get error from context") } } // always check for resp nil-ness to deal with possible // race conditions between channels above defer func() { if resp != nil { resp.Body.Close() } }() if err != nil { return nil, nil, err } var body []byte done := make(chan struct{}) go func() { body, err = ioutil.ReadAll(resp.Body) done <- struct{}{} }() select { case <-ctx.Done(): resp.Body.Close() <-done return nil, nil, ctx.Err() case <-done: } return resp, body, err } type authedAction struct { act httpAction credentials credentials } func (a *authedAction) HTTPRequest(url url.URL) *http.Request { r := a.act.HTTPRequest(url) r.SetBasicAuth(a.credentials.username, a.credentials.password) return r } type redirectFollowingHTTPClient struct { client httpClient checkRedirect CheckRedirectFunc } func (r *redirectFollowingHTTPClient) Do(ctx context.Context, act httpAction) (*http.Response, []byte, error) { next := act for i := 0; i < 100; i++ { if i > 0 { if err := r.checkRedirect(i); err != nil { return nil, nil, err } } resp, body, err := r.client.Do(ctx, next) if err != nil { return nil, nil, err } if resp.StatusCode/100 == 3 { hdr := resp.Header.Get("Location") if hdr == "" { return nil, nil, fmt.Errorf("Location header not set") } loc, err := url.Parse(hdr) if err != nil { return nil, nil, fmt.Errorf("Location header not valid URL: %s", hdr) } next = &redirectedHTTPAction{ action: act, location: *loc, } continue } return resp, body, nil } return nil, nil, 
errTooManyRedirectChecks } type redirectedHTTPAction struct { action httpAction location url.URL } func (r *redirectedHTTPAction) HTTPRequest(ep url.URL) *http.Request { orig := r.action.HTTPRequest(ep) orig.URL = &r.location return orig } func shuffleEndpoints(r *rand.Rand, eps []url.URL) []url.URL { p := r.Perm(len(eps)) neps := make([]url.URL, len(eps)) for i, k := range p { neps[i] = eps[k] } return neps }
apache-2.0
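The Config comments above describe Endpoints, CheckRedirect and HeaderTimeoutPerRequest, and the AutoSync documentation sketches a resync loop. A minimal client setup using only identifiers visible in this file (Config, DefaultTransport, DefaultRequestTimeout, New, Sync, Endpoints, AutoSync) might look like the following; the endpoint URL is a placeholder.

```go
package main

import (
	"log"
	"time"

	"github.com/coreos/etcd/client"
	"golang.org/x/net/context"
)

func main() {
	cfg := client.Config{
		// Placeholder endpoint; schemes, hosts and ports only, per the field docs.
		Endpoints:               []string{"http://127.0.0.1:2379"},
		Transport:               client.DefaultTransport,
		HeaderTimeoutPerRequest: time.Second, // wait (watch) requests ignore this
	}

	c, err := client.New(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// One-off refresh of the cached cluster membership.
	ctx, cancel := context.WithTimeout(context.Background(), client.DefaultRequestTimeout)
	if err := c.Sync(ctx); err != nil {
		log.Print(err)
	}
	cancel()
	log.Println("endpoints:", c.Endpoints())

	// A long-running process would instead follow the AutoSync pattern from the
	// comments above, e.g. c.AutoSync(context.Background(), 10*time.Second) in a loop.
}
```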
dasein-cloud/dasein-cloud-azure
src/test/java/org/dasein/cloud/azure/tests/network/AzureLoadBalancerSupportWithMockHttpClientTest.java
42595
/* * * * Copyright (C) 2009-2015 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== * */ package org.dasein.cloud.azure.tests.network; import mockit.Invocation; import mockit.Mock; import mockit.MockUp; import mockit.Mocked; import mockit.NonStrictExpectations; import org.apache.commons.collections.IteratorUtils; import org.apache.http.Header; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.message.BasicHeader; import org.dasein.cloud.CloudException; import org.dasein.cloud.InternalException; import org.dasein.cloud.azure.AzureException; import org.dasein.cloud.azure.compute.vm.AzureVM; import org.dasein.cloud.azure.model.AzureOperationStatus; import org.dasein.cloud.azure.network.AzureLoadBalancerSupport; import org.dasein.cloud.azure.network.model.DefinitionModel; import org.dasein.cloud.azure.network.model.ProfileModel; import org.dasein.cloud.azure.network.model.ProfilesModel; import org.dasein.cloud.azure.tests.AzureTestsBase; import org.dasein.cloud.compute.VirtualMachine; import org.dasein.cloud.network.HealthCheckFilterOptions; import org.dasein.cloud.network.HealthCheckOptions; import org.dasein.cloud.network.IPVersion; import org.dasein.cloud.network.LbAlgorithm; import org.dasein.cloud.network.LbEndpointState; import org.dasein.cloud.network.LbEndpointType; import org.dasein.cloud.network.LbListener; import org.dasein.cloud.network.LbPersistence; import org.dasein.cloud.network.LbProtocol; import org.dasein.cloud.network.LoadBalancer; import org.dasein.cloud.network.LoadBalancerAddressType; import org.dasein.cloud.network.LoadBalancerCreateOptions; import org.dasein.cloud.network.LoadBalancerEndpoint; import org.dasein.cloud.network.LoadBalancerHealthCheck; import org.dasein.cloud.network.LoadBalancerState; import org.dasein.cloud.util.requester.entities.DaseinObjectToXmlEntity; import org.junit.Before; import org.junit.Test; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import static org.dasein.cloud.azure.tests.HttpMethodAsserts.assertDelete; import static org.dasein.cloud.azure.tests.HttpMethodAsserts.assertGet; import static org.dasein.cloud.azure.tests.HttpMethodAsserts.assertPost; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; /** * Created by Jeffrey Yan on 9/21/2015. 
* * @author Jeffrey Yan * @since 2015.09.1 */ public class AzureLoadBalancerSupportWithMockHttpClientTest extends AzureTestsBase { private final String LB_NAME = "lb_name"; private final String LB_DESCRIPTION = "lb_description"; private final String LB_DOMAIN = String.format("%s.%s", LB_NAME, "trafficmanager.net"); private final int LB_PUBLIC_PORT=80; private final int LB_PRIVATE_PORT = 80; private final LbProtocol LB_PROTOCOL = LbProtocol.HTTP; private final String HC_DESCRIPTION = "hc_description"; private final LoadBalancerHealthCheck.HCProtocol HC_PROTOCOL = LoadBalancerHealthCheck.HCProtocol.HTTP; private final int HC_PORT = 80; private final String HC_PATH = "/"; private final String PROFILES_URL = String.format("%s/%s/services/WATM/profiles", ENDPOINT, ACCOUNT_NO); private final String PROFILE_URL = String.format("%s/%s/services/WATM/profiles/%s", ENDPOINT, ACCOUNT_NO, LB_NAME); private final String DEFINITIONS_URL = String.format("%s/%s/services/WATM/profiles/%s/definitions", ENDPOINT, ACCOUNT_NO, LB_NAME); private final String DEFINITION_URL = String.format("%s/%s/services/WATM/profiles/%s/definitions/1", ENDPOINT, ACCOUNT_NO, LB_NAME); private AzureLoadBalancerSupport loadBalancerSupport; @Before public void setUp() throws CloudException, InternalException { super.setUp(); loadBalancerSupport = new AzureLoadBalancerSupport(azureMock); } @Test(expected = InternalException.class) public void createLoadBalancerShouldThrowExceptionIfHealthCheckOptionsIsNull() throws CloudException, InternalException { loadBalancerSupport.createLoadBalancer(LoadBalancerCreateOptions.getInstance(LB_NAME, LB_DESCRIPTION)); } @Test(expected = InternalException.class) public void createLoadBalancerShouldThrowExceptionIfNameIsNull() throws CloudException, InternalException { loadBalancerSupport.createLoadBalancer(LoadBalancerCreateOptions.getInstance(null, LB_DESCRIPTION)); } @Test(expected = InternalException.class) public void createLoadBalancerShouldThrowExceptionIfNameIsEmpty() throws CloudException, InternalException { loadBalancerSupport.createLoadBalancer(LoadBalancerCreateOptions.getInstance("", LB_DESCRIPTION)); } @Test(expected = InternalException.class) public void createLoadBalancerShouldThrowExceptionIfHCProtocolIsTCP() throws CloudException, InternalException { HealthCheckOptions healthCheckOptions = HealthCheckOptions.getInstance(LB_NAME, HC_DESCRIPTION, null, null, LoadBalancerHealthCheck.HCProtocol.TCP, HC_PORT, HC_PATH, 9, 9, 9, 9); LoadBalancerCreateOptions loadBalancerCreateOptions = LoadBalancerCreateOptions.getInstance(LB_NAME, LB_DESCRIPTION); loadBalancerCreateOptions.withHealthCheckOptions(healthCheckOptions); loadBalancerSupport.createLoadBalancer(loadBalancerCreateOptions); } @Test(expected = InternalException.class) public void createLoadBalancerShouldThrowExceptionIfHCProtocolIsSSL() throws CloudException, InternalException { HealthCheckOptions healthCheckOptions = HealthCheckOptions.getInstance(LB_NAME, HC_DESCRIPTION, null, null, LoadBalancerHealthCheck.HCProtocol.SSL, HC_PORT, HC_PATH, 9, 9, 9, 9); LoadBalancerCreateOptions loadBalancerCreateOptions = LoadBalancerCreateOptions.getInstance(LB_NAME, LB_DESCRIPTION); loadBalancerCreateOptions.withHealthCheckOptions(healthCheckOptions); loadBalancerSupport.createLoadBalancer(loadBalancerCreateOptions); } @Test(expected = AzureException.class) public void createLoadBalancerShouldThrowExceptionIfNameIsExist() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse 
execute(Invocation inv, HttpUriRequest request) throws IOException { if ("POST".equals(request.getMethod()) && PROFILES_URL.equals(request.getURI().toString())) { assertPost(request, PROFILES_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }, createProfileModel()); AzureOperationStatus.AzureOperationError error = new AzureOperationStatus.AzureOperationError(); error.setCode("BadRequest"); error.setMessage("A conflict occurred to prevent the operation from completing."); DaseinObjectToXmlEntity<AzureOperationStatus.AzureOperationError> daseinEntity = new DaseinObjectToXmlEntity<AzureOperationStatus.AzureOperationError>( error); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_BAD_REQUEST), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; HealthCheckOptions healthCheckOptions = HealthCheckOptions.getInstance(LB_NAME, HC_DESCRIPTION, null, null, HC_PROTOCOL, HC_PORT, HC_PATH, 9, 9, 9, 9); LoadBalancerCreateOptions loadBalancerCreateOptions = LoadBalancerCreateOptions.getInstance(LB_NAME, LB_DESCRIPTION); loadBalancerCreateOptions.withHealthCheckOptions(healthCheckOptions); loadBalancerSupport.createLoadBalancer(loadBalancerCreateOptions); } @Test public void createLoadBalancerShouldThrowPostCorrectRequestIfLBListenersIsNull() throws CloudException, InternalException { CreateLoadBalancerMockUp mockUp = new CreateLoadBalancerMockUp("RoundRobin"); LoadBalancerCreateOptions loadBalancerCreateOptions = createLoadBalancerCreateOptions(null); String result = loadBalancerSupport.createLoadBalancer(loadBalancerCreateOptions); assertEquals("LoadBalancerSupport.createLoadBalancer() doesn't return correct result", LB_NAME, result); assertEquals("Post profiles count doesn't match", 1, mockUp.postProfilesCount); assertEquals("Post definitions count doesn't match", 1, mockUp.postDefinitionsCount); } @Test public void createLoadBalancerShouldPostCorrectRequestIfLbAlgorithmIsSOURCE() throws CloudException, InternalException { CreateLoadBalancerMockUp mockUp = new CreateLoadBalancerMockUp("Performance"); String result = loadBalancerSupport.createLoadBalancer(createLoadBalancerCreateOptions(LbAlgorithm.SOURCE)); assertEquals("LoadBalancerSupport.createLoadBalancer() doesn't return correct result", LB_NAME, result); assertEquals("Post profiles count doesn't match", 1, mockUp.postProfilesCount); assertEquals("Post definitions count doesn't match", 1, mockUp.postDefinitionsCount); } @Test public void removeLoadBalancerShouldDeleteCorrectRequest() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("DELETE".equals(request.getMethod())) { assertDelete(request, PROFILE_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), null, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; loadBalancerSupport.removeLoadBalancer(LB_NAME); } @Test public void createLoadBalancerShouldPostCorrectRequestIfLbAlgorithmIsLEAST_CONN() throws CloudException, InternalException { CreateLoadBalancerMockUp mockUp = new CreateLoadBalancerMockUp("Failover"); String result = loadBalancerSupport.createLoadBalancer(createLoadBalancerCreateOptions(LbAlgorithm.LEAST_CONN)); 
assertEquals("LoadBalancerSupport.createLoadBalancer() doesn't return correct result", LB_NAME, result); assertEquals("Post profiles count doesn't match", 1, mockUp.postProfilesCount); assertEquals("Post definitions count doesn't match", 1, mockUp.postDefinitionsCount); } @Test public void createLoadBalancerShouldPostCorrectRequestIfLbAlgorithmIsROUND_ROBIN() throws CloudException, InternalException { CreateLoadBalancerMockUp mockUp = new CreateLoadBalancerMockUp("RoundRobin"); String result = loadBalancerSupport.createLoadBalancer(createLoadBalancerCreateOptions(LbAlgorithm.ROUND_ROBIN)); assertEquals("LoadBalancerSupport.createLoadBalancer() doesn't return correct result", LB_NAME, result); assertEquals("Post profiles count doesn't match", 1, mockUp.postProfilesCount); assertEquals("Post definitions count doesn't match", 1, mockUp.postDefinitionsCount); } @Test public void listLoadBalancersShouldReturnCorrectResult() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && PROFILES_URL.equals(request.getURI().toString())) { assertGet(request, PROFILES_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<ProfilesModel> daseinEntity = new DaseinObjectToXmlEntity<ProfilesModel>( createProfilesModel()); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { assertGet(request, DEFINITION_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; List<LoadBalancer> loadBalancers = IteratorUtils.toList(loadBalancerSupport.listLoadBalancers().iterator()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result size", 1, loadBalancers.size()); LoadBalancer loadBalancer = loadBalancers.get(0); assertLoadBalancer(loadBalancer); } @Test public void getLoadBalancerShouldReturnNullIfIsNotExist() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && PROFILE_URL.equals(request.getURI().toString())) { assertGet(request, PROFILE_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_NOT_FOUND), null, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; assertNull("", loadBalancerSupport.getLoadBalancer(LB_NAME)); } @Test public void getLoadBalancerShouldReturnCorrectResult() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && 
PROFILE_URL.equals(request.getURI().toString())) { assertGet(request, PROFILE_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<ProfileModel> daseinEntity = new DaseinObjectToXmlEntity<ProfileModel>( createProfileModel()); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { assertGet(request, DEFINITION_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; LoadBalancer loadBalancer = loadBalancerSupport.getLoadBalancer(LB_NAME); assertLoadBalancer(loadBalancer); } @Test public void addServersShouldPostCorrectRequest() throws CloudException, InternalException { final String ROLE_NAME_2 = "TESTROLENAME2"; final String VM_ID_2 = String.format("%s:%s:%s", SERVICE_NAME, DEPLOYMENT_NAME, ROLE_NAME_2); final AtomicInteger postCount = new AtomicInteger(0); new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { assertGet(request, DEFINITION_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else if ("POST".equals(request.getMethod()) && DEFINITIONS_URL.equals(request.getURI().toString())) { postCount.incrementAndGet(); assertPost(request, DEFINITIONS_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }, createDefinitionModelWithAnotherServer("Failover", "Enabled", ROLE_NAME_2)); DefinitionModel definitionModel = new DefinitionModel(); definitionModel.setVersion("2"); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( definitionModel); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); }else { throw new IOException("Request is not mocked"); } } }; loadBalancerSupport.addServers(LB_NAME, ROLE_NAME_2); assertEquals("LoadBalancerSupport.addServers() ", 1, postCount.get()); } @Test public void removeServersShouldPostCorrectRequest() throws CloudException, InternalException { final String ROLE_NAME_2 = "TESTROLENAME2"; final String VM_ID_2 = String.format("%s:%s:%s", SERVICE_NAME, DEPLOYMENT_NAME, ROLE_NAME_2); final AtomicInteger postCount = new AtomicInteger(0); new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { assertGet(request, DEFINITION_URL, new Header[] 
{ new BasicHeader("x-ms-version", "2012-03-01") }); DefinitionModel definitionModel = createDefinitionModelWithAnotherServer("Failover", "Enabled", ROLE_NAME_2); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else if ("POST".equals(request.getMethod()) && DEFINITIONS_URL.equals(request.getURI().toString())) { postCount.incrementAndGet(); assertPost(request, DEFINITIONS_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }, createDefinitionModel("Failover", "Enabled", HC_PORT)); DefinitionModel definitionModel = new DefinitionModel(); definitionModel.setVersion("2"); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( definitionModel); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; loadBalancerSupport.removeServers(LB_NAME, ROLE_NAME_2); assertEquals("LoadBalancerSupport.addServers() post count doesn't match", 1, postCount.get()); } @Test public void listEndpointsShouldReturnCorrectResult(@Mocked final AzureVM azureVM) throws CloudException, InternalException { new NonStrictExpectations() {{ VirtualMachine virtualMachine = new VirtualMachine(); virtualMachine.setPublicDnsAddress(String.format("%s.cloudapp.net", ROLE_NAME)); virtualMachine.setProviderVirtualMachineId(VM_ID); List<VirtualMachine> virtualMachines = new ArrayList<VirtualMachine>(); virtualMachines.add(virtualMachine); azureVM.listVirtualMachines(); result = virtualMachines; }}; new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { assertGet(request, DEFINITION_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; assertLoadBalancerEndpoints(IteratorUtils.toList(loadBalancerSupport.listEndpoints(LB_NAME).iterator())); //another round to test cache assertLoadBalancerEndpoints(IteratorUtils.toList(loadBalancerSupport.listEndpoints(LB_NAME).iterator())); } @Test public void listLBHealthChecksShouldReturnEmptyIfProfilesIsNotFound() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && PROFILES_URL.equals(request.getURI().toString())) { assertGet(request, PROFILES_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_NOT_FOUND), null, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; 
List<LoadBalancerHealthCheck> loadBalancerHealthChecks = IteratorUtils.toList(loadBalancerSupport .listLBHealthChecks(HealthCheckFilterOptions.getInstance(true) .matchingProtocol(LoadBalancerHealthCheck.HCProtocol.HTTP)).iterator()); assertEquals("LoadBalancerSupport.listLBHealthChecks() return size doesn't match", 0, loadBalancerHealthChecks.size()); } @Test public void listLBHealthChecksShouldReturnCorrectResult() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && PROFILES_URL.equals(request.getURI().toString())) { assertGet(request, PROFILES_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<ProfilesModel> daseinEntity = new DaseinObjectToXmlEntity<ProfilesModel>( createProfilesModel()); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { assertGet(request, DEFINITION_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; List<LoadBalancerHealthCheck> loadBalancerHealthChecks = IteratorUtils.toList(loadBalancerSupport .listLBHealthChecks(HealthCheckFilterOptions.getInstance(true) .matchingProtocol(LoadBalancerHealthCheck.HCProtocol.HTTP)).iterator()); assertEquals("LoadBalancerSupport.listLBHealthChecks() return size doesn't match", 1, loadBalancerHealthChecks.size()); LoadBalancerHealthCheck loadBalancerHealthCheck = loadBalancerHealthChecks.get(0); assertLoadBalancerHealthCheck(loadBalancerHealthCheck, HC_PORT); } @Test(expected = InternalException.class) public void getLoadBalancerHealthCheckShouldThrowExceptionIfProviderLBHealthCheckIdIsNull() throws CloudException, InternalException { loadBalancerSupport.getLoadBalancerHealthCheck(null, LB_NAME); } @Test public void getLoadBalancerHealthCheckShouldReturnCorrectResult() throws CloudException, InternalException { new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { assertGet(request, DEFINITION_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; assertLoadBalancerHealthCheck(loadBalancerSupport.getLoadBalancerHealthCheck(LB_NAME, LB_NAME), HC_PORT); assertLoadBalancerHealthCheck(loadBalancerSupport.getLoadBalancerHealthCheck(LB_NAME, null), HC_PORT); } @Test(expected = InternalException.class) public void 
modifyHealthCheckShouldThrowExceptionIfLoadBalancerIdIsNull() throws CloudException, InternalException { loadBalancerSupport.modifyHealthCheck(null, HealthCheckOptions .getInstance(LB_NAME, HC_DESCRIPTION, null, null, HC_PROTOCOL, HC_PORT, HC_PATH, 9, 9, 9, 9)); } @Test(expected = InternalException.class) public void modifyHealthCheckShouldThrowExceptionIfHCProtocolIsTCP() throws CloudException, InternalException { loadBalancerSupport.modifyHealthCheck(LB_NAME, HealthCheckOptions .getInstance(LB_NAME, HC_DESCRIPTION, LB_NAME, null, LoadBalancerHealthCheck.HCProtocol.TCP, HC_PORT, HC_PATH, 9, 9, 9, 9)); } @Test(expected = InternalException.class) public void modifyHealthCheckShouldThrowExceptionIfHCProtocolIsSSL() throws CloudException, InternalException { loadBalancerSupport.modifyHealthCheck(LB_NAME, HealthCheckOptions .getInstance(LB_NAME, HC_DESCRIPTION, LB_NAME, null, LoadBalancerHealthCheck.HCProtocol.SSL, HC_PORT, HC_PATH, 9, 9, 9, 9)); } @Test public void modifyHealthCheckShouldPostCorrectRequest() throws CloudException, InternalException { final int portChangeTo = 8080; final AtomicInteger getCount = new AtomicInteger(0); final AtomicInteger postCount = new AtomicInteger(0); new MockUp<CloseableHttpClient>() { @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("GET".equals(request.getMethod()) && DEFINITION_URL.equals(request.getURI().toString())) { getCount.incrementAndGet(); assertGet(request, DEFINITION_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }); if(getCount.get() == 1) { DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", HC_PORT)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( createDefinitionModel("Failover", "Enabled", portChangeTo)); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } } else if ("POST".equals(request.getMethod()) && DEFINITIONS_URL.equals(request.getURI().toString())) { postCount.incrementAndGet(); assertPost(request, DEFINITIONS_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }, createDefinitionModel("Failover", "Enabled", portChangeTo)); DefinitionModel definitionModel = new DefinitionModel(); definitionModel.setVersion("2"); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( definitionModel); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } }; LoadBalancerHealthCheck loadBalancerHealthCheck = loadBalancerSupport.modifyHealthCheck(LB_NAME, HealthCheckOptions.getInstance(LB_NAME, HC_DESCRIPTION, LB_NAME, null, HC_PROTOCOL, 8080, HC_PATH, 9, 9, 9, 9)); assertEquals("LoadBalancerSupport.modifyHealthCheck() post count doesn't match", 1, postCount.get()); assertLoadBalancerHealthCheck(loadBalancerHealthCheck, portChangeTo); } private void assertLoadBalancerHealthCheck(LoadBalancerHealthCheck loadBalancerHealthCheck, int port) { assertEquals("LoadBalancerSupport.listLBHealthChecks() return 
doesn't match", LB_NAME, loadBalancerHealthCheck.getProviderLBHealthCheckId()); assertEquals("LoadBalancerSupport.listLBHealthChecks() return doesn't match", HC_PATH, loadBalancerHealthCheck.getPath()); assertEquals("LoadBalancerSupport.listLBHealthChecks() return doesn't match", port, loadBalancerHealthCheck.getPort()); assertEquals("LoadBalancerSupport.listLBHealthChecks() return doesn't match", LoadBalancerHealthCheck.HCProtocol.HTTP, loadBalancerHealthCheck.getProtocol()); } private void assertLoadBalancerEndpoints(List<LoadBalancerEndpoint> endpoints) { assertEquals("LoadBalancerSupport.listEndpoints() return size doesn't match", 1, endpoints.size()); LoadBalancerEndpoint endpoint = endpoints.get(0); assertEquals("LoadBalancerSupport.listEndpoints() return doesn't match", LbEndpointState.ACTIVE, endpoint.getCurrentState()); assertEquals("LoadBalancerSupport.listEndpoints() return doesn't match", VM_ID, endpoint.getEndpointValue()); assertEquals("LoadBalancerSupport.listEndpoints() return doesn't match", LbEndpointType.VM, endpoint.getEndpointType()); } private void assertLoadBalancer(LoadBalancer loadBalancer) { assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LB_DOMAIN, loadBalancer.getAddress()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LoadBalancerAddressType.DNS, loadBalancer.getAddressType()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LoadBalancerState.ACTIVE, loadBalancer.getCurrentState()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LB_NAME, loadBalancer.getName()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LB_NAME, loadBalancer.getProviderLoadBalancerId()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", ACCOUNT_NO, loadBalancer.getProviderOwnerId()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", new IPVersion[] { IPVersion.IPV4 }, loadBalancer.getSupportedTraffic()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LB_NAME, loadBalancer.getProviderLBHealthCheckId()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", 1, loadBalancer.getListeners().length); LbListener lbListener = loadBalancer.getListeners()[0]; assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LbAlgorithm.LEAST_CONN, lbListener.getAlgorithm()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", LbPersistence.COOKIE, lbListener.getPersistence()); assertEquals("LoadBalancerSupport.listLoadBalancers() doesn't return correct result", "", lbListener.getCookie()); } private LoadBalancerCreateOptions createLoadBalancerCreateOptions(LbAlgorithm algorithm) { LoadBalancerCreateOptions loadBalancerCreateOptions = LoadBalancerCreateOptions.getInstance(LB_NAME, LB_DESCRIPTION); HealthCheckOptions healthCheckOptions = HealthCheckOptions.getInstance(LB_NAME, HC_DESCRIPTION, null, null, HC_PROTOCOL, HC_PORT, HC_PATH, 9, 9, 9, 9); loadBalancerCreateOptions.withHealthCheckOptions(healthCheckOptions); if (algorithm != null) { LbListener lbListener = LbListener .getInstance(algorithm, "jsessionid", LB_PROTOCOL, LB_PUBLIC_PORT, LB_PRIVATE_PORT); loadBalancerCreateOptions.havingListeners(lbListener); } loadBalancerCreateOptions.withVirtualMachines(VM_ID); return loadBalancerCreateOptions; } private 
DefinitionModel createDefinitionModelWithAnotherServer(String loadBalancingMethod, String status, String anotherRole) { DefinitionModel definitionModel = createDefinitionModel(loadBalancingMethod, status, HC_PORT); DefinitionModel.EndPointModel endPointModel = new DefinitionModel.EndPointModel(); endPointModel.setDomainName(anotherRole + ".cloudapp.net"); endPointModel.setStatus("Enabled"); endPointModel.setType("CloudService"); definitionModel.getPolicy().getEndPoints().add(endPointModel); return definitionModel; } private DefinitionModel createDefinitionModel(String loadBalancingMethod, String status, int port) { DefinitionModel definition = new DefinitionModel(); DefinitionModel.DnsOptions dnsOptions = new DefinitionModel.DnsOptions(); dnsOptions.setTimeToLiveInSeconds("300"); definition.setDnsOptions(dnsOptions); DefinitionModel.MonitorModel monitor = new DefinitionModel.MonitorModel(); monitor.setIntervalInSeconds("30"); monitor.setTimeoutInSeconds("10"); monitor.setToleratedNumberOfFailures("3"); monitor.setProtocol(HC_PROTOCOL.toString()); monitor.setPort(String.valueOf(port)); DefinitionModel.HttpOptionsModel httpOptions = new DefinitionModel.HttpOptionsModel(); httpOptions.setVerb("GET"); httpOptions.setRelativePath(HC_PATH); httpOptions.setExpectedStatusCode("200"); monitor.setHttpOptions(httpOptions); ArrayList<DefinitionModel.MonitorModel> monitors = new ArrayList<DefinitionModel.MonitorModel>(); monitors.add(monitor); definition.setMonitors(monitors); DefinitionModel.PolicyModel policy = new DefinitionModel.PolicyModel(); policy.setLoadBalancingMethod(loadBalancingMethod); ArrayList<DefinitionModel.EndPointModel> endPointsToAdd = new ArrayList<DefinitionModel.EndPointModel>(); DefinitionModel.EndPointModel endPointModel = new DefinitionModel.EndPointModel(); endPointModel.setDomainName(String.format("%s.cloudapp.net", ROLE_NAME)); endPointModel.setStatus("Enabled"); endPointModel.setType("CloudService"); endPointsToAdd.add(endPointModel); policy.setEndPoints(endPointsToAdd); definition.setPolicy(policy); definition.setStatus(status); return definition; } private ProfilesModel createProfilesModel() { ProfilesModel profilesModel = new ProfilesModel(); List<ProfileModel> profileModels = new ArrayList<ProfileModel>(); profileModels.add(createProfileModel()); profilesModel.setProfiles(profileModels); return profilesModel; } private ProfileModel createProfileModel() { ProfileModel profileModel = new ProfileModel(); profileModel.setDomainName(LB_DOMAIN); profileModel.setName(LB_NAME); return profileModel; } private class CreateLoadBalancerMockUp extends MockUp<CloseableHttpClient> { private String loadBalancingMethod; private int postProfilesCount = 0; private int postDefinitionsCount = 0; private CreateLoadBalancerMockUp(String loadBalancingMethod) { this.loadBalancingMethod = loadBalancingMethod; } @Mock public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException { if ("POST".equals(request.getMethod()) && PROFILES_URL.equals(request.getURI().toString())) { postProfilesCount++; assertPost(request, PROFILES_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") }, createProfileModel()); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), null, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else if ("POST".equals(request.getMethod()) && DEFINITIONS_URL.equals(request.getURI().toString())) { postDefinitionsCount++; assertPost(request, DEFINITIONS_URL, new Header[] { new 
BasicHeader("x-ms-version", "2012-03-01") }, createDefinitionModel(loadBalancingMethod, null, HC_PORT)); DefinitionModel definitionModel = new DefinitionModel(); definitionModel.setVersion("1"); DaseinObjectToXmlEntity<DefinitionModel> daseinEntity = new DaseinObjectToXmlEntity<DefinitionModel>( definitionModel); return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity, new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) }); } else { throw new IOException("Request is not mocked"); } } } }
apache-2.0
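The test class in the record above leans on one pattern throughout: a JMockit MockUp over Apache HttpClient's CloseableHttpClient, so every request the Azure driver would send is intercepted and answered with a canned response instead of real network traffic. The sketch below is a minimal, standalone illustration of that interception pattern; it is not part of the repository above, the class name and URL are invented, and it assumes the JMockit agent is attached to the JVM (-javaagent).

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import mockit.Mock;
import mockit.MockUp;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;

public class HttpClientMockUpSketch {

    public static void main(String[] args) throws IOException {
        final List<String> seenRequests = new ArrayList<String>();

        // Redefines execute(...) for every CloseableHttpClient created in this JVM,
        // so no real network call is made.
        new MockUp<CloseableHttpClient>() {
            @Mock
            public CloseableHttpResponse execute(HttpUriRequest request) {
                seenRequests.add(request.getMethod() + " " + request.getURI());
                // A real test would build and return a canned CloseableHttpResponse here.
                return null;
            }
        };

        CloseableHttpClient client = HttpClients.createDefault();
        client.execute(new HttpGet("https://management.example.invalid/services/WATM/profiles"));

        // Prints the single intercepted request instead of contacting any server.
        System.out.println(seenRequests);
    }
}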
biospi/mzmlb
pwiz/pwiz/data/vendor_readers/ABI/Reader_ABI_Detail.cpp
7869
// // $Id: Reader_ABI_Detail.cpp 10385 2017-01-20 20:35:32Z chambm $ // // // Original author: Matt Chambers <matt.chambers .@. vanderbilt.edu> // // Copyright 2009 Vanderbilt University - Nashville, TN 37232 // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #define PWIZ_SOURCE #include "Reader_ABI_Detail.hpp" #include "pwiz/utility/misc/Std.hpp" #ifdef PWIZ_READER_ABI using namespace pwiz::vendor_api::ABI; namespace pwiz { namespace msdata { namespace detail { namespace ABI { PWIZ_API_DECL InstrumentConfigurationPtr translateAsInstrumentConfiguration(InstrumentModel instrumentModel, IonSourceType ionSource) { InstrumentConfigurationPtr icPtr(new InstrumentConfiguration); InstrumentConfiguration& ic = *icPtr; ic.id = "IC1"; ic.set(translateAsInstrumentModel(instrumentModel)); Component source(ComponentType_Source, 1); source.set(translateAsIonSource(ionSource)); switch (instrumentModel) { // QqQ case API150MCA: case API150EX: case API2000: case API3000: case API3200: case API4000: case API4500: case API5000: case API5500: case API6500: case API100: case API100LC: case API165: case API300: case API350: case API365: ic.componentList.push_back(source); ic.componentList.push_back(Component(MS_quadrupole, 2)); ic.componentList.push_back(Component(MS_quadrupole, 3)); ic.componentList.push_back(Component(MS_quadrupole, 4)); ic.componentList.push_back(Component(MS_electron_multiplier, 5)); break; // QqLIT case API2000QTrap: case API2500QTrap: case API3200QTrap: case API3500QTrap: case API4000QTrap: case API4500QTrap: case API5500QTrap: case API6500QTrap: ic.componentList.push_back(source); ic.componentList.push_back(Component(MS_quadrupole, 2)); ic.componentList.push_back(Component(MS_quadrupole, 3)); ic.componentList.push_back(Component(MS_axial_ejection_linear_ion_trap, 4)); ic.componentList.push_back(Component(MS_electron_multiplier, 5)); break; // QqTOF case QStar: case QStarPulsarI: case QStarXL: case QStarElite: case API4600TripleTOF: case API5600TripleTOF: case API6600TripleTOF: case X500QTOF: case NlxTof: ic.componentList.push_back(source); ic.componentList.push_back(Component(MS_quadrupole, 2)); ic.componentList.push_back(Component(MS_quadrupole, 3)); ic.componentList.push_back(Component(MS_time_of_flight, 4)); ic.componentList.push_back(Component(MS_electron_multiplier, 5)); break; case InstrumentModel_Unknown: break; default: throw runtime_error("[translateAsInstrumentConfiguration] unhandled instrument model: " + lexical_cast<string>(instrumentModel)); } return icPtr; } PWIZ_API_DECL CVID translateAsInstrumentModel(InstrumentModel instrumentModel) { switch (instrumentModel) { case API100: return MS_API_100; case API100LC: return MS_API_100LC; case API150MCA: return MS_API_150EX; case API150EX: return MS_API_150EX; case API165: return MS_API_165; case API300: return MS_API_300; case API350: return MS_API_350; case API365: return MS_API_365; case API2000: return MS_API_2000; case API3000: return MS_API_3000; case API3200: return MS_API_3200; case API4000: return MS_API_4000; case API4500: 
return MS_Triple_Quad_4500; case API5000: return MS_API_5000; case API5500: return MS_Triple_Quad_5500; case API6500: return MS_Triple_Quad_6500; case API2000QTrap: return MS_2000_QTRAP; case API2500QTrap: return MS_2500_QTRAP; case API3200QTrap: return MS_3200_QTRAP; case API3500QTrap: return MS_3500_QTRAP; case API4000QTrap: return MS_4000_QTRAP; case API4500QTrap: return MS_QTRAP_4500; case API5500QTrap: return MS_QTRAP_5500; case API6500QTrap: return MS_QTRAP_6500; case API4600TripleTOF: return MS_TripleTOF_4600; case API5600TripleTOF: return MS_TripleTOF_5600; case API6600TripleTOF: return MS_TripleTOF_6600; case QStar: return MS_QSTAR; case QStarPulsarI: return MS_QSTAR_Pulsar; case QStarXL: return MS_QSTAR_XL; case QStarElite: return MS_QSTAR_Elite; case NlxTof: return MS_TripleTOF_5600; case X500QTOF: return MS_X500R_QTOF; case InstrumentModel_Unknown: return MS_Applied_Biosystems_instrument_model; default: throw runtime_error("[translateAsInstrumentModel] unhandled instrument model: " + lexical_cast<string>(instrumentModel)); } } PWIZ_API_DECL CVID translateAsIonSource(IonSourceType ionSourceType) { switch (ionSourceType) { case IonSourceType_Unknown: return MS_ionization_type; case FlowNanoSpray: return MS_nanoelectrospray; case HeatedNebulizer: return MS_atmospheric_pressure_chemical_ionization; case TurboSpray: return MS_electrospray_ionization; case IonSpray: return MS_electrospray_ionization; case Maldi: return MS_matrix_assisted_laser_desorption_ionization; case PhotoSpray: return MS_atmospheric_pressure_photoionization; case Medusa: case Duo: case None: return CVID_Unknown; default: throw runtime_error("[translateAsIonSource] unhandled ion source: " + lexical_cast<string>(ionSourceType)); } } PWIZ_API_DECL CVID translateAsSpectrumType(ExperimentType experimentType) { switch (experimentType) { case MS: return MS_MS1_spectrum; case vendor_api::ABI::Product: return MS_MSn_spectrum; case vendor_api::ABI::Precursor: return MS_precursor_ion_spectrum; case NeutralGainOrLoss: return MS_constant_neutral_loss_spectrum; case SIM: return MS_SIM_spectrum; case MRM: return MS_SRM_spectrum; default: return CVID_Unknown; } } PWIZ_API_DECL CVID translate(Polarity polarity) { switch (polarity) { case Positive: return MS_positive_scan; case Negative: return MS_negative_scan; case Undefined: default: return CVID_Unknown; } } } // ABI } // detail } // msdata } // pwiz #endif // PWIZ_READER_ABI
apache-2.0
raml-org/raml-dotnet-parser-2
source/Raml.Parser/node_modules/raml-1-0-parser/node_modules/webpack/lib/JsonpExportMainTemplatePlugin.js
852
/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ var ConcatSource = require("webpack-core/lib/ConcatSource"); function JsonpExportMainTemplatePlugin(name) { this.name = name; } module.exports = JsonpExportMainTemplatePlugin; JsonpExportMainTemplatePlugin.prototype.apply = function(mainTemplate) { mainTemplate.plugin("render", function(source, chunk, hash) { var name = this.applyPluginsWaterfall("asset-path", this.name || "", { hash: hash, chunk: chunk }); return new ConcatSource(name + "(", source, ");"); }.bind(this)); mainTemplate.plugin("global-hash-paths", function(paths) { if (this.name) paths.push(this.name); return paths; }.bind(this)); mainTemplate.plugin("hash", function(hash) { hash.update("jsonp export"); hash.update(this.name + ""); }.bind(this)); };
apache-2.0
savantly-net/sprout-platform
frontend/apps/sidebar/jest.config.js
314
module.exports = { rootDir: "src", testEnvironment: "jsdom", transform: { "^.+\\.(j|t)sx?$": "babel-jest", }, moduleNameMapper: { "\\.(css)$": "identity-obj-proxy", "single-spa-react/parcel": "single-spa-react/lib/cjs/parcel.cjs", }, setupFilesAfterEnv: ["@testing-library/jest-dom"], };
apache-2.0
TennisGazelle/cs791ALopez
ogldev-source/tutorial22/mesh.cpp
6372
/* Copyright 2011 Etay Meiri This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "mesh.h" Mesh::MeshEntry::MeshEntry() { VB = INVALID_OGL_VALUE; IB = INVALID_OGL_VALUE; NumIndices = 0; MaterialIndex = INVALID_MATERIAL; }; Mesh::MeshEntry::~MeshEntry() { if (VB != INVALID_OGL_VALUE) { glDeleteBuffers(1, &VB); } if (IB != INVALID_OGL_VALUE) { glDeleteBuffers(1, &IB); } } void Mesh::MeshEntry::Init(const std::vector<Vertex>& Vertices, const std::vector<unsigned int>& Indices) { NumIndices = Indices.size(); glGenBuffers(1, &VB); glBindBuffer(GL_ARRAY_BUFFER, VB); glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * Vertices.size(), &Vertices[0], GL_STATIC_DRAW); glGenBuffers(1, &IB); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IB); glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * NumIndices, &Indices[0], GL_STATIC_DRAW); } Mesh::Mesh() { } Mesh::~Mesh() { Clear(); } void Mesh::Clear() { for (unsigned int i = 0 ; i < m_Textures.size() ; i++) { SAFE_DELETE(m_Textures[i]); } } bool Mesh::LoadMesh(const std::string& Filename) { // Release the previously loaded mesh (if it exists) Clear(); bool Ret = false; Assimp::Importer Importer; const aiScene* pScene = Importer.ReadFile(Filename.c_str(), ASSIMP_LOAD_FLAGS); if (pScene) { Ret = InitFromScene(pScene, Filename); } else { printf("Error parsing '%s': '%s'\n", Filename.c_str(), Importer.GetErrorString()); } return Ret; } bool Mesh::InitFromScene(const aiScene* pScene, const std::string& Filename) { m_Entries.resize(pScene->mNumMeshes); m_Textures.resize(pScene->mNumMaterials); // Initialize the meshes in the scene one by one for (unsigned int i = 0 ; i < m_Entries.size() ; i++) { const aiMesh* paiMesh = pScene->mMeshes[i]; InitMesh(i, paiMesh); } return InitMaterials(pScene, Filename); } void Mesh::InitMesh(unsigned int Index, const aiMesh* paiMesh) { m_Entries[Index].MaterialIndex = paiMesh->mMaterialIndex; std::vector<Vertex> Vertices; std::vector<unsigned int> Indices; const aiVector3D Zero3D(0.0f, 0.0f, 0.0f); for (unsigned int i = 0 ; i < paiMesh->mNumVertices ; i++) { const aiVector3D* pPos = &(paiMesh->mVertices[i]); const aiVector3D* pNormal = &(paiMesh->mNormals[i]); const aiVector3D* pTexCoord = paiMesh->HasTextureCoords(0) ? 
&(paiMesh->mTextureCoords[0][i]) : &Zero3D; Vertex v(Vector3f(pPos->x, pPos->y, pPos->z), Vector2f(pTexCoord->x, pTexCoord->y), Vector3f(pNormal->x, pNormal->y, pNormal->z)); Vertices.push_back(v); } for (unsigned int i = 0 ; i < paiMesh->mNumFaces ; i++) { const aiFace& Face = paiMesh->mFaces[i]; assert(Face.mNumIndices == 3); Indices.push_back(Face.mIndices[0]); Indices.push_back(Face.mIndices[1]); Indices.push_back(Face.mIndices[2]); } m_Entries[Index].Init(Vertices, Indices); } bool Mesh::InitMaterials(const aiScene* pScene, const std::string& Filename) { // Extract the directory part from the file name std::string::size_type SlashIndex = Filename.find_last_of("/"); std::string Dir; if (SlashIndex == std::string::npos) { Dir = "."; } else if (SlashIndex == 0) { Dir = "/"; } else { Dir = Filename.substr(0, SlashIndex); } bool Ret = true; // Initialize the materials for (unsigned int i = 0 ; i < pScene->mNumMaterials ; i++) { const aiMaterial* pMaterial = pScene->mMaterials[i]; m_Textures[i] = NULL; if (pMaterial->GetTextureCount(aiTextureType_DIFFUSE) > 0) { aiString Path; if (pMaterial->GetTexture(aiTextureType_DIFFUSE, 0, &Path, NULL, NULL, NULL, NULL, NULL) == AI_SUCCESS) { std::string FullPath = Dir + "/" + Path.data; m_Textures[i] = new Texture(GL_TEXTURE_2D, FullPath.c_str()); if (!m_Textures[i]->Load()) { printf("Error loading texture '%s'\n", FullPath.c_str()); delete m_Textures[i]; m_Textures[i] = NULL; Ret = false; } else { printf("Loaded texture '%s'\n", FullPath.c_str()); } } } // Load a white texture in case the model does not include its own texture if (!m_Textures[i]) { m_Textures[i] = new Texture(GL_TEXTURE_2D, "../Content/white.png"); Ret = m_Textures[i]->Load(); } } return Ret; } void Mesh::Render() { glEnableVertexAttribArray(0); glEnableVertexAttribArray(1); glEnableVertexAttribArray(2); for (unsigned int i = 0 ; i < m_Entries.size() ; i++) { glBindBuffer(GL_ARRAY_BUFFER, m_Entries[i].VB); glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), 0); glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid*)12); glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const GLvoid*)20); glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_Entries[i].IB); const unsigned int MaterialIndex = m_Entries[i].MaterialIndex; if (MaterialIndex < m_Textures.size() && m_Textures[MaterialIndex]) { m_Textures[MaterialIndex]->Bind(GL_TEXTURE0); } glDrawElements(GL_TRIANGLES, m_Entries[i].NumIndices, GL_UNSIGNED_INT, 0); } glDisableVertexAttribArray(0); glDisableVertexAttribArray(1); glDisableVertexAttribArray(2); }
apache-2.0
ArturVasilov/AndroidCourses
Android Material Design/Palette/app/src/main/java/ru/sportics/palette/MainActivity.java
1998
package ru.sportics.palette;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.support.v7.graphics.Palette;
import android.view.View;

public class MainActivity extends ActionBarActivity {

    private final View[] mViews = new View[6];

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        int[] ids = {R.id.colorView1, R.id.colorView2, R.id.colorView3,
                R.id.colorView4, R.id.colorView5, R.id.colorView6,};
        for (int i = 0; i < ids.length; i++) {
            mViews[i] = findViewById(ids[i]);
        }

        extractColors();
        setPaletteAsync();
    }

    private void extractColors() {
        Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.image);
        Palette palette = Palette.generate(bitmap);
        setColors(palette);
    }

    private void setPaletteAsync() {
        // Decoding the bitmap isn't async, but that's acceptable for this example.
        Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.image);
        Palette.generateAsync(bitmap, new Palette.PaletteAsyncListener() {
            @Override
            public void onGenerated(Palette palette) {
                setColors(palette);
            }
        });
    }

    private void setColors(Palette palette) {
        int[] colors = {
                palette.getVibrantColor(Color.BLACK),
                palette.getLightVibrantColor(Color.BLACK),
                palette.getDarkVibrantColor(Color.BLACK),
                palette.getMutedColor(Color.BLACK),
                palette.getLightMutedColor(Color.BLACK),
                palette.getDarkMutedColor(Color.BLACK),
        };
        for (int i = 0; i < colors.length; i++) {
            mViews[i].setBackgroundColor(colors[i]);
        }
    }
}
apache-2.0
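As a follow-on to the Palette activity above: later revisions of the support/AndroidX palette library deprecate the static Palette.generate(...) helpers in favour of the Palette.from(bitmap) builder. The fragment below is a hedged sketch of that builder form; it is not code from the repository, and the class name and view parameter are illustrative.

import android.graphics.Bitmap;
import android.graphics.Color;
import android.support.v7.graphics.Palette;
import android.view.View;

public class PaletteBuilderSketch {

    // Call from an Activity or Fragment that already owns the target view and bitmap.
    static void applyVibrantColor(final View colorView, Bitmap bitmap) {
        // Builder entry point; generate(listener) runs the extraction off the UI thread.
        Palette.from(bitmap).generate(new Palette.PaletteAsyncListener() {
            @Override
            public void onGenerated(Palette palette) {
                // Color.BLACK is the fallback when no vibrant swatch was found.
                colorView.setBackgroundColor(palette.getVibrantColor(Color.BLACK));
            }
        });
    }
}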
jsenko/pnc
moduleconfig/src/main/java/org/jboss/pnc/common/json/moduleconfig/OpenshiftEnvironmentDriverModuleConfig.java
5960
/** * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.common.json.moduleconfig; import com.fasterxml.jackson.annotation.JsonProperty; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Configuration for DockerEnvironmentDriver * * @author <a href="mailto:matejonnet@gmail.com">Matej Lazar</a> * */ public class OpenshiftEnvironmentDriverModuleConfig extends EnvironmentDriverModuleConfigBase { private static final Logger log = LoggerFactory.getLogger(OpenshiftEnvironmentDriverModuleConfig.class); public static String MODULE_NAME = "openshift-environment-driver"; private String restEndpointUrl; private String buildAgentHost; private String buildAgentBindPath; private String executorThreadPoolSize; private String podNamespace; private String restAuthToken; private String containerPort; private boolean keepBuildAgentInstance; private boolean exposeBuildAgentOnPublicUrl; public OpenshiftEnvironmentDriverModuleConfig(@JsonProperty("restEndpointUrl") String restEndpointUrl, @JsonProperty("buildAgentHost") String buildAgentHost, @JsonProperty("imageId") String imageId, @JsonProperty("firewallAllowedDestinations") String firewallAllowedDestinations, @JsonProperty("proxyServer") String proxyServer, @JsonProperty("proxyPort") String proxyPort, @JsonProperty("nonProxyHosts") String nonProxyHosts, @JsonProperty("podNamespace") String podNamespace, @JsonProperty("buildAgentBindPath") String buildAgentBindPath, @JsonProperty("executorThreadPoolSize") String executorThreadPoolSize, @JsonProperty("restAuthToken") String restAuthToken, @JsonProperty("containerPort") String containerPort, @JsonProperty("workingDirectory") String workingDirectory, @JsonProperty("disabled") Boolean disabled, @JsonProperty("keepBuildAgentInstance") Boolean keepBuildAgentInstance, @JsonProperty("exposeBuildAgentOnPublicUrl") Boolean exposeBuildAgentOnPublicUrl) { super(imageId, firewallAllowedDestinations, proxyServer, proxyPort, nonProxyHosts,workingDirectory, disabled); this.restEndpointUrl = restEndpointUrl; this.buildAgentHost = buildAgentHost; this.buildAgentBindPath = buildAgentBindPath; this.executorThreadPoolSize = executorThreadPoolSize; this.podNamespace = podNamespace; this.restAuthToken = restAuthToken; this.containerPort = containerPort; this.keepBuildAgentInstance = keepBuildAgentInstance != null ? keepBuildAgentInstance: false; this.exposeBuildAgentOnPublicUrl = exposeBuildAgentOnPublicUrl != null ? 
exposeBuildAgentOnPublicUrl: false; log.debug("Created new instance {}", toString()); } public String getRestEndpointUrl() { return restEndpointUrl; } public String getBuildAgentHost() { return buildAgentHost; } public String getPncNamespace() { return podNamespace; } public String getRestAuthToken() { return restAuthToken; } public String getContainerPort() { return containerPort; } public String getBuildAgentBindPath() { return buildAgentBindPath; } public String getExecutorThreadPoolSize() { return executorThreadPoolSize; } public boolean getKeepBuildAgentInstance() { return keepBuildAgentInstance; } public boolean getExposeBuildAgentOnPublicUrl() { return exposeBuildAgentOnPublicUrl; } @Override public String toString() { return "OpenshiftEnvironmentDriverModuleConfig{" + "restEndpointUrl='" + restEndpointUrl + '\'' + ", imageId='" + imageId + '\'' + ", firewallAllowedDestinations='" + firewallAllowedDestinations + '\'' + ", proxyServer='" + proxyServer + '\'' + ", proxyPort='" + proxyPort + '\'' + ", nonProxyHosts='" + nonProxyHosts + '\'' + ", podNamespace='" + podNamespace + '\'' + ", buildAgentHost='" + buildAgentHost + '\'' + ", buildAgentBindPath='" + buildAgentBindPath + '\'' + ", executorThreadPoolSize='" + executorThreadPoolSize + '\'' + ", restAuthToken= HIDDEN " + ", containerPort='" + containerPort + '\'' + ", disabled='" + disabled + '\'' + ", keepBuildAgentInstance='" + keepBuildAgentInstance + '\'' + ", exposeBuildAgentOnPublicUrl='" + exposeBuildAgentOnPublicUrl + '\'' + '}'; } }
apache-2.0
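The module config above is populated entirely through its annotated constructor, with Jackson matching JSON keys to the @JsonProperty parameter names and the Boolean arguments falling back to false when absent. The snippet below is a small, self-contained sketch of that binding style using plain jackson-databind; the class name and JSON are invented, and the explicit @JsonCreator is an assumption (the PNC configuration loader may wire the constructor differently).

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ConstructorBindingSketch {

    static class SampleDriverConfig {
        final String restEndpointUrl;
        final boolean disabled;

        @JsonCreator
        public SampleDriverConfig(@JsonProperty("restEndpointUrl") String restEndpointUrl,
                                  @JsonProperty("disabled") Boolean disabled) {
            this.restEndpointUrl = restEndpointUrl;
            // Same null-guard idiom as the module config: an absent flag means false.
            this.disabled = disabled != null ? disabled : false;
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        SampleDriverConfig cfg = mapper.readValue(
                "{\"restEndpointUrl\": \"https://openshift.example:8443\"}",
                SampleDriverConfig.class);
        System.out.println(cfg.restEndpointUrl + ", disabled=" + cfg.disabled);
    }
}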
brettwooldridge/buck
src/com/facebook/buck/android/AbstractModuleInfo.java
1773
/* * Copyright 2018-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import com.facebook.buck.core.util.immutables.BuckStyleImmutable; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import java.nio.file.Path; import javax.annotation.Nullable; import org.immutables.value.Value; @BuckStyleImmutable @Value.Immutable(copy = false) /** This class contains files needed to build a apk module within a app bundle. */ public abstract class AbstractModuleInfo { @Value.Parameter public abstract String getModuleName(); @Value.Parameter @Nullable public abstract Path getResourceApk(); @Value.Parameter public abstract ImmutableSet<Path> getDexFile(); @Value.Parameter public abstract ImmutableMap<Path, String> getAssetDirectories(); @Value.Parameter @Nullable public abstract Path getTempAssets(); @Value.Parameter public abstract ImmutableSet<Path> getNativeLibraryDirectories(); @Value.Parameter @Nullable public abstract Path getTempNatives(); @Value.Parameter public abstract ImmutableSet<Path> getZipFiles(); @Value.Parameter public abstract ImmutableSet<Path> getJarFilesThatMayContainResources(); }
apache-2.0
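AbstractModuleInfo above is only a specification: the Immutables annotation processor reads @Value.Immutable together with the @Value.Parameter accessors and generates the concrete value class at compile time (Buck's @BuckStyleImmutable controls the generated name). The toy example below, with an invented AbstractPoint type, shows the same pattern under the default Immutables style; the generated-class usage is left as a comment because that class only exists after annotation processing runs.

import org.immutables.value.Value;

// Hypothetical value-type specification, unrelated to the Buck sources above.
@Value.Immutable
public abstract class AbstractPoint {

    @Value.Parameter
    public abstract int x();

    @Value.Parameter
    public abstract int y();
}

// With the default style, the processor emits ImmutablePoint, and the
// @Value.Parameter accessors become positional arguments of its of(...) factory:
//
//   ImmutablePoint p = ImmutablePoint.of(3, 4);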
Wogan/monix
monix-reactive/shared/src/main/scala/monix/reactive/subjects/ReplaySubject.scala
7786
/* * Copyright (c) 2014-2018 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.reactive.subjects import monix.execution.Ack.{Continue, Stop} import monix.execution.{Ack, Cancelable} import monix.reactive.Observable import monix.reactive.internal.util.PromiseCounter import monix.reactive.observers.{ConnectableSubscriber, Subscriber} import monix.execution.atomic.Atomic import scala.util.control.NonFatal import scala.annotation.tailrec import scala.collection.immutable.Queue import scala.concurrent.Future /** `ReplaySubject` emits to any observer all of the items that were emitted * by the source, regardless of when the observer subscribes. */ final class ReplaySubject[A] private (initialState: ReplaySubject.State[A]) extends Subject[A,A] { self => private[this] val stateRef = Atomic(initialState) def size: Int = stateRef.get.subscribers.size @tailrec def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable = { def streamOnDone(buffer: Iterable[A], errorThrown: Throwable): Cancelable = { implicit val s = subscriber.scheduler Observable.fromIterable(buffer).unsafeSubscribeFn(new Subscriber[A] { implicit val scheduler = subscriber.scheduler def onNext(elem: A) = subscriber.onNext(elem) def onError(ex: Throwable) = subscriber.onError(ex) def onComplete() = if (errorThrown != null) subscriber.onError(errorThrown) else subscriber.onComplete() }) } val state = stateRef.get val buffer = state.buffer if (state.isDone) { // fast path streamOnDone(buffer, state.errorThrown) } else { val c = ConnectableSubscriber(subscriber) val newState = state.addNewSubscriber(c) if (stateRef.compareAndSet(state, newState)) { c.pushFirstAll(buffer) import subscriber.scheduler val connecting = c.connect() connecting.syncOnStopOrFailure(_ => removeSubscriber(c)) Cancelable { () => try removeSubscriber(c) finally connecting.cancel() } } else { // retry unsafeSubscribeFn(subscriber) } } } @tailrec def onNext(elem: A): Future[Ack] = { val state = stateRef.get if (state.isDone) Stop else { val newState = state.appendElem(elem) if (!stateRef.compareAndSet(state, newState)) { onNext(elem) // retry } else { val iterator = state.subscribers.iterator // counter that's only used when we go async, hence the null var result: PromiseCounter[Continue.type] = null while (iterator.hasNext) { val subscriber = iterator.next() // using the scheduler defined by each subscriber import subscriber.scheduler val ack = try subscriber.onNext(elem) catch { case ex if NonFatal(ex) => Future.failed(ex) } // if execution is synchronous, takes the fast-path if (ack.isCompleted) { // subscriber canceled or triggered an error? 
then remove if (ack != Continue && ack.value.get != Continue.AsSuccess) removeSubscriber(subscriber) } else { // going async, so we've got to count active futures for final Ack // the counter starts from 1 because zero implies isCompleted if (result == null) result = PromiseCounter(Continue, 1) result.acquire() ack.onComplete { case Continue.AsSuccess => result.countdown() case _ => // subscriber canceled or triggered an error? then remove removeSubscriber(subscriber) result.countdown() } } } // has fast-path for completely synchronous invocation if (result == null) Continue else { result.countdown() result.future } } } } override def onError(ex: Throwable): Unit = onCompleteOrError(ex) override def onComplete(): Unit = onCompleteOrError(null) @tailrec private def onCompleteOrError(ex: Throwable): Unit = { val state = stateRef.get if (!state.isDone) { if (!stateRef.compareAndSet(state, state.markDone(ex))) onCompleteOrError(ex) else { val iterator = state.subscribers.iterator while (iterator.hasNext) { val ref = iterator.next() if (ex != null) ref.onError(ex) else ref.onComplete() } } } } @tailrec private def removeSubscriber(s: ConnectableSubscriber[A]): Unit = { val state = stateRef.get val newState = state.removeSubscriber(s) if (!stateRef.compareAndSet(state, newState)) removeSubscriber(s) } } object ReplaySubject { /** Creates an unbounded replay subject. */ def apply[A](initial: A*): ReplaySubject[A] = create(initial) /** Creates an unbounded replay subject. */ def create[A](initial: Seq[A]): ReplaySubject[A] = new ReplaySubject[A](State[A](initial.toVector, 0)) /** Creates a size-bounded replay subject. * * In this setting, the ReplaySubject holds at most size items in its * internal buffer and discards the oldest item. * * @param capacity is the maximum size of the internal buffer */ def createLimited[A](capacity: Int): ReplaySubject[A] = { require(capacity > 0, "capacity must be strictly positive") new ReplaySubject[A](State[A](Queue.empty, capacity)) } /** Creates a size-bounded replay subject, prepopulated. * * In this setting, the ReplaySubject holds at most size items in its * internal buffer and discards the oldest item. * * @param capacity is the maximum size of the internal buffer * @param initial is an initial sequence of elements to prepopulate the buffer */ def createLimited[A](capacity: Int, initial: Seq[A]): ReplaySubject[A] = { require(capacity > 0, "capacity must be strictly positive") val elems = initial.takeRight(capacity) new ReplaySubject[A](State[A](Queue(elems:_*), capacity)) } /** Internal state for [[monix.reactive.subjects.ReplaySubject]] */ private final case class State[A]( buffer: Seq[A], capacity: Int, subscribers: Set[ConnectableSubscriber[A]] = Set.empty[ConnectableSubscriber[A]], length: Int = 0, isDone: Boolean = false, errorThrown: Throwable = null) { def appendElem(elem: A): State[A] = { if (capacity == 0) copy(buffer = buffer :+ elem) else if (length >= capacity) copy(buffer = buffer.tail :+ elem) else copy(buffer = buffer :+ elem, length = length + 1) } def addNewSubscriber(s: ConnectableSubscriber[A]): State[A] = copy(subscribers = subscribers + s) def removeSubscriber(toRemove: ConnectableSubscriber[A]): State[A] = { val newSet = subscribers - toRemove copy(subscribers = newSet) } def markDone(ex: Throwable): State[A] = { copy(subscribers = Set.empty, isDone = true, errorThrown = ex) } } }
apache-2.0
thexerteproject/xerteonlinetoolkits
preview.php
3450
<?php /** * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * * preview page, brings up a preview page for the editor to see their changes * * @author Patrick Lockley * @version 1.0 * @package */ require_once(dirname(__FILE__) . "/config.php"); _load_language_file("/preview.inc"); require $xerte_toolkits_site->php_library_path . "screen_size_library.php"; require $xerte_toolkits_site->php_library_path . "template_status.php"; require $xerte_toolkits_site->php_library_path . "user_library.php"; /* * Check the ID is numeric */ if(isset($_SESSION['toolkits_logon_id'])) { if(is_numeric($_GET['template_id'])) { $safe_template_id = (int) $_GET['template_id']; /* * Standard query */ $query_for_preview_content = "select otd.template_name, otd.parent_template, ld.username, otd.template_framework, tr.user_id, tr.folder, tr.template_id, td.access_to_whom, td.date_modified, td.date_created, td.number_of_uses, td.extra_flags,"; $query_for_preview_content .= "td.tsugi_published, td.tsugi_xapi_enabled, td.tsugi_xapi_endpoint, td.tsugi_xapi_key, td.tsugi_xapi_secret"; $query_for_preview_content .= " from " . $xerte_toolkits_site->database_table_prefix . "originaltemplatesdetails otd, " . $xerte_toolkits_site->database_table_prefix . "templaterights tr, " . $xerte_toolkits_site->database_table_prefix . "templatedetails td, " . $xerte_toolkits_site->database_table_prefix . "logindetails ld"; $query_for_preview_content .= " where td.template_type_id = otd.template_type_id and td.creator_id = ld.login_id and tr.template_id = td.template_id and tr.template_id=" . $safe_template_id . " and role='creator'"; $row = db_query_one($query_for_preview_content); if(!empty($row)) { // get their username from the db which matches their login_id from the $_SESSION // ???? This is just the same user as in the previous query, NOT from the session. WHY? //$row_username = db_query_one("select username from {$xerte_toolkits_site->database_table_prefix}logindetails where login_id=?", array($row['user_id'])); require $xerte_toolkits_site->root_file_path . "modules/" . $row['template_framework'] . "/preview.php"; // is there a matching template? // if they're an admin or have rights to see the template, then show it. if(is_user_admin() || has_rights_to_this_template($row['template_id'], $_SESSION['toolkits_logon_id'])){ show_preview_code($row); exit(0); } } }else{ echo PREVIEW_RESOURCE_FAIL; } }else{ echo PREVIEW_RESOURCE_FAIL; }
apache-2.0
srottem/indy-sdk
vcx/wrappers/python3/tests/conftest.py
641
import logging import pytest import time from vcx.api.vcx_init import vcx_init from vcx.common import shutdown as vcx_shutdown flag = False logging.basicConfig(level=logging.DEBUG) @pytest.mark.asyncio @pytest.fixture async def vcx_init_test_mode(): global flag if not flag: await vcx_init('ENABLE_TEST_MODE') flag = True @pytest.fixture async def cleanup(): def _shutdown(erase): global flag vcx_shutdown(erase) if flag: flag = False return _shutdown @pytest.fixture(scope='session', autouse=True) def wait_libindy(): yield time.sleep(1) # FIXME IS-1060
apache-2.0
mohanaraosv/commons-vfs
core/src/main/java/org/apache/commons/vfs2/impl/DefaultFileContentInfo.java
1466
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2.impl; import org.apache.commons.vfs2.FileContentInfo; /** * The default file content information. */ public class DefaultFileContentInfo implements FileContentInfo { private final String contentType; private final String contentEncoding; public DefaultFileContentInfo(final String contentType, final String contentEncoding) { this.contentType = contentType; this.contentEncoding = contentEncoding; } @Override public String getContentType() { return contentType; } @Override public String getContentEncoding() { return contentEncoding; } }
apache-2.0
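A minimal usage sketch for the DefaultFileContentInfo class in the record above; the media type and encoding values are arbitrary examples.

import org.apache.commons.vfs2.FileContentInfo;
import org.apache.commons.vfs2.impl.DefaultFileContentInfo;

public class DefaultFileContentInfoExample {
    public static void main(String[] args) {
        // Wrap a content type and encoding in the immutable holder class.
        FileContentInfo info = new DefaultFileContentInfo("text/html", "UTF-8");
        System.out.println(info.getContentType());     // text/html
        System.out.println(info.getContentEncoding()); // UTF-8
    }
}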
gameontext/gameon-mediator
mediator-app/src/main/java/org/gameontext/mediator/events/MediatorEvents.java
4556
/******************************************************************************* * Copyright (c) 2016 IBM Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package org.gameontext.mediator.events; import java.io.IOException; import java.util.logging.Level; import javax.annotation.Resource; import javax.inject.Inject; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.gameontext.mediator.Log; import org.gameontext.mediator.kafka.GameOnEvent; import org.gameontext.mediator.kafka.KafkaRxJavaObservable; import rx.Subscription; public class MediatorEvents { public interface PlayerEventHandler { public void playerUpdated(String userId, String userName, String favoriteColor); public void locationUpdated(String userId, String newLocation); // add additional methods as required for other event types... } /** * The uuid of the server. * * @see {@code serverUuid} in * {@code /mediator-wlpcfg/servers/gameon-mediator/server.xml} */ @Resource(lookup = "serverUuid") String SERVER_UUID; @Inject KafkaRxJavaObservable kafka; public EventSubscription subscribeToPlayerEvents(String userId, PlayerEventHandler peh) { Subscription subscription = kafka.consume().filter(event -> "playerEvents".equals(event.getTopic())) .filter(event -> userId.equals(event.getKey())).subscribe(event -> handlePlayerEvent(event, peh)); return new EventSubscription(subscription); } // Map events into player event handler callbacks. private void handlePlayerEvent(GameOnEvent goe, PlayerEventHandler peh) { ObjectMapper om = new ObjectMapper(); JsonNode tree; try { // the value in the GameOnEvent is JSON, with a type field that // dictates the content. tree = om.readTree(goe.getValue()); String type = tree.get("type").asText(); // current known values for type.. may change if we start using more // refined events. switch (type) { case "UPDATE": { // update(_*) and create, have the player json as a value under // the key 'player' this may change, to at least obscure/remove // restricted info like apikey but for now, this is ok while we // figure out events, since messagehub is not webfacing. // get the player json, and parse it to a JsonNode JsonNode player = tree.get("player"); // grab the name field from the json.. String username = player.get("name").asText(); String color = player.get("favoriteColor").asText(); peh.playerUpdated(goe.getKey(), username, color); break; } case "DELETE": { // note JSON only has id field.. rest is already deleted. 
break; } case "UPDATE_LOCATION": { JsonNode player = tree.get("player"); String location = player.get("location").asText(); if ( tree.has("origin") ){ String origin = tree.get("origin").asText(); if ( SERVER_UUID.equals(origin) ) { Log.log(Level.FINER, this, "Skipping location change event", goe); break; // skip location change events we sent } } peh.locationUpdated(goe.getKey(), location); break; } case "UPDATE_APIKEY": { break; } case "CREATE": { break; } default: break; } } catch (IOException e) { Log.log(Level.SEVERE, this, "Error parsing event", e); } } }
apache-2.0
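A sketch of how a caller might consume the MediatorEvents API in the record above: implement the nested PlayerEventHandler and keep the returned subscription. The user id is invented, the handler only logs, and EventSubscription is assumed to live in the same events package as MediatorEvents.

import org.gameontext.mediator.events.EventSubscription;
import org.gameontext.mediator.events.MediatorEvents;
import org.gameontext.mediator.events.MediatorEvents.PlayerEventHandler;

public class PlayerEventListenerSketch {

    // In the mediator this would normally be CDI-injected alongside MediatorEvents.
    private final MediatorEvents events;

    public PlayerEventListenerSketch(MediatorEvents events) {
        this.events = events;
    }

    public EventSubscription watch(String userId) {
        PlayerEventHandler handler = new PlayerEventHandler() {
            @Override
            public void playerUpdated(String id, String userName, String favoriteColor) {
                System.out.println(id + " is now " + userName + " (" + favoriteColor + ")");
            }

            @Override
            public void locationUpdated(String id, String newLocation) {
                System.out.println(id + " moved to " + newLocation);
            }
        };
        // Only playerEvents records keyed by this user id reach the handler.
        return events.subscribeToPlayerEvents(userId, handler);
    }
}

The returned EventSubscription is what a caller would hold on to for later cleanup of the underlying Kafka subscription.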
esl/Smack
smack-extensions/src/test/java/org/jivesoftware/util/ConnectionUtils.java
5239
/** * * Copyright the original author or authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.util; import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.isA; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import org.jivesoftware.smack.StanzaCollector; import org.jivesoftware.smack.SmackException; import org.jivesoftware.smack.XMPPConnection; import org.jivesoftware.smack.XMPPException.XMPPErrorException; import org.jivesoftware.smack.filter.StanzaFilter; import org.jivesoftware.smack.packet.IQ; import org.jivesoftware.smack.packet.Stanza; import org.jivesoftware.smackx.disco.ServiceDiscoveryManager; import org.jxmpp.jid.DomainBareJid; import org.jxmpp.jid.EntityFullJid; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; /** * A collection of utility methods to create mocked XMPP connections. * * @author Henning Staib */ public class ConnectionUtils { /** * Creates a mocked XMPP connection that stores every stanza(/packet) that is sent over this * connection in the given protocol instance and returns the predefined answer packets * from the protocol instance. * <p> * This mocked connection can be used to collect packets that require a reply using a * StanzaCollector. 
* * <pre> * <code> * StanzaCollector collector = connection.createStanzaCollector(new PacketFilter()); * connection.sendStanza(packet); * Stanza(/Packet) reply = collector.nextResult(); * </code> * </pre> * * @param protocol protocol helper containing answer packets * @param initiatorJID the user associated to the XMPP connection * @param xmppServer the XMPP server associated to the XMPP connection * @return a mocked XMPP connection * @throws SmackException * @throws XMPPErrorException * @throws InterruptedException */ public static XMPPConnection createMockedConnection(final Protocol protocol, EntityFullJid initiatorJID, DomainBareJid xmppServer) throws SmackException, XMPPErrorException, InterruptedException { // mock XMPP connection XMPPConnection connection = mock(XMPPConnection.class); when(connection.getUser()).thenReturn(initiatorJID); when(connection.getXMPPServiceDomain()).thenReturn(xmppServer); // mock packet collector final StanzaCollector collector = mock(StanzaCollector.class); when(connection.createStanzaCollector(isA(StanzaFilter.class))).thenReturn( collector); Answer<StanzaCollector> collectorAndSend = new Answer<StanzaCollector>() { @Override public StanzaCollector answer(InvocationOnMock invocation) throws Throwable { Stanza packet = (Stanza) invocation.getArguments()[0]; protocol.getRequests().add(packet); return collector; } }; when(connection.createStanzaCollectorAndSend(isA(IQ.class))).thenAnswer(collectorAndSend); // mock send method Answer<Object> addIncoming = new Answer<Object>() { @Override public Object answer(InvocationOnMock invocation) throws Throwable { protocol.getRequests().add((Stanza) invocation.getArguments()[0]); return null; } }; doAnswer(addIncoming).when(connection).sendStanza(isA(Stanza.class)); // mock receive methods Answer<Stanza> answer = new Answer<Stanza>() { @Override public Stanza answer(InvocationOnMock invocation) throws Throwable { return protocol.getResponses().poll(); } }; when(collector.nextResult(anyInt())).thenAnswer(answer); when(collector.nextResult()).thenAnswer(answer); Answer<Stanza> answerOrThrow = new Answer<Stanza>() { @Override public Stanza answer(InvocationOnMock invocation) throws Throwable { Stanza packet = protocol.getResponses().poll(); if (packet == null) return packet; XMPPErrorException.ifHasErrorThenThrow(packet); return packet; } }; when(collector.nextResultOrThrow()).thenAnswer(answerOrThrow); when(collector.nextResultOrThrow(anyLong())).thenAnswer(answerOrThrow); // initialize service discovery manager for this connection ServiceDiscoveryManager.getInstanceFor(connection); return connection; } }
apache-2.0
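A sketch of the flow the ConnectionUtils Javadoc above describes, assuming the companion Protocol test helper has a no-argument constructor and a getRequests() collection as implied by createMockedConnection, and that the Smack version in use still offers the no-argument Message constructor.

import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.util.ConnectionUtils;
import org.jivesoftware.util.Protocol;
import org.jxmpp.jid.impl.JidCreate;

public class MockedConnectionSketch {
    public static void main(String[] args) throws Exception {
        Protocol protocol = new Protocol();
        XMPPConnection connection = ConnectionUtils.createMockedConnection(
                protocol,
                JidCreate.entityFullFrom("romeo@montague.lit/orchard"),
                JidCreate.domainBareFrom("montague.lit"));

        // Anything sent through the mock ends up in the protocol's request collection,
        // while canned replies queued on protocol.getResponses() come back via collectors.
        connection.sendStanza(new Message());
        System.out.println("captured requests: " + protocol.getRequests().size());
    }
}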
apache/incubator-groovy
subprojects/groovy-sql/src/main/java/groovy/sql/SqlOrderByVisitor.java
1487
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package groovy.sql; import org.codehaus.groovy.ast.CodeVisitorSupport; import org.codehaus.groovy.ast.expr.PropertyExpression; import org.codehaus.groovy.ast.stmt.ReturnStatement; public class SqlOrderByVisitor extends CodeVisitorSupport { private final StringBuffer buffer = new StringBuffer(); public String getOrderBy() { return buffer.toString(); } @Override public void visitReturnStatement(ReturnStatement statement) { statement.getExpression().visit(this); } @Override public void visitPropertyExpression(PropertyExpression expression) { buffer.append(expression.getPropertyAsString()); } }
apache-2.0
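A sketch of what the SqlOrderByVisitor above produces when it walks a closure body such as { it -> return it.lastName }; the AST is built by hand here purely for illustration, whereas GroovySql would obtain it from a parsed closure.

import org.codehaus.groovy.ast.expr.PropertyExpression;
import org.codehaus.groovy.ast.expr.VariableExpression;
import org.codehaus.groovy.ast.stmt.ReturnStatement;
import groovy.sql.SqlOrderByVisitor;

public class SqlOrderByVisitorSketch {
    public static void main(String[] args) {
        // Hand-built equivalent of the closure body "return it.lastName".
        ReturnStatement stmt = new ReturnStatement(
                new PropertyExpression(new VariableExpression("it"), "lastName"));

        SqlOrderByVisitor visitor = new SqlOrderByVisitor();
        stmt.visit(visitor);                       // visitor appends the property name

        System.out.println("ORDER BY " + visitor.getOrderBy());  // ORDER BY lastName
    }
}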
brendanluu/CO2BLACK
Co2BlackUnityProject/Assets/Mapbox/Core/mapbox-sdk-cs/Map/TileCover.cs
6006
//----------------------------------------------------------------------- // <copyright file="TileCover.cs" company="Mapbox"> // Copyright (c) 2016 Mapbox. All rights reserved. // </copyright> //----------------------------------------------------------------------- namespace Mapbox.Map { using System; using System.Collections.Generic; using Mapbox.Utils; using UnityEngine; /// <summary> /// Helper funtions to get a tile cover, i.e. a set of tiles needed for /// covering a bounding box. /// </summary> public static class TileCover { /// <summary> Get a tile cover for the specified bounds and zoom. </summary> /// <param name="bounds"> Geographic bounding box.</param> /// <param name="zoom"> Zoom level. </param> /// <returns> The tile cover set. </returns> /// <example> /// Build a map of Colorado using TileCover: /// <code> /// var sw = new Vector2d(36.997749, -109.0524961); /// var ne = new Vector2d(41.0002612, -102.0609668); /// var coloradoBounds = new Vector2dBounds(sw, ne); /// var tileCover = TileCover.Get(coloradoBounds, 8); /// Console.Write("Tiles Needed: " + tileCover.Count); /// foreach (var id in tileCover) /// { /// var tile = new RasterTile(); /// var parameters = new Tile.Parameters(); /// parameters.Id = id; /// parameters.Fs = MapboxAccess.Instance; /// parameters.MapId = "mapbox://styles/mapbox/outdoors-v10"; /// tile.Initialize(parameters, (Action)(() =&gt; /// { /// // Place tiles and load textures. /// })); /// } /// </code> /// </example> public static HashSet<CanonicalTileId> Get(Vector2dBounds bounds, int zoom) { var tiles = new HashSet<CanonicalTileId>(); if (bounds.IsEmpty() || bounds.South > Constants.LatitudeMax || bounds.North < -Constants.LatitudeMax) { return tiles; } var hull = Vector2dBounds.FromCoordinates( new Vector2d(Math.Max(bounds.South, -Constants.LatitudeMax), bounds.West), new Vector2d(Math.Min(bounds.North, Constants.LatitudeMax), bounds.East)); var sw = CoordinateToTileId(hull.SouthWest, zoom); var ne = CoordinateToTileId(hull.NorthEast, zoom); // Scanlines. 
for (var x = sw.X; x <= ne.X; ++x) { for (var y = ne.Y; y <= sw.Y; ++y) { tiles.Add(new UnwrappedTileId(zoom, x, y).Canonical); } } return tiles; } public static HashSet<UnwrappedTileId> GetWithWebMerc(Vector2dBounds bounds, int zoom) { HashSet<UnwrappedTileId> tiles = new HashSet<UnwrappedTileId>(); HashSet<CanonicalTileId> canonicalTiles = new HashSet<CanonicalTileId>(); if (bounds.IsEmpty()) { return tiles; } //stay within WebMerc bounds Vector2d swWebMerc = new Vector2d(Math.Max(bounds.SouthWest.x, -Constants.WebMercMax), Math.Max(bounds.SouthWest.y, -Constants.WebMercMax)); Vector2d neWebMerc = new Vector2d(Math.Min(bounds.NorthEast.x, Constants.WebMercMax), Math.Min(bounds.NorthEast.y, Constants.WebMercMax)); //UnityEngine.Debug.LogFormat("swWebMerc:{0}/{1} neWebMerc:{2}/{3}", swWebMerc.x, swWebMerc.y, neWebMerc.x, neWebMerc.y); UnwrappedTileId swTile = WebMercatorToTileId(swWebMerc, zoom); UnwrappedTileId neTile = WebMercatorToTileId(neWebMerc, zoom); //UnityEngine.Debug.LogFormat("swTile:{0} neTile:{1}", swTile, neTile); for (int x = swTile.X; x <= neTile.X; x++) { for (int y = neTile.Y; y <= swTile.Y; y++) { UnwrappedTileId uwtid = new UnwrappedTileId(zoom, x, y); //hack: currently too many tiles are created at lower zoom levels //investigate formulas, this worked before if (!canonicalTiles.Contains(uwtid.Canonical)) { //Debug.LogFormat("TileCover.GetWithWebMerc: {0}/{1}/{2}", zoom, x, y); tiles.Add(uwtid); canonicalTiles.Add(uwtid.Canonical); } } } return tiles; } /// <summary> Converts a coordinate to a tile identifier. </summary> /// <param name="coord"> Geographic coordinate. </param> /// <param name="zoom"> Zoom level. </param> /// <returns>The to tile identifier.</returns> /// <example> /// Convert a geocoordinate to a TileId: /// <code> /// var unwrappedTileId = TileCover.CoordinateToTileId(new Vector2d(40.015, -105.2705), 18); /// Console.Write("UnwrappedTileId: " + unwrappedTileId.ToString()); /// </code> /// </example> public static UnwrappedTileId CoordinateToTileId(Vector2d coord, int zoom) { var lat = coord.x; var lng = coord.y; // See: http://wiki.openstreetmap.org/wiki/Slippy_map_tilenames var x = (int)Math.Floor((lng + 180.0) / 360.0 * Math.Pow(2.0, zoom)); var y = (int)Math.Floor((1.0 - Math.Log(Math.Tan(lat * Math.PI / 180.0) + 1.0 / Math.Cos(lat * Math.PI / 180.0)) / Math.PI) / 2.0 * Math.Pow(2.0, zoom)); return new UnwrappedTileId(zoom, x, y); } /// <summary> /// Converts a Web Mercator coordinate to a tile identifier. https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Derivation_of_tile_names /// </summary> /// <param name="webMerc">Web Mercator coordinate</param> /// <param name="zoom">Zoom level</param> /// <returns>The to tile identifier.</returns> public static UnwrappedTileId WebMercatorToTileId(Vector2d webMerc, int zoom) { // See: https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames#Derivation_of_tile_names double tileCount = Math.Pow(2, zoom); //this SDK defines Vector2d.x as latitude and Vector2d.y as longitude //same for WebMerc, so we have to flip x/y to make this formula work double dblX = webMerc.x / Constants.WebMercMax; double dblY = webMerc.y / Constants.WebMercMax; //dblX = 1 + dblX; //dblY = 1 - dblY; //dblX /= 2; //dblY /= 2; //dblX *= tileCount; //dblY *= tileCount; //int x = (int)Math.Floor(dblX); //int y = (int)Math.Floor(dblY); //return new UnwrappedTileId(zoom, x, y); int x = (int)Math.Floor((1 + dblX) / 2 * tileCount); int y = (int)Math.Floor((1 - dblY) / 2 * tileCount); return new UnwrappedTileId(zoom, x, y); } } }
apache-2.0
ammmze/swagger-springmvc
springfox-spi/src/main/java/springfox/documentation/spi/schema/contexts/ModelContext.java
6812
/* * * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * */ package springfox.documentation.spi.schema.contexts; import com.fasterxml.classmate.ResolvedType; import com.fasterxml.classmate.TypeResolver; import com.google.common.base.Objects; import springfox.documentation.builders.ModelBuilder; import springfox.documentation.spi.DocumentationType; import springfox.documentation.spi.schema.AlternateTypeProvider; import springfox.documentation.spi.schema.GenericTypeNamingStrategy; import java.lang.reflect.Type; import java.util.Set; import static com.google.common.collect.Sets.*; public class ModelContext { private final Type type; private final boolean returnType; private final DocumentationType documentationType; private final ModelContext parentContext; private final Set<ResolvedType> seenTypes = newHashSet(); private final ModelBuilder modelBuilder; private final AlternateTypeProvider alternateTypeProvider; private GenericTypeNamingStrategy genericNamingStrategy; ModelContext(Type type, boolean returnType, DocumentationType documentationType, AlternateTypeProvider alternateTypeProvider, GenericTypeNamingStrategy genericNamingStrategy) { this.documentationType = documentationType; this.alternateTypeProvider = alternateTypeProvider; this.genericNamingStrategy = genericNamingStrategy; this.parentContext = null; this.type = type; this.returnType = returnType; this.modelBuilder = new ModelBuilder(); } ModelContext(ModelContext parentContext, ResolvedType input) { this.parentContext = parentContext; this.type = input; this.returnType = parentContext.isReturnType(); this.documentationType = parentContext.getDocumentationType(); this.modelBuilder = new ModelBuilder(); this.alternateTypeProvider = parentContext.alternateTypeProvider; } /** * @return type behind this context */ public Type getType() { return type; } /** * @param resolver - type resolved * @return resolved type */ public ResolvedType resolvedType(TypeResolver resolver) { return resolver.resolve(getType()); } /** * @return is the context for a return type */ public boolean isReturnType() { return returnType; } /** * @return alternate type provider thats available to this context */ public AlternateTypeProvider getAlternateTypeProvider() { return alternateTypeProvider; } /** * @param resolved - type to find an alternate type for * @return alternate type for given resolved type */ public ResolvedType alternateFor(ResolvedType resolved) { return alternateTypeProvider.alternateFor(resolved); } /** * Convenience method to provide an new context for an input parameter * * @param type - type * @param documentationType - for documenation type * @param alternateTypeProvider - alternate type provider * @param genericNamingStrategy - how generic types should be named * @return new context */ public static ModelContext inputParam(Type type, DocumentationType documentationType, AlternateTypeProvider alternateTypeProvider, GenericTypeNamingStrategy genericNamingStrategy) { return new ModelContext(type, false, 
documentationType, alternateTypeProvider, genericNamingStrategy); } /** * Convenience method to provide an new context for an return parameter * * @param type - type * @param documentationType - for documenation type * @param alternateTypeProvider - alternate type provider * @param genericNamingStrategy - how generic types should be named * @return new context */ public static ModelContext returnValue(Type type, DocumentationType documentationType, AlternateTypeProvider alternateTypeProvider, GenericTypeNamingStrategy genericNamingStrategy) { return new ModelContext(type, true, documentationType, alternateTypeProvider, genericNamingStrategy); } /** * Convenience method to provide an new context for an input parameter * * @param input - context for given input * @return new context based on parent context for a given input */ public static ModelContext fromParent(ModelContext context, ResolvedType input) { return new ModelContext(context, input); } /** * Answers the question, has the given type been processed? * * @param resolvedType - type to check * @return true or false */ public boolean hasSeenBefore(ResolvedType resolvedType) { return seenTypes.contains(resolvedType) || seenTypes.contains(new TypeResolver().resolve(resolvedType.getErasedType())) || parentHasSeenBefore(resolvedType); } public DocumentationType getDocumentationType() { return documentationType; } /** * Answers the question, has the given type been processed by its parent context? * * @param resolvedType - type to check * @return true or false */ private boolean parentHasSeenBefore(ResolvedType resolvedType) { if (parentContext == null) { return false; } return parentContext.hasSeenBefore(resolvedType); } public GenericTypeNamingStrategy getGenericNamingStrategy() { if (parentContext == null) { return genericNamingStrategy; } return parentContext.getGenericNamingStrategy(); } public ModelBuilder getBuilder() { return modelBuilder; } public void seen(ResolvedType resolvedType) { seenTypes.add(resolvedType); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } ModelContext that = (ModelContext) o; return Objects.equal(type, that.type) && Objects.equal(documentationType, that.documentationType) && Objects.equal(returnType, that.returnType); } @Override public int hashCode() { return Objects.hashCode(type, documentationType, returnType); } }
apache-2.0
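A sketch of the intended use of the ModelContext factory methods above: create a root context for a return type, then derive child contexts with fromParent so the seen/hasSeenBefore bookkeeping can break cycles. DocumentationType.SWAGGER_2 and the way the alternate-type and naming collaborators are obtained are assumptions about the surrounding springfox configuration, so they are passed in rather than constructed here.

import java.lang.reflect.Type;

import com.fasterxml.classmate.ResolvedType;
import com.fasterxml.classmate.TypeResolver;

import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spi.schema.AlternateTypeProvider;
import springfox.documentation.spi.schema.GenericTypeNamingStrategy;
import springfox.documentation.spi.schema.contexts.ModelContext;

public class ModelContextSketch {

    // Builds a root context for a handler's return type and walks its type
    // parameters; collaborators are passed in because springfox configuration
    // owns their construction.
    public static void walkReturnType(Type returnType,
                                      TypeResolver resolver,
                                      AlternateTypeProvider alternates,
                                      GenericTypeNamingStrategy naming) {
        ModelContext root = ModelContext.returnValue(
                returnType, DocumentationType.SWAGGER_2, alternates, naming);
        walk(root, resolver);
    }

    private static void walk(ModelContext context, TypeResolver resolver) {
        ResolvedType resolved = context.resolvedType(resolver);
        if (context.hasSeenBefore(resolved)) {
            return;                               // cycle guard kept by the context chain
        }
        context.seen(resolved);
        for (ResolvedType member : resolved.getTypeParameters()) {
            // Child contexts share the parent's history and naming strategy.
            walk(ModelContext.fromParent(context, member), resolver);
        }
    }
}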
akkadotnet/Akka.Persistence.SqlServer
src/Akka.Persistence.SqlServer.Tests/Query/SqlServerEventsByPersistenceIdSpec.cs
2137
// ----------------------------------------------------------------------- // <copyright file="SqlServerEventsByPersistenceIdSpec.cs" company="Akka.NET Project"> // Copyright (C) 2013 - 2019 .NET Foundation <https://github.com/akkadotnet/akka.net> // </copyright> // ----------------------------------------------------------------------- using Akka.Configuration; using Akka.Persistence.Query; using Akka.Persistence.Query.Sql; using Akka.Persistence.TCK.Query; using Xunit; using Xunit.Abstractions; namespace Akka.Persistence.SqlServer.Tests.Query { [Collection("SqlServerSpec")] public class SqlServerEventsByPersistenceIdSpec : EventsByPersistenceIdSpec { public SqlServerEventsByPersistenceIdSpec(ITestOutputHelper output, SqlServerFixture fixture) : base( InitConfig(fixture), nameof(SqlServerEventsByPersistenceIdSpec), output) { ReadJournal = Sys.ReadJournalFor<SqlReadJournal>(SqlReadJournal.Identifier); } public static Config InitConfig(SqlServerFixture fixture) { DbUtils.Initialize(fixture.ConnectionString); return ConfigurationFactory.ParseString($@" akka.loglevel = INFO akka.test.single-expect-default = 10s akka.persistence.journal.plugin = ""akka.persistence.journal.sql-server"" akka.persistence.journal.sql-server {{ class = ""Akka.Persistence.SqlServer.Journal.SqlServerJournal, Akka.Persistence.SqlServer"" plugin-dispatcher = ""akka.actor.default-dispatcher"" table-name = EventJournal schema-name = dbo auto-initialize = on connection-string = ""{DbUtils.ConnectionString}"" refresh-interval = 1s }}") .WithFallback(SqlReadJournal.DefaultConfiguration()); } protected override void Dispose(bool disposing) { base.Dispose(disposing); DbUtils.Clean(); } } }
apache-2.0
ttislerdg/mrgeo
mrgeo-services/mrgeo-services-wcs/src/main/java/org/mrgeo/resources/wcs/WcsGenerator.java
21099
/* * Copyright 2009-2016 DigitalGlobe, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. * */ package org.mrgeo.resources.wcs; import org.mrgeo.core.MrGeoConstants; import org.mrgeo.core.MrGeoProperties; import org.mrgeo.data.DataProviderFactory; import org.mrgeo.data.ProviderProperties; import org.mrgeo.data.image.MrsImageDataProvider; import org.mrgeo.services.SecurityUtils; import org.mrgeo.services.Version; import org.mrgeo.services.mrspyramid.rendering.ImageHandlerFactory; import org.mrgeo.services.mrspyramid.rendering.ImageRenderer; import org.mrgeo.services.mrspyramid.rendering.ImageResponseWriter; import org.mrgeo.services.utils.DocumentUtils; import org.mrgeo.services.utils.RequestUtils; import org.mrgeo.services.wcs.DescribeCoverageDocumentGenerator; import org.mrgeo.services.wcs.WcsCapabilities; import org.mrgeo.utils.XmlUtils; import org.mrgeo.utils.tms.Bounds; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.CDATASection; import org.w3c.dom.Document; import org.w3c.dom.Element; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.core.*; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import java.awt.image.Raster; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintWriter; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @Path("/wcs") public class WcsGenerator { private static final Logger log = LoggerFactory.getLogger(WcsGenerator.class); public static final String WCS_VERSION = "1.1.0"; private static final String WCS_SERVICE = "wcs"; private Version version = new Version(WCS_VERSION); private static Map<Version, Document> capabilities = new HashMap<>(); private static UriInfo baseURI = null; static { if (MrGeoProperties.getInstance().getProperty(MrGeoConstants.MRGEO_WCS_CAPABILITIES_CACHE, "true").equals("true")) { new Thread() { public void run() { long sleeptime = 60 * 1000 * Integer.parseInt(MrGeoProperties.getInstance().getProperty(MrGeoConstants.MRGEO_WCS_CAPABILITIES_REFRESH, "5")); boolean stop = false; while (!stop) { try { for (Version version : capabilities.keySet()) { try { log.info("refreshing capabilities for version {}", version); ProviderProperties providerProperties = SecurityUtils.getProviderProperties(); Document doc = generateCapabilities(version, baseURI, providerProperties); capabilities.put(version, doc); } catch (ParserConfigurationException | IOException e) { e.printStackTrace(); } } Thread.sleep(sleeptime); } catch (InterruptedException e) { e.printStackTrace(); stop = true; } } } }.start(); } } @GET public Response doGet(@Context UriInfo uriInfo) { log.info("GET URI: {}", uriInfo.getRequestUri().toString()); return handleRequest(uriInfo); } @POST public Response doPost(@Context UriInfo uriInfo) { log.info("POST URI: {}", uriInfo.getRequestUri().toString()); return 
handleRequest(uriInfo); } private Response handleRequest(UriInfo uriInfo) { long start = System.currentTimeMillis(); baseURI = uriInfo; MultivaluedMap<String, String> allParams = uriInfo.getQueryParameters(); String request = getQueryParam(allParams, "request", "GetCapabilities"); ProviderProperties providerProperties = SecurityUtils.getProviderProperties(); try { String serviceName = getQueryParam(allParams, "service"); if (serviceName == null) { return writeError(Response.Status.BAD_REQUEST, "Missing required SERVICE parameter. Should be set to \"WCS\""); } if (!serviceName.equalsIgnoreCase("wcs")) { return writeError(Response.Status.BAD_REQUEST, "Invalid SERVICE parameter. Should be set to \"WCS\""); } if (request.equalsIgnoreCase("getcapabilities")) { return getCapabilities(uriInfo, allParams, providerProperties); } else if (request.equalsIgnoreCase("describecoverage")) { return describeCoverage(uriInfo, allParams, providerProperties); } else if (request.equalsIgnoreCase("getcoverage")) { return getCoverage(allParams, providerProperties); } return writeError(Response.Status.BAD_REQUEST, "Invalid request"); } finally { //if (log.isDebugEnabled()) { log.info("WCS request time: {}ms", (System.currentTimeMillis() - start)); // this can be resource intensive. System.gc(); final Runtime rt = Runtime.getRuntime(); log.info(String.format("WMS request memory: %.1fMB / %.1fMB\n", (rt.totalMemory() - rt .freeMemory()) / 1e6, rt.maxMemory() / 1e6)); } } } /** * Returns the value for the specified paramName case-insensitively. If the * parameter does not exist, it returns null. * * */ private String getQueryParam(MultivaluedMap<String, String> allParams, String paramName) { for (String key: allParams.keySet()) { if (key.equalsIgnoreCase(paramName)) { List<String> value = allParams.get(key); if (value.size() == 1) { return value.get(0); } } } return null; } private Response describeCoverage(UriInfo uriInfo, MultivaluedMap<String,String> allParams, final ProviderProperties providerProperties) { String versionStr = getQueryParam(allParams, "version", WCS_VERSION); version = new Version(versionStr); String[] layers; if (version.isLess("1.1.0")) { String layer = getQueryParam(allParams, "coverage"); if (layer == null) { return writeError(Response.Status.BAD_REQUEST, "Missing required COVERAGE parameter"); } layers = new String[]{layer}; } else { String layerStr = getQueryParam(allParams, "identifiers"); if (layerStr == null) { return writeError(Response.Status.BAD_REQUEST, "Missing required IDENTIFIERS parameter"); } layers = layerStr.split(","); } try { final DescribeCoverageDocumentGenerator docGen = new DescribeCoverageDocumentGenerator(); final Document doc = docGen.generateDoc(version, uriInfo.getRequestUri().toString(), layers); ByteArrayOutputStream xmlStream = new ByteArrayOutputStream(); final PrintWriter out = new PrintWriter(xmlStream); // DocumentUtils.checkForErrors(doc); DocumentUtils.writeDocument(doc, version, WCS_SERVICE, out); out.close(); return Response.ok(xmlStream.toString()).type(MediaType.APPLICATION_XML).build(); } catch (Exception e) { return writeError(Response.Status.BAD_REQUEST, e.getMessage()); } } private Response getCapabilities(UriInfo uriInfo, MultivaluedMap<String, String> allParams, ProviderProperties providerProperties) { // The versionParamName will be null if the request did not include the // version parameter. 
String acceptVersions = getQueryParam(allParams, "acceptversions", null); Version version = null; if (acceptVersions != null) { String[] versions = acceptVersions.split(","); for (String ver: versions) { if (version == null || version.isLess(ver)) { version = new Version(ver); } } } else { version = new Version(getQueryParam(allParams, "version", WCS_VERSION)); } try { Document doc; if (capabilities.containsKey(version)) { log.warn("*** cached!"); doc = capabilities.get(version); } else { log.warn("*** NOT cached!"); doc = generateCapabilities(version, uriInfo, providerProperties); capabilities.put(version, doc); } ByteArrayOutputStream xmlStream = new ByteArrayOutputStream(); final PrintWriter out = new PrintWriter(xmlStream); // DocumentUtils.checkForErrors(doc); DocumentUtils.writeDocument(doc, version, WCS_SERVICE, out); out.close(); return Response.ok(xmlStream.toString()).type(MediaType.APPLICATION_XML).build(); } catch (Exception e) { return writeError(Response.Status.BAD_REQUEST, e.getMessage()); } // return writeError(Response.Status.BAD_REQUEST, "Not Implemented"); } private static Document generateCapabilities(Version version, UriInfo uriInfo, ProviderProperties providerProperties) throws IOException, ParserConfigurationException, InterruptedException { final WcsCapabilities docGen = new WcsCapabilities(); // The following code re-builds the request URI to include in the GetCapabilities // output. It sorts the parameters so that they are included in the URI in a // predictable order. The reason for this is so that test cases can compare XML // golden files against the XML generated here without worrying about parameters // shifting locations in the URI. // Set<String> keys = uriInfo.getQueryParameters().keySet(); // String[] sortedKeys = new String[keys.size()]; // keys.toArray(sortedKeys); // Arrays.sort(sortedKeys); UriBuilder builder = uriInfo.getBaseUriBuilder().path(uriInfo.getPath()); return docGen.generateDoc(version, builder.build().toString() + "?", getPyramidFilesList(providerProperties)); } /* * Returns a list of all MrsPyramid version 2 data in the home data directory */ private static MrsImageDataProvider[] getPyramidFilesList( final ProviderProperties providerProperties) throws IOException { String[] images = DataProviderFactory.listImages(providerProperties); Arrays.sort(images); MrsImageDataProvider[] providers = new MrsImageDataProvider[images.length]; for (int i = 0; i < images.length; i++) { providers[i] = DataProviderFactory.getMrsImageDataProvider(images[i], DataProviderFactory.AccessMode.READ, providerProperties); } return providers; } private Response getCoverage(MultivaluedMap<String, String> allParams, ProviderProperties providerProperties) { // Get all of the query parameter values needed and validate them String versionStr = getQueryParam(allParams, "version", WCS_VERSION); version = new Version(versionStr); String layer; if (version.isLess("1.1.0")) { layer = getQueryParam(allParams, "coverage"); } else { layer = getQueryParam(allParams, "identifier"); } if (layer == null) { return writeError(Response.Status.BAD_REQUEST, "Missing required COVERAGE parameter"); } String crs; Bounds bounds = null; try { if (version.isLess("1.1.0")) { bounds = getBoundsParam(allParams, "bbox", bounds); } else { bounds = getBoundsParam(allParams, "boundingbox", bounds); } crs = getCrsParam(allParams); } catch (Exception e) { return writeError(Response.Status.BAD_REQUEST, e.getMessage()); } String format = getQueryParam(allParams, "format"); if (format == null) { return 
writeError(Response.Status.BAD_REQUEST, "Missing required FORMAT parameter"); } int width = getQueryParamAsInt(allParams, "width", -1); if (width < 0) { return writeError(Response.Status.BAD_REQUEST, "Missing required WIDTH parameter"); } else if (width == 0) { return writeError(Response.Status.BAD_REQUEST, "WIDTH parameter must be greater than 0"); } int height = getQueryParamAsInt(allParams, "height", -1); if (height < 0) { return writeError(Response.Status.BAD_REQUEST, "Missing required HEIGHT parameter"); } else if (height == 0) { return writeError(Response.Status.BAD_REQUEST, "HEIGHT parameter must be greater than 0"); } ImageRenderer renderer; try { renderer = (ImageRenderer) ImageHandlerFactory.getHandler(format, ImageRenderer.class); } catch (Exception e) { return writeError(Response.Status.BAD_REQUEST, e.getMessage()); } // Reproject bounds to EPSG:4326 if necessary try { bounds = RequestUtils.reprojectBounds(bounds, crs); } catch (Exception e) { return writeError(Response.Status.BAD_REQUEST, e.getMessage()); } // Return the resulting image try { log.info("Rendering " + layer); Raster result = renderer.renderImage(layer, bounds, width, height, providerProperties, crs); log.info("Generating response"); Response.ResponseBuilder builder = ((ImageResponseWriter) ImageHandlerFactory .getHandler(format, ImageResponseWriter.class)) .write(result, layer, bounds); log.info("Building and returning response"); return builder.build(); } catch (Exception e) { log.error("Unable to render the image in getCoverage", e); return writeError(Response.Status.BAD_REQUEST, e.getMessage()); } } /** * Returns the value for the specified paramName case-insensitively. If the * parameter does not exist, it returns defaultValue. * */ private String getQueryParam(MultivaluedMap<String, String> allParams, String paramName, String defaultValue) { String value = getQueryParam(allParams, paramName); if (value != null) { return value; } return defaultValue; } //private String getActualQueryParamName(MultivaluedMap<String, String> allParams, // String paramName) //{ // for (String key: allParams.keySet()) // { // if (key.equalsIgnoreCase(paramName)) // { // return key; // } // } // return null; //} /** * Returns the int value for the specified paramName case-insensitively. If * the parameter value exists, but is not an int, it throws a NumberFormatException. * If it does not exist, it returns defaultValue. * */ private int getQueryParamAsInt(MultivaluedMap<String, String> allParams, String paramName, int defaultValue) throws NumberFormatException { for (String key: allParams.keySet()) { if (key.equalsIgnoreCase(paramName)) { List<String> value = allParams.get(key); if (value.size() == 1) { return Integer.parseInt(value.get(0)); } } } return defaultValue; } /** * Returns the int value for the specified paramName case-insensitively. If * the parameter value exists, but is not an int, it throws a NumberFormatException. * If it does not exist, it returns defaultValue. 
* */ //private double getQueryParamAsDouble(MultivaluedMap<String, String> allParams, // String paramName, // double defaultValue) // throws NumberFormatException //{ // for (String key: allParams.keySet()) // { // if (key.equalsIgnoreCase(paramName)) // { // List<String> value = allParams.get(key); // if (value.size() == 1) // { // return Double.parseDouble(value.get(0)); // } // } // } // return defaultValue; //} private Bounds getBoundsParam(MultivaluedMap<String, String> allParams, String paramName, Bounds bounds) throws Exception { String bbox = getQueryParam(allParams, paramName); if (bbox == null) { throw new Exception("Missing required " + paramName.toUpperCase() + " parameter"); } String[] bboxComponents = bbox.split(","); if (!(bboxComponents.length == 5 || bboxComponents.length == 4)) { throw new Exception("Invalid \" + paramName.toUpperCase() + \" parameter. Should contain minX, minY, maxX, maxY"); } double[] bboxValues = new double[4]; for (int index=0; index < bboxComponents.length; index++) { try { bboxValues[index] = Double.parseDouble(bboxComponents[index]); } catch (NumberFormatException nfe) { throw new Exception("Invalid BBOX value: " + bboxComponents[index]); } } if (bounds == null) { return new Bounds(bboxValues[0], bboxValues[1], bboxValues[2], bboxValues[3]); } return bounds.expand(bboxValues[0], bboxValues[1], bboxValues[2], bboxValues[3]); } private String getCrsParam(MultivaluedMap<String, String> allParams) throws Exception { String crs = getQueryParam(allParams, "crs"); if (crs == null || crs.isEmpty()) { // CRS can also be buried in bbox (in earlier versions of the spec) String bbox = getQueryParam(allParams, "bbox"); if (bbox != null) { String[] bboxComponents = bbox.split(","); if (bboxComponents.length == 5) { return bboxComponents[4]; } } return null; } else { return crs; } } /* * Writes OGC spec error messages to the response */ //private Response writeError(Response.Status httpStatus, final Exception e) //{ // try // { // Document doc; // final DocumentBuilderFactory dBF = DocumentBuilderFactory.newInstance(); // final DocumentBuilder builder; // builder = dBF.newDocumentBuilder(); // doc = builder.newDocument(); // // final Element ser = doc.createElement("ServiceExceptionReport"); // doc.appendChild(ser); // ser.setAttribute("version", WCS_VERSION); // final Element se = XmlUtils.createElement(ser, "ServiceException"); // String msg = e.getLocalizedMessage(); // if (msg == null || msg.isEmpty()) // { // msg = e.getClass().getName(); // } // final ByteArrayOutputStream strm = new ByteArrayOutputStream(); // e.printStackTrace(new PrintStream(strm)); // CDATASection msgNode = doc.createCDATASection(strm.toString()); // se.appendChild(msgNode); // final ByteArrayOutputStream xmlStream = new ByteArrayOutputStream(); // final PrintWriter out = new PrintWriter(xmlStream); // DocumentUtils.writeDocument(doc, version, WCS_SERVICE, out); // out.close(); // return Response // .status(httpStatus) // .header("Content-Type", MediaType.TEXT_XML) // .entity(xmlStream.toString()) // .build(); // } // catch (ParserConfigurationException e1) // { // } // catch (TransformerException e1) // { // } // // Fallback in case there is an XML exception above // return Response.status(httpStatus).entity(e.getLocalizedMessage()).build(); //} /* * Writes OGC spec error messages to the response */ private Response writeError(Response.Status httpStatus, final String msg) { try { Document doc; final DocumentBuilderFactory dBF = DocumentBuilderFactory.newInstance(); final 
DocumentBuilder builder = dBF.newDocumentBuilder(); doc = builder.newDocument(); final Element ser = doc.createElement("ServiceExceptionReport"); doc.appendChild(ser); ser.setAttribute("version", WCS_VERSION); final Element se = XmlUtils.createElement(ser, "ServiceException"); CDATASection msgNode = doc.createCDATASection(msg); se.appendChild(msgNode); final ByteArrayOutputStream xmlStream = new ByteArrayOutputStream(); final PrintWriter out = new PrintWriter(xmlStream); DocumentUtils.writeDocument(doc, version, WCS_SERVICE, out); out.close(); return Response .status(httpStatus) .header("Content-Type", MediaType.TEXT_XML) .entity(xmlStream.toString()) .build(); } catch (ParserConfigurationException | TransformerException ignored) { } // Fallback in case there is an XML exception above return Response.status(httpStatus).entity(msg).build(); } /* * Writes OGC spec error messages to the response */ //private Response writeError(Response.Status httpStatus, final String code, final String msg) //{ // try // { // Document doc; // final DocumentBuilderFactory dBF = DocumentBuilderFactory.newInstance(); // final DocumentBuilder builder = dBF.newDocumentBuilder(); // doc = builder.newDocument(); // // final Element ser = doc.createElement("ServiceExceptionReport"); // doc.appendChild(ser); // ser.setAttribute("version", WCS_VERSION); // final Element se = XmlUtils.createElement(ser, "ServiceException"); // se.setAttribute("code", code); // CDATASection msgNode = doc.createCDATASection(msg); // se.appendChild(msgNode); // final ByteArrayOutputStream xmlStream = new ByteArrayOutputStream(); // final PrintWriter out = new PrintWriter(xmlStream); // DocumentUtils.writeDocument(doc, version, WCS_SERVICE, out); // out.close(); // return Response // .status(httpStatus) // .header("Content-Type", MediaType.TEXT_XML) // .entity(xmlStream.toString()) // .build(); // } // catch (ParserConfigurationException e1) // { // } // catch (TransformerException e1) // { // } // // Fallback in case there is an XML exception above // return Response.status(httpStatus).entity(msg).build(); //} }
apache-2.0
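A sketch of calling the WCS endpoint above from a plain HTTP client; WcsGenerator serves GET requests on the /wcs path, and a GetCapabilities call needs only the SERVICE, REQUEST and VERSION query parameters. The host, port and context path below are placeholders for whatever deployment prefix the MrGeo services war is mounted under.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class WcsGetCapabilitiesSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder base URL; adjust to the actual deployment.
        String base = "http://localhost:8080/mrgeo-services/wcs";
        URL url = new URL(base + "?SERVICE=WCS&REQUEST=GetCapabilities&VERSION=1.1.0");

        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");

        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);   // WCS capabilities XML document
            }
        }
    }
}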
stephanrauh/ExploringAngular
Blog02/chessboardui.ts
3052
import {Injectable} from 'angular2/angular2'; import {Move} from './move'; import {Moves} from './moves'; import {Chessboard} from './chessboard'; import {Suggestor} from './suggestor'; export module ChessEngineAPI { @Injectable() export class ChessboardUI { selectedPieceRow: number; selectedPieceCol: number; isPieceSelected: boolean = false; private chessboard: Chessboard = new Chessboard(new Array<Move>()); get fields(): number[][] { return this.chessboard.fields; } get isWhitePlaying(): boolean { return this.chessboard.isWhitePlaying } get capturedPieces(): Array<number> { return this.chessboard.capturedPieces } get check(): boolean { return this.chessboard.check } get checkMate(): boolean { return this.chessboard.checkMate } get staleMate(): boolean { return this.chessboard.staleMate } get ownCheck(): boolean { return this.chessboard.ownCheck } get ownCheckMate(): boolean { return this.chessboard.ownCheckMate } public ownThreats(row: number, col: number): number { return this.chessboard.ownThreats(row, col); } public opponentThreats(row: number, col: number): number { return this.chessboard.opponentThreats(row, col); } public suggestMove(): Move { return new Suggestor(this.chessboard).suggestMove(); } get moveHistory(): Array<Move> { return this.chessboard.moveHistory } onclick(row: number, col: number): void { if (!this.isPieceSelected) this.setSelectedPiece(row, col); else { this.isPieceSelected = false; if (this.chessboard.isLegalMove(this.selectedPieceRow, this.selectedPieceCol, row, col)) { this.chessboard.move(this.selectedPieceRow, this.selectedPieceCol, row, col, this.isWhitePlaying ? 5 : -5); var answer = new Suggestor(this.chessboard).suggestMove() if (null != answer) this.move(answer) } } } public move(mv: Move) { this.chessboard.move(mv.fromRow, mv.fromCol, mv.toRow, mv.toCol, mv.promotion) } setSelectedPiece(row: number, col: number): void { var piece = this.chessboard.fields[row][col]; if (this.isWhitePlaying) { if (piece <= 0) return; } else { if (piece >= 0) return; } this.isPieceSelected = true this.selectedPieceRow = row this.selectedPieceCol = col } isLegalMove2(toRow: number, toCol: number): boolean { if (!this.isPieceSelected) return false; return this.chessboard.isLegalMove(this.selectedPieceRow, this.selectedPieceCol, toRow, toCol) } public revertLastMove(): void { this.chessboard.revertLastMove(); } } }
apache-2.0
arsi-apli/NBANDROID-V2
nbandroid.core/src/main/java/org/netbeans/modules/android/avd/manager/ui/SystemImageDescription.java
8657
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.netbeans.modules.android.avd.manager.ui; import com.android.repository.Revision; import com.android.repository.api.RemotePackage; import com.android.repository.api.RepoPackage; import com.android.repository.impl.meta.TypeDetails; import com.android.sdklib.AndroidVersion; import com.android.sdklib.ISystemImage; import com.android.sdklib.SdkVersionInfo; import com.android.sdklib.repository.IdDisplay; import com.android.sdklib.repository.meta.DetailsTypes; import com.android.sdklib.repository.targets.PlatformTarget; import com.android.sdklib.repository.targets.SystemImage; import com.google.common.base.Objects; import java.io.File; import static org.netbeans.modules.android.avd.manager.AvdManager.TAGS_WITH_GOOGLE_API; /** * Information on a system image. Used internally by the avd manager. */ public final class SystemImageDescription { private ISystemImage mySystemImage; private RemotePackage myRemotePackage; private boolean downloadInProgress = false; public SystemImageDescription(ISystemImage systemImage) { mySystemImage = systemImage; } public SystemImageDescription(RemotePackage remotePackage) { this.myRemotePackage = remotePackage; assert hasSystemImage(remotePackage); mySystemImage = new RemoteSystemImage(remotePackage); } public boolean isDownloadInProgress() { return downloadInProgress; } public void setDownloadInProgress(boolean downloadInProgress) { this.downloadInProgress = downloadInProgress; } static boolean hasSystemImage(RepoPackage p) { TypeDetails details = p.getTypeDetails(); if (!(details instanceof DetailsTypes.ApiDetailsType)) { return false; } int apiLevel = ((DetailsTypes.ApiDetailsType) details).getApiLevel(); if (details instanceof DetailsTypes.SysImgDetailsType) { return true; } // Platforms up to 13 included a bundled system image if (details instanceof DetailsTypes.PlatformDetailsType && apiLevel <= 13) { return true; } // Google APIs addons up to 19 included a bundled system image if (details instanceof DetailsTypes.AddonDetailsType && ((DetailsTypes.AddonDetailsType) details).getVendor().getId().equals("google") && TAGS_WITH_GOOGLE_API.contains(((DetailsTypes.AddonDetailsType) details).getTag()) && apiLevel <= 19) { return true; } return false; } @Override public int hashCode() { return Objects.hashCode(mySystemImage, myRemotePackage); } @Override public boolean equals(Object obj) { if (!(obj instanceof SystemImageDescription)) { return false; } SystemImageDescription other = (SystemImageDescription) obj; return Objects.equal(mySystemImage, other.mySystemImage) && Objects.equal(myRemotePackage, other.myRemotePackage); } public AndroidVersion getVersion() { return mySystemImage.getAndroidVersion(); } public RemotePackage getRemotePackage() { return myRemotePackage; } public boolean isRemote() { return myRemotePackage != null; } public boolean obsolete() { return mySystemImage.obsolete(); } public String getAbiType() { return mySystemImage.getAbiType(); } public 
IdDisplay getTag() { return mySystemImage.getTag(); } public String getName() { String versionString = SdkVersionInfo.getVersionString(getVersion().getFeatureLevel()); return String.format("Android %s", versionString == null ? "API " + getVersion().getApiString() : versionString); } public String getVendor() { if (mySystemImage.getAddonVendor() != null) { return mySystemImage.getAddonVendor().getDisplay(); } return PlatformTarget.PLATFORM_VENDOR; } public String getVersionName() { return SdkVersionInfo.getVersionString(mySystemImage.getAndroidVersion().getApiLevel()); } Revision getRevision() { return mySystemImage.getRevision(); } public File[] getSkins() { return mySystemImage.getSkins(); } public ISystemImage getSystemImage() { return mySystemImage; } private static class RemoteSystemImage implements ISystemImage { private final RemotePackage myRemotePackage; private final IdDisplay myTag; private final IdDisplay myVendor; private final String myAbi; private final AndroidVersion myAndroidVersion; RemoteSystemImage(RemotePackage p) { myRemotePackage = p; TypeDetails details = myRemotePackage.getTypeDetails(); assert details instanceof DetailsTypes.ApiDetailsType; myAndroidVersion = ((DetailsTypes.ApiDetailsType) details).getAndroidVersion(); IdDisplay tag = null; IdDisplay vendor = null; String abi = "armeabi"; if (details instanceof DetailsTypes.AddonDetailsType) { tag = ((DetailsTypes.AddonDetailsType) details).getTag(); vendor = ((DetailsTypes.AddonDetailsType) details).getVendor(); if (SystemImage.GOOGLE_APIS_X86_TAG.equals(tag)) { abi = "x86"; } } if (details instanceof DetailsTypes.SysImgDetailsType) { tag = ((DetailsTypes.SysImgDetailsType) details).getTag(); vendor = ((DetailsTypes.SysImgDetailsType) details).getVendor(); abi = ((DetailsTypes.SysImgDetailsType) details).getAbi(); } myTag = tag != null ? tag : SystemImage.DEFAULT_TAG; myVendor = vendor; myAbi = abi; } @Override public File getLocation() { assert false : "Can't get location for remote image"; return new File(""); } @Override public IdDisplay getTag() { return myTag; } @com.android.annotations.Nullable @Override public IdDisplay getAddonVendor() { return myVendor; } @Override public String getAbiType() { return myAbi; } @Override public File[] getSkins() { return new File[0]; } @Override public Revision getRevision() { return myRemotePackage.getVersion(); } @Override public AndroidVersion getAndroidVersion() { return myAndroidVersion; } @Override public boolean hasPlayStore() { if (SystemImage.PLAY_STORE_TAG.equals(myTag)) { return true; } // A Wear system image has Play Store if it is // a recent API version and is NOT Wear-for-China. if (SystemImage.WEAR_TAG.equals(getTag()) && myAndroidVersion.getApiLevel() >= AndroidVersion.MIN_RECOMMENDED_WEAR_API && !myRemotePackage.getPath().contains(WEAR_CN_DIRECTORY)) { return true; } return false; } @Override public boolean obsolete() { return myRemotePackage.obsolete(); } @Override public int compareTo(ISystemImage o) { if (o instanceof RemoteSystemImage) { return myRemotePackage.compareTo(((RemoteSystemImage) o).myRemotePackage); } return 1; } @Override public int hashCode() { return myRemotePackage.hashCode(); } @Override public boolean equals(Object o) { if (!(o instanceof RemoteSystemImage)) { return false; } RemoteSystemImage other = (RemoteSystemImage) o; return myRemotePackage.equals(other.myRemotePackage); } } }
apache-2.0
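A sketch of filtering a list of the SystemImageDescription objects above, for example when deciding which images to offer in an AVD wizard; the incoming list and the API-level cut-off are assumed inputs built elsewhere from local and remote packages.

import java.util.List;
import java.util.stream.Collectors;

import org.netbeans.modules.android.avd.manager.ui.SystemImageDescription;

public class SystemImageFilterSketch {

    // Keeps non-obsolete x86 images at or above the given API level.
    public static List<SystemImageDescription> usableImages(
            List<SystemImageDescription> all, int minApiLevel) {
        return all.stream()
                .filter(img -> !img.obsolete())
                .filter(img -> img.getAbiType().startsWith("x86"))
                .filter(img -> img.getVersion().getApiLevel() >= minApiLevel)
                .collect(Collectors.toList());
    }
}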
bskiba/autoscaler
cluster-autoscaler/cloudprovider/ionoscloud/ionos-cloud-sdk-go/model_ip_block.go
6125
/*
 * CLOUD API
 *
 * An enterprise-grade Infrastructure is provided as a Service (IaaS) solution that can be managed through a browser-based \"Data Center Designer\" (DCD) tool or via an easy to use API. The API allows you to perform a variety of management tasks such as spinning up additional servers, adding volumes, adjusting networking, and so forth. It is designed to allow users to leverage the same power and flexibility found within the DCD visual tool. Both tools are consistent with their concepts and lend well to making the experience smooth and intuitive.
 *
 * API version: 5.0
 */

// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.

package ionossdk

import (
    "encoding/json"
)

// IpBlock struct for IpBlock
type IpBlock struct {
    // The resource's unique identifier
    Id *string `json:"id,omitempty"`
    // The type of object that has been created
    Type *Type `json:"type,omitempty"`
    // URL to the object representation (absolute path)
    Href *string `json:"href,omitempty"`
    Metadata *DatacenterElementMetadata `json:"metadata,omitempty"`
    Properties *IpBlockProperties `json:"properties"`
}

// GetId returns the Id field value
// If the value is explicit nil, the zero value for string will be returned
func (o *IpBlock) GetId() *string {
    if o == nil {
        return nil
    }
    return o.Id
}

// GetIdOk returns a tuple with the Id field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *IpBlock) GetIdOk() (*string, bool) {
    if o == nil {
        return nil, false
    }
    return o.Id, true
}

// SetId sets field value
func (o *IpBlock) SetId(v string) {
    o.Id = &v
}

// HasId returns a boolean if a field has been set.
func (o *IpBlock) HasId() bool {
    if o != nil && o.Id != nil {
        return true
    }
    return false
}

// GetType returns the Type field value
// If the value is explicit nil, the zero value for Type will be returned
func (o *IpBlock) GetType() *Type {
    if o == nil {
        return nil
    }
    return o.Type
}

// GetTypeOk returns a tuple with the Type field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *IpBlock) GetTypeOk() (*Type, bool) {
    if o == nil {
        return nil, false
    }
    return o.Type, true
}

// SetType sets field value
func (o *IpBlock) SetType(v Type) {
    o.Type = &v
}

// HasType returns a boolean if a field has been set.
func (o *IpBlock) HasType() bool {
    if o != nil && o.Type != nil {
        return true
    }
    return false
}

// GetHref returns the Href field value
// If the value is explicit nil, the zero value for string will be returned
func (o *IpBlock) GetHref() *string {
    if o == nil {
        return nil
    }
    return o.Href
}

// GetHrefOk returns a tuple with the Href field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *IpBlock) GetHrefOk() (*string, bool) {
    if o == nil {
        return nil, false
    }
    return o.Href, true
}

// SetHref sets field value
func (o *IpBlock) SetHref(v string) {
    o.Href = &v
}

// HasHref returns a boolean if a field has been set.
func (o *IpBlock) HasHref() bool {
    if o != nil && o.Href != nil {
        return true
    }
    return false
}

// GetMetadata returns the Metadata field value
// If the value is explicit nil, the zero value for DatacenterElementMetadata will be returned
func (o *IpBlock) GetMetadata() *DatacenterElementMetadata {
    if o == nil {
        return nil
    }
    return o.Metadata
}

// GetMetadataOk returns a tuple with the Metadata field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *IpBlock) GetMetadataOk() (*DatacenterElementMetadata, bool) {
    if o == nil {
        return nil, false
    }
    return o.Metadata, true
}

// SetMetadata sets field value
func (o *IpBlock) SetMetadata(v DatacenterElementMetadata) {
    o.Metadata = &v
}

// HasMetadata returns a boolean if a field has been set.
func (o *IpBlock) HasMetadata() bool {
    if o != nil && o.Metadata != nil {
        return true
    }
    return false
}

// GetProperties returns the Properties field value
// If the value is explicit nil, the zero value for IpBlockProperties will be returned
func (o *IpBlock) GetProperties() *IpBlockProperties {
    if o == nil {
        return nil
    }
    return o.Properties
}

// GetPropertiesOk returns a tuple with the Properties field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *IpBlock) GetPropertiesOk() (*IpBlockProperties, bool) {
    if o == nil {
        return nil, false
    }
    return o.Properties, true
}

// SetProperties sets field value
func (o *IpBlock) SetProperties(v IpBlockProperties) {
    o.Properties = &v
}

// HasProperties returns a boolean if a field has been set.
func (o *IpBlock) HasProperties() bool {
    if o != nil && o.Properties != nil {
        return true
    }
    return false
}

func (o IpBlock) MarshalJSON() ([]byte, error) {
    toSerialize := map[string]interface{}{}
    if o.Id != nil {
        toSerialize["id"] = o.Id
    }
    if o.Type != nil {
        toSerialize["type"] = o.Type
    }
    if o.Href != nil {
        toSerialize["href"] = o.Href
    }
    if o.Metadata != nil {
        toSerialize["metadata"] = o.Metadata
    }
    if o.Properties != nil {
        toSerialize["properties"] = o.Properties
    }
    return json.Marshal(toSerialize)
}

type NullableIpBlock struct {
    value *IpBlock
    isSet bool
}

func (v NullableIpBlock) Get() *IpBlock {
    return v.value
}

func (v *NullableIpBlock) Set(val *IpBlock) {
    v.value = val
    v.isSet = true
}

func (v NullableIpBlock) IsSet() bool {
    return v.isSet
}

func (v *NullableIpBlock) Unset() {
    v.value = nil
    v.isSet = false
}

func NewNullableIpBlock(val *IpBlock) *NullableIpBlock {
    return &NullableIpBlock{value: val, isSet: true}
}

func (v NullableIpBlock) MarshalJSON() ([]byte, error) {
    return json.Marshal(v.value)
}

func (v *NullableIpBlock) UnmarshalJSON(src []byte) error {
    v.isSet = true
    return json.Unmarshal(src, &v.value)
}
apache-2.0
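The generated IpBlock model above follows the OpenAPI-generator convention: every optional field is a nullable pointer, each field gets Has/Get/Set accessors, and MarshalJSON only emits fields that were actually set. The sketch below restates that "optional field" pattern in Java for illustration; IpBlockSketch is a hypothetical class, not part of the ionossdk package, and it only mirrors the id and href fields.

import java.util.LinkedHashMap;
import java.util.Map;

public final class IpBlockSketch {
    private String id;   // null means "unset", like a nil pointer in the Go model
    private String href;

    public boolean hasId() { return id != null; }
    public String getId() { return id; }
    public void setId(String v) { id = v; }

    public boolean hasHref() { return href != null; }
    public String getHref() { return href; }
    public void setHref(String v) { href = v; }

    // Mirrors MarshalJSON above: unset (null) fields are omitted entirely.
    public Map<String, Object> toJsonMap() {
        Map<String, Object> out = new LinkedHashMap<>();
        if (id != null) out.put("id", id);
        if (href != null) out.put("href", href);
        return out;
    }

    public static void main(String[] args) {
        IpBlockSketch block = new IpBlockSketch();
        block.setId("ipblock-123");              // href deliberately left unset
        System.out.println(block.toJsonMap());   // prints {id=ipblock-123}
    }
}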
oss-laboratries/OpenPIE
cmdb-portlet/WEB-INF/src/CopyOftest.java
1667
import java.util.*;
import java.util.Map;
import java.text.SimpleDateFormat;
import org.codehaus.jackson.*;
import java.io.*;
import org.json.JSONObject;

public class CopyOftest {

    public static void main(String[] args) throws Exception {
        JsonFactory factory = new JsonFactory();
        JsonParser parser = factory.createJsonParser(new File("/var/tmp/zbxlist2.json"));
        while (parser.nextToken() != JsonToken.END_OBJECT) {
            String name = parser.getCurrentName();
            if (name != null) {
                if (name.equals("hosts")) {
                    System.out.println("test1="+parser.getText());
                    while (parser.nextToken() != JsonToken.END_OBJECT) {
                        String name1 = parser.getCurrentName();
                        if (name1 != null) {
                            if (name1.equals("host")) {
                                System.out.println("test10="+parser.getText());
                            }
                        }
                    }
                }
                if (name.equals("comments")) {
                    System.out.println("test2="+parser.getText());
                } else {
                    parser.skipChildren();
                }
            }
        }
    }
}

/*
if (parser.nextToken() == JsonToken.START_ARRAY) {
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
            while (parser.nextToken() != JsonToken.END_OBJECT) {
                String name = parser.getCurrentName();
                parser.nextToken();
                if (name.equals("hosts")) {
                    System.out.println("test1="+parser.getText());
                }
                if (name.equals("comments")) {
                    System.out.println("test2="+parser.getText());
                } else {
                    parser.skipChildren();
                }
            }
        }
    }
}}}
*/
apache-2.0
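CopyOftest above walks the token stream by hand but does not always advance to a value before calling getText(); when the current token is a field name, Jackson's getText() returns the name itself rather than the value. The sketch below is a hedged example of a more conventional walk using the same Jackson 1.x streaming API already imported by that file; the {"hosts":{"host":...},"comments":...} layout is an assumption inferred from the code, not a verified schema of zbxlist2.json.

import java.io.File;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;

public class ZbxListSketch {
    public static void main(String[] args) throws Exception {
        JsonParser parser = new JsonFactory().createJsonParser(new File("/var/tmp/zbxlist2.json"));
        if (parser.nextToken() != JsonToken.START_OBJECT) {
            throw new IllegalStateException("expected a top-level JSON object");
        }
        while (parser.nextToken() != JsonToken.END_OBJECT) {
            String field = parser.getCurrentName();
            parser.nextToken(); // advance from the field name to its value
            if ("hosts".equals(field)) {
                // assumed to be an object; scan it for its "host" entry
                while (parser.nextToken() != JsonToken.END_OBJECT) {
                    if ("host".equals(parser.getCurrentName())) {
                        parser.nextToken();
                        System.out.println("host=" + parser.getText());
                    } else {
                        parser.nextToken();
                        parser.skipChildren(); // no-op for scalars, skips nested structures
                    }
                }
            } else if ("comments".equals(field)) {
                System.out.println("comments=" + parser.getText());
            } else {
                parser.skipChildren(); // ignore any other field, including nested structure
            }
        }
        parser.close();
    }
}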
dbrimley/hazelcast
hazelcast/src/test/java/com/hazelcast/client/protocol/compatibility/ClientCompatibilityNullTest_1_2.java
244121
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.client.protocol.compatibility; import com.hazelcast.client.impl.MemberImpl; import com.hazelcast.client.impl.client.DistributedObjectInfo; import com.hazelcast.client.impl.protocol.ClientMessage; import com.hazelcast.client.impl.protocol.codec.*; import com.hazelcast.client.impl.protocol.util.SafeBuffer; import com.hazelcast.core.Member; import com.hazelcast.internal.serialization.impl.HeapData; import com.hazelcast.map.impl.SimpleEntryView; import com.hazelcast.map.impl.querycache.event.DefaultQueryCacheEventData; import com.hazelcast.map.impl.querycache.event.QueryCacheEventData; import com.hazelcast.mapreduce.JobPartitionState; import com.hazelcast.mapreduce.impl.task.JobPartitionStateImpl; import com.hazelcast.nio.Address; import com.hazelcast.scheduledexecutor.ScheduledTaskHandler; import com.hazelcast.nio.serialization.Data; import com.hazelcast.transaction.impl.xa.SerializableXID; import java.io.IOException; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.util.Arrays; import java.io.IOException; import java.io.DataInputStream; import java.io.InputStream; import java.lang.reflect.Array; import java.net.UnknownHostException; import javax.transaction.xa.Xid; import java.util.AbstractMap; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertFalse; import static com.hazelcast.client.protocol.compatibility.ReferenceObjects.*; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelTest.class}) public class ClientCompatibilityNullTest_1_2 { private static final int FRAME_LEN_FIELD_SIZE = 4; @org.junit.Test public void test() throws IOException { InputStream input = getClass().getResourceAsStream("/1.2.protocol.compatibility.null.binary"); DataInputStream inputStream = new DataInputStream(input); { ClientMessage clientMessage = ClientAuthenticationCodec.encodeRequest( aString , aString , null , null , aBoolean , aString , aByte , aString ); int length = inputStream.readInt(); // Since the test is generated for protocol version (1.2) which is earlier than latest change in the message // (version 1.3), only the bytes after frame length fields are compared int frameLength = clientMessage.getFrameLength(); assertTrue(frameLength >= length); inputStream.skipBytes(FRAME_LEN_FIELD_SIZE); byte[] bytes = new byte[length - FRAME_LEN_FIELD_SIZE]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOfRange(clientMessage.buffer().byteArray(), FRAME_LEN_FIELD_SIZE, length), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new 
byte[length]; inputStream.read(bytes); ClientAuthenticationCodec.ResponseParameters params = ClientAuthenticationCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aByte, params.status)); assertTrue(isEqual(null, params.address)); assertTrue(isEqual(null, params.uuid)); assertTrue(isEqual(null, params.ownerUuid)); assertTrue(isEqual(aByte, params.serializationVersion)); assertFalse(params.serverHazelcastVersionExist); assertFalse(params.clientUnregisteredMembersExist); } { ClientMessage clientMessage = ClientAuthenticationCustomCodec.encodeRequest( aData , null , null , aBoolean , aString , aByte , aString ); int length = inputStream.readInt(); // Since the test is generated for protocol version (1.2) which is earlier than latest change in the message // (version 1.3), only the bytes after frame length fields are compared int frameLength = clientMessage.getFrameLength(); assertTrue(frameLength >= length); inputStream.skipBytes(FRAME_LEN_FIELD_SIZE); byte[] bytes = new byte[length - FRAME_LEN_FIELD_SIZE]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOfRange(clientMessage.buffer().byteArray(), FRAME_LEN_FIELD_SIZE, length), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientAuthenticationCustomCodec.ResponseParameters params = ClientAuthenticationCustomCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aByte, params.status)); assertTrue(isEqual(null, params.address)); assertTrue(isEqual(null, params.uuid)); assertTrue(isEqual(null, params.ownerUuid)); assertTrue(isEqual(aByte, params.serializationVersion)); assertFalse(params.serverHazelcastVersionExist); assertFalse(params.clientUnregisteredMembersExist); } { ClientMessage clientMessage = ClientAddMembershipListenerCodec.encodeRequest( aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientAddMembershipListenerCodec.ResponseParameters params = ClientAddMembershipListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ClientAddMembershipListenerCodecHandler extends ClientAddMembershipListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.core.Member member , int eventType ) { assertTrue(isEqual(aMember, member)); assertTrue(isEqual(anInt, eventType)); } @Override public void handle( java.util.Collection<com.hazelcast.core.Member> members ) { assertTrue(isEqual(members, members)); } @Override public void handle( java.lang.String uuid , java.lang.String key , int operationType , java.lang.String value ) { assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(aString, key)); assertTrue(isEqual(anInt, operationType)); assertTrue(isEqual(null, value)); } } ClientAddMembershipListenerCodecHandler handler = new ClientAddMembershipListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { int length = 
inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ClientCreateProxyCodec.encodeRequest( aString , aString , anAddress ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientCreateProxyCodec.ResponseParameters params = ClientCreateProxyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ClientDestroyProxyCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientDestroyProxyCodec.ResponseParameters params = ClientDestroyProxyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ClientGetPartitionsCodec.encodeRequest( ); int length = inputStream.readInt(); // Since the test is generated for protocol version (1.2) which is earlier than latest change in the message // (version 1.5), only the bytes after frame length fields are compared int frameLength = clientMessage.getFrameLength(); assertTrue(frameLength >= length); inputStream.skipBytes(FRAME_LEN_FIELD_SIZE); byte[] bytes = new byte[length - FRAME_LEN_FIELD_SIZE]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOfRange(clientMessage.buffer().byteArray(), FRAME_LEN_FIELD_SIZE, length), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientGetPartitionsCodec.ResponseParameters params = ClientGetPartitionsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aPartitionTable, params.partitions)); assertFalse(params.partitionStateVersionExist); } { ClientMessage clientMessage = ClientRemoveAllListenersCodec.encodeRequest( ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientRemoveAllListenersCodec.ResponseParameters params = ClientRemoveAllListenersCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ClientAddPartitionLostListenerCodec.encodeRequest( aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientAddPartitionLostListenerCodec.ResponseParameters params = ClientAddPartitionLostListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ClientAddPartitionLostListenerCodecHandler extends ClientAddPartitionLostListenerCodec.AbstractEventHandler { @Override public void handle( int 
partitionId , int lostBackupCount , com.hazelcast.nio.Address source ) { assertTrue(isEqual(anInt, partitionId)); assertTrue(isEqual(anInt, lostBackupCount)); assertTrue(isEqual(null, source)); } } ClientAddPartitionLostListenerCodecHandler handler = new ClientAddPartitionLostListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ClientRemovePartitionLostListenerCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientRemovePartitionLostListenerCodec.ResponseParameters params = ClientRemovePartitionLostListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ClientGetDistributedObjectsCodec.encodeRequest( ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientGetDistributedObjectsCodec.ResponseParameters params = ClientGetDistributedObjectsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(distributedObjectInfos, params.response)); } { ClientMessage clientMessage = ClientAddDistributedObjectListenerCodec.encodeRequest( aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientAddDistributedObjectListenerCodec.ResponseParameters params = ClientAddDistributedObjectListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ClientAddDistributedObjectListenerCodecHandler extends ClientAddDistributedObjectListenerCodec.AbstractEventHandler { @Override public void handle( java.lang.String name , java.lang.String serviceName , java.lang.String eventType ) { assertTrue(isEqual(aString, name)); assertTrue(isEqual(aString, serviceName)); assertTrue(isEqual(aString, eventType)); } } ClientAddDistributedObjectListenerCodecHandler handler = new ClientAddDistributedObjectListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ClientRemoveDistributedObjectListenerCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientRemoveDistributedObjectListenerCodec.ResponseParameters params = 
ClientRemoveDistributedObjectListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ClientPingCodec.encodeRequest( ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ClientPingCodec.ResponseParameters params = ClientPingCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapPutCodec.encodeRequest( aString , aData , aData , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapPutCodec.ResponseParameters params = MapPutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapGetCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapGetCodec.ResponseParameters params = MapGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapRemoveCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapRemoveCodec.ResponseParameters params = MapRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapReplaceCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReplaceCodec.ResponseParameters params = MapReplaceCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapReplaceIfSameCodec.encodeRequest( aString , aData , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReplaceIfSameCodec.ResponseParameters params = MapReplaceIfSameCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, 
params.response)); } { ClientMessage clientMessage = MapContainsKeyCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapContainsKeyCodec.ResponseParameters params = MapContainsKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapContainsValueCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapContainsValueCodec.ResponseParameters params = MapContainsValueCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapRemoveIfSameCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapRemoveIfSameCodec.ResponseParameters params = MapRemoveIfSameCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapDeleteCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapDeleteCodec.ResponseParameters params = MapDeleteCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapFlushCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapFlushCodec.ResponseParameters params = MapFlushCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapTryRemoveCodec.encodeRequest( aString , aData , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapTryRemoveCodec.ResponseParameters params = MapTryRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapTryPutCodec.encodeRequest( aString , aData , aData , aLong , aLong ); int length = 
inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapTryPutCodec.ResponseParameters params = MapTryPutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapPutTransientCodec.encodeRequest( aString , aData , aData , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapPutTransientCodec.ResponseParameters params = MapPutTransientCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapPutIfAbsentCodec.encodeRequest( aString , aData , aData , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapPutIfAbsentCodec.ResponseParameters params = MapPutIfAbsentCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapSetCodec.encodeRequest( aString , aData , aData , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapSetCodec.ResponseParameters params = MapSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapLockCodec.encodeRequest( aString , aData , aLong , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapLockCodec.ResponseParameters params = MapLockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapTryLockCodec.encodeRequest( aString , aData , aLong , aLong , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapTryLockCodec.ResponseParameters params = MapTryLockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapIsLockedCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), 
clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapIsLockedCodec.ResponseParameters params = MapIsLockedCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapUnlockCodec.encodeRequest( aString , aData , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapUnlockCodec.ResponseParameters params = MapUnlockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapAddInterceptorCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddInterceptorCodec.ResponseParameters params = MapAddInterceptorCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { ClientMessage clientMessage = MapRemoveInterceptorCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapRemoveInterceptorCodec.ResponseParameters params = MapRemoveInterceptorCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapAddEntryListenerToKeyWithPredicateCodec.encodeRequest( aString , aData , aData , aBoolean , anInt , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddEntryListenerToKeyWithPredicateCodec.ResponseParameters params = MapAddEntryListenerToKeyWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MapAddEntryListenerToKeyWithPredicateCodecHandler extends MapAddEntryListenerToKeyWithPredicateCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } MapAddEntryListenerToKeyWithPredicateCodecHandler handler = new MapAddEntryListenerToKeyWithPredicateCodecHandler(); { int 
length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MapAddEntryListenerWithPredicateCodec.encodeRequest( aString , aData , aBoolean , anInt , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddEntryListenerWithPredicateCodec.ResponseParameters params = MapAddEntryListenerWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MapAddEntryListenerWithPredicateCodecHandler extends MapAddEntryListenerWithPredicateCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } MapAddEntryListenerWithPredicateCodecHandler handler = new MapAddEntryListenerWithPredicateCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MapAddEntryListenerToKeyCodec.encodeRequest( aString , aData , aBoolean , anInt , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddEntryListenerToKeyCodec.ResponseParameters params = MapAddEntryListenerToKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MapAddEntryListenerToKeyCodecHandler extends MapAddEntryListenerToKeyCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } MapAddEntryListenerToKeyCodecHandler handler = new MapAddEntryListenerToKeyCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MapAddEntryListenerCodec.encodeRequest( aString , aBoolean , anInt , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; 
inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddEntryListenerCodec.ResponseParameters params = MapAddEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MapAddEntryListenerCodecHandler extends MapAddEntryListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } MapAddEntryListenerCodecHandler handler = new MapAddEntryListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MapAddNearCacheEntryListenerCodec.encodeRequest( aString , anInt , aBoolean ); int length = inputStream.readInt(); // Since the test is generated for protocol version (1.2) which is earlier than latest change in the message // (version 1.4), only the bytes after frame length fields are compared int frameLength = clientMessage.getFrameLength(); assertTrue(frameLength >= length); inputStream.skipBytes(FRAME_LEN_FIELD_SIZE); byte[] bytes = new byte[length - FRAME_LEN_FIELD_SIZE]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOfRange(clientMessage.buffer().byteArray(), FRAME_LEN_FIELD_SIZE, length), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddNearCacheEntryListenerCodec.ResponseParameters params = MapAddNearCacheEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MapAddNearCacheEntryListenerCodecHandler extends MapAddNearCacheEntryListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , java.lang.String sourceUuid , java.util.UUID partitionUuid , long sequence ) { assertTrue(isEqual(null, key)); } @Override public void handle( java.util.Collection<com.hazelcast.nio.serialization.Data> keys , java.util.Collection<java.lang.String> sourceUuids , java.util.Collection<java.util.UUID> partitionUuids , java.util.Collection<java.lang.Long> sequences ) { assertTrue(isEqual(datas, keys)); } } MapAddNearCacheEntryListenerCodecHandler handler = new MapAddNearCacheEntryListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MapRemoveEntryListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapRemoveEntryListenerCodec.ResponseParameters params = MapRemoveEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapAddPartitionLostListenerCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddPartitionLostListenerCodec.ResponseParameters params = MapAddPartitionLostListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MapAddPartitionLostListenerCodecHandler extends MapAddPartitionLostListenerCodec.AbstractEventHandler { @Override public void handle( int partitionId , java.lang.String uuid ) { assertTrue(isEqual(anInt, partitionId)); assertTrue(isEqual(aString, uuid)); } } MapAddPartitionLostListenerCodecHandler handler = new MapAddPartitionLostListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MapRemovePartitionLostListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapRemovePartitionLostListenerCodec.ResponseParameters params = MapRemovePartitionLostListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapGetEntryViewCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapGetEntryViewCodec.ResponseParameters params = MapGetEntryViewCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapEvictCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapEvictCodec.ResponseParameters params = MapEvictCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapEvictAllCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapEvictAllCodec.ResponseParameters params = MapEvictAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapLoadAllCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapLoadAllCodec.ResponseParameters params = MapLoadAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapLoadGivenKeysCodec.encodeRequest( aString , datas , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapLoadGivenKeysCodec.ResponseParameters params = MapLoadGivenKeysCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapKeySetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapKeySetCodec.ResponseParameters params = MapKeySetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MapGetAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapGetAllCodec.ResponseParameters params = MapGetAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapValuesCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapValuesCodec.ResponseParameters params = MapValuesCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MapEntrySetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapEntrySetCodec.ResponseParameters params = 
MapEntrySetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapKeySetWithPredicateCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapKeySetWithPredicateCodec.ResponseParameters params = MapKeySetWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MapValuesWithPredicateCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapValuesWithPredicateCodec.ResponseParameters params = MapValuesWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MapEntriesWithPredicateCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapEntriesWithPredicateCodec.ResponseParameters params = MapEntriesWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapAddIndexCodec.encodeRequest( aString , aString , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapAddIndexCodec.ResponseParameters params = MapAddIndexCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapSizeCodec.ResponseParameters params = MapSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = MapIsEmptyCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapIsEmptyCodec.ResponseParameters params = MapIsEmptyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); 
assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapPutAllCodec.encodeRequest( aString , aListOfEntry ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapPutAllCodec.ResponseParameters params = MapPutAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapClearCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapClearCodec.ResponseParameters params = MapClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapExecuteOnKeyCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapExecuteOnKeyCodec.ResponseParameters params = MapExecuteOnKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapSubmitToKeyCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapSubmitToKeyCodec.ResponseParameters params = MapSubmitToKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = MapExecuteOnAllKeysCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapExecuteOnAllKeysCodec.ResponseParameters params = MapExecuteOnAllKeysCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapExecuteWithPredicateCodec.encodeRequest( aString , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapExecuteWithPredicateCodec.ResponseParameters params = MapExecuteWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapExecuteOnKeysCodec.encodeRequest( 
aString , aData , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapExecuteOnKeysCodec.ResponseParameters params = MapExecuteOnKeysCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapForceUnlockCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapForceUnlockCodec.ResponseParameters params = MapForceUnlockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapKeySetWithPagingPredicateCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapKeySetWithPagingPredicateCodec.ResponseParameters params = MapKeySetWithPagingPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MapValuesWithPagingPredicateCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapValuesWithPagingPredicateCodec.ResponseParameters params = MapValuesWithPagingPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapEntriesWithPagingPredicateCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapEntriesWithPagingPredicateCodec.ResponseParameters params = MapEntriesWithPagingPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapClearNearCacheCodec.encodeRequest( aString , anAddress ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapClearNearCacheCodec.ResponseParameters params = MapClearNearCacheCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MapFetchKeysCodec.encodeRequest( 
aString , anInt , anInt , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapFetchKeysCodec.ResponseParameters params = MapFetchKeysCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.tableIndex)); assertTrue(isEqual(datas, params.keys)); } { ClientMessage clientMessage = MapFetchEntriesCodec.encodeRequest( aString , anInt , anInt , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapFetchEntriesCodec.ResponseParameters params = MapFetchEntriesCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.tableIndex)); assertTrue(isEqual(aListOfEntry, params.entries)); } { ClientMessage clientMessage = MultiMapPutCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapPutCodec.ResponseParameters params = MultiMapPutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MultiMapGetCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapGetCodec.ResponseParameters params = MultiMapGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MultiMapRemoveCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapRemoveCodec.ResponseParameters params = MultiMapRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MultiMapKeySetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapKeySetCodec.ResponseParameters params = MultiMapKeySetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage 
clientMessage = MultiMapValuesCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapValuesCodec.ResponseParameters params = MultiMapValuesCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = MultiMapEntrySetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapEntrySetCodec.ResponseParameters params = MultiMapEntrySetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MultiMapContainsKeyCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapContainsKeyCodec.ResponseParameters params = MultiMapContainsKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MultiMapContainsValueCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapContainsValueCodec.ResponseParameters params = MultiMapContainsValueCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MultiMapContainsEntryCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapContainsEntryCodec.ResponseParameters params = MultiMapContainsEntryCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MultiMapSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapSizeCodec.ResponseParameters params = MultiMapSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = 
MultiMapClearCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapClearCodec.ResponseParameters params = MultiMapClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MultiMapValueCountCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapValueCountCodec.ResponseParameters params = MultiMapValueCountCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = MultiMapAddEntryListenerToKeyCodec.encodeRequest( aString , aData , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapAddEntryListenerToKeyCodec.ResponseParameters params = MultiMapAddEntryListenerToKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MultiMapAddEntryListenerToKeyCodecHandler extends MultiMapAddEntryListenerToKeyCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } MultiMapAddEntryListenerToKeyCodecHandler handler = new MultiMapAddEntryListenerToKeyCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MultiMapAddEntryListenerCodec.encodeRequest( aString , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapAddEntryListenerCodec.ResponseParameters params = MultiMapAddEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class MultiMapAddEntryListenerCodecHandler extends MultiMapAddEntryListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , 
com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } MultiMapAddEntryListenerCodecHandler handler = new MultiMapAddEntryListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MultiMapRemoveEntryListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapRemoveEntryListenerCodec.ResponseParameters params = MultiMapRemoveEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MultiMapLockCodec.encodeRequest( aString , aData , aLong , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapLockCodec.ResponseParameters params = MultiMapLockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MultiMapTryLockCodec.encodeRequest( aString , aData , aLong , aLong , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapTryLockCodec.ResponseParameters params = MultiMapTryLockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MultiMapIsLockedCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapIsLockedCodec.ResponseParameters params = MultiMapIsLockedCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MultiMapUnlockCodec.encodeRequest( aString , aData , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapUnlockCodec.ResponseParameters params = 
MultiMapUnlockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MultiMapForceUnlockCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapForceUnlockCodec.ResponseParameters params = MultiMapForceUnlockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = MultiMapRemoveEntryCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MultiMapRemoveEntryCodec.ResponseParameters params = MultiMapRemoveEntryCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueueOfferCodec.encodeRequest( aString , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueOfferCodec.ResponseParameters params = QueueOfferCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueuePutCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueuePutCodec.ResponseParameters params = QueuePutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = QueueSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueSizeCodec.ResponseParameters params = QueueSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = QueueRemoveCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueRemoveCodec.ResponseParameters params = QueueRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueuePollCodec.encodeRequest( aString , aLong 
); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueuePollCodec.ResponseParameters params = QueuePollCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = QueueTakeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueTakeCodec.ResponseParameters params = QueueTakeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = QueuePeekCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueuePeekCodec.ResponseParameters params = QueuePeekCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = QueueIteratorCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueIteratorCodec.ResponseParameters params = QueueIteratorCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = QueueDrainToCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueDrainToCodec.ResponseParameters params = QueueDrainToCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = QueueDrainToMaxSizeCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueDrainToMaxSizeCodec.ResponseParameters params = QueueDrainToMaxSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = QueueContainsCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
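// Request-side compatibility check (the same pattern repeats in every codec block of this generated
// test): the next length-prefixed frame is read from inputStream into `bytes`, and the request encoded
// locally by the current client is asserted to be byte-for-byte identical to that reference frame.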
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueContainsCodec.ResponseParameters params = QueueContainsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueueContainsAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueContainsAllCodec.ResponseParameters params = QueueContainsAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueueCompareAndRemoveAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueCompareAndRemoveAllCodec.ResponseParameters params = QueueCompareAndRemoveAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueueCompareAndRetainAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueCompareAndRetainAllCodec.ResponseParameters params = QueueCompareAndRetainAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueueClearCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueClearCodec.ResponseParameters params = QueueClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = QueueAddAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueAddAllCodec.ResponseParameters params = QueueAddAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueueAddListenerCodec.encodeRequest( aString , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), 
clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueAddListenerCodec.ResponseParameters params = QueueAddListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class QueueAddListenerCodecHandler extends QueueAddListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data item , java.lang.String uuid , int eventType ) { assertTrue(isEqual(null, item)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, eventType)); } } QueueAddListenerCodecHandler handler = new QueueAddListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = QueueRemoveListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueRemoveListenerCodec.ResponseParameters params = QueueRemoveListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = QueueRemainingCapacityCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueRemainingCapacityCodec.ResponseParameters params = QueueRemainingCapacityCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = QueueIsEmptyCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); QueueIsEmptyCodec.ResponseParameters params = QueueIsEmptyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TopicPublishCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TopicPublishCodec.ResponseParameters params = TopicPublishCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = TopicAddMessageListenerCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new 
byte[length]; inputStream.read(bytes); TopicAddMessageListenerCodec.ResponseParameters params = TopicAddMessageListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class TopicAddMessageListenerCodecHandler extends TopicAddMessageListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data item , long publishTime , java.lang.String uuid ) { assertTrue(isEqual(aData, item)); assertTrue(isEqual(aLong, publishTime)); assertTrue(isEqual(aString, uuid)); } } TopicAddMessageListenerCodecHandler handler = new TopicAddMessageListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = TopicRemoveMessageListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TopicRemoveMessageListenerCodec.ResponseParameters params = TopicRemoveMessageListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListSizeCodec.ResponseParameters params = ListSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = ListContainsCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListContainsCodec.ResponseParameters params = ListContainsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListContainsAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListContainsAllCodec.ResponseParameters params = ListContainsAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListAddCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
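// Response-side check: the frame just read from inputStream is decoded with the codec under test and
// each decoded field is compared against the shared reference value (here a boolean response). Blocks
// for codecs whose response carries no fields still declare `params`, with nothing asserted afterwards.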
ListAddCodec.ResponseParameters params = ListAddCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListRemoveCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListRemoveCodec.ResponseParameters params = ListRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListAddAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListAddAllCodec.ResponseParameters params = ListAddAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListCompareAndRemoveAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListCompareAndRemoveAllCodec.ResponseParameters params = ListCompareAndRemoveAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListCompareAndRetainAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListCompareAndRetainAllCodec.ResponseParameters params = ListCompareAndRetainAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListClearCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListClearCodec.ResponseParameters params = ListClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ListGetAllCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListGetAllCodec.ResponseParameters params = ListGetAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); 
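// ClientMessage.createForDecode(new SafeBuffer(bytes), 0) wraps the raw frame and begins decoding at
// offset 0; the decoded collection is then compared against the reference value `datas`.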
assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = ListAddListenerCodec.encodeRequest( aString , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListAddListenerCodec.ResponseParameters params = ListAddListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ListAddListenerCodecHandler extends ListAddListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data item , java.lang.String uuid , int eventType ) { assertTrue(isEqual(null, item)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, eventType)); } } ListAddListenerCodecHandler handler = new ListAddListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ListRemoveListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListRemoveListenerCodec.ResponseParameters params = ListRemoveListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListIsEmptyCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListIsEmptyCodec.ResponseParameters params = ListIsEmptyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListAddAllWithIndexCodec.encodeRequest( aString , anInt , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListAddAllWithIndexCodec.ResponseParameters params = ListAddAllWithIndexCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ListGetCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListGetCodec.ResponseParameters params = ListGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = 
ListSetCodec.encodeRequest( aString , anInt , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListSetCodec.ResponseParameters params = ListSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = ListAddWithIndexCodec.encodeRequest( aString , anInt , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListAddWithIndexCodec.ResponseParameters params = ListAddWithIndexCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ListRemoveWithIndexCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListRemoveWithIndexCodec.ResponseParameters params = ListRemoveWithIndexCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = ListLastIndexOfCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListLastIndexOfCodec.ResponseParameters params = ListLastIndexOfCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = ListIndexOfCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListIndexOfCodec.ResponseParameters params = ListIndexOfCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = ListSubCodec.encodeRequest( aString , anInt , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListSubCodec.ResponseParameters params = ListSubCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = ListIteratorCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
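/*
 * The read-length-then-read-bytes sequence is repeated verbatim in every block of this generated test.
 * A hand-written equivalent could factor it into a small helper; the sketch below is illustrative only
 * (readFrame is a hypothetical name, not part of this file, and it assumes inputStream is a
 * java.io.DataInputStream). It also uses readFully(), which, unlike the read() call above, guarantees
 * the whole frame is consumed:
 *
 *   private static byte[] readFrame(DataInputStream in) throws IOException {
 *       int length = in.readInt();   // frame length prefix
 *       byte[] frame = new byte[length];
 *       in.readFully(frame);         // read exactly `length` bytes or throw EOFException
 *       return frame;
 *   }
 */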
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListIteratorCodec.ResponseParameters params = ListIteratorCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = ListListIteratorCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ListListIteratorCodec.ResponseParameters params = ListListIteratorCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = SetSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetSizeCodec.ResponseParameters params = SetSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = SetContainsCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetContainsCodec.ResponseParameters params = SetContainsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetContainsAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetContainsAllCodec.ResponseParameters params = SetContainsAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetAddCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetAddCodec.ResponseParameters params = SetAddCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetRemoveCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = 
inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetRemoveCodec.ResponseParameters params = SetRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetAddAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetAddAllCodec.ResponseParameters params = SetAddAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetCompareAndRemoveAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetCompareAndRemoveAllCodec.ResponseParameters params = SetCompareAndRemoveAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetCompareAndRetainAllCodec.encodeRequest( aString , datas ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetCompareAndRetainAllCodec.ResponseParameters params = SetCompareAndRetainAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetClearCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetClearCodec.ResponseParameters params = SetClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = SetGetAllCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetGetAllCodec.ResponseParameters params = SetGetAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = SetAddListenerCodec.encodeRequest( aString , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetAddListenerCodec.ResponseParameters params = 
SetAddListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class SetAddListenerCodecHandler extends SetAddListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data item , java.lang.String uuid , int eventType ) { assertTrue(isEqual(null, item)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, eventType)); } } SetAddListenerCodecHandler handler = new SetAddListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = SetRemoveListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetRemoveListenerCodec.ResponseParameters params = SetRemoveListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SetIsEmptyCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SetIsEmptyCodec.ResponseParameters params = SetIsEmptyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = LockIsLockedCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockIsLockedCodec.ResponseParameters params = LockIsLockedCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = LockIsLockedByCurrentThreadCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockIsLockedByCurrentThreadCodec.ResponseParameters params = LockIsLockedByCurrentThreadCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = LockGetLockCountCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockGetLockCountCodec.ResponseParameters params = LockGetLockCountCodec.decodeResponse(ClientMessage.createForDecode(new 
SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = LockGetRemainingLeaseTimeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockGetRemainingLeaseTimeCodec.ResponseParameters params = LockGetRemainingLeaseTimeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = LockLockCodec.encodeRequest( aString , aLong , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockLockCodec.ResponseParameters params = LockLockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = LockUnlockCodec.encodeRequest( aString , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockUnlockCodec.ResponseParameters params = LockUnlockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = LockForceUnlockCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockForceUnlockCodec.ResponseParameters params = LockForceUnlockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = LockTryLockCodec.encodeRequest( aString , aLong , aLong , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); LockTryLockCodec.ResponseParameters params = LockTryLockCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ConditionAwaitCodec.encodeRequest( aString , aLong , aLong , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ConditionAwaitCodec.ResponseParameters params = ConditionAwaitCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ConditionBeforeAwaitCodec.encodeRequest( aString , 
aLong , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ConditionBeforeAwaitCodec.ResponseParameters params = ConditionBeforeAwaitCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ConditionSignalCodec.encodeRequest( aString , aLong , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ConditionSignalCodec.ResponseParameters params = ConditionSignalCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ConditionSignalAllCodec.encodeRequest( aString , aLong , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ConditionSignalAllCodec.ResponseParameters params = ConditionSignalAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ExecutorServiceShutdownCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ExecutorServiceShutdownCodec.ResponseParameters params = ExecutorServiceShutdownCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ExecutorServiceIsShutdownCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ExecutorServiceIsShutdownCodec.ResponseParameters params = ExecutorServiceIsShutdownCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ExecutorServiceCancelOnPartitionCodec.encodeRequest( aString , anInt , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ExecutorServiceCancelOnPartitionCodec.ResponseParameters params = ExecutorServiceCancelOnPartitionCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ExecutorServiceCancelOnAddressCodec.encodeRequest( aString , anAddress , aBoolean ); int length = inputStream.readInt(); byte[] bytes = 
new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ExecutorServiceCancelOnAddressCodec.ResponseParameters params = ExecutorServiceCancelOnAddressCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ExecutorServiceSubmitToPartitionCodec.encodeRequest( aString , aString , aData , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ExecutorServiceSubmitToPartitionCodec.ResponseParameters params = ExecutorServiceSubmitToPartitionCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = ExecutorServiceSubmitToAddressCodec.encodeRequest( aString , aString , aData , anAddress ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ExecutorServiceSubmitToAddressCodec.ResponseParameters params = ExecutorServiceSubmitToAddressCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicLongApplyCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongApplyCodec.ResponseParameters params = AtomicLongApplyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicLongAlterCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongAlterCodec.ResponseParameters params = AtomicLongAlterCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = AtomicLongAlterAndGetCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongAlterAndGetCodec.ResponseParameters params = AtomicLongAlterAndGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongGetAndAlterCodec.encodeRequest( 
aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongGetAndAlterCodec.ResponseParameters params = AtomicLongGetAndAlterCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongAddAndGetCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongAddAndGetCodec.ResponseParameters params = AtomicLongAddAndGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongCompareAndSetCodec.encodeRequest( aString , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongCompareAndSetCodec.ResponseParameters params = AtomicLongCompareAndSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = AtomicLongDecrementAndGetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongDecrementAndGetCodec.ResponseParameters params = AtomicLongDecrementAndGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongGetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongGetCodec.ResponseParameters params = AtomicLongGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongGetAndAddCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongGetAndAddCodec.ResponseParameters params = AtomicLongGetAndAddCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = 
AtomicLongGetAndSetCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongGetAndSetCodec.ResponseParameters params = AtomicLongGetAndSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongIncrementAndGetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongIncrementAndGetCodec.ResponseParameters params = AtomicLongIncrementAndGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongGetAndIncrementCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongGetAndIncrementCodec.ResponseParameters params = AtomicLongGetAndIncrementCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = AtomicLongSetCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicLongSetCodec.ResponseParameters params = AtomicLongSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = AtomicReferenceApplyCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceApplyCodec.ResponseParameters params = AtomicReferenceApplyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicReferenceAlterCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceAlterCodec.ResponseParameters params = AtomicReferenceAlterCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = AtomicReferenceAlterAndGetCodec.encodeRequest( aString , aData ); int length = 
inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceAlterAndGetCodec.ResponseParameters params = AtomicReferenceAlterAndGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicReferenceGetAndAlterCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceGetAndAlterCodec.ResponseParameters params = AtomicReferenceGetAndAlterCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicReferenceContainsCodec.encodeRequest( aString , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceContainsCodec.ResponseParameters params = AtomicReferenceContainsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = AtomicReferenceCompareAndSetCodec.encodeRequest( aString , null , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceCompareAndSetCodec.ResponseParameters params = AtomicReferenceCompareAndSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = AtomicReferenceGetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceGetCodec.ResponseParameters params = AtomicReferenceGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicReferenceSetCodec.encodeRequest( aString , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceSetCodec.ResponseParameters params = AtomicReferenceSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = AtomicReferenceClearCodec.encodeRequest( aString ); 
int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceClearCodec.ResponseParameters params = AtomicReferenceClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = AtomicReferenceGetAndSetCodec.encodeRequest( aString , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceGetAndSetCodec.ResponseParameters params = AtomicReferenceGetAndSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicReferenceSetAndGetCodec.encodeRequest( aString , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceSetAndGetCodec.ResponseParameters params = AtomicReferenceSetAndGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = AtomicReferenceIsNullCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); AtomicReferenceIsNullCodec.ResponseParameters params = AtomicReferenceIsNullCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CountDownLatchAwaitCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CountDownLatchAwaitCodec.ResponseParameters params = CountDownLatchAwaitCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CountDownLatchCountDownCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CountDownLatchCountDownCodec.ResponseParameters params = CountDownLatchCountDownCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CountDownLatchGetCountCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new 
byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CountDownLatchGetCountCodec.ResponseParameters params = CountDownLatchGetCountCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = CountDownLatchTrySetCountCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CountDownLatchTrySetCountCodec.ResponseParameters params = CountDownLatchTrySetCountCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SemaphoreInitCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SemaphoreInitCodec.ResponseParameters params = SemaphoreInitCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = SemaphoreAcquireCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SemaphoreAcquireCodec.ResponseParameters params = SemaphoreAcquireCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = SemaphoreAvailablePermitsCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SemaphoreAvailablePermitsCodec.ResponseParameters params = SemaphoreAvailablePermitsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = SemaphoreDrainPermitsCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SemaphoreDrainPermitsCodec.ResponseParameters params = SemaphoreDrainPermitsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = SemaphoreReducePermitsCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; 
inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SemaphoreReducePermitsCodec.ResponseParameters params = SemaphoreReducePermitsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = SemaphoreReleaseCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SemaphoreReleaseCodec.ResponseParameters params = SemaphoreReleaseCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = SemaphoreTryAcquireCodec.encodeRequest( aString , anInt , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); SemaphoreTryAcquireCodec.ResponseParameters params = SemaphoreTryAcquireCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ReplicatedMapPutCodec.encodeRequest( aString , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapPutCodec.ResponseParameters params = ReplicatedMapPutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = ReplicatedMapSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapSizeCodec.ResponseParameters params = ReplicatedMapSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = ReplicatedMapIsEmptyCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapIsEmptyCodec.ResponseParameters params = ReplicatedMapIsEmptyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ReplicatedMapContainsKeyCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
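// The encoded request frame is compared byte-for-byte, up to clientMessage.getFrameLength(), with the
// frame read from the (presumably pre-recorded) binary input stream for this protocol version; note that
// the return value of inputStream.read is ignored, which assumes the stream delivers the full frame in one read.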
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapContainsKeyCodec.ResponseParameters params = ReplicatedMapContainsKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ReplicatedMapContainsValueCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapContainsValueCodec.ResponseParameters params = ReplicatedMapContainsValueCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ReplicatedMapGetCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapGetCodec.ResponseParameters params = ReplicatedMapGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = ReplicatedMapRemoveCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapRemoveCodec.ResponseParameters params = ReplicatedMapRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = ReplicatedMapPutAllCodec.encodeRequest( aString , aListOfEntry ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapPutAllCodec.ResponseParameters params = ReplicatedMapPutAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ReplicatedMapClearCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapClearCodec.ResponseParameters params = ReplicatedMapClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ReplicatedMapAddEntryListenerToKeyWithPredicateCodec.encodeRequest( aString , aData , aData , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapAddEntryListenerToKeyWithPredicateCodec.ResponseParameters params = ReplicatedMapAddEntryListenerToKeyWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ReplicatedMapAddEntryListenerToKeyWithPredicateCodecHandler extends ReplicatedMapAddEntryListenerToKeyWithPredicateCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } ReplicatedMapAddEntryListenerToKeyWithPredicateCodecHandler handler = new ReplicatedMapAddEntryListenerToKeyWithPredicateCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ReplicatedMapAddEntryListenerWithPredicateCodec.encodeRequest( aString , aData , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapAddEntryListenerWithPredicateCodec.ResponseParameters params = ReplicatedMapAddEntryListenerWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ReplicatedMapAddEntryListenerWithPredicateCodecHandler extends ReplicatedMapAddEntryListenerWithPredicateCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } ReplicatedMapAddEntryListenerWithPredicateCodecHandler handler = new ReplicatedMapAddEntryListenerWithPredicateCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ReplicatedMapAddEntryListenerToKeyCodec.encodeRequest( aString , aData , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = 
new byte[length]; inputStream.read(bytes); ReplicatedMapAddEntryListenerToKeyCodec.ResponseParameters params = ReplicatedMapAddEntryListenerToKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ReplicatedMapAddEntryListenerToKeyCodecHandler extends ReplicatedMapAddEntryListenerToKeyCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } ReplicatedMapAddEntryListenerToKeyCodecHandler handler = new ReplicatedMapAddEntryListenerToKeyCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ReplicatedMapAddEntryListenerCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapAddEntryListenerCodec.ResponseParameters params = ReplicatedMapAddEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ReplicatedMapAddEntryListenerCodecHandler extends ReplicatedMapAddEntryListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } ReplicatedMapAddEntryListenerCodecHandler handler = new ReplicatedMapAddEntryListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ReplicatedMapRemoveEntryListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapRemoveEntryListenerCodec.ResponseParameters params = ReplicatedMapRemoveEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ReplicatedMapKeySetCodec.encodeRequest( aString ); int 
length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapKeySetCodec.ResponseParameters params = ReplicatedMapKeySetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = ReplicatedMapValuesCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapValuesCodec.ResponseParameters params = ReplicatedMapValuesCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = ReplicatedMapEntrySetCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapEntrySetCodec.ResponseParameters params = ReplicatedMapEntrySetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = ReplicatedMapAddNearCacheEntryListenerCodec.encodeRequest( aString , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ReplicatedMapAddNearCacheEntryListenerCodec.ResponseParameters params = ReplicatedMapAddNearCacheEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ReplicatedMapAddNearCacheEntryListenerCodecHandler extends ReplicatedMapAddNearCacheEntryListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.nio.serialization.Data key , com.hazelcast.nio.serialization.Data value , com.hazelcast.nio.serialization.Data oldValue , com.hazelcast.nio.serialization.Data mergingValue , int eventType , java.lang.String uuid , int numberOfAffectedEntries ) { assertTrue(isEqual(null, key)); assertTrue(isEqual(null, value)); assertTrue(isEqual(null, oldValue)); assertTrue(isEqual(null, mergingValue)); assertTrue(isEqual(anInt, eventType)); assertTrue(isEqual(aString, uuid)); assertTrue(isEqual(anInt, numberOfAffectedEntries)); } } ReplicatedMapAddNearCacheEntryListenerCodecHandler handler = new ReplicatedMapAddNearCacheEntryListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = MapReduceCancelCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReduceCancelCodec.ResponseParameters params = MapReduceCancelCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = MapReduceJobProcessInformationCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReduceJobProcessInformationCodec.ResponseParameters params = MapReduceJobProcessInformationCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(jobPartitionStates, params.jobPartitionStates)); assertTrue(isEqual(anInt, params.processRecords)); } { ClientMessage clientMessage = MapReduceForMapCodec.encodeRequest( aString , aString , null , aData , null , null , aString , anInt , null , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReduceForMapCodec.ResponseParameters params = MapReduceForMapCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapReduceForListCodec.encodeRequest( aString , aString , null , aData , null , null , aString , anInt , null , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReduceForListCodec.ResponseParameters params = MapReduceForListCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapReduceForSetCodec.encodeRequest( aString , aString , null , aData , null , null , aString , anInt , null , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReduceForSetCodec.ResponseParameters params = MapReduceForSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapReduceForMultiMapCodec.encodeRequest( aString , aString , null , aData , null , null , aString , anInt , null , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
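// Response blocks like the one below decode a frame read from the same input stream and assert that each
// decoded field equals the expected reference value (null where a nullable parameter is expected).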
MapReduceForMultiMapCodec.ResponseParameters params = MapReduceForMultiMapCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = MapReduceForCustomCodec.encodeRequest( aString , aString , null , aData , null , null , aData , anInt , null , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); MapReduceForCustomCodec.ResponseParameters params = MapReduceForCustomCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = TransactionalMapContainsKeyCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapContainsKeyCodec.ResponseParameters params = TransactionalMapContainsKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalMapGetCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapGetCodec.ResponseParameters params = TransactionalMapGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalMapGetForUpdateCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapGetForUpdateCodec.ResponseParameters params = TransactionalMapGetForUpdateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalMapSizeCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapSizeCodec.ResponseParameters params = TransactionalMapSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = TransactionalMapIsEmptyCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapIsEmptyCodec.ResponseParameters params = TransactionalMapIsEmptyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalMapPutCodec.encodeRequest( aString , aString , aLong , aData , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapPutCodec.ResponseParameters params = TransactionalMapPutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalMapSetCodec.encodeRequest( aString , aString , aLong , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapSetCodec.ResponseParameters params = TransactionalMapSetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = TransactionalMapPutIfAbsentCodec.encodeRequest( aString , aString , aLong , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapPutIfAbsentCodec.ResponseParameters params = TransactionalMapPutIfAbsentCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalMapReplaceCodec.encodeRequest( aString , aString , aLong , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapReplaceCodec.ResponseParameters params = TransactionalMapReplaceCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalMapReplaceIfSameCodec.encodeRequest( aString , aString , aLong , aData , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapReplaceIfSameCodec.ResponseParameters params = TransactionalMapReplaceIfSameCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { 
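// The blocks that follow repeat the same generated encode/decode round-trip pattern for the transactional
// map, multimap, set, list and queue codecs; aString, aLong, aData, etc. are assumed to be the shared
// reference values defined elsewhere in this generated compatibility test.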
ClientMessage clientMessage = TransactionalMapRemoveCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapRemoveCodec.ResponseParameters params = TransactionalMapRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalMapDeleteCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapDeleteCodec.ResponseParameters params = TransactionalMapDeleteCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = TransactionalMapRemoveIfSameCodec.encodeRequest( aString , aString , aLong , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapRemoveIfSameCodec.ResponseParameters params = TransactionalMapRemoveIfSameCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalMapKeySetCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapKeySetCodec.ResponseParameters params = TransactionalMapKeySetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = TransactionalMapKeySetWithPredicateCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapKeySetWithPredicateCodec.ResponseParameters params = TransactionalMapKeySetWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = TransactionalMapValuesCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
TransactionalMapValuesCodec.ResponseParameters params = TransactionalMapValuesCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = TransactionalMapValuesWithPredicateCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMapValuesWithPredicateCodec.ResponseParameters params = TransactionalMapValuesWithPredicateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = TransactionalMultiMapPutCodec.encodeRequest( aString , aString , aLong , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMultiMapPutCodec.ResponseParameters params = TransactionalMultiMapPutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalMultiMapGetCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMultiMapGetCodec.ResponseParameters params = TransactionalMultiMapGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = TransactionalMultiMapRemoveCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMultiMapRemoveCodec.ResponseParameters params = TransactionalMultiMapRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = TransactionalMultiMapRemoveEntryCodec.encodeRequest( aString , aString , aLong , aData , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMultiMapRemoveEntryCodec.ResponseParameters params = TransactionalMultiMapRemoveEntryCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalMultiMapValueCountCodec.encodeRequest( aString , aString , aLong , aData ); int length = 
inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMultiMapValueCountCodec.ResponseParameters params = TransactionalMultiMapValueCountCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = TransactionalMultiMapSizeCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalMultiMapSizeCodec.ResponseParameters params = TransactionalMultiMapSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = TransactionalSetAddCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalSetAddCodec.ResponseParameters params = TransactionalSetAddCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalSetRemoveCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalSetRemoveCodec.ResponseParameters params = TransactionalSetRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalSetSizeCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalSetSizeCodec.ResponseParameters params = TransactionalSetSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = TransactionalListAddCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalListAddCodec.ResponseParameters params = TransactionalListAddCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); 
assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalListRemoveCodec.encodeRequest( aString , aString , aLong , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalListRemoveCodec.ResponseParameters params = TransactionalListRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalListSizeCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalListSizeCodec.ResponseParameters params = TransactionalListSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = TransactionalQueueOfferCodec.encodeRequest( aString , aString , aLong , aData , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalQueueOfferCodec.ResponseParameters params = TransactionalQueueOfferCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = TransactionalQueueTakeCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalQueueTakeCodec.ResponseParameters params = TransactionalQueueTakeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalQueuePollCodec.encodeRequest( aString , aString , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalQueuePollCodec.ResponseParameters params = TransactionalQueuePollCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalQueuePeekCodec.encodeRequest( aString , aString , aLong , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; 
inputStream.read(bytes); TransactionalQueuePeekCodec.ResponseParameters params = TransactionalQueuePeekCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = TransactionalQueueSizeCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionalQueueSizeCodec.ResponseParameters params = TransactionalQueueSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = CacheAddEntryListenerCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheAddEntryListenerCodec.ResponseParameters params = CacheAddEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class CacheAddEntryListenerCodecHandler extends CacheAddEntryListenerCodec.AbstractEventHandler { @Override public void handle( int type , java.util.Collection<com.hazelcast.cache.impl.CacheEventData> keys , int completionId ) { assertTrue(isEqual(anInt, type)); assertTrue(isEqual(cacheEventDatas, keys)); assertTrue(isEqual(anInt, completionId)); } } CacheAddEntryListenerCodecHandler handler = new CacheAddEntryListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = CacheAddInvalidationListenerCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); // Since the test is generated for protocol version (1.2) which is earlier than latest change in the message // (version 1.4), only the bytes after frame length fields are compared int frameLength = clientMessage.getFrameLength(); assertTrue(frameLength >= length); inputStream.skipBytes(FRAME_LEN_FIELD_SIZE); byte[] bytes = new byte[length - FRAME_LEN_FIELD_SIZE]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOfRange(clientMessage.buffer().byteArray(), FRAME_LEN_FIELD_SIZE, length), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheAddInvalidationListenerCodec.ResponseParameters params = CacheAddInvalidationListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class CacheAddInvalidationListenerCodecHandler extends CacheAddInvalidationListenerCodec.AbstractEventHandler { @Override public void handle( java.lang.String name , com.hazelcast.nio.serialization.Data key , java.lang.String sourceUuid , java.util.UUID partitionUuid , long sequence ) { assertTrue(isEqual(aString, name)); assertTrue(isEqual(null, key)); assertTrue(isEqual(null, sourceUuid)); } @Override public void handle( java.lang.String name , java.util.Collection<com.hazelcast.nio.serialization.Data> keys , 
java.util.Collection<java.lang.String> sourceUuids , java.util.Collection<java.util.UUID> partitionUuids , java.util.Collection<java.lang.Long> sequences ) { assertTrue(isEqual(aString, name)); assertTrue(isEqual(datas, keys)); assertTrue(isEqual(null, sourceUuids)); } } CacheAddInvalidationListenerCodecHandler handler = new CacheAddInvalidationListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = CacheClearCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheClearCodec.ResponseParameters params = CacheClearCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CacheRemoveAllKeysCodec.encodeRequest( aString , datas , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheRemoveAllKeysCodec.ResponseParameters params = CacheRemoveAllKeysCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CacheRemoveAllCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheRemoveAllCodec.ResponseParameters params = CacheRemoveAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CacheContainsKeyCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheContainsKeyCodec.ResponseParameters params = CacheContainsKeyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CacheCreateConfigCodec.encodeRequest( aData , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheCreateConfigCodec.ResponseParameters params = CacheCreateConfigCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheDestroyCodec.encodeRequest( 
aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheDestroyCodec.ResponseParameters params = CacheDestroyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CacheEntryProcessorCodec.encodeRequest( aString , aData , aData , datas , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheEntryProcessorCodec.ResponseParameters params = CacheEntryProcessorCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheGetAllCodec.encodeRequest( aString , datas , null ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheGetAllCodec.ResponseParameters params = CacheGetAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = CacheGetAndRemoveCodec.encodeRequest( aString , aData , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheGetAndRemoveCodec.ResponseParameters params = CacheGetAndRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheGetAndReplaceCodec.encodeRequest( aString , aData , aData , null , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheGetAndReplaceCodec.ResponseParameters params = CacheGetAndReplaceCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheGetConfigCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheGetConfigCodec.ResponseParameters params = CacheGetConfigCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheGetCodec.encodeRequest( aString , aData , null ); int length = inputStream.readInt(); 
byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheGetCodec.ResponseParameters params = CacheGetCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheIterateCodec.encodeRequest( aString , anInt , anInt , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheIterateCodec.ResponseParameters params = CacheIterateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.tableIndex)); assertTrue(isEqual(datas, params.keys)); } { ClientMessage clientMessage = CacheListenerRegistrationCodec.encodeRequest( aString , aData , aBoolean , anAddress ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheListenerRegistrationCodec.ResponseParameters params = CacheListenerRegistrationCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CacheLoadAllCodec.encodeRequest( aString , datas , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheLoadAllCodec.ResponseParameters params = CacheLoadAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CacheManagementConfigCodec.encodeRequest( aString , aBoolean , aBoolean , anAddress ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheManagementConfigCodec.ResponseParameters params = CacheManagementConfigCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CachePutIfAbsentCodec.encodeRequest( aString , aData , aData , null , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CachePutIfAbsentCodec.ResponseParameters params = CachePutIfAbsentCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CachePutCodec.encodeRequest( aString , aData , aData , null , aBoolean , anInt ); int length = inputStream.readInt(); byte[] bytes = new 
byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CachePutCodec.ResponseParameters params = CachePutCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheRemoveEntryListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheRemoveEntryListenerCodec.ResponseParameters params = CacheRemoveEntryListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CacheRemoveInvalidationListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheRemoveInvalidationListenerCodec.ResponseParameters params = CacheRemoveInvalidationListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CacheRemoveCodec.encodeRequest( aString , aData , null , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheRemoveCodec.ResponseParameters params = CacheRemoveCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CacheReplaceCodec.encodeRequest( aString , aData , null , aData , null , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheReplaceCodec.ResponseParameters params = CacheReplaceCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = CacheSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheSizeCodec.ResponseParameters params = CacheSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = CacheAddPartitionLostListenerCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); byte[] 
bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheAddPartitionLostListenerCodec.ResponseParameters params = CacheAddPartitionLostListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class CacheAddPartitionLostListenerCodecHandler extends CacheAddPartitionLostListenerCodec.AbstractEventHandler { @Override public void handle( int partitionId , java.lang.String uuid ) { assertTrue(isEqual(anInt, partitionId)); assertTrue(isEqual(aString, uuid)); } } CacheAddPartitionLostListenerCodecHandler handler = new CacheAddPartitionLostListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = CacheRemovePartitionLostListenerCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheRemovePartitionLostListenerCodec.ResponseParameters params = CacheRemovePartitionLostListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = CachePutAllCodec.encodeRequest( aString , aListOfEntry , null , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CachePutAllCodec.ResponseParameters params = CachePutAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = CacheIterateEntriesCodec.encodeRequest( aString , anInt , anInt , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); CacheIterateEntriesCodec.ResponseParameters params = CacheIterateEntriesCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.tableIndex)); assertTrue(isEqual(aListOfEntry, params.entries)); } { ClientMessage clientMessage = XATransactionClearRemoteCodec.encodeRequest( anXid ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); XATransactionClearRemoteCodec.ResponseParameters params = XATransactionClearRemoteCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = XATransactionCollectTransactionsCodec.encodeRequest( ); int length = inputStream.readInt(); byte[] 
bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); XATransactionCollectTransactionsCodec.ResponseParameters params = XATransactionCollectTransactionsCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = XATransactionFinalizeCodec.encodeRequest( anXid , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); XATransactionFinalizeCodec.ResponseParameters params = XATransactionFinalizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = XATransactionCommitCodec.encodeRequest( aString , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); XATransactionCommitCodec.ResponseParameters params = XATransactionCommitCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = XATransactionCreateCodec.encodeRequest( anXid , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); XATransactionCreateCodec.ResponseParameters params = XATransactionCreateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { ClientMessage clientMessage = XATransactionPrepareCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); XATransactionPrepareCodec.ResponseParameters params = XATransactionPrepareCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = XATransactionRollbackCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); XATransactionRollbackCodec.ResponseParameters params = XATransactionRollbackCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = TransactionCommitCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), 
bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionCommitCodec.ResponseParameters params = TransactionCommitCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = TransactionCreateCodec.encodeRequest( aLong , anInt , anInt , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionCreateCodec.ResponseParameters params = TransactionCreateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { ClientMessage clientMessage = TransactionRollbackCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); TransactionRollbackCodec.ResponseParameters params = TransactionRollbackCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = ContinuousQueryPublisherCreateWithValueCodec.encodeRequest( aString , aString , aData , anInt , anInt , aLong , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ContinuousQueryPublisherCreateWithValueCodec.ResponseParameters params = ContinuousQueryPublisherCreateWithValueCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aListOfEntry, params.response)); } { ClientMessage clientMessage = ContinuousQueryPublisherCreateCodec.encodeRequest( aString , aString , aData , anInt , anInt , aLong , aBoolean , aBoolean ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ContinuousQueryPublisherCreateCodec.ResponseParameters params = ContinuousQueryPublisherCreateCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(datas, params.response)); } { ClientMessage clientMessage = ContinuousQueryMadePublishableCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ContinuousQueryMadePublishableCodec.ResponseParameters params = ContinuousQueryMadePublishableCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ContinuousQueryAddListenerCodec.encodeRequest( aString , aBoolean ); int length = 
inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ContinuousQueryAddListenerCodec.ResponseParameters params = ContinuousQueryAddListenerCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aString, params.response)); } { class ContinuousQueryAddListenerCodecHandler extends ContinuousQueryAddListenerCodec.AbstractEventHandler { @Override public void handle( com.hazelcast.map.impl.querycache.event.QueryCacheEventData data ) { assertTrue(isEqual(aQueryCacheEventData, data)); } @Override public void handle( java.util.Collection<com.hazelcast.map.impl.querycache.event.QueryCacheEventData> events , java.lang.String source , int partitionId ) { assertTrue(isEqual(queryCacheEventDatas, events)); assertTrue(isEqual(aString, source)); assertTrue(isEqual(anInt, partitionId)); } } ContinuousQueryAddListenerCodecHandler handler = new ContinuousQueryAddListenerCodecHandler(); { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); handler.handle(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } } { ClientMessage clientMessage = ContinuousQuerySetReadCursorCodec.encodeRequest( aString , aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ContinuousQuerySetReadCursorCodec.ResponseParameters params = ContinuousQuerySetReadCursorCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = ContinuousQueryDestroyCacheCodec.encodeRequest( aString , aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); ContinuousQueryDestroyCacheCodec.ResponseParameters params = ContinuousQueryDestroyCacheCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = RingbufferSizeCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferSizeCodec.ResponseParameters params = RingbufferSizeCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = RingbufferTailSequenceCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferTailSequenceCodec.ResponseParameters params = RingbufferTailSequenceCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = RingbufferHeadSequenceCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferHeadSequenceCodec.ResponseParameters params = RingbufferHeadSequenceCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = RingbufferCapacityCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferCapacityCodec.ResponseParameters params = RingbufferCapacityCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = RingbufferRemainingCapacityCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferRemainingCapacityCodec.ResponseParameters params = RingbufferRemainingCapacityCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = RingbufferAddCodec.encodeRequest( aString , anInt , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferAddCodec.ResponseParameters params = RingbufferAddCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = RingbufferReadOneCodec.encodeRequest( aString , aLong ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferReadOneCodec.ResponseParameters params = RingbufferReadOneCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = RingbufferAddAllCodec.encodeRequest( aString , datas , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); 
assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferAddAllCodec.ResponseParameters params = RingbufferAddAllCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aLong, params.response)); } { ClientMessage clientMessage = RingbufferReadManyCodec.encodeRequest( aString , aLong , anInt , anInt , null ); int length = inputStream.readInt(); // Since the test is generated for protocol version (1.2) which is earlier than latest change in the message // (version 1.5), only the bytes after frame length fields are compared int frameLength = clientMessage.getFrameLength(); assertTrue(frameLength >= length); inputStream.skipBytes(FRAME_LEN_FIELD_SIZE); byte[] bytes = new byte[length - FRAME_LEN_FIELD_SIZE]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOfRange(clientMessage.buffer().byteArray(), FRAME_LEN_FIELD_SIZE, length), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); RingbufferReadManyCodec.ResponseParameters params = RingbufferReadManyCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.readCount)); assertTrue(isEqual(datas, params.items)); assertFalse(params.itemSeqsExist); } { ClientMessage clientMessage = DurableExecutorShutdownCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); DurableExecutorShutdownCodec.ResponseParameters params = DurableExecutorShutdownCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = DurableExecutorIsShutdownCodec.encodeRequest( aString ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); DurableExecutorIsShutdownCodec.ResponseParameters params = DurableExecutorIsShutdownCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(aBoolean, params.response)); } { ClientMessage clientMessage = DurableExecutorSubmitToPartitionCodec.encodeRequest( aString , aData ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); DurableExecutorSubmitToPartitionCodec.ResponseParameters params = DurableExecutorSubmitToPartitionCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(anInt, params.response)); } { ClientMessage clientMessage = DurableExecutorRetrieveResultCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = 
inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); DurableExecutorRetrieveResultCodec.ResponseParameters params = DurableExecutorRetrieveResultCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } { ClientMessage clientMessage = DurableExecutorDisposeResultCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); DurableExecutorDisposeResultCodec.ResponseParameters params = DurableExecutorDisposeResultCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); } { ClientMessage clientMessage = DurableExecutorRetrieveAndDisposeResultCodec.encodeRequest( aString , anInt ); int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); assertTrue(isEqual(Arrays.copyOf(clientMessage.buffer().byteArray(), clientMessage.getFrameLength()), bytes)); } { int length = inputStream.readInt(); byte[] bytes = new byte[length]; inputStream.read(bytes); DurableExecutorRetrieveAndDisposeResultCodec.ResponseParameters params = DurableExecutorRetrieveAndDisposeResultCodec.decodeResponse(ClientMessage.createForDecode(new SafeBuffer(bytes), 0)); assertTrue(isEqual(null, params.response)); } inputStream.close(); input.close(); } }
apache-2.0
tomsnail/snail-dev-console
src/main/java/com/thinkgem/jeesite/common/test/SpringTransactionalContextTests.java
820
package com.thinkgem.jeesite.common.test;

import javax.sql.DataSource;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests;

/**
 * Spring unit test base class.
 * @author ThinkGem
 * @version 2013-05-15
 */
@ActiveProfiles("production")
@ContextConfiguration(locations = {"/spring-context.xml"})
public class SpringTransactionalContextTests extends AbstractTransactionalJUnit4SpringContextTests {

    protected DataSource dataSource;

    @Autowired
    public void setDataSource(DataSource dataSource) {
        super.setDataSource(dataSource);
        this.dataSource = dataSource;
    }

}
apache-2.0
porkybrain/Kvasir
Lib/Chip/CM4/Freescale/MK20D10/NVIC.hpp
41881
#pragma once #include <Register/Utility.hpp> namespace Kvasir { //Nested Vectored Interrupt Controller namespace NvicNviciser0{ ///<Interrupt Set Enable Register n using Addr = Register::Address<0xe000e100,0x00000000,0x00000000,unsigned>; ///Interrupt set enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setena{}; } namespace NvicNviciser1{ ///<Interrupt Set Enable Register n using Addr = Register::Address<0xe000e104,0x00000000,0x00000000,unsigned>; ///Interrupt set enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setena{}; } namespace NvicNviciser2{ ///<Interrupt Set Enable Register n using Addr = Register::Address<0xe000e108,0x00000000,0x00000000,unsigned>; ///Interrupt set enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setena{}; } namespace NvicNviciser3{ ///<Interrupt Set Enable Register n using Addr = Register::Address<0xe000e10c,0x00000000,0x00000000,unsigned>; ///Interrupt set enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setena{}; } namespace NvicNvicicer0{ ///<Interrupt Clear Enable Register n using Addr = Register::Address<0xe000e180,0x00000000,0x00000000,unsigned>; ///Interrupt clear-enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrena{}; } namespace NvicNvicicer1{ ///<Interrupt Clear Enable Register n using Addr = Register::Address<0xe000e184,0x00000000,0x00000000,unsigned>; ///Interrupt clear-enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrena{}; } namespace NvicNvicicer2{ ///<Interrupt Clear Enable Register n using Addr = Register::Address<0xe000e188,0x00000000,0x00000000,unsigned>; ///Interrupt clear-enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrena{}; } namespace NvicNvicicer3{ ///<Interrupt Clear Enable Register n using Addr = Register::Address<0xe000e18c,0x00000000,0x00000000,unsigned>; ///Interrupt clear-enable bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrena{}; } namespace NvicNvicispr0{ ///<Interrupt Set Pending Register n using Addr = Register::Address<0xe000e200,0x00000000,0x00000000,unsigned>; ///Interrupt set-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setpend{}; } namespace NvicNvicispr1{ ///<Interrupt Set Pending Register n using Addr = Register::Address<0xe000e204,0x00000000,0x00000000,unsigned>; ///Interrupt set-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setpend{}; } namespace NvicNvicispr2{ ///<Interrupt Set Pending Register n using Addr = Register::Address<0xe000e208,0x00000000,0x00000000,unsigned>; ///Interrupt set-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setpend{}; } namespace NvicNvicispr3{ ///<Interrupt Set Pending Register n using Addr = Register::Address<0xe000e20c,0x00000000,0x00000000,unsigned>; ///Interrupt set-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> setpend{}; } namespace NvicNvicicpr0{ ///<Interrupt 
Clear Pending Register n using Addr = Register::Address<0xe000e280,0x00000000,0x00000000,unsigned>; ///Interrupt clear-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrpend{}; } namespace NvicNvicicpr1{ ///<Interrupt Clear Pending Register n using Addr = Register::Address<0xe000e284,0x00000000,0x00000000,unsigned>; ///Interrupt clear-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrpend{}; } namespace NvicNvicicpr2{ ///<Interrupt Clear Pending Register n using Addr = Register::Address<0xe000e288,0x00000000,0x00000000,unsigned>; ///Interrupt clear-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrpend{}; } namespace NvicNvicicpr3{ ///<Interrupt Clear Pending Register n using Addr = Register::Address<0xe000e28c,0x00000000,0x00000000,unsigned>; ///Interrupt clear-pending bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> clrpend{}; } namespace NvicNviciabr0{ ///<Interrupt Active bit Register n using Addr = Register::Address<0xe000e300,0x00000000,0x00000000,unsigned>; ///Interrupt active flags constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> active{}; } namespace NvicNviciabr1{ ///<Interrupt Active bit Register n using Addr = Register::Address<0xe000e304,0x00000000,0x00000000,unsigned>; ///Interrupt active flags constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> active{}; } namespace NvicNviciabr2{ ///<Interrupt Active bit Register n using Addr = Register::Address<0xe000e308,0x00000000,0x00000000,unsigned>; ///Interrupt active flags constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> active{}; } namespace NvicNviciabr3{ ///<Interrupt Active bit Register n using Addr = Register::Address<0xe000e30c,0x00000000,0x00000000,unsigned>; ///Interrupt active flags constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,0),Register::ReadWriteAccess,unsigned> active{}; } namespace NvicNvicip0{ ///<Interrupt Priority Register 0 using Addr = Register::Address<0xe000e400,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA0interrupt 0 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri0{}; } namespace NvicNvicip1{ ///<Interrupt Priority Register 1 using Addr = Register::Address<0xe000e401,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA1interrupt 1 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri1{}; } namespace NvicNvicip2{ ///<Interrupt Priority Register 2 using Addr = Register::Address<0xe000e402,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA2interrupt 2 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri2{}; } namespace NvicNvicip3{ ///<Interrupt Priority Register 3 using Addr = Register::Address<0xe000e403,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA3interrupt 3 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri3{}; } namespace NvicNvicip4{ ///<Interrupt Priority Register 4 using Addr = Register::Address<0xe000e404,0xffffff00,0x00000000,unsigned char>; ///Priority of the 
INT_DMA4interrupt 4 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri4{}; } namespace NvicNvicip5{ ///<Interrupt Priority Register 5 using Addr = Register::Address<0xe000e405,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA5interrupt 5 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri5{}; } namespace NvicNvicip6{ ///<Interrupt Priority Register 6 using Addr = Register::Address<0xe000e406,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA6interrupt 6 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri6{}; } namespace NvicNvicip7{ ///<Interrupt Priority Register 7 using Addr = Register::Address<0xe000e407,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA7interrupt 7 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri7{}; } namespace NvicNvicip8{ ///<Interrupt Priority Register 8 using Addr = Register::Address<0xe000e408,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA8interrupt 8 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri8{}; } namespace NvicNvicip9{ ///<Interrupt Priority Register 9 using Addr = Register::Address<0xe000e409,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA9interrupt 9 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri9{}; } namespace NvicNvicip10{ ///<Interrupt Priority Register 10 using Addr = Register::Address<0xe000e40a,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA10interrupt 10 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri10{}; } namespace NvicNvicip11{ ///<Interrupt Priority Register 11 using Addr = Register::Address<0xe000e40b,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA11interrupt 11 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri11{}; } namespace NvicNvicip12{ ///<Interrupt Priority Register 12 using Addr = Register::Address<0xe000e40c,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA12interrupt 12 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri12{}; } namespace NvicNvicip13{ ///<Interrupt Priority Register 13 using Addr = Register::Address<0xe000e40d,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA13interrupt 13 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri13{}; } namespace NvicNvicip14{ ///<Interrupt Priority Register 14 using Addr = Register::Address<0xe000e40e,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA14interrupt 14 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri14{}; } namespace NvicNvicip15{ ///<Interrupt Priority Register 15 using Addr = Register::Address<0xe000e40f,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA15interrupt 15 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri15{}; } namespace NvicNvicip16{ ///<Interrupt Priority Register 16 using Addr = Register::Address<0xe000e410,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DMA_Errorinterrupt 16 constexpr 
Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri16{}; } namespace NvicNvicip17{ ///<Interrupt Priority Register 17 using Addr = Register::Address<0xe000e411,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_MCMinterrupt 17 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri17{}; } namespace NvicNvicip18{ ///<Interrupt Priority Register 18 using Addr = Register::Address<0xe000e412,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_FTFLinterrupt 18 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri18{}; } namespace NvicNvicip19{ ///<Interrupt Priority Register 19 using Addr = Register::Address<0xe000e413,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_Read_Collisioninterrupt 19 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri19{}; } namespace NvicNvicip20{ ///<Interrupt Priority Register 20 using Addr = Register::Address<0xe000e414,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_LVD_LVWinterrupt 20 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri20{}; } namespace NvicNvicip21{ ///<Interrupt Priority Register 21 using Addr = Register::Address<0xe000e415,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_LLWinterrupt 21 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri21{}; } namespace NvicNvicip22{ ///<Interrupt Priority Register 22 using Addr = Register::Address<0xe000e416,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_WDOG_EWMinterrupt 22 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri22{}; } namespace NvicNvicip23{ ///<Interrupt Priority Register 23 using Addr = Register::Address<0xe000e417,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 23 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri23{}; } namespace NvicNvicip24{ ///<Interrupt Priority Register 24 using Addr = Register::Address<0xe000e418,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_I2C0interrupt 24 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri24{}; } namespace NvicNvicip25{ ///<Interrupt Priority Register 25 using Addr = Register::Address<0xe000e419,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_I2C1interrupt 25 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri25{}; } namespace NvicNvicip26{ ///<Interrupt Priority Register 26 using Addr = Register::Address<0xe000e41a,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_SPI0interrupt 26 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri26{}; } namespace NvicNvicip27{ ///<Interrupt Priority Register 27 using Addr = Register::Address<0xe000e41b,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_SPI1interrupt 27 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri27{}; } namespace NvicNvicip28{ ///<Interrupt Priority Register 28 using Addr = Register::Address<0xe000e41c,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_SPI2interrupt 28 constexpr 
Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri28{}; } namespace NvicNvicip29{ ///<Interrupt Priority Register 29 using Addr = Register::Address<0xe000e41d,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN0_ORed_Message_bufferinterrupt 29 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri29{}; } namespace NvicNvicip30{ ///<Interrupt Priority Register 30 using Addr = Register::Address<0xe000e41e,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN0_Bus_Offinterrupt 30 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri30{}; } namespace NvicNvicip31{ ///<Interrupt Priority Register 31 using Addr = Register::Address<0xe000e41f,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN0_Errorinterrupt 31 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri31{}; } namespace NvicNvicip32{ ///<Interrupt Priority Register 32 using Addr = Register::Address<0xe000e420,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN0_Tx_Warninginterrupt 32 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri32{}; } namespace NvicNvicip33{ ///<Interrupt Priority Register 33 using Addr = Register::Address<0xe000e421,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN0_Rx_Warninginterrupt 33 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri33{}; } namespace NvicNvicip34{ ///<Interrupt Priority Register 34 using Addr = Register::Address<0xe000e422,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN0_Wake_Upinterrupt 34 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri34{}; } namespace NvicNvicip35{ ///<Interrupt Priority Register 35 using Addr = Register::Address<0xe000e423,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_I2S0_Txinterrupt 35 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri35{}; } namespace NvicNvicip36{ ///<Interrupt Priority Register 36 using Addr = Register::Address<0xe000e424,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_I2S0_Rxinterrupt 36 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri36{}; } namespace NvicNvicip37{ ///<Interrupt Priority Register 37 using Addr = Register::Address<0xe000e425,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN1_ORed_Message_bufferinterrupt 37 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri37{}; } namespace NvicNvicip38{ ///<Interrupt Priority Register 38 using Addr = Register::Address<0xe000e426,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN1_Bus_Offinterrupt 38 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri38{}; } namespace NvicNvicip39{ ///<Interrupt Priority Register 39 using Addr = Register::Address<0xe000e427,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN1_Errorinterrupt 39 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri39{}; } namespace NvicNvicip40{ ///<Interrupt Priority Register 40 using Addr = 
Register::Address<0xe000e428,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN1_Tx_Warninginterrupt 40 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri40{}; } namespace NvicNvicip41{ ///<Interrupt Priority Register 41 using Addr = Register::Address<0xe000e429,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN1_Rx_Warninginterrupt 41 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri41{}; } namespace NvicNvicip42{ ///<Interrupt Priority Register 42 using Addr = Register::Address<0xe000e42a,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CAN1_Wake_Upinterrupt 42 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri42{}; } namespace NvicNvicip43{ ///<Interrupt Priority Register 43 using Addr = Register::Address<0xe000e42b,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 43 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri43{}; } namespace NvicNvicip44{ ///<Interrupt Priority Register 44 using Addr = Register::Address<0xe000e42c,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART0_LONinterrupt 44 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri44{}; } namespace NvicNvicip45{ ///<Interrupt Priority Register 45 using Addr = Register::Address<0xe000e42d,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART0_RX_TXinterrupt 45 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri45{}; } namespace NvicNvicip46{ ///<Interrupt Priority Register 46 using Addr = Register::Address<0xe000e42e,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART0_ERRinterrupt 46 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri46{}; } namespace NvicNvicip47{ ///<Interrupt Priority Register 47 using Addr = Register::Address<0xe000e42f,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART1_RX_TXinterrupt 47 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri47{}; } namespace NvicNvicip48{ ///<Interrupt Priority Register 48 using Addr = Register::Address<0xe000e430,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART1_ERRinterrupt 48 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri48{}; } namespace NvicNvicip49{ ///<Interrupt Priority Register 49 using Addr = Register::Address<0xe000e431,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART2_RX_TXinterrupt 49 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri49{}; } namespace NvicNvicip50{ ///<Interrupt Priority Register 50 using Addr = Register::Address<0xe000e432,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART2_ERRinterrupt 50 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri50{}; } namespace NvicNvicip51{ ///<Interrupt Priority Register 51 using Addr = Register::Address<0xe000e433,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART3_RX_TXinterrupt 51 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri51{}; } namespace NvicNvicip52{ 
///<Interrupt Priority Register 52 using Addr = Register::Address<0xe000e434,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART3_ERRinterrupt 52 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri52{}; } namespace NvicNvicip53{ ///<Interrupt Priority Register 53 using Addr = Register::Address<0xe000e435,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART4_RX_TXinterrupt 53 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri53{}; } namespace NvicNvicip54{ ///<Interrupt Priority Register 54 using Addr = Register::Address<0xe000e436,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART4_ERRinterrupt 54 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri54{}; } namespace NvicNvicip55{ ///<Interrupt Priority Register 55 using Addr = Register::Address<0xe000e437,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART5_RX_TXinterrupt 55 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri55{}; } namespace NvicNvicip56{ ///<Interrupt Priority Register 56 using Addr = Register::Address<0xe000e438,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_UART5_ERRinterrupt 56 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri56{}; } namespace NvicNvicip57{ ///<Interrupt Priority Register 57 using Addr = Register::Address<0xe000e439,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_ADC0interrupt 57 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri57{}; } namespace NvicNvicip58{ ///<Interrupt Priority Register 58 using Addr = Register::Address<0xe000e43a,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_ADC1interrupt 58 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri58{}; } namespace NvicNvicip59{ ///<Interrupt Priority Register 59 using Addr = Register::Address<0xe000e43b,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CMP0interrupt 59 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri59{}; } namespace NvicNvicip60{ ///<Interrupt Priority Register 60 using Addr = Register::Address<0xe000e43c,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CMP1interrupt 60 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri60{}; } namespace NvicNvicip61{ ///<Interrupt Priority Register 61 using Addr = Register::Address<0xe000e43d,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CMP2interrupt 61 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri61{}; } namespace NvicNvicip62{ ///<Interrupt Priority Register 62 using Addr = Register::Address<0xe000e43e,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_FTM0interrupt 62 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri62{}; } namespace NvicNvicip63{ ///<Interrupt Priority Register 63 using Addr = Register::Address<0xe000e43f,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_FTM1interrupt 63 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri63{}; } namespace 
NvicNvicip64{ ///<Interrupt Priority Register 64 using Addr = Register::Address<0xe000e440,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_FTM2interrupt 64 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri64{}; } namespace NvicNvicip65{ ///<Interrupt Priority Register 65 using Addr = Register::Address<0xe000e441,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_CMTinterrupt 65 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri65{}; } namespace NvicNvicip66{ ///<Interrupt Priority Register 66 using Addr = Register::Address<0xe000e442,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_RTCinterrupt 66 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri66{}; } namespace NvicNvicip67{ ///<Interrupt Priority Register 67 using Addr = Register::Address<0xe000e443,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_RTC_Secondsinterrupt 67 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri67{}; } namespace NvicNvicip68{ ///<Interrupt Priority Register 68 using Addr = Register::Address<0xe000e444,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PIT0interrupt 68 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri68{}; } namespace NvicNvicip69{ ///<Interrupt Priority Register 69 using Addr = Register::Address<0xe000e445,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PIT1interrupt 69 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri69{}; } namespace NvicNvicip70{ ///<Interrupt Priority Register 70 using Addr = Register::Address<0xe000e446,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PIT2interrupt 70 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri70{}; } namespace NvicNvicip71{ ///<Interrupt Priority Register 71 using Addr = Register::Address<0xe000e447,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PIT3interrupt 71 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri71{}; } namespace NvicNvicip72{ ///<Interrupt Priority Register 72 using Addr = Register::Address<0xe000e448,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PDB0interrupt 72 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri72{}; } namespace NvicNvicip73{ ///<Interrupt Priority Register 73 using Addr = Register::Address<0xe000e449,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_USB0interrupt 73 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri73{}; } namespace NvicNvicip74{ ///<Interrupt Priority Register 74 using Addr = Register::Address<0xe000e44a,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_USBDCDinterrupt 74 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri74{}; } namespace NvicNvicip75{ ///<Interrupt Priority Register 75 using Addr = Register::Address<0xe000e44b,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 75 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri75{}; } namespace NvicNvicip76{ ///<Interrupt 
Priority Register 76 using Addr = Register::Address<0xe000e44c,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 76 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri76{}; } namespace NvicNvicip77{ ///<Interrupt Priority Register 77 using Addr = Register::Address<0xe000e44d,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 77 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri77{}; } namespace NvicNvicip78{ ///<Interrupt Priority Register 78 using Addr = Register::Address<0xe000e44e,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 78 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri78{}; } namespace NvicNvicip79{ ///<Interrupt Priority Register 79 using Addr = Register::Address<0xe000e44f,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 79 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri79{}; } namespace NvicNvicip80{ ///<Interrupt Priority Register 80 using Addr = Register::Address<0xe000e450,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_SDHCinterrupt 80 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri80{}; } namespace NvicNvicip81{ ///<Interrupt Priority Register 81 using Addr = Register::Address<0xe000e451,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DAC0interrupt 81 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri81{}; } namespace NvicNvicip82{ ///<Interrupt Priority Register 82 using Addr = Register::Address<0xe000e452,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_DAC1interrupt 82 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri82{}; } namespace NvicNvicip83{ ///<Interrupt Priority Register 83 using Addr = Register::Address<0xe000e453,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_TSI0interrupt 83 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri83{}; } namespace NvicNvicip84{ ///<Interrupt Priority Register 84 using Addr = Register::Address<0xe000e454,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_MCGinterrupt 84 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri84{}; } namespace NvicNvicip85{ ///<Interrupt Priority Register 85 using Addr = Register::Address<0xe000e455,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_LPTMR0interrupt 85 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri85{}; } namespace NvicNvicip86{ ///<Interrupt Priority Register 86 using Addr = Register::Address<0xe000e456,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 86 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri86{}; } namespace NvicNvicip87{ ///<Interrupt Priority Register 87 using Addr = Register::Address<0xe000e457,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PORTAinterrupt 87 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri87{}; } namespace NvicNvicip88{ ///<Interrupt Priority Register 88 using Addr = 
Register::Address<0xe000e458,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PORTBinterrupt 88 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri88{}; } namespace NvicNvicip89{ ///<Interrupt Priority Register 89 using Addr = Register::Address<0xe000e459,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PORTCinterrupt 89 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri89{}; } namespace NvicNvicip90{ ///<Interrupt Priority Register 90 using Addr = Register::Address<0xe000e45a,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PORTDinterrupt 90 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri90{}; } namespace NvicNvicip91{ ///<Interrupt Priority Register 91 using Addr = Register::Address<0xe000e45b,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_PORTEinterrupt 91 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri91{}; } namespace NvicNvicip92{ ///<Interrupt Priority Register 92 using Addr = Register::Address<0xe000e45c,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 92 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri92{}; } namespace NvicNvicip93{ ///<Interrupt Priority Register 93 using Addr = Register::Address<0xe000e45d,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 93 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri93{}; } namespace NvicNvicip94{ ///<Interrupt Priority Register 94 using Addr = Register::Address<0xe000e45e,0xffffff00,0x00000000,unsigned char>; ///Priority of the INT_SWIinterrupt 94 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri94{}; } namespace NvicNvicip95{ ///<Interrupt Priority Register 95 using Addr = Register::Address<0xe000e45f,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 95 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri95{}; } namespace NvicNvicip96{ ///<Interrupt Priority Register 96 using Addr = Register::Address<0xe000e460,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 96 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri96{}; } namespace NvicNvicip97{ ///<Interrupt Priority Register 97 using Addr = Register::Address<0xe000e461,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 97 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri97{}; } namespace NvicNvicip98{ ///<Interrupt Priority Register 98 using Addr = Register::Address<0xe000e462,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 98 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri98{}; } namespace NvicNvicip99{ ///<Interrupt Priority Register 99 using Addr = Register::Address<0xe000e463,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 99 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri99{}; } namespace NvicNvicip100{ ///<Interrupt Priority Register 100 using Addr = Register::Address<0xe000e464,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 100 constexpr 
Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri100{}; } namespace NvicNvicip101{ ///<Interrupt Priority Register 101 using Addr = Register::Address<0xe000e465,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 101 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri101{}; } namespace NvicNvicip102{ ///<Interrupt Priority Register 102 using Addr = Register::Address<0xe000e466,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 102 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri102{}; } namespace NvicNvicip103{ ///<Interrupt Priority Register 103 using Addr = Register::Address<0xe000e467,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 103 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri103{}; } namespace NvicNvicip104{ ///<Interrupt Priority Register 104 using Addr = Register::Address<0xe000e468,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 104 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri104{}; } namespace NvicNvicip105{ ///<Interrupt Priority Register 105 using Addr = Register::Address<0xe000e469,0xffffff00,0x00000000,unsigned char>; ///Priority of interrupt 105 constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,0),Register::ReadWriteAccess,unsigned> pri105{}; } namespace NvicNvicstir{ ///<Software Trigger Interrupt Register using Addr = Register::Address<0xe000ef00,0xfffffe00,0x00000000,unsigned>; ///Interrupt ID of the interrupt to trigger, in the range 0-239. For example, a value of 0x03 specifies interrupt IRQ3. constexpr Register::FieldLocation<Addr,Register::maskFromRange(8,0),Register::ReadWriteAccess,unsigned> intid{}; } }
apache-2.0
adessaigne/camel
catalog/camel-route-parser/src/main/java/org/apache/camel/parser/helper/CamelJavaTreeParserHelper.java
24016
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.parser.helper; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.util.ArrayList; import java.util.List; import org.apache.camel.catalog.CamelCatalog; import org.apache.camel.catalog.DefaultCamelCatalog; import org.apache.camel.parser.model.CamelNodeDetails; import org.apache.camel.parser.model.CamelNodeDetailsFactory; import org.apache.camel.parser.roaster.StatementFieldSource; import org.apache.camel.tooling.model.JsonMapper; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.ASTNode; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.AnonymousClassDeclaration; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.Block; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.BooleanLiteral; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.Expression; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.ExpressionStatement; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.FieldDeclaration; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.InfixExpression; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.MemberValuePair; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.MethodDeclaration; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.MethodInvocation; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.NormalAnnotation; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.NumberLiteral; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.ParenthesizedExpression; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.QualifiedName; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.SimpleName; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.SingleMemberAnnotation; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.StringLiteral; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.Type; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.VariableDeclarationFragment; import org.jboss.forge.roaster._shade.org.eclipse.jdt.core.dom.VariableDeclarationStatement; import org.jboss.forge.roaster.model.Annotation; import org.jboss.forge.roaster.model.source.FieldSource; import org.jboss.forge.roaster.model.source.JavaClassSource; import org.jboss.forge.roaster.model.source.MethodSource; /** * A Camel Java tree parser that only depends on the Roaster API. * <p/> * This implement is used for parsing the Camel routes and build a tree structure of the EIP nodes. 
* * @see CamelJavaParserHelper for parser that can discover endpoints and simple expressions */ public final class CamelJavaTreeParserHelper { private final CamelCatalog camelCatalog = new DefaultCamelCatalog(true); public List<CamelNodeDetails> parseCamelRouteTree( JavaClassSource clazz, String baseDir, String fullyQualifiedFileName, MethodSource<JavaClassSource> configureMethod) { // find any from which is the start of the route CamelNodeDetailsFactory nodeFactory = CamelNodeDetailsFactory.newInstance(); CamelNodeDetails route = nodeFactory.newNode(null, "route"); if (configureMethod != null) { MethodDeclaration md = (MethodDeclaration) configureMethod.getInternal(); Block block = md.getBody(); if (block != null) { for (Object statement : md.getBody().statements()) { // must be a method call expression if (statement instanceof ExpressionStatement) { ExpressionStatement es = (ExpressionStatement) statement; Expression exp = es.getExpression(); boolean valid = isFromCamelRoute(exp); if (valid) { parseExpression(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, exp, route); } } } } } List<CamelNodeDetails> answer = new ArrayList<>(); if (route.getOutputs() == null || route.getOutputs().isEmpty()) { // okay no routes found return answer; } // now parse the route node and build the correct model/tree structure of the EIPs // re-create factory as we rebuild the tree nodeFactory = CamelNodeDetailsFactory.newInstance(); CamelNodeDetails parent = route.getOutputs().get(0); for (int i = 0; i < route.getOutputs().size(); i++) { CamelNodeDetails node = route.getOutputs().get(i); String name = node.getName(); if ("from".equals(name)) { CamelNodeDetails from = nodeFactory.copyNode(null, "from", node); from.setFileName(fullyQualifiedFileName); answer.add(from); parent = from; } else if ("routeId".equals(name)) { // should be set on the parent parent.setRouteId(node.getRouteId()); } else if ("end".equals(name) || "endParent".equals(name) || "endRest".equals(name) || "endDoTry".equals(name) || "endHystrix".equals(name)) { // parent should be grand parent if (parent.getParent() != null) { parent = parent.getParent(); } } else if ("endChoice".equals(name)) { // we are in a choice block so parent should be the first choice up the parent tree while (!"from".equals(parent.getName()) && !"choice".equals(parent.getName())) { if (parent.getParent() != null) { parent = parent.getParent(); } else { break; } } } else if ("choice".equals(name)) { // special for some EIPs CamelNodeDetails output = nodeFactory.copyNode(parent, name, node); parent.addOutput(output); parent = output; } else if ("when".equals(name) || "otherwise".equals(name)) { // we are in a choice block so parent should be the first choice up the parent tree while (!"from".equals(parent.getName()) && !"choice".equals(parent.getName())) { if (parent.getParent() != null) { parent = parent.getParent(); } else { break; } } } else { boolean hasOutput = hasOutput(name); if (hasOutput) { // has output so add as new child node CamelNodeDetails output = nodeFactory.copyNode(parent, name, node); parent.addOutput(output); parent = output; } else { // add straight to itself CamelNodeDetails output = nodeFactory.copyNode(parent, name, node); parent.addOutput(output); } } } return answer; } private boolean isFromCamelRoute(Expression exp) { String rootMethodName = null; // find out if this is from a Camel route (eg from, route etc.) 
Expression sub = exp; while (sub instanceof MethodInvocation) { sub = ((MethodInvocation) sub).getExpression(); if (sub instanceof MethodInvocation) { Expression parent = ((MethodInvocation) sub).getExpression(); if (parent == null) { break; } } } if (sub instanceof MethodInvocation) { rootMethodName = ((MethodInvocation) sub).getName().getIdentifier(); } else if (sub instanceof SimpleName) { rootMethodName = ((SimpleName) sub).getIdentifier(); } // a route starts either via from or route return "from".equals(rootMethodName) || "route".equals(rootMethodName); } private boolean hasOutput(String name) { String json = camelCatalog.modelJSonSchema(name); return JsonMapper.generateEipModel(json).isOutput(); } private boolean hasInput(String name) { String json = camelCatalog.modelJSonSchema(name); return JsonMapper.generateEipModel(json).isInput(); } private static CamelNodeDetails grandParent(CamelNodeDetails node, String parentName) { if (node == null) { return null; } if (parentName.equals(node.getName())) { return node; } else { return grandParent(node.getParent(), parentName); } } private void parseExpression( CamelNodeDetailsFactory nodeFactory, String fullyQualifiedFileName, JavaClassSource clazz, MethodSource<JavaClassSource> configureMethod, Block block, Expression exp, CamelNodeDetails node) { if (exp == null) { return; } if (exp instanceof MethodInvocation) { MethodInvocation mi = (MethodInvocation) exp; node = doParseCamelModels(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, mi, node); // if the method was called on another method, then recursive exp = mi.getExpression(); parseExpression(nodeFactory, fullyQualifiedFileName, clazz, configureMethod, block, exp, node); } } private CamelNodeDetails doParseCamelModels( CamelNodeDetailsFactory nodeFactory, String fullyQualifiedFileName, JavaClassSource clazz, MethodSource<JavaClassSource> configureMethod, Block block, MethodInvocation mi, CamelNodeDetails node) { String name = mi.getName().getIdentifier(); // special for Java DSL having some endXXX boolean isEnd = "end".equals(name) || "endChoice".equals(name) || "endDoTry".equals(name) || "endHystrix".equals(name) || "endParent".equals(name) || "endRest".equals(name); boolean isRoute = "route".equals(name) || "from".equals(name) || "routeId".equals(name); // must be an eip model that has either input or output as we only want to track processors (also accept from) boolean isEip = camelCatalog.findModelNames().contains(name) && (hasInput(name) || hasOutput(name)); // only include if its a known Camel model (dont include languages) if (isEnd || isRoute || isEip) { CamelNodeDetails newNode = nodeFactory.newNode(node, name); // include source code details int pos = mi.getName().getStartPosition(); int line = findLineNumber(fullyQualifiedFileName, pos); if (line > -1) { newNode.setLineNumber("" + line); } pos = mi.getName().getStartPosition() + mi.getName().getLength(); line = findLineNumber(fullyQualifiedFileName, pos); if (line > -1) { newNode.setLineNumberEnd("" + line); } newNode.setFileName(fullyQualifiedFileName); newNode.setClassName(clazz.getQualifiedName()); newNode.setMethodName(configureMethod.getName()); if ("routeId".equals(name)) { // grab the route id List args = mi.arguments(); if (args != null && args.size() > 0) { // the first argument has the route id Expression exp = (Expression) args.get(0); String routeId = getLiteralValue(clazz, block, exp); if (routeId != null) { newNode.setRouteId(routeId); } } } node.addPreliminaryOutput(newNode); return node; } 
return node; } @SuppressWarnings("unchecked") private static FieldSource<JavaClassSource> getField(JavaClassSource clazz, Block block, SimpleName ref) { String fieldName = ref.getIdentifier(); if (fieldName != null) { // find field in class FieldSource field = clazz != null ? clazz.getField(fieldName) : null; if (field == null) { field = findFieldInBlock(clazz, block, fieldName); } return field; } return null; } @SuppressWarnings("unchecked") private static FieldSource<JavaClassSource> findFieldInBlock(JavaClassSource clazz, Block block, String fieldName) { for (Object statement : block.statements()) { // try local statements first in the block if (statement instanceof VariableDeclarationStatement) { final Type type = ((VariableDeclarationStatement) statement).getType(); for (Object obj : ((VariableDeclarationStatement) statement).fragments()) { if (obj instanceof VariableDeclarationFragment) { VariableDeclarationFragment fragment = (VariableDeclarationFragment) obj; SimpleName name = fragment.getName(); if (name != null && fieldName.equals(name.getIdentifier())) { return new StatementFieldSource(clazz, fragment, type); } } } } // okay the field may be burried inside an anonymous inner class as a field declaration // outside the configure method, so lets go back to the parent and see what we can find ASTNode node = block.getParent(); if (node instanceof MethodDeclaration) { node = node.getParent(); } if (node instanceof AnonymousClassDeclaration) { List declarations = ((AnonymousClassDeclaration) node).bodyDeclarations(); for (Object dec : declarations) { if (dec instanceof FieldDeclaration) { FieldDeclaration fd = (FieldDeclaration) dec; final Type type = fd.getType(); for (Object obj : fd.fragments()) { if (obj instanceof VariableDeclarationFragment) { VariableDeclarationFragment fragment = (VariableDeclarationFragment) obj; SimpleName name = fragment.getName(); if (name != null && fieldName.equals(name.getIdentifier())) { return new StatementFieldSource(clazz, fragment, type); } } } } } } } return null; } /** * @deprecated currently not in use */ @Deprecated public static String getLiteralValue(JavaClassSource clazz, Block block, Expression expression) { // unwrap parenthesis if (expression instanceof ParenthesizedExpression) { expression = ((ParenthesizedExpression) expression).getExpression(); } if (expression instanceof StringLiteral) { return ((StringLiteral) expression).getLiteralValue(); } else if (expression instanceof BooleanLiteral) { return "" + ((BooleanLiteral) expression).booleanValue(); } else if (expression instanceof NumberLiteral) { return ((NumberLiteral) expression).getToken(); } // if it a method invocation then add a dummy value assuming the method invocation will return a valid response if (expression instanceof MethodInvocation) { String name = ((MethodInvocation) expression).getName().getIdentifier(); return "{{" + name + "}}"; } // if its a qualified name (usually a constant field in another class) // then add a dummy value as we cannot find the field value in other classes and maybe even outside the // source code we have access to if (expression instanceof QualifiedName) { QualifiedName qn = (QualifiedName) expression; String name = qn.getFullyQualifiedName(); return "{{" + name + "}}"; } if (expression instanceof SimpleName) { FieldSource<JavaClassSource> field = getField(clazz, block, (SimpleName) expression); if (field != null) { // is the field annotated with a Camel endpoint if (field.getAnnotations() != null) { for (Annotation ann : 
field.getAnnotations()) { boolean valid = "org.apache.camel.EndpointInject".equals(ann.getQualifiedName()) || "org.apache.camel.cdi.Uri".equals(ann.getQualifiedName()); if (valid) { Expression exp = (Expression) ann.getInternal(); if (exp instanceof SingleMemberAnnotation) { exp = ((SingleMemberAnnotation) exp).getValue(); } else if (exp instanceof NormalAnnotation) { List values = ((NormalAnnotation) exp).values(); for (Object value : values) { MemberValuePair pair = (MemberValuePair) value; if ("uri".equals(pair.getName().toString())) { exp = pair.getValue(); break; } } } if (exp != null) { return getLiteralValue(clazz, block, exp); } } } } // is the field an org.apache.camel.Endpoint type? if ("Endpoint".equals(field.getType().getSimpleName())) { // then grab the uri from the first argument VariableDeclarationFragment vdf = (VariableDeclarationFragment) field.getInternal(); expression = vdf.getInitializer(); if (expression instanceof MethodInvocation) { MethodInvocation mi = (MethodInvocation) expression; List args = mi.arguments(); if (args != null && args.size() > 0) { // the first argument has the endpoint uri expression = (Expression) args.get(0); return getLiteralValue(clazz, block, expression); } } } else { // no annotations so try its initializer VariableDeclarationFragment vdf = (VariableDeclarationFragment) field.getInternal(); expression = vdf.getInitializer(); if (expression == null) { // its a field which has no initializer, then add a dummy value assuming the field will be initialized at runtime return "{{" + field.getName() + "}}"; } else { return getLiteralValue(clazz, block, expression); } } } else { // we could not find the field in this class/method, so its maybe from some other super class, so insert a dummy value final String fieldName = ((SimpleName) expression).getIdentifier(); return "{{" + fieldName + "}}"; } } else if (expression instanceof InfixExpression) { String answer = null; // is it a string that is concat together? InfixExpression ie = (InfixExpression) expression; if (InfixExpression.Operator.PLUS.equals(ie.getOperator())) { String val1 = getLiteralValue(clazz, block, ie.getLeftOperand()); String val2 = getLiteralValue(clazz, block, ie.getRightOperand()); // if numeric then we plus the values, otherwise we string concat boolean numeric = isNumericOperator(clazz, block, ie.getLeftOperand()) && isNumericOperator(clazz, block, ie.getRightOperand()); if (numeric) { long num1 = val1 != null ? Long.parseLong(val1) : 0; long num2 = val2 != null ? Long.parseLong(val2) : 0; answer = Long.toString(num1 + num2); } else { answer = (val1 != null ? val1 : "") + (val2 != null ? val2 : ""); } if (!answer.isEmpty()) { // include extended when we concat on 2 or more lines List extended = ie.extendedOperands(); if (extended != null) { for (Object ext : extended) { String val3 = getLiteralValue(clazz, block, (Expression) ext); if (numeric) { long num3 = val3 != null ? Long.parseLong(val3) : 0; long num = Long.parseLong(answer); answer = Long.toString(num + num3); } else { answer += val3 != null ? 
val3 : ""; } } } } } return answer; } return null; } private static boolean isNumericOperator(JavaClassSource clazz, Block block, Expression expression) { if (expression instanceof NumberLiteral) { return true; } else if (expression instanceof SimpleName) { FieldSource field = getField(clazz, block, (SimpleName) expression); if (field != null) { return field.getType().isType("int") || field.getType().isType("long") || field.getType().isType("Integer") || field.getType().isType("Long"); } } return false; } private static int findLineNumber(String fullyQualifiedFileName, int position) { int lines = 0; try { int current = 0; try (BufferedReader br = new BufferedReader(new FileReader(new File(fullyQualifiedFileName)))) { String line; while ((line = br.readLine()) != null) { lines++; current += line.length() + 1; // add 1 for line feed if (current >= position) { return lines; } } } } catch (Exception e) { // ignore return -1; } return lines; } }
apache-2.0
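The following is a minimal usage sketch, not part of the camel-route-parser file above: it shows how CamelJavaTreeParserHelper.parseCamelRouteTree could be driven from a Roaster-parsed RouteBuilder source. The file path, the TreeParserSketch class and the print helper are illustrative assumptions, and the CamelNodeDetails accessors (getName, getOutputs, getLineNumber) are assumed from the setters used in the parser code.

// Minimal usage sketch under the assumptions stated above; not part of the repository content.
import java.io.File;
import java.util.List;

import org.apache.camel.parser.helper.CamelJavaTreeParserHelper;
import org.apache.camel.parser.model.CamelNodeDetails;
import org.jboss.forge.roaster.Roaster;
import org.jboss.forge.roaster.model.source.JavaClassSource;
import org.jboss.forge.roaster.model.source.MethodSource;

public class TreeParserSketch {

    public static void main(String[] args) throws Exception {
        // Hypothetical RouteBuilder source file to analyse.
        File source = new File("src/main/java/com/example/MyRouteBuilder.java");

        // Parse the file with Roaster and locate the no-argument configure() method.
        JavaClassSource clazz = Roaster.parse(JavaClassSource.class, source);
        MethodSource<JavaClassSource> configure = clazz.getMethod("configure");

        // Build the EIP tree for every route found in configure().
        CamelJavaTreeParserHelper helper = new CamelJavaTreeParserHelper();
        List<CamelNodeDetails> routes =
                helper.parseCamelRouteTree(clazz, ".", source.getAbsolutePath(), configure);

        for (CamelNodeDetails route : routes) {
            print(route, 0);
        }
    }

    // Simple recursive dump of the parsed tree.
    private static void print(CamelNodeDetails node, int indent) {
        StringBuilder pad = new StringBuilder();
        for (int i = 0; i < indent; i++) {
            pad.append("  ");
        }
        System.out.println(pad + node.getName() + " (line " + node.getLineNumber() + ")");
        if (node.getOutputs() != null) {
            for (CamelNodeDetails child : node.getOutputs()) {
                print(child, indent + 1);
            }
        }
    }
}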
youdonghai/intellij-community
plugins/javaFX/common-javaFX-plugin/src/org/jetbrains/plugins/javaFX/packaging/AbstractJavaFxPackager.java
12252
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.javaFX.packaging; import com.intellij.execution.CommandLineUtil; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.util.ArrayUtilRt; import com.intellij.util.Base64; import com.intellij.util.PathUtilRt; import com.intellij.util.io.ZipUtil; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; /** * User: anna * Date: 3/12/13 */ public abstract class AbstractJavaFxPackager { private static final Logger LOG = Logger.getInstance("#" + AbstractJavaFxPackager.class.getName()); private static final String JB_JFX_JKS = "jb-jfx.jks"; private static final String NATIVE_BUNDLES = "bundles"; //artifact description protected String getArtifactRootName() { return PathUtilRt.getFileName(getArtifactOutputFilePath()); } protected abstract String getArtifactName(); protected abstract String getArtifactOutputPath(); protected abstract String getArtifactOutputFilePath(); //artifact properties protected abstract String getAppClass(); protected abstract String getTitle(); protected abstract String getVendor(); protected abstract String getDescription(); protected abstract String getVersion(); protected abstract String getWidth(); protected abstract String getHeight(); protected abstract String getHtmlTemplateFile(); protected abstract String getHtmlPlaceholderId(); protected abstract String getHtmlParamFile(); protected abstract String getParamFile(); protected abstract String getUpdateMode(); protected abstract JavaFxPackagerConstants.NativeBundles getNativeBundle(); protected abstract void registerJavaFxPackagerError(final String message); protected abstract JavaFxApplicationIcons getIcons(); public void buildJavaFxArtifact(final String homePath) { if (!checkNotEmpty(getAppClass(), "Application class")) return; if (!checkNotEmpty(getWidth(), "Width")) return; if (!checkNotEmpty(getHeight(), "Height")) return; final String zipPath = getArtifactOutputFilePath(); final File tempUnzippedArtifactOutput; try { tempUnzippedArtifactOutput = FileUtil.createTempDirectory("artifact", "unzipped"); final File artifactOutputFile = new File(zipPath); ZipUtil.extract(artifactOutputFile, tempUnzippedArtifactOutput, null); copyLibraries(FileUtil.getNameWithoutExtension(artifactOutputFile), tempUnzippedArtifactOutput); } catch (IOException e) { registerJavaFxPackagerError(e); return; } final File tempDirectory = new File(tempUnzippedArtifactOutput, "deploy"); try { final StringBuilder buf = new StringBuilder(); buf.append("<project default=\"build artifact\">\n"); buf.append("<taskdef resource=\"com/sun/javafx/tools/ant/antlib.xml\" uri=\"javafx:com.sun.javafx.tools.ant\" ") 
.append("classpath=\"").append(homePath).append("/lib/ant-javafx.jar\"/>\n"); buf.append("<target name=\"build artifact\" xmlns:fx=\"javafx:com.sun.javafx.tools.ant\">"); final String artifactFileName = getArtifactRootName(); final List<JavaFxAntGenerator.SimpleTag> tags = JavaFxAntGenerator.createJarAndDeployTasks(this, artifactFileName, getArtifactName(), tempUnzippedArtifactOutput.getPath(), tempDirectory.getPath(), null); for (JavaFxAntGenerator.SimpleTag tag : tags) { tag.generate(buf); } buf.append("</target>"); buf.append("</project>"); final int result = startAntTarget(buf.toString(), homePath); if (result == 0) { if (isEnabledSigning()) { signApp(homePath + File.separator + "bin", tempDirectory); } } else { registerJavaFxPackagerError("fx:deploy task has failed."); } } finally { copyResultsToArtifactsOutput(tempDirectory); FileUtil.delete(tempUnzippedArtifactOutput); } } private void copyLibraries(String zipPath, File tempUnzippedArtifactOutput) throws IOException { final File[] outFiles = new File(getArtifactOutputPath()).listFiles(); if (outFiles != null) { final String[] generatedItems = new String[]{JB_JFX_JKS, zipPath + ".jar", zipPath + ".jnlp", zipPath + ".html", NATIVE_BUNDLES}; for (File file : outFiles) { final String fileName = file.getName(); if (ArrayUtilRt.find(generatedItems, fileName) < 0) { final File destination = new File(tempUnzippedArtifactOutput, fileName); FileUtil.copyFileOrDir(file, destination); } } } } private boolean checkNotEmpty(final String text, final String title) { if (StringUtil.isEmptyOrSpaces(text)) { registerJavaFxPackagerError("Unable to build JavaFX artifact. " + title + " should be specified in artifact's settings."); return false; } return true; } private void signApp(String binPath, File tempDirectory) { final boolean selfSigning = isSelfSigning(); final int genResult = selfSigning ? 
genKey(binPath) : 0; if (genResult == 0) { final File[] files = tempDirectory.listFiles(); if (files != null) { for (File file : files) { if (file.isFile() && file.getName().endsWith(".jar")) { sign(binPath, selfSigning, file.getPath()); } } } } else { registerJavaFxPackagerError("JavaFX generate certificate task has failed."); } } private void sign(String binPath, boolean selfSigning, final String jar2Sign) { final List<String> signCommandLine = new ArrayList<String>(); addParameter(signCommandLine, FileUtil.toSystemDependentName(binPath + File.separator + "jarsigner")); collectStoreParams(selfSigning, signCommandLine); addParameter(signCommandLine, jar2Sign); addParameter(signCommandLine, getAlias(selfSigning)); final int signedResult = startProcess(signCommandLine); if (signedResult != 0) { registerJavaFxPackagerError("JavaFX sign task has failed for: " + jar2Sign + "."); } } private int genKey(String binPath) { final String keyStorePath = getKeystore(true); final File keyStoreFile = new File(keyStorePath); if (keyStoreFile.isFile()) { FileUtil.delete(keyStoreFile); } final List<String> genCommandLine = new ArrayList<String>(); addParameter(genCommandLine, FileUtil.toSystemDependentName(binPath + File.separator + "keytool")); addParameter(genCommandLine, "-genkeypair"); addParameter(genCommandLine, "-dname"); String vendor = getVendor(); if (StringUtil.isEmptyOrSpaces(vendor)) { vendor = "jb-fx-build"; } addParameter(genCommandLine, "CN=" + vendor.replaceAll(",", "\\\\,")); addParameter(genCommandLine, "-alias"); addParameter(genCommandLine, getAlias(true)); collectStoreParams(true, genCommandLine); return startProcess(genCommandLine); } private void collectStoreParams(boolean selfSigning, List<String> signCommandLine) { addParameter(signCommandLine, "-keyStore"); addParameter(signCommandLine, getKeystore(selfSigning)); addParameter(signCommandLine, "-storepass"); addParameter(signCommandLine, getStorepass(selfSigning)); addParameter(signCommandLine, "-keypass"); addParameter(signCommandLine, getKeypass(selfSigning)); } private void copyResultsToArtifactsOutput(final File tempDirectory) { try { final File resultedJar = new File(getArtifactOutputPath()); FileUtil.copyDir(tempDirectory, resultedJar); } catch (IOException e) { LOG.info(e); } FileUtil.delete(tempDirectory); } private void registerJavaFxPackagerError(Exception ex) { registerJavaFxPackagerError(ex.getMessage()); } private static void addParameter(List<String> commandLine, String param) { if (!StringUtil.isEmptyOrSpaces(param)) { commandLine.add(param); } } private int startProcess(List<String> commands) { try { final Process process = new ProcessBuilder(CommandLineUtil.toCommandLine(commands)).start(); final String message = new String(FileUtil.loadBytes(process.getErrorStream())); if (!StringUtil.isEmptyOrSpaces(message)) { registerJavaFxPackagerError(message); } final int result = process.waitFor(); if (result != 0) { final String explanationMessage = new String(FileUtil.loadBytes(process.getInputStream())); if (!StringUtil.isEmptyOrSpaces(explanationMessage)) { registerJavaFxPackagerError(explanationMessage); } } return result; } catch (Exception e) { registerJavaFxPackagerError(e); return -1; } } private int startAntTarget(String buildText, String javaHome) { final String antHome = getAntHome(); if (antHome == null) { registerJavaFxPackagerError("Bundled ant not found."); return -1; } final ArrayList<String> commands = new ArrayList<String>(); commands.add(javaHome + File.separator + "bin" + File.separator + "java"); 
commands.add("-Dant.home=" + antHome); commands.add("-classpath"); commands.add(antHome + "/lib/ant.jar" + File.pathSeparator + antHome + "/lib/ant-launcher.jar" + File.pathSeparator + javaHome + "/lib/ant-javafx.jar" + File.pathSeparator + javaHome + "/jre/lib/jfxrt.jar"); commands.add("org.apache.tools.ant.launch.Launcher"); commands.add("-f"); try { File tempFile = FileUtil.createTempFile("build", ".xml"); tempFile.deleteOnExit(); FileUtil.writeToFile(tempFile, buildText.getBytes(Charset.defaultCharset())); commands.add(tempFile.getCanonicalPath()); } catch (IOException e) { registerJavaFxPackagerError(e); return -1; } return startProcess(commands); } private static String getAntHome() { final String appHome = PathManager.getHomePath(); if (appHome == null) { return null; } File antHome = new File(appHome, "lib" + File.separator + "ant"); if (!antHome.exists()) { File communityAntHome = new File(appHome, "community" + File.separator + "lib" + File.separator + "ant"); if (communityAntHome.exists()) { antHome = communityAntHome; } } if (!antHome.exists()) { return null; } return antHome.getPath(); } private String getAlias(boolean selfSigning) { return selfSigning ? "jb" : getAlias(); } private String getKeypass(boolean selfSigning) { return selfSigning ? "keypass" : new String(Base64.decode(getKeypass()), CharsetToolkit.UTF8_CHARSET); } private String getKeystore(boolean selfSigning) { return selfSigning ? getArtifactOutputPath() + File.separator + JB_JFX_JKS : getKeystore(); } private String getStorepass(boolean selfSigning) { return selfSigning ? "storepass" : new String(Base64.decode(getStorepass()), CharsetToolkit.UTF8_CHARSET); } public abstract String getKeypass(); public abstract String getStorepass(); public abstract String getKeystore(); public abstract String getAlias(); public abstract boolean isSelfSigning(); public abstract boolean isEnabledSigning(); public abstract String getPreloaderClass(); public abstract String getPreloaderJar(); public abstract boolean convertCss2Bin(); public abstract List<JavaFxManifestAttribute> getCustomManifestAttributes(); }
apache-2.0
nkreipke/rethinkdb-net
rethinkdb-net-test/Integration/HasFieldsTests.cs
4275
using System.Linq;
using NUnit.Framework;
using System.Collections.Generic;

namespace RethinkDb.Test.Integration
{
    [TestFixture]
    public class HasFieldsTests : TestBase
    {
        private ITableQuery<TestObject> testTable;

        public override void TestFixtureSetUp()
        {
            base.TestFixtureSetUp();
            connection.RunAsync(Query.DbCreate("test")).Wait();
            connection.RunAsync(Query.Db("test").TableCreate("table")).Wait();
            testTable = Query.Db("test").Table<TestObject>("table");
            connection.Run(testTable.IndexCreate("index1", o => o.Name));
            connection.Run(testTable.IndexWait("index1")).ToArray(); // ToArray ensures that the IEnumerable is actually evaluated completely and the wait is completed
        }

        [SetUp]
        public virtual void SetUp()
        {
            connection.RunAsync(testTable.Insert(new TestObject[] {
                new TestObject() { Id = "1", Name = null, Children = new TestObject[0], ChildrenList = new List<TestObject>(), ChildrenIList = new List<TestObject>() },
                new TestObject() { Id = "2", Name = "2", Children = new TestObject[0], ChildrenList = new List<TestObject>(), ChildrenIList = new List<TestObject>() },
                new TestObject() { Id = "3", Name = null, Children = null },
                new TestObject() { Id = "4", Name = string.Empty, Children = null }
            })).Wait();
        }

        [TearDown]
        public virtual void TearDown()
        {
            connection.RunAsync(testTable.Delete()).Wait();
        }

        [Test]
        public void HasFields_OnSequence_ReturnsResultsWithNonNullFieldValues()
        {
            TestObject[] hasFields = connection.Run(testTable.HasFields(m => m.Name)).ToArray();
            Assert.That(hasFields.Length, Is.EqualTo(2));
            Assert.That(hasFields, Has.Exactly(1).EqualTo(new TestObject { Id = "2" }));
            Assert.That(hasFields, Has.Exactly(1).EqualTo(new TestObject { Id = "4" }));
        }

        [Test]
        public void HasFields_OnSequence_ReturnsResultsWithMultipleNonNullNamesAndChildren()
        {
            TestObject[] hasFields = connection.Run(testTable.HasFields(m => m.Name, m => m.Children)).ToArray();
            Assert.That(hasFields.Length, Is.EqualTo(1));
            Assert.That(hasFields, Has.Exactly(1).EqualTo(new TestObject { Id = "2" }));
        }

        [Test]
        public void HasFields_OnSequence_ReturnsResultsWithMultipleNonNullNamesAndChildrenList()
        {
            TestObject[] hasFields = connection.Run(testTable.HasFields(m => m.Name, m => m.ChildrenList)).ToArray();
            Assert.That(hasFields.Length, Is.EqualTo(1));
            Assert.That(hasFields, Has.Exactly(1).EqualTo(new TestObject { Id = "2" }));
        }

        [Test]
        public void HasFields_OnSequence_ReturnsResultsWithMultipleNonNullNamesAndChildrenIList()
        {
            TestObject[] hasFields = connection.Run(testTable.HasFields(m => m.Name, m => m.ChildrenIList)).ToArray();
            Assert.That(hasFields.Length, Is.EqualTo(1));
            Assert.That(hasFields, Has.Exactly(1).EqualTo(new TestObject { Id = "2" }));
        }

        [Test]
        public void HasFields_OnSingleObject_ReturnsFalseWhenFieldIsNull()
        {
            var result = this.connection.Run(testTable.Get("1").HasFields(m => m.Name));
            Assert.That(result, Is.False);
        }

        [Test]
        public void HasFields_OnSingleObject_ReturnsTrueWhenFieldIsNotNull()
        {
            var result = connection.Run(testTable.Get("1").HasFields(m => m.Children));
            Assert.That(result, Is.True);
        }

        [Test]
        public void HasFields_OnSingleObject_ReturnsTrueWhenAllFieldsAreNotNull()
        {
            var result = connection.Run(testTable.Get("1").HasFields(m => m.Children, m => m.Id));
            Assert.That(result, Is.True);
        }

        [Test]
        public void HasFields_OnSingleObject_ReturnsFalseWhenSomeFieldIsNull()
        {
            var result = connection.Run(testTable.Get("1").HasFields(m => m.Children, m => m.Name));
            Assert.That(result, Is.False);
        }
    }
}
apache-2.0
basho/jira-github-service
src/main/java/net/mostlyharmless/jghservice/connector/jira/JiraConnector.java
4915
/*
 * Copyright 2014 Brian Roach <roach at mostlyharmless dot net>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.mostlyharmless.jghservice.connector.jira;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.DatatypeConverter;
import net.mostlyharmless.jghservice.connector.UnexpectedResponseException;
import net.mostlyharmless.jghservice.connector.github.GithubCommand;
import net.mostlyharmless.jghservice.resources.ServiceConfig;

/**
 *
 * @author Brian Roach <roach at mostlyharmless dot net>
 */
public class JiraConnector {
    private final String encodedUserPass;
    private final String apiUrlBase;
    private static final Logger LOGGER = Logger.getLogger(JiraConnector.class.getName());

    public JiraConnector(ServiceConfig config) {
        encodedUserPass = DatatypeConverter.printBase64Binary(
            (config.getJira().getUsername() + ":" + config.getJira().getPassword()).getBytes());
        String base = config.getJira().getUrl();
        if (!base.endsWith("/")) {
            base = base + "/";
        }
        apiUrlBase = base;
    }

    public <T> T execute(JiraCommand<T> command) throws ExecutionException {
        try {
            HttpURLConnection conn = (HttpURLConnection) command.getUrl(apiUrlBase).openConnection();
            conn.setRequestMethod(command.getRequestMethod());
            conn.setRequestProperty("Authorization", "Basic " + encodedUserPass);
            conn.setDoOutput(true);

            if (!command.getRequestMethod().equals(GithubCommand.GET)) {
                conn.setRequestProperty("Content-Type", "application/json; charset=utf8");
                try (OutputStreamWriter wr = new OutputStreamWriter(conn.getOutputStream())) {
                    wr.write(command.getJson());
                }
            }

            int responseCode = conn.getResponseCode();
            if (responseCode != command.getExpectedResponseCode()) {
                LOGGER.log(Level.WARNING, "Incorrect response; expected " + command.getExpectedResponseCode()
                    + " received " + responseCode);
                LOGGER.log(Level.INFO, command.getUrl(apiUrlBase).toString());
                if (!command.getRequestMethod().equals(GithubCommand.GET)) {
                    LOGGER.log(Level.INFO, command.getJson());
                }

                if (responseCode >= 400) {
                    InputStream is = conn.getErrorStream();
                    if (is == null) {
                        is = conn.getInputStream();
                    }
                    if (is != null) {
                        StringBuilder sb = new StringBuilder();
                        BufferedReader br = new BufferedReader(new InputStreamReader(is));
                        String resp = null;
                        while ((resp = br.readLine()) != null) {
                            sb.append(resp);
                        }
                        LOGGER.log(Level.INFO, sb.toString());
                    }
                    throw new ExecutionException(new UnexpectedResponseException(responseCode, conn.getResponseMessage()));
                }
            }

            StringBuilder sb = new StringBuilder();
            String line;
            BufferedReader br = new BufferedReader(new InputStreamReader(conn.getInputStream()));
            while ((line = br.readLine()) != null) {
                sb.append(line);
            }

            return command.processResponse(sb.toString());
        } catch (IOException ex) {
            throw new ExecutionException(ex);
        }
    }
}
apache-2.0
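The JiraCommand interface consumed by JiraConnector.execute is not included in this record, so the sketch below infers its shape purely from the call sites above (getUrl, getRequestMethod, getExpectedResponseCode, getJson, processResponse). The command name, the URL path and the throws clause are illustrative assumptions, not the project's actual API.

// Hypothetical command, reconstructed from how JiraConnector.execute() drives it.
import java.io.IOException;
import java.net.URL;

public class GetIssueCommand implements JiraCommand<String> {

    private final String issueKey;   // assumed field, for illustration only

    public GetIssueCommand(String issueKey) {
        this.issueKey = issueKey;
    }

    @Override
    public URL getUrl(String apiUrlBase) throws IOException {
        // apiUrlBase always ends with "/" (the JiraConnector constructor guarantees it)
        return new URL(apiUrlBase + "issue/" + issueKey);
    }

    @Override
    public String getRequestMethod() {
        return "GET";   // compared against GithubCommand.GET in execute(), so no request body is written
    }

    @Override
    public int getExpectedResponseCode() {
        return 200;
    }

    @Override
    public String getJson() {
        return null;    // only consulted for non-GET requests
    }

    @Override
    public String processResponse(String json) {
        return json;    // a real command would parse the JSON here
    }
}

// Usage, given a configured ServiceConfig:
//   JiraConnector connector = new JiraConnector(config);
//   String issueJson = connector.execute(new GetIssueCommand("PROJ-123"));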
supergiant/supergiant
cmd/ui/assets/src/app/shared/cookies/cookies.service.ts
1787
import { Injectable } from '@angular/core'; @Injectable() export class CookieMonster { constructor() { } public getCookie(name: string) { const ca: Array<string> = document.cookie.split(';'); const caLen: number = ca.length; const cookieName = `${name}=`; let c: string; for (let i = 0; i < caLen; i += 1) { c = ca[i].replace(/^\s+/g, ''); if (c.indexOf(cookieName) === 0) { return c.substring(cookieName.length, c.length); } } return ''; } public deleteCookie(cookieName) { this.setCookie({ name: cookieName, value: '', expireDays: -1 }); } /** * Expires default 1 day * If params.session is set and true expires is not added * If params.path is not set or value is not greater than 0 its default value will be root "/" * Secure flag can be activated only with https implemented * Examples of usage: * {service instance}.setCookie({name:'token',value:'abcd12345', session:true }); <- This cookie will not expire * {service instance}.setCookie({name:'userName',value:'John Doe', secure:true }); <- If page is not https then secure will not apply * {service instance}.setCookie({name:'niceCar', value:'red', expireDays:10 }); */ public setCookie(params: any) { const d: Date = new Date(); d.setTime(d.getTime() + (params.expireDays ? params.expireDays : 1) * 24 * 60 * 60 * 1000); document.cookie = (params.name ? params.name : '') + '=' + (params.value ? params.value : '') + ';' + (params.session && params.session === true ? '' : 'expires=' + d.toUTCString() + ';') + 'path=' + (params.path && params.path.length > 0 ? params.path : '/') + ';' + (location.protocol === 'https:' && params.secure && params.secure === true ? 'secure' : ''); } }
apache-2.0
porkybrain/Kvasir
Lib/Chip/Unknown/Fujitsu/S6E2CC/QPRC3_NF.hpp
1965
#pragma once #include <Register/Utility.hpp> namespace Kvasir { //Quadrature Position/Revolution Counter 0 Noise Filter namespace Qprc3NfNfctla{ ///<AIN Noise Control Register using Addr = Register::Address<0x40026130,0xffffffc8,0x00000000,unsigned char>; ///Mask bit constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::ReadWriteAccess,unsigned> ainmd{}; ///Input invert bit constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> ainlv{}; ///Noise filter width select bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,0),Register::ReadWriteAccess,unsigned> ainnws{}; } namespace Qprc3NfNfctlb{ ///<BIN Noise Control Register using Addr = Register::Address<0x40026134,0xffffffc8,0x00000000,unsigned char>; ///Mask bit constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::ReadWriteAccess,unsigned> binmd{}; ///Input invert bit constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> binlv{}; ///Noise filter width select bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,0),Register::ReadWriteAccess,unsigned> binnws{}; } namespace Qprc3NfNfctlz{ ///<ZIN Noise Control Register using Addr = Register::Address<0x40026138,0xffffffc8,0x00000000,unsigned char>; ///Mask bit constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::ReadWriteAccess,unsigned> zinmd{}; ///Input invert bit constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> zinlv{}; ///Noise filter width select bits constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,0),Register::ReadWriteAccess,unsigned> zinnws{}; } }
apache-2.0
travisbrown/circe
modules/generic-simple/src/main/scala/io/circe/generic/simple/AutoDerivation.scala
832
package io.circe.generic.simple import io.circe.{ Decoder, Encoder } import io.circe.export.Exported import io.circe.generic.simple.decoding.DerivedDecoder import io.circe.generic.simple.encoding.DerivedAsObjectEncoder import io.circe.generic.simple.util.macros.ExportMacros import scala.language.experimental.macros /** * Fully automatic codec derivation. * * Extending this trait provides [[io.circe.Decoder]] and [[io.circe.Encoder]] * instances for case classes (if all members have instances), "incomplete" case classes, sealed * trait hierarchies, etc. */ trait AutoDerivation { implicit def exportDecoder[A]: Exported[Decoder[A]] = macro ExportMacros.exportDecoder[DerivedDecoder, A] implicit def exportEncoder[A]: Exported[Encoder.AsObject[A]] = macro ExportMacros.exportEncoder[DerivedAsObjectEncoder, A] }
apache-2.0
apetro/uPortal
uportal-war/src/main/java/org/apereo/portal/permission/dao/jpa/JpaPermissionOwnerDao.java
6032
/** * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apereo.portal.permission.dao.jpa; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.JoinType; import javax.persistence.criteria.Root; import org.apereo.portal.jpa.BasePortalJpaDao; import org.apereo.portal.jpa.OpenEntityManager; import org.apereo.portal.permission.IPermissionActivity; import org.apereo.portal.permission.IPermissionOwner; import org.apereo.portal.permission.dao.IPermissionOwnerDao; import org.springframework.stereotype.Repository; import com.google.common.base.Function; /** * JpaPermissionOwnerDao provides a default JPA/Hibernate implementation of * the IPermissionOwnerDao interface. * * @author Jen Bourey, jbourey@unicon.net * @version $Revision$ * @since 3.3 */ @Repository("permissionOwnerDao") public class JpaPermissionOwnerDao extends BasePortalJpaDao implements IPermissionOwnerDao { private CriteriaQuery<PermissionOwnerImpl> findAllPermissionOwners; @Override public void afterPropertiesSet() throws Exception { this.findAllPermissionOwners = this.createCriteriaQuery(new Function<CriteriaBuilder, CriteriaQuery<PermissionOwnerImpl>>() { @Override public CriteriaQuery<PermissionOwnerImpl> apply(CriteriaBuilder cb) { final CriteriaQuery<PermissionOwnerImpl> criteriaQuery = cb.createQuery(PermissionOwnerImpl.class); final Root<PermissionOwnerImpl> ownerRoot = criteriaQuery.from(PermissionOwnerImpl.class); criteriaQuery.select(ownerRoot); ownerRoot.fetch(PermissionOwnerImpl_.activities, JoinType.LEFT); return criteriaQuery; } }); } @Override public List<IPermissionOwner> getAllPermissionOwners() { final TypedQuery<PermissionOwnerImpl> query = this.createCachedQuery(this.findAllPermissionOwners); final List<PermissionOwnerImpl> resultList = query.getResultList(); return new ArrayList<IPermissionOwner>(new LinkedHashSet<IPermissionOwner>(resultList)); } @Override @PortalTransactional public IPermissionOwner getOrCreatePermissionOwner(String name, String fname) { IPermissionOwner owner = getPermissionOwner(fname); if (owner == null) { owner = new PermissionOwnerImpl(name, fname); this.getEntityManager().persist(owner); } return owner; } @Override public IPermissionOwner getPermissionOwner(long id){ return getEntityManager().find(PermissionOwnerImpl.class, id); } @OpenEntityManager(unitName = PERSISTENCE_UNIT_NAME) @Override public IPermissionOwner getPermissionOwner(String fname){ final NaturalIdQuery<PermissionOwnerImpl> query = this.createNaturalIdQuery(PermissionOwnerImpl.class); query.using(PermissionOwnerImpl_.fname, fname); return query.load(); } @Override 
@PortalTransactional public IPermissionOwner saveOwner(IPermissionOwner owner) { this.getEntityManager().persist(owner); return owner; } @Override @PortalTransactional public IPermissionActivity getOrCreatePermissionActivity( IPermissionOwner owner, String name, String fname, String targetProviderKey) { IPermissionActivity activity = getPermissionActivity(owner.getId(), fname); if (activity == null) { activity = new PermissionActivityImpl(name, fname, targetProviderKey); owner.getActivities().add(activity); } return activity; } @Override public IPermissionActivity getPermissionActivity(long id) { return getEntityManager().find(PermissionActivityImpl.class, id); } @Override public IPermissionActivity getPermissionActivity(long ownerId, String activityFname) { final IPermissionOwner permissionOwner = this.getPermissionOwner(ownerId); return findActivity(permissionOwner, activityFname); } @PortalTransactionalReadOnly @Override public IPermissionActivity getPermissionActivity(String ownerFname, String activityFname) { final IPermissionOwner permissionOwner = this.getPermissionOwner(ownerFname); return findActivity(permissionOwner, activityFname); } @Override @PortalTransactional public IPermissionActivity savePermissionActivity(IPermissionActivity activity) { this.getEntityManager().persist(activity); return activity; } protected IPermissionActivity findActivity(final IPermissionOwner permissionOwner, String activityFname) { if (permissionOwner == null) { return null; } final Set<IPermissionActivity> activities = permissionOwner.getActivities(); for (final IPermissionActivity permissionActivity : activities) { if (activityFname.equals(permissionActivity.getFname())) { return permissionActivity; } } return null; } }
apache-2.0
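The DAO above is deliberately idempotent: getOrCreatePermissionOwner and getOrCreatePermissionActivity either load existing rows or lazily create them. A minimal caller might look like the hypothetical bootstrap below; the owner/activity names and the constructor injection are illustrative, and only the DAO method signatures come from the code above.

import org.apereo.portal.permission.IPermissionActivity;
import org.apereo.portal.permission.IPermissionOwner;
import org.apereo.portal.permission.dao.IPermissionOwnerDao;

public class PermissionBootstrap {

    private final IPermissionOwnerDao permissionOwnerDao;

    public PermissionBootstrap(IPermissionOwnerDao permissionOwnerDao) {
        this.permissionOwnerDao = permissionOwnerDao;
    }

    public IPermissionActivity registerSubscribeActivity() {
        // Created on the first call, simply loaded on every later call.
        IPermissionOwner owner = permissionOwnerDao.getOrCreatePermissionOwner(
                "Portlet Subscription", "UP_PORTLET_SUBSCRIBE");

        // The activity is attached to the owner's activity set if it does not exist yet.
        IPermissionActivity activity = permissionOwnerDao.getOrCreatePermissionActivity(
                owner, "Subscribe", "SUBSCRIBE", "portletTargets");

        // Persist the (possibly new) activity through its owner.
        permissionOwnerDao.saveOwner(owner);
        return activity;
    }
}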
xenit-eu/dynamic-extensions-for-alfresco
annotations-runtime/scheduler-quartz-1/src/main/java/com/github/dynamicextensionsalfresco/schedule/quartz/QuartzJobAdaptor.java
1173
package com.github.dynamicextensionsalfresco.schedule.quartz; import com.github.dynamicextensionsalfresco.schedule.Task; import java.util.Map; import org.quartz.Job; import org.quartz.JobDataMap; import org.quartz.JobDetail; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; public class QuartzJobAdaptor { static void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException { JobDetail jobDetail = jobExecutionContext.getJobDetail(); Map jobDataMap = jobDetail.getJobDataMap(); Object obj = jobDataMap.get(QuartzTaskScheduler.BEAN_ID); if (obj == null) { throw new JobExecutionException(String.format("Job not found in %s", JobDataMap.class.getSimpleName())); } if (Task.class.isAssignableFrom(obj.getClass())) { Task task = (Task) obj; task.execute(); } else if (Job.class.isAssignableFrom(obj.getClass())) { Job lockedJob = (Job) obj; lockedJob.execute(jobExecutionContext); } else { throw new JobExecutionException("Unexpected type: " + obj.getClass()); } } }
apache-2.0
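QuartzJobAdaptor.execute only dispatches: it pulls whatever QuartzTaskScheduler stored under BEAN_ID out of the JobDataMap and calls Task.execute() or Job.execute(context). The scheduler side is not shown in this record, so the glue Job and the commented Quartz 1.x registration below are an assumption about how that wiring presumably looks.

package com.github.dynamicextensionsalfresco.schedule.quartz;

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

/** Glue Job whose only responsibility is handing the execution context to the adaptor. */
public class TaskJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // Same package, so the package-private static adaptor method is reachable.
        QuartzJobAdaptor.execute(context);
    }
}

// Registration sketch (Quartz 1.x style), e.g. inside QuartzTaskScheduler:
//   JobDetail detail = new JobDetail("my-task", Scheduler.DEFAULT_GROUP, TaskJob.class);
//   detail.getJobDataMap().put(QuartzTaskScheduler.BEAN_ID, myTask);   // myTask implements Task
//   scheduler.scheduleJob(detail, new SimpleTrigger("my-task-trigger", Scheduler.DEFAULT_GROUP));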
migue/fabric8
forge/addons/kubernetes/src/main/java/io/fabric8/forge/kubernetes/ServiceDelete.java
3735
/** * Copyright 2005-2015 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.forge.kubernetes; import io.fabric8.kubernetes.api.KubernetesHelper; import io.fabric8.kubernetes.api.model.Service; import io.fabric8.kubernetes.api.model.ServiceList; import org.jboss.forge.addon.ui.context.UIBuilder; import org.jboss.forge.addon.ui.context.UIContext; import org.jboss.forge.addon.ui.context.UIExecutionContext; import org.jboss.forge.addon.ui.input.InputComponent; import org.jboss.forge.addon.ui.input.UICompleter; import org.jboss.forge.addon.ui.input.UIInput; import org.jboss.forge.addon.ui.metadata.UICommandMetadata; import org.jboss.forge.addon.ui.metadata.WithAttributes; import org.jboss.forge.addon.ui.result.Result; import org.jboss.forge.addon.ui.util.Categories; import org.jboss.forge.addon.ui.util.Metadata; import javax.inject.Inject; import java.util.ArrayList; import java.util.Collections; import java.util.List; import static io.fabric8.kubernetes.api.KubernetesHelper.getName; /** * Deletes a service from kubernetes */ public class ServiceDelete extends AbstractKubernetesCommand { @Inject @WithAttributes(label = "Service ID", description = "The ID of the service to delete.", required = true) UIInput<String> serviceId; @Override public UICommandMetadata getMetadata(UIContext context) { return Metadata.from(super.getMetadata(context), getClass()) .category(Categories.create(CATEGORY)) .name(CATEGORY + ": Service Delete") .description("Deletes the given service from the kubernetes cloud"); } @Override public void initializeUI(UIBuilder builder) throws Exception { super.initializeUI(builder); // populate autocompletion options serviceId.setCompleter(new UICompleter<String>() { @Override public Iterable<String> getCompletionProposals(UIContext context, InputComponent<?, String> input, String value) { List<String> list = new ArrayList<String>(); ServiceList services = getKubernetes().getServices(); if (services != null) { List<Service> items = services.getItems(); if (items != null) { for (Service item : items) { String id = KubernetesHelper.getName(item); list.add(id); } } } Collections.sort(list); return list; } }); builder.add(serviceId); } @Override public Result execute(UIExecutionContext context) throws Exception { String idText = serviceId.getValue(); Service service = getKubernetes().getService(idText); if (service == null) { System.out.println("No service for id: " + idText); } else { executeService(service); } return null; } protected void executeService(Service service) throws Exception { getKubernetes().deleteService(KubernetesHelper.getName(service)); } }
apache-2.0
bazelbuild/buildtools
differ/isatty_other.go
930
// Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build !windows package differ import "syscall" // isatty reports whether fd is a tty. // Actually it reports whether fd is a character device, which is close enough. func isatty(fd int) bool { var st syscall.Stat_t if err := syscall.Fstat(fd, &st); err != nil { return false } return st.Mode&syscall.S_IFMT == syscall.S_IFCHR }
apache-2.0
apache/lucenenet
src/Lucene.Net.Suggest/Spell/SuggestWordScoreComparator.cs
2338
using J2N.Text; using System; using System.Collections.Generic; namespace Lucene.Net.Search.Spell { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// Score first, then frequency /// </summary> // LUCENENET: It is no longer good practice to use binary serialization. // See: https://github.com/dotnet/corefx/issues/23584#issuecomment-325724568 #if FEATURE_SERIALIZABLE [Serializable] #endif public class SuggestWordScoreComparer : IComparer<SuggestWord> { /// <summary> /// Creates a new comparer that will compare by <see cref="SuggestWord.Score"/>, /// then by <see cref="SuggestWord.Freq"/>, then by <see cref="SuggestWord.String"/>. /// </summary> public SuggestWordScoreComparer() { } public virtual int Compare(SuggestWord first, SuggestWord second) { // first criteria: the distance if (first.Score > second.Score) { return 1; } if (first.Score < second.Score) { return -1; } // second criteria (if first criteria is equal): the popularity if (first.Freq > second.Freq) { return 1; } if (first.Freq < second.Freq) { return -1; } // third criteria: term text return second.String.CompareToOrdinal(first.String); } } }
apache-2.0
uvalib/staff-directory
bower_components/json-formatter/bower_components/swiper/src/components/core/events/onTouchEnd.js
8327
import Utils from '../../../utils/utils'; export default function (event) { const swiper = this; const data = swiper.touchEventsData; const { params, touches, rtl, $wrapperEl, slidesGrid, snapGrid } = swiper; let e = event; if (e.originalEvent) e = e.originalEvent; if (data.allowTouchCallbacks) { swiper.emit('touchEnd', e); } data.allowTouchCallbacks = false; if (!data.isTouched) return; // Return Grab Cursor if (params.grabCursor && data.isMoved && data.isTouched && (swiper.allowSlideNext === true || swiper.allowSlidePrev === true)) { swiper.setGrabCursor(false); } // Time diff const touchEndTime = Utils.now(); const timeDiff = touchEndTime - data.touchStartTime; // Tap, doubleTap, Click if (swiper.allowClick) { swiper.updateClickedSlide(e); swiper.emit('tap', e); if (timeDiff < 300 && (touchEndTime - data.lastClickTime) > 300) { if (data.clickTimeout) clearTimeout(data.clickTimeout); data.clickTimeout = Utils.nextTick(() => { if (!swiper || swiper.destroyed) return; swiper.emit('click', e); }, 300); } if (timeDiff < 300 && (touchEndTime - data.lastClickTime) < 300) { if (data.clickTimeout) clearTimeout(data.clickTimeout); swiper.emit('doubleTap', e); } } data.lastClickTime = Utils.now(); Utils.nextTick(() => { if (!swiper.destroyed) swiper.allowClick = true; }); if (!data.isTouched || !data.isMoved || !swiper.swipeDirection || touches.diff === 0 || data.currentTranslate === data.startTranslate) { data.isTouched = false; data.isMoved = false; return; } data.isTouched = false; data.isMoved = false; let currentPos; if (params.followFinger) { currentPos = rtl ? swiper.translate : -swiper.translate; } else { currentPos = -data.currentTranslate; } if (params.freeMode) { if (currentPos < -swiper.minTranslate()) { swiper.slideTo(swiper.activeIndex); return; } else if (currentPos > -swiper.maxTranslate()) { if (swiper.slides.length < snapGrid.length) { swiper.slideTo(snapGrid.length - 1); } else { swiper.slideTo(swiper.slides.length - 1); } return; } if (params.freeModeMomentum) { if (data.velocities.length > 1) { const lastMoveEvent = data.velocities.pop(); const velocityEvent = data.velocities.pop(); const distance = lastMoveEvent.position - velocityEvent.position; const time = lastMoveEvent.time - velocityEvent.time; swiper.velocity = distance / time; swiper.velocity /= 2; if (Math.abs(swiper.velocity) < params.freeModeMinimumVelocity) { swiper.velocity = 0; } // this implies that the user stopped moving a finger then released. // There would be no events with distance zero, so the last event is stale. 
if (time > 150 || (Utils.now() - lastMoveEvent.time) > 300) { swiper.velocity = 0; } } else { swiper.velocity = 0; } swiper.velocity *= params.freeModeMomentumVelocityRatio; data.velocities.length = 0; let momentumDuration = 1000 * params.freeModeMomentumRatio; const momentumDistance = swiper.velocity * momentumDuration; let newPosition = swiper.translate + momentumDistance; if (rtl) newPosition = -newPosition; let doBounce = false; let afterBouncePosition; const bounceAmount = Math.abs(swiper.velocity) * 20 * params.freeModeMomentumBounceRatio; if (newPosition < swiper.maxTranslate()) { if (params.freeModeMomentumBounce) { if (newPosition + swiper.maxTranslate() < -bounceAmount) { newPosition = swiper.maxTranslate() - bounceAmount; } afterBouncePosition = swiper.maxTranslate(); doBounce = true; data.allowMomentumBounce = true; } else { newPosition = swiper.maxTranslate(); } } else if (newPosition > swiper.minTranslate()) { if (params.freeModeMomentumBounce) { if (newPosition - swiper.minTranslate() > bounceAmount) { newPosition = swiper.minTranslate() + bounceAmount; } afterBouncePosition = swiper.minTranslate(); doBounce = true; data.allowMomentumBounce = true; } else { newPosition = swiper.minTranslate(); } } else if (params.freeModeSticky) { let nextSlide; for (let j = 0; j < snapGrid.length; j += 1) { if (snapGrid[j] > -newPosition) { nextSlide = j; break; } } if (Math.abs(snapGrid[nextSlide] - newPosition) < Math.abs(snapGrid[nextSlide - 1] - newPosition) || swiper.swipeDirection === 'next') { newPosition = snapGrid[nextSlide]; } else { newPosition = snapGrid[nextSlide - 1]; } newPosition = -newPosition; } // Fix duration if (swiper.velocity !== 0) { if (rtl) { momentumDuration = Math.abs((-newPosition - swiper.translate) / swiper.velocity); } else { momentumDuration = Math.abs((newPosition - swiper.translate) / swiper.velocity); } } else if (params.freeModeSticky) { swiper.slideReset(); return; } if (params.freeModeMomentumBounce && doBounce) { swiper.updateProgress(afterBouncePosition); swiper.setTransition(momentumDuration); swiper.setTranslate(newPosition); swiper.transitionStart(); swiper.animating = true; $wrapperEl.transitionEnd(() => { if (!swiper || swiper.destroyed || !data.allowMomentumBounce) return; swiper.emit('momentumBounce'); swiper.setTransition(params.speed); swiper.setTranslate(afterBouncePosition); $wrapperEl.transitionEnd(() => { if (!swiper || swiper.destroyed) return; swiper.transitionEnd(); }); }); } else if (swiper.velocity) { swiper.updateProgress(newPosition); swiper.setTransition(momentumDuration); swiper.setTranslate(newPosition); swiper.transitionStart(); if (!swiper.animating) { swiper.animating = true; $wrapperEl.transitionEnd(() => { if (!swiper || swiper.destroyed) return; swiper.transitionEnd(); }); } } else { swiper.updateProgress(newPosition); } swiper.updateActiveIndex(); swiper.updateSlidesClasses(); } if (!params.freeModeMomentum || timeDiff >= params.longSwipesMs) { swiper.updateProgress(); swiper.updateActiveIndex(); swiper.updateSlidesClasses(); } return; } // Find current slide let stopIndex = 0; let groupSize = swiper.slidesSizesGrid[0]; for (let i = 0; i < slidesGrid.length; i += params.slidesPerGroup) { if (typeof slidesGrid[i + params.slidesPerGroup] !== 'undefined') { if (currentPos >= slidesGrid[i] && currentPos < slidesGrid[i + params.slidesPerGroup]) { stopIndex = i; groupSize = slidesGrid[i + params.slidesPerGroup] - slidesGrid[i]; } } else if (currentPos >= slidesGrid[i]) { stopIndex = i; groupSize = slidesGrid[slidesGrid.length 
- 1] - slidesGrid[slidesGrid.length - 2]; } } // Find current slide size const ratio = (currentPos - slidesGrid[stopIndex]) / groupSize; if (timeDiff > params.longSwipesMs) { // Long touches if (!params.longSwipes) { swiper.slideTo(swiper.activeIndex); return; } if (swiper.swipeDirection === 'next') { if (ratio >= params.longSwipesRatio) swiper.slideTo(stopIndex + params.slidesPerGroup); else swiper.slideTo(stopIndex); } if (swiper.swipeDirection === 'prev') { if (ratio > (1 - params.longSwipesRatio)) swiper.slideTo(stopIndex + params.slidesPerGroup); else swiper.slideTo(stopIndex); } } else { // Short swipes if (!params.shortSwipes) { swiper.slideTo(swiper.activeIndex); return; } if (swiper.swipeDirection === 'next') { swiper.slideTo(stopIndex + params.slidesPerGroup); } if (swiper.swipeDirection === 'prev') { swiper.slideTo(stopIndex); } } }
apache-2.0
rkpagadala/mixer
adapter/stackdriver/metric/metric_test.go
11661
// Copyright 2017 Istio Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package metric import ( "context" "fmt" "reflect" "strings" "testing" "time" monitoring "cloud.google.com/go/monitoring/apiv3" "github.com/golang/protobuf/ptypes" "google.golang.org/genproto/googleapis/api/distribution" metricpb "google.golang.org/genproto/googleapis/api/metric" "google.golang.org/genproto/googleapis/api/monitoredres" monitoringpb "google.golang.org/genproto/googleapis/monitoring/v3" descriptor "istio.io/api/mixer/v1/config/descriptor" "istio.io/mixer/adapter/stackdriver/config" "istio.io/mixer/pkg/adapter/test" metrict "istio.io/mixer/template/metric" ) type fakebuf struct { buf []*monitoringpb.TimeSeries } func (f *fakebuf) Record(in []*monitoringpb.TimeSeries) { f.buf = append(f.buf, in...) } func (*fakebuf) Close() error { return nil } var clientFunc = func(err error) createClientFunc { return func(cfg *config.Params) (*monitoring.MetricClient, error) { return nil, err } } func TestFactory_NewMetricsAspect(t *testing.T) { tests := []struct { name string cfg *config.Params metricNames []string missingMetrics []string // We check that the method logged these metric names because they're not mapped in cfg err string // If != "" we expect an error containing this string }{ {"empty", &config.Params{}, []string{}, []string{}, ""}, {"missing metric", &config.Params{}, []string{"request_count"}, []string{"request_count"}, ""}, { "happy path", &config.Params{MetricInfo: map[string]*config.Params_MetricInfo{"request_count": {}}}, []string{"request_count"}, []string{}, "", }, } for idx, tt := range tests { t.Run(fmt.Sprintf("[%d] %s", idx, tt.name), func(t *testing.T) { metrics := make(map[string]*metrict.Type) for _, name := range tt.metricNames { metrics[name] = &metrict.Type{} } env := test.NewEnv(t) b := &builder{createClient: clientFunc(nil)} b.SetMetricTypes(metrics) b.SetAdapterConfig(tt.cfg) _, err := b.Build(context.Background(), env) if err != nil || tt.err != "" { if tt.err == "" { t.Fatalf("factory{}.NewMetricsAspect(test.NewEnv(t), nil, nil) = '%s', wanted no err", err.Error()) } else if !strings.Contains(err.Error(), tt.err) { t.Fatalf("Expected errors containing the string '%s', actual: '%s'", tt.err, err.Error()) } } // If we expect missing metrics make sure they're present in the logs; otherwise make sure none were missing. 
if len(tt.missingMetrics) > 0 { for _, missing := range tt.missingMetrics { found := false for _, log := range env.GetLogs() { found = found || strings.Contains(log, missing) } if !found { t.Errorf("Wanted missing log %s, got logs: %v", missing, env.GetLogs()) } } } else { for _, log := range env.GetLogs() { if strings.Contains(log, "No stackdriver info found for metric") { t.Errorf("Expected no missing metrics, found log entry: %s", log) } } } }) } } func TestFactory_NewMetricsAspect_Errs(t *testing.T) { err := fmt.Errorf("expected") b := &builder{createClient: clientFunc(err)} b.SetAdapterConfig(&config.Params{}) res, e := b.Build(context.Background(), test.NewEnv(t)) if e != nil && !strings.Contains(e.Error(), err.Error()) { t.Fatalf("Expected error from factory.createClient to be propagated, got %v, %v", res, e) } else if e == nil { t.Fatalf("Got no error") } } func TestRecord(t *testing.T) { projectID := "pid" resource := &monitoredres.MonitoredResource{ Type: "global", Labels: map[string]string{ "project_id": projectID, }, } m := &metricpb.Metric{ Type: "type", Labels: map[string]string{"str": "str", "int": "34"}, } info := map[string]info{ "gauge": { ttype: "type", minfo: &config.Params_MetricInfo{Kind: metricpb.MetricDescriptor_GAUGE, Value: metricpb.MetricDescriptor_INT64}, vtype: descriptor.INT64, }, "cumulative": { ttype: "type", minfo: &config.Params_MetricInfo{Kind: metricpb.MetricDescriptor_CUMULATIVE, Value: metricpb.MetricDescriptor_STRING}, vtype: descriptor.STRING, }, "delta": { ttype: "type", minfo: &config.Params_MetricInfo{Kind: metricpb.MetricDescriptor_DELTA, Value: metricpb.MetricDescriptor_BOOL}, vtype: descriptor.BOOL, }, "distribution-linear": { ttype: "type", minfo: &config.Params_MetricInfo{ Kind: metricpb.MetricDescriptor_CUMULATIVE, Value: metricpb.MetricDescriptor_DISTRIBUTION, Buckets: &config.Params_MetricInfo_BucketsDefinition{Definition: &config.Params_MetricInfo_BucketsDefinition_LinearBuckets{ // under, 1-6, 6-11, over LinearBuckets: &config.Params_MetricInfo_BucketsDefinition_Linear{ NumFiniteBuckets: 2, Offset: 1, Width: 5, }}}, }, vtype: descriptor.DOUBLE, }, "distribution-exp": { ttype: "type", minfo: &config.Params_MetricInfo{ Kind: metricpb.MetricDescriptor_CUMULATIVE, Value: metricpb.MetricDescriptor_DISTRIBUTION, Buckets: &config.Params_MetricInfo_BucketsDefinition{Definition: &config.Params_MetricInfo_BucketsDefinition_ExponentialBuckets{ // under, 10-100, 100-1000, over ExponentialBuckets: &config.Params_MetricInfo_BucketsDefinition_Exponential{ NumFiniteBuckets: 2, Scale: 10, GrowthFactor: 10, }}}, }, vtype: descriptor.DOUBLE, }, "distribution-explicit": { ttype: "type", minfo: &config.Params_MetricInfo{ Kind: metricpb.MetricDescriptor_CUMULATIVE, Value: metricpb.MetricDescriptor_DISTRIBUTION, Buckets: &config.Params_MetricInfo_BucketsDefinition{Definition: &config.Params_MetricInfo_BucketsDefinition_ExplicitBuckets{ // under, 1-10, 10-100, over ExplicitBuckets: &config.Params_MetricInfo_BucketsDefinition_Explicit{ Bounds: []float64{1, 10, 100}, }}}, }, vtype: descriptor.DOUBLE, }, } now := time.Now() pbnow, _ := ptypes.TimestampProto(now) tests := []struct { name string vals []*metrict.Instance expected []*monitoringpb.TimeSeries }{ {"empty", []*metrict.Instance{}, []*monitoringpb.TimeSeries{}}, {"missing", []*metrict.Instance{{Name: "not in the info map"}}, []*monitoringpb.TimeSeries{}}, {"gauge", []*metrict.Instance{ { Name: "gauge", Value: int64(7), Dimensions: map[string]interface{}{"str": "str", "int": int64(34)}, }, }, 
[]*monitoringpb.TimeSeries{ { Metric: m, Resource: resource, MetricKind: metricpb.MetricDescriptor_GAUGE, ValueType: metricpb.MetricDescriptor_INT64, Points: []*monitoringpb.Point{{ Interval: &monitoringpb.TimeInterval{StartTime: pbnow, EndTime: pbnow}, Value: &monitoringpb.TypedValue{&monitoringpb.TypedValue_Int64Value{Int64Value: int64(7)}}, }}, }, }}, {"cumulative", []*metrict.Instance{ { Name: "cumulative", Value: "asldkfj", Dimensions: map[string]interface{}{"str": "str", "int": int64(34)}, }, }, []*monitoringpb.TimeSeries{ { Metric: m, Resource: resource, MetricKind: metricpb.MetricDescriptor_CUMULATIVE, ValueType: metricpb.MetricDescriptor_STRING, Points: []*monitoringpb.Point{{ Interval: &monitoringpb.TimeInterval{StartTime: pbnow, EndTime: pbnow}, Value: &monitoringpb.TypedValue{&monitoringpb.TypedValue_StringValue{StringValue: "asldkfj"}}, }}, }, }}, {"delta", []*metrict.Instance{ { Name: "delta", Value: true, Dimensions: map[string]interface{}{"str": "str", "int": int64(34)}, }, }, []*monitoringpb.TimeSeries{ { Metric: m, Resource: resource, MetricKind: metricpb.MetricDescriptor_DELTA, ValueType: metricpb.MetricDescriptor_BOOL, Points: []*monitoringpb.Point{{ Interval: &monitoringpb.TimeInterval{StartTime: pbnow, EndTime: pbnow}, Value: &monitoringpb.TypedValue{&monitoringpb.TypedValue_BoolValue{BoolValue: true}}, }}, }, }}, {"distribution-linear", []*metrict.Instance{ { Name: "distribution-linear", Value: float64(6), Dimensions: map[string]interface{}{"str": "str", "int": int64(34)}, }, }, []*monitoringpb.TimeSeries{ { Metric: m, Resource: resource, MetricKind: metricpb.MetricDescriptor_CUMULATIVE, ValueType: metricpb.MetricDescriptor_DISTRIBUTION, Points: []*monitoringpb.Point{{ Interval: &monitoringpb.TimeInterval{StartTime: pbnow, EndTime: pbnow}, Value: &monitoringpb.TypedValue{Value: &monitoringpb.TypedValue_DistributionValue{ DistributionValue: &distribution.Distribution{ Count: 1, BucketOptions: linear(1, 5, 2), BucketCounts: []int64{0, 0, 1, 0}, }}}, }}, }, }}, {"distribution-exp", []*metrict.Instance{ { Name: "distribution-exp", Value: float64(99), Dimensions: map[string]interface{}{"str": "str", "int": int64(34)}, }, }, []*monitoringpb.TimeSeries{ { Metric: m, Resource: resource, MetricKind: metricpb.MetricDescriptor_CUMULATIVE, ValueType: metricpb.MetricDescriptor_DISTRIBUTION, Points: []*monitoringpb.Point{{ Interval: &monitoringpb.TimeInterval{StartTime: pbnow, EndTime: pbnow}, Value: &monitoringpb.TypedValue{Value: &monitoringpb.TypedValue_DistributionValue{ DistributionValue: &distribution.Distribution{ Count: 1, BucketOptions: exp(10, 10, 2), BucketCounts: []int64{0, 1, 0, 0}, }}}, }}, }, }}, {"distribution-explicit", []*metrict.Instance{ { Name: "distribution-explicit", Value: float64(9), Dimensions: map[string]interface{}{"str": "str", "int": int64(34)}, }, }, []*monitoringpb.TimeSeries{ { Metric: m, Resource: resource, MetricKind: metricpb.MetricDescriptor_CUMULATIVE, ValueType: metricpb.MetricDescriptor_DISTRIBUTION, Points: []*monitoringpb.Point{{ Interval: &monitoringpb.TimeInterval{StartTime: pbnow, EndTime: pbnow}, Value: &monitoringpb.TypedValue{Value: &monitoringpb.TypedValue_DistributionValue{ DistributionValue: &distribution.Distribution{ Count: 1, BucketOptions: explicit([]float64{1, 10, 100}), BucketCounts: []int64{0, 1, 0, 0}, }}}, }}, }, }}, } for idx, tt := range tests { t.Run(fmt.Sprintf("[%d] %s", idx, tt.name), func(t *testing.T) { buf := &fakebuf{} s := &handler{metricInfo: info, projectID: projectID, client: buf, l: test.NewEnv(t).Logger(), 
now: func() time.Time { return now }} _ = s.HandleMetric(context.Background(), tt.vals) if len(buf.buf) != len(tt.expected) { t.Errorf("Want %d values to send, got %d", len(tt.expected), len(buf.buf)) } for _, expected := range tt.expected { found := false for _, actual := range buf.buf { found = found || reflect.DeepEqual(expected, actual) } if !found { t.Errorf("Want timeseries %v, but not present: %v", expected, buf.buf) } } }) } }
apache-2.0
dirkgroenen/mopidy-mopify
src/app/directives/module.js
112
'use strict'; var widgetModule = angular.module('mopify.widgets', [ 'spotify', 'mopify.services.mopidy' ]);
apache-2.0
flax3lbs/cpptasks-parallel
src/main/java/net/sf/antcontrib/cpptasks/ide/ProjectWriter.java
1695
/* * * Copyright 2002-2004 The Ant-Contrib project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sf.antcontrib.cpptasks.ide; import java.io.File; import java.io.IOException; import java.util.List; import java.util.Map; import net.sf.antcontrib.cpptasks.CCTask; import net.sf.antcontrib.cpptasks.TargetInfo; import org.xml.sax.SAXException; /** * Project writer interface. * * @author curta * */ public interface ProjectWriter { /** * Write project definition file. * @param baseName File name base, writer may append appropriate extension * @param task task * @param projectDef project element * @param files source and header files * @param targets compilation targets * @param linkTarget link target * @throws IOException if I/O error is encountered * @throws SAXException if I/O error during XML serialization */ void writeProject(final File baseName, final CCTask task, final ProjectDef projectDef, final List files, final Map targets, final TargetInfo linkTarget) throws IOException, SAXException; }
apache-2.0
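ProjectWriter is a pure SPI: cpptasks hands an implementation the project definition, the source/header file list, the compilation targets and the link target, and leaves the on-disk format entirely to the writer. The class below is a deliberately trivial, hypothetical implementation that only dumps the file list, to make the contract concrete; real writers serialize the targets into an IDE's native project format.

package net.sf.antcontrib.cpptasks.ide;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import net.sf.antcontrib.cpptasks.CCTask;
import net.sf.antcontrib.cpptasks.TargetInfo;

public class FileListProjectWriter implements ProjectWriter {

    public void writeProject(final File baseName, final CCTask task,
                             final ProjectDef projectDef, final List files,
                             final Map targets, final TargetInfo linkTarget)
            throws IOException {
        // Writes "<baseName>.filelist" containing one source/header path per line.
        FileWriter out = new FileWriter(baseName.getAbsolutePath() + ".filelist");
        try {
            for (Iterator it = files.iterator(); it.hasNext();) {
                out.write(String.valueOf(it.next()));
                out.write('\n');
            }
        } finally {
            out.close();
        }
    }
}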
codeprimate-software/cp-elements
src/test/java/org/cp/elements/time/DateTimeUtilsUnitTests.java
3427
/* * Copyright 2011-Present Author or Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cp.elements.time; import static org.assertj.core.api.Assertions.assertThat; import java.util.Calendar; import org.cp.elements.test.TestUtils; import org.junit.Test; /** * Unit Tests for {@link DateTimeUtils}. * * @author John J. Blum * @see java.util.Calendar * @see org.junit.Test * @see org.cp.elements.test.TestUtils * @see org.cp.elements.time.DateTimeUtils * @since 1.0.0 */ public class DateTimeUtilsUnitTests { @Test public void cloneIsCorrect() { Calendar expectedDateTime = TestUtils.createCalendar(2011, Calendar.NOVEMBER, 9, 1, 45, 30); Calendar actualDateTime = DateTimeUtils.clone(expectedDateTime); assertThat(actualDateTime).isNotNull(); assertThat(actualDateTime).isNotSameAs(expectedDateTime); assertThat(actualDateTime).isEqualTo(expectedDateTime); } @Test public void cloneWithNullIsNullSafe() { assertThat(DateTimeUtils.clone(null)).isNull(); } @Test public void createIsCorrect() { Calendar expectedDateTime = TestUtils.createCalendar(2013, Calendar.OCTOBER, 19, 10, 36, 0); Calendar actualDateTime = DateTimeUtils.create(expectedDateTime.getTimeInMillis()); assertThat(actualDateTime).isNotNull(); assertThat(actualDateTime).isNotSameAs(expectedDateTime); assertThat(actualDateTime).isEqualTo(expectedDateTime); } @Test public void truncateIsCorrect() { Calendar dateTime = TestUtils.createCalendar(2011, Calendar.NOVEMBER, 8, 16, 15, 30); Calendar expectedDateTime = TestUtils.createCalendar(2011, Calendar.NOVEMBER, 8); Calendar actualDateTime = DateTimeUtils.truncate(dateTime); assertThat(actualDateTime).isNotNull(); assertThat(actualDateTime).isSameAs(dateTime); assertThat(actualDateTime).isEqualTo(expectedDateTime); } @Test public void truncateWithNoTimeIsCorrect() { Calendar expectedDateTime = TestUtils.createCalendar(2011, Calendar.NOVEMBER, 7); assertThat(expectedDateTime).isNotNull(); assertThat(expectedDateTime.get(Calendar.HOUR_OF_DAY)).isEqualTo(0); assertThat(expectedDateTime.get(Calendar.MINUTE)).isEqualTo(0); assertThat(expectedDateTime.get(Calendar.SECOND)).isEqualTo(0); assertThat(expectedDateTime.get(Calendar.MILLISECOND)).isEqualTo(0); Calendar actualDateTime = DateTimeUtils.truncate(expectedDateTime); assertThat(actualDateTime).isNotNull(); assertThat(actualDateTime).isSameAs(expectedDateTime); assertThat(actualDateTime.get(Calendar.HOUR_OF_DAY)).isEqualTo(0); assertThat(actualDateTime.get(Calendar.MINUTE)).isEqualTo(0); assertThat(actualDateTime.get(Calendar.SECOND)).isEqualTo(0); assertThat(actualDateTime.get(Calendar.MILLISECOND)).isEqualTo(0); } @Test public void truncateWithNullIsNullSafe() { assertThat(DateTimeUtils.truncate(null)).isNull(); } }
apache-2.0
antoinesd/weld-core
impl/src/main/java/org/jboss/weld/injection/AbstractResourceInjection.java
2463
/* * JBoss, Home of Professional Open Source * Copyright 2008, Red Hat, Inc., and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.weld.injection; import java.lang.reflect.Member; import javax.enterprise.context.spi.CreationalContext; import org.jboss.weld.contexts.WeldCreationalContext; import org.jboss.weld.injection.spi.ResourceReference; import org.jboss.weld.injection.spi.ResourceReferenceFactory; import org.jboss.weld.logging.UtilLogger; import org.jboss.weld.util.reflection.Formats; import org.jboss.weld.util.reflection.Reflections; /** * Abstract resource injection. * * @author Martin Kouba * * @param <T> */ abstract class AbstractResourceInjection<T> implements ResourceInjection<T> { private final ResourceReferenceFactory<T> factory; AbstractResourceInjection(ResourceReferenceFactory<T> factory) { this.factory = factory; } @Override public T getResourceReference(CreationalContext<?> ctx) { ResourceReference<T> reference = null; if (factory != null) { reference = factory.createResource(); } if (reference != null) { if (ctx instanceof WeldCreationalContext<?>) { Reflections.<WeldCreationalContext<?>> cast(ctx).addDependentResourceReference(reference); } return reference.getInstance(); } UtilLogger.LOG.unableToInjectResource(getMember(), Formats.formatAsStackTraceElement(getMember())); return null; } @Override public void injectResourceReference(Object declaringInstance, CreationalContext<?> ctx) { injectMember(declaringInstance, getResourceReference(ctx)); } protected abstract void injectMember(Object declaringInstance, Object reference); abstract Member getMember(); }
apache-2.0
cdesjardins/poderosa
TerminalEmulator/TerminalUtil.cs
4317
/* * Copyright 2004,2006 The Poderosa Project. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * $Id: TerminalUtil.cs,v 1.2 2011/10/27 23:21:58 kzmi Exp $ */ using System; using System.IO; using System.Net; using System.Net.Sockets; using System.Text; using System.Windows.Forms; using Poderosa.ConnectionParam; using Poderosa.Util; namespace Poderosa.Terminal { /// <summary> /// /// </summary> /// <exclude/> public enum TerminalMode { Normal, Application } /// <summary> /// /// </summary> /// <exclude/> public class TerminalUtil { public static char[] NewLineChars(NewLine nl) { switch (nl) { case NewLine.CR: return new char[1] { '\r' }; case NewLine.LF: return new char[1] { '\n' }; case NewLine.CRLF: return new char[2] { '\r', '\n' }; default: throw new ArgumentException("Unknown NewLine " + nl); } } //TODO: better to make this static? though accidental corruption of the shared array would be a worry public static byte[] NewLineBytes(NewLine nl) { switch (nl) { case NewLine.CR: return new byte[1] { (byte)'\r' }; case NewLine.LF: return new byte[1] { (byte)'\n' }; case NewLine.CRLF: return new byte[2] { (byte)'\r', (byte)'\n' }; default: throw new ArgumentException("Unknown NewLine " + nl); } } public static NewLine NextNewLineOption(NewLine nl) { switch (nl) { case NewLine.CR: return NewLine.LF; case NewLine.LF: return NewLine.CRLF; case NewLine.CRLF: return NewLine.CR; default: throw new ArgumentException("Unknown NewLine " + nl); } } //List of valid baud rates public static string[] BaudRates { get { return new string[] { "110", "300", "600", "1200", "2400", "4800", "9600", "14400", "19200", "38400", "57600", "115200" }; } } //Private key file selection public static string SelectPrivateKeyFileByDialog(Form parent) { OpenFileDialog dlg = new OpenFileDialog(); dlg.CheckFileExists = true; dlg.Multiselect = false; dlg.Title = "Select Private Key File"; dlg.Filter = "Key Files(*.bin;*)|*.bin;*"; if (dlg.ShowDialog(parent) == DialogResult.OK) { return dlg.FileName; } else return null; } } //The equivalent of this could be done with the ToAscii API, but that is a bit awkward, so keep a reverse-lookup map statically internal class KeyboardInfo { public static char[] _defaultGroup; public static char[] _shiftGroup; public static void Init() { _defaultGroup = new char[256]; _shiftGroup = new char[256]; for (int i = 32; i < 128; i++) { short v = Win32.VkKeyScan((char)i); bool shift = (v & 0x0100) != 0; short body = (short)(v & 0x00FF); if (shift) _shiftGroup[body] = (char)i; else _defaultGroup[body] = (char)i; } } public static char Scan(Keys body, bool shift) { if (_defaultGroup == null) Init(); //control characters that can be sent with a single keystroke on their own if (body == Keys.Escape) return (char)0x1B; else if (body == Keys.Tab) return (char)0x09; else if (body == Keys.Back) return (char)0x08; else if (body == Keys.Delete) return (char)0x7F; if (shift) return _shiftGroup[(int)body]; else return _defaultGroup[(int)body]; } } }
apache-2.0
Skunnyk/dbpedia-spotlight-model
core/src/main/scala/org/dbpedia/spotlight/disambiguate/mixtures/Fader2Mixture.scala
817
package org.dbpedia.spotlight.disambiguate.mixtures import org.dbpedia.spotlight.model.DBpediaResourceOccurrence /** * Adaptation of Fader et al. (2009) mixture */ class Fader2Mixture(override val contextWeight: Double, val alpha: Double) extends Mixture(contextWeight) { // def getScore(contextScore: Double, uriCount: Int) = { // val prominence = 1 + math.log( 1 + uriCount / alpha ) // // (contextWeight * contextScore) + (1 - contextWeight) * prominence // } def getScore(occurrence: DBpediaResourceOccurrence) : Double = { val prominence = 1 + math.log( 1 + occurrence.resource.prior * alpha ) (contextWeight * occurrence.contextualScore) + (1 - contextWeight) * prominence } override def toString = "Fader2Mixture[alpha="+alpha+"]("+contextWeight+")" }
apache-2.0
flashback2k14/yc
mobile/src/main/java/common/services/billing/IabResult.java
1741
/* Copyright (c) 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package common.services.billing; /** * Represents the result of an in-app billing operation. * A result is composed of a response code (an integer) and possibly a * message (String). You can get those by calling * {@link #getResponse} and {@link #getMessage()}, respectively. You * can also inquire whether a result is a success or a failure by * calling {@link #isSuccess()} and {@link #isFailure()}. */ public class IabResult { int mResponse; String mMessage; public IabResult(int response, String message) { mResponse = response; if (message == null || message.trim().length() == 0) { mMessage = IabHelper.getResponseDesc(response); } else { mMessage = message + " (response: " + IabHelper.getResponseDesc(response) + ")"; } } public int getResponse() { return mResponse; } public String getMessage() { return mMessage; } public boolean isSuccess() { return mResponse == IabHelper.BILLING_RESPONSE_RESULT_OK; } public boolean isFailure() { return !isSuccess(); } public String toString() { return "IabResult: " + getMessage(); } }
apache-2.0
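In the surrounding billing sample, an IabResult is what the asynchronous callbacks hand back, so callers usually just branch on isFailure() and log getMessage(). The listener interface and the Purchase class belong to the companion IabHelper sources, which are not part of this record, so the callback below is an assumed usage sketch rather than code from the project.

package common.services.billing;

import android.util.Log;

public class PurchaseCallback implements IabHelper.OnIabPurchaseFinishedListener {

    @Override
    public void onIabPurchaseFinished(IabResult result, Purchase purchase) {
        if (result.isFailure()) {
            // getMessage() already embeds the human-readable response description.
            Log.w("Billing", "Purchase failed: " + result.getMessage());
            return;
        }
        // isSuccess() is simply !isFailure(); grant the purchased item here.
        Log.i("Billing", "Completed: " + result);   // toString() prints "IabResult: <message>"
    }
}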
ContextLogic/luigi
luigi/contrib/postgres.py
13368
# -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Implements a subclass of :py:class:`~luigi.target.Target` that writes data to Postgres. Also provides a helper task to copy data into a Postgres table. """ import datetime import logging import re import tempfile from luigi import six import luigi from luigi.contrib import rdbms logger = logging.getLogger('luigi-interface') try: import psycopg2 import psycopg2.errorcodes import psycopg2.extensions except ImportError: logger.warning("Loading postgres module without psycopg2 installed. Will crash at runtime if postgres functionality is used.") class MultiReplacer(object): """ Object for one-pass replace of multiple words Substituted parts will not be matched against other replace patterns, as opposed to when using multipass replace. The order of the items in the replace_pairs input will dictate replacement precedence. Constructor arguments: replace_pairs -- list of 2-tuples which hold strings to be replaced and replace string Usage: .. code-block:: python >>> replace_pairs = [("a", "b"), ("b", "c")] >>> MultiReplacer(replace_pairs)("abcd") 'bccd' >>> replace_pairs = [("ab", "x"), ("a", "x")] >>> MultiReplacer(replace_pairs)("ab") 'x' >>> replace_pairs.reverse() >>> MultiReplacer(replace_pairs)("ab") 'xb' """ # TODO: move to misc/util module def __init__(self, replace_pairs): """ Initializes a MultiReplacer instance. :param replace_pairs: list of 2-tuples which hold strings to be replaced and replace string. :type replace_pairs: tuple """ replace_list = list(replace_pairs) # make a copy in case input is iterable self._replace_dict = dict(replace_list) pattern = '|'.join(re.escape(x) for x, y in replace_list) self._search_re = re.compile(pattern) def _replacer(self, match_object): # this method is used as the replace function in the re.sub below return self._replace_dict[match_object.group()] def __call__(self, search_string): # using function replacing for a per-result replace return self._search_re.sub(self._replacer, search_string) # these are the escape sequences recognized by postgres COPY # according to http://www.postgresql.org/docs/8.1/static/sql-copy.html default_escape = MultiReplacer([('\\', '\\\\'), ('\t', '\\t'), ('\n', '\\n'), ('\r', '\\r'), ('\v', '\\v'), ('\b', '\\b'), ('\f', '\\f') ]) class PostgresTarget(luigi.Target): """ Target for a resource in Postgres. This will rarely have to be directly instantiated by the user. """ marker_table = luigi.configuration.get_config().get('postgres', 'marker-table', 'table_updates') # Use DB side timestamps or client side timestamps in the marker_table use_db_timestamps = True def __init__( self, host, database, user, password, table, update_id, port=5432 ): """ Args: host (str): Postgres server address. Possibly a host:port string. database (str): Database name user (str): Database user password (str): Password for specified user update_id (str): An identifier for this data set port (int): Postgres server port. 
""" if ':' in host: self.host, self.port = host.split(':') else: self.host = host self.port = port self.database = database self.user = user self.password = password self.table = table self.update_id = update_id def touch(self, connection=None): """ Mark this update as complete. Important: If the marker table doesn't exist, the connection transaction will be aborted and the connection reset. Then the marker table will be created. """ self.create_marker_table() if connection is None: # TODO: test this connection = self.connect() connection.autocommit = True # if connection created here, we commit it here if self.use_db_timestamps: connection.cursor().execute( """INSERT INTO {marker_table} (update_id, target_table) VALUES (%s, %s) """.format(marker_table=self.marker_table), (self.update_id, self.table)) else: connection.cursor().execute( """INSERT INTO {marker_table} (update_id, target_table, inserted) VALUES (%s, %s, %s); """.format(marker_table=self.marker_table), (self.update_id, self.table, datetime.datetime.now())) def exists(self, connection=None): if connection is None: connection = self.connect() connection.autocommit = True cursor = connection.cursor() try: cursor.execute("""SELECT 1 FROM {marker_table} WHERE update_id = %s LIMIT 1""".format(marker_table=self.marker_table), (self.update_id,) ) row = cursor.fetchone() except psycopg2.ProgrammingError as e: if e.pgcode == psycopg2.errorcodes.UNDEFINED_TABLE: row = None else: raise return row is not None def connect(self): """ Get a psycopg2 connection object to the database where the table is. """ connection = psycopg2.connect( host=self.host, port=self.port, database=self.database, user=self.user, password=self.password) connection.set_client_encoding('utf-8') return connection def create_marker_table(self): """ Create marker table if it doesn't exist. Using a separate connection since the transaction might have to be reset. """ connection = self.connect() connection.autocommit = True cursor = connection.cursor() if self.use_db_timestamps: sql = """ CREATE TABLE {marker_table} ( update_id TEXT PRIMARY KEY, target_table TEXT, inserted TIMESTAMP DEFAULT NOW()) """.format(marker_table=self.marker_table) else: sql = """ CREATE TABLE {marker_table} ( update_id TEXT PRIMARY KEY, target_table TEXT, inserted TIMESTAMP); """.format(marker_table=self.marker_table) try: cursor.execute(sql) except psycopg2.ProgrammingError as e: if e.pgcode == psycopg2.errorcodes.DUPLICATE_TABLE: pass else: raise connection.close() def open(self, mode): raise NotImplementedError("Cannot open() PostgresTarget") class CopyToTable(rdbms.CopyToTable): """ Template task for inserting a data set into Postgres Usage: Subclass and override the required `host`, `database`, `user`, `password`, `table` and `columns` attributes. To customize how to access data from an input task, override the `rows` method with a generator that yields each row as a tuple with fields ordered according to `columns`. """ def rows(self): """ Return/yield tuples or lists corresponding to each row to be inserted. """ with self.input().open('r') as fobj: for line in fobj: yield line.strip('\n').split('\t') def map_column(self, value): """ Applied to each column of every row returned by `rows`. Default behaviour is to escape special characters and identify any self.null_values. 
""" if value in self.null_values: return r'\\N' else: return default_escape(six.text_type(value)) # everything below will rarely have to be overridden def output(self): """ Returns a PostgresTarget representing the inserted dataset. Normally you don't override this. """ return PostgresTarget( host=self.host, database=self.database, user=self.user, password=self.password, table=self.table, update_id=self.update_id ) def copy(self, cursor, file): if isinstance(self.columns[0], six.string_types): column_names = self.columns elif len(self.columns[0]) == 2: column_names = [c[0] for c in self.columns] else: raise Exception('columns must consist of column strings or (column string, type string) tuples (was %r ...)' % (self.columns[0],)) cursor.copy_from(file, self.table, null=r'\\N', sep=self.column_separator, columns=column_names) def run(self): """ Inserts data generated by rows() into target table. If the target table doesn't exist, self.create_table will be called to attempt to create the table. Normally you don't want to override this. """ if not (self.table and self.columns): raise Exception("table and columns need to be specified") connection = self.output().connect() # transform all data generated by rows() using map_column and write data # to a temporary file for import using postgres COPY tmp_dir = luigi.configuration.get_config().get('postgres', 'local-tmp-dir', None) tmp_file = tempfile.TemporaryFile(dir=tmp_dir) n = 0 for row in self.rows(): n += 1 if n % 100000 == 0: logger.info("Wrote %d lines", n) rowstr = self.column_separator.join(self.map_column(val) for val in row) rowstr += "\n" tmp_file.write(rowstr.encode('utf-8')) logger.info("Done writing, importing at %s", datetime.datetime.now()) tmp_file.seek(0) # attempt to copy the data into postgres # if it fails because the target table doesn't exist # try to create it by running self.create_table for attempt in range(2): try: cursor = connection.cursor() self.init_copy(connection) self.copy(cursor, tmp_file) self.post_copy(connection) except psycopg2.ProgrammingError as e: if e.pgcode == psycopg2.errorcodes.UNDEFINED_TABLE and attempt == 0: # if first attempt fails with "relation not found", try creating table logger.info("Creating table %s", self.table) connection.reset() self.create_table(connection) else: raise else: break # mark as complete in same transaction self.output().touch(connection) # commit and clean up connection.commit() connection.close() tmp_file.close() class PostgresQuery(rdbms.Query): """ Template task for querying a Postgres compatible database Usage: Subclass and override the required `host`, `database`, `user`, `password`, `table`, and `query` attributes. Override the `run` method if your use case requires some action with the query result. Task instances require a dynamic `update_id`, e.g. via parameter(s), otherwise the query will only execute once To customize the query signature as recorded in the database marker table, override the `update_id` property. """ def run(self): connection = self.output().connect() cursor = connection.cursor() sql = self.query logger.info('Executing query from task: {name}'.format(name=self.__class__)) cursor.execute(sql) # Update marker table self.output().touch(connection) # commit and close connection connection.commit() connection.close() def output(self): """ Returns a PostgresTarget representing the executed query. Normally you don't override this. 
""" return PostgresTarget( host=self.host, database=self.database, user=self.user, password=self.password, table=self.table, update_id=self.update_id )
apache-2.0
lankavitharana/product-dss
modules/integration/tests-integration/tests/src/test/java/org/wso2/dss/integration/test/odata/ODataSuperTenantUserTestCase.java
10581
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.dss.integration.test.odata; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPatch; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import org.wso2.dss.integration.test.DSSIntegrationTest; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.List; /** * This class contains OData specific test cases. to verify the functionality of odata services in super tenant mode. */ public class ODataSuperTenantUserTestCase extends DSSIntegrationTest { private final String serviceName = "ODataSampleSuperTenantService"; private final String configId = "default"; private String webAppUrl; @BeforeClass(alwaysRun = true) public void serviceDeployment() throws Exception { super.init(); List<File> sqlFileLis = new ArrayList<>(); sqlFileLis.add(selectSqlFile("CreateODataTables.sql")); sqlFileLis.add(selectSqlFile("Customers.sql")); deployService(serviceName, createArtifact(getResourceLocation() + File.separator + "dbs" + File.separator + "odata" + File.separator + "ODataSampleSuperTenantService.dbs", sqlFileLis)); webAppUrl = dssContext.getContextUrls().getWebAppURL(); } @AfterClass(alwaysRun = true) public void destroy() throws Exception { deleteService(serviceName); cleanup(); } @Test(groups = { "wso2.dss" }, description = "service document retrieval test") public void validateServiceDocumentTestCase() throws Exception { String endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/$metadata"; Object[] response = sendGET(endpoint, "Application/xml"); Assert.assertEquals(response[0], ODataTestUtils.OK); endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/"; response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); } @Test(groups = { "wso2.dss" }, description = "entity retrieval test") public void validateRetrievingData() throws Exception { String endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/CUSTOMERS"; Object[] response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); } @Test(groups = { "wso2.dss" }, description = "insertion entity test") public void validatePostingData() throws Exception { String endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/FILES"; String content = "{\"FILENAME\": 
\"M.K.H.Gunasekara\" ,\"TYPE\" : \"dss\"}"; int responseCode = sendPOST(endpoint, content, "application/json"); Assert.assertEquals(responseCode, ODataTestUtils.NO_CONTENT); endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/FILES(\'M.K.H.Gunasekara\')"; Object[] response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/STUDENT"; content = "{\"STUDENTID\" : 1 , \"FIRSTNAME\" : \"Madhawa\" , \"LASTNAME\" : \"Kasun\"}"; responseCode = sendPOST(endpoint, content, "application/json"); Assert.assertEquals(responseCode, ODataTestUtils.NO_CONTENT); endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/STUDENT(1)"; response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); content = "{\"STUDENTID\" : 2 , \"FIRSTNAME\" : \"Rajith\" , \"LASTNAME\" : \"Vitharana\"}"; responseCode = sendPOST(endpoint, content, "application/json"); Assert.assertEquals(responseCode, ODataTestUtils.NO_CONTENT); endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/STUDENT(2)"; response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); } @Test(groups = { "wso2.dss" }, description = "entity modification with put method test", dependsOnMethods = "validatePatchingData") public void validatePuttingData() throws Exception { String endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/STUDENT(1)"; String content = "{\"LASTNAME\" : \"GUNASEKARA\"}"; int responseCode = sendPUT(endpoint, content, "application/json"); Assert.assertEquals(responseCode, ODataTestUtils.NO_CONTENT); Object[] response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); Assert.assertTrue(response[1].toString().contains("\"FIRSTNAME\":null") && response[1].toString().contains("\"LASTNAME\":\"GUNASEKARA\"")); } @Test(groups = { "wso2.dss" }, description = "entity modification with patch method test", dependsOnMethods = "validatePostingData") public void validatePatchingData() throws Exception { String endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/STUDENT(2)"; String content = "{\"LASTNAME\" : \"Lanka\"}"; int responseCode = sendPATCH(endpoint, content, "application/json"); Assert.assertEquals(responseCode, ODataTestUtils.NO_CONTENT); Object[] response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); Assert.assertTrue(response[1].toString().contains("\"FIRSTNAME\":\"Rajith\"") && response[1].toString().contains("\"LASTNAME\":\"Lanka\"")); } @Test(groups = { "wso2.dss" }, description = "entity deletion test", dependsOnMethods = "validatePuttingData") public void validateDeletingData() throws Exception { String endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/STUDENT(1)"; int responseCode = sendDELETE(endpoint, "application/json"); Assert.assertEquals(responseCode, ODataTestUtils.NO_CONTENT); Object[] response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.NOT_FOUND); } @Test(groups = { "wso2.dss" }, description = "entity retrieval with select test") public void validateSelectingData() throws Exception { String endpoint = webAppUrl + "/odata/" + serviceName + "/" + configId + "/CUSTOMERS?$select=PHONE,COUNTRY,POSTALCODE"; Object[] response = sendGET(endpoint, "Application/json"); Assert.assertEquals(response[0], ODataTestUtils.OK); 
Assert.assertTrue(response[1].toString().contains("PHONE") && response[1].toString().contains("COUNTRY") && response[1].toString().contains("POSTALCODE")); Assert.assertTrue(!response[1].toString().contains("CONTACTLASTNAME") || !response[1].toString().contains("CUSTOMERNUMBER")); } private static int sendPOST(String endpoint, String content, String acceptType) throws IOException { HttpClient httpClient = new DefaultHttpClient(); HttpPost httpPost = new HttpPost(endpoint); httpPost.setHeader("Accept", acceptType); if (null != content) { HttpEntity httpEntity = new ByteArrayEntity(content.getBytes("UTF-8")); httpPost.setHeader("Content-Type", "application/json"); httpPost.setEntity(httpEntity); } HttpResponse httpResponse = httpClient.execute(httpPost); return httpResponse.getStatusLine().getStatusCode(); } private static Object[] sendGET(String endpoint, String acceptType) throws IOException { HttpClient httpClient = new DefaultHttpClient(); HttpGet httpGet = new HttpGet(endpoint); httpGet.setHeader("Accept", acceptType); HttpResponse httpResponse = httpClient.execute(httpGet); if (httpResponse.getEntity() != null) { BufferedReader reader = new BufferedReader(new InputStreamReader(httpResponse.getEntity().getContent())); String inputLine; StringBuilder response = new StringBuilder(); while ((inputLine = reader.readLine()) != null) { response.append(inputLine); } reader.close(); return new Object[] { httpResponse.getStatusLine().getStatusCode(), response.toString() }; } else { return new Object[] { httpResponse.getStatusLine().getStatusCode() }; } } private static int sendPUT(String endpoint, String content, String acceptType) throws IOException { HttpClient httpClient = new DefaultHttpClient(); HttpPut httpPut = new HttpPut(endpoint); httpPut.setHeader("Accept", acceptType); if (null != content) { HttpEntity httpEntity = new ByteArrayEntity(content.getBytes("UTF-8")); httpPut.setHeader("Content-Type", "application/json"); httpPut.setEntity(httpEntity); } HttpResponse httpResponse = httpClient.execute(httpPut); return httpResponse.getStatusLine().getStatusCode(); } private static int sendPATCH(String endpoint, String content, String acceptType) throws IOException { HttpClient httpClient = new DefaultHttpClient(); HttpPatch httpPatch = new HttpPatch(endpoint); httpPatch.setHeader("Accept", acceptType); if (null != content) { HttpEntity httpEntity = new ByteArrayEntity(content.getBytes("UTF-8")); httpPatch.setHeader("Content-Type", "application/json"); httpPatch.setEntity(httpEntity); } HttpResponse httpResponse = httpClient.execute(httpPatch); return httpResponse.getStatusLine().getStatusCode(); } private static int sendDELETE(String endpoint, String acceptType) throws IOException { HttpClient httpClient = new DefaultHttpClient(); HttpDelete httpDelete = new HttpDelete(endpoint); httpDelete.setHeader("Accept", acceptType); HttpResponse httpResponse = httpClient.execute(httpDelete); return httpResponse.getStatusLine().getStatusCode(); } }
apache-2.0
barnyard/pi
p2p-instancemanager/src/main/java/com/bt/pi/app/instancemanager/images/ImageLoader.java
214
/* (c) British Telecommunications plc, 2009, All Rights Reserved */ package com.bt.pi.app.instancemanager.images; public interface ImageLoader { String saveImage(String imagePath, String piCacheDirectory); }
apache-2.0
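The ImageLoader record above is a single-method contract: an implementation receives a source image path plus the pi cache directory and returns the location of the stored copy. A minimal sketch of one possible implementation follows; the class name and the use of java.nio.file are illustrative assumptions and are not part of the barnyard/pi sources.

package com.bt.pi.app.instancemanager.images;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

// Hypothetical implementation for illustration only: copy the image into the
// cache directory and return the cached location.
public class CopyingImageLoader implements ImageLoader {

    @Override
    public String saveImage(String imagePath, String piCacheDirectory) {
        Path source = Paths.get(imagePath);
        Path target = Paths.get(piCacheDirectory).resolve(source.getFileName());
        try {
            Files.createDirectories(target.getParent());
            Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException e) {
            throw new RuntimeException("Could not cache image " + imagePath, e);
        }
        return target.toString();
    }
}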
quattor/aquilon
lib/aquilon/worker/commands/show_bunker_bunker.py
1045
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*- # ex: set expandtab softtabstop=4 shiftwidth=4: # # Copyright (C) 2008,2009,2010,2011,2013,2016 Contributor # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Contains the logic for `aq show bunker --bunker`.""" from aquilon.aqdb.model import Bunker from aquilon.worker.broker import BrokerCommand class CommandShowBunkerBunker(BrokerCommand): required_parameters = ["bunker"] def render(self, session, bunker, **_): return Bunker.get_unique(session, bunker, compel=True)
apache-2.0
bcferrycoder/java-buildpack-cf-w-jboss
spec/java_buildpack/container/spring_boot_cli_spec.rb
3859
# Encoding: utf-8 # Cloud Foundry Java Buildpack # Copyright 2013 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'spec_helper' require 'component_helper' require 'java_buildpack/container/spring_boot_cli' describe JavaBuildpack::Container::SpringBootCLI do include_context 'component_helper' it 'should not detect a non-Groovy project', app_fixture: 'container_main' do expect(component.detect).to be_nil end it 'should not detect a .groovy directory', app_fixture: 'container_groovy_dot_groovy' do expect(component.detect).to be_nil end it 'should not detect if the application has a WEB-INF directory', app_fixture: 'container_spring_boot_cli_groovy_with_web_inf' do expect(component.detect).to be_nil end it 'should not detect if one of the Groovy files is not a POGO', app_fixture: 'container_spring_boot_cli_non_pogo' do expect(component.detect).to be_nil end it 'should not detect if one of the Groovy files has a shebang', app_fixture: 'container_groovy_shebang' do expect(component.detect).to be_nil end it 'should not detect a Groovy file which has a shebang but which also contains a class', app_fixture: 'container_groovy_shebang_containing_class' do expect(component.detect).to be_nil end it 'should not detect if one of the Groovy files has a main() method', app_fixture: 'container_spring_boot_cli_main_method' do expect(component.detect).to be_nil end it 'should detect if there are Groovy files and they are all POGOs plus a beans-style configuration', app_fixture: 'container_spring_boot_cli_beans_configuration' do expect(component.detect).to eq("spring-boot-cli=#{version}") end it 'should detect if there are Groovy files and they are all POGOs with no main method and there is no WEB-INF directory', app_fixture: 'container_spring_boot_cli_valid_app' do expect(component.detect).to eq("spring-boot-cli=#{version}") end it 'should extract Spring Boot CLI from a ZIP', app_fixture: 'container_spring_boot_cli_valid_app', cache_fixture: 'stub-spring-boot-cli.tar.gz' do component.compile expect(sandbox + 'bin/spring').to exist end it 'should link classpath JARs', app_fixture: 'container_spring_boot_cli_valid_app', cache_fixture: 'stub-spring-boot-cli.tar.gz' do component.compile lib = sandbox + 'lib' jar_1 = lib + 'test-jar-1.jar' expect(jar_1).to exist expect(jar_1).to be_symlink expect(jar_1.readlink).to eq((additional_libs_directory + 'test-jar-1.jar').relative_path_from(lib)) jar_2 = lib + 'test-jar-2.jar' expect(jar_2).to exist expect(jar_2).to be_symlink expect(jar_2.readlink).to eq((additional_libs_directory + 'test-jar-2.jar').relative_path_from(lib)) end it 'should return command', app_fixture: 'container_spring_boot_cli_valid_app' do expect(component.release).to eq("#{java_home.as_env_var} JAVA_OPTS=#{java_opts_str} " \ '$PWD/.java-buildpack/spring_boot_cli/bin/spring run directory/pogo_4.groovy ' \ 'invalid.groovy pogo_1.groovy pogo_2.groovy pogo_3.groovy -- --server.port=$PORT') end def java_opts_str "\"#{java_opts.sort.join(' ')}\"" end end
apache-2.0
deliveryhero/distconfig
distconfig/tests/unit/test_backend.py
3932
import json import unittest import mock from distconfig.backends import base class FakeBackend(base.BaseBackend): def __init__(self, data, **kwargs): super(FakeBackend, self).__init__(**kwargs) self._data = data def get_raw(self, path): return self._data.get(path) class BackendTestCase(unittest.TestCase): def setUp(self): self.value = {'foo': 'bar'} self.raw_value = json.dumps(self.value) self.backend = FakeBackend({ '/some/path': self.raw_value }) def test_backend_get(self): self.assertEqual(self.backend.get('/some/path'), self.value) def test_backend_get_unexistant_path(self): self.assertEqual(self.backend.get('/no/existing/path'), {}) def test_add_listeners(self): callback = mock.Mock(return_value=None) self.backend.add_listener(callback) self.backend._notify_listeners(self.raw_value) callback.assert_called_once_with(self.value) def test_add_multiple_listeners(self): callback1 = mock.Mock(return_value=None) self.backend.add_listener(callback1) callback2 = mock.Mock(return_value=None) self.backend.add_listener(callback2) self.backend._notify_listeners(self.raw_value) callback1.assert_called_once_with(self.value) callback2.assert_called_once_with(self.value) def test_add_listeners_multiple_time(self): callback = mock.Mock(return_value=None) self.backend.add_listener(callback) self.backend.add_listener(callback) self.backend.add_listener(callback) self.backend._notify_listeners(self.raw_value) self.assertEqual(callback.call_count, 3) callback.assert_called_with(self.value) def test_remove_listeners(self): callback = mock.Mock(return_value=None) self.backend.add_listener(callback) self.backend.remove_listener(callback) self.backend._notify_listeners(self.raw_value) self.assertEqual(callback.call_count, 0) def test_remove_listeners_multiple_time(self): callback = mock.Mock(return_value=None) self.backend.add_listener(callback) self.backend.add_listener(callback) self.backend.add_listener(callback) self.backend.remove_listener(callback) self.backend.remove_listener(callback) self.backend.remove_listener(callback) self.backend._notify_listeners(self.raw_value) self.assertEqual(callback.call_count, 0) def test_remove_listeners_multiple_time_keeping_others(self): callback = mock.Mock(return_value=None) self.backend.add_listener(callback) self.backend.add_listener(callback) self.backend.add_listener(callback) self.backend.remove_listener(callback) self.backend._notify_listeners(self.raw_value) self.assertEqual(callback.call_count, 2) def test_remove_listeners_more_than_you_should(self): callback = mock.Mock(return_value=None) self.backend.add_listener(callback) self.backend.remove_listener(callback) with self.assertRaises(ValueError): self.backend.remove_listener(callback) def test_listener_exception(self): callback = mock.Mock(side_effect=Exception('foo')) self.backend.add_listener(callback) with self.assertRaises(Exception): self.backend._notify_listeners(self.raw_value) def test_listener_log_exception_on_error(self): logger_mock = mock.Mock() backend = FakeBackend( {'/some/path': self.raw_value}, logger=logger_mock) callback = mock.Mock(side_effect=Exception('foo')) backend.add_listener(callback) with self.assertRaises(Exception): backend._notify_listeners(self.raw_value) self.assertTrue(logger_mock.exception.called)
apache-2.0
drewrobb/bamboo
configuration/haproxy.go
268
package configuration type HAProxy struct { TemplatePath string OutputPath string ReloadCommand string ReloadValidationCommand string ReloadCleanupCommand string ShutdownCommand string GraceSeconds int }
apache-2.0
simonhorlick/grpc-java
benchmarks/src/generated/main/grpc/io/grpc/benchmarks/proto/ReportQpsScenarioServiceGrpc.java
11694
package io.grpc.benchmarks.proto; import static io.grpc.MethodDescriptor.generateFullMethodName; import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall; import static io.grpc.stub.ClientCalls.asyncClientStreamingCall; import static io.grpc.stub.ClientCalls.asyncServerStreamingCall; import static io.grpc.stub.ClientCalls.asyncUnaryCall; import static io.grpc.stub.ClientCalls.blockingServerStreamingCall; import static io.grpc.stub.ClientCalls.blockingUnaryCall; import static io.grpc.stub.ClientCalls.futureUnaryCall; import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall; import static io.grpc.stub.ServerCalls.asyncClientStreamingCall; import static io.grpc.stub.ServerCalls.asyncServerStreamingCall; import static io.grpc.stub.ServerCalls.asyncUnaryCall; import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall; import static io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall; /** */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: services.proto") public final class ReportQpsScenarioServiceGrpc { private ReportQpsScenarioServiceGrpc() {} public static final String SERVICE_NAME = "grpc.testing.ReportQpsScenarioService"; // Static method descriptors that strictly reflect the proto. @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") @java.lang.Deprecated // Use {@link #getReportScenarioMethod()} instead. public static final io.grpc.MethodDescriptor<io.grpc.benchmarks.proto.Control.ScenarioResult, io.grpc.benchmarks.proto.Control.Void> METHOD_REPORT_SCENARIO = getReportScenarioMethod(); private static volatile io.grpc.MethodDescriptor<io.grpc.benchmarks.proto.Control.ScenarioResult, io.grpc.benchmarks.proto.Control.Void> getReportScenarioMethod; @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901") public static io.grpc.MethodDescriptor<io.grpc.benchmarks.proto.Control.ScenarioResult, io.grpc.benchmarks.proto.Control.Void> getReportScenarioMethod() { io.grpc.MethodDescriptor<io.grpc.benchmarks.proto.Control.ScenarioResult, io.grpc.benchmarks.proto.Control.Void> getReportScenarioMethod; if ((getReportScenarioMethod = ReportQpsScenarioServiceGrpc.getReportScenarioMethod) == null) { synchronized (ReportQpsScenarioServiceGrpc.class) { if ((getReportScenarioMethod = ReportQpsScenarioServiceGrpc.getReportScenarioMethod) == null) { ReportQpsScenarioServiceGrpc.getReportScenarioMethod = getReportScenarioMethod = io.grpc.MethodDescriptor.<io.grpc.benchmarks.proto.Control.ScenarioResult, io.grpc.benchmarks.proto.Control.Void>newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName( "grpc.testing.ReportQpsScenarioService", "ReportScenario")) .setSampledToLocalTracing(true) .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( io.grpc.benchmarks.proto.Control.ScenarioResult.getDefaultInstance())) .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller( io.grpc.benchmarks.proto.Control.Void.getDefaultInstance())) .setSchemaDescriptor(new ReportQpsScenarioServiceMethodDescriptorSupplier("ReportScenario")) .build(); } } } return getReportScenarioMethod; } /** * Creates a new async stub that supports all call types for the service */ public static ReportQpsScenarioServiceStub newStub(io.grpc.Channel channel) { return new ReportQpsScenarioServiceStub(channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static ReportQpsScenarioServiceBlockingStub newBlockingStub( 
io.grpc.Channel channel) { return new ReportQpsScenarioServiceBlockingStub(channel); } /** * Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static ReportQpsScenarioServiceFutureStub newFutureStub( io.grpc.Channel channel) { return new ReportQpsScenarioServiceFutureStub(channel); } /** */ public static abstract class ReportQpsScenarioServiceImplBase implements io.grpc.BindableService { /** * <pre> * Report results of a QPS test benchmark scenario. * </pre> */ public void reportScenario(io.grpc.benchmarks.proto.Control.ScenarioResult request, io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Control.Void> responseObserver) { asyncUnimplementedUnaryCall(getReportScenarioMethod(), responseObserver); } @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getReportScenarioMethod(), asyncUnaryCall( new MethodHandlers< io.grpc.benchmarks.proto.Control.ScenarioResult, io.grpc.benchmarks.proto.Control.Void>( this, METHODID_REPORT_SCENARIO))) .build(); } } /** */ public static final class ReportQpsScenarioServiceStub extends io.grpc.stub.AbstractStub<ReportQpsScenarioServiceStub> { private ReportQpsScenarioServiceStub(io.grpc.Channel channel) { super(channel); } private ReportQpsScenarioServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ReportQpsScenarioServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ReportQpsScenarioServiceStub(channel, callOptions); } /** * <pre> * Report results of a QPS test benchmark scenario. * </pre> */ public void reportScenario(io.grpc.benchmarks.proto.Control.ScenarioResult request, io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Control.Void> responseObserver) { asyncUnaryCall( getChannel().newCall(getReportScenarioMethod(), getCallOptions()), request, responseObserver); } } /** */ public static final class ReportQpsScenarioServiceBlockingStub extends io.grpc.stub.AbstractStub<ReportQpsScenarioServiceBlockingStub> { private ReportQpsScenarioServiceBlockingStub(io.grpc.Channel channel) { super(channel); } private ReportQpsScenarioServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ReportQpsScenarioServiceBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ReportQpsScenarioServiceBlockingStub(channel, callOptions); } /** * <pre> * Report results of a QPS test benchmark scenario. * </pre> */ public io.grpc.benchmarks.proto.Control.Void reportScenario(io.grpc.benchmarks.proto.Control.ScenarioResult request) { return blockingUnaryCall( getChannel(), getReportScenarioMethod(), getCallOptions(), request); } } /** */ public static final class ReportQpsScenarioServiceFutureStub extends io.grpc.stub.AbstractStub<ReportQpsScenarioServiceFutureStub> { private ReportQpsScenarioServiceFutureStub(io.grpc.Channel channel) { super(channel); } private ReportQpsScenarioServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ReportQpsScenarioServiceFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ReportQpsScenarioServiceFutureStub(channel, callOptions); } /** * <pre> * Report results of a QPS test benchmark scenario. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture<io.grpc.benchmarks.proto.Control.Void> reportScenario( io.grpc.benchmarks.proto.Control.ScenarioResult request) { return futureUnaryCall( getChannel().newCall(getReportScenarioMethod(), getCallOptions()), request); } } private static final int METHODID_REPORT_SCENARIO = 0; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final ReportQpsScenarioServiceImplBase serviceImpl; private final int methodId; MethodHandlers(ReportQpsScenarioServiceImplBase serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_REPORT_SCENARIO: serviceImpl.reportScenario((io.grpc.benchmarks.proto.Control.ScenarioResult) request, (io.grpc.stub.StreamObserver<io.grpc.benchmarks.proto.Control.Void>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } private static abstract class ReportQpsScenarioServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { ReportQpsScenarioServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return io.grpc.benchmarks.proto.Services.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("ReportQpsScenarioService"); } } private static final class ReportQpsScenarioServiceFileDescriptorSupplier extends ReportQpsScenarioServiceBaseDescriptorSupplier { ReportQpsScenarioServiceFileDescriptorSupplier() {} } private static final class ReportQpsScenarioServiceMethodDescriptorSupplier extends ReportQpsScenarioServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final String methodName; ReportQpsScenarioServiceMethodDescriptorSupplier(String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (ReportQpsScenarioServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new ReportQpsScenarioServiceFileDescriptorSupplier()) .addMethod(getReportScenarioMethod()) .build(); } } } return result; } }
apache-2.0
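The generated ReportQpsScenarioServiceGrpc class above only defines stubs and descriptors; a caller still has to supply an io.grpc.Channel. A rough usage sketch follows, assuming a plaintext server on localhost:8080 and an empty ScenarioResult message (both are placeholders for illustration, not values taken from the benchmark harness).

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.benchmarks.proto.Control;
import io.grpc.benchmarks.proto.ReportQpsScenarioServiceGrpc;

public class ReportScenarioClient {
    public static void main(String[] args) {
        // Assumed endpoint; real benchmark runs pass in the driver's address.
        ManagedChannel channel = ManagedChannelBuilder.forAddress("localhost", 8080)
                .usePlaintext()
                .build();
        try {
            ReportQpsScenarioServiceGrpc.ReportQpsScenarioServiceBlockingStub stub =
                    ReportQpsScenarioServiceGrpc.newBlockingStub(channel);
            // An empty result message, just to exercise the unary ReportScenario call.
            Control.ScenarioResult result = Control.ScenarioResult.newBuilder().build();
            Control.Void ack = stub.reportScenario(result);
            System.out.println("Scenario reported, ack: " + ack);
        } finally {
            channel.shutdownNow();
        }
    }
}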
agis-/mongo-ruby-driver
lib/mongo/loggable.rb
3737
# Copyright (C) 2014-2015 MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. module Mongo # Allows objects to easily log operations. # # @since 2.0.0 module Loggable # The standard MongoDB log prefix. # # @since 2.0.0 PREFIX = 'MONGODB'.freeze # Log the operations. If a block is provided it will be yielded to, # otherwise only the logging will take place. # # @example Log a query operation. # loggable.log(:debug, "MONGO.query", operations) # # @param [ Symbol ] level The log level. # @param [ String ] prefix The prefix for the log line. # @param [ Array<Object> ] operations The operations to log. The must # respond to #log_message. # # @return [ Object ] The result of the block or nil if no block given. # # @since 2.0.0 def log(level, prefix, operations) started = Time.new begin yield(operations) if block_given? rescue Exception => e raise e ensure if Logger.allow?(level) runtime = format("%.4fms", (Time.now.to_f - started.to_f) * 1000.0) operations.each do |operation| Logger.log(level, prefix, log_inspect(operation), runtime) end end end end # Convenience method to log debug messages with the standard prefix. # # @example Log a debug message. # log_debug([ 'Message' ]) # # @param [ Array<Operation, String> ] operations The operations or messages # to log. # # @since 2.0.0 def log_debug(operations, &block) log(:debug, PREFIX, operations, &block) end # Convenience method to log error messages with the standard prefix. # # @example Log a error message. # log_error([ 'Message' ]) # # @param [ Array<Operation, String> ] operations The operations or messages # to log. # # @since 2.0.0 def log_error(operations, &block) log(:error, PREFIX, operations, &block) end # Convenience method to log fatal messages with the standard prefix. # # @example Log a fatal message. # log_fatal([ 'Message' ]) # # @param [ Array<Operation, String> ] operations The operations or messages # to log. # # @since 2.0.0 def log_fatal(operations, &block) log(:fatal, PREFIX, operations, &block) end # Convenience method to log info messages with the standard prefix. # # @example Log a info message. # log_info([ 'Message' ]) # # @param [ Array<Operation, String> ] operations The operations or messages # to log. # # @since 2.0.0 def log_info(operations, &block) log(:info, PREFIX, operations, &block) end # Convenience method to log warn messages with the standard prefix. # # @example Log a warn message. # log_warn([ 'Message' ]) # # @param [ Array<Operation, String> ] operations The operations or messages # to log. # # @since 2.0.0 def log_warn(operations, &block) log(:warn, PREFIX, operations, &block) end private def log_inspect(operation) operation.respond_to?(:log_message) ? operation.log_message : operation end end end
apache-2.0
apache/commons-io
src/main/java/org/apache/commons/io/output/NullWriter.java
3893
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.io.output; import java.io.Writer; /** * Never writes data. Calls never go beyond this class. * <p> * This {@code Writer} has no destination (file/socket etc.) and all characters written to it are ignored and lost. * </p> */ public class NullWriter extends Writer { /** * The singleton instance. * * @since 2.12.0 */ public static final NullWriter INSTANCE = new NullWriter(); /** * The singleton instance. * * @deprecated Use {@link #INSTANCE}. */ @Deprecated public static final NullWriter NULL_WRITER = INSTANCE; /** * Constructs a new NullWriter. */ public NullWriter() { } /** * Does nothing - output to {@code /dev/null}. * @param c The character to write * @return this writer * @since 2.0 */ @Override public Writer append(final char c) { //to /dev/null return this; } /** * Does nothing - output to {@code /dev/null}. * @param csq The character sequence to write * @return this writer * @since 2.0 */ @Override public Writer append(final CharSequence csq) { //to /dev/null return this; } /** * Does nothing - output to {@code /dev/null}. * @param csq The character sequence to write * @param start The index of the first character to write * @param end The index of the first character to write (exclusive) * @return this writer * @since 2.0 */ @Override public Writer append(final CharSequence csq, final int start, final int end) { //to /dev/null return this; } /** @see java.io.Writer#close() */ @Override public void close() { //to /dev/null } /** @see java.io.Writer#flush() */ @Override public void flush() { //to /dev/null } /** * Does nothing - output to {@code /dev/null}. * @param chr The characters to write */ @Override public void write(final char[] chr) { //to /dev/null } /** * Does nothing - output to {@code /dev/null}. * @param chr The characters to write * @param st The start offset * @param end The number of characters to write */ @Override public void write(final char[] chr, final int st, final int end) { //to /dev/null } /** * Does nothing - output to {@code /dev/null}. * @param idx The character to write */ @Override public void write(final int idx) { //to /dev/null } /** * Does nothing - output to {@code /dev/null}. * @param str The string to write */ @Override public void write(final String str) { //to /dev/null } /** * Does nothing - output to {@code /dev/null}. * @param str The string to write * @param st The start offset * @param end The number of characters to write */ @Override public void write(final String str, final int st, final int end) { //to /dev/null } }
apache-2.0
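Because NullWriter above discards every character it receives, it is useful wherever an API requires a java.io.Writer but the output is irrelevant. A short sketch follows; the dumpDiagnostics method is an assumed placeholder for any such API, not something from commons-io. Note that the INSTANCE constant appears in commons-io 2.12.0 and later, per the javadoc above; on older versions new NullWriter() behaves the same.

import java.io.PrintWriter;
import java.io.Writer;

import org.apache.commons.io.output.NullWriter;

public class NullWriterExample {

    // Placeholder for any method that insists on a Writer for its log/progress output.
    static void dumpDiagnostics(Writer out) {
        PrintWriter writer = new PrintWriter(out);
        writer.println("verbose diagnostic text the caller may not want");
        writer.flush();
    }

    public static void main(String[] args) {
        // Route the diagnostics to /dev/null: NullWriter never buffers or writes anything.
        dumpDiagnostics(NullWriter.INSTANCE);
    }
}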
sharifulgeo/arcgis-runtime-samples-dotnet
src/Desktop/ArcGISRuntimeSamplesDesktopViewer/Properties/AssemblyInfo.cs
2215
using System.Reflection; using System.Runtime.InteropServices; using System.Windows; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("ArcGISRuntime.Samples.DesktopViewer")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("ArcGISRuntime.Samples.DesktopViewer")] [assembly: AssemblyCopyright("Copyright © 2014")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] //In order to begin building localizable applications, set //<UICulture>CultureYouAreCodingWith</UICulture> in your .csproj file //inside a <PropertyGroup>. For example, if you are using US english //in your source files, set the <UICulture> to en-US. Then uncomment //the NeutralResourceLanguage attribute below. Update the "en-US" in //the line below to match the UICulture setting in the project file. //[assembly: NeutralResourcesLanguage("en-US", UltimateResourceFallbackLocation.Satellite)] [assembly: ThemeInfo( ResourceDictionaryLocation.None, //where theme specific resource dictionaries are located //(used if a resource is not found in the page, // or application resource dictionaries) ResourceDictionaryLocation.SourceAssembly //where the generic resource dictionary is located //(used if a resource is not found in the page, // app, or any theme specific resource dictionaries) )] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
apache-2.0
Noora-q/quizbot-alexa
src/node_modules/bespoken-tools/lib/client/bespoke-client.js
6044
"use strict"; const global_1 = require("../core/global"); const socket_handler_1 = require("../core/socket-handler"); const webhook_request_1 = require("../core/webhook-request"); const tcp_client_1 = require("./tcp-client"); const global_2 = require("../core/global"); const logging_helper_1 = require("../core/logging-helper"); const keep_alive_1 = require("./keep-alive"); const string_util_1 = require("../core/string-util"); const http_buffer_1 = require("../core/http-buffer"); const Logger = "BST-CLIENT"; class BespokeClient { constructor(nodeID, host, port, targetDomain, targetPort) { this.nodeID = nodeID; this.host = host; this.port = port; this.targetDomain = targetDomain; this.targetPort = targetPort; this.onConnect = null; this.shuttingDown = false; } connect(onConnect) { let self = this; if (onConnect !== undefined && onConnect !== null) { this.onConnect = onConnect; } this.socketHandler = socket_handler_1.SocketHandler.connect(this.host, this.port, function (error) { self.connected(error); }, function (data, messageID) { self.messageReceived(data, messageID); }); this.socketHandler.onCloseCallback = function () { if (!self.shuttingDown) { logging_helper_1.LoggingHelper.error(Logger, "Socket closed by bst server: " + self.host + ":" + self.port); logging_helper_1.LoggingHelper.error(Logger, "Check your network settings - and try connecting again."); logging_helper_1.LoggingHelper.error(Logger, "If the issue persists, contact us at Bespoken:"); logging_helper_1.LoggingHelper.error(Logger, "\thttps://gitter.im/bespoken/bst"); self.shutdown(); } }; this.keepAlive = this.newKeepAlive(this.socketHandler); this.keepAlive.start(function () { logging_helper_1.LoggingHelper.error(Logger, "Socket not communicating with bst server: " + self.socketHandler.remoteEndPoint()); logging_helper_1.LoggingHelper.error(Logger, "Check your network settings - and maybe try connecting again."); logging_helper_1.LoggingHelper.error(Logger, "If the issue persists, contact us at Bespoken:"); logging_helper_1.LoggingHelper.error(Logger, "\thttps://gitter.im/bespoken/bst"); }); } newKeepAlive(handler) { return new keep_alive_1.KeepAlive(handler); } onWebhookReceived(request) { let self = this; logging_helper_1.LoggingHelper.info(Logger, "RequestReceived: " + request.toString() + " ID: " + request.id()); logging_helper_1.LoggingHelper.verbose(Logger, "Payload:\n" + string_util_1.StringUtil.prettyPrintJSON(request.body)); let tcpClient = new tcp_client_1.TCPClient(request.id() + ""); let httpBuffer = new http_buffer_1.HTTPBuffer(); tcpClient.transmit(self.targetDomain, self.targetPort, request.toTCP(), function (data, error, message) { if (data != null) { httpBuffer.append(data); if (httpBuffer.complete()) { logging_helper_1.LoggingHelper.info(Logger, "ResponseReceived ID: " + request.id()); let payload = null; if (httpBuffer.isJSON()) { payload = string_util_1.StringUtil.prettyPrintJSON(httpBuffer.body().toString()); } else { payload = httpBuffer.body().toString(); } logging_helper_1.LoggingHelper.verbose(Logger, "Payload:\n" + payload); self.socketHandler.send(httpBuffer.raw().toString(), request.id()); } } else if (error !== null && error !== undefined) { if (error === global_2.NetworkErrorType.CONNECTION_REFUSED) { logging_helper_1.LoggingHelper.error(Logger, "CLIENT Connection Refused, Port " + self.targetPort + ". 
Is your server running?"); } const errorMessage = "BST Proxy - Local Forwarding Error\n" + message; self.socketHandler.send(http_buffer_1.HTTPBuffer.errorResponse(errorMessage).raw().toString(), request.id()); if (self.onError != null) { self.onError(error, message); } } }); } connected(error) { if (error !== undefined && error !== null) { logging_helper_1.LoggingHelper.error(Logger, "Unable to connect to: " + this.host + ":" + this.port); this.shutdown(); if (this.onConnect !== undefined && this.onConnect !== null) { this.onConnect(error); } } else { logging_helper_1.LoggingHelper.info(Logger, "Connected - " + this.host + ":" + this.port); let messageJSON = { "id": this.nodeID }; let message = JSON.stringify(messageJSON); this.socketHandler.send(message); if (this.onConnect !== undefined && this.onConnect !== null) { this.onConnect(); } } } messageReceived(message, messageID) { if (message.indexOf("ACK") !== -1) { } else if (message.indexOf(global_1.Global.KeepAliveMessage) !== -1) { this.keepAlive.received(); } else { this.onWebhookReceived(webhook_request_1.WebhookRequest.fromString(this.socketHandler.socket, message, messageID)); } } shutdown(callback) { logging_helper_1.LoggingHelper.info(Logger, "Shutting down proxy"); this.shuttingDown = true; this.keepAlive.stop(); this.socketHandler.disconnect(); if (callback !== undefined && callback !== null) { callback(); } } } exports.BespokeClient = BespokeClient; //# sourceMappingURL=bespoke-client.js.map
apache-2.0
cedral/aws-sdk-cpp
aws-cpp-sdk-dynamodb/source/model/ContinuousBackupsDescription.cpp
2447
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include <aws/dynamodb/model/ContinuousBackupsDescription.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace DynamoDB { namespace Model { ContinuousBackupsDescription::ContinuousBackupsDescription() : m_continuousBackupsStatus(ContinuousBackupsStatus::NOT_SET), m_continuousBackupsStatusHasBeenSet(false), m_pointInTimeRecoveryDescriptionHasBeenSet(false) { } ContinuousBackupsDescription::ContinuousBackupsDescription(JsonView jsonValue) : m_continuousBackupsStatus(ContinuousBackupsStatus::NOT_SET), m_continuousBackupsStatusHasBeenSet(false), m_pointInTimeRecoveryDescriptionHasBeenSet(false) { *this = jsonValue; } ContinuousBackupsDescription& ContinuousBackupsDescription::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("ContinuousBackupsStatus")) { m_continuousBackupsStatus = ContinuousBackupsStatusMapper::GetContinuousBackupsStatusForName(jsonValue.GetString("ContinuousBackupsStatus")); m_continuousBackupsStatusHasBeenSet = true; } if(jsonValue.ValueExists("PointInTimeRecoveryDescription")) { m_pointInTimeRecoveryDescription = jsonValue.GetObject("PointInTimeRecoveryDescription"); m_pointInTimeRecoveryDescriptionHasBeenSet = true; } return *this; } JsonValue ContinuousBackupsDescription::Jsonize() const { JsonValue payload; if(m_continuousBackupsStatusHasBeenSet) { payload.WithString("ContinuousBackupsStatus", ContinuousBackupsStatusMapper::GetNameForContinuousBackupsStatus(m_continuousBackupsStatus)); } if(m_pointInTimeRecoveryDescriptionHasBeenSet) { payload.WithObject("PointInTimeRecoveryDescription", m_pointInTimeRecoveryDescription.Jsonize()); } return payload; } } // namespace Model } // namespace DynamoDB } // namespace Aws
apache-2.0
psibre/AuToBI
src/edu/cuny/qc/speech/AuToBI/featureextractor/NormalizationParameterFeatureExtractor.java
4187
/* NormalizationParameterFeatureExtractor.java Copyright (c) 2011-2014 Andrew Rosenberg This file is part of the AuToBI prosodic analysis package. AuToBI is free software: you can redistribute it and/or modify it under the terms of the Apache License (see boilerplate below) *********************************************************************************************************************** * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You should have received a copy of the Apache 2.0 License along with AuToBI. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * *********************************************************************************************************************** */ package edu.cuny.qc.speech.AuToBI.featureextractor; import edu.cuny.qc.speech.AuToBI.core.*; import edu.cuny.qc.speech.AuToBI.util.ContourUtils; import java.util.List; /** * NormalizationParameterFeatureExtractor generates a set of normalization parameters based on pitch and intensity * across the full set of regions. * <p/> * This is used in situations where there is not previously generated speaker normalization parameters. * <p/> * The operation does assume, however, that the file contains speech by a single speaker. */ @SuppressWarnings("unchecked") public class NormalizationParameterFeatureExtractor extends FeatureExtractor { public static final String moniker = "spkrNormParams"; private String destination_feature; // the destination feature name /** * Constructs a new NormalizationParameterFeatureExtractor. * <p/> * Currently this class requires "f0" and "I" attributes, and generates normalization parameters for pitch and * intensity. * * @param destination_feature the name of the feature to store the feature in. */ @Deprecated public NormalizationParameterFeatureExtractor(String destination_feature) { this.destination_feature = destination_feature; this.extracted_features.add(destination_feature); // TODO: allow the normalized features to be specified through parameters this.required_features.add("f0"); this.required_features.add("I"); } public NormalizationParameterFeatureExtractor() { this.destination_feature = moniker; this.extracted_features.add(destination_feature); // TODO: allow the normalized features to be specified through parameters this.required_features.add("f0"); this.required_features.add("I"); } /** * Generates a SpeakerNormalizationParameter across all available pitch and intensity information and associates this * object with each region. * * @param regions The regions to extract features from. * @throws FeatureExtractorException if there is a problem. 
*/ @Override public void extractFeatures(List regions) throws FeatureExtractorException { SpeakerNormalizationParameter snp = new SpeakerNormalizationParameter(); for (Region r : (List<Region>) regions) { if (r.hasAttribute("f0")) { Contour pitch; try { pitch = ContourUtils.getSubContour((Contour) r.getAttribute("f0"), r.getStart(), r.getEnd()); } catch (AuToBIException e) { throw new FeatureExtractorException(e.getMessage()); } snp.insertPitch(pitch); } if (r.hasAttribute("I")) { Contour intensity; try { intensity = ContourUtils.getSubContour((Contour) r.getAttribute("I"), r.getStart(), r.getEnd()); } catch (AuToBIException e) { throw new FeatureExtractorException(e.getMessage()); } snp.insertIntensity(intensity); } } for (Region r : (List<Region>) regions) { r.setAttribute(destination_feature, snp); } } }
apache-2.0
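As its javadoc explains, NormalizationParameterFeatureExtractor above reads the "f0" and "I" contours already attached to each region and stores one shared SpeakerNormalizationParameter on every region under "spkrNormParams". A hedged sketch of driving it follows; it assumes the regions were populated with pitch and intensity contours by earlier extractors, which is not shown here.

import java.util.List;

import edu.cuny.qc.speech.AuToBI.core.Region;
import edu.cuny.qc.speech.AuToBI.core.SpeakerNormalizationParameter;
import edu.cuny.qc.speech.AuToBI.featureextractor.FeatureExtractorException;
import edu.cuny.qc.speech.AuToBI.featureextractor.NormalizationParameterFeatureExtractor;

public class NormalizationSketch {

    // The regions are assumed to already carry "f0" and "I" Contour attributes.
    static void attachNormalizationParams(List<Region> regions) throws FeatureExtractorException {
        NormalizationParameterFeatureExtractor extractor = new NormalizationParameterFeatureExtractor();
        extractor.extractFeatures(regions);

        // Every region now shares the same speaker-level normalization object.
        for (Region r : regions) {
            SpeakerNormalizationParameter snp =
                    (SpeakerNormalizationParameter) r.getAttribute("spkrNormParams");
            // ... snp can now be used to z-score normalize pitch/intensity features ...
        }
    }
}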
typesafehub/async-http-client
providers/netty/src/test/java/org/asynchttpclient/providers/netty/RetryNonBlockingIssue.java
11005
/* * Copyright (c) 2010-2012 Sonatype, Inc. All rights reserved. * * This program is licensed to you under the Apache License Version 2.0, * and you may not use this file except in compliance with the Apache License Version 2.0. * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, * software distributed under the Apache License Version 2.0 is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. */ package org.asynchttpclient.providers.netty; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.AsyncHttpClientConfig; import org.asynchttpclient.ListenableFuture; import org.asynchttpclient.RequestBuilder; import org.asynchttpclient.Response; import org.asynchttpclient.providers.netty.NettyAsyncHttpProviderConfig; import org.asynchttpclient.Request; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.nio.SelectChannelConnector; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.net.ServerSocket; import java.net.URI; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import static org.testng.Assert.assertTrue; public class RetryNonBlockingIssue { private URI servletEndpointUri; private Server server; private int port1; public static int findFreePort() throws IOException { ServerSocket socket = null; try { // 0 is open a socket on any free port socket = new ServerSocket(0); return socket.getLocalPort(); } finally { if (socket != null) { socket.close(); } } } @BeforeMethod public void setUp() throws Exception { server = new Server(); port1 = findFreePort(); Connector listener = new SelectChannelConnector(); listener.setHost("127.0.0.1"); listener.setPort(port1); server.addConnector(listener); ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); context.setContextPath("/"); server.setHandler(context); context.addServlet(new ServletHolder(new MockExceptionServlet()), "/*"); server.start(); servletEndpointUri = new URI("http://127.0.0.1:" + port1 + "/"); } @AfterMethod public void stop() { try { if (server != null) server.stop(); } catch (Exception e) { } } private ListenableFuture<Response> testMethodRequest(AsyncHttpClient fetcher, int requests, String action, String id) throws IOException { RequestBuilder builder = new RequestBuilder("GET"); builder.addQueryParameter(action, "1"); builder.addQueryParameter("maxRequests", "" + requests); builder.addQueryParameter("id", id); builder.setUrl(servletEndpointUri.toString()); Request r = builder.build(); return fetcher.executeRequest(r); } /** * Tests that a head request can be made * * @throws IOException * @throws ExecutionException * @throws InterruptedException */ @Test public void testRetryNonBlocking() throws IOException, 
InterruptedException, ExecutionException { AsyncHttpClient c = null; List<ListenableFuture<Response>> res = new ArrayList<ListenableFuture<Response>>(); try { AsyncHttpClientConfig.Builder bc = new AsyncHttpClientConfig.Builder(); bc.setAllowPoolingConnection(true); bc.setMaximumConnectionsTotal(100); bc.setConnectionTimeoutInMs(60000); bc.setRequestTimeoutInMs(30000); NettyAsyncHttpProviderConfig config = new NettyAsyncHttpProviderConfig(); bc.setAsyncHttpClientProviderConfig(config); c = new AsyncHttpClient(bc.build()); for (int i = 0; i < 32; i++) { res.add(testMethodRequest(c, 3, "servlet", UUID.randomUUID().toString())); } StringBuilder b = new StringBuilder(); for (ListenableFuture<Response> r : res) { Response theres = r.get(); b.append("==============\r\n"); b.append("Response Headers\r\n"); Map<String, List<String>> heads = theres.getHeaders(); b.append(heads + "\r\n"); b.append("==============\r\n"); assertTrue(heads.size() > 0); } System.out.println(b.toString()); System.out.flush(); } finally { if (c != null) c.close(); } } @Test public void testRetryNonBlockingAsyncConnect() throws IOException, InterruptedException, ExecutionException { AsyncHttpClient c = null; List<ListenableFuture<Response>> res = new ArrayList<ListenableFuture<Response>>(); try { AsyncHttpClientConfig.Builder bc = new AsyncHttpClientConfig.Builder(); bc.setAllowPoolingConnection(true); bc.setMaximumConnectionsTotal(100); bc.setConnectionTimeoutInMs(60000); bc.setRequestTimeoutInMs(30000); bc.setAsyncConnectMode(true); c = new AsyncHttpClient(bc.build()); for (int i = 0; i < 32; i++) { res.add(testMethodRequest(c, 3, "servlet", UUID.randomUUID().toString())); } StringBuilder b = new StringBuilder(); for (ListenableFuture<Response> r : res) { Response theres = r.get(); b.append("==============\r\n"); b.append("Response Headers\r\n"); Map<String, List<String>> heads = theres.getHeaders(); b.append(heads + "\r\n"); b.append("==============\r\n"); assertTrue(heads.size() > 0); } System.out.println(b.toString()); System.out.flush(); } finally { if (c != null) c.close(); } } @Test public void testRetryBlocking() throws IOException, InterruptedException, ExecutionException { AsyncHttpClient c = null; List<ListenableFuture<Response>> res = new ArrayList<ListenableFuture<Response>>(); try { AsyncHttpClientConfig.Builder bc = new AsyncHttpClientConfig.Builder(); bc.setAllowPoolingConnection(true); bc.setMaximumConnectionsTotal(100); bc.setConnectionTimeoutInMs(30000); bc.setRequestTimeoutInMs(30000); NettyAsyncHttpProviderConfig config = new NettyAsyncHttpProviderConfig(); config.setUseBlockingIO(true); bc.setAsyncHttpClientProviderConfig(config); c = new AsyncHttpClient(bc.build()); for (int i = 0; i < 32; i++) { res.add(testMethodRequest(c, 3, "servlet", UUID.randomUUID().toString())); } StringBuilder b = new StringBuilder(); for (ListenableFuture<Response> r : res) { Response theres = r.get(); b.append("==============\r\n"); b.append("Response Headers\r\n"); Map<String, List<String>> heads = theres.getHeaders(); b.append(heads + "\r\n"); b.append("==============\r\n"); assertTrue(heads.size() > 0); } System.out.println(b.toString()); System.out.flush(); } finally { if (c != null) c.close(); } } @SuppressWarnings("serial") public class MockExceptionServlet extends HttpServlet { private Map<String, Integer> requests = new ConcurrentHashMap<String, Integer>(); private synchronized int increment(String id) { int val = 0; if (requests.containsKey(id)) { Integer i = requests.get(id); val = i + 1; requests.put(id, val); 
} else { requests.put(id, 1); val = 1; } System.out.println("REQUESTS: " + requests); return val; } public void service(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { String maxRequests = req.getParameter("maxRequests"); int max = 0; try { max = Integer.parseInt(maxRequests); } catch (NumberFormatException e) { max = 3; } String id = req.getParameter("id"); int requestNo = increment(id); String servlet = req.getParameter("servlet"); String io = req.getParameter("io"); String error = req.getParameter("500"); if (requestNo >= max) { res.setHeader("Success-On-Attempt", "" + requestNo); res.setHeader("id", id); if (servlet != null && servlet.trim().length() > 0) res.setHeader("type", "servlet"); if (error != null && error.trim().length() > 0) res.setHeader("type", "500"); if (io != null && io.trim().length() > 0) res.setHeader("type", "io"); res.setStatus(200); res.setContentLength(0); return; } res.setStatus(200); res.setContentLength(100); res.setContentType("application/octet-stream"); res.flushBuffer(); if (servlet != null && servlet.trim().length() > 0) throw new ServletException("Servlet Exception"); if (io != null && io.trim().length() > 0) throw new IOException("IO Exception"); if (error != null && error.trim().length() > 0) res.sendError(500, "servlet process was 500"); } } }
apache-2.0
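The retry tests above all build a pooled AsyncHttpClient with an explicit connection cap, connect timeout and request timeout, then fire batches of requests at the MockExceptionServlet. The following is a minimal standalone sketch of that client-setup pattern only; it assumes the com.ning.http.client packages of AsyncHttpClient 1.x (which match the builder methods used in the tests), and the target URL is a placeholder rather than anything taken from the test harness.

import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.AsyncHttpClientConfig;
import com.ning.http.client.ListenableFuture;
import com.ning.http.client.Response;

public class PooledClientSketch {
    public static void main(String[] args) throws Exception {
        // Same knobs the tests turn: connection pooling, a total-connection cap and explicit timeouts.
        AsyncHttpClientConfig.Builder bc = new AsyncHttpClientConfig.Builder();
        bc.setAllowPoolingConnection(true);
        bc.setMaximumConnectionsTotal(100);
        bc.setConnectionTimeoutInMs(60000);
        bc.setRequestTimeoutInMs(30000);

        AsyncHttpClient client = new AsyncHttpClient(bc.build());
        try {
            // Placeholder URL; the tests instead target the MockExceptionServlet with id/maxRequests parameters.
            ListenableFuture<Response> future =
                    client.prepareGet("http://localhost:8080/retry?id=abc&maxRequests=3&servlet=true").execute();
            Response response = future.get();
            System.out.println("Response headers: " + response.getHeaders());
        } finally {
            client.close();
        }
    }
}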
fitash/epnoi
knowledgebase/src/main/java/org/epnoi/knowledgebase/wikidata/CassandraWikidataView.java
5440
package org.epnoi.knowledgebase.wikidata; import org.epnoi.model.modules.Core; import org.epnoi.model.modules.InformationStoreHelper; import org.epnoi.uia.informationstore.CassandraInformationStore; import org.epnoi.uia.informationstore.dao.cassandra.WikidataViewCassandraHelper; import java.util.HashSet; import java.util.Set; public class CassandraWikidataView { private String URI; Core core; CassandraInformationStore cassandraInformationStore; private final String dictionaryURI; private final String reverseDictionaryURI; private final String relationsURI; // ------------------------------------------------------------------------------------------------------ public CassandraWikidataView(Core core, String URI) { this.core=core; this.URI = URI; this.cassandraInformationStore =(CassandraInformationStore) this.core.getInformationHandler() .getInformationStoresByType(InformationStoreHelper.CASSANDRA_INFORMATION_STORE).get(0); this.dictionaryURI=this.URI+"/dictionary"; this.reverseDictionaryURI=this.URI+"/reverseDictionary"; this.relationsURI=this.URI+"/relations"; } // ------------------------------------------------------------------------------------------------------ public String getURI() { return URI; } // ------------------------------------------------------------------------------------------------------ public Set<String> getRelated(String sourceLabel, String type) { Set<String> targetLabels = new HashSet<String>(); //Map<String, Set<String>> consideredRelations = this.wikidataView.getRelations().get(type); // Firstly we retrieve the IRIs of the source label Set<String> sourceIRIs = this.getIRIsOfLabel(sourceLabel); // System.out.println("Inital sourceIRIs obtained from the label" +sourceIRIs); if (sourceIRIs != null) { for (String sourceIRI : sourceIRIs) { //System.out.println("sourceIRI " + sourceIRI); Set<String> targetIRIs = this.getIRIRelatedIRIs(type, sourceIRI); // System.out.println(" ("+sourceIRI+") targetIRIs " + // targetIRIs); if (targetIRIs != null) { for (String targetIRI : targetIRIs) { // System.out.println(" trying > "+ targetIRI); // // .getLabelsReverseDictionary().get( // targetIRI)); if (targetIRI != null) { if (this.getLabelsOfIRI(targetIRI) != null) { for (String destinationTarget : this.getLabelsOfIRI(targetIRI)) { targetLabels.add(destinationTarget); } } } } } } } return targetLabels; } // ------------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------------ /* FOR_TEST public static void main(String[] args) { Core core = CoreUtility.getUIACore(); WikidataHandlerParameters parameters = new WikidataHandlerParameters(); parameters.setParameter(WikidataHandlerParameters.WIKIDATA_VIEW_URI, WikidataHandlerParameters.DEFAULT_URI); parameters.setParameter(WikidataHandlerParameters.OFFLINE_MODE, true); parameters.setParameter(WikidataHandlerParameters.DUMP_FILE_MODE, DumpProcessingMode.JSON); parameters.setParameter(WikidataHandlerParameters.TIMEOUT, 100); parameters.setParameter(WikidataHandlerParameters.DUMP_PATH, "/opt/epnoi/epnoideployment/wikidata"); WikidataViewCreator wikidataViewCreator = new WikidataViewCreator(); try { wikidataViewCreator.init(core, parameters); } catch (EpnoiInitializationException e) { e.printStackTrace(); } long currentTime = System.currentTimeMillis(); CassandraWikidataView wikidataView = wikidataViewCreator.retrieve(WikidataHandlerParameters.DEFAULT_URI); System.out.println("It took " + 
(System.currentTimeMillis() - currentTime) + " to retrieve the wikidata view"); currentTime = System.currentTimeMillis(); wikidataView.count(); System.out.println("It took " + (System.currentTimeMillis() - currentTime) + " to clean the wikidata view"); } */ // ------------------------------------------------------------------------------------------------------ /** * Given a label it returns all the IRIs of items with such label * * @param label * @return */ public Set<String> getIRIsOfLabel(String label) { System.out.println("_---------------------------------------------------------------________>"+label+"<"); //String labelIRI = this.URI + "/labels#" + label; if(label.length()>1) { return cassandraInformationStore.getQueryResolver().getValues(this.dictionaryURI, label, WikidataViewCassandraHelper.COLUMN_FAMILY); } return new HashSet<>(); } // ------------------------------------------------------------------------------------------------------ /** * * @param type * @param sourceIRI * @return */ public Set<String> getIRIRelatedIRIs(String type, String sourceIRI) { return cassandraInformationStore.getQueryResolver().getValues(this.relationsURI+"/"+type,sourceIRI, WikidataViewCassandraHelper.COLUMN_FAMILY); } // ------------------------------------------------------------------------------------------------------ /** * * @param targetIRI * @return */ public Set<String> getLabelsOfIRI(String IRI) { return this.cassandraInformationStore.getQueryResolver().getValues(this.reverseDictionaryURI, IRI, WikidataViewCassandraHelper.COLUMN_FAMILY); } }
apache-2.0
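A minimal sketch of how the CassandraWikidataView above can be queried. It assumes an already initialized epnoi Core (the commented-out test inside the class obtains one via CoreUtility.getUIACore()) and a view URI such as the WikidataHandlerParameters.DEFAULT_URI referenced there; the "hypernym" relation type in the usage note below is purely illustrative, since the valid type strings depend on how the WikidataView was built.

import java.util.Set;

import org.epnoi.knowledgebase.wikidata.CassandraWikidataView;
import org.epnoi.model.modules.Core;

public class WikidataViewQuerySketch {

    // Looks up all labels reachable from the given label through one relation type,
    // delegating to getRelated(), which walks label -> IRIs -> related IRIs -> labels.
    public static Set<String> relatedLabels(Core core, String viewUri, String label, String relationType) {
        CassandraWikidataView view = new CassandraWikidataView(core, viewUri);
        return view.getRelated(label, relationType);
    }
}

With a live Cassandra-backed information store, a call such as relatedLabels(core, viewUri, "dog", "hypernym") would return the labels of related items, mirroring what getRelated does internally.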
Xhanim/libgdx
extensions/gdx-bullet/jni/swig-src/collision/com/badlogic/gdx/physics/bullet/collision/btContactArray.java
2862
/* ---------------------------------------------------------------------------- * This file was automatically generated by SWIG (http://www.swig.org). * Version 3.0.8 * * Do not make changes to this file unless you know what you are doing--modify * the SWIG interface file instead. * ----------------------------------------------------------------------------- */ package com.badlogic.gdx.physics.bullet.collision; import com.badlogic.gdx.physics.bullet.BulletBase; import com.badlogic.gdx.physics.bullet.linearmath.*; import com.badlogic.gdx.math.Vector3; import com.badlogic.gdx.math.Quaternion; import com.badlogic.gdx.math.Matrix3; import com.badlogic.gdx.math.Matrix4; public class btContactArray extends BulletBase { private long swigCPtr; protected btContactArray(final String className, long cPtr, boolean cMemoryOwn) { super(className, cPtr, cMemoryOwn); swigCPtr = cPtr; } /** Construct a new btContactArray, normally you should not need this constructor it's intended for low-level usage. */ public btContactArray(long cPtr, boolean cMemoryOwn) { this("btContactArray", cPtr, cMemoryOwn); construct(); } @Override protected void reset(long cPtr, boolean cMemoryOwn) { if (!destroyed) destroy(); super.reset(swigCPtr = cPtr, cMemoryOwn); } public static long getCPtr(btContactArray obj) { return (obj == null) ? 0 : obj.swigCPtr; } @Override protected void finalize() throws Throwable { if (!destroyed) destroy(); super.finalize(); } @Override protected synchronized void delete() { if (swigCPtr != 0) { if (swigCMemOwn) { swigCMemOwn = false; CollisionJNI.delete_btContactArray(swigCPtr); } swigCPtr = 0; } super.delete(); } public btContactArray() { this(CollisionJNI.new_btContactArray(), true); } public void push_contact(Vector3 point, Vector3 normal, float depth, int feature1, int feature2) { CollisionJNI.btContactArray_push_contact(swigCPtr, this, point, normal, depth, feature1, feature2); } public void push_triangle_contacts(GIM_TRIANGLE_CONTACT tricontact, int feature1, int feature2) { CollisionJNI.btContactArray_push_triangle_contacts(swigCPtr, this, GIM_TRIANGLE_CONTACT.getCPtr(tricontact), tricontact, feature1, feature2); } public void merge_contacts(btContactArray contacts, boolean normal_contact_average) { CollisionJNI.btContactArray_merge_contacts__SWIG_0(swigCPtr, this, btContactArray.getCPtr(contacts), contacts, normal_contact_average); } public void merge_contacts(btContactArray contacts) { CollisionJNI.btContactArray_merge_contacts__SWIG_1(swigCPtr, this, btContactArray.getCPtr(contacts), contacts); } public void merge_contacts_unique(btContactArray contacts) { CollisionJNI.btContactArray_merge_contacts_unique(swigCPtr, this, btContactArray.getCPtr(contacts), contacts); } }
apache-2.0
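A short usage sketch for the SWIG wrapper above. It assumes the gdx-bullet natives are available and loaded through Bullet.init() before any wrapper class is touched; the values passed to push_contact are arbitrary illustration data, not anything prescribed by the API.

import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.physics.bullet.Bullet;
import com.badlogic.gdx.physics.bullet.collision.btContactArray;

public class ContactArraySketch {
    public static void main(String[] args) {
        Bullet.init(); // load the native gdx-bullet library first

        btContactArray contacts = new btContactArray();
        // contact point, contact normal, penetration depth and two feature ids
        contacts.push_contact(new Vector3(0f, 1f, 0f), new Vector3(0f, 1f, 0f), 0.01f, 0, 1);

        btContactArray merged = new btContactArray();
        merged.merge_contacts(contacts); // single-argument overload shown in the wrapper above

        // The wrappers own native memory (cMemoryOwn == true), so dispose explicitly
        // rather than relying on finalize().
        merged.dispose();
        contacts.dispose();
    }
}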
JetBrains/resharper-unity
resharper/resharper-unity/test/data/Unity/CSharp/Daemon/Stages/PerformanceCriticalCodeAnalysis/DisabledWarningTest.cs
241
using UnityEngine; public class Test : MonoBehaviour { public void Update() { // ReSharper disable once Unity.PerformanceCriticalCodeCameraMain var x = Camera.main; var fov = Camera.main.fieldOfView; } }
apache-2.0
mylog00/flink
flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/contrib/streaming/state/RocksDBReducingState.java
5873
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.contrib.streaming.state; import org.apache.flink.api.common.functions.ReduceFunction; import org.apache.flink.api.common.state.ReducingState; import org.apache.flink.api.common.state.ReducingStateDescriptor; import org.apache.flink.api.common.state.State; import org.apache.flink.api.common.state.StateDescriptor; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.runtime.state.RegisteredKeyValueStateBackendMetaInfo; import org.apache.flink.runtime.state.internal.InternalReducingState; import org.apache.flink.util.FlinkRuntimeException; import org.rocksdb.ColumnFamilyHandle; import java.util.Collection; /** * {@link ReducingState} implementation that stores state in RocksDB. * * @param <K> The type of the key. * @param <N> The type of the namespace. * @param <V> The type of value that the state state stores. */ class RocksDBReducingState<K, N, V> extends AbstractRocksDBAppendingState<K, N, V, V, V, ReducingState<V>> implements InternalReducingState<K, N, V> { /** User-specified reduce function. */ private final ReduceFunction<V> reduceFunction; /** * Creates a new {@code RocksDBReducingState}. * * @param columnFamily The RocksDB column family that this state is associated to. * @param namespaceSerializer The serializer for the namespace. * @param valueSerializer The serializer for the state. * @param defaultValue The default value for the state. * @param reduceFunction The reduce function used for reducing state. * @param backend The backend for which this state is bind to. */ private RocksDBReducingState(ColumnFamilyHandle columnFamily, TypeSerializer<N> namespaceSerializer, TypeSerializer<V> valueSerializer, V defaultValue, ReduceFunction<V> reduceFunction, RocksDBKeyedStateBackend<K> backend) { super(columnFamily, namespaceSerializer, valueSerializer, defaultValue, backend); this.reduceFunction = reduceFunction; } @Override public TypeSerializer<K> getKeySerializer() { return backend.getKeySerializer(); } @Override public TypeSerializer<N> getNamespaceSerializer() { return namespaceSerializer; } @Override public TypeSerializer<V> getValueSerializer() { return valueSerializer; } @Override public V get() { return getInternal(); } @Override public void add(V value) throws Exception { byte[] key = getKeyBytes(); V oldValue = getInternal(key); V newValue = oldValue == null ? 
value : reduceFunction.reduce(oldValue, value); updateInternal(key, newValue); } @Override public void mergeNamespaces(N target, Collection<N> sources) { if (sources == null || sources.isEmpty()) { return; } // cache key and namespace final K key = backend.getCurrentKey(); final int keyGroup = backend.getCurrentKeyGroupIndex(); try { V current = null; // merge the sources to the target for (N source : sources) { if (source != null) { writeKeyWithGroupAndNamespace(keyGroup, key, source, dataOutputView); final byte[] sourceKey = dataOutputView.getCopyOfBuffer(); final byte[] valueBytes = backend.db.get(columnFamily, sourceKey); backend.db.delete(columnFamily, writeOptions, sourceKey); if (valueBytes != null) { dataInputView.setBuffer(valueBytes); V value = valueSerializer.deserialize(dataInputView); if (current != null) { current = reduceFunction.reduce(current, value); } else { current = value; } } } } // if something came out of merging the sources, merge it or write it to the target if (current != null) { // create the target full-binary-key writeKeyWithGroupAndNamespace(keyGroup, key, target, dataOutputView); final byte[] targetKey = dataOutputView.getCopyOfBuffer(); final byte[] targetValueBytes = backend.db.get(columnFamily, targetKey); if (targetValueBytes != null) { dataInputView.setBuffer(targetValueBytes); // target also had a value, merge V value = valueSerializer.deserialize(dataInputView); current = reduceFunction.reduce(current, value); } // serialize the resulting value dataOutputView.clear(); valueSerializer.serialize(current, dataOutputView); // write the resulting value backend.db.put(columnFamily, writeOptions, targetKey, dataOutputView.getCopyOfBuffer()); } } catch (Exception e) { throw new FlinkRuntimeException("Error while merging state in RocksDB", e); } } @SuppressWarnings("unchecked") static <K, N, SV, S extends State, IS extends S> IS create( StateDescriptor<S, SV> stateDesc, Tuple2<ColumnFamilyHandle, RegisteredKeyValueStateBackendMetaInfo<N, SV>> registerResult, RocksDBKeyedStateBackend<K> backend) { return (IS) new RocksDBReducingState<>( registerResult.f0, registerResult.f1.getNamespaceSerializer(), registerResult.f1.getStateSerializer(), stateDesc.getDefaultValue(), ((ReducingStateDescriptor<SV>) stateDesc).getReduceFunction(), backend); } }
apache-2.0
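RocksDBReducingState above is internal: it is instantiated through the static create(...) factory by the RocksDB keyed state backend, never by user code. The sketch below shows the user-facing side that ends up backed by it when the RocksDB state backend is configured on a keyed stream; the class name, state name and the Long running sum are made up for illustration.

import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Collector;

public class RunningSum extends RichFlatMapFunction<Long, Long> {

    private transient ReducingState<Long> sum;

    @Override
    public void open(Configuration parameters) {
        ReducingStateDescriptor<Long> descriptor = new ReducingStateDescriptor<>(
                "running-sum",
                (ReduceFunction<Long>) (a, b) -> a + b, // plays the role of reduceFunction in the class above
                Long.class);
        sum = getRuntimeContext().getReducingState(descriptor);
    }

    @Override
    public void flatMap(Long value, Collector<Long> out) throws Exception {
        sum.add(value);          // on a keyed stream with the RocksDB backend, this lands in RocksDBReducingState#add
        out.collect(sum.get());  // and this in getInternal()
    }
}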
GabrielBrascher/cloudstack
vmware-base/src/main/java/com/cloud/hypervisor/vmware/util/VcenterSessionHandler.java
3054
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.hypervisor.vmware.util; import java.util.Set; import javax.xml.namespace.QName; import javax.xml.soap.SOAPElement; import javax.xml.soap.SOAPException; import javax.xml.soap.SOAPHeader; import javax.xml.ws.handler.MessageContext; import javax.xml.ws.handler.soap.SOAPHandler; import javax.xml.ws.handler.soap.SOAPMessageContext; import org.apache.log4j.Logger; import org.w3c.dom.DOMException; import com.cloud.utils.exception.CloudRuntimeException; public class VcenterSessionHandler implements SOAPHandler<SOAPMessageContext> { public static final Logger s_logger = Logger.getLogger(VcenterSessionHandler.class); private final String vcSessionCookie; public VcenterSessionHandler(String vcSessionCookie) { this.vcSessionCookie = vcSessionCookie; } @Override public boolean handleMessage(SOAPMessageContext smc) { if (isOutgoingMessage(smc)) { try { SOAPHeader header = getSOAPHeader(smc); SOAPElement vcsessionHeader = header.addChildElement(new javax.xml.namespace.QName("#", "vcSessionCookie")); vcsessionHeader.setValue(vcSessionCookie); } catch (DOMException e) { s_logger.debug(e); throw new CloudRuntimeException(e); } catch (SOAPException e) { s_logger.debug(e); throw new CloudRuntimeException(e); } } return true; } @Override public void close(MessageContext arg0) { } @Override public boolean handleFault(SOAPMessageContext arg0) { return false; } @Override public Set<QName> getHeaders() { return null; } SOAPHeader getSOAPHeader(SOAPMessageContext smc) throws SOAPException { return smc.getMessage().getSOAPPart().getEnvelope().getHeader() == null ? smc .getMessage().getSOAPPart().getEnvelope().addHeader() : smc.getMessage().getSOAPPart().getEnvelope().getHeader(); } boolean isOutgoingMessage(SOAPMessageContext smc) { Boolean outboundProperty = (Boolean)smc.get(MessageContext.MESSAGE_OUTBOUND_PROPERTY); return outboundProperty; } }
apache-2.0
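A minimal sketch of wiring the handler above into a JAX-WS handler chain. The port argument stands for any generated vSphere service port (such ports implement BindingProvider); the session cookie value would come from an existing vCenter session and is a placeholder here.

import java.util.List;
import javax.xml.ws.BindingProvider;
import javax.xml.ws.handler.Handler;

import com.cloud.hypervisor.vmware.util.VcenterSessionHandler;

public final class VcenterHandlerWiring {

    @SuppressWarnings({"rawtypes", "unchecked"})
    public static void attach(BindingProvider port, String vcSessionCookie) {
        List<Handler> chain = port.getBinding().getHandlerChain();
        chain.add(new VcenterSessionHandler(vcSessionCookie));
        // JAX-WS only picks up the change once the modified list is set back on the binding.
        port.getBinding().setHandlerChain(chain);
    }
}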
firebase/grpc-SwiftPM
src/core/ext/transport/chttp2/transport/writing.cc
26261
/* * * Copyright 2015 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #include <grpc/support/port_platform.h> #include "src/core/ext/transport/chttp2/transport/context_list.h" #include "src/core/ext/transport/chttp2/transport/internal.h" #include <limits.h> #include <grpc/support/log.h> #include "src/core/lib/compression/stream_compression.h" #include "src/core/lib/debug/stats.h" #include "src/core/lib/profiling/timers.h" #include "src/core/lib/slice/slice_internal.h" #include "src/core/lib/transport/http2_errors.h" static void add_to_write_list(grpc_chttp2_write_cb** list, grpc_chttp2_write_cb* cb) { cb->next = *list; *list = cb; } static void finish_write_cb(grpc_chttp2_transport* t, grpc_chttp2_stream* s, grpc_chttp2_write_cb* cb, grpc_error* error) { grpc_chttp2_complete_closure_step(t, s, &cb->closure, error, "finish_write_cb"); cb->next = t->write_cb_pool; t->write_cb_pool = cb; } static void maybe_initiate_ping(grpc_chttp2_transport* t) { grpc_chttp2_ping_queue* pq = &t->ping_queue; if (grpc_closure_list_empty(pq->lists[GRPC_CHTTP2_PCL_NEXT])) { /* no ping needed: wait */ return; } if (!grpc_closure_list_empty(pq->lists[GRPC_CHTTP2_PCL_INFLIGHT])) { /* ping already in-flight: wait */ if (GRPC_TRACE_FLAG_ENABLED(grpc_http_trace) || GRPC_TRACE_FLAG_ENABLED(grpc_bdp_estimator_trace)) { gpr_log(GPR_INFO, "%s: Ping delayed [%p]: already pinging", t->is_client ? "CLIENT" : "SERVER", t->peer_string); } return; } if (t->ping_state.pings_before_data_required == 0 && t->ping_policy.max_pings_without_data != 0) { /* need to receive something of substance before sending a ping again */ if (GRPC_TRACE_FLAG_ENABLED(grpc_http_trace) || GRPC_TRACE_FLAG_ENABLED(grpc_bdp_estimator_trace)) { gpr_log(GPR_INFO, "%s: Ping delayed [%p]: too many recent pings: %d/%d", t->is_client ? "CLIENT" : "SERVER", t->peer_string, t->ping_state.pings_before_data_required, t->ping_policy.max_pings_without_data); } return; } grpc_millis now = grpc_core::ExecCtx::Get()->Now(); grpc_millis next_allowed_ping_interval = (t->keepalive_permit_without_calls == 0 && grpc_chttp2_stream_map_size(&t->stream_map) == 0) ? 7200 * GPR_MS_PER_SEC : t->ping_policy.min_sent_ping_interval_without_data; grpc_millis next_allowed_ping = t->ping_state.last_ping_sent_time + next_allowed_ping_interval; if (next_allowed_ping > now) { /* not enough elapsed time between successive pings */ if (GRPC_TRACE_FLAG_ENABLED(grpc_http_trace) || GRPC_TRACE_FLAG_ENABLED(grpc_bdp_estimator_trace)) { gpr_log(GPR_INFO, "%s: Ping delayed [%p]: not enough time elapsed since last ping. " " Last ping %f: Next ping %f: Now %f", t->is_client ? 
"CLIENT" : "SERVER", t->peer_string, static_cast<double>(t->ping_state.last_ping_sent_time), static_cast<double>(next_allowed_ping), static_cast<double>(now)); } if (!t->ping_state.is_delayed_ping_timer_set) { t->ping_state.is_delayed_ping_timer_set = true; GRPC_CHTTP2_REF_TRANSPORT(t, "retry_initiate_ping_locked"); GRPC_CLOSURE_INIT(&t->retry_initiate_ping_locked, grpc_chttp2_retry_initiate_ping, t, grpc_schedule_on_exec_ctx); grpc_timer_init(&t->ping_state.delayed_ping_timer, next_allowed_ping, &t->retry_initiate_ping_locked); } return; } pq->inflight_id = t->ping_ctr; t->ping_ctr++; grpc_core::ExecCtx::RunList(DEBUG_LOCATION, &pq->lists[GRPC_CHTTP2_PCL_INITIATE]); grpc_closure_list_move(&pq->lists[GRPC_CHTTP2_PCL_NEXT], &pq->lists[GRPC_CHTTP2_PCL_INFLIGHT]); grpc_slice_buffer_add(&t->outbuf, grpc_chttp2_ping_create(false, pq->inflight_id)); GRPC_STATS_INC_HTTP2_PINGS_SENT(); t->ping_state.last_ping_sent_time = now; if (GRPC_TRACE_FLAG_ENABLED(grpc_http_trace) || GRPC_TRACE_FLAG_ENABLED(grpc_bdp_estimator_trace)) { gpr_log(GPR_INFO, "%s: Ping sent [%s]: %d/%d", t->is_client ? "CLIENT" : "SERVER", t->peer_string, t->ping_state.pings_before_data_required, t->ping_policy.max_pings_without_data); } t->ping_state.pings_before_data_required -= (t->ping_state.pings_before_data_required != 0); } static bool update_list(grpc_chttp2_transport* t, grpc_chttp2_stream* s, int64_t send_bytes, grpc_chttp2_write_cb** list, int64_t* ctr, grpc_error* error) { bool sched_any = false; grpc_chttp2_write_cb* cb = *list; *list = nullptr; *ctr += send_bytes; while (cb) { grpc_chttp2_write_cb* next = cb->next; if (cb->call_at_byte <= *ctr) { sched_any = true; finish_write_cb(t, s, cb, GRPC_ERROR_REF(error)); } else { add_to_write_list(list, cb); } cb = next; } GRPC_ERROR_UNREF(error); return sched_any; } static void report_stall(grpc_chttp2_transport* t, grpc_chttp2_stream* s, const char* staller) { if (GRPC_TRACE_FLAG_ENABLED(grpc_flowctl_trace)) { gpr_log( GPR_DEBUG, "%s:%p stream %d moved to stalled list by %s. This is FULLY expected " "to happen in a healthy program that is not seeing flow control stalls." " However, if you know that there are unwanted stalls, here is some " "helpful data: [fc:pending=%" PRIdPTR ":pending-compressed=%" PRIdPTR ":flowed=%" PRId64 ":peer_initwin=%d:t_win=%" PRId64 ":s_win=%d:s_delta=%" PRId64 "]", t->peer_string, t, s->id, staller, s->flow_controlled_buffer.length, s->stream_compression_method == GRPC_STREAM_COMPRESSION_IDENTITY_COMPRESS ? 0 : s->compressed_data_buffer.length, s->flow_controlled_bytes_flowed, t->settings[GRPC_ACKED_SETTINGS] [GRPC_CHTTP2_SETTINGS_INITIAL_WINDOW_SIZE], t->flow_control->remote_window(), static_cast<uint32_t> GPR_MAX( 0, s->flow_control->remote_window_delta() + (int64_t)t->settings[GRPC_PEER_SETTINGS] [GRPC_CHTTP2_SETTINGS_INITIAL_WINDOW_SIZE]), s->flow_control->remote_window_delta()); } } /* How many bytes would we like to put on the wire during a single syscall */ static uint32_t target_write_size(grpc_chttp2_transport* /*t*/) { return 1024 * 1024; } // Returns true if initial_metadata contains only default headers. 
static bool is_default_initial_metadata(grpc_metadata_batch* initial_metadata) { return initial_metadata->list.default_count == initial_metadata->list.count; } namespace { class StreamWriteContext; class WriteContext { public: WriteContext(grpc_chttp2_transport* t) : t_(t) { GRPC_STATS_INC_HTTP2_WRITES_BEGUN(); GPR_TIMER_SCOPE("grpc_chttp2_begin_write", 0); } // TODO(ctiller): make this the destructor void FlushStats() { GRPC_STATS_INC_HTTP2_SEND_INITIAL_METADATA_PER_WRITE( initial_metadata_writes_); GRPC_STATS_INC_HTTP2_SEND_MESSAGE_PER_WRITE(message_writes_); GRPC_STATS_INC_HTTP2_SEND_TRAILING_METADATA_PER_WRITE( trailing_metadata_writes_); GRPC_STATS_INC_HTTP2_SEND_FLOWCTL_PER_WRITE(flow_control_writes_); } void FlushSettings() { if (t_->dirtied_local_settings && !t_->sent_local_settings) { grpc_slice_buffer_add( &t_->outbuf, grpc_chttp2_settings_create( t_->settings[GRPC_SENT_SETTINGS], t_->settings[GRPC_LOCAL_SETTINGS], t_->force_send_settings, GRPC_CHTTP2_NUM_SETTINGS)); t_->force_send_settings = false; t_->dirtied_local_settings = false; t_->sent_local_settings = true; GRPC_STATS_INC_HTTP2_SETTINGS_WRITES(); } } void FlushQueuedBuffers() { /* simple writes are queued to qbuf, and flushed here */ grpc_slice_buffer_move_into(&t_->qbuf, &t_->outbuf); t_->num_pending_induced_frames = 0; GPR_ASSERT(t_->qbuf.count == 0); } void FlushWindowUpdates() { uint32_t transport_announce = t_->flow_control->MaybeSendUpdate(t_->outbuf.count > 0); if (transport_announce) { grpc_transport_one_way_stats throwaway_stats; grpc_slice_buffer_add( &t_->outbuf, grpc_chttp2_window_update_create(0, transport_announce, &throwaway_stats)); ResetPingClock(); } } void FlushPingAcks() { for (size_t i = 0; i < t_->ping_ack_count; i++) { grpc_slice_buffer_add(&t_->outbuf, grpc_chttp2_ping_create(true, t_->ping_acks[i])); } t_->ping_ack_count = 0; } void EnactHpackSettings() { grpc_chttp2_hpack_compressor_set_max_table_size( &t_->hpack_compressor, t_->settings[GRPC_PEER_SETTINGS] [GRPC_CHTTP2_SETTINGS_HEADER_TABLE_SIZE]); } void UpdateStreamsNoLongerStalled() { grpc_chttp2_stream* s; while (grpc_chttp2_list_pop_stalled_by_transport(t_, &s)) { if (t_->closed_with_error == GRPC_ERROR_NONE && grpc_chttp2_list_add_writable_stream(t_, s)) { if (!s->refcount->refs.RefIfNonZero()) { grpc_chttp2_list_remove_writable_stream(t_, s); } } } } grpc_chttp2_stream* NextStream() { if (t_->outbuf.length > target_write_size(t_)) { result_.partial = true; return nullptr; } grpc_chttp2_stream* s; if (!grpc_chttp2_list_pop_writable_stream(t_, &s)) { return nullptr; } return s; } void ResetPingClock() { if (!t_->is_client) { t_->ping_recv_state.last_ping_recv_time = GRPC_MILLIS_INF_PAST; t_->ping_recv_state.ping_strikes = 0; } t_->ping_state.pings_before_data_required = t_->ping_policy.max_pings_without_data; } void IncInitialMetadataWrites() { ++initial_metadata_writes_; } void IncWindowUpdateWrites() { ++flow_control_writes_; } void IncMessageWrites() { ++message_writes_; } void IncTrailingMetadataWrites() { ++trailing_metadata_writes_; } void NoteScheduledResults() { result_.early_results_scheduled = true; } grpc_chttp2_transport* transport() const { return t_; } grpc_chttp2_begin_write_result Result() { result_.writing = t_->outbuf.count > 0; return result_; } private: grpc_chttp2_transport* const t_; /* stats histogram counters: we increment these throughout this function, and at the end publish to the central stats histograms */ int flow_control_writes_ = 0; int initial_metadata_writes_ = 0; int trailing_metadata_writes_ = 0; int 
message_writes_ = 0; grpc_chttp2_begin_write_result result_ = {false, false, false}; }; class DataSendContext { public: DataSendContext(WriteContext* write_context, grpc_chttp2_transport* t, grpc_chttp2_stream* s) : write_context_(write_context), t_(t), s_(s), sending_bytes_before_(s_->sending_bytes) {} uint32_t stream_remote_window() const { return static_cast<uint32_t> GPR_MAX( 0, s_->flow_control->remote_window_delta() + (int64_t)t_->settings[GRPC_PEER_SETTINGS] [GRPC_CHTTP2_SETTINGS_INITIAL_WINDOW_SIZE]); } uint32_t max_outgoing() const { return static_cast<uint32_t> GPR_MIN( t_->settings[GRPC_PEER_SETTINGS][GRPC_CHTTP2_SETTINGS_MAX_FRAME_SIZE], GPR_MIN(stream_remote_window(), t_->flow_control->remote_window())); } bool AnyOutgoing() const { return max_outgoing() > 0; } void FlushUncompressedBytes() { uint32_t send_bytes = static_cast<uint32_t> GPR_MIN( max_outgoing(), s_->flow_controlled_buffer.length); is_last_frame_ = send_bytes == s_->flow_controlled_buffer.length && s_->fetching_send_message == nullptr && s_->send_trailing_metadata != nullptr && grpc_metadata_batch_is_empty(s_->send_trailing_metadata); grpc_chttp2_encode_data(s_->id, &s_->flow_controlled_buffer, send_bytes, is_last_frame_, &s_->stats.outgoing, &t_->outbuf); s_->flow_control->SentData(send_bytes); s_->sending_bytes += send_bytes; } void FlushCompressedBytes() { GPR_DEBUG_ASSERT(s_->stream_compression_method != GRPC_STREAM_COMPRESSION_IDENTITY_COMPRESS); uint32_t send_bytes = static_cast<uint32_t> GPR_MIN( max_outgoing(), s_->compressed_data_buffer.length); bool is_last_data_frame = (send_bytes == s_->compressed_data_buffer.length && s_->flow_controlled_buffer.length == 0 && s_->fetching_send_message == nullptr); if (is_last_data_frame && s_->send_trailing_metadata != nullptr && s_->stream_compression_ctx != nullptr) { if (GPR_UNLIKELY(!grpc_stream_compress( s_->stream_compression_ctx, &s_->flow_controlled_buffer, &s_->compressed_data_buffer, nullptr, MAX_SIZE_T, GRPC_STREAM_COMPRESSION_FLUSH_FINISH))) { gpr_log(GPR_ERROR, "Stream compression failed."); } grpc_stream_compression_context_destroy(s_->stream_compression_ctx); s_->stream_compression_ctx = nullptr; /* After finish, bytes in s->compressed_data_buffer may be * more than max_outgoing. Start another round of the current * while loop so that send_bytes and is_last_data_frame are * recalculated. 
*/ return; } is_last_frame_ = is_last_data_frame && s_->send_trailing_metadata != nullptr && grpc_metadata_batch_is_empty(s_->send_trailing_metadata); grpc_chttp2_encode_data(s_->id, &s_->compressed_data_buffer, send_bytes, is_last_frame_, &s_->stats.outgoing, &t_->outbuf); s_->flow_control->SentData(send_bytes); if (s_->compressed_data_buffer.length == 0) { s_->sending_bytes += s_->uncompressed_data_size; } } void CompressMoreBytes() { GPR_DEBUG_ASSERT(s_->stream_compression_method != GRPC_STREAM_COMPRESSION_IDENTITY_COMPRESS); if (s_->stream_compression_ctx == nullptr) { s_->stream_compression_ctx = grpc_stream_compression_context_create(s_->stream_compression_method); } s_->uncompressed_data_size = s_->flow_controlled_buffer.length; if (GPR_UNLIKELY(!grpc_stream_compress( s_->stream_compression_ctx, &s_->flow_controlled_buffer, &s_->compressed_data_buffer, nullptr, MAX_SIZE_T, GRPC_STREAM_COMPRESSION_FLUSH_SYNC))) { gpr_log(GPR_ERROR, "Stream compression failed."); } } bool is_last_frame() const { return is_last_frame_; } void CallCallbacks() { if (update_list( t_, s_, static_cast<int64_t>(s_->sending_bytes - sending_bytes_before_), &s_->on_flow_controlled_cbs, &s_->flow_controlled_bytes_flowed, GRPC_ERROR_NONE)) { write_context_->NoteScheduledResults(); } } private: WriteContext* write_context_; grpc_chttp2_transport* t_; grpc_chttp2_stream* s_; const size_t sending_bytes_before_; bool is_last_frame_ = false; }; class StreamWriteContext { public: StreamWriteContext(WriteContext* write_context, grpc_chttp2_stream* s) : write_context_(write_context), t_(write_context->transport()), s_(s) { GRPC_CHTTP2_IF_TRACING( gpr_log(GPR_INFO, "W:%p %s[%d] im-(sent,send)=(%d,%d) announce=%d", t_, t_->is_client ? "CLIENT" : "SERVER", s->id, s->sent_initial_metadata, s->send_initial_metadata != nullptr, (int)(s->flow_control->local_window_delta() - s->flow_control->announced_window_delta()))); } void FlushInitialMetadata() { /* send initial metadata if it's available */ if (s_->sent_initial_metadata) return; if (s_->send_initial_metadata == nullptr) return; // We skip this on the server side if there is no custom initial // metadata, there are no messages to send, and we are also sending // trailing metadata. 
This results in a Trailers-Only response, // which is required for retries, as per: // https://github.com/grpc/proposal/blob/master/A6-client-retries.md#when-retries-are-valid if (!t_->is_client && s_->fetching_send_message == nullptr && s_->flow_controlled_buffer.length == 0 && compressed_data_buffer_len() == 0 && s_->send_trailing_metadata != nullptr && is_default_initial_metadata(s_->send_initial_metadata)) { ConvertInitialMetadataToTrailingMetadata(); } else { grpc_encode_header_options hopt = { s_->id, // stream_id false, // is_eof t_->settings[GRPC_PEER_SETTINGS] [GRPC_CHTTP2_SETTINGS_GRPC_ALLOW_TRUE_BINARY_METADATA] != 0, // use_true_binary_metadata t_->settings[GRPC_PEER_SETTINGS] [GRPC_CHTTP2_SETTINGS_MAX_FRAME_SIZE], // max_frame_size &s_->stats.outgoing // stats }; grpc_chttp2_encode_header(&t_->hpack_compressor, nullptr, 0, s_->send_initial_metadata, &hopt, &t_->outbuf); write_context_->ResetPingClock(); write_context_->IncInitialMetadataWrites(); } s_->send_initial_metadata = nullptr; s_->sent_initial_metadata = true; write_context_->NoteScheduledResults(); grpc_chttp2_complete_closure_step( t_, s_, &s_->send_initial_metadata_finished, GRPC_ERROR_NONE, "send_initial_metadata_finished"); } size_t compressed_data_buffer_len() { return s_->stream_compression_method == GRPC_STREAM_COMPRESSION_IDENTITY_COMPRESS ? 0 : s_->compressed_data_buffer.length; } void FlushWindowUpdates() { /* send any window updates */ const uint32_t stream_announce = s_->flow_control->MaybeSendUpdate(); if (stream_announce == 0) return; grpc_slice_buffer_add( &t_->outbuf, grpc_chttp2_window_update_create(s_->id, stream_announce, &s_->stats.outgoing)); write_context_->ResetPingClock(); write_context_->IncWindowUpdateWrites(); } void FlushData() { if (!s_->sent_initial_metadata) return; if (s_->flow_controlled_buffer.length == 0 && compressed_data_buffer_len() == 0) { return; // early out: nothing to do } DataSendContext data_send_context(write_context_, t_, s_); if (!data_send_context.AnyOutgoing()) { if (t_->flow_control->remote_window() <= 0) { report_stall(t_, s_, "transport"); grpc_chttp2_list_add_stalled_by_transport(t_, s_); } else if (data_send_context.stream_remote_window() <= 0) { report_stall(t_, s_, "stream"); grpc_chttp2_list_add_stalled_by_stream(t_, s_); } return; // early out: nothing to do } if (s_->stream_compression_method == GRPC_STREAM_COMPRESSION_IDENTITY_COMPRESS) { while (s_->flow_controlled_buffer.length > 0 && data_send_context.max_outgoing() > 0) { data_send_context.FlushUncompressedBytes(); } } else { while ((s_->flow_controlled_buffer.length > 0 || s_->compressed_data_buffer.length > 0) && data_send_context.max_outgoing() > 0) { if (s_->compressed_data_buffer.length > 0) { data_send_context.FlushCompressedBytes(); } else { data_send_context.CompressMoreBytes(); } } } write_context_->ResetPingClock(); if (data_send_context.is_last_frame()) { SentLastFrame(); } data_send_context.CallCallbacks(); stream_became_writable_ = true; if (s_->flow_controlled_buffer.length > 0 || compressed_data_buffer_len() > 0) { GRPC_CHTTP2_STREAM_REF(s_, "chttp2_writing:fork"); grpc_chttp2_list_add_writable_stream(t_, s_); } write_context_->IncMessageWrites(); } void FlushTrailingMetadata() { if (!s_->sent_initial_metadata) return; if (s_->send_trailing_metadata == nullptr) return; if (s_->fetching_send_message != nullptr) return; if (s_->flow_controlled_buffer.length != 0) return; if (compressed_data_buffer_len() != 0) return; GRPC_CHTTP2_IF_TRACING(gpr_log(GPR_INFO, "sending trailing_metadata")); if 
(grpc_metadata_batch_is_empty(s_->send_trailing_metadata)) { grpc_chttp2_encode_data(s_->id, &s_->flow_controlled_buffer, 0, true, &s_->stats.outgoing, &t_->outbuf); } else { grpc_encode_header_options hopt = { s_->id, true, t_->settings[GRPC_PEER_SETTINGS] [GRPC_CHTTP2_SETTINGS_GRPC_ALLOW_TRUE_BINARY_METADATA] != 0, t_->settings[GRPC_PEER_SETTINGS][GRPC_CHTTP2_SETTINGS_MAX_FRAME_SIZE], &s_->stats.outgoing}; grpc_chttp2_encode_header(&t_->hpack_compressor, extra_headers_for_trailing_metadata_, num_extra_headers_for_trailing_metadata_, s_->send_trailing_metadata, &hopt, &t_->outbuf); } write_context_->IncTrailingMetadataWrites(); write_context_->ResetPingClock(); SentLastFrame(); write_context_->NoteScheduledResults(); grpc_chttp2_complete_closure_step( t_, s_, &s_->send_trailing_metadata_finished, GRPC_ERROR_NONE, "send_trailing_metadata_finished"); } bool stream_became_writable() { return stream_became_writable_; } private: void ConvertInitialMetadataToTrailingMetadata() { GRPC_CHTTP2_IF_TRACING( gpr_log(GPR_INFO, "not sending initial_metadata (Trailers-Only)")); // When sending Trailers-Only, we need to move the :status and // content-type headers to the trailers. if (s_->send_initial_metadata->idx.named.status != nullptr) { extra_headers_for_trailing_metadata_ [num_extra_headers_for_trailing_metadata_++] = &s_->send_initial_metadata->idx.named.status->md; } if (s_->send_initial_metadata->idx.named.content_type != nullptr) { extra_headers_for_trailing_metadata_ [num_extra_headers_for_trailing_metadata_++] = &s_->send_initial_metadata->idx.named.content_type->md; } } void SentLastFrame() { s_->send_trailing_metadata = nullptr; s_->sent_trailing_metadata = true; s_->eos_sent = true; if (!t_->is_client && !s_->read_closed) { grpc_slice_buffer_add( &t_->outbuf, grpc_chttp2_rst_stream_create( s_->id, GRPC_HTTP2_NO_ERROR, &s_->stats.outgoing)); } grpc_chttp2_mark_stream_closed(t_, s_, !t_->is_client, true, GRPC_ERROR_NONE); } WriteContext* const write_context_; grpc_chttp2_transport* const t_; grpc_chttp2_stream* const s_; bool stream_became_writable_ = false; grpc_mdelem* extra_headers_for_trailing_metadata_[2]; size_t num_extra_headers_for_trailing_metadata_ = 0; }; } // namespace grpc_chttp2_begin_write_result grpc_chttp2_begin_write( grpc_chttp2_transport* t) { WriteContext ctx(t); ctx.FlushSettings(); ctx.FlushPingAcks(); ctx.FlushQueuedBuffers(); ctx.EnactHpackSettings(); if (t->flow_control->remote_window() > 0) { ctx.UpdateStreamsNoLongerStalled(); } /* for each grpc_chttp2_stream that's become writable, frame it's data (according to available window sizes) and add to the output buffer */ while (grpc_chttp2_stream* s = ctx.NextStream()) { StreamWriteContext stream_ctx(&ctx, s); size_t orig_len = t->outbuf.length; stream_ctx.FlushInitialMetadata(); stream_ctx.FlushWindowUpdates(); stream_ctx.FlushData(); stream_ctx.FlushTrailingMetadata(); if (t->outbuf.length > orig_len) { /* Add this stream to the list of the contexts to be traced at TCP */ s->byte_counter += t->outbuf.length - orig_len; if (s->traced && grpc_endpoint_can_track_err(t->ep)) { grpc_core::ContextList::Append(&t->cl, s); } } if (stream_ctx.stream_became_writable()) { if (!grpc_chttp2_list_add_writing_stream(t, s)) { /* already in writing list: drop ref */ GRPC_CHTTP2_STREAM_UNREF(s, "chttp2_writing:already_writing"); } else { /* ref will be dropped at end of write */ } } else { GRPC_CHTTP2_STREAM_UNREF(s, "chttp2_writing:no_write"); } } ctx.FlushWindowUpdates(); maybe_initiate_ping(t); return ctx.Result(); } void 
grpc_chttp2_end_write(grpc_chttp2_transport* t, grpc_error* error) { GPR_TIMER_SCOPE("grpc_chttp2_end_write", 0); grpc_chttp2_stream* s; if (t->channelz_socket != nullptr) { t->channelz_socket->RecordMessagesSent(t->num_messages_in_next_write); } t->num_messages_in_next_write = 0; while (grpc_chttp2_list_pop_writing_stream(t, &s)) { if (s->sending_bytes != 0) { update_list(t, s, static_cast<int64_t>(s->sending_bytes), &s->on_write_finished_cbs, &s->flow_controlled_bytes_written, GRPC_ERROR_REF(error)); s->sending_bytes = 0; } GRPC_CHTTP2_STREAM_UNREF(s, "chttp2_writing:end"); } grpc_slice_buffer_reset_and_unref_internal(&t->outbuf); GRPC_ERROR_UNREF(error); }
apache-2.0
meetdestiny/geronimo-trader
modules/common/src/java/org/apache/geronimo/common/propertyeditor/URIEditor.java
1042
/** * * Copyright 2003-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.geronimo.common.propertyeditor; import java.net.URI; import java.net.URISyntaxException; /** * * * @version $Rev$ $Date$ * * */ public class URIEditor extends TextPropertyEditorSupport { public Object getValue() { try { return new URI(getAsText()); } catch (URISyntaxException e) { throw new PropertyEditorException(e); } } }
apache-2.0
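A small sketch of the java.beans PropertyEditor contract the class above fills in: text goes in through setAsText (provided by TextPropertyEditorSupport, which is assumed here to simply store the string, as the getAsText() call inside getValue() suggests) and a java.net.URI comes back from getValue(). A malformed string would surface as the PropertyEditorException wrapping the URISyntaxException.

import java.net.URI;

import org.apache.geronimo.common.propertyeditor.URIEditor;

public class URIEditorSketch {
    public static void main(String[] args) {
        URIEditor editor = new URIEditor();
        editor.setAsText("http://geronimo.apache.org/");
        URI uri = (URI) editor.getValue(); // parsed from the text set above
        System.out.println(uri.getHost()); // geronimo.apache.org
    }
}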
bwasti/caffe2
caffe2/core/registry_test.cc
1099
#include <iostream> #include <memory> #include "caffe2/core/registry.h" #include <gtest/gtest.h> #include "caffe2/core/logging.h" namespace caffe2 { namespace { class Foo { public: explicit Foo(int x) { LOG(INFO) << "Foo " << x; } }; CAFFE_DECLARE_REGISTRY(FooRegistry, Foo, int); CAFFE_DEFINE_REGISTRY(FooRegistry, Foo, int); #define REGISTER_FOO(clsname) \ CAFFE_REGISTER_CLASS(FooRegistry, clsname, clsname) class Bar : public Foo { public: explicit Bar(int x) : Foo(x) { LOG(INFO) << "Bar " << x; } }; REGISTER_FOO(Bar); class AnotherBar : public Foo { public: explicit AnotherBar(int x) : Foo(x) { LOG(INFO) << "AnotherBar " << x; } }; REGISTER_FOO(AnotherBar); TEST(RegistryTest, CanRunCreator) { unique_ptr<Foo> bar(FooRegistry()->Create("Bar", 1)); EXPECT_TRUE(bar != nullptr) << "Cannot create bar."; unique_ptr<Foo> another_bar(FooRegistry()->Create("AnotherBar", 1)); EXPECT_TRUE(another_bar != nullptr); } TEST(RegistryTest, ReturnNullOnNonExistingCreator) { EXPECT_EQ(FooRegistry()->Create("Non-existing bar", 1), nullptr); } } } // namespace caffe2
apache-2.0
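The C++ test above exercises caffe2's macro-based registry: creators are registered under a string key, instantiated by name, and a null pointer is returned for unknown keys. The Java sketch below is not caffe2 code, only a rough analogue of that pattern for readers more comfortable in Java; every name in it is made up.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

public final class FooRegistrySketch {

    public static class Foo {
        public Foo(int x) { System.out.println("Foo " + x); }
    }

    public static class Bar extends Foo {
        public Bar(int x) { super(x); System.out.println("Bar " + x); }
    }

    // Registry keyed by class name, mirroring CAFFE_DECLARE_REGISTRY(FooRegistry, Foo, int).
    private static final Map<String, Function<Integer, Foo>> CREATORS = new ConcurrentHashMap<>();

    public static void register(String name, Function<Integer, Foo> creator) {
        CREATORS.put(name, creator);
    }

    public static Foo create(String name, int x) {
        Function<Integer, Foo> creator = CREATORS.get(name);
        return creator == null ? null : creator.apply(x); // null for unknown keys, as in ReturnNullOnNonExistingCreator
    }

    public static void main(String[] args) {
        register("Bar", Bar::new);
        System.out.println(create("Bar", 1) != null);              // true
        System.out.println(create("Non-existing bar", 1) == null); // true
    }
}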
dixonsatit/yii-project
protected/extensions/starship/RestfullYii/config/routes.php
1539
<?php return [ 'api/<controller:\w+>'=>['<controller>/REST.GET', 'verb'=>'GET'], 'api/<controller:\w+>/<id:\w*>'=>['<controller>/REST.GET', 'verb'=>'GET'], 'api/<controller:\w+>/<id:\w*>/<param1:\w*>'=>['<controller>/REST.GET', 'verb'=>'GET'], 'api/<controller:\w+>/<id:\w*>/<param1:\w*>/<param2:\w*>'=>['<controller>/REST.GET', 'verb'=>'GET'], ['<controller>/REST.PUT', 'pattern'=>'api/<controller:\w+>/<id:\w*>', 'verb'=>'PUT'], ['<controller>/REST.PUT', 'pattern'=>'api/<controller:\w+>/<id:\w*>/<param1:\w*>', 'verb'=>'PUT'], ['<controller>/REST.PUT', 'pattern'=>'api/<controller:\w*>/<id:\w*>/<param1:\w*>/<param2:\w*>', 'verb'=>'PUT'], ['<controller>/REST.DELETE', 'pattern'=>'api/<controller:\w+>/<id:\w*>', 'verb'=>'DELETE'], ['<controller>/REST.DELETE', 'pattern'=>'api/<controller:\w+>/<id:\w*>/<param1:\w*>', 'verb'=>'DELETE'], ['<controller>/REST.DELETE', 'pattern'=>'api/<controller:\w+>/<id:\w*>/<param1:\w*>/<param2:\w*>', 'verb'=>'DELETE'], ['<controller>/REST.POST', 'pattern'=>'api/<controller:\w+>', 'verb'=>'POST'], ['<controller>/REST.POST', 'pattern'=>'api/<controller:\w+>/<id:\w+>', 'verb'=>'POST'], ['<controller>/REST.POST', 'pattern'=>'api/<controller:\w+>/<id:\w*>/<param1:\w*>', 'verb'=>'POST'], ['<controller>/REST.POST', 'pattern'=>'api/<controller:\w+>/<id:\w*>/<param1:\w*>/<param2:\w*>', 'verb'=>'POST'], '<controller:\w+>/<id:\d+>'=>'<controller>/view', '<controller:\w+>/<action:\w+>/<id:\d+>'=>'<controller>/<action>', '<controller:\w+>/<action:\w+>'=>'<controller>/<action>', ];
apache-2.0
erikssonorjan/ecommerce-framework
connectors/demandware-ecommerce-connector/src/main/java/com/sdl/ecommerce/demandware/api/model/AuthenticationRequest.java
598
package com.sdl.ecommerce.demandware.api.model; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; /** * AuthenticationType * * @author nic */ @JsonIgnoreProperties(ignoreUnknown = true) public class AuthenticationRequest { public static final String GUEST = "guest"; public static final String CREDENTIALS = "credentials"; public static final String REFRESH = "refresh"; public static final String SESSION = "session"; private String type; public AuthenticationRequest(String type) { this.type = type; } public String getType() { return type; } }
apache-2.0
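A trivial usage sketch for the model class above; serializing the object and posting it to the Demandware OCAPI authentication endpoint is handled elsewhere in the connector and is not shown here.

import com.sdl.ecommerce.demandware.api.model.AuthenticationRequest;

public class AuthenticationRequestSketch {
    public static void main(String[] args) {
        AuthenticationRequest guest = new AuthenticationRequest(AuthenticationRequest.GUEST);
        AuthenticationRequest refresh = new AuthenticationRequest(AuthenticationRequest.REFRESH);
        System.out.println(guest.getType() + " / " + refresh.getType()); // guest / refresh
    }
}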
nickperez1285/truck-hunt-hackathon
client/stemapp/jimu.js/LayerInfos/LayerInfoForDefault.js
8864
/////////////////////////////////////////////////////////////////////////// // Copyright © 2014 Esri. All Rights Reserved. // // Licensed under the Apache License Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /////////////////////////////////////////////////////////////////////////// define([ 'dojo/_base/declare', 'dojo/_base/array', 'dojo/_base/lang', 'esri/graphicsUtils', 'dojo/aspect', './LayerInfo', 'dojox/gfx', 'dojo/dom-construct', 'dojo/dom-attr', 'dojo/Deferred', 'esri/geometry/webMercatorUtils', 'esri/symbols/jsonUtils' ], function(declare, array, lang, graphicsUtils, aspect, LayerInfo, gfx, domConstruct, domAttr, Deferred, webMercatorUtils, jsonUtils) { var clazz = declare(LayerInfo, { _legendsNode: null, // operLayer = { // layerObject: layer, // title: layer.label || layer.title || layer.name || layer.id || " ", // id: layerId || " ", // subLayers: [operLayer, ... ], // mapService: {layerInfo: , subId: }, // collection: {layerInfo: } // }; constructor: function( operLayer, map ) { this.layerLoadedDef = new Deferred(); /* if(this.layerObject) { this.layerObject.on('load', lang.hitch(this, function(){ this.layerLoadedDef.resolve(); })); } */ /*jshint unused: false*/ // about popupMenu if (operLayer.selfType) { this.popupMenuInfo.menuItems = [{ key: 'table', label: this.nls.itemToAttributeTable }, { key: null, label: '' },{ key: 'description', label: '<a class="menu-item-description" target="_blank" href=' + ((this.layerObject && this.layerObject.url) ? 
this.layerObject.url : '') + '>' + this.nls.itemDesc + '</a>' }]; } else if (this.layerObject.declaredClass === 'esri.layers.FeatureLayer' || this.layerObject.declaredClass === 'esri.layers.CSVLayer' /*this.layerObject.declaredClass === 'esri.layers.StreamLayer'*/) { var index = -1; var i = 0; for(i = 0; i < this.popupMenuInfo.menuItems.length; i++) { if (this.popupMenuInfo.menuItems[i].key === 'movedown') { index = i; break; } } this.popupMenuInfo.menuItems .splice(index + 1, 0, '', {key: "table", label: this.nls.itemToAttributeTable}); } }, getExtent: function() { return this._convertGeometryToMapSpatialRef(this.originOperLayer.layerObject.fullExtent) || this._convertGeometryToMapSpatialRef(this.originOperLayer.layerObject.initialExtent); }, initVisible: function() { /*jshint unused: false*/ var visible = false; visible = this.originOperLayer.layerObject.visible; this._visible = visible; }, _setTopLayerVisible: function(visible) { if(this.originOperLayer.collection){ //collection //click directly if(this.originOperLayer.collection.layerInfo._visible) { if(visible) { this.layerObject.show(); this._visible = true; } else { this.layerObject.hide(); this._visible = false; } } else { if(visible) { this.layerObject.hide(); this._visible = true; } else { this.layerObject.hide(); this._visible = false; } } } else { if (visible) { this.layerObject.show(); } else { this.layerObject.hide(); } this._visible = visible; } }, setLayerVisiblefromTopLayer: function() { //click from top collecton if(this.originOperLayer.collection.layerInfo._visible) { if(this._visible) { this.layerObject.show(); } } else { this.layerObject.hide(); } }, //---------------new section----------------------------------------- // obtainLegendsNode: function() { // var layer = this.originOperLayer.layerObject; // var legendsNode = domConstruct.create("div", { // "class": "legends-div" // }); // if (layer && layer.renderer) { // this.initLegendsNode(legendsNode); // } else { // this.layerLoadedDef.then(lang.hitch(this, function(){ // this.initLegendsNode(legendsNode); // })); // } // return legendsNode; // }, createLegendsNode: function() { var legendsNode = domConstruct.create("div", { "class": "legends-div jimu-leading-margin1" }, document.body); domConstruct.create("img", { "class": "legends-loading-img", "src": require.toUrl('jimu') + '/images/loading.gif' }, legendsNode); return legendsNode; }, drawLegends: function(legendsNode) { this.initLegendsNode(legendsNode); }, initLegendsNode: function(legendsNode) { var legendInfos = []; var layer = this.layerObject; if( this.layerObject && !this.layerObject.empty && (!this.originOperLayer.subLayer || this.originOperLayer.subLayers.length === 0)) { // delete loading image, this condition means the layer already loaded. domConstruct.empty(legendsNode); // layer has renderer that means the layer has loadded. 
if (layer.renderer) { if (layer.renderer.infos) { legendInfos = lang.clone(layer.renderer.infos); // todo } else { legendInfos.push({ label: layer.renderer.label, symbol: layer.renderer.symbol }); } array.forEach(legendInfos, function(legendInfo) { legendInfo.legendDiv = domConstruct.create("div", { "class": "legend-div" }, legendsNode); legendInfo.symbolDiv= domConstruct.create("div", { "class": "legend-symbol jimu-float-leading" }, legendInfo.legendDiv); legendInfo.labelDiv= domConstruct.create("div", { "class": "legend-label jimu-float-leading", "innerHTML": legendInfo.label || " " }, legendInfo.legendDiv); if(legendInfo.symbol.type === "textsymbol") { domAttr.set(legendInfo.symbolDiv, "innerHTML", legendInfo.symbol.text); } else { var mySurface = gfx.createSurface(legendInfo.symbolDiv, 50, 50); var descriptors = jsonUtils.getShapeDescriptors(legendInfo.symbol); var shape = mySurface.createShape(descriptors.defaultShape) .setFill(descriptors.fill).setStroke(descriptors.stroke); shape.setTransform(gfx.matrix.translate(25, 25)); } }, this); } } }, obtainNewSubLayers: function() { var newSubLayers = []; /* if(!this.originOperLayer.subLayers || this.originOperLayer.subLayers.length === 0) { //*** } else { */ if(this.originOperLayer.subLayers && this.originOperLayer.subLayers.length !== 0) { array.forEach(this.originOperLayer.subLayers, function(subOperLayer){ var subLayerInfo = new clazz(subOperLayer, this.map); newSubLayers.push(subLayerInfo); subLayerInfo.init(); }, this); } return newSubLayers; }, getOpacity: function() { if (this.layerObject.opacity) { return this.layerObject.opacity; } else { return 1; } }, setOpacity: function(opacity) { if (this.layerObject.setOpacity) { this.layerObject.setOpacity(opacity); } } // isShowInMap: function() { // var visible = false; // if(this.originOperLayer.collection){ // visible = this.originOperLayer.collection.layerInfo._visible && this.layerObject.visible; // } else { // visible = this.layerObject.visible; // } // return visible; // } }); return clazz; });
apache-2.0
henricasanova/databet_meteor_1.4
app/imports/api/meteor_methods/collection_operations.js
1448
// These are brute-force with redundancy (i.e, some calls will // find everything removed already. But at least there is no // infinite recursion :) import '../../api/databet_collections'; import { collection_dictionary } from '../../startup/both/collection_dictionary'; import { Meteor } from 'meteor/meteor'; import { Random } from 'meteor/random'; Meteor.methods({ insert_document_into_collection: function (collection_name, doc) { const collection = collection_dictionary[collection_name]; if (collection == null) { throw new Meteor.Error("Unknown Collection " + collection_name); } const id = collection.insert(doc); console.log("Inserted new document ", id, " into collection", collection_name); }, update_document_in_collection: function (collection_name, doc_id, modifier) { const collection = collection_dictionary[collection_name]; if (collection == null) { throw new Meteor.Error("Unknown Collection " + collection_name); } console.log("Updating document", doc_id, "in collection", collection_name, "modifier = ", modifier); collection.update({"_id": doc_id}, {$set: modifier}); }, remove_document_from_collection: function (collection_name, doc_id) { const collection = collection_dictionary[collection_name]; if (collection == null) { throw new Meteor.Error("Unknown Collection " + collection_name); } collection.remove({"_id": doc_id}); } });
apache-2.0
kuali/kuality-kfs
lib/kuality-kfs/data_objects/vendor/PhoneNumberLinesMixin.rb
651
module PhoneNumberLinesMixin attr_accessor :phone_numbers, :initial_phone_numbers def default_phone_numbers(opts={}) # This just makes it so we don't have to be so repetitive. It can certainly be # overridden in a subclass if you don't want to chuck things in via opts. { phone_numbers: collection('PhoneLineObject'), initial_phone_numbers: [] }.merge(opts) end def post_create super @initial_phone_numbers.each{ |il| @phone_numbers.add il } @initial_phone_numbers = nil end def update_line_objects_from_page!(target=:new) @phone_numbers.update_from_page! target super end end
apache-2.0
gnodet/camel
components/camel-sjms/src/test/java/org/apache/camel/component/sjms/producer/QueueProducerQoSTest.java
4853
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.sjms.producer;

import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.jmx.DestinationViewMBean;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.camel.EndpointInject;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.component.sjms.support.JmsTestSupport;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.junit.jupiter.api.Assumptions.assumeFalse;

public class QueueProducerQoSTest extends JmsTestSupport {

    private static final String TEST_INONLY_DESTINATION_NAME = "queue.producer.test.qos.inonly";
    private static final String TEST_INOUT_DESTINATION_NAME = "queue.producer.test.qos.inout";

    private static final String EXPIRED_MESSAGE_ROUTE_ID = "expiredAdvisoryRoute";
    private static final String MOCK_EXPIRED_ADVISORY = "mock:expiredAdvisory";

    @EndpointInject(MOCK_EXPIRED_ADVISORY)
    MockEndpoint mockExpiredAdvisory;

    @Test
    public void testInOutQueueProducerTTL() throws Exception {
        assumeFalse(externalAmq);
        mockExpiredAdvisory.expectedMessageCount(1);

        String endpoint = String.format("sjms:queue:%s?timeToLive=1000&exchangePattern=InOut&requestTimeout=500",
                TEST_INOUT_DESTINATION_NAME);

        try {
            template.requestBody(endpoint, "test message");
            fail("we aren't expecting any consumers, so should not succeed");
        } catch (Exception e) {
            // we are expecting an exception here because there are no consumers on this queue,
            // so we will not be able to do a real InOut/request-response, but that's okay
            // we're just interested in the message becoming expired
        }

        assertMockEndpointsSatisfied();

        DestinationViewMBean queue = getQueueMBean(TEST_INOUT_DESTINATION_NAME);
        assertEquals(0, queue.getQueueSize(),
                "There were unexpected messages left in the queue: " + TEST_INOUT_DESTINATION_NAME);
    }

    @Test
    public void testInOnlyQueueProducerTTL() throws Exception {
        assumeFalse(externalAmq);
        mockExpiredAdvisory.expectedMessageCount(1);

        String endpoint = String.format("sjms:queue:%s?timeToLive=1000", TEST_INONLY_DESTINATION_NAME);
        template.sendBody(endpoint, "test message");

        assertMockEndpointsSatisfied();

        DestinationViewMBean queue = getQueueMBean(TEST_INONLY_DESTINATION_NAME);
        assertEquals(0, queue.getQueueSize(),
                "There were unexpected messages left in the queue: " + TEST_INONLY_DESTINATION_NAME);
    }

    @Override
    protected void configureBroker(BrokerService broker) throws Exception {
        broker.setUseJmx(true);
        broker.setPersistent(true);
        broker.setDataDirectory("target/activemq-data");
        broker.deleteAllMessages();
        broker.setAdvisorySupport(true);
        broker.addConnector(brokerUri);

        // configure expiration rate
        ActiveMQQueue queueName = new ActiveMQQueue(">");
        PolicyEntry entry = new PolicyEntry();
        entry.setDestination(queueName);
        entry.setExpireMessagesPeriod(1000);

        PolicyMap policyMap = new PolicyMap();
        policyMap.put(queueName, entry);
        broker.setDestinationPolicy(policyMap);
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("sjms:topic:ActiveMQ.Advisory.Expired.Queue.>")
                        .routeId(EXPIRED_MESSAGE_ROUTE_ID)
                        .log("Expired message")
                        .to(MOCK_EXPIRED_ADVISORY);
            }
        };
    }
}
apache-2.0
xianfengxiong/how-tomcat-work
book/tomcat-4.1.12-src/catalina/src/share/org/apache/catalina/core/StandardEngine.java
10380
/*
 * $Header: /home/cvs/jakarta-tomcat-4.0/catalina/src/share/org/apache/catalina/core/StandardEngine.java,v 1.15 2002/05/02 22:14:45 craigmcc Exp $
 * $Revision: 1.15 $
 * $Date: 2002/05/02 22:14:45 $
 *
 * ====================================================================
 *
 * The Apache Software License, Version 1.1
 *
 * Copyright (c) 1999-2001 The Apache Software Foundation.  All rights
 * reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. The end-user documentation included with the redistribution, if
 *    any, must include the following acknowlegement:
 *       "This product includes software developed by the
 *        Apache Software Foundation (http://www.apache.org/)."
 *    Alternately, this acknowlegement may appear in the software itself,
 *    if and wherever such third-party acknowlegements normally appear.
 *
 * 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
 *    Foundation" must not be used to endorse or promote products derived
 *    from this software without prior written permission. For written
 *    permission, please contact apache@apache.org.
 *
 * 5. Products derived from this software may not be called "Apache"
 *    nor may "Apache" appear in their names without prior written
 *    permission of the Apache Group.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
 * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 * [Additional notices, if required by prior licensing conditions]
 *
 */

package org.apache.catalina.core;

import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.catalina.Container;
import org.apache.catalina.Context;
import org.apache.catalina.DefaultContext;
import org.apache.catalina.Engine;
import org.apache.catalina.Host;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.Request;
import org.apache.catalina.Response;
import org.apache.catalina.Service;
import org.apache.catalina.util.ServerInfo;

/**
 * Standard implementation of the <b>Engine</b> interface.  Each
 * child container must be a Host implementation to process the specific
 * fully qualified host name of that virtual host.
 *
 * @author Craig R. McClanahan
 * @version $Revision: 1.15 $ $Date: 2002/05/02 22:14:45 $
 */
public class StandardEngine extends ContainerBase implements Engine {

    // ----------------------------------------------------------- Constructors

    /**
     * Create a new StandardEngine component with the default basic Valve.
     */
    public StandardEngine() {
        super();
        pipeline.setBasic(new StandardEngineValve());
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * Host name to use when no server host, or an unknown host,
     * is specified in the request.
     */
    private String defaultHost = null;

    /**
     * The descriptive information string for this implementation.
     */
    private static final String info =
        "org.apache.catalina.core.StandardEngine/1.0";

    /**
     * The Java class name of the default Mapper class for this Container.
     */
    private String mapperClass =
        "org.apache.catalina.core.StandardEngineMapper";

    /**
     * The <code>Service</code> that owns this Engine, if any.
     */
    private Service service = null;

    /**
     * DefaultContext config
     */
    private DefaultContext defaultContext;

    /**
     * The JVM Route ID for this Tomcat instance. All Route ID's must be unique
     * across the cluster.
     */
    private String jvmRouteId;

    // ------------------------------------------------------------- Properties

    /**
     * Return the default host.
     */
    public String getDefaultHost() {
        return (defaultHost);
    }

    /**
     * Set the default host.
     *
     * @param host The new default host
     */
    public void setDefaultHost(String host) {
        String oldDefaultHost = this.defaultHost;
        if (host == null) {
            this.defaultHost = null;
        } else {
            this.defaultHost = host.toLowerCase();
        }
        support.firePropertyChange("defaultHost", oldDefaultHost,
                                   this.defaultHost);
    }

    /**
     * Set the cluster-wide unique identifier for this Engine.
     * This value is only useful in a load-balancing scenario.
     * <p>
     * This property should not be changed once it is set.
     */
    public void setJvmRoute(String routeId) {
        this.log("setJvmRoute=" + routeId);
        jvmRouteId = routeId;
    }

    /**
     * Retrieve the cluster-wide unique identifier for this Engine.
     * This value is only useful in a load-balancing scenario.
     */
    public String getJvmRoute() {
        return jvmRouteId;
    }

    /**
     * Set the DefaultContext
     * for new web applications.
     *
     * @param defaultContext The new DefaultContext
     */
    public void addDefaultContext(DefaultContext defaultContext) {
        DefaultContext oldDefaultContext = this.defaultContext;
        this.defaultContext = defaultContext;
        support.firePropertyChange("defaultContext",
                                   oldDefaultContext, this.defaultContext);
    }

    /**
     * Retrieve the DefaultContext for new web applications.
     */
    public DefaultContext getDefaultContext() {
        return (this.defaultContext);
    }

    /**
     * Return the default Mapper class name.
     */
    public String getMapperClass() {
        return (this.mapperClass);
    }

    /**
     * Set the default Mapper class name.
     *
     * @param mapperClass The new default Mapper class name
     */
    public void setMapperClass(String mapperClass) {
        String oldMapperClass = this.mapperClass;
        this.mapperClass = mapperClass;
        support.firePropertyChange("mapperClass",
                                   oldMapperClass, this.mapperClass);
    }

    /**
     * Return the <code>Service</code> with which we are associated (if any).
     */
    public Service getService() {
        return (this.service);
    }

    /**
     * Set the <code>Service</code> with which we are associated (if any).
     *
     * @param service The service that owns this Engine
     */
    public void setService(Service service) {
        this.service = service;
    }

    // --------------------------------------------------------- Public Methods

    /**
     * Import the DefaultContext config into a web application context.
     *
     * @param context web application context to import default context
     */
    public void importDefaultContext(Context context) {
        if ( this.defaultContext != null )
            this.defaultContext.importDefaultContext(context);
    }

    /**
     * Add a child Container, only if the proposed child is an implementation
     * of Host.
     *
     * @param child Child container to be added
     */
    public void addChild(Container child) {
        if (!(child instanceof Host))
            throw new IllegalArgumentException
                (sm.getString("standardEngine.notHost"));
        super.addChild(child);
    }

    /**
     * Return descriptive information about this Container implementation and
     * the corresponding version number, in the format
     * <code>&lt;description&gt;/&lt;version&gt;</code>.
     */
    public String getInfo() {
        return (info);
    }

    /**
     * Disallow any attempt to set a parent for this Container, since an
     * Engine is supposed to be at the top of the Container hierarchy.
     *
     * @param container Proposed parent Container
     */
    public void setParent(Container container) {
        throw new IllegalArgumentException
            (sm.getString("standardEngine.notParent"));
    }

    /**
     * Start this Engine component.
     *
     * @exception LifecycleException if a startup error occurs
     */
    public void start() throws LifecycleException {
        // Log our server identification information
        System.out.println(ServerInfo.getServerInfo());

        // Standard container startup
        super.start();
    }

    /**
     * Return a String representation of this component.
     */
    public String toString() {
        StringBuffer sb = new StringBuffer("StandardEngine[");
        sb.append(getName());
        sb.append("]");
        return (sb.toString());
    }

    // ------------------------------------------------------ Protected Methods

    /**
     * Add a default Mapper implementation if none have been configured
     * explicitly.
     *
     * @param mapperClass The default mapper class name to add
     */
    protected void addDefaultMapper(String mapperClass) {
        super.addDefaultMapper(this.mapperClass);
    }

}
apache-2.0
mlatold/latova
plugins/bbtags/letters_numbers_spaces.php
202
<?php
class bb_letters_numbers_spaces {

    function initialize($value, $option="", $argument=array()) {
        $option = preg_replace("{[^A-Za-z0-9 ]}", "", $option);
        return array($value, $option);
    }

}
?>
apache-2.0