focal_method | test_case
---|---
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload,
final ConnectionSession connectionSession) {
switch (commandPacketType) {
case COM_QUIT:
return new MySQLComQuitPacket();
case COM_INIT_DB:
return new MySQLComInitDbPacket(payload);
case COM_FIELD_LIST:
return new MySQLComFieldListPacket(payload);
case COM_QUERY:
return new MySQLComQueryPacket(payload);
case COM_STMT_PREPARE:
return new MySQLComStmtPreparePacket(payload);
case COM_STMT_EXECUTE:
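// Peek the 4-byte little-endian statement ID at the current reader index without consuming it, so the packet constructor can still read the full payload.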
MySQLServerPreparedStatement serverPreparedStatement =
connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex()));
return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount());
case COM_STMT_SEND_LONG_DATA:
return new MySQLComStmtSendLongDataPacket(payload);
case COM_STMT_RESET:
return new MySQLComStmtResetPacket(payload);
case COM_STMT_CLOSE:
return new MySQLComStmtClosePacket(payload);
case COM_SET_OPTION:
return new MySQLComSetOptionPacket(payload);
case COM_PING:
return new MySQLComPingPacket();
case COM_RESET_CONNECTION:
return new MySQLComResetConnectionPacket();
default:
return new MySQLUnsupportedCommandPacket(commandPacketType);
}
} | @Test
void assertNewInstanceWithComResetConnectionPacket() {
assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_RESET_CONNECTION, payload, connectionSession), instanceOf(MySQLComResetConnectionPacket.class));
} |
public SearchResults<ProjectBindingInformation> findProjectBindingsByRequest(ProjectBindingsSearchRequest request) {
ProjectAlmSettingQuery query = buildProjectAlmSettingQuery(request);
try (DbSession session = dbClient.openSession(false)) {
int total = dbClient.projectAlmSettingDao().countProjectAlmSettings(session, query);
if (request.pageSize() == 0) {
return new SearchResults<>(List.of(), total);
}
List<ProjectBindingInformation> searchResults = performSearch(session, query, request.page(), request.pageSize());
return new SearchResults<>(searchResults, total);
}
} | @Test
void findProjectBindingsByRequest_whenPageSize0_returnsOnlyTotal() {
when(dbClient.projectAlmSettingDao().countProjectAlmSettings(eq(dbSession), any()))
.thenReturn(12);
ProjectBindingsSearchRequest request = new ProjectBindingsSearchRequest(null, null, 42, 0);
SearchResults<ProjectBindingInformation> actualResults = underTest.findProjectBindingsByRequest(request);
assertThat(actualResults.total()).isEqualTo(12);
assertThat(actualResults.searchResults()).isEmpty();
verify(dbClient.projectAlmSettingDao(), never()).selectProjectAlmSettings(eq(dbSession), any(), anyInt(), anyInt());
} |
@Override
protected CompletableFuture<R> handleRequest(
@Nonnull HandlerRequest<EmptyRequestBody> request, @Nonnull RestfulGateway gateway)
throws RestHandlerException {
JobID jobId = request.getPathParameter(JobIDPathParameter.class);
try {
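// Serve the snapshot from the cache when present; on a miss the loader asks the gateway and the resulting future is cached.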
return checkpointStatsSnapshotCache
.get(jobId, () -> gateway.requestCheckpointStats(jobId, timeout))
.thenApplyAsync(
checkpointStatsSnapshot -> {
try {
return handleCheckpointStatsRequest(
request, checkpointStatsSnapshot);
} catch (RestHandlerException e) {
throw new CompletionException(e);
}
},
executor)
.exceptionally(
throwable -> {
throwable = ExceptionUtils.stripCompletionException(throwable);
if (throwable instanceof FlinkJobNotFoundException) {
throw new CompletionException(
new NotFoundException(
String.format("Job %s not found", jobId),
throwable));
} else {
throw new CompletionException(throwable);
}
});
} catch (ExecutionException e) {
CompletableFuture<R> future = new CompletableFuture<>();
future.completeExceptionally(e);
return future;
}
} | @Test
void testRetrieveSnapshotFromCache() throws Exception {
GatewayRetriever<RestfulGateway> leaderRetriever =
() -> CompletableFuture.completedFuture(null);
CheckpointingStatistics checkpointingStatistics = getTestCheckpointingStatistics();
CheckpointStatsSnapshot checkpointStatsSnapshot1 = getTestCheckpointStatsSnapshot();
// Create a passthrough cache so the latest object will always be returned
Cache<JobID, CompletableFuture<CheckpointStatsSnapshot>> cache =
CacheBuilder.newBuilder().build();
try (RecordingCheckpointStatsHandler checkpointStatsHandler =
new RecordingCheckpointStatsHandler(
leaderRetriever,
TIMEOUT,
Collections.emptyMap(),
CheckpointingStatisticsHeaders.getInstance(),
cache,
Executors.directExecutor(),
checkpointingStatistics)) {
RestfulGateway functioningRestfulGateway =
new TestingRestfulGateway.Builder()
.setRequestCheckpointStatsSnapshotFunction(
jobID ->
CompletableFuture.completedFuture(
checkpointStatsSnapshot1))
.build();
HandlerRequest<EmptyRequestBody> request =
HandlerRequest.resolveParametersAndCreate(
EmptyRequestBody.getInstance(),
new JobMessageParameters(),
Collections.singletonMap(JobIDPathParameter.KEY, JOB_ID.toString()),
Collections.emptyMap(),
Collections.emptyList());
assertThat(
checkpointStatsHandler
.handleRequest(request, functioningRestfulGateway)
.get())
.usingRecursiveComparison()
.isEqualTo(checkpointingStatistics);
assertThat(checkpointStatsHandler.getStoredCheckpointStats())
.isEqualTo(checkpointStatsSnapshot1);
// Refresh the checkpoint stats data
CheckpointStatsSnapshot checkpointStatsSnapshot2 = getTestCheckpointStatsSnapshot();
RestfulGateway refreshedRestfulGateway =
new TestingRestfulGateway.Builder()
.setRequestCheckpointStatsSnapshotFunction(
jobID ->
CompletableFuture.completedFuture(
checkpointStatsSnapshot2))
.build();
assertThat(checkpointStatsHandler.handleRequest(request, refreshedRestfulGateway).get())
.usingRecursiveComparison()
.isEqualTo(checkpointingStatistics);
assertThat(checkpointStatsHandler.getStoredCheckpointStats())
.isEqualTo(checkpointStatsSnapshot2);
}
} |
@NonNull
@Override
public FileName toProviderFileName( @NonNull ConnectionFileName pvfsFileName, @NonNull T details )
throws KettleException {
StringBuilder providerUriBuilder = new StringBuilder();
appendProviderUriConnectionRoot( providerUriBuilder, details );
// Examples:
// providerUriBuilder: "hcp://domain.my:443/root/path" | "local:///C:/root/path" | "s3://"
// getPath(): "/folder/sub-folder" | "/"
appendProviderUriRestPath( providerUriBuilder, pvfsFileName.getPath(), details );
// Examples: "hcp://domain.my:443/root/path/folder/sub-folder" | "s3://folder/sub-folder"
// Preserve file type information.
if ( pvfsFileName.getType().hasChildren() ) {
providerUriBuilder.append( SEPARATOR );
}
return parseUri( providerUriBuilder.toString() );
} | @Test
public void testToProviderFileNameHandlesFolders() throws Exception {
ConnectionFileName pvfsFileName = mockPvfsFileNameWithPath( "/rest/path/" );
FileName providerFileName = transformer.toProviderFileName( pvfsFileName, details1 );
assertEquals( "scheme1://rest/path", providerFileName.getURI() );
assertFalse( providerFileName.isFile() );
// Should do provider uri normalization.
verify( kettleVFS, times( 1 ) ).resolveURI( any() );
} |
public static CompositeData parseComposite(URI uri) throws URISyntaxException {
CompositeData rc = new CompositeData();
rc.scheme = uri.getScheme();
String ssp = stripPrefix(uri.getRawSchemeSpecificPart().trim(), "//").trim();
parseComposite(uri, rc, ssp);
rc.fragment = uri.getFragment();
return rc;
} | @Test
public void testEmptyCompositePath() throws Exception {
CompositeData data = URISupport.parseComposite(new URI("broker:()/localhost?persistent=false"));
assertEquals(0, data.getComponents().length);
} |
@Override
public boolean isOperational() {
if (nodeOperational) {
return true;
}
boolean flag = false;
try {
flag = checkOperational();
} catch (InterruptedException e) {
LOG.trace("Interrupted while checking ES node is operational", e);
Thread.currentThread().interrupt();
} finally {
if (flag) {
esConnector.stop();
nodeOperational = true;
}
}
return nodeOperational;
} | @Test
public void isOperational_should_return_true_if_Elasticsearch_is_GREEN() {
EsConnector esConnector = mock(EsConnector.class);
when(esConnector.getClusterHealthStatus()).thenReturn(Optional.of(ClusterHealthStatus.GREEN));
EsManagedProcess underTest = new EsManagedProcess(mock(Process.class), ProcessId.ELASTICSEARCH, esConnector, WAIT_FOR_UP_TIMEOUT);
assertThat(underTest.isOperational()).isTrue();
} |
@Override
public void set(File file, String view, String attribute, Object value, boolean create) {
if (attribute.equals("acl")) {
checkNotCreate(view, attribute, create);
file.setAttribute("acl", "acl", toAcl(checkType(view, attribute, value, List.class)));
}
} | @Test
public void testSet() {
assertSetAndGetSucceeds("acl", ImmutableList.of());
assertSetFailsOnCreate("acl", ImmutableList.of());
assertSetFails("acl", ImmutableSet.of());
assertSetFails("acl", ImmutableList.of("hello"));
} |
public String anonymize(final ParseTree tree) {
return build(tree);
} | @Test
public void shouldAnonymizeCreateStreamQueryCorrectly() {
final String output = anon.anonymize(
"CREATE STREAM my_stream (profileId VARCHAR, latitude DOUBLE, longitude DOUBLE)\n"
+ "WITH (kafka_topic='locations', value_format='json', partitions=1);");
Approvals.verify(output);
} |
public static Read read() {
return new AutoValue_HCatalogIO_Read.Builder()
.setDatabase(DEFAULT_DATABASE)
.setPartitionCols(new ArrayList<>())
.build();
} | @Test
@NeedsTestData
public void testReadFromSource() throws Exception {
ReaderContext context = getReaderContext(getConfigPropertiesAsMap(service.getHiveConf()));
HCatalogIO.Read spec =
HCatalogIO.read()
.withConfigProperties(getConfigPropertiesAsMap(service.getHiveConf()))
.withContext(context)
.withTable(TEST_TABLE);
List<String> records = new ArrayList<>();
for (int i = 0; i < context.numSplits(); i++) {
BoundedHCatalogSource source = new BoundedHCatalogSource(spec.withSplitId(i));
for (HCatRecord record : SourceTestUtils.readFromSource(source, OPTIONS)) {
records.add(record.get(0).toString());
}
}
assertThat(records, containsInAnyOrder(getExpectedRecords(TEST_RECORDS_COUNT).toArray()));
} |
public ReliableTopicConfig addMessageListenerConfig(ListenerConfig listenerConfig) {
checkNotNull(listenerConfig, "listenerConfig can't be null");
listenerConfigs.add(listenerConfig);
return this;
} | @Test(expected = NullPointerException.class)
public void addMessageListenerConfig_whenNull() {
ReliableTopicConfig config = new ReliableTopicConfig("foo");
config.addMessageListenerConfig(null);
} |
@Deprecated
public static boolean isEmpty( String val ) {
return Utils.isEmpty( val );
} | @Test
public void testIsEmptyObjectArray() {
assertTrue( Const.isEmpty( (Object[]) null ) );
assertTrue( Const.isEmpty( new Object[] {} ) );
assertFalse( Const.isEmpty( new Object[] { "test" } ) );
} |
@Override
public void collectAndOverwriteTimestamp(OUT record, long timestamp) {
setTimestamp(timestamp);
output.collect(reuse.replace(record));
} | @Test
void testCollectAndOverwriteTimestamp() {
List<StreamElement> list = new ArrayList<>();
CollectorOutput<Integer> collectorOutput = new CollectorOutput<>(list);
OutputCollector<Integer> collector = new OutputCollector<>(collectorOutput);
collector.collectAndOverwriteTimestamp(1, 10L);
collector.collectAndOverwriteTimestamp(2, 20L);
assertThat(list).containsExactly(new StreamRecord<>(1, 10L), new StreamRecord<>(2, 20L));
} |
@Override
public boolean isEmpty() {
return data.isEmpty();
} | @Test(dataProvider = "caches")
@CacheSpec(compute = Compute.SYNC, population = Population.EMPTY, maximumSize = Maximum.FULL)
public void drain_blocksOrderedMap(BoundedLocalCache<Int, Int> cache,
CacheContext context, Eviction<Int, Int> eviction) {
checkDrainBlocks(cache, () -> {
var results = eviction.coldest(((int) context.maximumSize()));
assertThat(results).isEmpty();
});
} |
public static String toJson(TableIdentifier identifier) {
return toJson(identifier, false);
} | @Test
public void testTableIdentifierToJson() {
String json = "{\"namespace\":[\"accounting\",\"tax\"],\"name\":\"paid\"}";
TableIdentifier identifier = TableIdentifier.of(Namespace.of("accounting", "tax"), "paid");
assertThat(TableIdentifierParser.toJson(identifier))
.as("Should be able to serialize a table identifier with both namespace and name")
.isEqualTo(json);
TableIdentifier identifierWithEmptyNamespace = TableIdentifier.of(Namespace.empty(), "paid");
String jsonWithEmptyNamespace = "{\"namespace\":[],\"name\":\"paid\"}";
assertThat(TableIdentifierParser.toJson(identifierWithEmptyNamespace))
.as("Should be able to serialize a table identifier that uses the empty namespace")
.isEqualTo(jsonWithEmptyNamespace);
} |
@Override
public AnalyticsPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) {
Capabilities capabilities = capabilities(descriptor.id());
PluggableInstanceSettings pluginSettingsAndView = getPluginSettingsAndView(descriptor, extension);
Image image = image(descriptor.id());
return new AnalyticsPluginInfo(descriptor, image, capabilities, pluginSettingsAndView);
} | @Test
public void shouldBuildPluginInfoWithCapabilities() {
GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build();
Capabilities capabilities = new Capabilities(Collections.emptyList());
when(extension.getCapabilities(descriptor.id())).thenReturn(capabilities);
AnalyticsPluginInfo pluginInfo = new AnalyticsPluginInfoBuilder(extension).pluginInfoFor(descriptor);
assertThat(pluginInfo.getCapabilities(), is(capabilities));
} |
@Override
public Map<String, Metric> getMetrics() {
final Map<String, Metric> gauges = new HashMap<>();
for (String pool : POOLS) {
for (int i = 0; i < ATTRIBUTES.length; i++) {
final String attribute = ATTRIBUTES[i];
final String name = NAMES[i];
try {
final ObjectName on = new ObjectName("java.nio:type=BufferPool,name=" + pool);
mBeanServer.getMBeanInfo(on);
gauges.put(name(pool, name), new JmxAttributeGauge(mBeanServer, on, attribute));
} catch (JMException ignored) {
LOGGER.debug("Unable to load buffer pool MBeans, possibly running on Java 6");
}
}
}
return Collections.unmodifiableMap(gauges);
} | @Test
public void includesAGaugeForMappedMemoryUsed() throws Exception {
final Gauge gauge = (Gauge) buffers.getMetrics().get("mapped.used");
when(mBeanServer.getAttribute(mapped, "MemoryUsed")).thenReturn(100);
assertThat(gauge.getValue())
.isEqualTo(100);
} |
@Override
public synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback) {
putConnectorConfig(connName, config, null, allowReplace, callback);
} | @Test
public void testCorruptConfig() {
initialize(false);
Map<String, String> config = new HashMap<>();
config.put(ConnectorConfig.NAME_CONFIG, CONNECTOR_NAME);
config.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, BogusSinkConnector.class.getName());
config.put(SinkConnectorConfig.TOPICS_CONFIG, TOPICS_LIST_STR);
Connector connectorMock = mock(SinkConnector.class);
String error = "This is an error in your config!";
List<String> errors = new ArrayList<>(singletonList(error));
String key = "foo.invalid.key";
when(connectorMock.validate(config)).thenReturn(
new Config(
singletonList(new ConfigValue(key, null, Collections.emptyList(), errors))
)
);
ConfigDef configDef = new ConfigDef();
configDef.define(key, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, "");
when(worker.configTransformer()).thenReturn(transformer);
final ArgumentCaptor<Map<String, String>> configCapture = ArgumentCaptor.forClass(Map.class);
when(transformer.transform(configCapture.capture())).thenAnswer(invocation -> configCapture.getValue());
when(worker.getPlugins()).thenReturn(plugins);
when(plugins.connectorLoader(anyString())).thenReturn(pluginLoader);
when(plugins.withClassLoader(pluginLoader)).thenReturn(loaderSwap);
when(plugins.newConnector(anyString())).thenReturn(connectorMock);
when(connectorMock.config()).thenReturn(configDef);
herder.putConnectorConfig(CONNECTOR_NAME, config, true, createCallback);
ExecutionException e = assertThrows(
ExecutionException.class,
() -> createCallback.get(WAIT_TIME_MS, TimeUnit.MILLISECONDS),
"Should have failed to configure connector"
);
assertNotNull(e.getCause());
Throwable cause = e.getCause();
assertInstanceOf(BadRequestException.class, cause);
assertEquals(
cause.getMessage(),
"Connector configuration is invalid and contains the following 1 error(s):\n" +
error + "\n" +
"You can also find the above list of errors at the endpoint `/connector-plugins/{connectorType}/config/validate`"
);
verify(loaderSwap).close();
} |
@Udf(description = "Returns a new string encoded using the outputEncoding ")
public String encode(
@UdfParameter(
description = "The source string. If null, then function returns null.") final String str,
@UdfParameter(
description = "The input encoding."
+ " If null, then function returns null.") final String inputEncoding,
@UdfParameter(
description = "The output encoding."
+ " If null, then function returns null.") final String outputEncoding) {
if (str == null || inputEncoding == null || outputEncoding == null) {
return null;
}
final String encodedString = inputEncoding.toLowerCase() + outputEncoding.toLowerCase();
final Encode.Encoder encoder = ENCODER_MAP.get(encodedString);
if (encoder == null) {
throw new KsqlFunctionException("Supported input and output encodings are: "
+ "hex, utf8, ascii and base64");
}
return encoder.apply(str);
} | @Test(expected = KsqlFunctionException.class)
public void shouldThrowIfUnsupportedEncodingTypes() {
udf.encode("4578616d706C6521", "hex", "hex");
udf.encode("Ελλάδα", "utf8", "utf8");
udf.encode("1 + 1 = 1", "ascii", "ascii");
udf.encode("w5xiZXJtZW5zY2g=", "base64", "base64");
} |
@Override
public int run(String[] args) throws Exception {
if (args.length != 2) {
return usage(args);
}
String action = args[0];
String name = args[1];
int result;
if (A_LOAD.equals(action)) {
result = loadClass(name);
} else if (A_CREATE.equals(action)) {
//first load to separate load errors from create
result = loadClass(name);
if (result == SUCCESS) {
//class loads, so instantiate it
result = createClassInstance(name);
}
} else if (A_RESOURCE.equals(action)) {
result = loadResource(name);
} else if (A_PRINTRESOURCE.equals(action)) {
result = dumpResource(name);
} else {
result = usage(args);
}
return result;
} | @Test
public void testCreateFailsInStaticInit() throws Throwable {
run(FindClass.E_LOAD_FAILED,
FindClass.A_CREATE,
"org.apache.hadoop.util.TestFindClass$FailInStaticInit");
} |
@Override
public void execute(Context context) {
if (analysisMetadataHolder.isPullRequest() && targetInputFactory.hasTargetBranchAnalysis()) {
int fixedIssuesCount = pullRequestFixedIssueRepository.getFixedIssues().size();
measureRepository.add(treeRootHolder.getRoot(), metricRepository.getByKey(CoreMetrics.PULL_REQUEST_FIXED_ISSUES_KEY),
Measure.newMeasureBuilder().create(fixedIssuesCount));
}
} | @Test
public void execute_whenComponentIsNotPullRequest_shouldNotCreateMeasure() {
when(analysisMetadataHolder.isPullRequest()).thenReturn(false);
underTest.execute(new TestComputationStepContext());
assertThat(measureRepository.getAddedRawMeasures(ROOT_REF)).isEmpty();
} |
@Override
public boolean exceedsRateLimitPerFrequency(Task task, TaskDef taskDef) {
int rateLimit =
taskDef == null ? task.getRateLimitPerFrequency() : taskDef.getRateLimitPerFrequency();
if (rateLimit <= 0) {
return false;
}
int bucketSize =
taskDef == null
? task.getRateLimitFrequencyInSeconds()
: taskDef.getRateLimitFrequencyInSeconds();
String taskName = task.getTaskDefName();
try {
return withRetryableQuery(
GET_RUNNING_TASK_COUNT_BY_NAME_STATEMENT,
statement -> {
statement.setString(1, taskName);
statement.setLong(2, System.currentTimeMillis() - 1000 * bucketSize);
},
result -> {
if (result.next()) {
int cnt = result.getInt(COUNT_COLUMN);
if (cnt > rateLimit) {
LOG.info(
"Got {} running instance for the task name {} in the past {} second exceeding a limit {}",
cnt,
taskName,
bucketSize,
rateLimit);
return true;
} else {
LOG.debug(
"Got {} running instance for the task name {} in the past {} second within a limit {}",
cnt,
taskName,
bucketSize,
rateLimit);
}
}
return false;
});
} catch (Exception e) {
LOG.warn("Failed checking rate limit for task {} due to {}", taskName, e.getMessage());
return true;
}
} | @Test
public void testExceedsRateLimitWhenNoRateLimitSet() {
TaskDef taskDef = createTaskDef(0, 0);
Task task = createRunningTestTask(TEST_TASK_ID_1);
assertFalse(dao.exceedsRateLimitPerFrequency(task, taskDef));
} |
public static Set<String> verifyTopologyOptimizationConfigs(final String config) {
final List<String> configs = Arrays.asList(config.split("\\s*,\\s*"));
final Set<String> verifiedConfigs = new HashSet<>();
// Reject "none" or "all" when combined with a list of individual optimizations
if (configs.contains(NO_OPTIMIZATION) || configs.contains(OPTIMIZE)) {
if (configs.size() > 1) {
throw new ConfigException("\"" + config + "\" is not a valid optimization config. " + CONFIG_ERROR_MSG);
}
}
for (final String conf: configs) {
if (!TOPOLOGY_OPTIMIZATION_CONFIGS.contains(conf)) {
throw new ConfigException("Unrecognized config. " + CONFIG_ERROR_MSG);
}
}
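// "all" expands to the full set of individual optimizations; "none" falls through to an empty result.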
if (configs.contains(OPTIMIZE)) {
verifiedConfigs.add(REUSE_KTABLE_SOURCE_TOPICS);
verifiedConfigs.add(MERGE_REPARTITION_TOPICS);
verifiedConfigs.add(SINGLE_STORE_SELF_JOIN);
} else if (!configs.contains(NO_OPTIMIZATION)) {
verifiedConfigs.addAll(configs);
}
return verifiedConfigs;
} | @Test
public void shouldNotEnableAnyOptimizationsWithNoOptimizationConfig() {
final Set<String> configs = StreamsConfig.verifyTopologyOptimizationConfigs(StreamsConfig.NO_OPTIMIZATION);
assertEquals(0, configs.size());
} |
@Override
public String getGroupKeyColumnName(int groupKeyColumnIndex) {
throw new AssertionError("No group key column name for aggregation results");
} | @Test(expectedExceptions = AssertionError.class)
public void testGetGroupKeyColumnName() {
_aggregationResultSetUnderTest.getGroupKeyColumnName(0);
} |
public static <T> T copyProperties(Object source, Class<T> tClass, String... ignoreProperties) {
if (null == source) {
return null;
}
T target = ReflectUtil.newInstanceIfPossible(tClass);
copyProperties(source, target, CopyOptions.create().setIgnoreProperties(ignoreProperties));
return target;
} | @Test
public void beanToBeanOverlayFieldTest() {
final SubPersonWithOverlayTransientField source = new SubPersonWithOverlayTransientField();
source.setName("zhangsan");
source.setAge(20);
source.setOpenid("1");
final SubPersonWithOverlayTransientField dest = new SubPersonWithOverlayTransientField();
BeanUtil.copyProperties(source, dest);
assertEquals(source.getName(), dest.getName());
assertEquals(source.getAge(), dest.getAge());
assertEquals(source.getOpenid(), dest.getOpenid());
} |
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
final CountDownLatch signal = new CountDownLatch(1);
final AtomicReference<BackgroundException> failure = new AtomicReference<>();
final ScheduledThreadPool scheduler = new ScheduledThreadPool(new LoggingUncaughtExceptionHandler() {
@Override
public void uncaughtException(final Thread t, final Throwable e) {
super.uncaughtException(t, e);
failure.set(new BackgroundException(e));
signal.countDown();
}
}, "deletebatch");
try {
final Map<Path, List<String>> containers = new HashMap<>();
for(Path f : files.keySet()) {
final Path container = containerService.getContainer(f);
if(containers.containsKey(container)) {
containers.get(container).add(containerService.getKey(f));
}
else {
final List<String> keys = new ArrayList<>();
keys.add(containerService.getKey(f));
containers.put(container, keys);
}
callback.delete(f);
}
for(Path container : containers.keySet()) {
final DbxUserFilesRequests requests = new DbxUserFilesRequests(session.getClient(container));
final DeleteBatchLaunch job = requests.deleteBatch(containers.get(container).stream().map(DeleteArg::new).collect(Collectors.toList()));
final ScheduledFuture<?> f = scheduler.repeat(() -> {
try {
// Poll status
final DeleteBatchJobStatus status = requests.deleteBatchCheck(job.getAsyncJobIdValue());
if(status.isComplete()) {
final List<DeleteBatchResultEntry> entries = status.getCompleteValue().getEntries();
for(DeleteBatchResultEntry entry : entries) {
if(entry.isFailure()) {
switch(entry.getFailureValue().tag()) {
case PATH_LOOKUP:
failure.set(new NotfoundException(entry.getFailureValue().toString()));
break;
default:
failure.set(new InteroperabilityException());
}
}
}
signal.countDown();
}
if(status.isFailed()) {
signal.countDown();
}
}
catch(DbxException e) {
failure.set(new DropboxExceptionMappingService().map(e));
signal.countDown();
}
}, new HostPreferences(session.getHost()).getLong("dropbox.delete.poll.interval.ms"), TimeUnit.MILLISECONDS);
while(!Uninterruptibles.awaitUninterruptibly(signal, Duration.ofSeconds(1))) {
try {
if(f.isDone()) {
Uninterruptibles.getUninterruptibly(f);
}
}
catch(ExecutionException e) {
for(Throwable cause : ExceptionUtils.getThrowableList(e)) {
Throwables.throwIfInstanceOf(cause, BackgroundException.class);
}
throw new DefaultExceptionMappingService().map(Throwables.getRootCause(e));
}
}
if(null != failure.get()) {
throw failure.get();
}
}
}
catch(DbxException e) {
throw new DropboxExceptionMappingService().map(e);
}
finally {
scheduler.shutdown();
}
} | @Test(expected = NotfoundException.class)
public void testDeleteNotFound() throws Exception {
final Path test = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
new DropboxBatchDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
PointList pointList = way.getTag("point_list", null);
if (pointList != null) {
if (pointList.isEmpty() || !pointList.is3D()) {
if (maxSlopeEnc != null)
maxSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
if (averageSlopeEnc != null)
averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
return;
}
// Calculate 2d distance, although pointList might be 3D.
// This calculation is a bit expensive and edge_distance is available already, but this would be in 3D
double distance2D = DistanceCalcEarth.calcDistance(pointList, false);
if (distance2D < MIN_LENGTH) {
if (averageSlopeEnc != null)
// the default (the minimum) of average_slope is negative, so we have to explicitly set it to 0
averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, 0);
return;
}
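// Average slope between the two tower nodes: total elevation change over the 2D distance.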
double towerNodeSlope = calcSlope(pointList.getEle(pointList.size() - 1) - pointList.getEle(0), distance2D);
if (Double.isNaN(towerNodeSlope))
throw new IllegalArgumentException("average_slope was NaN for OSM way ID " + way.getId());
if (averageSlopeEnc != null) {
if (towerNodeSlope >= 0)
averageSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, Math.min(towerNodeSlope, averageSlopeEnc.getMaxStorableDecimal()));
else
averageSlopeEnc.setDecimal(true, edgeId, edgeIntAccess, Math.min(Math.abs(towerNodeSlope), averageSlopeEnc.getMaxStorableDecimal()));
}
if (maxSlopeEnc != null) {
// max_slope is more error-prone as the shorter distances increase the fluctuation
// so apply some more filtering (here we use the average elevation delta of the previous two points)
double maxSlope = 0, prevDist = 0, prevLat = pointList.getLat(0), prevLon = pointList.getLon(0);
for (int i = 1; i < pointList.size(); i++) {
double pillarDistance2D = DistanceCalcEarth.DIST_EARTH.calcDist(prevLat, prevLon, pointList.getLat(i), pointList.getLon(i));
if (i > 1 && prevDist > MIN_LENGTH) {
double averagedPrevEle = (pointList.getEle(i - 1) + pointList.getEle(i - 2)) / 2;
double tmpSlope = calcSlope(pointList.getEle(i) - averagedPrevEle, pillarDistance2D + prevDist / 2);
maxSlope = Math.abs(tmpSlope) > Math.abs(maxSlope) ? tmpSlope : maxSlope;
}
prevDist = pillarDistance2D;
prevLat = pointList.getLat(i);
prevLon = pointList.getLon(i);
}
// For tunnels and bridges we cannot trust the pillar node elevation and ignore all changes.
// Probably we should somehow recalculate even the average_slope after elevation interpolation? See EdgeElevationInterpolator
if (way.hasTag("tunnel", "yes") || way.hasTag("bridge", "yes") || way.hasTag("highway", "steps"))
maxSlope = towerNodeSlope;
else
maxSlope = Math.abs(towerNodeSlope) > Math.abs(maxSlope) ? towerNodeSlope : maxSlope;
if (Double.isNaN(maxSlope))
throw new IllegalArgumentException("max_slope was NaN for OSM way ID " + way.getId());
double val = Math.max(maxSlope, maxSlopeEnc.getMinStorableDecimal());
maxSlopeEnc.setDecimal(false, edgeId, edgeIntAccess, Math.min(maxSlopeEnc.getMaxStorableDecimal(), val));
}
}
} | @Test
public void testMaxSlopeLargerThanMaxStorableDecimal() {
PointList pointList = new PointList(5, true);
pointList.add(47.7281561, 11.9993135, 1163.0);
pointList.add(47.7282782, 11.9991944, 1163.0);
pointList.add(47.7283135, 11.9991135, 1178.0);
ReaderWay way = new ReaderWay(1);
way.setTag("point_list", pointList);
ArrayEdgeIntAccess intAccess = new ArrayEdgeIntAccess(1);
DecimalEncodedValue averageEnc = AverageSlope.create();
DecimalEncodedValue maxEnc = MaxSlope.create();
new EncodingManager.Builder().add(averageEnc).add(maxEnc).build();
SlopeCalculator creator = new SlopeCalculator(maxEnc, averageEnc);
int edgeId = 0;
creator.handleWayTags(edgeId, intAccess, way, IntsRef.EMPTY);
assertEquals(31, maxEnc.getDecimal(false, edgeId, intAccess), 1e-3);
assertEquals(-31, averageEnc.getDecimal(true, edgeId, intAccess), 1e-3);
} |
static EndTransactionMarker deserializeValue(ControlRecordType type, ByteBuffer value) {
ensureTransactionMarkerControlType(type);
if (value.remaining() < CURRENT_END_TXN_MARKER_VALUE_SIZE)
throw new InvalidRecordException("Invalid value size found for end transaction marker. Must have " +
"at least " + CURRENT_END_TXN_MARKER_VALUE_SIZE + " bytes, but found only " + value.remaining());
short version = value.getShort(0);
if (version < 0)
throw new InvalidRecordException("Invalid version found for end transaction marker: " + version +
". May indicate data corruption");
if (version > CURRENT_END_TXN_MARKER_VERSION)
log.debug("Received end transaction marker value version {}. Parsing as version {}", version,
CURRENT_END_TXN_MARKER_VERSION);
int coordinatorEpoch = value.getInt(2);
return new EndTransactionMarker(type, coordinatorEpoch);
} | @Test
public void testNotEnoughBytes() {
assertThrows(InvalidRecordException.class,
() -> EndTransactionMarker.deserializeValue(ControlRecordType.COMMIT, ByteBuffer.wrap(new byte[0])));
} |
public boolean matchStage(StageConfigIdentifier stageIdentifier, StageEvent event) {
return this.event.include(event) && appliesTo(stageIdentifier.getPipelineName(), stageIdentifier.getStageName());
} | @Test
void anyStageShouldNotMatchWithinADifferentPipeline() {
NotificationFilter filter = new NotificationFilter("cruise", GoConstants.ANY_STAGE, StageEvent.Breaks, false);
assertThat(filter.matchStage(new StageConfigIdentifier("cruise2", "dev"), StageEvent.Breaks)).isFalse();
} |
@Override
public Object convertFromSourceToTargetDataType( int sourceValueMetaType, int targetValueMetaType, Object value )
throws ValueMetaConversionException {
if ( value == null ) {
return null;
}
switch ( sourceValueMetaType ) {
case ValueMetaInterface.TYPE_INET:
return convertFromInetMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_STRING:
return convertFromStringMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_INTEGER:
return convertFromIntegerMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_NUMBER:
return convertFromNumberMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_BIGNUMBER:
return convertFromBigNumberMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_TIMESTAMP:
return convertFromTimestampMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_DATE:
return convertFromDateMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_BOOLEAN:
return convertFromBooleanMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_BINARY:
return convertFromBinaryMetaInterface( targetValueMetaType, value );
case ValueMetaInterface.TYPE_SERIALIZABLE:
return convertFromSerializableMetaInterface( targetValueMetaType, value );
default:
throwBadConversionCombination( sourceValueMetaType, targetValueMetaType, value );
}
return null;
} | @Test
public void convertFromSourceToTargetDataTypeTest() throws Exception {
//"-", "Number", "String", "Date", "Boolean", "Integer", "BigNumber", "Serializable", "Binary", "Timestamp",
// "Internet Address", }
DateFormat dateFormat = new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss.SSS" );
Date date1 = ( dateFormat.parse( "1999/12/31 00:00:00.000" ) );
Date timeStamp1 = new Timestamp( dateFormat.parse( "2001/11/01 20:30:15.123" ).getTime() );
final String inetHost = "127.0.0.1";
InetAddress inetAddress1 = InetAddress.getByName( inetHost );
//All combination not listed here should generate ValueMetaConversionExceptions
// source type, destination type, source object, expected result
Object[][] tests = new Object[][] {
{ ValueMetaInterface.TYPE_NUMBER, ValueMetaInterface.TYPE_NONE, 1234.56d, null },
{ ValueMetaInterface.TYPE_NUMBER, ValueMetaInterface.TYPE_STRING, 1234.56d, "1234.56" },
{ ValueMetaInterface.TYPE_NUMBER, ValueMetaInterface.TYPE_NUMBER, 1234.56d, 1234.56d },
{ ValueMetaInterface.TYPE_NUMBER, ValueMetaInterface.TYPE_INTEGER, 1234.56d, 1234L },
{ ValueMetaInterface.TYPE_NUMBER, ValueMetaInterface.TYPE_BIGNUMBER, 1234.56d, new BigDecimal( 1234.56 ) },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_NONE, inetHost, null },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING, "foobar", "foobar" },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_INET, inetHost, inetAddress1 },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_INTEGER, "1234", 1234L },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_NUMBER, "1234.56", 1234.56 },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_BIGNUMBER, "123456789.123456789",
new BigDecimal( "123456789.123456789" ) },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_TIMESTAMP, "2001/11/01 20:30:15.123", timeStamp1 },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_DATE, "1999/12/31 00:00:00.000", date1 },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_BOOLEAN, "true", true },
{ ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_BINARY, "foobar", "foobar".getBytes() },
{ ValueMetaInterface.TYPE_DATE, ValueMetaInterface.TYPE_NONE, date1, null },
{ ValueMetaInterface.TYPE_DATE, ValueMetaInterface.TYPE_DATE, date1, date1 },
{ ValueMetaInterface.TYPE_DATE, ValueMetaInterface.TYPE_INTEGER, date1, date1.getTime() },
{ ValueMetaInterface.TYPE_DATE, ValueMetaInterface.TYPE_STRING, date1, "1999/12/31 00:00:00.000" },
{ ValueMetaInterface.TYPE_DATE, ValueMetaInterface.TYPE_TIMESTAMP, date1, new Timestamp( date1.getTime() ) },
{ ValueMetaInterface.TYPE_BOOLEAN, ValueMetaInterface.TYPE_NONE, true, null },
{ ValueMetaInterface.TYPE_BOOLEAN, ValueMetaInterface.TYPE_STRING, true, "true" },
{ ValueMetaInterface.TYPE_BOOLEAN, ValueMetaInterface.TYPE_BOOLEAN, true, true },
{ ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_NONE, 1234L, null },
{ ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_DATE, date1.getTime(), date1 },
{ ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_STRING, 1234L, "1234" },
{ ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_INTEGER, 1234L, 1234L },
{ ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_NUMBER, 1234L, 1234.0 },
{ ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_BIGNUMBER, 1234L, new BigDecimal( "1234" ) },
{ ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_TIMESTAMP, timeStamp1.getTime(), timeStamp1 },
{ ValueMetaInterface.TYPE_BIGNUMBER, ValueMetaInterface.TYPE_NONE, new BigDecimal( "123456.123456" ), null },
{ ValueMetaInterface.TYPE_BIGNUMBER, ValueMetaInterface.TYPE_STRING, new BigDecimal( "123456.123456" ),
"123456.123456" },
{ ValueMetaInterface.TYPE_BIGNUMBER, ValueMetaInterface.TYPE_NUMBER, new BigDecimal( "123456.123456" ),
123456.123456d },
{ ValueMetaInterface.TYPE_BIGNUMBER, ValueMetaInterface.TYPE_BIGNUMBER, new BigDecimal( "123456.123456" ),
new BigDecimal( "123456.123456" ) },
{ ValueMetaInterface.TYPE_SERIALIZABLE, ValueMetaInterface.TYPE_NONE, "foobar", null },
{ ValueMetaInterface.TYPE_SERIALIZABLE, ValueMetaInterface.TYPE_SERIALIZABLE, "foobar", "foobar" },
{ ValueMetaInterface.TYPE_BINARY, ValueMetaInterface.TYPE_NONE, "foobar".getBytes(), null },
{ ValueMetaInterface.TYPE_BINARY, ValueMetaInterface.TYPE_BINARY, "foobar".getBytes(), "foobar".getBytes() },
{ ValueMetaInterface.TYPE_TIMESTAMP, ValueMetaInterface.TYPE_NONE, timeStamp1, null },
{ ValueMetaInterface.TYPE_TIMESTAMP, ValueMetaInterface.TYPE_STRING, timeStamp1, "2001/11/01 20:30:15.123" },
{ ValueMetaInterface.TYPE_TIMESTAMP, ValueMetaInterface.TYPE_INTEGER, timeStamp1, timeStamp1.getTime() },
{ ValueMetaInterface.TYPE_TIMESTAMP, ValueMetaInterface.TYPE_TIMESTAMP, timeStamp1, timeStamp1 },
{ ValueMetaInterface.TYPE_TIMESTAMP, ValueMetaInterface.TYPE_DATE, timeStamp1, new Date( timeStamp1.getTime() ) },
{ ValueMetaInterface.TYPE_INET, ValueMetaInterface.TYPE_NONE, inetAddress1, null },
{ ValueMetaInterface.TYPE_INET, ValueMetaInterface.TYPE_STRING, inetAddress1, inetAddress1.getHostAddress() },
{ ValueMetaInterface.TYPE_INET, ValueMetaInterface.TYPE_INET, inetAddress1, inetAddress1 },
};
//Get the tests in a map so that they can be quickly referenced while testing all permutations
Map<String, Object[]> testMap = new HashMap<>();
for ( Object[] testSpec : tests ) {
testMap.put( getKey( (Integer) testSpec[ 0 ], (Integer) testSpec[ 1 ] ), testSpec );
}
ValueMetaConverter converter = new ValueMetaConverter();
for ( int sourceType = startSource; sourceType <= endSource; sourceType++ ) {
for ( int targetType = startTarget; targetType <= endTarget; targetType++ ) {
Object[] testSpec = testMap.get( getKey( sourceType, targetType ) );
if ( testSpec != null ) {
Object targetValue = converter.convertFromSourceToTargetDataType( sourceType, targetType, testSpec[ 2 ] );
if ( IS_VERBOSE ) {
System.out.println(
"type " + sourceType + "/" + targetType + ":" + testSpec[ 3 ].toString() + "=" + targetValue.toString() );
}
if ( targetType == ValueMetaInterface.TYPE_BINARY ) {
assertArrayEquals( (byte[]) testSpec[ 3 ], (byte[]) targetValue );
} else {
assertEquals( testSpec[ 3 ], targetValue );
}
} else {
// Attempt a non-defined conversion. Should throw an exception.
try {
//Get a source object of the correct type
testSpec = testMap.get( getKey( sourceType, ValueMetaInterface.TYPE_NONE ) );
if ( IS_VERBOSE ) {
System.out.println( "type " + sourceType + "/" + targetType + ":" + testSpec[ 2 ].toString() + " should throw Exception" );
}
converter.convertFromSourceToTargetDataType( sourceType, targetType, testSpec[ 2 ] );
fail( "Did not throw exception. Probably need to make a test entry for this combination." );
} catch ( ValueMetaConversionException e ) {
// We are expecting this exception. Any combination we are not testing should not be supported
if ( !e.getMessage().contains( "Error. Can not convert from" ) ) {
fail( "Got a diferent exception than what was expected" );
}
}
}
//Now Try and send a null, should always return null
assertNull( converter.convertFromSourceToTargetDataType( sourceType, targetType, null ) );
//Now Try to send in a source that is an invalid type - should always fail
try {
converter.convertFromSourceToTargetDataType( sourceType, targetType, new Object() );
} catch ( ValueMetaConversionException e ) {
// We are expecting this exception. Any combination we are not testing should not be supported
if ( !e.getMessage().contains( "Error. Expecting value of type" ) ) {
fail( "Got a diferent exception than what was expected" );
}
}
}
}
} |
@Override
public String getHeader(String name) {
// The browser JavaScript WebSocket client cannot add the auth param to the request header, so the
// query param `token` is used to transport the auth token for browser-based WebSocket clients.
if (name.equals(HTTP_HEADER_NAME)
&& !((UpgradeHttpServletRequest) this.getRequest()).getHeaders().containsKey(HTTP_HEADER_NAME)) {
String token = getRequest().getParameter(TOKEN);
if (token != null && !token.startsWith(HTTP_HEADER_VALUE_PREFIX)) {
return HTTP_HEADER_VALUE_PREFIX + token;
}
return token;
}
return super.getHeader(name);
} | @Test
public void testTokenParamWithBearerPrefix() {
UpgradeHttpServletRequest httpServletRequest = Mockito.mock(UpgradeHttpServletRequest.class);
Mockito.when(httpServletRequest.getParameter(WebSocketHttpServletRequestWrapper.TOKEN))
.thenReturn(BEARER_TOKEN);
WebSocketHttpServletRequestWrapper webSocketHttpServletRequestWrapper =
new WebSocketHttpServletRequestWrapper(httpServletRequest);
Assert.assertEquals(
webSocketHttpServletRequestWrapper.getHeader(WebSocketHttpServletRequestWrapper.HTTP_HEADER_NAME),
BEARER_TOKEN);
} |
public static JsonMapper validateJsonMapper(JsonMapper jsonMapper) {
try {
final String serializedJob = jsonMapper.serialize(getJobForTesting());
testTimeFields(serializedJob);
testUseFieldsNotMethods(serializedJob);
testUsePolymorphism(serializedJob);
testCanConvertBackToJob(jsonMapper, serializedJob);
return jsonMapper;
} catch (Exception e) {
throw new IllegalArgumentException("The JsonMapper you provided cannot be used as it deserializes jobs in an incorrect way.", e);
}
} | @Test
void testValidJsonBJsonMapper() {
assertThatCode(() -> validateJsonMapper(new JsonbJsonMapper())).doesNotThrowAnyException();
} |
@Override
public String command() {
return TYPE;
} | @Test
public void shouldShowCommandName() {
assertThat(new RakeTask().command(), is("rake"));
} |
public static void mergeMap(boolean decrypt, Map<String, Object> config) {
merge(decrypt, config);
} | @Test
public void testMap_valueCastToDouble() {
Map<String, Object> testMap = new HashMap<>();
testMap.put("key", "${TEST.double: 1.1}");
CentralizedManagement.mergeMap(true, testMap);
Assert.assertTrue(testMap.get("key") instanceof Double);
} |
public Analysis analyze(Statement statement)
{
return analyze(statement, false);
} | @Test
public void testGroupByWithQualifiedName3()
{
// TODO: verify output
analyze("SELECT * FROM t1 GROUP BY t1.a, t1.b, t1.c, t1.d");
} |
public static AsyncArchiveService startAsyncArchiveIfEnabled(BaseHoodieWriteClient writeClient) {
HoodieWriteConfig config = writeClient.getConfig();
if (!config.isAutoArchive() || !config.isAsyncArchive()) {
LOG.info("The HoodieWriteClient is not configured to auto & async archive. Async archive service will not start.");
return null;
}
AsyncArchiveService asyncArchiveService = new AsyncArchiveService(writeClient);
asyncArchiveService.start(null);
return asyncArchiveService;
} | @Test
void startAsyncArchiveReturnsNullWhenAutoArchiveDisabled() {
when(config.isAutoArchive()).thenReturn(false);
when(writeClient.getConfig()).thenReturn(config);
assertNull(AsyncArchiveService.startAsyncArchiveIfEnabled(writeClient));
} |
@Override
public RouteContext route(final RouteContext routeContext, final BroadcastRule broadcastRule) {
RouteMapper dataSourceMapper = getDataSourceRouteMapper(broadcastRule.getDataSourceNames());
routeContext.getRouteUnits().add(new RouteUnit(dataSourceMapper, createTableRouteMappers()));
return routeContext;
} | @Test
void assertRoute() {
SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class);
Collection<String> logicTables = Collections.singleton("t_address");
ConnectionContext connectionContext = mock(ConnectionContext.class);
BroadcastUnicastRoutingEngine engine = new BroadcastUnicastRoutingEngine(sqlStatementContext, logicTables, connectionContext);
RouteContext routeContext = engine.route(new RouteContext(), broadcastRule);
assertThat(routeContext.getRouteUnits().size(), is(1));
assertTableRouteMapper(routeContext);
} |
@Override
public boolean supportsNamedParameters() {
return false;
} | @Test
void assertSupportsNamedParameters() {
assertFalse(metaData.supportsNamedParameters());
} |
public Environment addAll(@NonNull Map<String, String> map) {
map.forEach((key, value) -> this.props.setProperty(key, value));
return this;
} | @Test
public void testAddAll() {
Environment environment = Environment.empty();
Properties prop = new Properties();
prop.setProperty("a", "1");
environment.addAll(prop);
assertEquals(1, environment.size());
Map<String, String> map = Collections.singletonMap("aa", "bb");
environment.addAll(map);
assertEquals(2, environment.size());
} |
ByteBuffer packDecimal() {
ByteBuffer buffer = ByteBuffer.allocate(type.getTypeSize());
buffer.order(ByteOrder.LITTLE_ENDIAN);
int scale = ((ScalarType) type).getScalarScale();
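// Multiply by 10^scale so the decimal's digits can be stored as a fixed-width two's-complement integer.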
BigDecimal scaledValue = value.multiply(SCALE_FACTOR[scale]);
switch (type.getPrimitiveType()) {
case DECIMAL32:
buffer.putInt(scaledValue.intValue());
break;
case DECIMAL64:
buffer.putLong(scaledValue.longValue());
break;
case DECIMAL128:
case DECIMALV2:
// BigInteger::toByteArray returns a big-endian byte[], so copy it in reverse order, one byte at a time.
byte[] bytes = scaledValue.toBigInteger().toByteArray();
for (int i = bytes.length - 1; i >= 0; --i) {
buffer.put(bytes[i]);
}
// pad with sign bits
byte prefixByte = scaledValue.signum() >= 0 ? (byte) 0 : (byte) 0xff;
int numPaddingBytes = 16 - bytes.length;
for (int i = 0; i < numPaddingBytes; ++i) {
buffer.put(prefixByte);
}
break;
default:
Preconditions.checkArgument(false, "Type bust be decimal type");
}
buffer.flip();
return buffer;
} | @Test
public void testPackDecimal() {
BigInteger[] bigIntegers = new BigInteger[] {
BigInteger.ZERO,
BigInteger.ONE,
BigInteger.ONE.shiftLeft(31).subtract(BigInteger.ONE),
BigInteger.ONE.shiftLeft(31).negate(),
BigInteger.ONE.shiftLeft(32).subtract(BigInteger.ONE),
BigInteger.ONE.shiftLeft(32).negate(),
BigInteger.ONE.shiftLeft(63).subtract(BigInteger.ONE),
BigInteger.ONE.shiftLeft(63).negate(),
BigInteger.ONE.shiftLeft(64).subtract(BigInteger.ONE),
BigInteger.ONE.shiftLeft(64).negate(),
BigInteger.ONE.shiftLeft(126).subtract(BigInteger.ONE),
BigInteger.ONE.shiftLeft(126).negate(),
};
for (BigInteger integer : bigIntegers) {
BigDecimal decimal = new BigDecimal(integer, 3);
DecimalLiteral decimalLiteral = new DecimalLiteral(decimal);
ByteBuffer packed = decimalLiteral.packDecimal();
int numBytes = packed.limit();
byte[] bytes = new byte[numBytes];
packed.get(bytes);
int i = 0, j = numBytes - 1;
while (i < j) {
byte tmp = bytes[j];
bytes[j] = bytes[i];
bytes[i] = tmp;
++i;
--j;
}
BigInteger expected = new BigInteger(bytes);
Assert.assertEquals(expected, integer);
}
} |
public List<CodegenColumnDO> buildColumns(Long tableId, List<TableField> tableFields) {
List<CodegenColumnDO> columns = CodegenConvert.INSTANCE.convertList(tableFields);
int index = 1;
for (CodegenColumnDO column : columns) {
column.setTableId(tableId);
column.setOrdinalPosition(index++);
// Special case: Byte => Integer
if (Byte.class.getSimpleName().equals(column.getJavaType())) {
column.setJavaType(Integer.class.getSimpleName());
}
// Initialize the column's default fields
processColumnOperation(column); // Fill in default values for the CRUD-related fields
processColumnUI(column); // Fill in default values for the UI-related fields
processColumnExample(column); // Fill in the column's swagger example value
}
return columns;
} | @Test
public void testBuildColumns() {
// Prepare arguments
Long tableId = randomLongId();
TableField tableField = mock(TableField.class);
List<TableField> tableFields = Collections.singletonList(tableField);
// Mock methods
TableField.MetaInfo metaInfo = mock(TableField.MetaInfo.class);
when(tableField.getMetaInfo()).thenReturn(metaInfo);
when(metaInfo.getJdbcType()).thenReturn(JdbcType.BIGINT);
when(tableField.getComment()).thenReturn("编号");
when(tableField.isKeyFlag()).thenReturn(true);
IColumnType columnType = mock(IColumnType.class);
when(tableField.getColumnType()).thenReturn(columnType);
when(columnType.getType()).thenReturn("Long");
when(tableField.getName()).thenReturn("id2");
when(tableField.getPropertyName()).thenReturn("id");
// Invoke
List<CodegenColumnDO> columns = codegenBuilder.buildColumns(tableId, tableFields);
// Assert
assertEquals(1, columns.size());
CodegenColumnDO column = columns.get(0);
assertEquals(tableId, column.getTableId());
assertEquals("id2", column.getColumnName());
assertEquals("BIGINT", column.getDataType());
assertEquals("编号", column.getColumnComment());
assertFalse(column.getNullable());
assertTrue(column.getPrimaryKey());
assertEquals(1, column.getOrdinalPosition());
assertEquals("Long", column.getJavaType());
assertEquals("id", column.getJavaField());
assertNull(column.getDictType());
assertNotNull(column.getExample());
assertFalse(column.getCreateOperation());
assertTrue(column.getUpdateOperation());
assertFalse(column.getListOperation());
assertEquals("=", column.getListOperationCondition());
assertTrue(column.getListOperationResult());
assertEquals("input", column.getHtmlType());
} |
public static byte[] toJsonBytes(Slime slime) throws IOException {
return toJsonBytes(slime.get());
} | @Test
public void test_slime_to_json() throws IOException {
Slime slime = new Slime();
Cursor root = slime.setObject();
root.setString("foo", "foobie");
root.setObject("bar");
String json = Utf8.toString(SlimeUtils.toJsonBytes(slime));
assertEquals("{\"foo\":\"foobie\",\"bar\":{}}", json);
} |
@Override
public boolean isEnabled() {
return branchFeatureExtension != null && branchFeatureExtension.isAvailable();
} | @Test
public void return_true_when_extension_returns_true() {
when(branchFeatureExtension.isAvailable()).thenReturn(true);
assertThat(new BranchFeatureProxyImpl(branchFeatureExtension).isEnabled()).isTrue();
} |
public static byte[] adjustParity(byte[] data) {
for (int i = 0; i < data.length; i++) {
// Mask with fe to get first 7 bits
int b = data[i] & 0xfe;
data[i] = (byte) (b | ((Integer.bitCount(b) & 1) ^ 1));
}
return data;
} | @Test
public void adjustParity() {
assertArrayEquals(Hex.decode("010102020404070708080b0b0d0d0e0e101013131515161619191a1a1c1c1f1f"),
CryptoUtils.adjustParity(
Hex.decode("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f")
)
);
assertArrayEquals(Hex.decode("202023232525262629292a2a2c2c2f2f313132323434373738383b3b3d3d3e3e"),
CryptoUtils.adjustParity(
Hex.decode("202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f")
)
);
assertArrayEquals(Hex.decode("404043434545464649494a4a4c4c4f4f515152525454575758585b5b5d5d5e5e"),
CryptoUtils.adjustParity(
Hex.decode("404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f")
)
);
assertArrayEquals(Hex.decode("616162626464676768686b6b6d6d6e6e707073737575767679797a7a7c7c7f7f"),
CryptoUtils.adjustParity(
Hex.decode("606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f")
)
);
assertArrayEquals(Hex.decode("808083838585868689898a8a8c8c8f8f919192929494979798989b9b9d9d9e9e"),
CryptoUtils.adjustParity(
Hex.decode("808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9f")
)
);
assertArrayEquals(Hex.decode("a1a1a2a2a4a4a7a7a8a8ababadadaeaeb0b0b3b3b5b5b6b6b9b9bababcbcbfbf"),
CryptoUtils.adjustParity(
Hex.decode("a0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebf")
)
);
assertArrayEquals(Hex.decode("c1c1c2c2c4c4c7c7c8c8cbcbcdcdceced0d0d3d3d5d5d6d6d9d9dadadcdcdfdf"),
CryptoUtils.adjustParity(
Hex.decode("c0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedf")
)
);
assertArrayEquals(Hex.decode("e0e0e3e3e5e5e6e6e9e9eaeaececefeff1f1f2f2f4f4f7f7f8f8fbfbfdfdfefe"),
CryptoUtils.adjustParity(
Hex.decode("e0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff")
)
);
} |
@Override
public boolean supportsStatementPooling() {
return false;
} | @Test
void assertSupportsStatementPooling() {
assertFalse(metaData.supportsStatementPooling());
} |
@Override
public void checkBeforeUpdate(final CreateEncryptRuleStatement sqlStatement) {
if (!sqlStatement.isIfNotExists()) {
checkDuplicateRuleNames(sqlStatement);
}
checkColumnNames(sqlStatement);
checkAlgorithmTypes(sqlStatement);
checkToBeCreatedEncryptors(sqlStatement);
checkDataSources();
} | @Test
void assertCreateAESEncryptRuleWithPropertiesNotExists() {
CreateEncryptRuleStatement sqlStatement = createWrongAESEncryptorSQLStatement();
EncryptRule rule = mock(EncryptRule.class);
when(rule.getConfiguration()).thenReturn(getCurrentRuleConfiguration());
executor.setRule(rule);
assertThrows(AlgorithmInitializationException.class, () -> executor.checkBeforeUpdate(sqlStatement));
} |
public static String buildGlueExpression(Map<Column, Domain> partitionPredicates)
{
List<String> perColumnExpressions = new ArrayList<>();
int expressionLength = 0;
for (Map.Entry<Column, Domain> partitionPredicate : partitionPredicates.entrySet()) {
String columnName = partitionPredicate.getKey().getName();
if (JSQL_PARSER_RESERVED_KEYWORDS.contains(columnName.toUpperCase(ENGLISH))) {
// The column name is a reserved keyword in the grammar of the SQL parser used internally by Glue API
continue;
}
Domain domain = partitionPredicate.getValue();
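// Skip unconstrained columns: an ALL domain matches every partition and adds nothing to the filter.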
if (domain != null && !domain.isAll()) {
Optional<String> columnExpression = buildGlueExpressionForSingleDomain(columnName, domain);
if (columnExpression.isPresent()) {
int newExpressionLength = expressionLength + columnExpression.get().length();
if (expressionLength > 0) {
newExpressionLength += CONJUNCT_SEPARATOR.length();
}
if (newExpressionLength > GLUE_EXPRESSION_CHAR_LIMIT) {
continue;
}
perColumnExpressions.add((columnExpression.get()));
expressionLength = newExpressionLength;
}
}
}
return Joiner.on(CONJUNCT_SEPARATOR).join(perColumnExpressions);
} | @Test
public void testBuildGlueExpressionMaxLengthOneColumn()
{
Map<Column, Domain> predicates = new PartitionFilterBuilder(HIVE_TYPE_TRANSLATOR)
.addStringValues("col1", Strings.repeat("x", GLUE_EXPRESSION_CHAR_LIMIT))
.addStringValues("col2", Strings.repeat("x", 5))
.build();
String expression = buildGlueExpression(predicates);
assertEquals(expression, "((col2 = 'xxxxx'))");
} |
public Object execute(ProceedingJoinPoint proceedingJoinPoint, Method method, String fallbackMethodValue, CheckedSupplier<Object> primaryFunction) throws Throwable {
String fallbackMethodName = spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue);
FallbackMethod fallbackMethod = null;
if (StringUtils.hasLength(fallbackMethodName)) {
try {
fallbackMethod = FallbackMethod
.create(fallbackMethodName, method, proceedingJoinPoint.getArgs(), proceedingJoinPoint.getTarget(), proceedingJoinPoint.getThis());
} catch (NoSuchMethodException ex) {
logger.warn("No fallback method match found", ex);
}
}
if (fallbackMethod == null) {
return primaryFunction.get();
} else {
return fallbackDecorators.decorate(fallbackMethod, primaryFunction).get();
}
} | @Test
public void testPrimaryMethodExecutionWithFallback() throws Throwable {
Method method = this.getClass().getMethod("getName", String.class);
final CheckedSupplier<Object> primaryFunction = () -> getName("Name");
final String fallbackMethodValue = "getNameValidFallback";
when(proceedingJoinPoint.getArgs()).thenReturn(new Object[]{});
when(proceedingJoinPoint.getTarget()).thenReturn(this);
when(spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue)).thenReturn(fallbackMethodValue);
when(fallbackDecorators.decorate(any(),eq(primaryFunction))).thenReturn(primaryFunction);
final Object result = fallbackExecutor.execute(proceedingJoinPoint, method, fallbackMethodValue, primaryFunction);
assertThat(result).isEqualTo("Name");
verify(spelResolver, times(1)).resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue);
verify(fallbackDecorators, times(1)).decorate(any(),eq(primaryFunction));
} |
public static Map<TopicPartition, Long> parseSinkConnectorOffsets(Map<Map<String, ?>, Map<String, ?>> partitionOffsets) {
Map<TopicPartition, Long> parsedOffsetMap = new HashMap<>();
for (Map.Entry<Map<String, ?>, Map<String, ?>> partitionOffset : partitionOffsets.entrySet()) {
Map<String, ?> partitionMap = partitionOffset.getKey();
if (partitionMap == null) {
throw new BadRequestException("The partition for a sink connector offset cannot be null or missing");
}
if (!partitionMap.containsKey(KAFKA_TOPIC_KEY) || !partitionMap.containsKey(KAFKA_PARTITION_KEY)) {
throw new BadRequestException(String.format("The partition for a sink connector offset must contain the keys '%s' and '%s'",
KAFKA_TOPIC_KEY, KAFKA_PARTITION_KEY));
}
if (partitionMap.get(KAFKA_TOPIC_KEY) == null) {
throw new BadRequestException("Kafka topic names must be valid strings and may not be null");
}
if (partitionMap.get(KAFKA_PARTITION_KEY) == null) {
throw new BadRequestException("Kafka partitions must be valid numbers and may not be null");
}
String topic = String.valueOf(partitionMap.get(KAFKA_TOPIC_KEY));
int partition;
try {
// We parse it this way because both "10" and 10 should be accepted as valid partition values in the REST API's
// JSON request payload. If it throws an exception, we should propagate it since it's indicative of a badly formatted value.
partition = Integer.parseInt(String.valueOf(partitionMap.get(KAFKA_PARTITION_KEY)));
} catch (Exception e) {
throw new BadRequestException("Failed to parse the following Kafka partition value in the provided offsets: '" +
partitionMap.get(KAFKA_PARTITION_KEY) + "'. Partition values for sink connectors need " +
"to be integers.", e);
}
TopicPartition tp = new TopicPartition(topic, partition);
Map<String, ?> offsetMap = partitionOffset.getValue();
if (offsetMap == null) {
// represents an offset reset
parsedOffsetMap.put(tp, null);
} else {
if (!offsetMap.containsKey(KAFKA_OFFSET_KEY)) {
throw new BadRequestException(String.format("The offset for a sink connector should either be null or contain " +
"the key '%s'", KAFKA_OFFSET_KEY));
}
long offset;
try {
// We parse it this way because both "1000" and 1000 should be accepted as valid offset values in the REST API's
// JSON request payload. If it throws an exception, we should propagate it since it's indicative of a badly formatted value.
offset = Long.parseLong(String.valueOf(offsetMap.get(KAFKA_OFFSET_KEY)));
} catch (Exception e) {
throw new BadRequestException("Failed to parse the following Kafka offset value in the provided offsets: '" +
offsetMap.get(KAFKA_OFFSET_KEY) + "'. Offset values for sink connectors need " +
"to be integers.", e);
}
parsedOffsetMap.put(tp, offset);
}
}
return parsedOffsetMap;
} | @Test
public void testValidateAndParseIntegerPartitionValue() {
Map<Map<String, ?>, Map<String, ?>> partitionOffsets = createPartitionOffsetMap("topic", 10, "100");
Map<TopicPartition, Long> parsedOffsets = SinkUtils.parseSinkConnectorOffsets(partitionOffsets);
assertEquals(1, parsedOffsets.size());
TopicPartition tp = parsedOffsets.keySet().iterator().next();
assertEquals(10, tp.partition());
} |
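Because both the partition and the offset are funneled through String.valueOf before Integer.parseInt/Long.parseLong, the REST payload may spell them as JSON numbers or as strings interchangeably. A minimal sketch of a payload map exercising both spellings; the literal key names below are assumptions standing in for KAFKA_TOPIC_KEY, KAFKA_PARTITION_KEY and KAFKA_OFFSET_KEY:

import java.util.HashMap;
import java.util.Map;

class OffsetPayloadSketch {
    public static void main(String[] args) {
        // Hypothetical payload: the partition value as a string, the offset value as a number.
        Map<String, Object> partition = new HashMap<>();
        partition.put("kafka_topic", "topic");   // assumed literal behind KAFKA_TOPIC_KEY
        partition.put("kafka_partition", "10");  // string form; the number 10 would parse identically
        Map<String, Object> offset = new HashMap<>();
        offset.put("kafka_offset", 100L);        // numeric form; the string "100" would parse identically
        Map<Map<String, ?>, Map<String, ?>> payload = new HashMap<>();
        payload.put(partition, offset);
        // SinkUtils.parseSinkConnectorOffsets(payload) would yield {topic-10=100};
        // passing a null offset map instead would mark topic-10 for an offset reset.
    }
}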
@Operation(summary= "Abort current session from digid")
@PostMapping(value = "abort", consumes = "application/json", produces = "application/json")
public Map<String, String> abort(@Valid @RequestBody AppRequest request) {
RdaSession session = findSession(request, null);
if (session != null) {
session.setStatus(Status.ABORTED);
sessionRepo.save(session);
} else {
logger.info("Session not found");
}
// Result OK
return ImmutableMap.of("status", "OK");
} | @Test
public void testAbortRestService() {
AppRequest appRequest = new AppRequest();
appRequest.setSessionId("sessionId");
RdaSession session = new RdaSession();
session.setId("sessionId");
mockSession(session);
Map<String, String> responseData = controller.abort(appRequest);
assertEquals("OK", responseData.get("status"));
Mockito.verify(sessionRepo, Mockito.times(1)).save(Mockito.isA(RdaSession.class));
} |
@Override
public SearchVersion convertFrom(String value) {
try {
// only major version - we know it's elasticsearch
final int majorVersion = Integer.parseInt(value);
return SearchVersion.elasticsearch(majorVersion, 0, 0);
} catch (NumberFormatException nfe) {
// It's probably a distribution:version format
// caution, this format assumes full version X.Y.Z, not just major number
return SearchVersion.decode(value);
}
} | @Test
void convertEncodedValue() {
final SearchVersion version = converter.convertFrom("OPENSEARCH:1.2.0");
assertThat(version).isEqualTo(SearchVersion.create(SearchVersion.Distribution.OPENSEARCH, Version.parse("1.2.0")));
} |
@InvokeOnHeader(Web3jConstants.DB_GET_STRING)
void dbGetString(Message message) throws IOException {
String databaseName = message.getHeader(Web3jConstants.DATABASE_NAME, configuration::getDatabaseName, String.class);
String keyName = message.getHeader(Web3jConstants.KEY_NAME, configuration::getKeyName, String.class);
Request<?, DbGetString> request = web3j.dbGetString(databaseName, keyName);
setRequestId(message, request);
DbGetString response = request.send();
boolean hasError = checkForError(message, response);
if (!hasError) {
message.setBody(response.getStoredValue());
}
} | @Test
public void dbGetStringTest() throws Exception {
DbGetString response = Mockito.mock(DbGetString.class);
Mockito.when(mockWeb3j.dbGetString(any(), any())).thenReturn(request);
Mockito.when(request.send()).thenReturn(response);
Mockito.when(response.getStoredValue()).thenReturn("test");
Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.DB_GET_STRING);
template.send(exchange);
String body = exchange.getIn().getBody(String.class);
assertEquals("test", body);
} |
public void and(BitmapValue other) {
switch (other.bitmapType) {
case EMPTY:
clear();
break;
case SINGLE_VALUE:
switch (this.bitmapType) {
case EMPTY:
break;
case SINGLE_VALUE:
if (this.singleValue != other.singleValue) {
clear();
}
break;
case BITMAP_VALUE:
if (!this.bitmap.contains(other.singleValue)) {
clear();
} else {
clear();
this.singleValue = other.singleValue;
this.bitmapType = SINGLE_VALUE;
}
break;
case SET_VALUE:
if (!this.set.contains(other.singleValue)) {
clear();
} else {
clear();
this.singleValue = other.singleValue;
this.bitmapType = SINGLE_VALUE;
}
break;
}
break;
case BITMAP_VALUE:
switch (this.bitmapType) {
case EMPTY:
break;
case SINGLE_VALUE:
if (!other.bitmap.contains(this.singleValue)) {
clear();
}
break;
case BITMAP_VALUE:
this.bitmap.and(other.bitmap);
convertBitmapToSmallerType();
break;
case SET_VALUE:
Set<Long> newSet = new HashSet<>();
for (Long v : set) {
if (other.bitmap.contains(v)) {
newSet.add(v);
}
}
set = newSet;
break;
}
break;
case SET_VALUE:
switch (this.bitmapType) {
case EMPTY:
break;
case SINGLE_VALUE:
if (!other.set.contains(this.singleValue)) {
clear();
}
break;
case BITMAP_VALUE: {
Set<Long> newSet = new HashSet<>();
for (Long v : other.set) {
if (this.bitmap.contains(v)) {
newSet.add(v);
}
}
set = newSet;
bitmapType = SET_VALUE;
break;
}
case SET_VALUE: {
Set<Long> newSet = new HashSet<>();
for (Long v : other.set) {
if (this.set.contains(v)) {
newSet.add(v);
}
}
set = newSet;
break;
}
}
break;
}
} | @Test
public void testBitmapValueAnd() throws IOException {
// empty and empty
BitmapValue bitmap = new BitmapValue(emptyBitmap);
bitmap.and(emptyBitmap);
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// empty and single
bitmap = new BitmapValue(emptyBitmap);
bitmap.and(singleBitmap);
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// empty and set
bitmap = new BitmapValue(emptyBitmap);
bitmap.and(mediumBitmap);
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// empty and bitmap
bitmap = new BitmapValue(emptyBitmap);
bitmap.and(largeBitmap);
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// single and empty
bitmap = new BitmapValue(singleBitmap);
bitmap.and(emptyBitmap);
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// single and single (not equal)
bitmap = new BitmapValue(singleBitmap);
bitmap.and(new BitmapValue(2));
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// single and single (equal)
bitmap = new BitmapValue(singleBitmap);
bitmap.and(new BitmapValue(1));
checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
// single and bitmap (not contains)
bitmap = new BitmapValue(singleBitmap);
bitmap.and(new BitmapValue(2, 4));
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// single and bitmap (contains)
bitmap = new BitmapValue(singleBitmap);
bitmap.and(largeBitmap);
checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
// single and set (not contains)
bitmap = new BitmapValue(singleBitmap);
bitmap.and(new BitmapValue(100, 101));
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// single and set (contains)
bitmap = new BitmapValue(singleBitmap);
bitmap.and(mediumBitmap);
checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
// bitmap and empty
bitmap = new BitmapValue(largeBitmap);
bitmap.and(emptyBitmap);
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// bitmap and single (contains)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(singleBitmap);
checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
// bitmap and single (not contains)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(new BitmapValue(1000));
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// bitmap and bitmap (-> bitmap)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(new BitmapValue(20, 60));
checkBitmap(bitmap, BitmapValue.BITMAP_VALUE, 20, 40);
// bitmap and bitmap (-> empty)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(new BitmapValue(100, 180));
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// bitmap and bitmap (-> single)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(new BitmapValue(39, 100));
checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 39, 40);
// bitmap and set (->set)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(mediumBitmap);
checkBitmap(bitmap, BitmapValue.SET_VALUE, 0, 10);
// bitmap and set (->empty)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(new BitmapValue(100, 120));
checkBitmap(bitmap, BitmapValue.SET_VALUE, 0, 0);
// bitmap and set (->single)
bitmap = new BitmapValue(largeBitmap);
bitmap.and(new BitmapValue(30, 50));
checkBitmap(bitmap, BitmapValue.SET_VALUE, 30, 40);
// set and empty
bitmap = new BitmapValue(mediumBitmap);
bitmap.and(emptyBitmap);
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// set and single (contains)
bitmap = new BitmapValue(mediumBitmap);
bitmap.and(singleBitmap);
checkBitmap(bitmap, BitmapValue.SINGLE_VALUE, 1, 2);
// set and single (not contains)
bitmap = new BitmapValue(mediumBitmap);
bitmap.and(new BitmapValue(100));
checkBitmap(bitmap, BitmapValue.EMPTY, 0, 0);
// set and set
bitmap = new BitmapValue(mediumBitmap);
bitmap.and(new BitmapValue(5, 20));
checkBitmap(bitmap, BitmapValue.SET_VALUE, 5, 10);
} |
public static Options options() {
return new Options("/tmp", 100, SorterType.HADOOP);
} | @Test
public void testZeroMemory() {
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("memoryMB must be greater than zero");
BufferedExternalSorter.Options options = BufferedExternalSorter.options();
options.withMemoryMB(0);
} |
@Override
public void handleFetchError(String key, Exception e) {
log.error("Failed retrieving rate for " + key + ", will create new rate", e);
} | @Test
public void testHandleFetchErrorShouldNotThrowException() {
target.handleFetchError("key", new Exception());
} |
public static InetSocketAddress createUnresolved(String hostname, int port) {
return createInetSocketAddress(hostname, port, false);
} | @Test
void createUnresolvedBadValues() {
assertThatExceptionOfType(NullPointerException.class)
.isThrownBy(() -> AddressUtils.createUnresolved(null, 0))
.withMessage("hostname");
assertThatExceptionOfType(IllegalArgumentException.class)
.isThrownBy(() -> AddressUtils.createUnresolved("hostname", -1))
.withMessage("port out of range:-1");
} |
public void loadFromSnapshot(ConcurrentMap<String, IpPortBasedClient> clients) {
ConcurrentMap<String, IpPortBasedClient> oldClients = this.clients;
this.clients = clients;
oldClients.clear();
} | @Test
void testLoadFromSnapshot() {
ConcurrentMap<String, IpPortBasedClient> snapshotClients = new ConcurrentHashMap<>();
snapshotClients.put(snapshotClientId, snapshotClient);
persistentIpPortClientManager.loadFromSnapshot(snapshotClients);
Collection<String> allClientIds = persistentIpPortClientManager.allClientId();
assertEquals(1, allClientIds.size());
assertTrue(allClientIds.contains(snapshotClientId));
} |
public ValidationResult validate(final Map<String, InternalTopicConfig> topicConfigs) {
log.info("Starting to validate internal topics {}.", topicConfigs.keySet());
final long now = time.milliseconds();
final long deadline = now + retryTimeoutMs;
final ValidationResult validationResult = new ValidationResult();
final Set<String> topicDescriptionsStillToValidate = new HashSet<>(topicConfigs.keySet());
final Set<String> topicConfigsStillToValidate = new HashSet<>(topicConfigs.keySet());
while (!topicDescriptionsStillToValidate.isEmpty() || !topicConfigsStillToValidate.isEmpty()) {
Map<String, KafkaFuture<TopicDescription>> descriptionsForTopic = Collections.emptyMap();
if (!topicDescriptionsStillToValidate.isEmpty()) {
final DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(topicDescriptionsStillToValidate);
descriptionsForTopic = describeTopicsResult.topicNameValues();
}
Map<String, KafkaFuture<Config>> configsForTopic = Collections.emptyMap();
if (!topicConfigsStillToValidate.isEmpty()) {
final DescribeConfigsResult describeConfigsResult = adminClient.describeConfigs(
topicConfigsStillToValidate.stream()
.map(topic -> new ConfigResource(Type.TOPIC, topic))
.collect(Collectors.toSet())
);
configsForTopic = describeConfigsResult.values().entrySet().stream()
.collect(Collectors.toMap(entry -> entry.getKey().name(), Map.Entry::getValue));
}
while (!descriptionsForTopic.isEmpty() || !configsForTopic.isEmpty()) {
if (!descriptionsForTopic.isEmpty()) {
doValidateTopic(
validationResult,
descriptionsForTopic,
topicConfigs,
topicDescriptionsStillToValidate,
(streamsSide, brokerSide) -> validatePartitionCount(validationResult, streamsSide, brokerSide)
);
}
if (!configsForTopic.isEmpty()) {
doValidateTopic(
validationResult,
configsForTopic,
topicConfigs,
topicConfigsStillToValidate,
(streamsSide, brokerSide) -> validateCleanupPolicy(validationResult, streamsSide, brokerSide)
);
}
maybeThrowTimeoutException(
Arrays.asList(topicDescriptionsStillToValidate, topicConfigsStillToValidate),
deadline,
String.format("Could not validate internal topics within %d milliseconds. " +
"This can happen if the Kafka cluster is temporarily not available.", retryTimeoutMs)
);
if (!descriptionsForTopic.isEmpty() || !configsForTopic.isEmpty()) {
Utils.sleep(100);
}
}
maybeSleep(
Arrays.asList(topicDescriptionsStillToValidate, topicConfigsStillToValidate),
deadline,
"validated"
);
}
log.info("Completed validation of internal topics {}.", topicConfigs.keySet());
return validationResult;
} | @Test
public void shouldOnlyRetryDescribeConfigsWhenDescribeConfigsThrowsLeaderNotAvailableExceptionDuringValidation() {
final AdminClient admin = mock(AdminClient.class);
final InternalTopicManager topicManager = new InternalTopicManager(
time,
admin,
new StreamsConfig(config)
);
final KafkaFutureImpl<TopicDescription> topicDescriptionSuccessfulFuture = new KafkaFutureImpl<>();
topicDescriptionSuccessfulFuture.complete(new TopicDescription(
topic1,
false,
Collections.singletonList(new TopicPartitionInfo(0, broker1, cluster, Collections.emptyList()))
));
when(admin.describeTopics(Collections.singleton(topic1)))
.thenAnswer(answer -> new MockDescribeTopicsResult(mkMap(mkEntry(topic1, topicDescriptionSuccessfulFuture))));
final KafkaFutureImpl<Config> topicConfigsFailFuture = new KafkaFutureImpl<>();
topicConfigsFailFuture.completeExceptionally(new LeaderNotAvailableException("Leader Not Available!"));
final KafkaFutureImpl<Config> topicConfigSuccessfulFuture = new KafkaFutureImpl<>();
topicConfigSuccessfulFuture.complete(
new Config(repartitionTopicConfig().entrySet().stream()
.map(entry -> new ConfigEntry(entry.getKey(), entry.getValue())).collect(Collectors.toSet()))
);
final ConfigResource topicResource = new ConfigResource(Type.TOPIC, topic1);
when(admin.describeConfigs(Collections.singleton(topicResource)))
.thenAnswer(answer -> new MockDescribeConfigsResult(mkMap(mkEntry(topicResource, topicConfigsFailFuture))))
.thenAnswer(answer -> new MockDescribeConfigsResult(mkMap(mkEntry(topicResource, topicConfigSuccessfulFuture))));
final InternalTopicConfig internalTopicConfig = setupRepartitionTopicConfig(topic1, 1);
final ValidationResult validationResult = topicManager.validate(Collections.singletonMap(topic1, internalTopicConfig));
assertThat(validationResult.missingTopics(), empty());
assertThat(validationResult.misconfigurationsForTopics(), anEmptyMap());
} |
@Config("resource-groups.config-file")
public FileResourceGroupConfig setConfigFile(String configFile)
{
this.configFile = configFile;
return this;
} | @Test
public void testDefaults()
{
assertRecordedDefaults(ConfigAssertions.recordDefaults(FileResourceGroupConfig.class)
.setConfigFile(null));
} |
protected static SimpleDateFormat getLog4j2Appender() {
Optional<Appender> log4j2xmlAppender =
configuration.getAppenders().values().stream()
.filter( a -> a.getName().equalsIgnoreCase( log4J2Appender ) ).findFirst();
if ( log4j2xmlAppender.isPresent() ) {
ArrayList<String> matchesArray = new ArrayList<>();
String dateFormatFromLog4j2xml = log4j2xmlAppender.get().getLayout().getContentFormat().get( "format" );
Pattern pattern = Pattern.compile( "(\\{(.*?)})" );
Matcher matcher = pattern.matcher( dateFormatFromLog4j2xml );
while ( matcher.find() ) {
matchesArray.add( matcher.group( 2 ) );
}
if ( !matchesArray.isEmpty() ) {
return processMatches( matchesArray );
}
}
return new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss" );
} | @Test
public void testGetLog4j2UsingAppender11() {
// Testing adding TimeZone GMT-5
KettleLogLayout.log4J2Appender = "pdi-execution-appender-test-11";
Assert.assertEquals( "HH:mm:ss",
KettleLogLayout.getLog4j2Appender().toPattern() );
} |
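The regex (\{(.*?)}) above captures every brace-delimited group out of the appender layout's format string; that is how a date pattern and an optional time zone are recovered from a %d{...}{...} conversion before processMatches turns them into a SimpleDateFormat. A standalone sketch of just the extraction step, using an illustrative layout string:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

class Log4j2PatternSketch {
    public static void main(String[] args) {
        String layoutFormat = "%d{HH:mm:ss}{GMT-5} %-5level %msg%n"; // illustrative format, not from the source
        Matcher matcher = Pattern.compile("(\\{(.*?)})").matcher(layoutFormat);
        while (matcher.find()) {
            System.out.println(matcher.group(2)); // prints "HH:mm:ss", then "GMT-5"
        }
    }
}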
@Override
public String getSerializableForm(boolean includeConfidence) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < names.length; i++) {
builder.append(names[i]);
builder.append('=');
builder.append(values[i]);
if (includeConfidence && !Double.isNaN(variances[i])) {
builder.append('\u00B1');
builder.append(variances[i]);
}
builder.append(',');
}
builder.deleteCharAt(builder.length()-1);
return builder.toString();
} | @Test
public void getsCorrectSerializableForm() {
Regressor mr = new Regressor(
new String[]{"a", "b", "c"},
new double[]{1d, 2d, 3d}
);
assertEquals("a=1.0,b=2.0,c=3.0", mr.getSerializableForm(false));
// Should be the same for includeConfidence either way, since we ignore NaN variances
assertEquals("a=1.0,b=2.0,c=3.0", mr.getSerializableForm(true));
Regressor scored = new Regressor(
new String[]{"a", "b", "c"},
new double[]{1d, 2d, 3d},
new double[]{0d, 0d, 0.5}
);
assertEquals("a=1.0,b=2.0,c=3.0", scored.getSerializableForm(false));
assertEquals("a=1.0\u00B10.0,b=2.0\u00B10.0,c=3.0\u00B10.5", scored.getSerializableForm(true));
} |
@Operation(summary = "Check if credentials are valid.", tags = { SwaggerConfig.ACTIVATE_WEBSITE, SwaggerConfig.ACTIVATE_SMS, SwaggerConfig.ACTIVATE_LETTER, SwaggerConfig.ACTIVATE_RDA }, operationId = "auth",
parameters = {@Parameter(ref = "API-V"), @Parameter(ref = "OS-T"), @Parameter(ref = "APP-V"), @Parameter(ref = "OS-V"), @Parameter(ref = "REL-T")})
@PostMapping(value = "auth", produces= "application/json")
@ResponseBody
public AppResponse authenticate(
@Valid @RequestBody ActivationUsernamePasswordRequest request) throws FlowNotDefinedException, NoSuchAlgorithmException, FlowStateNotDefinedException, IOException, SharedServiceClientException {
return service.startFlow(UndefinedFlow.NAME, Action.CONFIRM_PASSWORD, request);
} | @Test
void validateIfCorrectProcessesAreCalledAuthenticate() throws FlowNotDefinedException, NoSuchAlgorithmException, IOException, FlowStateNotDefinedException, SharedServiceClientException {
ActivationUsernamePasswordRequest activationUsernamePasswordRequest = new ActivationUsernamePasswordRequest();
activationController.authenticate(activationUsernamePasswordRequest);
verify(flowService, times(1)).startFlow(eq(UndefinedFlow.NAME), any(Action.class), any(ActivationUsernamePasswordRequest.class));
} |
public synchronized void executePeriodically(final Runnable task, long period) {
TimerTask existing = timerTasks.get(task);
if (existing != null) {
LOG.debug("Task {} already scheduled, cancelling and rescheduling", task);
cancel(task);
}
TimerTask timerTask = new SchedulerTimerTask(task);
timer.schedule(timerTask, period, period);
timerTasks.put(task, timerTask);
} | @Test
public void testExecutePeriodically() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
scheduler.executePeriodically(new CountDownRunnable(latch), 10);
assertTrue(latch.await(5000, TimeUnit.MILLISECONDS));
} |
@Nonnull
@Override
public Result addChunk(ByteBuf buffer) {
final byte[] readable = new byte[buffer.readableBytes()];
buffer.readBytes(readable, buffer.readerIndex(), buffer.readableBytes());
final GELFMessage msg = new GELFMessage(readable);
final ByteBuf aggregatedBuffer;
switch (msg.getGELFType()) {
case CHUNKED:
try {
chunkCounter.inc();
aggregatedBuffer = checkForCompletion(msg);
if (aggregatedBuffer == null) {
return VALID_EMPTY_RESULT;
}
} catch (IllegalArgumentException | IllegalStateException | IndexOutOfBoundsException e) {
log.debug("Invalid gelf message chunk, dropping message.", e);
return INVALID_RESULT;
}
break;
case ZLIB:
case GZIP:
case UNCOMPRESSED:
aggregatedBuffer = Unpooled.wrappedBuffer(readable);
break;
case UNSUPPORTED:
return INVALID_RESULT;
default:
return INVALID_RESULT;
}
return new Result(aggregatedBuffer, true);
} | @Test
public void tooManyChunks() {
final ByteBuf[] chunks = createChunkedMessage(129 * 1024, 1024);
int i = 1;
for (final ByteBuf chunk : chunks) {
final CodecAggregator.Result result = aggregator.addChunk(chunk);
if (i == 129) {
assertFalse("Message invalidated (chunk #" + i + ")", result.isValid());
assertNull("Message discarded (chunk #" + i + ")", result.getMessage());
} else {
assertTrue("Incomplete message valid (chunk #" + i + ")", result.isValid());
assertNull("Message not complete (chunk #" + i + ")", result.getMessage());
}
i++;
}
} |
public Optional<Measure> toMeasure(@Nullable ScannerReport.Measure batchMeasure, Metric metric) {
Objects.requireNonNull(metric);
if (batchMeasure == null) {
return Optional.empty();
}
Measure.NewMeasureBuilder builder = Measure.newMeasureBuilder();
switch (metric.getType().getValueType()) {
case INT:
return toIntegerMeasure(builder, batchMeasure);
case LONG:
return toLongMeasure(builder, batchMeasure);
case DOUBLE:
return toDoubleMeasure(builder, batchMeasure);
case BOOLEAN:
return toBooleanMeasure(builder, batchMeasure);
case STRING:
return toStringMeasure(builder, batchMeasure);
case LEVEL:
return toLevelMeasure(builder, batchMeasure);
case NO_VALUE:
return toNoValueMeasure(builder);
default:
throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType());
}
} | @Test
public void toMeasure_returns_long_part_of_value_in_dto_for_Long_Metric() {
Optional<Measure> measure = underTest.toMeasure(ScannerReport.Measure.newBuilder().setLongValue(LongValue.newBuilder().setValue(15L)).build(), SOME_LONG_METRIC);
assertThat(measure).isPresent();
assertThat(measure.get().getValueType()).isEqualTo(Measure.ValueType.LONG);
assertThat(measure.get().getLongValue()).isEqualTo(15);
} |
public Map<String, Integer> getNotAnalysedFilesByLanguage() {
return ImmutableMap.copyOf(notAnalysedFilesByLanguage);
} | @Test
public void stores_not_analysed_cpp_file_count_in_sq_community_edition() {
when(sonarRuntime.getEdition()).thenReturn(SonarEdition.COMMUNITY);
InputComponentStoreTester underTest = new InputComponentStoreTester(sonarRuntime);
String mod1Key = "mod1";
underTest.addFile(mod1Key, "src/main/java/Foo.java", "java");
underTest.addFile(mod1Key, "src/main/c/file1.c");
underTest.addFile(mod1Key, "src/main/c/file2.cpp");
underTest.addFile(mod1Key, "src/main/c/file3.cxx");
underTest.addFile(mod1Key, "src/main/c/file4.c++");
underTest.addFile(mod1Key, "src/main/c/file5.cc");
underTest.addFile(mod1Key, "src/main/c/file6.CPP");
String mod2Key = "mod2";
underTest.addFile(mod2Key, "src/main/groovy/Foo.groovy", "groovy");
underTest.addFile(mod2Key, "src/main/c/file3.cpp");
assertThat(underTest.getNotAnalysedFilesByLanguage()).hasSize(2);
assertThat(underTest.getNotAnalysedFilesByLanguage()).containsEntry("C++", 6);
} |
@Cacheable(value = CACHE_LATEST_EXTENSION_VERSION, keyGenerator = GENERATOR_LATEST_EXTENSION_VERSION)
public ExtensionVersion getLatest(List<ExtensionVersion> versions, boolean groupedByTargetPlatform) {
return getLatest(versions, groupedByTargetPlatform, false);
} | @Test
public void testGetLatestPreRelease() {
var release = new ExtensionVersion();
release.setTargetPlatform(TargetPlatform.NAME_UNIVERSAL);
release.setPreRelease(false);
release.setVersion("1.0.0");
var minor = new ExtensionVersion();
minor.setTargetPlatform(TargetPlatform.NAME_LINUX_ARM64);
minor.setPreRelease(true);
minor.setVersion("0.0.5");
var major = new ExtensionVersion();
major.setTargetPlatform(TargetPlatform.NAME_LINUX_ARM64);
major.setPreRelease(true);
major.setVersion("0.3.0");
var latest = versions.getLatest(List.of(major, minor, release), false, true);
assertEquals(major, latest);
} |
public URI getUri() {
return _uri;
} | @Test
public void testCreateGcsUriUsingADifferentScheme() {
URI uri = URI.create("file://bucket/file");
GcsUri gcsUri = new GcsUri(uri);
assertEquals(gcsUri.getUri().getScheme(), SCHEME);
} |
@Override
public V put(K key, V value, Duration ttl) {
return get(putAsync(key, value, ttl));
} | @Test
public void testReadAllValuesTTL() {
final RMapCacheNative<String, String> map = redisson.getMapCacheNative("testRMapCacheAllValues");
map.put("1234", "5678", Duration.ofMinutes(1));
assertThat(map.readAllValues()).containsOnly("5678");
map.destroy();
} |
@Udf(description = "Returns the base 10 logarithm of an INT value.")
public Double log(
@UdfParameter(
value = "value",
description = "the value get the base 10 logarithm of."
) final Integer value
) {
return log(value == null ? null : value.doubleValue());
} | @Test
public void shouldHandleNull() {
assertThat(udf.log((Integer)null), is(nullValue()));
assertThat(udf.log((Long)null), is(nullValue()));
assertThat(udf.log((Double)null), is(nullValue()));
assertThat(udf.log(null, 13), is(nullValue()));
assertThat(udf.log(null, 13L), is(nullValue()));
assertThat(udf.log(null, 13.0), is(nullValue()));
assertThat(udf.log(13, null), is(nullValue()));
assertThat(udf.log(13L, null), is(nullValue()));
assertThat(udf.log(13.0, null), is(nullValue()));
} |
@Override
public Num calculate(Position position, int currentIndex) {
return this.calculate(position);
} | @Test
public void calculateOpenSellPosition() {
// Calculate the transaction costs of an open position
int currentIndex = 4;
Position position = new Position(Trade.TradeType.BUY, transactionModel, new ZeroCostModel());
position.operate(0, DoubleNum.valueOf(100), DoubleNum.valueOf(1));
Num costsFromModel = transactionModel.calculate(position, currentIndex);
assertNumEquals(costsFromModel, DoubleNum.valueOf(1));
} |
@Override
public boolean exists(Env targetEnv) {
return addresses.containsKey(targetEnv);
} | @Test
public void testExists() {
assertTrue(databasePortalMetaServerProvider.exists(Env.DEV));
assertFalse(databasePortalMetaServerProvider.exists(Env.PRO));
assertTrue(databasePortalMetaServerProvider.exists(Env.addEnvironment("nothing")));
} |
public static DisplayData from(HasDisplayData component) {
checkNotNull(component, "component argument cannot be null");
InternalBuilder builder = new InternalBuilder();
builder.include(Path.root(), component);
return builder.build();
} | @Test
public void testIncludeNullPath() {
thrown.expectCause(isA(NullPointerException.class));
DisplayData.from(
new HasDisplayData() {
@Override
public void populateDisplayData(DisplayData.Builder builder) {
builder.include(null, new NoopDisplayData());
}
});
} |
void runOnce() {
if (transactionManager != null) {
try {
transactionManager.maybeResolveSequences();
RuntimeException lastError = transactionManager.lastError();
// do not continue sending if the transaction manager is in a failed state
if (transactionManager.hasFatalError()) {
if (lastError != null)
maybeAbortBatches(lastError);
client.poll(retryBackoffMs, time.milliseconds());
return;
}
if (transactionManager.hasAbortableError() && shouldHandleAuthorizationError(lastError)) {
return;
}
// Check whether we need a new producerId. If so, we will enqueue an InitProducerId
// request which will be sent below
transactionManager.bumpIdempotentEpochAndResetIdIfNeeded();
if (maybeSendAndPollTransactionalRequest()) {
return;
}
} catch (AuthenticationException e) {
// This is already logged as error, but propagated here to perform any clean ups.
log.trace("Authentication exception while processing transactional request", e);
transactionManager.authenticationFailed(e);
}
}
long currentTimeMs = time.milliseconds();
long pollTimeout = sendProducerData(currentTimeMs);
client.poll(pollTimeout, currentTimeMs);
} | @Test
public void testNodeNotReady() {
final long producerId = 123456L;
time = new MockTime(10);
client = new MockClient(time, metadata);
TransactionManager transactionManager = new TransactionManager(new LogContext(), "testNodeNotReady",
60000, 100L, new ApiVersions());
setupWithTransactionState(transactionManager, false, null, true);
ProducerIdAndEpoch producerIdAndEpoch = new ProducerIdAndEpoch(producerId, (short) 0);
transactionManager.initializeTransactions();
sender.runOnce();
Node node = metadata.fetch().nodes().get(0);
client.delayReady(node, REQUEST_TIMEOUT + 20);
prepareFindCoordinatorResponse(Errors.NONE, "testNodeNotReady");
sender.runOnce();
sender.runOnce();
assertNotNull(transactionManager.coordinator(CoordinatorType.TRANSACTION), "Coordinator not found");
client.throttle(node, REQUEST_TIMEOUT + 20);
prepareFindCoordinatorResponse(Errors.NONE, "Coordinator not found");
prepareInitProducerResponse(Errors.NONE, producerIdAndEpoch.producerId, producerIdAndEpoch.epoch);
waitForProducerId(transactionManager, producerIdAndEpoch);
} |
public PlanNode plan(Analysis analysis)
{
return planStatement(analysis, analysis.getStatement());
} | @Test
public void testRedundantTopNNodeRemoval()
{
Session exploitConstraints = Session.builder(this.getQueryRunner().getDefaultSession())
.setSystemProperty(EXPLOIT_CONSTRAINTS, Boolean.toString(true))
.build();
String query = "SELECT count(*) FROM orders ORDER BY 1 LIMIT 10";
assertFalse(
searchFrom(plan(query, OPTIMIZED, exploitConstraints).getRoot())
.where(isInstanceOfAny(TopNNode.class, SortNode.class))
.matches(),
format("Unexpected TopN node for query: '%s'", query));
assertPlan(
"SELECT orderkey, count(*) FROM orders GROUP BY orderkey ORDER BY 1 LIMIT 10",
output(
node(TopNNode.class,
anyTree(
tableScan("orders")))));
assertPlan(
"SELECT orderkey, count(*) FROM orders GROUP BY orderkey ORDER BY 1 LIMIT 0",
output(
node(ValuesNode.class)));
} |
public synchronized <K> KeyQueryMetadata getKeyQueryMetadataForKey(final String storeName,
final K key,
final Serializer<K> keySerializer) {
Objects.requireNonNull(keySerializer, "keySerializer can't be null");
if (topologyMetadata.hasNamedTopologies()) {
throw new IllegalArgumentException("Cannot invoke the getKeyQueryMetadataForKey(storeName, key, keySerializer)"
+ "method when using named topologies, please use the overload that"
+ "accepts a topologyName parameter to identify the correct store");
}
return getKeyQueryMetadataForKey(storeName,
key,
new DefaultStreamPartitioner<>(keySerializer));
} | @Test
public void shouldGetQueryMetadataForGlobalStoreWithKeyAndPartitioner() {
final KeyQueryMetadata metadata = metadataState.getKeyQueryMetadataForKey(globalTable, "key", partitioner);
assertEquals(hostOne, metadata.activeHost());
assertTrue(metadata.standbyHosts().isEmpty());
} |
List<String> decorateTextWithHtml(String text, DecorationDataHolder decorationDataHolder) {
return decorateTextWithHtml(text, decorationDataHolder, null, null);
} | @Test
public void should_close_tags_at_end_of_file() {
String classDeclarationSample =
"/*" + LF_END_OF_LINE +
" * Header" + LF_END_OF_LINE +
" */" + LF_END_OF_LINE +
LF_END_OF_LINE +
"public class HelloWorld {" + LF_END_OF_LINE +
"}";
DecorationDataHolder decorationData = new DecorationDataHolder();
decorationData.loadSyntaxHighlightingData("0,16,cppd;18,25,k;25,31,k;");
HtmlTextDecorator htmlTextDecorator = new HtmlTextDecorator();
List<String> htmlOutput = htmlTextDecorator.decorateTextWithHtml(classDeclarationSample, decorationData);
assertThat(htmlOutput).containsExactly(
"<span class=\"cppd\">/*</span>",
"<span class=\"cppd\"> * Header</span>",
"<span class=\"cppd\"> */</span>",
"",
"<span class=\"k\">public </span><span class=\"k\">class </span>HelloWorld {",
"}"
);
} |
public static long estimateSize(StructType tableSchema, long totalRecords) {
if (totalRecords == Long.MAX_VALUE) {
return totalRecords;
}
long result;
try {
result = LongMath.checkedMultiply(tableSchema.defaultSize(), totalRecords);
} catch (ArithmeticException e) {
result = Long.MAX_VALUE;
}
return result;
} | @Test
public void testEstimateSize() throws IOException {
long tableSize = SparkSchemaUtil.estimateSize(SparkSchemaUtil.convert(TEST_SCHEMA), 1);
Assert.assertEquals("estimateSize matches with expected approximation", 24, tableSize);
} |
@VisibleForTesting
Pair<String, SearchQueryOperator> extractOperator(String value, SearchQueryOperator defaultOperator) {
if (value.length() >= 3) {
final String substring2 = value.substring(0, 2);
switch (substring2) {
case ">=":
return Pair.of(value.substring(2).trim(), SearchQueryOperators.GREATER_EQUALS);
case "<=":
return Pair.of(value.substring(2).trim(), SearchQueryOperators.LESS_EQUALS);
case "=~":
return Pair.of(value.substring(2).trim(), SearchQueryOperators.REGEXP);
}
}
if (value.length() >= 2) {
final String substring1 = value.substring(0, 1);
switch (substring1) {
case ">":
return Pair.of(value.substring(1).trim(), SearchQueryOperators.GREATER);
case "<":
return Pair.of(value.substring(1).trim(), SearchQueryOperators.LESS);
case "=":
return Pair.of(value.substring(1).trim(), SearchQueryOperators.EQUALS);
}
}
return Pair.of(value, defaultOperator);
} | @Test
void extractOperator() {
final SearchQueryParser parser = new SearchQueryParser("defaultfield",
ImmutableMap.of(
"id", SearchQueryField.create("real_id"),
"date", SearchQueryField.create("created_at", SearchQueryField.Type.DATE),
"int", SearchQueryField.create("int", SearchQueryField.Type.INT))
);
final SearchQueryOperator defaultOp = SearchQueryOperators.REGEXP;
checkQuery(parser, "", SearchQueryField.Type.STRING, "", defaultOp);
checkQuery(parser, "h", SearchQueryField.Type.STRING, "h", defaultOp);
checkQuery(parser, "he", SearchQueryField.Type.STRING, "he", defaultOp);
checkQuery(parser, "hel", SearchQueryField.Type.STRING, "hel", defaultOp);
checkQuery(parser, "hello", SearchQueryField.Type.STRING, "hello", defaultOp);
checkQuery(parser, "=~ hello", SearchQueryField.Type.STRING, "hello", SearchQueryOperators.REGEXP);
checkQuery(parser, "= hello", SearchQueryField.Type.STRING, "hello", SearchQueryOperators.EQUALS);
checkQuery(parser, ">=2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.GREATER_EQUALS);
checkQuery(parser, ">= 2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.GREATER_EQUALS);
checkQuery(parser, "<=2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.LESS_EQUALS);
checkQuery(parser, "<= 2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.LESS_EQUALS);
checkQuery(parser, ">2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.GREATER);
checkQuery(parser, "> 2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.GREATER);
checkQuery(parser, "<2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.LESS);
checkQuery(parser, "< 2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.LESS);
checkQuery(parser, "2017-03-23", SearchQueryField.Type.DATE, "2017-03-23", SearchQueryOperators.EQUALS);
checkQuery(parser, ">=1", SearchQueryField.Type.INT, "1", SearchQueryOperators.GREATER_EQUALS);
checkQuery(parser, "<=1", SearchQueryField.Type.INT, "1", SearchQueryOperators.LESS_EQUALS);
checkQuery(parser, ">1", SearchQueryField.Type.INT, "1", SearchQueryOperators.GREATER);
checkQuery(parser, "<1", SearchQueryField.Type.INT, "1", SearchQueryOperators.LESS);
checkQuery(parser, "=1", SearchQueryField.Type.INT, "1", SearchQueryOperators.EQUALS);
checkQuery(parser, "1", SearchQueryField.Type.INT, "1", SearchQueryOperators.EQUALS);
checkQuery(parser, ">=1", SearchQueryField.Type.LONG, "1", SearchQueryOperators.GREATER_EQUALS);
checkQuery(parser, "<=1", SearchQueryField.Type.LONG, "1", SearchQueryOperators.LESS_EQUALS);
checkQuery(parser, ">1", SearchQueryField.Type.LONG, "1", SearchQueryOperators.GREATER);
checkQuery(parser, "<1", SearchQueryField.Type.LONG, "1", SearchQueryOperators.LESS);
checkQuery(parser, "=1", SearchQueryField.Type.LONG, "1", SearchQueryOperators.EQUALS);
checkQuery(parser, "1", SearchQueryField.Type.LONG, "1", SearchQueryOperators.EQUALS);
} |
public static String Lpad( String valueToPad, String filler, int size ) {
if ( ( size == 0 ) || ( valueToPad == null ) || ( filler == null ) ) {
return valueToPad;
}
int vSize = valueToPad.length();
int fSize = filler.length();
// This next if ensures previous behavior, but prevents infinite loop
// if "" is passed in as a filler.
if ( ( vSize >= size ) || ( fSize == 0 ) ) {
return valueToPad;
}
int tgt = ( size - vSize );
StringBuilder sb = new StringBuilder( size );
sb.append( filler );
while ( sb.length() < tgt ) {
// instead of adding one character at a time, this
// is exponential - much fewer times in loop
sb.append( sb );
}
sb.append( valueToPad );
return sb.substring( Math.max( 0, sb.length() - size ) ); // this makes sure you have the right size string returned.
} | @Test
public void testLpad() {
final String s = "pad me";
assertEquals( s, Const.Lpad( s, "-", 0 ) );
assertEquals( s, Const.Lpad( s, "-", 3 ) );
assertEquals( "--" + s, Const.Lpad( s, "-", 8 ) );
// add in some edge cases
assertEquals( s, Const.Lpad( s, null, 15 ) ); // No NPE
assertEquals( s, Const.Lpad( s, "", 15 ) );
assertEquals( s, Const.Lpad( s, "*", 5 ) );
assertEquals( null, Const.Lpad( null, "*", 15 ) );
assertEquals( "****Test", Const.Lpad( "Test", "**********", 8 ) );
assertEquals( "*Test", Const.Lpad( "Test", "**", 5 ) );
assertEquals( "****", Const.Lpad( "", "*", 4 ) );
} |
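The sb.append(sb) call inside the padding loop doubles the builder on every pass, so reaching a target width costs O(log n) appends rather than O(n). A minimal sketch of that doubling behavior in isolation:

class LpadDoublingSketch {
    public static void main(String[] args) {
        StringBuilder sb = new StringBuilder("*");
        int target = 20;
        while (sb.length() < target) {
            sb.append(sb); // length goes 1 -> 2 -> 4 -> 8 -> 16 -> 32: five appends, not twenty
        }
        System.out.println(sb.substring(0, target)); // 20 asterisks
    }
}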
@Override
public String toString() {
return toStringHelper(getClass())
.toString();
// TODO: need to handle options
} | @Test
public void testToStringRS() throws Exception {
RouterSolicitation rs = deserializer.deserialize(bytePacket, 0, bytePacket.length);
String str = rs.toString();
// TODO: need to handle Options
} |
public static int getPid() {
return PID;
} | @Test
public void testGetPid() {
assertThat(UtilAll.getPid()).isGreaterThan(0);
} |
public static DateTime parse(CharSequence dateStr, DateFormat dateFormat) {
return new DateTime(dateStr, dateFormat);
} | @Test
public void parseSingleMonthAndDayTest() {
DateTime parse = DateUtil.parse("2021-1-1");
assertNotNull(parse);
assertEquals("2021-01-01 00:00:00", parse.toString());
parse = DateUtil.parse("2021-1-22 00:00:00");
assertNotNull(parse);
assertEquals("2021-01-22 00:00:00", parse.toString());
} |
@Override
public Set<Map.Entry> entrySet() {
return null;
} | @Test
public void testEntrySet() throws Exception {
assertNull(NULL_QUERY_CACHE.entrySet());
} |
@Override
public SelArray assignOps(SelOp op, SelType rhs) {
if (op == SelOp.ASSIGN) {
SelTypeUtil.checkTypeMatch(this.type(), rhs.type());
this.val = ((SelArray) rhs).val; // direct assignment
return this;
}
throw new UnsupportedOperationException(
this.type() + " DO NOT support assignment operation " + op);
} | @Test(expected = UnsupportedOperationException.class)
public void testInvalidAssignOp() {
one.assignOps(SelOp.ADD_ASSIGN, another);
} |
@Override
@CacheEvict(value = RedisKeyConstants.PERMISSION_MENU_ID_LIST,
allEntries = true) // allEntries evicts the whole cache: if a permission changes, both the old and the new permission are affected, so clearing everything is simple and effective
public void updateMenu(MenuSaveVO updateReqVO) {
// Validate that the menu to update exists
if (menuMapper.selectById(updateReqVO.getId()) == null) {
throw exception(MENU_NOT_EXISTS);
}
// Validate that the parent menu exists
validateParentMenu(updateReqVO.getParentId(), updateReqVO.getId());
// Validate the menu itself
validateMenu(updateReqVO.getParentId(), updateReqVO.getName(), updateReqVO.getId());
// Persist the update to the database
MenuDO updateObj = BeanUtils.toBean(updateReqVO, MenuDO.class);
initMenuProperty(updateObj);
menuMapper.updateById(updateObj);
} | @Test
public void testUpdateMenu_sonIdNotExist() {
// Prepare parameters
MenuSaveVO reqVO = randomPojo(MenuSaveVO.class);
// Invoke and assert the exception
assertServiceException(() -> menuService.updateMenu(reqVO), MENU_NOT_EXISTS);
} |
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters,
SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) {
MetaData.Builder metaData = new MetaData.Builder(sanitize, hostName, clock.getTime() / 1000, period)
.type(COLLECTD_TYPE_GAUGE);
try {
connect(sender);
for (Map.Entry<String, Gauge> entry : gauges.entrySet()) {
serializeGauge(metaData.plugin(entry.getKey()), entry.getValue());
}
for (Map.Entry<String, Counter> entry : counters.entrySet()) {
serializeCounter(metaData.plugin(entry.getKey()), entry.getValue());
}
for (Map.Entry<String, Histogram> entry : histograms.entrySet()) {
serializeHistogram(metaData.plugin(entry.getKey()), entry.getValue());
}
for (Map.Entry<String, Meter> entry : meters.entrySet()) {
serializeMeter(metaData.plugin(entry.getKey()), entry.getValue());
}
for (Map.Entry<String, Timer> entry : timers.entrySet()) {
serializeTimer(metaData.plugin(entry.getKey()), entry.getValue());
}
} catch (IOException e) {
LOG.warn("Unable to report to Collectd", e);
} finally {
disconnect(sender);
}
} | @Test
public void reportsBooleanGauges() throws Exception {
reporter.report(
map("gauge", () -> true),
map(),
map(),
map(),
map());
assertThat(nextValues(receiver)).containsExactly(1d);
reporter.report(
map("gauge", () -> false),
map(),
map(),
map(),
map());
assertThat(nextValues(receiver)).containsExactly(0d);
} |
void resolveSelectors(EngineDiscoveryRequest request, CucumberEngineDescriptor engineDescriptor) {
Predicate<String> packageFilter = buildPackageFilter(request);
resolve(request, engineDescriptor, packageFilter);
filter(engineDescriptor, packageFilter);
pruneTree(engineDescriptor);
} | @Test
void resolveRequestWithClasspathResourceSelectorAndWithSpaceInFilename() {
DiscoverySelector resource = selectClasspathResource("io/cucumber/junit/platform/engine/with space.feature");
EngineDiscoveryRequest discoveryRequest = new SelectorRequest(resource);
resolver.resolveSelectors(discoveryRequest, testDescriptor);
assertEquals(1, testDescriptor.getChildren().size());
} |
@RequiresApi(Build.VERSION_CODES.R)
@Override
public boolean onInlineSuggestionsResponse(@NonNull InlineSuggestionsResponse response) {
final List<InlineSuggestion> inlineSuggestions = response.getInlineSuggestions();
if (inlineSuggestions.size() > 0) {
mInlineSuggestionAction.onNewSuggestions(inlineSuggestions);
getInputViewContainer().addStripAction(mInlineSuggestionAction, true);
getInputViewContainer().setActionsStripVisibility(true);
}
return !inlineSuggestions.isEmpty();
} | @Test
public void testActionStripAddedForUnknown() {
simulateOnStartInputFlow();
mAnySoftKeyboardUnderTest.onInlineSuggestionsResponse(
mockResponse(
new String[] {"I", "do", "not", "know"}, Mockito.mock(InlineContentView.class)));
ImageView icon =
mAnySoftKeyboardUnderTest
.getInputViewContainer()
.findViewById(R.id.inline_suggestions_strip_icon);
Assert.assertEquals(
R.drawable.ic_inline_suggestions,
Shadows.shadowOf(icon.getDrawable()).getCreatedFromResId());
} |
@Override
public HttpHeaders filter(HttpHeaders headers, ServerWebExchange exchange) {
HttpHeaders updated = new HttpHeaders();
for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
updated.addAll(entry.getKey(), entry.getValue());
}
// https://datatracker.ietf.org/doc/html/rfc7540#section-8.1.2.2
if (isGRPC(headers.getFirst(HttpHeaders.CONTENT_TYPE))) {
updated.add("te", "trailers");
}
return updated;
} | @Test
public void shouldIncludeTrailersHeaderIfGRPC() {
MockServerHttpRequest request = MockServerHttpRequest.get("http://localhost:8080/get")
.header(HttpHeaders.CONTENT_TYPE, "application/grpc")
.build();
GRPCRequestHeadersFilter filter = new GRPCRequestHeadersFilter();
HttpHeaders headers = filter.filter(request.getHeaders(), MockServerWebExchange.from(request));
assertThat(headers).containsKeys("te");
assertThat(headers.getFirst("te")).isEqualTo("trailers");
} |
public void initCustomSigners() {
String[] customSigners = ownerConf.getTrimmedStrings(CUSTOM_SIGNERS);
if (customSigners == null || customSigners.length == 0) {
// No custom signers specified, nothing to do.
LOG.debug("No custom signers specified");
return;
}
for (String customSigner : customSigners) {
String[] parts = customSigner.split(":");
if (!(parts.length == 1 || parts.length == 2 || parts.length == 3)) {
String message = "Invalid format (Expected name, name:SignerClass,"
+ " name:SignerClass:SignerInitializerClass)"
+ " for CustomSigner: [" + customSigner + "]";
LOG.error(message);
throw new IllegalArgumentException(message);
}
if (parts.length == 1) {
// Nothing to do. Trying to use a pre-defined Signer
} else {
// Register any custom Signer
maybeRegisterSigner(parts[0], parts[1], ownerConf);
// If an initializer is specified, take care of instantiating it and
// setting it up
if (parts.length == 3) {
Class<? extends AwsSignerInitializer> clazz = null;
try {
clazz = (Class<? extends AwsSignerInitializer>) ownerConf
.getClassByName(parts[2]);
} catch (ClassNotFoundException e) {
throw new RuntimeException(String.format(
"SignerInitializer class" + " [%s] not found for signer [%s]",
parts[2], parts[0]), e);
}
LOG.debug("Creating signer initializer: [{}] for signer: [{}]",
parts[2], parts[0]);
AwsSignerInitializer signerInitializer = ReflectionUtils
.newInstance(clazz, null);
initializers.add(signerInitializer);
signerInitializer
.registerStore(bucketName, ownerConf, delegationTokenProvider,
ownerUgi);
}
}
}
} | @Test
public void testCustomSignerFailureIfNotRegistered() throws Exception {
Configuration config = new Configuration();
config.set(CUSTOM_SIGNERS, "testsignerUnregistered");
SignerManager signerManager = new SignerManager("dontcare", null, config,
UserGroupInformation.getCurrentUser());
// Make sure the config is respected.
signerManager.initCustomSigners();
// Simulate a call from the AWS SDK to create the signer.
intercept(InstantiationIOException.class,
() -> SignerFactory.createSigner("testsignerUnregistered", null));
} |
@Override public List<RunConfiguration> load() {
List<RunConfiguration> runConfigurations = new ArrayList<>();
for ( RunConfigurationProvider runConfigurationProvider : getRunConfigurationProviders() ) {
runConfigurations.addAll( runConfigurationProvider.load() );
}
Collections.sort( runConfigurations, ( o1, o2 ) -> {
if ( o2.getName().equals( DefaultRunConfigurationProvider.DEFAULT_CONFIG_NAME ) ) {
return 1;
}
return o1.getName().compareToIgnoreCase( o2.getName() );
} );
return runConfigurations;
} | @Test
public void testLoad() {
List<RunConfiguration> runConfigurations = executionConfigurationManager.load();
assertEquals( 2, runConfigurations.size() ); //Includes default
} |
public static String sanitizeDefaultPort(String url) {
int afterSchemeIndex = url.indexOf("://");
if(afterSchemeIndex < 0) {
return url;
}
String scheme = url.substring(0, afterSchemeIndex);
int fromIndex = scheme.length() + 3;
//Let's see if it is an IPv6 Address
int ipv6StartIndex = url.indexOf('[', fromIndex);
if (ipv6StartIndex > 0) {
fromIndex = url.indexOf(']', ipv6StartIndex);
}
int portIndex = url.indexOf(':', fromIndex);
if(portIndex >= 0) {
int port = Integer.parseInt(url.substring(portIndex + 1));
if(isDefaultPort(port, scheme)) {
return url.substring(0, portIndex);
}
}
return url;
} | @Test
public void testSanitizeDefaultPort() {
String url = "http://127.0.0.1:80";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("http://127.0.0.1"));
url = "http://127.0.0.1";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("http://127.0.0.1"));
url = "http://127.0.0.1:443";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("http://127.0.0.1:443"));
url = "http://127.0.0.1:7080";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("http://127.0.0.1:7080"));
url = "https://127.0.0.1:80";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("https://127.0.0.1:80"));
url = "https://127.0.0.1:443";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("https://127.0.0.1"));
url = "https://127.0.0.1";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("https://127.0.0.1"));
url = "http://[::FFFF:129.144.52.38]:7080";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("http://[::FFFF:129.144.52.38]:7080"));
url = "http://[::FFFF:129.144.52.38]:80";
assertThat(CorsUtil.sanitizeDefaultPort(url), is("http://[::FFFF:129.144.52.38]"));
} |
public Map<String, Parameter> getAllParams(
Step stepDefinition, WorkflowSummary workflowSummary, StepRuntimeSummary runtimeSummary) {
return paramsManager.generateMergedStepParams(
workflowSummary, stepDefinition, getStepRuntime(stepDefinition.getType()), runtimeSummary);
} | @Test
public void testMergeNestedParamMap() {
when(this.defaultParamManager.getDefaultStepParams())
.thenReturn(
ImmutableMap.of(
"nested-default-new",
MapParamDefinition.builder()
.name("nested-default-new")
.value(
singletonMap(
"default-new",
ParamDefinition.buildParamDefinition("default-new", "from-default")))
.build(),
"nested-common",
MapParamDefinition.builder()
.name("nested-common")
.value(
Maps.newHashMap(
ImmutableMap.of(
"default-param",
ParamDefinition.buildParamDefinition(
"default-param", "from-default"),
"common-param",
ParamDefinition.buildParamDefinition(
"common-param", "from-default"),
"double-nested-common",
MapParamDefinition.builder()
.name("nested-common")
.value(
twoItemMap(
"default-param",
ParamDefinition.buildParamDefinition(
"default-param", "from-default"),
"common-param",
ParamDefinition.buildParamDefinition(
"common-param", "from-default")))
.build(),
"double-nested-common-string",
StringMapParamDefinition.builder()
.name("nested-common")
.value(
twoItemMap(
"default-param", "from-default",
"common-param", "from-default"))
.build())))
.build(),
"foo",
ParamDefinition.buildParamDefinition("foo", "some-default"),
"test-param",
ParamDefinition.buildParamDefinition("test-param", "some-other-default")));
TypedStep testStep = new TypedStep();
testStep.setParams(
ImmutableMap.of(
"nested-step-new",
MapParamDefinition.builder()
.name("nested-step-new")
.value(
singletonMap(
"step-new", ParamDefinition.buildParamDefinition("step-new", "from-step")))
.build(),
"nested-common",
MapParamDefinition.builder()
.name("nested-common")
.value(
Maps.newHashMap(
ImmutableMap.of(
"step-param",
ParamDefinition.buildParamDefinition("step-param", "from-step"),
"common-param",
ParamDefinition.buildParamDefinition("common-param", "from-step"),
"double-nested-common",
MapParamDefinition.builder()
.name("nested-common")
.value(
twoItemMap(
"step-param",
ParamDefinition.buildParamDefinition(
"default-param", "from-step"),
"common-param",
ParamDefinition.buildParamDefinition(
"common-param", "from-step")))
.build(),
"double-nested-common-string",
StringMapParamDefinition.builder()
.name("nested-common")
.value(
twoItemMap(
"step-param", "from-step",
"common-param", "from-step"))
.build())))
.build(),
"foo",
ParamDefinition.buildParamDefinition("foo", "bar"),
"test-param",
ParamDefinition.buildParamDefinition("test-param", "hello")));
testStep.setType(StepType.NOOP);
testStep.setId("step1");
Map<String, Parameter> params =
runtimeManager.getAllParams(testStep, workflowSummary, runtimeSummary);
assertEquals("bar", params.get("foo").getValue());
assertEquals("hello", params.get("test-param").getValue());
assertEquals(
"from-step",
((MapParameter) params.get("nested-step-new")).getValue().get("step-new").getValue());
assertEquals(
"from-default",
((MapParameter) params.get("nested-default-new")).getValue().get("default-new").getValue());
assertEquals(
"from-default",
((MapParameter) params.get("nested-common")).getValue().get("default-param").getValue());
assertEquals(
"from-step",
((MapParameter) params.get("nested-common")).getValue().get("step-param").getValue());
assertEquals(
"from-step",
((MapParameter) params.get("nested-common")).getValue().get("common-param").getValue());
MapParamDefinition nestedMap =
(MapParamDefinition)
((MapParameter) params.get("nested-common")).getValue().get("double-nested-common");
assertEquals("from-default", nestedMap.getValue().get("default-param").getValue());
assertEquals("from-step", nestedMap.getValue().get("step-param").getValue());
assertEquals("from-step", nestedMap.getValue().get("common-param").getValue());
StringMapParamDefinition nestedStringMap =
(StringMapParamDefinition)
((MapParameter) params.get("nested-common"))
.getValue()
.get("double-nested-common-string");
assertEquals("from-default", nestedStringMap.getValue().get("default-param"));
assertEquals("from-step", nestedStringMap.getValue().get("step-param"));
assertEquals("from-step", nestedStringMap.getValue().get("common-param"));
} |
public static int read(final AtomicBuffer buffer, final EntryConsumer entryConsumer)
{
final int capacity = buffer.capacity();
int recordsRead = 0;
int offset = 0;
while (offset < capacity)
{
final long observationCount = buffer.getLongVolatile(offset + OBSERVATION_COUNT_OFFSET);
if (observationCount <= 0)
{
break;
}
++recordsRead;
final String channel = buffer.getStringAscii(offset + CHANNEL_OFFSET);
final String source = buffer.getStringAscii(
offset + CHANNEL_OFFSET + BitUtil.align(SIZE_OF_INT + channel.length(), SIZE_OF_INT));
entryConsumer.accept(
observationCount,
buffer.getLongVolatile(offset + TOTAL_BYTES_LOST_OFFSET),
buffer.getLong(offset + FIRST_OBSERVATION_OFFSET),
buffer.getLongVolatile(offset + LAST_OBSERVATION_OFFSET),
buffer.getInt(offset + SESSION_ID_OFFSET),
buffer.getInt(offset + STREAM_ID_OFFSET),
channel,
source);
final int recordLength =
CHANNEL_OFFSET +
BitUtil.align(SIZE_OF_INT + channel.length(), SIZE_OF_INT) +
SIZE_OF_INT + source.length();
offset += BitUtil.align(recordLength, ENTRY_ALIGNMENT);
}
return recordsRead;
} | @Test
void shouldReadNoEntriesInEmptyReport()
{
assertEquals(0, LossReportReader.read(buffer, entryConsumer));
verifyNoInteractions(entryConsumer);
} |
@Override
public boolean needsInitializationOrRestoration() {
return task.needsInitializationOrRestoration();
} | @Test
public void shouldDelegateNeedsInitializationOrRestoration() {
final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task);
readOnlyTask.needsInitializationOrRestoration();
verify(task).needsInitializationOrRestoration();
} |
public static AccessTokenRetriever create(Map<String, ?> configs, Map<String, Object> jaasConfig) {
return create(configs, null, jaasConfig);
} | @Test
public void testConfigureRefreshingFileAccessTokenRetrieverWithInvalidDirectory() {
// Should fail because the parent path doesn't exist.
Map<String, ?> configs = getSaslConfigs(SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL, new File("/tmp/this-directory-does-not-exist/foo.json").toURI().toString());
Map<String, Object> jaasConfig = Collections.emptyMap();
assertThrowsWithMessage(ConfigException.class, () -> AccessTokenRetrieverFactory.create(configs, jaasConfig), "that doesn't exist");
} |
@Override
public PageResult<DictDataDO> getDictDataPage(DictDataPageReqVO pageReqVO) {
return dictDataMapper.selectPage(pageReqVO);
} | @Test
public void testGetDictDataPage() {
// mock data
DictDataDO dbDictData = randomPojo(DictDataDO.class, o -> { // will be matched by the query below
o.setLabel("芋艿");
o.setDictType("yunai");
o.setStatus(CommonStatusEnum.ENABLE.getStatus());
});
dictDataMapper.insert(dbDictData);
// a record whose label does not match
dictDataMapper.insert(cloneIgnoreId(dbDictData, o -> o.setLabel("艿")));
// a record whose dictType does not match
dictDataMapper.insert(cloneIgnoreId(dbDictData, o -> o.setDictType("nai")));
// a record whose status does not match
dictDataMapper.insert(cloneIgnoreId(dbDictData, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
// Prepare parameters
DictDataPageReqVO reqVO = new DictDataPageReqVO();
reqVO.setLabel("芋");
reqVO.setDictType("yunai");
reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus());
// Invoke
PageResult<DictDataDO> pageResult = dictDataService.getDictDataPage(reqVO);
// Assert
assertEquals(1, pageResult.getTotal());
assertEquals(1, pageResult.getList().size());
assertPojoEquals(dbDictData, pageResult.getList().get(0));
} |
private static void convertToTelemetry(JsonElement jsonElement, long systemTs, Map<Long, List<KvEntry>> result, PostTelemetryMsg.Builder builder) {
if (jsonElement.isJsonObject()) {
parseObject(systemTs, result, builder, jsonElement.getAsJsonObject());
} else if (jsonElement.isJsonArray()) {
jsonElement.getAsJsonArray().forEach(je -> {
if (je.isJsonObject()) {
parseObject(systemTs, result, builder, je.getAsJsonObject());
} else {
throw new JsonSyntaxException(CAN_T_PARSE_VALUE + je);
}
});
} else {
throw new JsonSyntaxException(CAN_T_PARSE_VALUE + jsonElement);
}
} | @Test
public void testParseAsLong() {
var result = JsonConverter.convertToTelemetry(JsonParser.parseString("{\"meterReadingDelta\": 11}"), 0L);
Assertions.assertEquals(11L, result.get(0L).get(0).getLongValue().get().longValue());
} |
public SmppCommand createSmppCommand(SMPPSession session, Exchange exchange) {
SmppCommandType commandType = SmppCommandType.fromExchange(exchange);
return commandType.createCommand(session, configuration);
} | @Test
public void createSmppSubmitMultiCommand() {
SMPPSession session = new SMPPSession();
Exchange exchange = new DefaultExchange(new DefaultCamelContext());
exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitMulti");
SmppCommand command = binding.createSmppCommand(session, exchange);
assertTrue(command instanceof SmppSubmitMultiCommand);
} |
@Override
public ObjectName createName(String type, String domain, MetricName metricName) {
String name = metricName.getKey();
try {
ObjectName objectName = new ObjectName(domain, "name", name);
if (objectName.isPattern()) {
objectName = new ObjectName(domain, "name", ObjectName.quote(name));
}
return objectName;
} catch (MalformedObjectNameException e) {
try {
return new ObjectName(domain, "name", ObjectName.quote(name));
} catch (MalformedObjectNameException e1) {
LOGGER.warn("Unable to register {} {}", type, name, e1);
throw new RuntimeException(e1);
}
}
} | @Test
public void createsObjectNameWithNameAsKeyPropertyName() {
DefaultObjectNameFactory f = new DefaultObjectNameFactory();
ObjectName on = f.createName("type", "com.domain", MetricName.build("something.with.dots"));
assertThat(on.getKeyProperty("name")).isEqualTo("something.with.dots");
} |