focal_method | test_case |
---|---|
@Override
public BytesInput getBytes() {
// The Page Header should include: blockSizeInValues, numberOfMiniBlocks, totalValueCount
if (deltaValuesToFlush != 0) {
flushBlockBuffer();
}
return BytesInput.concat(
config.toBytesInput(),
BytesInput.fromUnsignedVarInt(totalValueCount),
BytesInput.fromZigZagVarLong(firstValue),
BytesInput.from(baos));
} | @Test
public void shouldReturnCorrectOffsetAfterInitialization() throws IOException {
long[] data = new long[2 * blockSize + 3];
for (int i = 0; i < data.length; i++) {
data[i] = i * 32;
}
writeData(data);
reader = new DeltaBinaryPackingValuesReader();
BytesInput bytes = writer.getBytes();
byte[] valueContent = bytes.toByteArray();
byte[] pageContent = new byte[valueContent.length * 10];
int contentOffsetInPage = 33;
System.arraycopy(valueContent, 0, pageContent, contentOffsetInPage, valueContent.length);
// offset should be correct
ByteBufferInputStream stream = ByteBufferInputStream.wrap(ByteBuffer.wrap(pageContent));
stream.skipFully(contentOffsetInPage);
reader.initFromPage(100, stream);
long offset = stream.position();
assertEquals(valueContent.length + contentOffsetInPage, offset);
// should be able to read data correctly
for (long i : data) {
assertEquals(i, reader.readLong());
}
// Testing the deprecated behavior of using byte arrays directly
reader = new DeltaBinaryPackingValuesReader();
reader.initFromPage(100, pageContent, contentOffsetInPage);
assertEquals(valueContent.length + contentOffsetInPage, reader.getNextOffset());
for (long i : data) {
assertEquals(i, reader.readLong());
}
} |
@Primary
@Bean("OkHttpClient")
public OkHttpClient provide(Configuration config, SonarQubeVersion version) {
OkHttpClientBuilder builder = new OkHttpClientBuilder();
builder.setConnectTimeoutMs(DEFAULT_CONNECT_TIMEOUT_IN_MS);
builder.setReadTimeoutMs(DEFAULT_READ_TIMEOUT_IN_MS);
// no need to define a proxy URL as the system-wide proxy is used and properly
// configured by the bootstrap process.
builder.setProxyLogin(config.get(HTTP_PROXY_USER.getKey()).orElse(null));
builder.setProxyPassword(config.get(HTTP_PROXY_PASSWORD.getKey()).orElse(null));
builder.setUserAgent(format("SonarQube/%s", version));
return builder.build();
} | @Test
public void get_returns_a_OkHttpClient_with_default_configuration() throws Exception {
OkHttpClient client = underTest.provide(settings.asConfig(), sonarQubeVersion);
assertThat(client.connectTimeoutMillis()).isEqualTo(10_000);
assertThat(client.readTimeoutMillis()).isEqualTo(10_000);
assertThat(client.proxy()).isNull();
RecordedRequest recordedRequest = call(client);
assertThat(recordedRequest.getHeader("User-Agent")).isEqualTo("SonarQube/6.2");
assertThat(recordedRequest.getHeader("Proxy-Authorization")).isNull();
} |
public boolean eval(StructLike data) {
return new EvalVisitor().eval(data);
} | @Test
public void testAnd() {
Evaluator evaluator = new Evaluator(STRUCT, and(equal("x", 7), notNull("z")));
assertThat(evaluator.eval(TestHelpers.Row.of(7, 0, 3))).as("7, 3 => true").isTrue();
assertThat(evaluator.eval(TestHelpers.Row.of(8, 0, 3))).as("8, 3 => false").isFalse();
assertThat(evaluator.eval(TestHelpers.Row.of(7, 0, null))).as("7, null => false").isFalse();
assertThat(evaluator.eval(TestHelpers.Row.of(8, 0, null))).as("8, null => false").isFalse();
Evaluator structEvaluator =
new Evaluator(STRUCT, and(equal("s1.s2.s3.s4.i", 7), notNull("s1.s2.s3.s4.i")));
assertThat(
structEvaluator.eval(
TestHelpers.Row.of(
7,
0,
3,
TestHelpers.Row.of(
TestHelpers.Row.of(TestHelpers.Row.of(TestHelpers.Row.of(7)))))))
.as("7, 7 => true")
.isTrue();
assertThat(
structEvaluator.eval(
TestHelpers.Row.of(
8,
0,
3,
TestHelpers.Row.of(
TestHelpers.Row.of(TestHelpers.Row.of(TestHelpers.Row.of(8)))))))
.as("8, 8 => false")
.isFalse();
assertThat(structEvaluator.eval(TestHelpers.Row.of(7, 0, null, null)))
.as("7, null => false")
.isFalse();
assertThat(
structEvaluator.eval(
TestHelpers.Row.of(
8,
0,
null,
TestHelpers.Row.of(
TestHelpers.Row.of(TestHelpers.Row.of(TestHelpers.Row.of(8)))))))
.as("8, notnull => false")
.isFalse();
} |
public static ParseResult parse(String text) {
Map<String, String> localProperties = new HashMap<>();
String intpText = "";
String scriptText = null;
Matcher matcher = REPL_PATTERN.matcher(text);
if (matcher.find()) {
String headingSpace = matcher.group(1);
intpText = matcher.group(2);
int startPos = headingSpace.length() + intpText.length() + 1;
if (startPos < text.length() && text.charAt(startPos) == '(') {
startPos = parseLocalProperties(text, startPos, localProperties);
}
scriptText = text.substring(startPos);
} else {
intpText = "";
scriptText = text;
}
return new ParseResult(intpText, removeLeadingWhiteSpaces(scriptText), localProperties);
} | @Test
void testParagraphTextLocalPropertiesNoText() {
ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("%spark.pyspark(pool=pool_1)");
assertEquals("spark.pyspark", parseResult.getIntpText());
assertEquals(1, parseResult.getLocalProperties().size());
assertEquals("pool_1", parseResult.getLocalProperties().get("pool"));
assertEquals("", parseResult.getScriptText());
} |
public static SeataMQProducer createSingle(String nameServer, String producerGroup) throws MQClientException {
return createSingle(nameServer, null, producerGroup, null);
} | @Test
public void testCreateSingle() throws Exception {
SeataMQProducerFactory.createSingle("127.0.0.1:9876", "test");
Assertions.assertThrows(NotSupportYetException.class, () -> SeataMQProducerFactory.createSingle("127.0.0.1:9876", "test"));
SeataMQProducer producer = SeataMQProducerFactory.getProducer();
Assertions.assertNotNull(producer);
} |
public static Builder builder() {
return new Builder();
} | @TestTemplate
public void rowDeltaWithDeletesAndDuplicates() {
assumeThat(formatVersion).isGreaterThan(1);
assertThat(listManifestFiles()).isEmpty();
table
.newRowDelta()
.addRows(FILE_A)
.addRows(DataFiles.builder(SPEC).copy(FILE_A).build())
.addRows(FILE_A)
.addDeletes(FILE_A_DELETES)
.addDeletes(FileMetadata.deleteFileBuilder(SPEC).copy(FILE_A_DELETES).build())
.addDeletes(FILE_A_DELETES)
.commit();
assertThat(table.currentSnapshot().summary())
.hasSize(14)
.containsEntry(SnapshotSummary.ADDED_FILES_PROP, "1")
.containsEntry(SnapshotSummary.ADDED_DELETE_FILES_PROP, "1")
.containsEntry(SnapshotSummary.ADDED_FILE_SIZE_PROP, "20") // size of data + delete file
.containsEntry(SnapshotSummary.ADD_POS_DELETE_FILES_PROP, "1")
.containsEntry(SnapshotSummary.ADDED_POS_DELETES_PROP, "1")
.containsEntry(SnapshotSummary.ADDED_RECORDS_PROP, "1")
.containsEntry(SnapshotSummary.CHANGED_PARTITION_COUNT_PROP, "1")
.containsEntry(SnapshotSummary.TOTAL_DATA_FILES_PROP, "1")
.containsEntry(SnapshotSummary.TOTAL_DELETE_FILES_PROP, "1")
.containsEntry(SnapshotSummary.TOTAL_EQ_DELETES_PROP, "0")
.containsEntry(SnapshotSummary.TOTAL_POS_DELETES_PROP, "1")
.containsEntry(SnapshotSummary.TOTAL_FILE_SIZE_PROP, "20")
.containsEntry(SnapshotSummary.TOTAL_RECORDS_PROP, "1");
} |
@Override
public Map<Errors, Integer> errorCounts() {
Errors error = error();
if (error != Errors.NONE) {
// Minor optimization since the top-level error applies to all partitions
if (version < 5)
return Collections.singletonMap(error, data.partitionErrors().size() + 1);
return Collections.singletonMap(error,
data.topics().stream().mapToInt(t -> t.partitionErrors().size()).sum() + 1);
}
Map<Errors, Integer> errors;
if (version < 5)
errors = errorCounts(data.partitionErrors().stream().map(l -> Errors.forCode(l.errorCode())));
else
errors = errorCounts(data.topics().stream().flatMap(t -> t.partitionErrors().stream()).map(l ->
Errors.forCode(l.errorCode())));
updateErrorCounts(errors, Errors.NONE);
return errors;
} | @Test
public void testErrorCountsNoTopLevelError() {
for (short version : LEADER_AND_ISR.allVersions()) {
LeaderAndIsrResponse response;
if (version < 5) {
List<LeaderAndIsrPartitionError> partitions = createPartitions("foo",
asList(Errors.NONE, Errors.CLUSTER_AUTHORIZATION_FAILED));
response = new LeaderAndIsrResponse(new LeaderAndIsrResponseData()
.setErrorCode(Errors.NONE.code())
.setPartitionErrors(partitions), version);
} else {
Uuid id = Uuid.randomUuid();
LeaderAndIsrTopicErrorCollection topics = createTopic(id, asList(Errors.NONE, Errors.CLUSTER_AUTHORIZATION_FAILED));
response = new LeaderAndIsrResponse(new LeaderAndIsrResponseData()
.setErrorCode(Errors.NONE.code())
.setTopics(topics), version);
}
Map<Errors, Integer> errorCounts = response.errorCounts();
assertEquals(2, errorCounts.size());
assertEquals(2, errorCounts.get(Errors.NONE).intValue());
assertEquals(1, errorCounts.get(Errors.CLUSTER_AUTHORIZATION_FAILED).intValue());
}
} |
@Override
public CloseableIterator<String> readLines(Component file) {
requireNonNull(file, "Component should not be null");
checkArgument(file.getType() == FILE, "Component '%s' is not a file", file);
Optional<CloseableIterator<String>> linesIteratorOptional = reportReader.readFileSource(file.getReportAttributes().getRef());
checkState(linesIteratorOptional.isPresent(), "File '%s' has no source code", file);
CloseableIterator<String> lineIterator = linesIteratorOptional.get();
return new ComponentLinesCloseableIterator(file, lineIterator, file.getFileAttributes().getLines());
} | @Test
public void fail_with_ISE_when_file_has_no_source() {
assertThatThrownBy(() -> {
underTest.readLines(builder(Component.Type.FILE, FILE_REF)
.setKey(FILE_KEY)
.setUuid(FILE_UUID)
.build());
})
.isInstanceOf(IllegalStateException.class)
.hasMessage("File 'ReportComponent{ref=2, key='FILE_KEY', type=FILE}' has no source code");
} |
static BigtableDataSettings translateReadToVeneerSettings(
@NonNull BigtableConfig config,
@NonNull BigtableReadOptions options,
@Nullable BigtableReadOptions optionsFromBigtableOptions,
@NonNull PipelineOptions pipelineOptions)
throws IOException {
BigtableDataSettings.Builder settings = buildBigtableDataSettings(config, pipelineOptions);
return configureReadSettings(settings, options, optionsFromBigtableOptions);
} | @Test
public void testVeneerReadSettings() throws Exception {
BigtableConfig config =
BigtableConfig.builder()
.setProjectId(ValueProvider.StaticValueProvider.of("project"))
.setInstanceId(ValueProvider.StaticValueProvider.of("instance"))
.setAppProfileId(ValueProvider.StaticValueProvider.of("app"))
.setValidate(true)
.build();
BigtableReadOptions readOptions =
BigtableReadOptions.builder()
.setTableId(ValueProvider.StaticValueProvider.of("table"))
.setAttemptTimeout(org.joda.time.Duration.millis(101))
.setOperationTimeout(org.joda.time.Duration.millis(1001))
.build();
PipelineOptions pipelineOptions = PipelineOptionsFactory.as(GcpOptions.class);
BigtableDataSettings settings =
BigtableConfigTranslator.translateReadToVeneerSettings(
config, readOptions, null, pipelineOptions);
EnhancedBigtableStubSettings stubSettings = settings.getStubSettings();
assertEquals(config.getProjectId().get(), stubSettings.getProjectId());
assertEquals(config.getInstanceId().get(), stubSettings.getInstanceId());
assertEquals(config.getAppProfileId().get(), stubSettings.getAppProfileId());
assertEquals(
Duration.ofMillis(101),
stubSettings.readRowsSettings().getRetrySettings().getInitialRpcTimeout());
assertEquals(
Duration.ofMillis(1001),
stubSettings.readRowsSettings().getRetrySettings().getTotalTimeout());
} |
public FEELFnResult<TemporalAccessor> invoke(@ParameterName( "from" ) String val) {
if ( val == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
}
if (!BEGIN_YEAR.matcher(val).find()) { // note: the regex is anchored at the beginning of the string, so find() is sufficient.
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "year not compliant with XML Schema Part 2 Datatypes"));
}
try {
return FEELFnResult.ofResult(LocalDate.from(FEEL_DATE.parse(val)));
} catch (DateTimeException e) {
return manageDateTimeException(e, val);
}
} | @Test
void invokeParamStringNull() {
FunctionTestUtil.assertResultError(dateFunction.invoke((String) null), InvalidParametersEvent.class);
} |
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
if(file.isRoot()) {
return PathAttributes.EMPTY;
}
if(containerService.isContainer(file)) {
final PathAttributes attributes = new PathAttributes();
if(log.isDebugEnabled()) {
log.debug(String.format("Read location for bucket %s", file));
}
attributes.setRegion(new S3LocationFeature(session, session.getClient().getRegionEndpointCache()).getLocation(file).getIdentifier());
return attributes;
}
if(file.getType().contains(Path.Type.upload)) {
final Write.Append append = new S3MultipartUploadService(session, new S3WriteFeature(session, acl), acl).append(file, new TransferStatus());
if(append.append) {
return new PathAttributes().withSize(append.offset);
}
throw new NotfoundException(file.getAbsolute());
}
try {
PathAttributes attr;
final Path bucket = containerService.getContainer(file);
try {
attr = new S3AttributesAdapter(session.getHost()).toAttributes(session.getClient().getVersionedObjectDetails(
file.attributes().getVersionId(), bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file)));
}
catch(ServiceException e) {
switch(e.getResponseCode()) {
case 405:
if(log.isDebugEnabled()) {
log.debug(String.format("Mark file %s as delete marker", file));
}
// Only DELETE method is allowed for delete markers
attr = new PathAttributes();
attr.setCustom(Collections.singletonMap(KEY_DELETE_MARKER, Boolean.TRUE.toString()));
attr.setDuplicate(true);
return attr;
}
throw new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file);
}
if(StringUtils.isNotBlank(attr.getVersionId())) {
if(log.isDebugEnabled()) {
log.debug(String.format("Determine if %s is latest version for %s", attr.getVersionId(), file));
}
// Determine if latest version
try {
final String latest = new S3AttributesAdapter(session.getHost()).toAttributes(session.getClient().getObjectDetails(
bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), containerService.getKey(file))).getVersionId();
if(null != latest) {
if(log.isDebugEnabled()) {
log.debug(String.format("Found later version %s for %s", latest, file));
}
// Duplicate if not latest version
attr.setDuplicate(!latest.equals(attr.getVersionId()));
}
}
catch(ServiceException e) {
final BackgroundException failure = new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file);
if(failure instanceof NotfoundException) {
attr.setDuplicate(true);
}
else {
throw failure;
}
}
}
return attr;
}
catch(NotfoundException e) {
if(file.isDirectory()) {
if(log.isDebugEnabled()) {
log.debug(String.format("Search for common prefix %s", file));
}
// File may be marked as placeholder but no placeholder file exists. Check for common prefix returned.
try {
new S3ObjectListService(session, acl).list(file, new CancellingListProgressListener(), String.valueOf(Path.DELIMITER), 1);
}
catch(ListCanceledException l) {
// Found common prefix
return PathAttributes.EMPTY;
}
catch(NotfoundException n) {
throw e;
}
// Found common prefix
return PathAttributes.EMPTY;
}
throw e;
}
} | @Test
public void testReadWhitespaceInKey() throws Exception {
final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
final S3AccessControlListFeature acl = new S3AccessControlListFeature(session);
final Path directory = new S3DirectoryFeature(session, new S3WriteFeature(session, acl), acl).mkdir(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
final Path file = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(
new Path(directory, String.format("%s %s", new AlphanumericRandomStringService(4).random(), new AlphanumericRandomStringService(4).random()), EnumSet.of(Path.Type.file)), new TransferStatus());
new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(file);
new S3DefaultDeleteFeature(session).delete(Arrays.asList(directory, file), new DisabledLoginCallback(), new Delete.DisabledCallback());
} |
@JsonCreator
public static WindowInfo of(
@JsonProperty(value = "type", required = true) final WindowType type,
@JsonProperty(value = "size") final Optional<Duration> size,
@JsonProperty(value = "emitStrategy") final Optional<OutputRefinement> emitStrategy) {
return new WindowInfo(type, size, emitStrategy);
} | @Test(expected = IllegalArgumentException.class)
public void shouldThrowIfSizeProvidedButNotRequired() {
WindowInfo.of(SESSION, Optional.of(Duration.ofSeconds(10)), Optional.empty());
} |
protected int calculateConcurency() {
final int customLimit = filterConcurrencyCustom.get();
return customLimit != DEFAULT_FILTER_CONCURRENCY_LIMIT ? customLimit : filterConcurrencyDefault.get();
} | @Test
void validateFilterGlobalConcurrencyLimitOverride() {
config.setProperty("zuul.filter.concurrency.limit.default", 7000);
config.setProperty("zuul.ConcInboundFilter.in.concurrency.limit", 4000);
final int[] limit = {0};
class ConcInboundFilter extends BaseFilter {
@Override
public Observable applyAsync(ZuulMessage input) {
limit[0] = calculateConcurency();
return Observable.just("Done");
}
@Override
public FilterType filterType() {
return FilterType.INBOUND;
}
@Override
public boolean shouldFilter(ZuulMessage msg) {
return true;
}
}
new ConcInboundFilter().applyAsync(new ZuulMessageImpl(new SessionContext(), new Headers()));
Truth.assertThat(limit[0]).isEqualTo(7000);
} |
@VisibleForTesting
static String getContainerImageForJob(DataflowPipelineOptions options) {
String containerImage = options.getSdkContainerImage();
if (containerImage == null) {
// If not set, construct and return default image URL.
return getDefaultContainerImageUrl(options);
} else if (containerImage.contains("IMAGE")) {
// Replace placeholder with default image name
return containerImage.replace("IMAGE", getDefaultContainerImageNameForJob(options));
} else {
return containerImage;
}
} | @Test
public void testGetContainerImageForJobFromOptionWithPlaceholder() {
DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
options.setSdkContainerImage("gcr.io/IMAGE/foo");
for (Environments.JavaVersion javaVersion : Environments.JavaVersion.values()) {
System.setProperty("java.specification.version", javaVersion.specification());
// batch legacy
options.setExperiments(null);
options.setStreaming(false);
assertThat(
getContainerImageForJob(options),
equalTo(String.format("gcr.io/beam-%s-batch/foo", javaVersion.legacyName())));
// streaming, legacy
options.setExperiments(null);
options.setStreaming(true);
assertThat(
getContainerImageForJob(options),
equalTo(String.format("gcr.io/beam-%s-streaming/foo", javaVersion.legacyName())));
// batch, FnAPI
options.setExperiments(ImmutableList.of("beam_fn_api"));
options.setStreaming(false);
assertThat(
getContainerImageForJob(options),
equalTo(String.format("gcr.io/beam_%s_sdk/foo", javaVersion.name())));
// streaming, FnAPI
options.setExperiments(ImmutableList.of("beam_fn_api"));
options.setStreaming(true);
assertThat(
getContainerImageForJob(options),
equalTo(String.format("gcr.io/beam_%s_sdk/foo", javaVersion.name())));
}
} |
public Optional<String> fetchFileIfNotModified(String url) throws IOException {
return fetchFile(url, true);
} | @Test
public void doNotRetrieveIfNotModified() throws Exception {
this.server.enqueue(new MockResponse()
.setResponseCode(200)
.setBody("foobar")
.setHeader("Last-Modified", "Fri, 18 Aug 2017 15:02:41 GMT"));
this.server.enqueue(new MockResponse()
.setResponseCode(304)
.setHeader("Last-Modified", "Fri, 18 Aug 2017 15:02:41 GMT"));
server.start();
final HTTPFileRetriever httpFileRetriever = new HTTPFileRetriever(new OkHttpClient());
final Optional<String> body = httpFileRetriever.fetchFileIfNotModified(server.url("/").toString());
final RecordedRequest request = server.takeRequest();
assertThat(request).isNotNull();
assertThat(request.getPath()).isEqualTo("/");
assertThat(body).isNotNull()
.isPresent()
.contains("foobar");
final Optional<String> secondBody = httpFileRetriever.fetchFileIfNotModified(server.url("/").toString());
final RecordedRequest secondRequest = server.takeRequest();
assertThat(secondRequest).isNotNull();
assertThat(secondRequest.getPath()).isEqualTo("/");
assertThat(secondRequest.getHeader("If-Modified-Since")).isEqualTo("Fri, 18 Aug 2017 15:02:41 GMT");
assertThat(secondBody).isNotNull()
.isEmpty();
} |
@Udf(description = "Converts a TIMESTAMP value into the"
+ " string representation of the timestamp in the given format. Single quotes in the"
+ " timestamp format can be escaped with '', for example: 'yyyy-MM-dd''T''HH:mm:ssX'"
+ " The system default time zone is used when no time zone is explicitly provided."
+ " The format pattern should be in the format expected"
+ " by java.time.format.DateTimeFormatter")
public String formatTimestamp(
@UdfParameter(
description = "TIMESTAMP value.") final Timestamp timestamp,
@UdfParameter(
description = "The format pattern should be in the format expected by"
+ " java.time.format.DateTimeFormatter.") final String formatPattern) {
return formatTimestamp(timestamp, formatPattern, ZoneId.of("GMT").getId());
} | @Test
public void shouldReturnNull() {
// When:
final Object result = udf.formatTimestamp(null, "yyyy-MM-dd'T'HH:mm:ss.SSS");
// Then:
assertNull(result);
} |
@Override
public Optional<IndexSet> get(final String indexSetId) {
return this.indexSetsCache.get()
.stream()
.filter(indexSet -> Objects.equals(indexSet.id(), indexSetId))
.map(indexSetConfig -> (IndexSet) mongoIndexSetFactory.create(indexSetConfig))
.findFirst();
} | @Test
public void indexSetsCacheShouldReturnCachedList() {
final IndexSetConfig indexSetConfig = mock(IndexSetConfig.class);
final List<IndexSetConfig> indexSetConfigs = Collections.singletonList(indexSetConfig);
when(indexSetService.findAll()).thenReturn(indexSetConfigs);
final List<IndexSetConfig> result = this.indexSetsCache.get();
assertThat(result)
.isNotNull()
.hasSize(1)
.containsExactly(indexSetConfig);
final List<IndexSetConfig> cachedResult = this.indexSetsCache.get();
assertThat(cachedResult)
.isNotNull()
.hasSize(1)
.containsExactly(indexSetConfig);
verify(indexSetService, times(1)).findAll();
} |
public boolean isAllBindingTables(final Collection<String> logicTableNames) {
if (logicTableNames.isEmpty()) {
return false;
}
Optional<BindingTableRule> bindingTableRule = findBindingTableRule(logicTableNames);
if (!bindingTableRule.isPresent()) {
return false;
}
Collection<String> result = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
result.addAll(bindingTableRule.get().getAllLogicTables());
return !result.isEmpty() && result.containsAll(logicTableNames);
} | @Test
void assertIsAllBindingTableWhenLogicTablesIsEmpty() {
assertFalse(createMaximumShardingRule().isAllBindingTables(Collections.emptyList()));
} |
public CompletableFuture<Boolean> put(
@Nonnull final ReceiptSerial receiptSerial,
final long receiptExpiration,
final long receiptLevel,
@Nonnull final UUID accountUuid) {
// fail early if given bad inputs
Objects.requireNonNull(receiptSerial);
Objects.requireNonNull(accountUuid);
final Instant now = clock.instant();
final Instant rowExpiration = now.plus(expirationTime);
final AttributeValue serialAttributeValue = AttributeValues.b(receiptSerial.serialize());
final UpdateItemRequest updateItemRequest = UpdateItemRequest.builder()
.tableName(table)
.key(Map.of(KEY_SERIAL, serialAttributeValue))
.returnValues(ReturnValue.ALL_NEW)
.updateExpression("SET #ttl = if_not_exists(#ttl, :ttl), "
+ "#receipt_expiration = if_not_exists(#receipt_expiration, :receipt_expiration), "
+ "#receipt_level = if_not_exists(#receipt_level, :receipt_level), "
+ "#account_uuid = if_not_exists(#account_uuid, :account_uuid), "
+ "#redemption_time = if_not_exists(#redemption_time, :redemption_time)")
.expressionAttributeNames(Map.of(
"#ttl", KEY_TTL,
"#receipt_expiration", KEY_RECEIPT_EXPIRATION,
"#receipt_level", KEY_RECEIPT_LEVEL,
"#account_uuid", KEY_ACCOUNT_UUID,
"#redemption_time", KEY_REDEMPTION_TIME))
.expressionAttributeValues(Map.of(
":ttl", AttributeValues.n(rowExpiration.getEpochSecond()),
":receipt_expiration", AttributeValues.n(receiptExpiration),
":receipt_level", AttributeValues.n(receiptLevel),
":account_uuid", AttributeValues.b(accountUuid),
":redemption_time", AttributeValues.n(now.getEpochSecond())))
.build();
return client.updateItem(updateItemRequest).thenApply(updateItemResponse -> {
final Map<String, AttributeValue> attributes = updateItemResponse.attributes();
final long ddbReceiptExpiration = Long.parseLong(attributes.get(KEY_RECEIPT_EXPIRATION).n());
final long ddbReceiptLevel = Long.parseLong(attributes.get(KEY_RECEIPT_LEVEL).n());
final UUID ddbAccountUuid = UUIDUtil.fromByteBuffer(attributes.get(KEY_ACCOUNT_UUID).b().asByteBuffer());
return ddbReceiptExpiration == receiptExpiration && ddbReceiptLevel == receiptLevel &&
Objects.equals(ddbAccountUuid, accountUuid);
});
} | @Test
void testPut() throws ExecutionException, InterruptedException {
final long receiptExpiration = 42;
final long receiptLevel = 3;
CompletableFuture<Boolean> put;
// initial insert should return true
put = redeemedReceiptsManager.put(receiptSerial, receiptExpiration, receiptLevel, AuthHelper.VALID_UUID);
assertThat(put.get()).isTrue();
// subsequent attempted inserts with modified parameters should return false
put = redeemedReceiptsManager.put(receiptSerial, receiptExpiration + 1, receiptLevel, AuthHelper.VALID_UUID);
assertThat(put.get()).isFalse();
put = redeemedReceiptsManager.put(receiptSerial, receiptExpiration, receiptLevel + 1, AuthHelper.VALID_UUID);
assertThat(put.get()).isFalse();
put = redeemedReceiptsManager.put(receiptSerial, receiptExpiration, receiptLevel, AuthHelper.VALID_UUID_TWO);
assertThat(put.get()).isFalse();
// repeated insert attempt of the original parameters should return true
put = redeemedReceiptsManager.put(receiptSerial, receiptExpiration, receiptLevel, AuthHelper.VALID_UUID);
assertThat(put.get()).isTrue();
} |
@Override
public void createFunction(SqlInvokedFunction function, boolean replace)
{
checkCatalog(function);
checkFunctionLanguageSupported(function);
checkArgument(!function.hasVersion(), "function '%s' is already versioned", function);
QualifiedObjectName functionName = function.getFunctionId().getFunctionName();
checkFieldLength("Catalog name", functionName.getCatalogName(), MAX_CATALOG_NAME_LENGTH);
checkFieldLength("Schema name", functionName.getSchemaName(), MAX_SCHEMA_NAME_LENGTH);
if (!functionNamespaceDao.functionNamespaceExists(functionName.getCatalogName(), functionName.getSchemaName())) {
throw new PrestoException(NOT_FOUND, format("Function namespace not found: %s", functionName.getCatalogSchemaName()));
}
checkFieldLength("Function name", functionName.getObjectName(), MAX_FUNCTION_NAME_LENGTH);
if (function.getParameters().size() > MAX_PARAMETER_COUNT) {
throw new PrestoException(GENERIC_USER_ERROR, format("Function has more than %s parameters: %s", MAX_PARAMETER_COUNT, function.getParameters().size()));
}
for (Parameter parameter : function.getParameters()) {
checkFieldLength("Parameter name", parameter.getName(), MAX_PARAMETER_NAME_LENGTH);
}
checkFieldLength(
"Parameter type list",
function.getFunctionId().getArgumentTypes().stream()
.map(TypeSignature::toString)
.collect(joining(",")),
MAX_PARAMETER_TYPES_LENGTH);
checkFieldLength("Return type", function.getSignature().getReturnType().toString(), MAX_RETURN_TYPE_LENGTH);
jdbi.useTransaction(handle -> {
FunctionNamespaceDao transactionDao = handle.attach(functionNamespaceDaoClass);
Optional<SqlInvokedFunctionRecord> latestVersion = transactionDao.getLatestRecordForUpdate(hash(function.getFunctionId()), function.getFunctionId());
if (!replace && latestVersion.isPresent() && !latestVersion.get().isDeleted()) {
throw new PrestoException(ALREADY_EXISTS, "Function already exists: " + function.getFunctionId());
}
if (!latestVersion.isPresent() || !latestVersion.get().getFunction().hasSameDefinitionAs(function)) {
long newVersion = latestVersion.map(SqlInvokedFunctionRecord::getFunction).map(MySqlFunctionNamespaceManager::getLongVersion).orElse(0L) + 1;
insertSqlInvokedFunction(transactionDao, function, newVersion);
}
else if (latestVersion.get().isDeleted()) {
SqlInvokedFunction latest = latestVersion.get().getFunction();
checkState(latest.hasVersion(), "Function version missing: %s", latest.getFunctionId());
transactionDao.setDeletionStatus(hash(latest.getFunctionId()), latest.getFunctionId(), getLongVersion(latest), false);
}
});
refreshFunctionsCache(functionName);
} | @Test
public void testGetFunctionMetadata()
{
createFunction(FUNCTION_POWER_TOWER_DOUBLE, true);
FunctionHandle handle1 = getLatestFunctionHandle(FUNCTION_POWER_TOWER_DOUBLE.getFunctionId());
assertGetFunctionMetadata(handle1, FUNCTION_POWER_TOWER_DOUBLE);
createFunction(FUNCTION_POWER_TOWER_DOUBLE_UPDATED, true);
FunctionHandle handle2 = getLatestFunctionHandle(FUNCTION_POWER_TOWER_DOUBLE_UPDATED.getFunctionId());
assertGetFunctionMetadata(handle1, FUNCTION_POWER_TOWER_DOUBLE);
assertGetFunctionMetadata(handle2, FUNCTION_POWER_TOWER_DOUBLE_UPDATED);
} |
@VisibleForTesting
Collection<String> getVolumesLowOnSpace() throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Going to check the following volumes disk space: " + volumes);
}
Collection<String> lowVolumes = new ArrayList<String>();
for (CheckedVolume volume : volumes.values()) {
lowVolumes.add(volume.getVolume());
}
return lowVolumes;
} | @Test
public void testChecking2NameDirsOnOneVolume() throws IOException {
Configuration conf = new Configuration();
File nameDir1 = new File(BASE_DIR, "name-dir1");
File nameDir2 = new File(BASE_DIR, "name-dir2");
nameDir1.mkdirs();
nameDir2.mkdirs();
conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
nameDir1.getAbsolutePath() + "," + nameDir2.getAbsolutePath());
conf.setLong(DFSConfigKeys.DFS_NAMENODE_DU_RESERVED_KEY, Long.MAX_VALUE);
NameNodeResourceChecker nb = new NameNodeResourceChecker(conf);
assertEquals("Should not check the same volume more than once.",
1, nb.getVolumesLowOnSpace().size());
} |
public void computeCpd(Component component, Collection<Block> originBlocks, Collection<Block> duplicationBlocks) {
CloneIndex duplicationIndex = new PackedMemoryCloneIndex();
populateIndex(duplicationIndex, originBlocks);
populateIndex(duplicationIndex, duplicationBlocks);
List<CloneGroup> duplications = SuffixTreeCloneDetectionAlgorithm.detect(duplicationIndex, originBlocks);
Iterable<CloneGroup> filtered = duplications.stream()
.filter(getNumberOfUnitsNotLessThan(component.getFileAttributes().getLanguageKey()))
.toList();
addDuplications(component, filtered);
} | @Test
public void add_no_duplication_when_no_duplicated_blocks() {
settings.setProperty("sonar.cpd.xoo.minimumTokens", 10);
Collection<Block> originBlocks = singletonList(
new Block.Builder()
.setResourceId(ORIGIN_FILE_KEY)
.setBlockHash(new ByteArray("a8998353e96320ec"))
.setIndexInFile(0)
.setLines(30, 45)
.setUnit(0, 10)
.build());
underTest.computeCpd(ORIGIN_FILE, originBlocks, Collections.emptyList());
assertNoDuplicationAdded(ORIGIN_FILE);
} |
public static Result<Boolean> isRowsEquals(TableMeta tableMetaData, List<Row> oldRows, List<Row> newRows) {
if (!CollectionUtils.isSizeEquals(oldRows, newRows)) {
return Result.build(false, null);
}
return compareRows(tableMetaData, oldRows, newRows);
} | @Test
public void isRowsEquals() {
TableMeta tableMeta = Mockito.mock(TableMeta.class);
Mockito.when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[]{"pk"}));
Mockito.when(tableMeta.getTableName()).thenReturn("table_name");
List<Row> rows = new ArrayList<>();
Field field = new Field("pk", 1, "12345");
Row row = new Row();
row.add(field);
rows.add(row);
Assertions.assertFalse(DataCompareUtils.isRowsEquals(tableMeta, rows, null).getResult());
Assertions.assertFalse(DataCompareUtils.isRowsEquals(tableMeta, null, rows).getResult());
List<Row> rows2 = new ArrayList<>();
Field field2 = new Field("pk", 1, "12345");
Row row2 = new Row();
row2.add(field2);
rows2.add(row2);
Assertions.assertTrue(DataCompareUtils.isRowsEquals(tableMeta, rows, rows2).getResult());
field.setValue("23456");
Assertions.assertFalse(DataCompareUtils.isRowsEquals(tableMeta, rows, rows2).getResult());
field.setValue("12345");
Field field3 = new Field("pk", 1, "12346");
Row row3 = new Row();
row3.add(field3);
rows2.add(row3);
Assertions.assertFalse(DataCompareUtils.isRowsEquals(tableMeta, rows, rows2).getResult());
} |
public static TbMathArgumentValue fromMessageBody(TbMathArgument arg, String argKey, Optional<ObjectNode> jsonNodeOpt) {
Double defaultValue = arg.getDefaultValue();
if (jsonNodeOpt.isEmpty()) {
return defaultOrThrow(defaultValue, "Message body is empty!");
}
var json = jsonNodeOpt.get();
if (!json.has(argKey)) {
return defaultOrThrow(defaultValue, "Message body has no '" + argKey + "'!");
}
JsonNode valueNode = json.get(argKey);
if (valueNode.isNull()) {
return defaultOrThrow(defaultValue, "Message body has null '" + argKey + "'!");
}
double value;
if (valueNode.isNumber()) {
value = valueNode.doubleValue();
} else if (valueNode.isTextual()) {
var valueNodeText = valueNode.asText();
if (StringUtils.isNotBlank(valueNodeText)) {
try {
value = Double.parseDouble(valueNode.asText());
} catch (NumberFormatException ne) {
throw new RuntimeException("Can't convert value '" + valueNode.asText() + "' to double!");
}
} else {
return defaultOrThrow(defaultValue, "Message value is empty for '" + argKey + "'!");
}
} else {
throw new RuntimeException("Can't convert value '" + valueNode.toString() + "' to double!");
}
return new TbMathArgumentValue(value);
} | @Test
public void test_fromMessageBody_then_valueEmpty() {
TbMathArgument tbMathArgument = new TbMathArgument(TbMathArgumentType.MESSAGE_BODY, "TestKey");
ObjectNode msgData = JacksonUtil.newObjectNode();
msgData.putNull("TestKey");
//null value
Throwable thrown = assertThrows(RuntimeException.class, () -> TbMathArgumentValue.fromMessageBody(tbMathArgument, tbMathArgument.getKey(), Optional.of(msgData)));
Assertions.assertNotNull(thrown.getMessage());
//empty value
msgData.put("TestKey", "");
thrown = assertThrows(RuntimeException.class, () -> TbMathArgumentValue.fromMessageBody(tbMathArgument, tbMathArgument.getKey(), Optional.of(msgData)));
Assertions.assertNotNull(thrown.getMessage());
} |
public long cardinality() {
HLLRepresentation representation = store();
return representation == null ? 0 : representation.cardinality();
} | @Test(dataProvider = "representations")
public void testSingleThreadOperationsRepresentation(HLLRepresentation representation, long expected) {
assertThat(representation.cardinality()).isEqualTo(0L);
for (int i = 0; i < 1000; i++) {
representation.set(("zt-" + i).getBytes(StandardCharsets.US_ASCII));
}
for (int i = 0; i < 1000; i++) {
representation.set(("tt-" + i).getBytes(StandardCharsets.US_ASCII));
}
assertThat(representation.cardinality()).isEqualTo(expected);
} |
public ParquetMetadataCommand(Logger console) {
super(console);
} | @Test
public void testParquetMetadataCommand() throws IOException {
File file = parquetFile();
ParquetMetadataCommand command = new ParquetMetadataCommand(createLogger());
command.targets = Arrays.asList(file.getAbsolutePath());
command.setConf(new Configuration());
Assert.assertEquals(0, command.run());
} |
@Nullable
@Override
public Message decode(@Nonnull RawMessage rawMessage) {
final byte[] payload = rawMessage.getPayload();
final JsonNode event;
try {
event = objectMapper.readTree(payload);
if (event == null || event.isMissingNode()) {
throw new IOException("null result");
}
} catch (IOException e) {
LOG.error("Couldn't decode raw message {}", rawMessage);
return null;
}
return parseEvent(event);
} | @Test
public void decodeMessagesHandlesPacketbeatV8Messages() throws Exception {
final Message message = codec.decode(messageFromJson("packetbeat-mongodb-v8.json"));
assertThat(message).isNotNull();
assertThat(message.getSource()).isEqualTo("example.local");
assertThat(message.getTimestamp()).isEqualTo(new DateTime(2022, 11, 7, 9, 26, 10, 579, DateTimeZone.UTC));
assertThat(message.getField("beats_type")).isEqualTo("packetbeat");
assertThat(message.getField("packetbeat_type")).isEqualTo("mongodb");
assertThat(message.getField("packetbeat_status")).isEqualTo("OK");
assertThat(message.getField("packetbeat_method")).isEqualTo("msg");
assertThat(message.getField("packetbeat_network_bytes")).isEqualTo(557);
assertThat(message.getField("packetbeat_network_type")).isEqualTo("ipv4");
assertThat(message.getField("packetbeat_source_ip")).isEqualTo("10.0.55.1");
assertThat(message.getField("packetbeat_destination_ip")).isEqualTo("10.0.55.2");
assertThat(message.getField("packetbeat_destination_port")).isEqualTo(27017);
assertThat(message.getField("packetbeat_host_containerized")).isEqualTo(false);
} |
static SamplingFlags toSamplingFlags(int flags) {
switch (flags) {
case 0:
return EMPTY;
case FLAG_SAMPLED_SET:
return NOT_SAMPLED;
case FLAG_SAMPLED_SET | FLAG_SAMPLED:
return SAMPLED;
case FLAG_SAMPLED_SET | FLAG_SAMPLED | FLAG_DEBUG:
return DEBUG;
case FLAG_SAMPLED_LOCAL:
return EMPTY_SAMPLED_LOCAL;
case FLAG_SAMPLED_LOCAL | FLAG_SAMPLED_SET:
return NOT_SAMPLED_SAMPLED_LOCAL;
case FLAG_SAMPLED_LOCAL | FLAG_SAMPLED_SET | FLAG_SAMPLED:
return SAMPLED_SAMPLED_LOCAL;
case FLAG_SAMPLED_LOCAL | FLAG_SAMPLED_SET | FLAG_SAMPLED | FLAG_DEBUG:
return DEBUG_SAMPLED_LOCAL;
default:
assert false; // programming error, but build anyway
return new SamplingFlags(flags);
}
} | @Test void toSamplingFlags_returnsConstantsAndHasNiceToString() {
assertThat(toSamplingFlags(SamplingFlags.EMPTY.flags))
.isSameAs(SamplingFlags.EMPTY)
.hasToString("");
assertThat(toSamplingFlags(SamplingFlags.NOT_SAMPLED.flags))
.isSameAs(SamplingFlags.NOT_SAMPLED)
.hasToString("NOT_SAMPLED_REMOTE");
assertThat(toSamplingFlags(SamplingFlags.SAMPLED.flags))
.isSameAs(SamplingFlags.SAMPLED)
.hasToString("SAMPLED_REMOTE");
assertThat(toSamplingFlags(SamplingFlags.DEBUG.flags))
.isSameAs(SamplingFlags.DEBUG)
.hasToString("DEBUG");
assertThat(toSamplingFlags(SamplingFlags.EMPTY.flags | FLAG_SAMPLED_LOCAL))
.isSameAs(SamplingFlags.EMPTY_SAMPLED_LOCAL)
.hasToString("SAMPLED_LOCAL");
assertThat(toSamplingFlags(SamplingFlags.NOT_SAMPLED.flags | FLAG_SAMPLED_LOCAL))
.isSameAs(SamplingFlags.NOT_SAMPLED_SAMPLED_LOCAL)
.hasToString("NOT_SAMPLED_REMOTE|SAMPLED_LOCAL");
assertThat(toSamplingFlags(SamplingFlags.SAMPLED.flags | FLAG_SAMPLED_LOCAL))
.isSameAs(SamplingFlags.SAMPLED_SAMPLED_LOCAL)
.hasToString("SAMPLED_REMOTE|SAMPLED_LOCAL");
assertThat(toSamplingFlags(SamplingFlags.DEBUG.flags | FLAG_SAMPLED_LOCAL))
.isSameAs(SamplingFlags.DEBUG_SAMPLED_LOCAL)
.hasToString("DEBUG|SAMPLED_LOCAL");
} |
@Override
public void insertEdge(E edge) {
checkNotNull(edge, "Edge cannot be null");
checkArgument(edges.isEmpty() || src().equals(edge.dst()),
"Edge destination must be the same as the current path source");
edges.add(0, edge);
} | @Test
public void insertEdge() {
MutablePath<TestVertex, TestEdge> p = new DefaultMutablePath<>();
p.insertEdge(new TestEdge(B, C));
p.insertEdge(new TestEdge(A, B));
validatePath(p, A, C, 2);
} |
@Override
public void close() throws IOException {
if (mClosed.getAndSet(true)) {
return;
}
mLocalOutputStream.close();
try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(mFile))) {
ObjectMetadata meta = new ObjectMetadata();
meta.setContentLength(mFile.length());
if (mHash != null) {
byte[] hashBytes = mHash.digest();
meta.setContentMD5(new String(Base64.encodeBase64(hashBytes)));
}
mContentHash = mCosClient.putObject(mBucketName, mKey, in, meta).getETag();
} catch (CosClientException e) {
LOG.error("Failed to upload {}. ", mKey);
throw new IOException(e);
} finally {
// Delete the temporary file on the local machine if the COS client completed the
// upload or if the upload failed.
if (!mFile.delete()) {
LOG.error("Failed to delete temporary file @ {}", mFile.getPath());
}
}
return;
} | @Test
@PrepareForTest(COSOutputStream.class)
public void testConstructor() throws Exception {
PowerMockito.whenNew(File.class).withArguments(Mockito.anyString()).thenReturn(mFile);
String errorMessage = "protocol doesn't support output";
PowerMockito.whenNew(FileOutputStream.class).withArguments(mFile)
.thenThrow(new IOException(errorMessage));
mThrown.expect(IOException.class);
mThrown.expectMessage(errorMessage);
new COSOutputStream("testBucketName", "testKey", mCosClient,
sConf.getList(PropertyKey.TMP_DIRS)).close();
} |
public static Predicate parse(String expression)
{
final Stack<Predicate> predicateStack = new Stack<>();
final Stack<Character> operatorStack = new Stack<>();
final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll("");
final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true);
boolean isTokenMode = true;
while (true)
{
final Character operator;
final String token;
if (isTokenMode)
{
if (tokenizer.hasMoreTokens())
{
token = tokenizer.nextToken();
}
else
{
break;
}
if (OPERATORS.contains(token))
{
operator = token.charAt(0);
}
else
{
operator = null;
}
}
else
{
operator = operatorStack.pop();
token = null;
}
isTokenMode = true;
if (operator == null)
{
try
{
predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance());
}
catch (ClassCastException e)
{
throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e);
}
catch (Exception e)
{
throw new RuntimeException(e);
}
}
else
{
if (operatorStack.empty() || operator == '(')
{
operatorStack.push(operator);
}
else if (operator == ')')
{
while (operatorStack.peek() != '(')
{
evaluate(predicateStack, operatorStack);
}
operatorStack.pop();
}
else
{
if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek()))
{
evaluate(predicateStack, operatorStack);
isTokenMode = false;
}
operatorStack.push(operator);
}
}
}
while (!operatorStack.empty())
{
evaluate(predicateStack, operatorStack);
}
if (predicateStack.size() > 1)
{
throw new RuntimeException("Invalid logical expression");
}
return predicateStack.pop();
} | @Test
public void testNotNotAnd()
{
final Predicate parsed = PredicateExpressionParser.parse("!!com.linkedin.data.it.AlwaysTruePredicate & com.linkedin.data.it.AlwaysFalsePredicate");
Assert.assertEquals(parsed.getClass(), AndPredicate.class);
final List<Predicate> andChildren = ((AndPredicate) parsed).getChildPredicates();
Assert.assertEquals(andChildren.get(0).getClass(), NotPredicate.class);
Assert.assertEquals(andChildren.get(1).getClass(), AlwaysFalsePredicate.class);
final Predicate notChild1 = ((NotPredicate) andChildren.get(0)).getChildPredicate();
Assert.assertEquals(notChild1.getClass(), NotPredicate.class);
final Predicate notChild2 = ((NotPredicate) notChild1).getChildPredicate();
Assert.assertEquals(notChild2.getClass(), AlwaysTruePredicate.class);
} |
public static Applications copyApplications(Applications source) {
Applications result = new Applications();
copyApplications(source, result);
return updateMeta(result);
} | @Test
public void testCopyApplicationsIfNotNullReturnApplications() {
Application application1 = createSingleInstanceApp("foo", "foo",
InstanceInfo.ActionType.ADDED);
Application application2 = createSingleInstanceApp("bar", "bar",
InstanceInfo.ActionType.ADDED);
Applications applications = createApplications();
applications.addApplication(application1);
applications.addApplication(application2);
Assert.assertEquals(2,
EurekaEntityFunctions.copyApplications(applications).size());
} |
@Override
public <T> T clone(T object) {
if (object instanceof String) {
return object;
} else if (object instanceof Collection) {
Object firstElement = findFirstNonNullElement((Collection) object);
if (firstElement != null && !(firstElement instanceof Serializable)) {
JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
}
} else if (object instanceof Map) {
Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
if (firstEntry != null) {
Object key = firstEntry.getKey();
Object value = firstEntry.getValue();
if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
}
}
} else if (object instanceof JsonNode) {
return (T) ((JsonNode) object).deepCopy();
}
if (object instanceof Serializable) {
try {
return (T) SerializationHelper.clone((Serializable) object);
} catch (SerializationException e) {
//it is possible that object itself implements java.io.Serializable, but underlying structure does not
//in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
}
}
return jsonClone(object);
} | @Test
public void should_clone_map_of_non_serializable_value() {
Map<String, NonSerializableObject> original = new HashMap<>();
original.put("key", new NonSerializableObject("value"));
Object cloned = serializer.clone(original);
assertEquals(original, cloned);
assertNotSame(original, cloned);
} |
@Override
public Flux<ReactiveRedisConnection.BooleanResponse<RenameCommand>> renameNX(Publisher<RenameCommand> commands) {
return execute(commands, command -> {
Assert.notNull(command.getKey(), "Key must not be null!");
Assert.notNull(command.getNewName(), "New name must not be null!");
byte[] keyBuf = toByteArray(command.getKey());
byte[] newKeyBuf = toByteArray(command.getNewName());
if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
return super.renameNX(commands);
}
return exists(command.getNewName())
.zipWith(read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf))
.filter(newKeyExistsAndDump -> !newKeyExistsAndDump.getT1() && Objects.nonNull(newKeyExistsAndDump.getT2()))
.map(Tuple2::getT2)
.zipWhen(value ->
pTtl(command.getKey())
.filter(Objects::nonNull)
.map(ttl -> Math.max(0, ttl))
.switchIfEmpty(Mono.just(0L))
)
.flatMap(valueAndTtl -> write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1())
.then(Mono.just(true)))
.switchIfEmpty(Mono.just(false))
.doOnSuccess(didRename -> {
if (didRename) {
del(command.getKey());
}
})
.map(didRename -> new BooleanResponse<>(command, didRename));
});
} | @Test
public void testRenameNX() {
connection.stringCommands().set(originalKey, value).block();
if (hasTtl) {
connection.keyCommands().expire(originalKey, Duration.ofSeconds(1000)).block();
}
Integer originalSlot = getSlotForKey(originalKey);
newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot));
Boolean result = connection.keyCommands().renameNX(originalKey, newKey).block();
assertThat(result).isTrue();
assertThat(connection.stringCommands().get(newKey).block()).isEqualTo(value);
if (hasTtl) {
assertThat(connection.keyCommands().ttl(newKey).block()).isGreaterThan(0);
} else {
assertThat(connection.keyCommands().ttl(newKey).block()).isEqualTo(-1);
}
connection.stringCommands().set(originalKey, value).block();
result = connection.keyCommands().renameNX(originalKey, newKey).block();
assertThat(result).isFalse();
} |
@Override
public void execute(ComputationStep.Context context) {
PostMeasuresComputationCheck.Context extensionContext = new ContextImpl();
for (PostMeasuresComputationCheck extension : extensions) {
extension.onCheck(extensionContext);
}
} | @Test
public void do_nothing_if_no_extensions() {
// no failure
newStep().execute(new TestComputationStepContext());
} |
@Override
public @Nullable V replace(K key, V value) {
requireNonNull(key);
requireNonNull(value);
int[] oldWeight = new int[1];
@SuppressWarnings("unchecked")
K[] nodeKey = (K[]) new Object[1];
@SuppressWarnings("unchecked")
V[] oldValue = (V[]) new Object[1];
long[] now = new long[1];
int weight = weigher.weigh(key, value);
Node<K, V> node = data.computeIfPresent(nodeFactory.newLookupKey(key), (k, n) -> {
synchronized (n) {
requireIsAlive(key, n);
nodeKey[0] = n.getKey();
oldValue[0] = n.getValue();
oldWeight[0] = n.getWeight();
if ((nodeKey[0] == null) || (oldValue[0] == null)
|| hasExpired(n, now[0] = expirationTicker().read())) {
oldValue[0] = null;
return n;
}
long varTime = expireAfterUpdate(n, key, value, expiry(), now[0]);
n.setValue(value, valueReferenceQueue());
n.setWeight(weight);
setVariableTime(n, varTime);
setAccessTime(n, now[0]);
setWriteTime(n, now[0]);
discardRefresh(k);
return n;
}
});
if (oldValue[0] == null) {
return null;
}
int weightedDifference = (weight - oldWeight[0]);
if (expiresAfterWrite() || (weightedDifference != 0)) {
afterWrite(new UpdateTask(node, weightedDifference));
} else {
afterRead(node, now[0], /* recordHit */ false);
}
notifyOnReplace(nodeKey[0], oldValue[0], value);
return oldValue[0];
} | @CheckMaxLogLevel(ERROR)
@Test(dataProvider = "caches")
@CacheSpec(population = Population.EMPTY, keys = ReferenceType.STRONG)
public void brokenEquality_replaceConditionally(
BoundedLocalCache<MutableInt, Int> cache, CacheContext context) {
testForBrokenEquality(cache, context, key -> {
boolean replaced = cache.replace(key, context.absentValue(), context.absentValue().negate());
assertThat(replaced).isTrue();
});
} |
@Override
public int hashCode() {
return Objects.hash(
threadName,
threadState,
activeTasks,
standbyTasks,
mainConsumerClientId,
restoreConsumerClientId,
producerClientIds,
adminClientId);
} | @Test
public void shouldBeEqualIfSameObject() {
final ThreadMetadata same = new ThreadMetadataImpl(
THREAD_NAME,
THREAD_STATE,
MAIN_CONSUMER_CLIENT_ID,
RESTORE_CONSUMER_CLIENT_ID,
PRODUCER_CLIENT_IDS,
ADMIN_CLIENT_ID,
ACTIVE_TASKS,
STANDBY_TASKS
);
assertThat(threadMetadata, equalTo(same));
assertThat(threadMetadata.hashCode(), equalTo(same.hashCode()));
} |
public static URI createRemainingURI(URI originalURI, Map<String, Object> params) throws URISyntaxException {
String s = createQueryString(params);
if (s.isEmpty()) {
s = null;
}
return createURIWithQuery(originalURI, s);
} | @Test
public void testCreateRemainingURIEncoding() throws Exception {
// the uri is already encoded, but we create a new one with new query parameters
String uri = "http://localhost:23271/myapp/mytest?columns=name%2Ctotalsens%2Cupsens&username=apiuser";
// these are the parameters which is tricky to encode
Map<String, Object> map = new LinkedHashMap<>();
map.put("foo", "abc def");
map.put("bar", "123,456");
map.put("name", "S\u00F8ren"); // danish letter
// create new uri with the parameters
URI out = URISupport.createRemainingURI(new URI(uri), map);
assertNotNull(out);
assertEquals("http://localhost:23271/myapp/mytest?foo=abc+def&bar=123%2C456&name=S%C3%B8ren", out.toString());
assertEquals("http://localhost:23271/myapp/mytest?foo=abc+def&bar=123%2C456&name=S%C3%B8ren", out.toASCIIString());
} |
@GuardedBy("evictionLock")
protected void setMaximum(long maximum) {
throw new UnsupportedOperationException();
} | @Test(dataProvider = "caches")
@CacheSpec(compute = Compute.SYNC, population = Population.EMPTY, maximumSize = Maximum.FULL)
public void drain_blocksCapacity(BoundedLocalCache<Int, Int> cache,
CacheContext context, Eviction<Int, Int> eviction) {
checkDrainBlocks(cache, () -> eviction.setMaximum(0));
} |
static public long copy(Reader input, Writer output) throws IOException {
char[] buffer = new char[1 << 12];
long count = 0;
for (int n = 0; (n = input.read(buffer)) >= 0; ) {
output.write(buffer, 0, n);
count += n;
}
return count;
} | @Test
public void testCopy() throws Exception {
char[] arr = "testToString".toCharArray();
Reader reader = new CharArrayReader(arr);
Writer writer = new CharArrayWriter();
long count = IOTinyUtils.copy(reader, writer);
assertEquals(arr.length, count);
} |
public static Ip4Address valueOf(int value) {
byte[] bytes =
ByteBuffer.allocate(INET_BYTE_LENGTH).putInt(value).array();
return new Ip4Address(bytes);
} | @Test(expected = NullPointerException.class)
public void testInvalidValueOfNullString() {
Ip4Address ipAddress;
String fromString = null;
ipAddress = Ip4Address.valueOf(fromString);
} |
public H3IndexResolution getResolution() {
return _resolution;
} | @Test
public void withSomeData()
throws JsonProcessingException {
String confStr = "{\n"
+ " \"resolution\": [13, 5, 6]\n"
+ "}";
H3IndexConfig config = JsonUtils.stringToObject(confStr, H3IndexConfig.class);
assertFalse(config.isDisabled(), "Unexpected disabled");
H3IndexResolution resolution = config.getResolution();
Assert.assertEquals(resolution.size(), 3);
Assert.assertEquals(resolution.getLowestResolution(), 5);
Assert.assertEquals(resolution.getResolutions(), Lists.newArrayList(5, 6, 13));
} |
public List<String> listTableNames(String catalogName, String dbName) {
Optional<ConnectorMetadata> connectorMetadata = getOptionalMetadata(catalogName);
ImmutableSet.Builder<String> tableNames = ImmutableSet.builder();
if (connectorMetadata.isPresent()) {
try {
connectorMetadata.get().listTableNames(dbName).forEach(tableNames::add);
} catch (Exception e) {
LOG.error("Failed to listTableNames on [{}.{}]", catalogName, dbName, e);
throw e;
}
}
return ImmutableList.copyOf(tableNames.build());
} | @Test
public void testListTblNames(@Mocked HiveMetaStoreClient metaStoreThriftClient) throws TException, DdlException {
new Expectations() {
{
metaStoreThriftClient.getAllTables("db2");
result = Lists.newArrayList("tbl2");
minTimes = 0;
}
};
MetadataMgr metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
List<String> internalTables = metadataMgr.listTableNames("default_catalog", "db1");
Assert.assertTrue(internalTables.contains("tbl1"));
try {
metadataMgr.listTableNames("default_catalog", "db_foo");
Assert.fail();
} catch (StarRocksConnectorException e) {
Assert.assertTrue(e.getMessage().contains("Database db_foo doesn't exist"));
}
List<String> externalTables = metadataMgr.listTableNames("hive_catalog", "db2");
Assert.assertTrue(externalTables.contains("tbl2"));
externalTables = metadataMgr.listTableNames("hive_catalog", "db3");
Assert.assertTrue(externalTables.isEmpty());
} |
static String generateDatabaseName(String baseString) {
return generateResourceId(
baseString,
ILLEGAL_DATABASE_NAME_CHARS,
REPLACE_DATABASE_NAME_CHAR,
MAX_DATABASE_NAME_LENGTH,
TIME_FORMAT);
} | @Test
public void testGenerateDatabaseNameShouldReplacePeriod() {
String testBaseString = "Test.DB.Name";
String actual = generateDatabaseName(testBaseString);
assertThat(actual).matches("test-db-name-\\d{8}-\\d{6}-\\d{6}");
} |
@Override
public void updateService(Service service) {
checkNotNull(service, ERR_NULL_SERVICE);
checkArgument(!Strings.isNullOrEmpty(service.getMetadata().getUid()),
ERR_NULL_SERVICE_UID);
k8sServiceStore.updateService(service);
log.info(String.format(MSG_SERVICE, service.getMetadata().getName(), MSG_UPDATED));
} | @Test(expected = IllegalArgumentException.class)
public void testUpdateUnregisteredService() {
target.updateService(SERVICE);
} |
public static Function<Integer, Integer> composeFunctions(Function<Integer, Integer> f1, Function<Integer, Integer> f2) {
return f1.andThen(f2);
} | @Test
public void testComposeFunctions() {
Function<Integer, Integer> timesTwo = x -> x * 2;
Function<Integer, Integer> square = x -> x * x;
Function<Integer, Integer> composed = FunctionComposer.composeFunctions(timesTwo, square);
assertEquals("Expected output of composed functions is 36", 36, (int) composed.apply(3));
} |
public void setError(final int errorCode) {
if (this.destroyed) {
LOG.warn("ThreadId: {} already destroyed, ignore error code: {}", this.data, errorCode);
return;
}
this.lock.lock();
try {
if (this.destroyed) {
LOG.warn("ThreadId: {} already destroyed, ignore error code: {}", this.data, errorCode);
return;
}
if (this.onError != null) {
this.onError.onError(this, this.data, errorCode);
}
} finally {
// It may have been released during onError to avoid throwing an exception.
if (this.lock.isHeldByCurrentThread()) {
this.lock.unlock();
}
}
} | @Test
public void testSetError() throws Exception {
this.id.setError(100);
assertEquals(100, this.errorCode);
CountDownLatch latch = new CountDownLatch(1);
new Thread() {
@Override
public void run() {
ThreadIdTest.this.id.setError(99);
latch.countDown();
}
}.start();
latch.await();
assertEquals(99, this.errorCode);
} |
public static void main(String[] args) {
var conUnix = new ConfigureForUnixVisitor();
var conDos = new ConfigureForDosVisitor();
var zoom = new Zoom();
var hayes = new Hayes();
hayes.accept(conDos); // Hayes modem with Dos configurator
zoom.accept(conDos); // Zoom modem with Dos configurator
hayes.accept(conUnix); // Hayes modem with Unix configurator
zoom.accept(conUnix); // Zoom modem with Unix configurator
} | @Test
void shouldExecuteApplicationWithoutException() {
assertDoesNotThrow(() -> App.main(new String[]{}));
} |
@Override
public IcebergSourceSplit deserialize(int version, byte[] serialized) throws IOException {
switch (version) {
case 1:
return IcebergSourceSplit.deserializeV1(serialized);
case 2:
return IcebergSourceSplit.deserializeV2(serialized, caseSensitive);
case 3:
return IcebergSourceSplit.deserializeV3(serialized, caseSensitive);
default:
throw new IOException(
String.format(
"Failed to deserialize IcebergSourceSplit. "
+ "Encountered unsupported version: %d. Supported version are [1]",
version));
}
} | @Test
public void testDeserializeV1() throws Exception {
final List<IcebergSourceSplit> splits =
SplitHelpers.createSplitsFromTransientHadoopTable(temporaryFolder, 1, 1);
for (IcebergSourceSplit split : splits) {
byte[] result = split.serializeV1();
IcebergSourceSplit deserialized = serializer.deserialize(1, result);
assertSplitEquals(split, deserialized);
}
} |
static TypeName getEventNativeType(TypeName typeName) {
if (typeName instanceof ParameterizedTypeName) {
return TypeName.get(byte[].class);
}
String simpleName = ((ClassName) typeName).simpleName();
if (simpleName.equals(Utf8String.class.getSimpleName())) {
return TypeName.get(byte[].class);
} else {
return getNativeType(typeName);
}
} | @Test
public void testGetEventNativeTypeParameterized() {
assertEquals(
getEventNativeType(
ParameterizedTypeName.get(
ClassName.get(DynamicArray.class), TypeName.get(Address.class))),
(TypeName.get(byte[].class)));
} |
public static RestartBackoffTimeStrategy.Factory createRestartBackoffTimeStrategyFactory(
final RestartStrategies.RestartStrategyConfiguration jobRestartStrategyConfiguration,
final Configuration jobConfiguration,
final Configuration clusterConfiguration,
final boolean isCheckpointingEnabled) {
checkNotNull(jobRestartStrategyConfiguration);
checkNotNull(jobConfiguration);
checkNotNull(clusterConfiguration);
return getJobRestartStrategyFactory(jobRestartStrategyConfiguration)
.orElse(
getRestartStrategyFactoryFromConfig(jobConfiguration)
.orElse(
(getRestartStrategyFactoryFromConfig(clusterConfiguration)
.orElse(
getDefaultRestartStrategyFactory(
isCheckpointingEnabled)))));
} | @Test
void testInvalidStrategySpecifiedInJobConfig() {
final Configuration conf = new Configuration();
conf.set(RestartStrategyOptions.RESTART_STRATEGY, "invalid-strategy");
assertThatThrownBy(
() ->
RestartBackoffTimeStrategyFactoryLoader
.createRestartBackoffTimeStrategyFactory(
DEFAULT_JOB_LEVEL_RESTART_CONFIGURATION,
conf,
new Configuration(),
false))
.isInstanceOf(IllegalArgumentException.class);
} |
public static RawTransaction decode(final String hexTransaction) {
final byte[] transaction = Numeric.hexStringToByteArray(hexTransaction);
TransactionType transactionType = getTransactionType(transaction);
switch (transactionType) {
case EIP1559:
return decodeEIP1559Transaction(transaction);
case EIP4844:
return decodeEIP4844Transaction(transaction);
case EIP2930:
return decodeEIP2930Transaction(transaction);
default:
return decodeLegacyTransaction(transaction);
}
} | @Test
public void testDecoding1559() {
final RawTransaction rawTransaction = createEip1559RawTransaction();
final Transaction1559 transaction1559 = (Transaction1559) rawTransaction.getTransaction();
final byte[] encodedMessage = TransactionEncoder.encode(rawTransaction);
final String hexMessage = Numeric.toHexString(encodedMessage);
final RawTransaction result = TransactionDecoder.decode(hexMessage);
assertTrue(result.getTransaction() instanceof Transaction1559);
final Transaction1559 resultTransaction1559 = (Transaction1559) result.getTransaction();
assertNotNull(result);
assertEquals(transaction1559.getChainId(), resultTransaction1559.getChainId());
assertEquals(transaction1559.getNonce(), resultTransaction1559.getNonce());
assertEquals(transaction1559.getMaxFeePerGas(), resultTransaction1559.getMaxFeePerGas());
assertEquals(
transaction1559.getMaxPriorityFeePerGas(),
resultTransaction1559.getMaxPriorityFeePerGas());
assertEquals(transaction1559.getGasLimit(), resultTransaction1559.getGasLimit());
assertEquals(transaction1559.getTo(), resultTransaction1559.getTo());
assertEquals(transaction1559.getValue(), resultTransaction1559.getValue());
assertEquals(transaction1559.getData(), resultTransaction1559.getData());
} |
@Override
public boolean isClosed() {
State currentState = state.get();
return currentState == State.Closed || currentState == State.Closing;
} | @Test
public void testIsClosed() throws Exception {
ClientConfigurationData conf = new ClientConfigurationData();
conf.setServiceUrl("pulsar://localhost:6650");
initializeEventLoopGroup(conf);
PulsarClientImpl client = new PulsarClientImpl(conf, eventLoopGroup);
assertFalse(client.isClosed());
client.close();
assertTrue(client.isClosed());
} |
@Override
public Column convert(BasicTypeDefine typeDefine) {
PhysicalColumn.PhysicalColumnBuilder builder =
PhysicalColumn.builder()
.name(typeDefine.getName())
.sourceType(typeDefine.getColumnType())
.nullable(typeDefine.isNullable())
.defaultValue(typeDefine.getDefaultValue())
.comment(typeDefine.getComment());
String xuguDataType = typeDefine.getDataType().toUpperCase();
switch (xuguDataType) {
case XUGU_BOOLEAN:
case XUGU_BOOL:
builder.dataType(BasicType.BOOLEAN_TYPE);
break;
case XUGU_TINYINT:
builder.dataType(BasicType.BYTE_TYPE);
break;
case XUGU_SMALLINT:
builder.dataType(BasicType.SHORT_TYPE);
break;
case XUGU_INT:
case XUGU_INTEGER:
builder.dataType(BasicType.INT_TYPE);
break;
case XUGU_BIGINT:
builder.dataType(BasicType.LONG_TYPE);
break;
case XUGU_FLOAT:
builder.dataType(BasicType.FLOAT_TYPE);
break;
case XUGU_DOUBLE:
builder.dataType(BasicType.DOUBLE_TYPE);
break;
case XUGU_NUMBER:
case XUGU_DECIMAL:
case XUGU_NUMERIC:
DecimalType decimalType;
if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
decimalType =
new DecimalType(
typeDefine.getPrecision().intValue(), typeDefine.getScale());
} else {
decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
}
builder.dataType(decimalType);
builder.columnLength(Long.valueOf(decimalType.getPrecision()));
builder.scale(decimalType.getScale());
break;
case XUGU_CHAR:
case XUGU_NCHAR:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(1L));
} else {
builder.columnLength(typeDefine.getLength());
}
break;
case XUGU_VARCHAR:
case XUGU_VARCHAR2:
builder.dataType(BasicType.STRING_TYPE);
if (typeDefine.getLength() == null || typeDefine.getLength() <= 0) {
builder.columnLength(TypeDefineUtils.charTo4ByteLength(MAX_VARCHAR_LENGTH));
} else {
builder.columnLength(typeDefine.getLength());
}
break;
case XUGU_CLOB:
builder.dataType(BasicType.STRING_TYPE);
builder.columnLength(BYTES_2GB - 1);
break;
case XUGU_JSON:
case XUGU_GUID:
builder.dataType(BasicType.STRING_TYPE);
break;
case XUGU_BINARY:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(MAX_BINARY_LENGTH);
break;
case XUGU_BLOB:
builder.dataType(PrimitiveByteArrayType.INSTANCE);
builder.columnLength(BYTES_2GB - 1);
break;
case XUGU_DATE:
builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
break;
case XUGU_TIME:
case XUGU_TIME_WITH_TIME_ZONE:
builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
break;
case XUGU_DATETIME:
case XUGU_DATETIME_WITH_TIME_ZONE:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
break;
case XUGU_TIMESTAMP:
case XUGU_TIMESTAMP_WITH_TIME_ZONE:
builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
if (typeDefine.getScale() == null) {
builder.scale(TIMESTAMP_DEFAULT_SCALE);
} else {
builder.scale(typeDefine.getScale());
}
break;
default:
throw CommonError.convertToSeaTunnelTypeError(
DatabaseIdentifier.XUGU, xuguDataType, typeDefine.getName());
}
return builder.build();
} | @Test
public void testConvertOtherString() {
BasicTypeDefine<Object> typeDefine =
BasicTypeDefine.builder().name("test").columnType("clob").dataType("clob").build();
Column column = XuguTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
Assertions.assertEquals(BYTES_2GB - 1, column.getColumnLength());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
typeDefine =
BasicTypeDefine.builder().name("test").columnType("json").dataType("json").build();
column = XuguTypeConverter.INSTANCE.convert(typeDefine);
Assertions.assertEquals(typeDefine.getName(), column.getName());
Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
Assertions.assertEquals(null, column.getColumnLength());
Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
} |
List<ProviderInfo> mergeProviderInfo(List<String> userDatas, List<String> configDatas) {
// Should the computed result be cached locally? TODO
List<ProviderInfo> providers = SofaRegistryHelper.parseProviderInfos(userDatas);
// Cross-compare against the override list
if (CommonUtils.isNotEmpty(providers) && CommonUtils.isNotEmpty(configDatas)) {
List<ProviderInfo> override = SofaRegistryHelper.parseProviderInfos(configDatas);
Iterator<ProviderInfo> iterator = providers.iterator();
while (iterator.hasNext()) {
ProviderInfo origin = iterator.next();
for (ProviderInfo over : override) {
if (PROTOCOL_TYPE_OVERRIDE.equals(over.getProtocolType()) &&
StringUtils.equals(origin.getHost(), over.getHost()) && origin.getPort() == over.getPort()) {
// Same host and port are treated as the same address
if (over.getWeight() != origin.getWeight()) {
origin.setWeight(over.getWeight());
}
if (CommonUtils.isTrue(over.getAttr(ProviderInfoAttrs.ATTR_DISABLED))) {
if (LOGGER.isInfoEnabled()) {
LOGGER.info("Provider is disabled by override. {}", origin.toUrl());
}
iterator.remove(); // Disabled, so remove it
}
}
}
}
}
return providers;
} | @Test
public void mergeProviderInfo() throws Exception {
SofaRegistrySubscribeCallback callback = new SofaRegistrySubscribeCallback();
// null + null
List<ProviderInfo> result = callback.mergeProviderInfo(null, null);
Assert.assertTrue(CommonUtils.isEmpty(result));
// empty + null
List<String> listData = new ArrayList<String>();
result = callback.mergeProviderInfo(listData, null);
Assert.assertTrue(CommonUtils.isEmpty(result));
// null + empty
List<String> attrData = new ArrayList<String>();
result = callback.mergeProviderInfo(null, attrData);
Assert.assertTrue(CommonUtils.isEmpty(result));
// empty + empty
result = callback.mergeProviderInfo(listData, attrData);
Assert.assertTrue(CommonUtils.isEmpty(result));
// empty + non-empty
attrData.add("override://127.0.0.1?weight=200");
result = callback.mergeProviderInfo(listData, attrData);
Assert.assertTrue(CommonUtils.isEmpty(result));
// non-empty + empty
attrData.clear();
listData.add("127.0.0.1:22000?weight=100");
listData.add("127.0.0.1:22001?weight=100");
result = callback.mergeProviderInfo(listData, attrData);
Assert.assertTrue(result.size() == 2);
for (ProviderInfo providerInfo : result) {
if (providerInfo.getPort() == 22000) {
Assert.assertTrue(providerInfo.getWeight() == 100);
} else if (providerInfo.getPort() == 22001) {
Assert.assertTrue(providerInfo.getWeight() == 100);
}
}
// Override entries that match no provider
attrData.add("override://127.0.0.1:22005?weight=200");
attrData.add("override://127.0.0.1:22004?disabled=true&weight=200");
result = callback.mergeProviderInfo(listData, attrData);
Assert.assertTrue(result.size() == 2);
for (ProviderInfo providerInfo : result) {
if (providerInfo.getPort() == 22000) {
Assert.assertTrue(providerInfo.getWeight() == 100);
} else if (providerInfo.getPort() == 22001) {
Assert.assertTrue(providerInfo.getWeight() == 100);
}
}
// Override entries that match providers
attrData.add("override://127.0.0.1:22000?weight=200");
attrData.add("override://127.0.0.1:22001?disabled=true&weight=200");
result = callback.mergeProviderInfo(listData, attrData);
Assert.assertTrue(result.size() == 1);
for (ProviderInfo providerInfo : result) {
if (providerInfo.getPort() == 22000) {
Assert.assertTrue(providerInfo.getWeight() == 200);
}
}
} |
@Override
public void register(ProviderConfig config) {
String appName = config.getAppName();
if (!registryConfig.isRegister()) {
if (LOGGER.isInfoEnabled(appName)) {
LOGGER.infoWithApp(appName, LogCodes.getLog(LogCodes.INFO_REGISTRY_IGNORE));
}
return;
}
if (!config.isRegister()) { // Registration disabled at the registry or at the service level
return;
}
List<ServerConfig> serverConfigs = config.getServer();
if (CommonUtils.isNotEmpty(serverConfigs)) {
for (ServerConfig server : serverConfigs) {
String serviceName = MeshRegistryHelper.buildMeshKey(config, server.getProtocol());
ProviderInfo providerInfo = MeshRegistryHelper.convertProviderToProviderInfo(config, server);
if (LOGGER.isInfoEnabled(appName)) {
LOGGER.infoWithApp(appName, LogCodes.getLog(LogCodes.INFO_ROUTE_REGISTRY_PUB_START, serviceName));
}
doRegister(appName, serviceName, providerInfo, server.getProtocol());
if (LOGGER.isInfoEnabled(appName)) {
LOGGER.infoWithApp(appName, LogCodes.getLog(LogCodes.INFO_ROUTE_REGISTRY_PUB_OVER, serviceName));
}
}
if (EventBus.isEnable(ProviderPubEvent.class)) {
ProviderPubEvent event = new ProviderPubEvent(config);
EventBus.post(event);
}
}
} | @Test
public void testOnlyPublish() throws InterruptedException {
Field registedAppField = null;
try {
registedAppField = MeshRegistry.class.getDeclaredField("registedApp");
registedAppField.setAccessible(true);
} catch (NoSuchFieldException e) {
e.printStackTrace();
}
Boolean registedAppValue = null;
// in case of effected by other case.
try {
registedAppValue = (Boolean) registedAppField.get(registry);
registedAppField.set(registry, false);
} catch (IllegalAccessException e) {
e.printStackTrace();
}
ServerConfig serverConfig = new ServerConfig()
.setProtocol("bolt")
.setHost("0.0.0.0")
.setPort(12200);
ProviderConfig<?> provider = new ProviderConfig();
provider.setInterfaceId("com.alipay.xxx.TestService")
.setUniqueId("unique123Id")
.setApplication(new ApplicationConfig().setAppName("test-server"))
.setProxy("javassist")
.setRegister(true)
.setRegistry(registryConfig)
.setSerialization("hessian2")
.setServer(serverConfig)
.setWeight(222)
.setTimeout(3000);
registry.register(provider);
Thread.sleep(3000);
try {
registedAppValue = (Boolean) registedAppField.get(registry);
} catch (IllegalAccessException e) {
e.printStackTrace();
}
LOGGER.info("final registedAppValue is " + registedAppValue);
Assert.assertTrue(registedAppValue);
} |
@Override
public Expression getExpression(String tableName, Alias tableAlias) {
// Apply data permission handling only when a user is logged in
LoginUser loginUser = SecurityFrameworkUtils.getLoginUser();
if (loginUser == null) {
return null;
}
// Apply data permission handling only for admin-type users
if (ObjectUtil.notEqual(loginUser.getUserType(), UserTypeEnum.ADMIN.getValue())) {
return null;
}
// Fetch the data permission
DeptDataPermissionRespDTO deptDataPermission = loginUser.getContext(CONTEXT_KEY, DeptDataPermissionRespDTO.class);
// Not in the context yet, so fetch it via the permission API
if (deptDataPermission == null) {
deptDataPermission = permissionApi.getDeptDataPermission(loginUser.getId()).getCheckedData();
if (deptDataPermission == null) {
log.error("[getExpression][LoginUser({}) returned null data permission]", JsonUtils.toJsonString(loginUser));
throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) returned no data permission",
loginUser.getId(), tableName, tableAlias.getName()));
}
// Cache it in the context to avoid recomputation
loginUser.setContext(CONTEXT_KEY, deptDataPermission);
}
// Case 1: ALL means the user can view everything, so no condition is needed
if (deptDataPermission.getAll()) {
return null;
}
// Case 2: the user can view neither any department nor self, so there is definitely no permission
if (CollUtil.isEmpty(deptDataPermission.getDeptIds())
&& Boolean.FALSE.equals(deptDataPermission.getSelf())) {
return new EqualsTo(null, null); // WHERE null = null, which guarantees an empty result set
}
// Case 3: build the Dept and User conditions, then combine them
Expression deptExpression = buildDeptExpression(tableName, tableAlias, deptDataPermission.getDeptIds());
Expression userExpression = buildUserExpression(tableName, tableAlias, deptDataPermission.getSelf(), loginUser.getId());
if (deptExpression == null && userExpression == null) {
// TODO 芋艿: when no condition can be built, do not throw an exception for now; simply return no data
log.warn("[getExpression][LoginUser({}) Table({}/{}) DeptDataPermission({}) built an empty condition]",
JsonUtils.toJsonString(loginUser), tableName, tableAlias, JsonUtils.toJsonString(deptDataPermission));
// throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) built an empty condition",
// loginUser.getId(), tableName, tableAlias.getName()));
return EXPRESSION_NULL;
}
if (deptExpression == null) {
return userExpression;
}
if (userExpression == null) {
return deptExpression;
}
// Currently, when specific departments plus "view self" are both granted, use an OR condition, i.e. WHERE (dept_id IN ? OR user_id = ?)
return new Parenthesis(new OrExpression(deptExpression, userExpression));
} | @Test // full data permission
public void testGetExpression_allDeptDataPermission() {
try (MockedStatic<SecurityFrameworkUtils> securityFrameworkUtilsMock
= mockStatic(SecurityFrameworkUtils.class)) {
// Prepare parameters
String tableName = "t_user";
Alias tableAlias = new Alias("u");
// Mock the method (LoginUser)
LoginUser loginUser = randomPojo(LoginUser.class, o -> o.setId(1L)
.setUserType(UserTypeEnum.ADMIN.getValue()));
securityFrameworkUtilsMock.when(SecurityFrameworkUtils::getLoginUser).thenReturn(loginUser);
// Mock the method (DeptDataPermissionRespDTO)
DeptDataPermissionRespDTO deptDataPermission = new DeptDataPermissionRespDTO().setAll(true);
when(permissionApi.getDeptDataPermission(same(1L))).thenReturn(success(deptDataPermission));
// Invoke
Expression expression = rule.getExpression(tableName, tableAlias);
// Assert
assertNull(expression);
assertSame(deptDataPermission, loginUser.getContext(DeptDataPermissionRule.CONTEXT_KEY, DeptDataPermissionRespDTO.class));
}
} |
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
return this.list(directory, listener, new HostPreferences(session.getHost()).getInteger("sds.listing.chunksize"));
} | @Test
public void testListRoot() throws Exception {
final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
final Path directory = new Path("/", EnumSet.of(AbstractPath.Type.directory, Path.Type.volume));
final AttributedList<Path> list = new SDSListService(session, nodeid).list(
directory, new DisabledListProgressListener());
assertNotSame(AttributedList.emptyList(), list);
assertFalse(list.isEmpty());
for(Path f : list) {
assertSame(directory, f.getParent());
assertEquals(f.attributes(), new SDSAttributesFinderFeature(session, nodeid).find(f));
}
} |
public static SerializableFunction<byte[], Row> getProtoBytesToRowFromSchemaFunction(
String schemaString, String messageName) {
Descriptors.Descriptor descriptor = getDescriptorFromProtoSchema(schemaString, messageName);
ProtoDynamicMessageSchema<DynamicMessage> protoDynamicMessageSchema =
ProtoDynamicMessageSchema.forDescriptor(ProtoDomain.buildFrom(descriptor), descriptor);
return new SimpleFunction<byte[], Row>() {
@Override
public Row apply(byte[] input) {
try {
Descriptors.Descriptor descriptorFunction =
getDescriptorFromProtoSchema(schemaString, messageName);
DynamicMessage dynamicMessage = DynamicMessage.parseFrom(descriptorFunction, input);
SerializableFunction<DynamicMessage, Row> res =
protoDynamicMessageSchema.getToRowFunction();
return res.apply(dynamicMessage);
} catch (InvalidProtocolBufferException e) {
LOG.error("Error parsing to DynamicMessage", e);
throw new RuntimeException(e);
}
}
};
} | @Test
public void testProtoBytesToRowSchemaStringGenerateSerializableFunction() {
SerializableFunction<byte[], Row> protoBytesToRowFunction =
ProtoByteUtils.getProtoBytesToRowFromSchemaFunction(PROTO_STRING_SCHEMA, "MyMessage");
Assert.assertNotNull(protoBytesToRowFunction);
} |
public static int parseMajorVersion(String version) {
if (version.endsWith("-ea")) {
version = version.substring(0, version.length() - 3);
}
if (version.startsWith("1.")) {
version = version.substring(2, 3);
} else {
int dot = version.indexOf(".");
if (dot != -1) {
version = version.substring(0, dot);
}
}
return Integer.parseInt(version);
} | @Test
public void parseVersion() throws Exception {
assertEquals(8, CommonUtils.parseMajorVersion("1.8.0"));
assertEquals(11, CommonUtils.parseMajorVersion("11.0.1"));
assertEquals(9, CommonUtils.parseMajorVersion("9.0.1"));
} |
@Override
public void negativeAcknowledge(MessageId messageId) {
consumerNacksCounter.increment();
negativeAcksTracker.add(messageId);
// Ensure the message is not redelivered for ack-timeout, since we did receive an "ack"
unAckedMessageTracker.remove(MessageIdAdvUtils.discardBatch(messageId));
} | @Test
public void testClose() {
Exception checkException = null;
try {
if (consumer != null) {
consumer.negativeAcknowledge(new MessageIdImpl(-1, -1, -1));
consumer.close();
}
} catch (Exception e) {
checkException = e;
}
Assert.assertNull(checkException);
} |
public void setTemplateEntriesForChild(CapacitySchedulerConfiguration conf,
QueuePath childQueuePath) {
setTemplateEntriesForChild(conf, childQueuePath, false);
} | @Test
public void testTwoLevelWildcardTemplate() {
conf.set(getTemplateKey(TEST_QUEUE_ROOT_WILDCARD, "capacity"), "6w");
conf.set(getTemplateKey(TEST_QUEUE_TWO_LEVEL_WILDCARDS, "capacity"), "5w");
new AutoCreatedQueueTemplate(conf, TEST_QUEUE_A)
.setTemplateEntriesForChild(conf, TEST_QUEUE_AB);
new AutoCreatedQueueTemplate(conf, TEST_QUEUE_AB)
.setTemplateEntriesForChild(conf, TEST_QUEUE_ABC);
Assert.assertEquals("weight is not set", 6f,
conf.getNonLabeledQueueWeight(TEST_QUEUE_AB), 10e-6);
Assert.assertEquals("weight is not set", 5f,
conf.getNonLabeledQueueWeight(TEST_QUEUE_ABC), 10e-6);
} |
@Override
public void setTimestamp(final Path file, final TransferStatus status) throws BackgroundException {
final SMBSession.DiskShareWrapper share = session.openShare(file);
try {
if(file.isDirectory()) {
try (final Directory entry = share.get().openDirectory(new SMBPathContainerService(session).getKey(file),
Collections.singleton(AccessMask.FILE_WRITE_ATTRIBUTES),
Collections.singleton(FileAttributes.FILE_ATTRIBUTE_DIRECTORY),
Collections.singleton(SMB2ShareAccess.FILE_SHARE_READ),
SMB2CreateDisposition.FILE_OPEN,
Collections.singleton(SMB2CreateOptions.FILE_DIRECTORY_FILE))) {
final FileBasicInformation updatedBasicInformation = new FileBasicInformation(
status.getCreated() != null ? FileTime.ofEpochMillis(status.getCreated()) : FileBasicInformation.DONT_SET,
FileBasicInformation.DONT_SET,
status.getModified() != null ? FileTime.ofEpochMillis(status.getModified()) : FileBasicInformation.DONT_SET,
FileBasicInformation.DONT_SET,
FileAttributes.FILE_ATTRIBUTE_DIRECTORY.getValue());
entry.setFileInformation(updatedBasicInformation);
}
}
else {
try (final File entry = share.get().openFile(new SMBPathContainerService(session).getKey(file),
Collections.singleton(AccessMask.FILE_WRITE_ATTRIBUTES),
Collections.singleton(FileAttributes.FILE_ATTRIBUTE_NORMAL),
Collections.singleton(SMB2ShareAccess.FILE_SHARE_READ),
SMB2CreateDisposition.FILE_OPEN,
Collections.singleton(SMB2CreateOptions.FILE_NON_DIRECTORY_FILE))) {
final FileBasicInformation updatedBasicInformation = new FileBasicInformation(
status.getCreated() != null ? FileTime.ofEpochMillis(status.getCreated()) : FileBasicInformation.DONT_SET,
FileBasicInformation.DONT_SET,
status.getModified() != null ? FileTime.ofEpochMillis(status.getModified()) : FileBasicInformation.DONT_SET,
FileBasicInformation.DONT_SET,
FileAttributes.FILE_ATTRIBUTE_NORMAL.getValue());
entry.setFileInformation(updatedBasicInformation);
}
}
}
catch(SMBRuntimeException e) {
throw new SMBExceptionMappingService().map("Cannot change timestamp of {0}", e, file);
}
finally {
session.releaseShare(share);
}
} | @Test
public void testTimestampFileNotfound() throws Exception {
final TransferStatus status = new TransferStatus();
final Path home = new DefaultHomeFinderService(session).find();
final Path f = new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
status.setModified(System.currentTimeMillis());
assertThrows(NotfoundException.class, () -> new SMBTimestampFeature(session).setTimestamp(f, status));
} |
@NonNull
static Predicate<NotificationTemplate> matchReasonType(String reasonType) {
return template -> template.getSpec().getReasonSelector().getReasonType()
.equals(reasonType);
} | @Test
void matchReasonTypeTest() {
var template = createNotificationTemplate("fake-template");
assertThat(ReasonNotificationTemplateSelectorImpl.matchReasonType("new-comment-on-post")
.test(template)).isTrue();
assertThat(ReasonNotificationTemplateSelectorImpl.matchReasonType("fake-reason-type")
.test(template)).isFalse();
} |
static ProjectMeasuresQuery newProjectMeasuresQuery(List<Criterion> criteria, @Nullable Set<String> projectUuids) {
ProjectMeasuresQuery query = new ProjectMeasuresQuery();
Optional.ofNullable(projectUuids).ifPresent(query::setProjectUuids);
criteria.forEach(criterion -> processCriterion(criterion, query));
return query;
} | @Test
public void fail_to_create_query_on_tag_using_eq_operator_and_values() {
assertThatThrownBy(() -> {
newProjectMeasuresQuery(singletonList(Criterion.builder().setKey("tags").setOperator(EQ).setValues(asList("java")).build()), emptySet());
})
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Tags should be set either by using 'tags = java' or 'tags IN (finance, platform)'");
} |
@Bean
@ConditionalOnMissingBean(LdapOperations.class)
public LdapTemplate ldapTemplate(final LdapContextSource ldapContextSource) {
return new LdapTemplate(ldapContextSource);
} | @Test
public void testLdapTemplate() {
LdapContextSource ldapContextSource = new LdapContextSource();
LdapTemplate ldapTemplate = ldapConfiguration.ldapTemplate(ldapContextSource);
assertNotNull(ldapTemplate);
} |
@Override
public void publishLong(MetricDescriptor descriptor, long value) {
publishNumber(descriptor, value, LONG);
} | @Test
public void when_moreMetrics() throws Exception {
jmxPublisher.publishLong(newDescriptor()
.withMetric("c")
.withTag("tag1", "a")
.withTag("tag2", "b"), 1L);
jmxPublisher.publishLong(newDescriptor()
.withMetric("d")
.withTag("tag1", "a")
.withTag("tag2", "b"), 2L);
jmxPublisher.publishLong(newDescriptor()
.withMetric("d")
.withTag("module", MODULE_NAME)
.withTag("tag1", "a")
.withTag("tag2", "b"), 5L);
jmxPublisher.publishLong(newDescriptor()
.withMetric("a")
.withTag("tag1", "a")
.withTag("tag2", "c"), 3L);
jmxPublisher.publishLong(newDescriptor()
.withMetric("a"), 4L);
helper.assertMBeans(asList(
metric(domainPrefix + ":type=Metrics,instance=inst1,tag0=\"tag1=a\",tag1=\"tag2=b\"",
asList(longValue("c", 1L), longValue("d", 2L))),
metric(domainPrefix + "." + MODULE_NAME + ":type=Metrics,instance=inst1,tag0=\"tag1=a\",tag1=\"tag2=b\"",
singletonList(longValue("d", 5L))),
metric(domainPrefix + ":type=Metrics,instance=inst1,tag0=\"tag1=a\",tag1=\"tag2=c\"",
singletonList(longValue("a", 3L))),
metric(domainPrefix + ":type=Metrics,instance=inst1",
singletonList(longValue("a", 4L)))
));
} |
@Override
public List<Instance> getAllInstances(String serviceName) throws NacosException {
return getAllInstances(serviceName, new ArrayList<>());
} | @Test
void testGetAllInstanceFromFailoverEmpty() throws NacosException {
when(serviceInfoHolder.isFailoverSwitch()).thenReturn(true);
ServiceInfo serviceInfo = new ServiceInfo("group1@@service1");
when(serviceInfoHolder.getFailoverServiceInfo(anyString(), anyString(), anyString())).thenReturn(serviceInfo);
List<Instance> actual = client.getAllInstances("service1", "group1", false);
verify(proxy).queryInstancesOfService(anyString(), anyString(), anyString(), anyBoolean());
assertEquals(0, actual.size());
} |
public JDiscCookieWrapper[] getWrappedCookies() {
List<Cookie> cookies = getCookies();
if (cookies == null) {
return null;
}
List<JDiscCookieWrapper> cookieWrapper = new ArrayList<>(cookies.size());
for(Cookie cookie : cookies) {
cookieWrapper.add(JDiscCookieWrapper.wrap(cookie));
}
return cookieWrapper.toArray(new JDiscCookieWrapper[cookieWrapper.size()]);
} | @Test
void testGetWrapedCookies() {
URI uri = URI.create("http://example.yahoo.com/test");
HttpRequest httpReq = newRequest(uri, HttpRequest.Method.GET, HttpRequest.Version.HTTP_1_1);
httpReq.headers().put(HttpHeaders.Names.COOKIE, "XYZ=value");
DiscFilterRequest request = new DiscFilterRequest(httpReq);
JDiscCookieWrapper[] wrappers = request.getWrappedCookies();
assertEquals(wrappers.length, 1);
assertEquals(wrappers[0].getName(), "XYZ");
assertEquals(wrappers[0].getValue(), "value");
} |
public static Matcher<HttpRequest> pathStartsWith(String pathPrefix) {
if (pathPrefix == null) throw new NullPointerException("pathPrefix == null");
if (pathPrefix.isEmpty()) throw new NullPointerException("pathPrefix is empty");
return new PathStartsWith(pathPrefix);
} | @Test void pathStartsWith_matched_prefix() {
when(httpRequest.path()).thenReturn("/foo/bar");
assertThat(pathStartsWith("/foo").matches(httpRequest)).isTrue();
} |
public static CharSequence escapeCsv(CharSequence value) {
return escapeCsv(value, false);
} | @Test
public void escapeCsvWithMultipleCarriageReturn() {
CharSequence value = "\r\r";
CharSequence expected = "\"\r\r\"";
escapeCsv(value, expected);
} |
public HadoopCatalog() {} | @Test
public void testCreateTableDefaultSortOrder() throws Exception {
TableIdentifier tableIdent = TableIdentifier.of("db", "ns1", "ns2", "tbl");
Table table = hadoopCatalog().createTable(tableIdent, SCHEMA, SPEC);
SortOrder sortOrder = table.sortOrder();
assertThat(sortOrder.orderId()).as("Order ID must match").isEqualTo(0);
assertThat(sortOrder.isUnsorted()).as("Order must be unsorted").isTrue();
} |
@Override
public void maybeExport(Supplier<TraceDescription> traceDescriptionSupplier) {
if (samplingStrategy.shouldSample()) {
wrappedExporter.maybeExport(traceDescriptionSupplier);
}
} | @Test
void sampling_decision_is_deferred_to_provided_sampler() {
var exporter = mock(TraceExporter.class);
var sampler = mock(SamplingStrategy.class);
when(sampler.shouldSample()).thenReturn(true, false);
var samplingExporter = new SamplingTraceExporter(exporter, sampler);
samplingExporter.maybeExport(() -> new TraceDescription(null, ""));
verify(exporter, times(1)).maybeExport(any());
samplingExporter.maybeExport(() -> new TraceDescription(null, ""));
verify(exporter, times(1)).maybeExport(any()); // No further invocations since last
} |
@Override
public double quantile(double p) {
if (p < 0.0 || p > 1.0) {
throw new IllegalArgumentException("Invalid p: " + p);
}
return Gamma.inverseRegularizedIncompleteGamma(k, p) * theta;
} | @Test
public void testQuantile() {
System.out.println("quantile");
GammaDistribution instance = new GammaDistribution(3, 2.1);
instance.rand();
assertEquals(0.4001201, instance.quantile(0.001), 1E-7);
assertEquals(0.9156948, instance.quantile(0.01), 1E-7);
assertEquals(2.314337, instance.quantile(0.1), 1E-6);
assertEquals(3.223593, instance.quantile(0.2), 1E-6);
assertEquals(5.615527, instance.quantile(0.5), 1E-6);
assertEquals(11.17687, instance.quantile(0.9), 1E-5);
assertEquals(17.65249, instance.quantile(0.99), 1E-5);
assertEquals(23.58063, instance.quantile(0.999), 1E-5);
} |
public void set(PropertyKey key, Object value) {
set(key, value, Source.RUNTIME);
} | @Test
public void getMalformedIntThrowsException() {
mThrown.expect(IllegalArgumentException.class);
mConfiguration.set(PropertyKey.WEB_THREADS, 2147483648L); // bigger than MAX_INT
} |
public void pushWithoutAck(String connectionId, ServerRequest request) {
Connection connection = connectionManager.getConnection(connectionId);
if (connection != null) {
try {
connection.request(request, 3000L);
} catch (ConnectionAlreadyClosedException e) {
connectionManager.unregister(connectionId);
} catch (Exception e) {
Loggers.REMOTE_DIGEST
.error("error to send push response to connectionId ={},push response={}", connectionId,
request, e);
}
}
} | @Test
void testPushWithoutAck() {
Mockito.when(connectionManager.getConnection(Mockito.any())).thenReturn(grpcConnection);
try {
Mockito.when(grpcConnection.request(Mockito.any(), Mockito.eq(3000L))).thenThrow(ConnectionAlreadyClosedException.class);
rpcPushService.pushWithoutAck(connectId, null);
Mockito.when(grpcConnection.request(Mockito.any(), Mockito.eq(3000L))).thenThrow(NacosException.class);
rpcPushService.pushWithoutAck(connectId, null);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
try {
Mockito.when(grpcConnection.request(Mockito.any(), Mockito.eq(3000L))).thenReturn(Mockito.any());
rpcPushService.pushWithoutAck(connectId, null);
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
} |
@VisibleForTesting
static String getFlinkMetricIdentifierString(MetricKey metricKey) {
MetricName metricName = metricKey.metricName();
ArrayList<String> scopeComponents = getNameSpaceArray(metricKey);
List<String> results = scopeComponents.subList(0, scopeComponents.size() / 2);
results.add(metricName.getName());
return String.join(METRIC_KEY_SEPARATOR, results);
} | @Test
void testGetFlinkMetricIdentifierString() {
MetricKey key = MetricKey.create("step", MetricName.named(DEFAULT_NAMESPACE, "name"));
assertThat(FlinkMetricContainer.getFlinkMetricIdentifierString(key))
.isEqualTo("key.value.name");
} |
@Override
public void execute(String commandName, BufferedReader reader, BufferedWriter writer)
throws Py4JException, IOException {
char subCommand = safeReadLine(reader).charAt(0);
String returnCommand = null;
if (subCommand == GET_UNKNOWN_SUB_COMMAND_NAME) {
returnCommand = getUnknownMember(reader);
} else if (subCommand == GET_JAVA_LANG_CLASS_SUB_COMMAND_NAME) {
returnCommand = getJavaLangClass(reader);
} else {
returnCommand = getMember(reader);
}
logger.finest("Returning command: " + returnCommand);
writer.write(returnCommand);
writer.flush();
} | @Test
public void testJavaLangClass() {
String inputCommand1 = ReflectionCommand.GET_JAVA_LANG_CLASS_SUB_COMMAND_NAME + "\n" + "java.lang.String\ne\n";
String inputCommand2 = ReflectionCommand.GET_JAVA_LANG_CLASS_SUB_COMMAND_NAME + "\n" + "java.lang"
+ ".FOOOOO\ne\n"; // does not exist
try {
command.execute("r", new BufferedReader(new StringReader(inputCommand1)), writer);
assertEquals("!yro0\n", sWriter.toString());
command.execute("r", new BufferedReader(new StringReader(inputCommand2)), writer);
assertEquals("!yro0\n!xsThe class java.lang.FOOOOO does not exist.\n", sWriter.toString());
} catch (Exception e) {
e.printStackTrace();
fail();
}
} |
@Override
public List<Integer> applyTransforms(List<Integer> originalGlyphIds)
{
List<Integer> intermediateGlyphsFromGsub = originalGlyphIds;
for (String feature : FEATURES_IN_ORDER)
{
if (!gsubData.isFeatureSupported(feature))
{
LOG.debug("the feature {} was not found", feature);
continue;
}
LOG.debug("applying the feature {}", feature);
ScriptFeature scriptFeature = gsubData.getFeature(feature);
intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature,
intermediateGlyphsFromGsub);
}
return Collections.unmodifiableList(repositionGlyphs(intermediateGlyphsFromGsub));
} | @Test
void testApplyTransforms_e_kar()
{
// given
List<Integer> glyphsAfterGsub = Arrays.asList(438, 89, 94, 101);
// when
List<Integer> result = gsubWorkerForBengali.applyTransforms(getGlyphIds("বেলা"));
// then
assertEquals(glyphsAfterGsub, result);
} |
@Override
public String encrypt(String clearText) {
try {
javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance(CRYPTO_ALGO);
cipher.init(javax.crypto.Cipher.ENCRYPT_MODE, loadSecretFile());
byte[] cipherData = cipher.doFinal(clearText.getBytes(StandardCharsets.UTF_8.name()));
return Base64.encodeBase64String(cipherData);
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException(e);
}
} | @Test
public void encrypt_bad_key() throws Exception {
URL resource = getClass().getResource("/org/sonar/api/config/internal/AesCipherTest/bad_secret_key.txt");
AesECBCipher cipher = new AesECBCipher(new File(resource.toURI()).getCanonicalPath());
assertThatThrownBy(() -> cipher.encrypt("this is a secret"))
.isInstanceOf(RuntimeException.class)
.hasCauseInstanceOf(InvalidKeyException.class);
} |
@Override
public Organizations listOrganizations(String appUrl, AccessToken accessToken, int page, int pageSize) {
checkPageArgs(page, pageSize);
try {
Organizations organizations = new Organizations();
GetResponse response = githubApplicationHttpClient.get(appUrl, accessToken, String.format("/user/installations?page=%s&per_page=%s", page, pageSize));
Optional<GsonInstallations> gsonInstallations = response.getContent().map(content -> GSON.fromJson(content, GsonInstallations.class));
if (!gsonInstallations.isPresent()) {
return organizations;
}
organizations.setTotal(gsonInstallations.get().getTotalCount());
if (gsonInstallations.get().getInstallations() != null) {
organizations.setOrganizations(gsonInstallations.get().getInstallations().stream()
.map(gsonInstallation -> new Organization(gsonInstallation.getAccount().getId(), gsonInstallation.getAccount().getLogin(), null, null, null, null, null,
gsonInstallation.getTargetType()))
.toList());
}
return organizations;
} catch (IOException e) {
throw new IllegalStateException(format("Failed to list all organizations accessible by user access token on %s", appUrl), e);
}
} | @Test
public void listOrganizations_fail_if_pageSize_out_of_bounds() {
UserAccessToken token = new UserAccessToken("token");
assertThatThrownBy(() -> underTest.listOrganizations(appUrl, token, 1, 0))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("'pageSize' must be a value larger than 0 and smaller or equal to 100.");
assertThatThrownBy(() -> underTest.listOrganizations("", token, 1, 101))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("'pageSize' must be a value larger than 0 and smaller or equal to 100.");
} |
T getFunction(final List<SqlArgument> arguments) {
// first try to get the candidates without any implicit casting
Optional<T> candidate = findMatchingCandidate(arguments, false);
if (candidate.isPresent()) {
return candidate.get();
} else if (!supportsImplicitCasts) {
throw createNoMatchingFunctionException(arguments);
}
// if none were found (candidate isn't present) try again with implicit casting
candidate = findMatchingCandidate(arguments, true);
if (candidate.isPresent()) {
return candidate.get();
}
throw createNoMatchingFunctionException(arguments);
} | @Test
public void shouldIncludeAvailableSignaturesIfNotMatchFound() {
// Given:
final ArrayType generic = of(GenericType.of("A"));
givenFunctions(
function(OTHER, true, -1, STRING, INT),
function(OTHER, 0, STRING_VARARGS),
function(OTHER, -1, generic)
);
// When:
final Exception e = assertThrows(
Exception.class,
() -> udfIndex.getFunction(ImmutableList.of(SqlArgument.of(SqlTypes.STRING), SqlArgument.of(INTEGER), SqlArgument.of(SqlTypes.STRING)))
);
// Then:
assertThat(e.getMessage(), containsString("Valid alternatives are:"
+ lineSeparator()
+ "other(VARCHAR...)"
+ lineSeparator()
+ "other(ARRAY<A>)"
+ lineSeparator()
+ "other(VARCHAR paramName, INT paramName)"));
} |
public void expand(String key, long value, RangeHandler rangeHandler, EdgeHandler edgeHandler) {
if (value < lowerBound || value > upperBound) {
// Value outside bounds -> expand to nothing.
return;
}
int maxLevels = value > 0 ? maxPositiveLevels : maxNegativeLevels;
int sign = value > 0 ? 1 : -1;
// Append key to feature string builder
StringBuilder builder = new StringBuilder(128);
builder.append(key).append('=');
long levelSize = arity;
long edgeInterval = (value / arity) * arity;
edgeHandler.handleEdge(createEdgeFeatureHash(builder, edgeInterval), (int) Math.abs(value - edgeInterval));
for (int i = 0; i < maxLevels; ++i) {
long start = (value / levelSize) * levelSize;
if (Math.abs(start) + levelSize - 1 < 0) { // overflow
break;
}
rangeHandler.handleRange(createRangeFeatureHash(builder, start, start + sign * (levelSize - 1)));
levelSize *= arity;
if (levelSize <= 0 && levelSize != Long.MIN_VALUE) { //overflow
break;
}
}
} | @Test
void requireThatUpperAndLowerBoundGreaterThan0Works() {
PredicateRangeTermExpander expander = new PredicateRangeTermExpander(10, 100, 9999);
Iterator<String> expectedLabels = List.of(
"key=140-149",
"key=100-199",
"key=0-999",
"key=0-9999").iterator();
expander.expand("key", 142, range -> assertEquals(PredicateHash.hash64(expectedLabels.next()), range),
(edge, value) -> {
assertEquals(PredicateHash.hash64("key=140"), edge);
assertEquals(2, value);
});
assertFalse(expectedLabels.hasNext());
} |
public static <T, PredicateT extends ProcessFunction<T, Boolean>> Filter<T> by(
PredicateT predicate) {
return new Filter<>(predicate);
} | @Test
public void testFilterParDoOutputTypeDescriptorRawWithLambda() throws Exception {
@SuppressWarnings({"unchecked", "rawtypes"})
PCollection<String> output = p.apply(Create.of("hello")).apply(Filter.by(s -> true));
thrown.expect(CannotProvideCoderException.class);
p.getCoderRegistry().getCoder(output.getTypeDescriptor());
} |
public SearchSourceBuilder create(SearchesConfig config) {
return create(SearchCommand.from(config));
} | @Test
void scrollSearchDoesNotHighlight() {
final SearchSourceBuilder search = this.searchRequestFactory.create(ChunkCommand.builder()
.indices(Collections.singleton("graylog_0"))
.range(RANGE)
.build());
assertThat(search.toString()).doesNotContain("\"highlight\":");
} |
@Override
public KsMaterializedQueryResult<WindowedRow> get(
final GenericKey key,
final int partition,
final Range<Instant> windowStart,
final Range<Instant> windowEnd,
final Optional<Position> position
) {
try {
final ReadOnlySessionStore<GenericKey, GenericRow> store = stateStore
.store(QueryableStoreTypes.sessionStore(), partition);
return KsMaterializedQueryResult.rowIterator(
findSession(store, key, windowStart, windowEnd).iterator());
} catch (final Exception e) {
throw new MaterializationException("Failed to get value from materialized table", e);
}
} | @Test
public void shouldReturnEmptyIfKeyNotPresent() {
// When:
final Iterator<WindowedRow> rowIterator =
table.get(A_KEY, PARTITION, WINDOW_START_BOUNDS, WINDOW_END_BOUNDS).rowIterator;
// Then:
assertThat(rowIterator.hasNext(), is(false));
} |
public static void checkArgument(boolean expression, Object errorMessage) {
if (Objects.isNull(errorMessage)) {
throw new IllegalArgumentException("errorMessage cannot be null.");
}
if (!expression) {
throw new IllegalArgumentException(String.valueOf(errorMessage));
}
} | @Test
void testCheckArgument2Args1true() {
Preconditions.checkArgument(true, ERRORMSG);
} |
public CompletableFuture<Response> executeRequest(Request request) {
return executeRequest(request, () -> new AsyncCompletionHandlerBase());
} | @Test
void testRedirectWithBody() throws ExecutionException, InterruptedException {
server.stubFor(post(urlEqualTo("/path1"))
.willReturn(aResponse()
.withStatus(307)
.withHeader("Location", "/path2")));
server.stubFor(post(urlEqualTo("/path2"))
.willReturn(aResponse()
.withTransformers("copy-body")));
ClientConfigurationData conf = new ClientConfigurationData();
conf.setServiceUrl("http://localhost:" + server.port());
@Cleanup
AsyncHttpConnector connector = new AsyncHttpConnector(5000, 5000,
5000, 0, conf, false);
Request request = new RequestBuilder("POST")
.setUrl("http://localhost:" + server.port() + "/path1")
.setBody("Hello world!")
.build();
Response response = connector.executeRequest(request).get();
assertEquals(response.getResponseBody(), "Hello world!");
} |
public static List<AclEntry> replaceAclEntries(List<AclEntry> existingAcl,
List<AclEntry> inAclSpec) throws AclException {
ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
// Replacement is done separately for each scope: access and default.
EnumMap<AclEntryScope, AclEntry> providedMask =
Maps.newEnumMap(AclEntryScope.class);
EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
for (AclEntry aclSpecEntry: aclSpec) {
scopeDirty.add(aclSpecEntry.getScope());
if (aclSpecEntry.getType() == MASK) {
providedMask.put(aclSpecEntry.getScope(), aclSpecEntry);
maskDirty.add(aclSpecEntry.getScope());
} else {
aclBuilder.add(aclSpecEntry);
}
}
// Copy existing entries if the scope was not replaced.
for (AclEntry existingEntry: existingAcl) {
if (!scopeDirty.contains(existingEntry.getScope())) {
if (existingEntry.getType() == MASK) {
providedMask.put(existingEntry.getScope(), existingEntry);
} else {
aclBuilder.add(existingEntry);
}
}
}
copyDefaultsIfNeeded(aclBuilder);
calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
return buildAndValidateAcl(aclBuilder);
} | @Test(expected=AclException.class)
public void testReplaceAclEntriesMissingUser() throws AclException {
List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
.add(aclEntry(ACCESS, USER, ALL))
.add(aclEntry(ACCESS, GROUP, READ))
.add(aclEntry(ACCESS, OTHER, NONE))
.build();
List<AclEntry> aclSpec = Lists.newArrayList(
aclEntry(ACCESS, USER, "bruce", READ_WRITE),
aclEntry(ACCESS, GROUP, READ_EXECUTE),
aclEntry(ACCESS, GROUP, "sales", ALL),
aclEntry(ACCESS, MASK, ALL),
aclEntry(ACCESS, OTHER, NONE));
replaceAclEntries(existing, aclSpec);
} |
@Override
public ExecuteContext before(ExecuteContext context) {
Object[] arguments = context.getArguments();
if (arguments[0] instanceof HystrixConcurrencyStrategy) {
if (!HystrixRequestContext.isCurrentThreadInitialized()) {
HystrixRequestContext.initializeContext();
}
HystrixRequestVariableDefault<RequestTag> hystrixRequest = new HystrixRequestVariableDefault<>();
RequestTag requestTag = ThreadLocalUtils.getRequestTag();
// It is forbidden to deposit null, otherwise there will be serious performance problems
hystrixRequest.set(requestTag == null ? EMPTY_REQUEST_HEADER : requestTag);
}
return context;
} | @Test
public void testBefore() {
Map<String, List<String>> header = new HashMap<>();
header.put("bar", Collections.singletonList("bar1"));
header.put("foo", Collections.singletonList("foo1"));
ThreadLocalUtils.addRequestTag(header);
interceptor.before(context);
HystrixRequestContext context = HystrixRequestContext.getContextForCurrentThread();
Assert.assertNotNull(context);
Map<HystrixRequestVariableDefault<?>, ?> state = ReflectUtils.getFieldValue(context, "state")
.map(value -> (Map<HystrixRequestVariableDefault<?>, ?>) value).orElse(Collections.emptyMap());
for (Entry<HystrixRequestVariableDefault<?>, ?> entry : state.entrySet()) {
Object lazyInitializer = entry.getValue();
Object obj = ReflectUtils.getFieldValue(lazyInitializer, "value").orElse(null);
if (obj instanceof RequestTag) {
entry.getKey().remove();
Assert.assertEquals(header, ((RequestTag) obj).getTag());
}
}
} |
public boolean satisfies(ClusterSpec other) {
if ( ! other.id.equals(this.id)) return false; // ID mismatch
if (other.type.isContent() || this.type.isContent()) // Allow seamless transition between content and combined
return other.type.isContent() == this.type.isContent();
return other.type.equals(this.type);
} | @Test
void testSatisfies() {
var tests = Map.of(
List.of(spec(ClusterSpec.Type.content, "id1"), spec(ClusterSpec.Type.content, "id2")), false,
List.of(spec(ClusterSpec.Type.admin, "id1"), spec(ClusterSpec.Type.container, "id1")), false,
List.of(spec(ClusterSpec.Type.admin, "id1"), spec(ClusterSpec.Type.content, "id1")), false,
List.of(spec(ClusterSpec.Type.combined, "id1"), spec(ClusterSpec.Type.container, "id1")), false,
List.of(spec(ClusterSpec.Type.combined, "id1"), spec(ClusterSpec.Type.content, "id1")), true,
List.of(spec(ClusterSpec.Type.content, "id1"), spec(ClusterSpec.Type.content, "id1")), true
);
tests.forEach((specs, satisfies) -> {
var s1 = specs.get(0);
var s2 = specs.get(1);
assertEquals(satisfies, s1.satisfies(s2), s1 + (satisfies ? " satisfies " : " does not satisfy ") + s2);
assertEquals(satisfies, s2.satisfies(s1), s2 + (satisfies ? " satisfies " : " does not satisfy ") + s1);
});
} |
@CanIgnoreReturnValue
public final Ordered containsExactly(@Nullable Object @Nullable ... varargs) {
List<@Nullable Object> expected =
(varargs == null) ? newArrayList((@Nullable Object) null) : asList(varargs);
return containsExactlyElementsIn(
expected, varargs != null && varargs.length == 1 && varargs[0] instanceof Iterable);
} | @Test
public void iterableContainsExactlyWithDuplicatesUnexpectedItemFailure() {
expectFailureWhenTestingThat(asList(1, 2, 2, 2, 2, 3)).containsExactly(1, 2, 2, 3);
assertFailureValue("unexpected (2)", "2 [2 copies]");
} |
private Mono<ServerResponse> listPostsByCategoryName(ServerRequest request) {
final var name = request.pathVariable("name");
final var query = new PostPublicQuery(request.exchange());
var listOptions = query.toListOptions();
var newFieldSelector = listOptions.getFieldSelector()
.andQuery(QueryFactory.equal("spec.categories", name));
listOptions.setFieldSelector(newFieldSelector);
return postPublicQueryService.list(listOptions, query.toPageRequest())
.flatMap(result -> ServerResponse.ok()
.contentType(MediaType.APPLICATION_JSON)
.bodyValue(result)
);
} | @Test
void listPostsByCategoryName() {
ListResult<ListedPostVo> listResult = new ListResult<>(List.of());
when(postPublicQueryService.list(any(), any(PageRequest.class)))
.thenReturn(Mono.just(listResult));
webTestClient.get()
.uri("/categories/test/posts?page=1&size=10")
.exchange()
.expectStatus().isOk()
.expectHeader().contentType(MediaType.APPLICATION_JSON)
.expectBody()
.jsonPath("$.total").isEqualTo(listResult.getTotal())
.jsonPath("$.items").isArray();
} |
@Override
public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
for (Class<?> t = type.getRawType();
(t != Object.class) && (t.getSuperclass() != null);
t = t.getSuperclass()) {
for (Method m : t.getDeclaredMethods()) {
if (m.isAnnotationPresent(PostConstruct.class)) {
m.setAccessible(true);
TypeAdapter<T> delegate = gson.getDelegateAdapter(this, type);
return new PostConstructAdapter<>(delegate, m);
}
}
}
return null;
} | @Test
public void test() throws Exception {
Gson gson =
new GsonBuilder().registerTypeAdapterFactory(new PostConstructAdapterFactory()).create();
Sandwich unused =
gson.fromJson("{\"bread\": \"white\", \"cheese\": \"cheddar\"}", Sandwich.class);
var e =
assertThrows(
IllegalArgumentException.class,
() ->
gson.fromJson(
"{\"bread\": \"cheesey bread\", \"cheese\": \"swiss\"}", Sandwich.class));
assertThat(e).hasMessageThat().isEqualTo("too cheesey");
} |
private List<String> getNames( IMetaStore metaStore, ConnectionProvider<? extends ConnectionDetails> provider ) {
try {
return getMetaStoreFactory( metaStore, provider.getClassType() ).getElementNames();
} catch ( MetaStoreException mse ) {
logger.error( "Error calling metastore getElementNames()", mse );
return Collections.emptyList();
}
} | @Test
public void testGetNames() {
addOne();
List<String> names = connectionManager.getNames();
assertEquals( 1, names.size() );
assertEquals( CONNECTION_NAME, names.get( 0 ) );
} |
static AnnotatedClusterState generatedStateFrom(final Params params) {
final ContentCluster cluster = params.cluster;
final ClusterState workingState = ClusterState.emptyState();
final Map<Node, NodeStateReason> nodeStateReasons = new HashMap<>();
for (final NodeInfo nodeInfo : cluster.getNodeInfos()) {
final NodeState nodeState = computeEffectiveNodeState(nodeInfo, params, nodeStateReasons);
workingState.setNodeState(nodeInfo.getNode(), nodeState);
}
takeDownGroupsWithTooLowAvailability(workingState, nodeStateReasons, params);
final Optional<ClusterStateReason> reasonToBeDown = clusterDownReason(workingState, params);
if (reasonToBeDown.isPresent()) {
workingState.setClusterState(State.DOWN);
}
workingState.setDistributionBits(inferDistributionBitCount(cluster, workingState, params));
return new AnnotatedClusterState(workingState, reasonToBeDown, nodeStateReasons);
} | @Test
void cluster_down_if_less_than_min_count_of_storage_nodes_available() {
final ClusterFixture fixture = ClusterFixture.forFlatCluster(3)
.bringEntireClusterUp()
.reportStorageNodeState(0, State.DOWN)
.reportStorageNodeState(2, State.DOWN);
final ClusterStateGenerator.Params params = fixture.generatorParams().minStorageNodesUp(2);
final AnnotatedClusterState state = ClusterStateGenerator.generatedStateFrom(params);
assertThat(state.toString(), equalTo("cluster:d distributor:3 storage:2 .0.s:d"));
assertThat(state.getClusterStateReason(), equalTo(Optional.of(ClusterStateReason.TOO_FEW_STORAGE_NODES_AVAILABLE)));
} |
@VisibleForTesting
static void verifyImageMetadata(ImageMetadataTemplate metadata, Path metadataCacheDirectory)
throws CacheCorruptedException {
List<ManifestAndConfigTemplate> manifestsAndConfigs = metadata.getManifestsAndConfigs();
if (manifestsAndConfigs.isEmpty()) {
throw new CacheCorruptedException(metadataCacheDirectory, "Manifest cache empty");
}
if (manifestsAndConfigs.stream().anyMatch(entry -> entry.getManifest() == null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Manifest(s) missing");
}
if (metadata.getManifestList() == null && manifestsAndConfigs.size() != 1) {
throw new CacheCorruptedException(metadataCacheDirectory, "Manifest list missing");
}
ManifestTemplate firstManifest = manifestsAndConfigs.get(0).getManifest();
if (firstManifest instanceof V21ManifestTemplate) {
if (metadata.getManifestList() != null
|| manifestsAndConfigs.stream().anyMatch(entry -> entry.getConfig() != null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Schema 1 manifests corrupted");
}
} else if (firstManifest instanceof BuildableManifestTemplate) {
if (manifestsAndConfigs.stream().anyMatch(entry -> entry.getConfig() == null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Schema 2 manifests corrupted");
}
if (metadata.getManifestList() != null
&& manifestsAndConfigs.stream().anyMatch(entry -> entry.getManifestDigest() == null)) {
throw new CacheCorruptedException(metadataCacheDirectory, "Schema 2 manifests corrupted");
}
} else {
throw new CacheCorruptedException(
metadataCacheDirectory, "Unknown manifest type: " + firstManifest);
}
} | @Test
public void testVerifyImageMetadata_schema1ManifestsCorrupted_containerConfigExists() {
ManifestAndConfigTemplate manifestAndConfig =
new ManifestAndConfigTemplate(
new V21ManifestTemplate(), new ContainerConfigurationTemplate());
ImageMetadataTemplate metadata =
new ImageMetadataTemplate(null, Arrays.asList(manifestAndConfig));
try {
CacheStorageReader.verifyImageMetadata(metadata, Paths.get("/cache/dir"));
Assert.fail();
} catch (CacheCorruptedException ex) {
MatcherAssert.assertThat(
ex.getMessage(), CoreMatchers.startsWith("Schema 1 manifests corrupted"));
}
} |
public ConnectionAuthContext authorizePeer(X509Certificate cert) { return authorizePeer(List.of(cert)); } | @Test
void certificate_must_match_both_san_and_cn_pattern() {
RequiredPeerCredential cnRequirement = createRequiredCredential(CN, "*.matching.cn");
RequiredPeerCredential sanRequirement = createRequiredCredential(SAN_DNS, "*.matching.san");
PeerAuthorizer authorizer = createPeerAuthorizer(createPolicy(POLICY_1, cnRequirement, sanRequirement));
ConnectionAuthContext result = authorizer.authorizePeer(createCertificate("foo.matching.cn", List.of("foo.matching.san", "foo.invalid.san"), List.of()));
assertAuthorized(result);
assertThat(result.matchedPolicies()).containsOnly(POLICY_1);
assertUnauthorized(authorizer.authorizePeer(createCertificate("foo.invalid.cn", List.of("foo.matching.san"), List.of())));
assertUnauthorized(authorizer.authorizePeer(createCertificate("foo.invalid.cn", List.of("foo.matching.san", "foo.invalid.san"), List.of())));
assertUnauthorized(authorizer.authorizePeer(createCertificate("foo.matching.cn", List.of("foo.invalid.san"), List.of())));
} |
public Predicate getPredicate() {
return predicate;
} | @Test
public void requireThatConstructorsWork() {
assertNull(new PredicateFieldValue().getPredicate());
Predicate predicate = SimplePredicates.newPredicate();
assertEquals(predicate, new PredicateFieldValue(predicate).getPredicate());
} |
@Override
protected FieldValue doGet(String fieldName, EventWithContext eventWithContext) {
final ImmutableMap.Builder<String, Object> dataModelBuilder = ImmutableMap.builder();
if (eventWithContext.messageContext().isPresent()) {
dataModelBuilder.put("source", eventWithContext.messageContext().get().getFields());
} else if (eventWithContext.eventContext().isPresent()) {
dataModelBuilder.put("source", eventWithContext.eventContext().get().toDto().fields());
}
final ImmutableMap<String, Object> dataModel = dataModelBuilder.build();
if (!isValidTemplate(config.template(), dataModel)) {
return FieldValue.error();
}
try {
return FieldValue.string(templateEngine.transform(config.template(), dataModel));
} catch (Exception e) {
LOG.error("Couldn't render field template \"{}\"", config.template(), e);
return FieldValue.error();
}
} | @Test
public void templateBooleanFormatting() {
final TestEvent event = new TestEvent();
final EventWithContext eventWithContext = EventWithContext.create(event, newMessage(ImmutableMap.of("success", true)));
final FieldValue fieldValue = newTemplate("success: ${source.success}").doGet("test", eventWithContext);
assertThat(fieldValue.value()).isEqualTo("success: true");
} |