focal_method | test_case |
---|---|
@Override
public boolean equals(Object obj)
{
if (obj instanceof IdEntityResponse)
{
IdEntityResponse<?, ?> that = (IdEntityResponse<?, ?>) obj;
return super.equals(that) &&
(this._entity == null ? that._entity == null : this._entity.equals(that._entity));
}
else
{
return false;
}
} | @Test
public void testEquals()
{
IdEntityResponse<Long, AnyRecord> longIdEntityResponse1 = new IdEntityResponse<>(1L, new AnyRecord());
IdEntityResponse<Long, AnyRecord> longIdEntityResponse2 = new IdEntityResponse<>(1L, new AnyRecord());
IdEntityResponse<Long, AnyRecord> nullLongResponse = new IdEntityResponse<>(null, new AnyRecord());
IdEntityResponse<String, AnyRecord> nullStringResponse = new IdEntityResponse<>(null, new AnyRecord());
IdEntityResponse<String, AnyRecord> stringResponse = new IdEntityResponse<>("hello", new AnyRecord());
// equals and non-null.
Assert.assertTrue(longIdEntityResponse1.equals(longIdEntityResponse2));
// equals and null
Assert.assertTrue(nullLongResponse.equals(nullStringResponse));
Assert.assertTrue(nullStringResponse.equals(nullLongResponse));
// unequal and non-null
Assert.assertFalse(longIdEntityResponse1.equals(stringResponse));
// unequal and one null
Assert.assertFalse(longIdEntityResponse1.equals(nullLongResponse));
Assert.assertFalse(nullLongResponse.equals(longIdEntityResponse1));
} |
@Override
public Credentials configure(final Host host) {
final Credentials credentials = new Credentials(host.getCredentials());
final String profile = credentials.getUsername();
final Optional<Map.Entry<String, BasicProfile>> optional = profiles.entrySet().stream().filter(new Predicate<Map.Entry<String, BasicProfile>>() {
@Override
public boolean test(final Map.Entry<String, BasicProfile> entry) {
final String profileName = entry.getKey();
final BasicProfile basicProfile = entry.getValue();
final String awsAccessIdKey = basicProfile.getAwsAccessIdKey();
// Matching access key or profile name
if(StringUtils.equals(profileName, profile)) {
if(log.isDebugEnabled()) {
log.debug(String.format("Found matching profile %s for profile name %s", profile, profileName));
}
return true;
}
else if(StringUtils.equals(awsAccessIdKey, profile)) {
if(log.isDebugEnabled()) {
log.debug(String.format("Found matching profile %s for access key %s", profile, awsAccessIdKey));
}
return true;
}
return false;
}
}).findFirst();
if(optional.isPresent()) {
final Map.Entry<String, BasicProfile> entry = optional.get();
final BasicProfile basicProfile = entry.getValue();
final String tokenCode;
if(basicProfile.getProperties().containsKey("mfa_serial")) {
try {
tokenCode = prompt.prompt(
host, LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
String.format("%s %s", LocaleFactory.localizedString("Multi-Factor Authentication", "S3"),
basicProfile.getPropertyValue("mfa_serial")),
new LoginOptions(host.getProtocol())
.password(true)
.passwordPlaceholder(LocaleFactory.localizedString("MFA Authentication Code", "S3"))
.keychain(false)
).getPassword();
}
catch(LoginCanceledException e) {
log.warn(String.format("Canceled MFA prompt for profile %s", basicProfile));
return credentials;
}
}
else {
tokenCode = null;
}
final Integer durationSeconds;
if(basicProfile.getProperties().containsKey("duration_seconds")) {
durationSeconds = Integer.valueOf(basicProfile.getPropertyValue("duration_seconds"));
}
else {
durationSeconds = null;
}
if(basicProfile.isRoleBasedProfile()) {
if(log.isDebugEnabled()) {
log.debug(String.format("Configure credentials from role based profile %s", basicProfile.getProfileName()));
}
if(StringUtils.isBlank(basicProfile.getRoleSourceProfile())) {
log.warn(String.format("Missing source profile reference in profile %s", basicProfile.getProfileName()));
return credentials;
}
else if(!profiles.containsKey(basicProfile.getRoleSourceProfile())) {
log.warn(String.format("Missing source profile with name %s", basicProfile.getRoleSourceProfile()));
return credentials;
}
else {
final BasicProfile sourceProfile = profiles.get(basicProfile.getRoleSourceProfile());
final AWSSecurityTokenService service;
if(sourceProfile.getProperties().containsKey("sso_start_url")) {
// Read cached SSO credentials
final CachedCredential cached = this.fetchSsoCredentials(sourceProfile.getProperties());
if(null == cached) {
return credentials;
}
service = this.getTokenService(host, host.getRegion(),
cached.accessKey, cached.secretKey, cached.sessionToken);
}
else {
// If a profile defines the role_arn property then the profile is treated as an assume role profile
service = this.getTokenService(host, host.getRegion(),
sourceProfile.getAwsAccessIdKey(),
sourceProfile.getAwsSecretAccessKey(),
sourceProfile.getAwsSessionToken());
}
// Starts a new session by sending a request to the AWS Security Token Service (STS) to assume a
// role using the long-lived AWS credentials
final AssumeRoleRequest assumeRoleRequest = new AssumeRoleRequest()
.withExternalId(basicProfile.getRoleExternalId())
.withRoleArn(basicProfile.getRoleArn())
// Specify this value if the IAM user has a policy that requires MFA authentication
.withSerialNumber(basicProfile.getPropertyValue("mfa_serial"))
// The value provided by the MFA device, if MFA is required
.withTokenCode(tokenCode
// mfa_serial - The identification number of the MFA device to use when assuming a role. This is an optional parameter.
// Specify this value if the trust policy of the role being assumed includes a condition that requires MFA authentication.
// The value is either the serial number for a hardware device (such as GAHT12345678) or an Amazon Resource Name (ARN) for
// a virtual device (such as arn:aws:iam::123456789012:mfa/user).
)
.withRoleSessionName(basicProfile.getRoleSessionName() == null ? new AsciiRandomStringService().random() : basicProfile.getRoleSessionName())
.withDurationSeconds(durationSeconds
// duration_seconds - Specifies the maximum duration of the role session, in seconds. The value can range from 900 seconds
// (15 minutes) up to the maximum session duration setting for the role (which can be a maximum of 43200). This is an
// optional parameter and by default, the value is set to 3600 seconds.
);
if(log.isDebugEnabled()) {
log.debug(String.format("Request %s from %s", assumeRoleRequest, service));
}
try {
final AssumeRoleResult assumeRoleResult = service.assumeRole(assumeRoleRequest);
if(log.isDebugEnabled()) {
log.debug(String.format("Set credentials from %s", assumeRoleResult));
}
credentials.setTokens(new TemporaryAccessTokens(
assumeRoleResult.getCredentials().getAccessKeyId(),
assumeRoleResult.getCredentials().getSecretAccessKey(),
assumeRoleResult.getCredentials().getSessionToken(),
assumeRoleResult.getCredentials().getExpiration().getTime()));
}
catch(AWSSecurityTokenServiceException e) {
log.warn(e.getErrorMessage(), e);
return credentials;
}
}
}
else {
if(log.isDebugEnabled()) {
log.debug(String.format("Configure credentials from basic profile %s", basicProfile.getProfileName()));
}
final Map<String, String> profileProperties = basicProfile.getProperties();
if(profileProperties.containsKey("sso_start_url") || profileProperties.containsKey("sso_session")) {
// Read cached SSO credentials
final CachedCredential cached = this.fetchSsoCredentials(profileProperties);
if(null == cached) {
return credentials;
}
return credentials.withTokens(new TemporaryAccessTokens(
cached.accessKey, cached.secretKey, cached.sessionToken,
Instant.parse(cached.expiration).toEpochMilli()));
}
if(tokenCode != null) {
// Obtain session token
if(log.isDebugEnabled()) {
log.debug(String.format("Get session token from credentials in profile %s", basicProfile.getProfileName()));
}
final AWSSecurityTokenService service = this.getTokenService(host,
host.getRegion(),
basicProfile.getAwsAccessIdKey(),
basicProfile.getAwsSecretAccessKey(),
basicProfile.getAwsSessionToken());
// The purpose of the sts:GetSessionToken operation is to authenticate the user using MFA.
final GetSessionTokenRequest sessionTokenRequest = new GetSessionTokenRequest()
// The value provided by the MFA device, if MFA is required
.withTokenCode(tokenCode)
// Specify this value if the IAM user has a policy that requires MFA authentication
.withSerialNumber(basicProfile.getPropertyValue("mfa_serial"))
.withDurationSeconds(durationSeconds);
if(log.isDebugEnabled()) {
log.debug(String.format("Request %s from %s", sessionTokenRequest, service));
}
try {
final GetSessionTokenResult sessionTokenResult = service.getSessionToken(sessionTokenRequest);
if(log.isDebugEnabled()) {
log.debug(String.format("Set credentials from %s", sessionTokenResult));
}
return credentials.withTokens(new TemporaryAccessTokens(
sessionTokenResult.getCredentials().getAccessKeyId(),
sessionTokenResult.getCredentials().getSecretAccessKey(),
sessionTokenResult.getCredentials().getSessionToken(),
sessionTokenResult.getCredentials().getExpiration().getTime()));
}
catch(AWSSecurityTokenServiceException e) {
log.warn(e.getErrorMessage(), e);
return credentials;
}
}
if(log.isDebugEnabled()) {
log.debug(String.format("Set credentials from profile %s", basicProfile.getProfileName()));
}
return credentials
.withTokens(new TemporaryAccessTokens(
basicProfile.getAwsAccessIdKey(),
basicProfile.getAwsSecretAccessKey(),
basicProfile.getAwsSessionToken(),
-1L))
.withUsername(basicProfile.getAwsAccessIdKey())
.withPassword(basicProfile.getAwsSecretAccessKey());
}
}
else {
log.warn(String.format("No matching configuration for profile %s in %s", profile, profiles));
}
return credentials;
} | @Test
public void testConfigure() throws Exception {
new S3CredentialsConfigurator(new DisabledX509TrustManager(), new DefaultX509KeyManager(), new DisabledPasswordCallback())
.reload().configure(new Host(new TestProtocol()));
} |
public static void checkNotNullAndNotEmpty(@Nullable String value, String propertyName) {
Preconditions.checkNotNull(value, "Property '" + propertyName + "' cannot be null");
Preconditions.checkArgument(
!value.trim().isEmpty(), "Property '" + propertyName + "' cannot be an empty string");
} | @Test
public void testCheckNotNullAndNotEmpty_stringFailNull() {
try {
Validator.checkNotNullAndNotEmpty((String) null, "test");
Assert.fail();
} catch (NullPointerException npe) {
Assert.assertEquals("Property 'test' cannot be null", npe.getMessage());
}
} |
@ApiOperation(value = "Get Customer Entity Views (getCustomerEntityViews)",
notes = "Returns a page of Entity View objects assigned to customer. " +
PAGE_DATA_PARAMETERS + TENANT_OR_CUSTOMER_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAnyAuthority('TENANT_ADMIN', 'CUSTOMER_USER')")
@RequestMapping(value = "/customer/{customerId}/entityViews", params = {"pageSize", "page"}, method = RequestMethod.GET)
@ResponseBody
public PageData<EntityView> getCustomerEntityViews(
@Parameter(description = CUSTOMER_ID_PARAM_DESCRIPTION, required = true)
@PathVariable(CUSTOMER_ID) String strCustomerId,
@Parameter(description = PAGE_SIZE_DESCRIPTION, required = true)
@RequestParam int pageSize,
@Parameter(description = PAGE_NUMBER_DESCRIPTION, required = true)
@RequestParam int page,
@Parameter(description = ENTITY_VIEW_TYPE)
@RequestParam(required = false) String type,
@Parameter(description = ENTITY_VIEW_TEXT_SEARCH_DESCRIPTION)
@RequestParam(required = false) String textSearch,
@Parameter(description = SORT_PROPERTY_DESCRIPTION, schema = @Schema(allowableValues = {"createdTime", "name", "type"}))
@RequestParam(required = false) String sortProperty,
@Parameter(description = SORT_ORDER_DESCRIPTION, schema = @Schema(allowableValues = {"ASC", "DESC"}))
@RequestParam(required = false) String sortOrder) throws ThingsboardException {
checkParameter(CUSTOMER_ID, strCustomerId);
TenantId tenantId = getCurrentUser().getTenantId();
CustomerId customerId = new CustomerId(toUUID(strCustomerId));
checkCustomerId(customerId, Operation.READ);
PageLink pageLink = createPageLink(pageSize, page, textSearch, sortProperty, sortOrder);
if (type != null && type.trim().length() > 0) {
return checkNotNull(entityViewService.findEntityViewsByTenantIdAndCustomerIdAndType(tenantId, customerId, pageLink, type));
} else {
return checkNotNull(entityViewService.findEntityViewsByTenantIdAndCustomerId(tenantId, customerId, pageLink));
}
} | @Test
public void testGetCustomerEntityViews() throws Exception {
Customer customer = doPost("/api/customer", getNewCustomer("Test customer"), Customer.class);
CustomerId customerId = customer.getId();
String urlTemplate = "/api/customer/" + customerId.getId().toString() + "/entityViewInfos?";
Mockito.reset(tbClusterService, auditLogService);
int cntEntity = 128;
List<ListenableFuture<EntityViewInfo>> viewFutures = new ArrayList<>(cntEntity);
for (int i = 0; i < cntEntity; i++) {
String entityName = "Test entity view " + i;
viewFutures.add(executor.submit(() ->
new EntityViewInfo(doPost("/api/customer/" + customerId.getId().toString() + "/entityView/"
+ getNewSavedEntityView(entityName).getId().getId().toString(), EntityView.class),
customer.getTitle(), customer.isPublic())));
}
List<EntityViewInfo> entityViewInfos = Futures.allAsList(viewFutures).get(TIMEOUT, SECONDS);
List<EntityViewInfo> loadedViews = loadListOfInfo(new PageLink(23), urlTemplate);
assertThat(entityViewInfos).containsExactlyInAnyOrderElementsOf(loadedViews);
testNotifyEntityBroadcastEntityStateChangeEventMany(new EntityView(), new EntityView(),
tenantId, tenantAdminCustomerId, tenantAdminUserId, TENANT_ADMIN_EMAIL,
ActionType.ADDED, ActionType.ADDED, cntEntity, cntEntity, cntEntity * 2, 0);
testNotifyEntityBroadcastEntityStateChangeEventMany(new EntityView(), new EntityView(),
tenantId, customerId, tenantAdminUserId, TENANT_ADMIN_EMAIL,
ActionType.ASSIGNED_TO_CUSTOMER, ActionType.UPDATED, cntEntity, cntEntity,
cntEntity * 2, 3);
} |
public static TopicPublishInfo topicRouteData2TopicPublishInfo(final String topic, final TopicRouteData route) {
TopicPublishInfo info = new TopicPublishInfo();
// TODO: check the usage of the raw route; it would be better to remove this field
info.setTopicRouteData(route);
if (route.getOrderTopicConf() != null && route.getOrderTopicConf().length() > 0) {
String[] brokers = route.getOrderTopicConf().split(";");
for (String broker : brokers) {
String[] item = broker.split(":");
int nums = Integer.parseInt(item[1]);
for (int i = 0; i < nums; i++) {
MessageQueue mq = new MessageQueue(topic, item[0], i);
info.getMessageQueueList().add(mq);
}
}
info.setOrderTopic(true);
} else if (route.getOrderTopicConf() == null
&& route.getTopicQueueMappingByBroker() != null
&& !route.getTopicQueueMappingByBroker().isEmpty()) {
info.setOrderTopic(false);
ConcurrentMap<MessageQueue, String> mqEndPoints = topicRouteData2EndpointsForStaticTopic(topic, route);
info.getMessageQueueList().addAll(mqEndPoints.keySet());
info.getMessageQueueList().sort((mq1, mq2) -> MixAll.compareInteger(mq1.getQueueId(), mq2.getQueueId()));
} else {
List<QueueData> qds = route.getQueueDatas();
Collections.sort(qds);
for (QueueData qd : qds) {
if (PermName.isWriteable(qd.getPerm())) {
BrokerData brokerData = null;
for (BrokerData bd : route.getBrokerDatas()) {
if (bd.getBrokerName().equals(qd.getBrokerName())) {
brokerData = bd;
break;
}
}
if (null == brokerData) {
continue;
}
if (!brokerData.getBrokerAddrs().containsKey(MixAll.MASTER_ID)) {
continue;
}
for (int i = 0; i < qd.getWriteQueueNums(); i++) {
MessageQueue mq = new MessageQueue(topic, qd.getBrokerName(), i);
info.getMessageQueueList().add(mq);
}
}
}
info.setOrderTopic(false);
}
return info;
} | @Test
public void testTopicRouteData2TopicPublishInfoWithTopicQueueMappingByBroker() {
TopicRouteData topicRouteData = createTopicRouteData();
when(topicRouteData.getTopicQueueMappingByBroker()).thenReturn(Collections.singletonMap(topic, new TopicQueueMappingInfo()));
TopicPublishInfo actual = MQClientInstance.topicRouteData2TopicPublishInfo(topic, topicRouteData);
assertFalse(actual.isHaveTopicRouterInfo());
assertEquals(0, actual.getMessageQueueList().size());
} |
public static Sensor punctuateSensor(final String threadId,
final String taskId,
final StreamsMetricsImpl streamsMetrics) {
return invocationRateAndCountAndAvgAndMaxLatencySensor(
threadId,
taskId,
PUNCTUATE,
PUNCTUATE_RATE_DESCRIPTION,
PUNCTUATE_TOTAL_DESCRIPTION,
PUNCTUATE_AVG_LATENCY_DESCRIPTION,
PUNCTUATE_MAX_LATENCY_DESCRIPTION,
Sensor.RecordingLevel.DEBUG,
streamsMetrics
);
} | @Test
public void shouldGetPunctuateSensor() {
final String operation = "punctuate";
when(streamsMetrics.taskLevelSensor(THREAD_ID, TASK_ID, operation, RecordingLevel.DEBUG))
.thenReturn(expectedSensor);
final String operationLatency = operation + StreamsMetricsImpl.LATENCY_SUFFIX;
final String totalDescription = "The total number of calls to punctuate";
final String rateDescription = "The average number of calls to punctuate per second";
final String avgLatencyDescription = "The average latency of calls to punctuate";
final String maxLatencyDescription = "The maximum latency of calls to punctuate";
when(streamsMetrics.taskLevelTagMap(THREAD_ID, TASK_ID)).thenReturn(tagMap);
try (final MockedStatic<StreamsMetricsImpl> streamsMetricsStaticMock = mockStatic(StreamsMetricsImpl.class)) {
final Sensor sensor = TaskMetrics.punctuateSensor(THREAD_ID, TASK_ID, streamsMetrics);
streamsMetricsStaticMock.verify(
() -> StreamsMetricsImpl.addInvocationRateAndCountToSensor(
expectedSensor,
TASK_LEVEL_GROUP,
tagMap,
operation,
rateDescription,
totalDescription
)
);
streamsMetricsStaticMock.verify(
() -> StreamsMetricsImpl.addAvgAndMaxToSensor(
expectedSensor,
TASK_LEVEL_GROUP,
tagMap,
operationLatency,
avgLatencyDescription,
maxLatencyDescription
)
);
assertThat(sensor, is(expectedSensor));
}
} |
public void checkExecutePrerequisites(final ExecutionContext executionContext) {
ShardingSpherePreconditions.checkState(isValidExecutePrerequisites(executionContext), () -> new TableModifyInTransactionException(getTableName(executionContext)));
} | @Test
void assertCheckExecutePrerequisitesWhenExecuteDDLInPostgreSQLTransaction() {
when(transactionRule.getDefaultType()).thenReturn(TransactionType.LOCAL);
ExecutionContext executionContext = new ExecutionContext(
new QueryContext(createPostgreSQLCreateTableStatementContext(), "", Collections.emptyList(), new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class)),
Collections.emptyList(), mock(RouteContext.class));
assertThrows(TableModifyInTransactionException.class,
() -> new ProxySQLExecutor(JDBCDriverType.STATEMENT, databaseConnectionManager, mock(DatabaseConnector.class), mockQueryContext()).checkExecutePrerequisites(executionContext));
} |
public int getSeconds(HazelcastProperty property) {
TimeUnit timeUnit = property.getTimeUnit();
return (int) timeUnit.toSeconds(getLong(property));
} | @Test
public void getTimeUnit() {
config.setProperty(ClusterProperty.PARTITION_TABLE_SEND_INTERVAL.getName(), "300");
HazelcastProperties properties = new HazelcastProperties(config);
assertEquals(300, properties.getSeconds(ClusterProperty.PARTITION_TABLE_SEND_INTERVAL));
} |
public static ParameterizedType parameterize(final Class<?> raw, final Type... typeArguments) {
checkParameterizeMethodParameter(raw, typeArguments);
return new ParameterizedTypeImpl(raw, raw.getEnclosingClass(), typeArguments);
} | @Test
void parameterize() {
ParameterizedType stringComparableType = TypeUtils.parameterize(List.class, String.class);
assertEquals("java.util.List<java.lang.String>", stringComparableType.toString());
assertEquals(List.class, stringComparableType.getRawType());
assertNull(stringComparableType.getOwnerType());
assertEquals(1, stringComparableType.getActualTypeArguments().length);
assertEquals(String.class, stringComparableType.getActualTypeArguments()[0]);
ParameterizedType stringIntegerComparableType = TypeUtils.parameterize(Map.class, String.class, Integer.class);
assertEquals("java.util.Map<java.lang.String, java.lang.Integer>", stringIntegerComparableType.toString());
assertEquals(Map.class, stringIntegerComparableType.getRawType());
assertNull(stringIntegerComparableType.getOwnerType());
assertEquals(2, stringIntegerComparableType.getActualTypeArguments().length);
assertEquals(String.class, stringIntegerComparableType.getActualTypeArguments()[0]);
assertEquals(Integer.class, stringIntegerComparableType.getActualTypeArguments()[1]);
} |
@Override
public void onOpened() {
digestNotification();
} | @Test
public void onOpened_appVisible_clearNotificationsDrawer() throws Exception {
verify(mNotificationManager, never()).cancelAll();
setUpForegroundApp();
final PushNotification uut = createUUT();
uut.onOpened();
verify(mNotificationManager, never()).cancelAll();
} |
@Override
public String toString() {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("x=");
stringBuilder.append(this.x);
stringBuilder.append(", y=");
stringBuilder.append(this.y);
return stringBuilder.toString();
} | @Test
public void toStringTest() {
Point point = new Point(1, 2);
Assert.assertEquals(POINT_TO_STRING, point.toString());
} |
public void setActiveParamChecker(String activeParamChecker) {
this.activeParamChecker = activeParamChecker;
} | @Test
void setActiveParamChecker() {
ServerParamCheckConfig paramCheckConfig = ServerParamCheckConfig.getInstance();
paramCheckConfig.setActiveParamChecker("test");
assertEquals("test", paramCheckConfig.getActiveParamChecker());
} |
@Beta
public static Application fromBuilder(Builder builder) throws Exception {
return builder.build();
} | @Test
void handler() throws Exception {
try (
ApplicationFacade app = new ApplicationFacade(Application.fromBuilder(new Application.Builder().container("default", new Application.Builder.Container()
.handler("http://*/", MockHttpHandler.class))))
) {
RequestHandler handler = app.getRequestHandlerById(MockHttpHandler.class.getName());
assertNotNull(handler);
Request request = new Request("http://localhost:" + getDefaults().vespaWebServicePort() + "/");
Response response = app.handleRequest(request);
assertNotNull(response);
assertEquals(response.getStatus(), 200);
assertEquals(response.getBodyAsString(), "OK");
request = new Request("http://localhost");
response = app.handleRequest(request);
assertNotNull(response);
assertEquals(response.getStatus(), 200);
assertEquals(response.getBodyAsString(), "OK");
request = new Request("http://localhost/?query=foo");
response = app.handleRequest(request);
assertNotNull(response);
assertEquals(response.getStatus(), 200);
assertEquals(response.getBodyAsString(), "OK");
}
} |
@Override public boolean replace(long key, long oldValue, long newValue) {
assert oldValue != nullValue : "replace() called with null-sentinel oldValue " + nullValue;
assert newValue != nullValue : "replace() called with null-sentinel newValue " + nullValue;
final long valueAddr = hsa.get(key);
if (valueAddr == NULL_ADDRESS) {
return false;
}
final long actualValue = mem.getLong(valueAddr);
if (actualValue != oldValue) {
return false;
}
mem.putLong(valueAddr, newValue);
return true;
} | @Test(expected = AssertionError.class)
@RequireAssertEnabled
public void test_replaceIfEquals_invalidOldValue() {
map.replace(newKey(), MISSING_VALUE, newValue());
} |
protected String parseSubClusterId(CommandLine cliParser) {
// If YARN Federation mode is not enabled, return empty.
if (!isYarnFederationEnabled(getConf())) {
return StringUtils.EMPTY;
}
String subClusterId = cliParser.getOptionValue(OPTION_SUBCLUSTERID);
if (StringUtils.isBlank(subClusterId)) {
return StringUtils.EMPTY;
}
System.out.println("SubClusterId : " + subClusterId);
return subClusterId;
} | @Test
public void testParseSubClusterId() throws Exception {
rmAdminCLI.getConf().setBoolean(YarnConfiguration.FEDERATION_ENABLED, true);
// replaceLabelsOnNode
String[] replaceLabelsOnNodeArgs = {"-replaceLabelsOnNode",
"node1:8000,x node2:8000=y node3,x node4=Y", "-subClusterId", "SC-1"};
assertEquals(0, rmAdminCLI.run(replaceLabelsOnNodeArgs));
String[] refreshQueuesArgs = {"-refreshQueues", "-subClusterId", "SC-1"};
assertEquals(0, rmAdminCLI.run(refreshQueuesArgs));
String[] refreshNodesResourcesArgs = {"-refreshNodesResources", "-subClusterId", "SC-1"};
assertEquals(0, rmAdminCLI.run(refreshNodesResourcesArgs));
String nodeIdStr = "0.0.0.0:0";
String resourceTypes = "memory-mb=1024Mi,vcores=1,resource2";
String[] updateNodeResourceArgs = {"-updateNodeResource", nodeIdStr,
resourceTypes, "-subClusterId", "SC-1"};
rmAdminCLI.parseSubClusterId(updateNodeResourceArgs, false);
assertEquals(-1, rmAdminCLI.run(updateNodeResourceArgs));
} |
public final void contains(@Nullable Object element) {
if (!Iterables.contains(checkNotNull(actual), element)) {
List<@Nullable Object> elementList = newArrayList(element);
if (hasMatchingToStringPair(actual, elementList)) {
failWithoutActual(
fact("expected to contain", element),
fact("an instance of", objectToTypeName(element)),
simpleFact("but did not"),
fact(
"though it did contain",
countDuplicatesAndAddTypeInfo(
retainMatchingToString(actual, /* itemsToCheck= */ elementList))),
fullContents());
} else {
failWithActual("expected to contain", element);
}
}
} | @Test
public void iterableContainsFailsWithSameToString() {
expectFailureWhenTestingThat(asList(1L, 2L, 3L, 2L)).contains(2);
assertFailureKeys(
"expected to contain",
"an instance of",
"but did not",
"though it did contain",
"full contents");
assertFailureValue("expected to contain", "2");
assertFailureValue("an instance of", "java.lang.Integer");
assertFailureValue("though it did contain", "[2 [2 copies]] (java.lang.Long)");
assertFailureValue("full contents", "[1, 2, 3, 2]");
} |
static void maybeReportHybridDiscoveryIssue(PluginDiscoveryMode discoveryMode, PluginScanResult serviceLoadingScanResult, PluginScanResult mergedResult) {
SortedSet<PluginDesc<?>> missingPlugins = new TreeSet<>();
mergedResult.forEach(missingPlugins::add);
serviceLoadingScanResult.forEach(missingPlugins::remove);
if (missingPlugins.isEmpty()) {
if (discoveryMode == PluginDiscoveryMode.HYBRID_WARN || discoveryMode == PluginDiscoveryMode.HYBRID_FAIL) {
log.warn("All plugins have ServiceLoader manifests, consider reconfiguring {}={}",
WorkerConfig.PLUGIN_DISCOVERY_CONFIG, PluginDiscoveryMode.SERVICE_LOAD);
}
} else {
String message = String.format(
"One or more plugins are missing ServiceLoader manifests may not be usable with %s=%s: %s%n" +
"Read the documentation at %s for instructions on migrating your plugins " +
"to take advantage of the performance improvements of %s mode.",
WorkerConfig.PLUGIN_DISCOVERY_CONFIG,
PluginDiscoveryMode.SERVICE_LOAD,
missingPlugins.stream()
.map(pluginDesc -> pluginDesc.location() + "\t" + pluginDesc.className() + "\t" + pluginDesc.type() + "\t" + pluginDesc.version())
.collect(Collectors.joining("\n", "[\n", "\n]")),
"https://kafka.apache.org/documentation.html#connect_plugindiscovery",
PluginDiscoveryMode.SERVICE_LOAD
);
if (discoveryMode == PluginDiscoveryMode.HYBRID_WARN) {
log.warn("{} To silence this warning, set {}={} in the worker config.",
message, WorkerConfig.PLUGIN_DISCOVERY_CONFIG, PluginDiscoveryMode.ONLY_SCAN);
} else if (discoveryMode == PluginDiscoveryMode.HYBRID_FAIL) {
throw new ConnectException(String.format("%s To silence this error, set %s=%s in the worker config.",
message, WorkerConfig.PLUGIN_DISCOVERY_CONFIG, PluginDiscoveryMode.HYBRID_WARN));
}
}
} | @Test
public void testServiceLoadWithPlugins() {
try (LogCaptureAppender logCaptureAppender = LogCaptureAppender.createAndRegister(Plugins.class)) {
Plugins.maybeReportHybridDiscoveryIssue(PluginDiscoveryMode.SERVICE_LOAD, nonEmpty, nonEmpty);
assertTrue(logCaptureAppender.getEvents().stream().noneMatch(e -> e.getLevel().contains("ERROR") || e.getLevel().equals("WARN")));
}
} |
public <E extends Enum> E getEnum(HazelcastProperty property, Class<E> enumClazz) {
String value = getString(property);
for (E enumConstant : enumClazz.getEnumConstants()) {
if (equalsIgnoreCase(enumConstant.name(), value)) {
return enumConstant;
}
}
throw new IllegalArgumentException(format("value '%s' for property '%s' is not a valid %s value",
value, property.getName(), enumClazz.getName()));
} | @Test
public void getEnum_default() {
HazelcastProperties properties = new HazelcastProperties(config.getProperties());
HealthMonitorLevel healthMonitorLevel = properties
.getEnum(ClusterProperty.HEALTH_MONITORING_LEVEL, HealthMonitorLevel.class);
assertEquals(HealthMonitorLevel.SILENT, healthMonitorLevel);
} |
@Override
public Object getObject(final int columnIndex) throws SQLException {
return mergeResultSet.getValue(columnIndex, Object.class);
} | @Test
void assertGetObjectWithOffsetDateTime() throws SQLException {
OffsetDateTime result = OffsetDateTime.now();
when(mergeResultSet.getValue(1, Timestamp.class)).thenReturn(result);
assertThat(shardingSphereResultSet.getObject(1, OffsetDateTime.class), is(result));
} |
public static void extractFiles(File archive, File extractTo) throws ExtractionException {
extractFiles(archive, extractTo, null);
} | @Test(expected = org.owasp.dependencycheck.utils.ExtractionException.class)
public void testExtractFiles_File_File() throws Exception {
File destination = getSettings().getTempDirectory();
File archive = BaseTest.getResourceAsFile(this, "evil.zip");
ExtractionUtil.extractFiles(archive, destination);
} |
@Override
public boolean equals(Object other) {
if (other instanceof Cost) {
return equals((RelOptCost) other);
}
return false;
} | @Test
public void testEquals() {
CostFactory factory = CostFactory.INSTANCE;
checkEquals(factory.makeCost(1.0d, 2.0d, 3.0d), factory.makeCost(1.0d, 2.0d, 3.0d), true);
checkEquals(factory.makeCost(1.0d, 2.0d, 3.0d), factory.makeCost(10.0d, 2.0d, 3.0d), false);
checkEquals(factory.makeCost(1.0d, 2.0d, 3.0d), factory.makeCost(1.0d, 10.0d, 3.0d), false);
checkEquals(factory.makeCost(1.0d, 2.0d, 3.0d), factory.makeCost(1.0d, Double.POSITIVE_INFINITY, 3.0d), false);
checkEquals(factory.makeCost(1.0d, 2.0d, 3.0d), factory.makeCost(1.0d, 2.0d, 10.0d), false);
checkEquals(factory.makeCost(1.0d, 2.0d, 3.0d), factory.makeCost(1.0d, 2.0d, Double.POSITIVE_INFINITY), false);
} |
public static StepRuntimeState retrieveStepRuntimeState(
Map<String, Object> data, ObjectMapper objectMapper) {
Object runtimeSummary =
data.getOrDefault(Constants.STEP_RUNTIME_SUMMARY_FIELD, Collections.emptyMap());
if (runtimeSummary instanceof StepRuntimeSummary) {
return ((StepRuntimeSummary) runtimeSummary).getRuntimeState();
}
Object state = ((Map<String, Object>) runtimeSummary).getOrDefault(RUNTIME_STATE_FIELD, null);
if (state != null) {
return objectMapper.convertValue(state, StepRuntimeState.class);
}
return new StepRuntimeState();
} | @Test
public void testRetrieveStepRuntimeStateNotExists() {
StepRuntimeState expected = new StepRuntimeState();
Assert.assertEquals(
expected,
StepHelper.retrieveStepRuntimeState(
singletonMap(Constants.STEP_RUNTIME_SUMMARY_FIELD, Collections.emptyMap()), MAPPER));
} |
@SuppressWarnings("deprecation")
public static String toString(final Object obj) {
if (obj == null) {
return "null";
}
//region Convert simple types to String directly
if (obj instanceof CharSequence) {
return "\"" + obj + "\"";
}
if (obj instanceof Character) {
return "'" + obj + "'";
}
if (obj instanceof Date) {
Date date = (Date)obj;
long time = date.getTime();
String dateFormat;
if (date.getHours() == 0 && date.getMinutes() == 0 && date.getSeconds() == 0 && time % 1000 == 0) {
dateFormat = "yyyy-MM-dd";
} else if (time % (60 * 1000) == 0) {
dateFormat = "yyyy-MM-dd HH:mm";
} else if (time % 1000 == 0) {
dateFormat = "yyyy-MM-dd HH:mm:ss";
} else {
dateFormat = "yyyy-MM-dd HH:mm:ss.SSS";
}
return new SimpleDateFormat(dateFormat).format(obj);
}
if (obj instanceof Enum) {
return obj.getClass().getSimpleName() + "." + ((Enum)obj).name();
}
if (obj instanceof Class) {
return ReflectionUtil.classToString((Class<?>)obj);
}
if (obj instanceof Field) {
return ReflectionUtil.fieldToString((Field)obj);
}
if (obj instanceof Method) {
return ReflectionUtil.methodToString((Method)obj);
}
if (obj instanceof Annotation) {
return ReflectionUtil.annotationToString((Annotation)obj);
}
//endregion
//region Convert the Collection and Map
if (obj instanceof Collection) {
return CollectionUtils.toString((Collection<?>)obj);
}
if (obj.getClass().isArray()) {
return ArrayUtils.toString(obj);
}
if (obj instanceof Map) {
return CollectionUtils.toString((Map<?, ?>)obj);
}
//endregion
// JDK classes, loaded by the bootstrap class loader
if (obj.getClass().getClassLoader() == null) {
return obj.toString();
}
return CycleDependencyHandler.wrap(obj, o -> {
StringBuilder sb = new StringBuilder(32);
// handle the anonymous class
String classSimpleName;
if (obj.getClass().isAnonymousClass()) {
if (!obj.getClass().getSuperclass().equals(Object.class)) {
classSimpleName = obj.getClass().getSuperclass().getSimpleName();
} else {
classSimpleName = obj.getClass().getInterfaces()[0].getSimpleName();
}
// Append a '$' to distinguish it from an ordinary class
classSimpleName += "$";
} else {
classSimpleName = obj.getClass().getSimpleName();
}
sb.append(classSimpleName).append("(");
final int initialLength = sb.length();
// Gets all fields, excluding static or synthetic fields
Field[] fields = ReflectionUtil.getAllFields(obj.getClass());
for (Field field : fields) {
field.setAccessible(true);
if (sb.length() > initialLength) {
sb.append(", ");
}
sb.append(field.getName());
sb.append("=");
try {
Object f = field.get(obj);
if (f == obj) {
sb.append("(this ").append(f.getClass().getSimpleName()).append(")");
} else {
sb.append(toString(f));
}
} catch (Exception ignore) {
}
}
sb.append(")");
return sb.toString();
});
} | @Test
void testToStringAndCycleDependency() throws Exception {
//case: String
Assertions.assertEquals("\"aaa\"", StringUtils.toString("aaa"));
//case: CharSequence
Assertions.assertEquals("\"bbb\"", StringUtils.toString(new StringBuilder("bbb")));
//case: Number
Assertions.assertEquals("1", StringUtils.toString(1));
//case: Boolean
Assertions.assertEquals("true", StringUtils.toString(true));
//case: Character
Assertions.assertEquals("'2'", StringUtils.toString('2'));
//case: Charset
Assertions.assertEquals("UTF-8", StringUtils.toString(StandardCharsets.UTF_8));
//case: Thread
try {
Assertions.assertEquals("Thread[main,5,main]", StringUtils.toString(Thread.currentThread()));
} catch (AssertionFailedError e) {
// for java21 and above
Assertions.assertEquals("Thread[#" + Thread.currentThread().getId() + ",main,5,main]", StringUtils.toString(Thread.currentThread()));
}
//case: Date
Date date = new Date(2021 - 1900, 6 - 1, 15);
Assertions.assertEquals("2021-06-15", StringUtils.toString(date));
date.setTime(date.getTime() + 3600000);
Assertions.assertEquals("2021-06-15 01:00", StringUtils.toString(date));
date.setTime(date.getTime() + 60000);
Assertions.assertEquals("2021-06-15 01:01", StringUtils.toString(date));
date.setTime(date.getTime() + 50000);
Assertions.assertEquals("2021-06-15 01:01:50", StringUtils.toString(date));
date.setTime(date.getTime() + 12);
Assertions.assertEquals("2021-06-15 01:01:50.012", StringUtils.toString(date));
//case: Enum
Assertions.assertEquals("ObjectHolder.INSTANCE", StringUtils.toString(ObjectHolder.INSTANCE));
//case: Annotation
TestAnnotation annotation = TestClass.class.getAnnotation(TestAnnotation.class);
Assertions.assertEquals("@" + TestAnnotation.class.getSimpleName() + "(test=true)", StringUtils.toString(annotation));
//case: Class
Class<?> clazz = TestClass.class;
Assertions.assertEquals("Class<" + clazz.getSimpleName() + ">", StringUtils.toString(clazz));
//case: Method
Method method = clazz.getMethod("setObj", TestClass.class);
Assertions.assertEquals("Method<" + clazz.getSimpleName() + ".setObj(" + clazz.getSimpleName() + ")>", StringUtils.toString(method));
//case: Field
Field field = clazz.getDeclaredField("s");
Assertions.assertEquals("Field<" + clazz.getSimpleName() + ".(String s)>", StringUtils.toString(field));
//case: List, and cycle dependency
List<Object> list = new ArrayList<>();
list.add("xxx");
list.add(111);
list.add(list);
Assertions.assertEquals("[\"xxx\", 111, (this ArrayList)]", StringUtils.toString(list));
//case: String Array
String[] strArr = new String[2];
strArr[0] = "11";
strArr[1] = "22";
Assertions.assertEquals("[\"11\", \"22\"]", StringUtils.toString(strArr));
//case: int Array
int[] intArr = new int[2];
intArr[0] = 11;
intArr[1] = 22;
Assertions.assertEquals("[11, 22]", StringUtils.toString(intArr));
//case: Array, and cycle dependency
Object[] array = new Object[3];
array[0] = 1;
array[1] = '2';
array[2] = array;
Assertions.assertEquals("[1, '2', (this Object[])]", StringUtils.toString(array));
//case: Map, and cycle dependency
Map<Object, Object> map = new HashMap<>();
map.put("aaa", 111);
map.put("bbb", true);
map.put("self", map);
Assertions.assertEquals("{\"aaa\"->111, \"bbb\"->true, \"self\"->(this HashMap)}", StringUtils.toString(map));
Assertions.assertFalse(CycleDependencyHandler.isStarting());
//case: Map, and cycle dependency(deep case)
List<Object> list2 = new ArrayList<>();
list2.add(map);
list2.add('c');
map.put("list", list2);
Assertions.assertEquals("{\"aaa\"->111, \"bbb\"->true, \"self\"->(this HashMap), \"list\"->[(ref HashMap), 'c']}", StringUtils.toString(map));
Assertions.assertFalse(CycleDependencyHandler.isStarting());
//case: Object
Assertions.assertEquals("CycleDependency(s=\"a\", obj=null)", StringUtils.toString(CycleDependency.A));
//case: Object, and cycle dependency
CycleDependency obj = new CycleDependency("c");
obj.setObj(obj);
Assertions.assertEquals("CycleDependency(s=\"c\", obj=(this CycleDependency))", StringUtils.toString(obj));
//case: Object
CycleDependency obj2 = new CycleDependency("d");
obj.setObj(obj2);
Assertions.assertEquals("CycleDependency(s=\"c\", obj=CycleDependency(s=\"d\", obj=null))", StringUtils.toString(obj));
//case: Object, and cycle dependency
TestClass a = new TestClass();
a.setObj(a);
Assertions.assertEquals("TestClass(obj=(this TestClass), s=null)", StringUtils.toString(a));
//case: Object, and cycle dependency(deep case)
TestClass b = new TestClass();
TestClass c = new TestClass();
b.setObj(c);
c.setObj(a);
a.setObj(b);
Assertions.assertEquals("TestClass(obj=TestClass(obj=TestClass(obj=(ref TestClass), s=null), s=null), s=null)", StringUtils.toString(a));
//case: anonymous class from an interface
Object anonymousObj = new TestInterface() {
private String a = "aaa";
@Override
public void test() {
}
};
Assertions.assertEquals("TestInterface$(a=\"aaa\")", StringUtils.toString(anonymousObj));
//case: anonymous class from an abstract class
anonymousObj = new TestAbstractClass() {
private String a = "aaa";
@Override
public void test() {
}
};
Assertions.assertEquals("TestAbstractClass$(a=\"aaa\")", StringUtils.toString(anonymousObj));
//final confirm: do not triggered the `toString` and `hashCode` methods
Assertions.assertFalse(TestClass.hashCodeTriggered);
Assertions.assertFalse(TestClass.toStringTriggered);
Assertions.assertFalse(CycleDependency.hashCodeTriggered);
Assertions.assertFalse(CycleDependency.toStringTriggered);
} |
public static SinkConfig validateUpdate(SinkConfig existingConfig, SinkConfig newConfig) {
SinkConfig mergedConfig = clone(existingConfig);
if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
throw new IllegalArgumentException("Tenants differ");
}
if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
throw new IllegalArgumentException("Namespaces differ");
}
if (!existingConfig.getName().equals(newConfig.getName())) {
throw new IllegalArgumentException("Sink Names differ");
}
if (!StringUtils.isEmpty(newConfig.getClassName())) {
mergedConfig.setClassName(newConfig.getClassName());
}
if (!StringUtils.isEmpty(newConfig.getSourceSubscriptionName()) && !newConfig.getSourceSubscriptionName()
.equals(existingConfig.getSourceSubscriptionName())) {
throw new IllegalArgumentException("Subscription Name cannot be altered");
}
if (newConfig.getInputSpecs() == null) {
newConfig.setInputSpecs(new HashMap<>());
}
if (mergedConfig.getInputSpecs() == null) {
mergedConfig.setInputSpecs(new HashMap<>());
}
if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
mergedConfig.setLogTopic(newConfig.getLogTopic());
}
if (newConfig.getInputs() != null) {
newConfig.getInputs().forEach((topicName -> {
newConfig.getInputSpecs().putIfAbsent(topicName,
ConsumerConfig.builder().isRegexPattern(false).build());
}));
}
if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
ConsumerConfig.builder()
.isRegexPattern(true)
.build());
}
if (newConfig.getTopicToSerdeClassName() != null) {
newConfig.getTopicToSerdeClassName().forEach((topicName, serdeClassName) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.serdeClassName(serdeClassName)
.isRegexPattern(false)
.build());
});
}
if (newConfig.getTopicToSchemaType() != null) {
newConfig.getTopicToSchemaType().forEach((topicName, schemaClassname) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.schemaType(schemaClassname)
.isRegexPattern(false)
.build());
});
}
if (!newConfig.getInputSpecs().isEmpty()) {
SinkConfig finalMergedConfig = mergedConfig;
newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
if (!existingConfig.getInputSpecs().containsKey(topicName)) {
throw new IllegalArgumentException("Input Topics cannot be altered");
}
if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
throw new IllegalArgumentException(
"isRegexPattern for input topic " + topicName + " cannot be altered");
}
finalMergedConfig.getInputSpecs().put(topicName, consumerConfig);
});
}
if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
.equals(existingConfig.getProcessingGuarantees())) {
throw new IllegalArgumentException("Processing Guarantees cannot be altered");
}
if (newConfig.getConfigs() != null) {
mergedConfig.setConfigs(newConfig.getConfigs());
}
if (newConfig.getSecrets() != null) {
mergedConfig.setSecrets(newConfig.getSecrets());
}
if (newConfig.getParallelism() != null) {
mergedConfig.setParallelism(newConfig.getParallelism());
}
if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
.equals(existingConfig.getRetainOrdering())) {
throw new IllegalArgumentException("Retain Ordering cannot be altered");
}
if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
.equals(existingConfig.getRetainKeyOrdering())) {
throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
}
if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
throw new IllegalArgumentException("AutoAck cannot be altered");
}
if (newConfig.getResources() != null) {
mergedConfig
.setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
}
if (newConfig.getTimeoutMs() != null) {
mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
}
if (newConfig.getCleanupSubscription() != null) {
mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
}
if (!StringUtils.isEmpty(newConfig.getArchive())) {
mergedConfig.setArchive(newConfig.getArchive());
}
if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
}
if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
}
if (newConfig.getTransformFunction() != null) {
mergedConfig.setTransformFunction(newConfig.getTransformFunction());
}
if (newConfig.getTransformFunctionClassName() != null) {
mergedConfig.setTransformFunctionClassName(newConfig.getTransformFunctionClassName());
}
if (newConfig.getTransformFunctionConfig() != null) {
mergedConfig.setTransformFunctionConfig(newConfig.getTransformFunctionConfig());
}
return mergedConfig;
} | @Test
public void testMergeDifferentTransformFunctionConfig() {
SinkConfig sinkConfig = createSinkConfig();
String newFunctionConfig = "{\"new-key\": \"new-value\"}";
SinkConfig newSinkConfig = createUpdatedSinkConfig("transformFunctionConfig", newFunctionConfig);
SinkConfig mergedConfig = SinkConfigUtils.validateUpdate(sinkConfig, newSinkConfig);
assertEquals(
mergedConfig.getTransformFunctionConfig(),
newFunctionConfig
);
mergedConfig.setTransformFunctionConfig(sinkConfig.getTransformFunctionConfig());
assertEquals(
new Gson().toJson(sinkConfig),
new Gson().toJson(mergedConfig)
);
} |
@Override
public ColumnStatisticsObj aggregate(List<ColStatsObjWithSourceInfo> colStatsWithSourceInfo,
List<String> partNames, boolean areAllPartsFound) throws MetaException {
checkStatisticsList(colStatsWithSourceInfo);
ColumnStatisticsObj statsObj = null;
String colType;
String colName;
BooleanColumnStatsData aggregateData = null;
for (ColStatsObjWithSourceInfo csp : colStatsWithSourceInfo) {
ColumnStatisticsObj cso = csp.getColStatsObj();
if (statsObj == null) {
colName = cso.getColName();
colType = cso.getColType();
statsObj = ColumnStatsAggregatorFactory.newColumnStaticsObj(colName, colType,
cso.getStatsData().getSetField());
}
BooleanColumnStatsData newData = cso.getStatsData().getBooleanStats();
if (aggregateData == null) {
aggregateData = newData.deepCopy();
} else {
aggregateData.setNumTrues(aggregateData.getNumTrues() + newData.getNumTrues());
aggregateData.setNumFalses(aggregateData.getNumFalses() + newData.getNumFalses());
aggregateData.setNumNulls(aggregateData.getNumNulls() + newData.getNumNulls());
}
}
ColumnStatisticsData columnStatisticsData = initColumnStatisticsData();
columnStatisticsData.setBooleanStats(aggregateData);
statsObj.setStatsData(columnStatisticsData);
return statsObj;
} | @Test
public void testAggregateSingleStat() throws MetaException {
List<String> partitions = Collections.singletonList("part1");
ColumnStatisticsData data1 = new ColStatsBuilder<>(Boolean.class).numNulls(1).numFalses(2).numTrues(13).build();
List<ColStatsObjWithSourceInfo> statsList =
Collections.singletonList(createStatsWithInfo(data1, TABLE, COL, partitions.get(0)));
BooleanColumnStatsAggregator aggregator = new BooleanColumnStatsAggregator();
ColumnStatisticsObj computedStatsObj = aggregator.aggregate(statsList, partitions, true);
Assert.assertEquals(data1, computedStatsObj.getStatsData());
} |
public String computeIfAbsent(String key, Function<String, String> function) {
String value = cache.get(key);
// the value might legitimately be an empty string: empty values from the config center are cached too
if (value == null) {
// lock-free: the mapping function may be applied more than once under contention; the first value cached wins
cache.putIfAbsent(key, function.apply(key));
value = cache.get(key);
}
return value;
} | @Test
void test() {
ConfigurationCache configurationCache = new ConfigurationCache();
String value = configurationCache.computeIfAbsent("k1", k -> "v1");
Assertions.assertEquals(value, "v1");
value = configurationCache.computeIfAbsent("k1", k -> "v2");
Assertions.assertEquals(value, "v1");
} |
private int unconsumedBytes(Http2Stream stream) {
return flowController().unconsumedBytes(stream);
} | @Test
public void errorDuringDeliveryShouldReturnCorrectNumberOfBytes() throws Exception {
final ByteBuf data = dummyData();
final int padding = 10;
final AtomicInteger unprocessed = new AtomicInteger(data.readableBytes() + padding);
doAnswer(new Answer<Integer>() {
@Override
public Integer answer(InvocationOnMock in) throws Throwable {
return unprocessed.get();
}
}).when(localFlow).unconsumedBytes(eq(stream));
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock in) throws Throwable {
int delta = (Integer) in.getArguments()[1];
int newValue = unprocessed.addAndGet(-delta);
if (newValue < 0) {
throw new RuntimeException("Returned too many bytes");
}
return null;
}
}).when(localFlow).consumeBytes(eq(stream), anyInt());
// When the listener callback is called, process a few bytes and then throw.
doAnswer(new Answer<Integer>() {
@Override
public Integer answer(InvocationOnMock in) throws Throwable {
localFlow.consumeBytes(stream, 4);
throw new RuntimeException("Fake Exception");
}
}).when(listener).onDataRead(eq(ctx), eq(STREAM_ID), any(ByteBuf.class), eq(10), eq(true));
try {
assertThrows(RuntimeException.class, new Executable() {
@Override
public void execute() throws Throwable {
decode().onDataRead(ctx, STREAM_ID, data, padding, true);
}
});
verify(localFlow)
.receiveFlowControlledFrame(eq(stream), eq(data), eq(padding), eq(true));
verify(listener).onDataRead(eq(ctx), eq(STREAM_ID), eq(data), eq(padding), eq(true));
assertEquals(0, localFlow.unconsumedBytes(stream));
} finally {
data.release();
}
} |
public String toJSON() {
final HashMap<String, Object> artifactStoreAsHashMap = new HashMap<>();
artifactStoreAsHashMap.put("id", getId());
artifactStoreAsHashMap.put("storeId", getStoreId());
artifactStoreAsHashMap.put("configuration", this.getConfiguration().getConfigurationAsMap(true));
return new Gson().toJson(artifactStoreAsHashMap);
} | @Test
public void shouldSerializeToJson() {
final PluggableArtifactConfig config = new PluggableArtifactConfig("id1", "Store-ID", create("Foo", false, "Bar"));
final String actual = config.toJSON();
assertThat(actual, is("{\"configuration\":{\"Foo\":\"Bar\"},\"id\":\"id1\",\"storeId\":\"Store-ID\"}"));
} |
@Override
public Predicate createPredicate(Expression source, String expression, Object[] properties) {
return doCreateJsonPathExpression(source, expression, properties, true);
} | @Test
public void testPredicate() {
// Test books.json file
Exchange exchange = new DefaultExchange(context);
exchange.getIn().setBody(new File("src/test/resources/books.json"));
Language lan = context.resolveLanguage("jsonpath");
Predicate pre = lan.createPredicate("$.store.book[?(@.price < 10)]");
boolean cheap = pre.matches(exchange);
assertTrue(cheap, "Should have cheap books");
pre = lan.createPredicate("$.store.book[?(@.price > 30)]");
boolean expensive = pre.matches(exchange);
assertFalse(expensive, "Should not have expensive books");
} |
@Override
public void activateClusterStateVersion(int clusterStateVersion, NodeInfo node, Waiter<ActivateClusterStateVersionRequest> externalWaiter) {
var waiter = new RPCActivateClusterStateVersionWaiter(externalWaiter);
Target connection = getConnection(node);
if ( ! connection.isValid()) {
log.log(Level.FINE, () -> String.format("Connection to '%s' could not be created.", node.getRpcAddress()));
return;
}
var req = new Request(ACTIVATE_CLUSTER_STATE_VERSION_RPC_METHOD_NAME);
req.parameters().add(new Int32Value(clusterStateVersion));
log.log(Level.FINE, () -> String.format("Sending '%s' RPC to %s for state version %d",
req.methodName(), node.getRpcAddress(), clusterStateVersion));
var activationRequest = new RPCActivateClusterStateVersionRequest(node, req, clusterStateVersion);
waiter.setRequest(activationRequest);
connection.invokeAsync(req, Duration.ofSeconds(60), waiter);
node.setClusterStateVersionActivationSent(clusterStateVersion);
} | @Test
void activateClusterStateVersion_sends_version_activation_rpc() {
var f = new Fixture<ActivateClusterStateVersionRequest>();
var cf = ClusterFixture.forFlatCluster(3).bringEntireClusterUp().assignDummyRpcAddresses();
f.communicator.activateClusterStateVersion(12345, cf.cluster().getNodeInfo(Node.ofDistributor(1)), f.mockWaiter);
Request req = f.receivedRequest.get();
assertNotNull(req);
assertEquals(req.methodName(), RPCCommunicator.ACTIVATE_CLUSTER_STATE_VERSION_RPC_METHOD_NAME);
assertTrue(req.parameters().satisfies("i")); // <cluster state version>
assertEquals(req.parameters().get(0).asInt32(), 12345);
} |
public PutMessageResult putHalfMessage(MessageExtBrokerInner messageInner) {
return store.putMessage(parseHalfMessageInner(messageInner));
} | @Test
public void testPutHalfMessage() {
when(messageStore.putMessage(any(MessageExtBrokerInner.class)))
.thenReturn(new PutMessageResult(PutMessageStatus.PUT_OK, new AppendMessageResult(AppendMessageStatus.PUT_OK)));
PutMessageResult result = transactionBridge.putHalfMessage(createMessageBrokerInner());
assertThat(result.getPutMessageStatus()).isEqualTo(PutMessageStatus.PUT_OK);
} |
@CheckForNull
public String getDecoratedSourceAsHtml(@Nullable String sourceLine, @Nullable String highlighting, @Nullable String symbols) {
if (sourceLine == null) {
return null;
}
DecorationDataHolder decorationDataHolder = new DecorationDataHolder();
if (StringUtils.isNotBlank(highlighting)) {
decorationDataHolder.loadSyntaxHighlightingData(highlighting);
}
if (StringUtils.isNotBlank(symbols)) {
decorationDataHolder.loadLineSymbolReferences(symbols);
}
HtmlTextDecorator textDecorator = new HtmlTextDecorator();
List<String> decoratedSource = textDecorator.decorateTextWithHtml(sourceLine, decorationDataHolder, 1, 1);
if (decoratedSource == null) {
return null;
} else {
if (decoratedSource.isEmpty()) {
return "";
} else {
return decoratedSource.get(0);
}
}
} | @Test
public void should_handle_highlighting_too_long() {
String sourceLine = "abc";
String highlighting = "0,5,c";
String symbols = "";
assertThat(sourceDecorator.getDecoratedSourceAsHtml(sourceLine, highlighting, symbols)).isEqualTo("<span class=\"c\">abc</span>");
} |
public static UTypeVar create(String name, UType lowerBound, UType upperBound) {
return new UTypeVar(name, lowerBound, upperBound);
} | @Test
public void serialization() {
UType nullType = UPrimitiveType.create(TypeKind.NULL);
UType charSequenceType = UClassType.create("java.lang.CharSequence", ImmutableList.<UType>of());
SerializableTester.reserializeAndAssert(UTypeVar.create("T", nullType, charSequenceType));
} |
@Override
public void loadConfiguration(NacosLoggingProperties loggingProperties) {
Log4j2NacosLoggingPropertiesHolder.setProperties(loggingProperties);
String location = loggingProperties.getLocation();
loadConfiguration(location);
} | @Test
void testLoadConfiguration() {
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
Configuration contextConfiguration = loggerContext.getConfiguration();
assertEquals(0, contextConfiguration.getLoggers().size());
log4J2NacosLoggingAdapter.loadConfiguration(nacosLoggingProperties);
//then
verify(propertyChangeListener).propertyChange(any());
loggerContext = (LoggerContext) LogManager.getContext(false);
contextConfiguration = loggerContext.getConfiguration();
Map<String, LoggerConfig> nacosClientLoggers = contextConfiguration.getLoggers();
assertEquals(6, nacosClientLoggers.size());
for (Map.Entry<String, LoggerConfig> loggerEntry : nacosClientLoggers.entrySet()) {
String loggerName = loggerEntry.getKey();
assertTrue(loggerName.startsWith(NACOS_LOGGER_PREFIX));
}
} |
public static void insert(
final UnsafeBuffer termBuffer, final int termOffset, final UnsafeBuffer packet, final int length)
{
if (0 == termBuffer.getInt(termOffset))
{
termBuffer.putBytes(termOffset + HEADER_LENGTH, packet, HEADER_LENGTH, length - HEADER_LENGTH);
termBuffer.putLong(termOffset + 24, packet.getLong(24));
termBuffer.putLong(termOffset + 16, packet.getLong(16));
termBuffer.putLong(termOffset + 8, packet.getLong(8));
termBuffer.putLongOrdered(termOffset, packet.getLong(0));
}
} | @Test
void shouldInsertIntoEmptyBuffer()
{
final UnsafeBuffer packet = new UnsafeBuffer(ByteBuffer.allocate(256));
final int termOffset = 0;
final int srcOffset = 0;
final int length = 256;
packet.putInt(srcOffset, length, LITTLE_ENDIAN);
TermRebuilder.insert(termBuffer, termOffset, packet, length);
final InOrder inOrder = inOrder(termBuffer);
inOrder.verify(termBuffer).putBytes(
termOffset + HEADER_LENGTH, packet, srcOffset + HEADER_LENGTH, length - HEADER_LENGTH);
inOrder.verify(termBuffer).putLong(termOffset + 24, packet.getLong(24));
inOrder.verify(termBuffer).putLong(termOffset + 16, packet.getLong(16));
inOrder.verify(termBuffer).putLong(termOffset + 8, packet.getLong(8));
inOrder.verify(termBuffer).putLongOrdered(termOffset, packet.getLong(0));
} |
public Jwt convertJwtRecordToJwt(JwtRecord jwtRecord) {
return new Jwt(
jwtRecord.tokenValue(),
jwtRecord.issuedAt(),
jwtRecord.expiresAt(),
jwtRecord.headers(),
jwtRecord.claims()
);
} | @Test
void givenJwtRecord_whenConvertJwtRecordToJwt_thenCorrectlyConverted() {
// Given
JwtRecord jwtRecord = new JwtRecord(
"tokenValue",
Map.of("alg", "RS256", "typ", "JWT"),
Map.of("sub", "user123", "iss", "issuer", "aud", "audience"),
Instant.now(),
Instant.now().plusSeconds(3600),
"user123",
"issuer",
"audience"
);
// When
Jwt jwt = JwtRecordConverter.convertJwtRecordToJwt(jwtRecord);
// Then
assertThat(jwt).isNotNull();
assertThat(jwt.getTokenValue()).isEqualTo("tokenValue");
assertThat(jwt.getHeaders()).isEqualTo(jwtRecord.headers());
assertThat(jwt.getClaims()).isEqualTo(jwtRecord.claims());
assertThat(jwt.getIssuedAt()).isEqualTo(jwtRecord.issuedAt());
assertThat(jwt.getExpiresAt()).isEqualTo(jwtRecord.expiresAt());
assertThat(jwt.getClaimAsString("sub")).isEqualTo("user123");
assertThat(jwt.getClaimAsString("iss")).isEqualTo("issuer");
assertThat(jwt.getAudience()).contains("audience");
} |
@Override
public String toString() {
StringBuilder sb = new StringBuilder("{");
addField(sb, "\"userUuid\": ", this.userUuid, true);
addField(sb, "\"userLogin\": ", this.userLogin, true);
addField(sb, "\"name\": ", this.name, true);
addField(sb, "\"email\": ", this.email, true);
addField(sb, "\"isActive\": ", Objects.toString(this.isActive, ""), false);
addField(sb, "\"scmAccounts\": ", String.join(",", scmAccounts), true);
addField(sb, "\"externalId\": ", this.externalId, true);
addField(sb, "\"externalLogin\": ", this.externalLogin, true);
addField(sb, "\"externalIdentityProvider\": ", this.externalIdentityProvider, true);
addField(sb, "\"local\": ", Objects.toString(this.local, ""), false);
addField(sb, "\"lastConnectionDate\": ", this.lastConnectionDate == null ?
"" : DateUtils.formatDateTime(this.lastConnectionDate), true);
endString(sb);
return sb.toString();
} | @Test
void toString_givenUserUuidAndUserLogin_returnValidJSON() {
UserNewValue userNewValue = new UserNewValue("userUuid", "userLogin");
String jsonString = userNewValue.toString();
assertValidJSON(jsonString);
} |
@VisibleForTesting
MaestroWorkflow getMaestroWorkflow(String workflowId) {
return withRetryableQuery(
GET_MAESTRO_WORKFLOW,
stmt -> stmt.setString(1, workflowId),
result -> {
if (result.next()) {
return maestroWorkflowFromResult(result);
}
return null;
});
} | @Test
public void testGetMaestroWorkflow() throws Exception {
WorkflowDefinition wfd = loadWorkflow(TEST_WORKFLOW_ID1);
assertEquals(TEST_WORKFLOW_ID1, wfd.getWorkflow().getId());
WorkflowDefinition definition =
workflowDao.addWorkflowDefinition(wfd, wfd.getPropertiesSnapshot().extractProperties());
assertNotNull(wfd.getInternalId());
assertEquals(wfd, definition);
MaestroWorkflow maestroWorkflow = workflowDao.getMaestroWorkflow(wfd.getWorkflow().getId());
assertEquals(1L, maestroWorkflow.getActiveVersionId().longValue());
assertNotNull(maestroWorkflow.getActivateTime());
assertNotNull(maestroWorkflow.getActivatedBy());
assertNotNull(maestroWorkflow.getModifyTime());
assertNull("metadata should be unset", maestroWorkflow.getMetadata());
assertNull("definition should be unset", maestroWorkflow.getDefinition());
assertEquals(
wfd.getPropertiesSnapshot().getOwner(), maestroWorkflow.getPropertiesSnapshot().getOwner());
assertEquals(
wfd.getPropertiesSnapshot().getAccessControl(),
maestroWorkflow.getPropertiesSnapshot().getAccessControl());
assertEquals(
wfd.getPropertiesSnapshot().getRunStrategy(),
maestroWorkflow.getPropertiesSnapshot().getRunStrategy());
assertEquals(
wfd.getPropertiesSnapshot().getStepConcurrency(),
maestroWorkflow.getPropertiesSnapshot().getStepConcurrency());
assertEquals(
wfd.getPropertiesSnapshot().getAlerting(),
maestroWorkflow.getPropertiesSnapshot().getAlerting());
assertEquals(1L, maestroWorkflow.getLatestVersionId().longValue());
} |
public static int findMessage(String expectedMessage) {
int count = 0;
List<Log> logList = DubboAppender.logList;
for (int i = 0; i < logList.size(); i++) {
String logMessage = logList.get(i).getLogMessage();
if (logMessage.contains(expectedMessage)) {
count++;
}
}
return count;
} | @Test
void testFindMessage2() {
Log log = mock(Log.class);
DubboAppender.logList.add(log);
when(log.getLogMessage()).thenReturn("message");
when(log.getLogLevel()).thenReturn(Level.ERROR);
log = mock(Log.class);
DubboAppender.logList.add(log);
when(log.getLogMessage()).thenReturn("message");
when(log.getLogLevel()).thenReturn(Level.INFO);
assertThat(LogUtil.findMessage(Level.ERROR, "message"), equalTo(1));
} |
public void moveBullet(float offset) {
var currentPosition = bullet.getPosition();
bullet.setPosition(currentPosition + offset);
} | @Test
void testMoveBullet() {
controller.moveBullet(1.5f);
assertEquals(1.5f, controller.bullet.getPosition(), 0);
} |
public double[][] test(DataFrame data) {
DataFrame x = formula.x(data);
int n = x.nrow();
int ntrees = trees.length;
double[][] prediction = new double[ntrees][n];
for (int j = 0; j < n; j++) {
Tuple xj = x.get(j);
double base = b;
for (int i = 0; i < ntrees; i++) {
base += shrinkage * trees[i].predict(xj);
prediction[i][j] = base;
}
}
return prediction;
} | @Test
public void testBank32nhQuantile() {
test(Loss.quantile(0.5), "bank32nh", Bank32nh.formula, Bank32nh.data, 0.0909);
} |
public Long combiInsert( RowMetaInterface rowMeta, Object[] row, Long val_key, Long val_crc )
throws KettleDatabaseException {
String debug = "Combination insert";
DatabaseMeta databaseMeta = meta.getDatabaseMeta();
try {
if ( data.prepStatementInsert == null ) { // first time: construct prepared statement
debug = "First: construct prepared statement";
data.insertRowMeta = new RowMeta();
/*
* Construct the SQL statement...
*
* INSERT INTO d_test(keyfield, [crcfield,] keylookup[]) VALUES(val_key, [val_crc], row values with keynrs[]) ;
*/
String sql = "";
sql += "INSERT INTO " + data.schemaTable + ( "( " );
boolean comma = false;
if ( !isAutoIncrement() ) {
// NO AUTOINCREMENT
sql += databaseMeta.quoteField( meta.getTechnicalKeyField() );
data.insertRowMeta.addValueMeta( new ValueMetaInteger( meta.getTechnicalKeyField() ) );
comma = true;
} else if ( databaseMeta.needsPlaceHolder() ) {
sql += "0"; // placeholder on informix! Will be replaced in table by real autoinc value.
data.insertRowMeta.addValueMeta( new ValueMetaInteger( meta.getTechnicalKeyField() ) );
comma = true;
}
if ( meta.useHash() ) {
if ( comma ) {
sql += ", ";
}
sql += databaseMeta.quoteField( meta.getHashField() );
data.insertRowMeta.addValueMeta( new ValueMetaInteger( meta.getHashField() ) );
comma = true;
}
if ( !Utils.isEmpty( meta.getLastUpdateField() ) ) {
if ( comma ) {
sql += ", ";
}
sql += databaseMeta.quoteField( meta.getLastUpdateField() );
data.insertRowMeta
.addValueMeta( new ValueMetaDate( meta.getLastUpdateField() ) );
comma = true;
}
for ( int i = 0; i < meta.getKeyLookup().length; i++ ) {
if ( comma ) {
sql += ", ";
}
sql += databaseMeta.quoteField( meta.getKeyLookup()[ i ] );
data.insertRowMeta.addValueMeta( rowMeta.getValueMeta( data.keynrs[ i ] ) );
comma = true;
}
sql += ") VALUES (";
comma = false;
if ( !isAutoIncrement() ) {
sql += '?';
comma = true;
}
if ( meta.useHash() ) {
if ( comma ) {
sql += ',';
}
sql += '?';
comma = true;
}
if ( !Utils.isEmpty( meta.getLastUpdateField() ) ) {
if ( comma ) {
sql += ',';
}
sql += '?';
comma = true;
}
for ( int i = 0; i < meta.getKeyLookup().length; i++ ) {
if ( comma ) {
sql += ',';
} else {
comma = true;
}
sql += '?';
}
sql += " )";
String sqlStatement = sql;
try {
debug = "First: prepare statement";
if ( isAutoIncrement() && databaseMeta.supportsAutoGeneratedKeys() ) {
logDetailed( "SQL with return keys: " + sqlStatement );
data.prepStatementInsert =
data.db.getConnection().prepareStatement(
databaseMeta.stripCR( sqlStatement ), Statement.RETURN_GENERATED_KEYS );
} else {
logDetailed( "SQL without return keys: " + sqlStatement );
data.prepStatementInsert =
data.db.getConnection().prepareStatement( databaseMeta.stripCR( sqlStatement ) );
}
} catch ( SQLException ex ) {
throw new KettleDatabaseException( "Unable to prepare combi insert statement : "
+ Const.CR + sqlStatement, ex );
} catch ( Exception ex ) {
throw new KettleDatabaseException( "Unable to prepare combi insert statement : "
+ Const.CR + sqlStatement, ex );
}
}
debug = "Create new insert row rins";
Object[] insertRow = new Object[ data.insertRowMeta.size() ];
int insertIndex = 0;
if ( !isAutoIncrement() ) {
insertRow[ insertIndex ] = val_key;
insertIndex++;
}
if ( meta.useHash() ) {
insertRow[ insertIndex ] = val_crc;
insertIndex++;
}
if ( !Utils.isEmpty( meta.getLastUpdateField() ) ) {
insertRow[ insertIndex ] = new Date();
insertIndex++;
}
for ( int i = 0; i < data.keynrs.length; i++ ) {
insertRow[ insertIndex ] = row[ data.keynrs[ i ] ];
insertIndex++;
}
if ( isRowLevel() ) {
logRowlevel( "rins=" + data.insertRowMeta.getString( insertRow ) );
}
debug = "Set values on insert";
// INSERT NEW VALUE!
data.db.setValues( data.insertRowMeta, insertRow, data.prepStatementInsert );
debug = "Insert row";
data.db.insertRow( data.prepStatementInsert );
debug = "Retrieve key";
if ( isAutoIncrement() && databaseMeta.supportsAutoGeneratedKeys() ) {
ResultSet keys = null;
try {
keys = data.prepStatementInsert.getGeneratedKeys(); // 1 key
if ( keys.next() ) {
            val_key = Long.valueOf( keys.getLong( 1 ) );
} else {
throw new KettleDatabaseException( "Unable to retrieve auto-increment of combi insert key : "
+ meta.getTechnicalKeyField() + ", no fields in resultset" );
}
} catch ( SQLException ex ) {
throw new KettleDatabaseException( "Unable to retrieve auto-increment of combi insert key : "
+ meta.getTechnicalKeyField(), ex );
} finally {
try {
if ( keys != null ) {
keys.close();
}
} catch ( SQLException ex ) {
throw new KettleDatabaseException( "Unable to retrieve auto-increment of combi insert key : "
+ meta.getTechnicalKeyField(), ex );
}
}
}
} catch ( Exception e ) {
logError( Const.getStackTracker( e ) );
throw new KettleDatabaseException( "Unexpected error in combination insert in part ["
+ debug + "] : " + e.toString(), e );
}
return val_key;
} | @Test
public void testCombiInsert() throws Exception {
combinationLookup.combiInsert( any( RowMetaInterface.class ), any( Object[].class ), anyLong(), anyLong() );
verify( databaseMeta, times( 2 ) ).supportsAutoGeneratedKeys();
} |
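The generated-key branch above follows the standard JDBC pattern: prepare the statement with Statement.RETURN_GENERATED_KEYS, execute, then read the key back from getGeneratedKeys(). A minimal sketch of that pattern in isolation; it assumes a live Connection, and the table and column names are illustrative, not taken from the Kettle step:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class GeneratedKeySketch {
    // Inserts one row and returns the auto-generated technical key.
    static long insertAndGetKey(Connection conn) throws SQLException {
        String sql = "INSERT INTO d_test(keylookup) VALUES (?)"; // illustrative table/column
        try (PreparedStatement ps = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
            ps.setString(1, "value");
            ps.executeUpdate();
            try (ResultSet keys = ps.getGeneratedKeys()) {
                if (keys.next()) {
                    return keys.getLong(1);
                }
                throw new SQLException("no generated key returned");
            }
        }
    }
}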
@Override
public JType apply(String nodeName, JsonNode node, JsonNode parent, JClassContainer jClassContainer, Schema schema) {
String propertyTypeName = getTypeName(node);
JType type;
if (propertyTypeName.equals("object") || node.has("properties") && node.path("properties").size() > 0) {
type = ruleFactory.getObjectRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
} else if (node.has("existingJavaType")) {
String typeName = node.path("existingJavaType").asText();
if (isPrimitive(typeName, jClassContainer.owner())) {
type = primitiveType(typeName, jClassContainer.owner());
} else {
type = resolveType(jClassContainer, typeName);
}
} else if (propertyTypeName.equals("string")) {
type = jClassContainer.owner().ref(String.class);
} else if (propertyTypeName.equals("number")) {
type = getNumberType(jClassContainer.owner(), ruleFactory.getGenerationConfig());
} else if (propertyTypeName.equals("integer")) {
type = getIntegerType(jClassContainer.owner(), node, ruleFactory.getGenerationConfig());
} else if (propertyTypeName.equals("boolean")) {
type = unboxIfNecessary(jClassContainer.owner().ref(Boolean.class), ruleFactory.getGenerationConfig());
} else if (propertyTypeName.equals("array")) {
type = ruleFactory.getArrayRule().apply(nodeName, node, parent, jClassContainer.getPackage(), schema);
} else {
type = jClassContainer.owner().ref(Object.class);
}
if (!node.has("javaType") && !node.has("existingJavaType") && node.has("format")) {
type = ruleFactory.getFormatRule().apply(nodeName, node.get("format"), node, type, schema);
} else if (!node.has("javaType") && !node.has("existingJavaType") && propertyTypeName.equals("string") && node.has("media")) {
type = ruleFactory.getMediaRule().apply(nodeName, node.get("media"), node, type, schema);
}
return type;
} | @Test
public void applyGeneratesNumberUsingJavaTypeFloatPrimitive() {
JPackage jpackage = new JCodeModel()._package(getClass().getPackage().getName());
ObjectNode objectNode = new ObjectMapper().createObjectNode();
objectNode.put("type", "number");
objectNode.put("existingJavaType", "float");
when(config.isUsePrimitives()).thenReturn(false);
JType result = rule.apply("fooBar", objectNode, null, jpackage, null);
assertThat(result.fullName(), is("float"));
} |
public static Map<String, Object> convertValues(final Map<String, Object> data, final ConfigurationRequest configurationRequest) throws ValidationException {
final Map<String, Object> configuration = Maps.newHashMapWithExpectedSize(data.size());
final Map<String, Map<String, Object>> configurationFields = configurationRequest.asList();
for (final Map.Entry<String, Object> entry : data.entrySet()) {
final String field = entry.getKey();
final Map<String, Object> fieldDescription = configurationFields.get(field);
if (fieldDescription == null || fieldDescription.isEmpty()) {
throw new ValidationException(field, "Unknown configuration field description for field \"" + field + "\"");
}
final String type = (String) fieldDescription.get("type");
// Decide what to cast to. (string, bool, number)
Object value;
switch (type) {
case "text":
case "dropdown":
value = entry.getValue() == null ? "" : String.valueOf(entry.getValue());
break;
case "number":
try {
value = Integer.parseInt(String.valueOf(entry.getValue()));
} catch (NumberFormatException e) {
// If a numeric field is optional and not provided, use null as value
if ("true".equals(String.valueOf(fieldDescription.get("is_optional")))) {
value = null;
} else {
throw new ValidationException(field, e.getMessage());
}
}
break;
case "boolean":
value = "true".equalsIgnoreCase(String.valueOf(entry.getValue()));
break;
case "list":
final List<?> valueList = entry.getValue() == null ? Collections.emptyList() : (List<?>) entry.getValue();
value = valueList.stream()
                        .filter(o -> o instanceof String)
.map(String::valueOf)
.collect(Collectors.toList());
break;
default:
throw new ValidationException(field, "Unknown configuration field type \"" + type + "\"");
}
configuration.put(field, value);
}
return configuration;
} | @Test
public void testConvertValues() throws Exception {
final ImmutableMap<String, String> dropdownChoices = ImmutableMap.of(
"a", "1",
"b", "2");
final ConfigurationRequest cr = new ConfigurationRequest();
cr.addField(new TextField("string", "string", "default", ""));
cr.addField(new TextField("empty-string", "empty", "", ""));
cr.addField(new TextField("null-string", "null", null, ""));
cr.addField(new TextField("non-string", "non-string", null, ""));
cr.addField(new NumberField("number", "number", 42, ""));
cr.addField(new BooleanField("boolean-true", "true", false, ""));
cr.addField(new BooleanField("boolean-false", "false", false, ""));
cr.addField(new DropdownField("dropdown", "dropdown", "a", dropdownChoices, "", ConfigurationField.Optional.NOT_OPTIONAL));
cr.addField(new DropdownField("dropdown-empty", "dropdown-empty", "", dropdownChoices, "", ConfigurationField.Optional.NOT_OPTIONAL));
cr.addField(new DropdownField("dropdown-null", "dropdown-null", "", dropdownChoices, "", ConfigurationField.Optional.NOT_OPTIONAL));
final UUID uuid = UUID.randomUUID();
final Map<String, Object> data = new HashMap<>();
data.put("string", "foo");
data.put("empty-string", "");
data.put("null-string", null);
data.put("non-string", uuid);
data.put("number", "5");
data.put("boolean-true", "true");
data.put("boolean-false", "false");
data.put("dropdown", "a");
data.put("dropdown-empty", "");
data.put("dropdown-null", null);
final Map<String, Object> config = ConfigurationMapConverter.convertValues(data, cr);
assertThat(config).contains(
entry("string", "foo"),
entry("empty-string", ""),
entry("null-string", ""),
entry("non-string", uuid.toString()),
entry("number", 5),
entry("boolean-true", true),
entry("boolean-false", false),
entry("dropdown", "a"),
entry("dropdown-empty", ""),
entry("dropdown-null", "")
);
} |
public static Criterion matchOduSignalId(OduSignalId oduSignalId) {
return new OduSignalIdCriterion(oduSignalId);
} | @Test
public void testMatchOduSignalIdMethod() {
OduSignalId odu = oduSignalId(1, 80, new byte[]{2, 1, 1, 3, 1, 1, 3, 1, 1, 3});
Criterion matchoduSignalId = Criteria.matchOduSignalId(odu);
OduSignalIdCriterion oduSignalIdCriterion =
checkAndConvert(matchoduSignalId,
Criterion.Type.ODU_SIGID,
OduSignalIdCriterion.class);
assertThat(oduSignalIdCriterion.oduSignalId(), is(equalTo(odu)));
} |
@Override
public boolean find(Path file, final ListProgressListener listener) throws BackgroundException {
if(file.isRoot()) {
return true;
}
try {
try {
if(containerService.isContainer(file)) {
final CloudBlobContainer container = session.getClient().getContainerReference(containerService.getContainer(file).getName());
return container.exists(null, null, context);
}
if(file.isFile() || file.isPlaceholder()) {
try {
final CloudBlob blob = session.getClient().getContainerReference(containerService.getContainer(file).getName())
.getBlobReferenceFromServer(containerService.getKey(file));
return blob.exists(null, null, context);
}
catch(StorageException e) {
switch(e.getHttpStatusCode()) {
case HttpStatus.SC_NOT_FOUND:
if(file.isPlaceholder()) {
// Ignore failure and look for common prefix
break;
}
default:
throw e;
}
}
}
if(log.isDebugEnabled()) {
log.debug(String.format("Search for common prefix %s", file));
}
// Check for common prefix
try {
new AzureObjectListService(session, context).list(file, new CancellingListProgressListener());
return true;
}
catch(ListCanceledException l) {
// Found common prefix
return true;
}
}
catch(StorageException e) {
throw new AzureExceptionMappingService().map("Failure to read attributes of {0}", e, file);
}
catch(URISyntaxException e) {
return false;
}
}
catch(NotfoundException e) {
return false;
}
} | @Test
public void testFindNotFound() throws Exception {
assertFalse(new AzureFindFeature(session, null).find(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file))));
} |
@Override
public TapiNodeRef getNodeRef(TapiNodeRef nodeRef) throws NoSuchElementException {
updateCache();
TapiNodeRef ret = null;
try {
ret = tapiNodeRefList.stream()
.filter(nodeRef::equals)
.findFirst().get();
} catch (NoSuchElementException e) {
log.error("Node not found of {}", nodeRef);
throw e;
}
return ret;
} | @Test(expected = NoSuchElementException.class)
public void testGetNodeRefWhenEmpty() {
tapiResolver.getNodeRef(deviceId);
} |
public static boolean isQuorumCandidate(final ClusterMember[] clusterMembers, final ClusterMember candidate)
{
int possibleVotes = 0;
for (final ClusterMember member : clusterMembers)
{
if (NULL_POSITION == member.logPosition || compareLog(candidate, member) < 0)
{
continue;
}
++possibleVotes;
}
return possibleVotes >= ClusterMember.quorumThreshold(clusterMembers.length);
} | @Test
void isQuorumCandidateReturnFalseWhenQuorumIsNotReached()
{
final ClusterMember candidate = newMember(2, 10, 800);
final ClusterMember[] members = new ClusterMember[]
{
newMember(10, 2, 100),
newMember(20, 18, 6),
newMember(30, 10, 800),
newMember(40, 19, 800),
newMember(50, 10, 1000),
};
assertFalse(isQuorumCandidate(members, candidate));
} |
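The threshold used above is the usual majority rule: a candidate qualifies only if at least a quorum of members have a log position that is not ahead of its own. A standalone sketch of the arithmetic, assuming quorumThreshold(n) = n / 2 + 1 as in Aeron's ClusterMember:

public class QuorumSketch {
    static int quorumThreshold(int memberCount) {
        return memberCount / 2 + 1;
    }

    public static void main(String[] args) {
        System.out.println(quorumThreshold(5)); // 3: needs 3 of 5 possible votes
        System.out.println(quorumThreshold(4)); // 3: even sizes still need a strict majority
    }
}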
public static <@NonNull E> CompletableSource resolveScopeFromLifecycle(
final LifecycleScopeProvider<E> provider) throws OutsideScopeException {
return resolveScopeFromLifecycle(provider, true);
} | @Test
public void lifecycleCheckEnd_shouldFailIfEndedWithHandler() {
TestLifecycleScopeProvider lifecycle = TestLifecycleScopeProvider.createInitial(STOPPED);
AutoDisposePlugins.setOutsideScopeHandler(
e -> {
// Swallow the exception.
});
testSource(resolveScopeFromLifecycle(lifecycle, true)).assertComplete();
} |
@VisibleForTesting
protected static String getInsertStatement(
ObjectIdentifier materializedTableIdentifier,
String definitionQuery,
Map<String, String> dynamicOptions) {
return String.format(
"INSERT INTO %s\n%s",
generateTableWithDynamicOptions(materializedTableIdentifier, dynamicOptions),
definitionQuery);
} | @Test
void testGenerateInsertStatementWithDynamicOptions() {
ObjectIdentifier materializedTableIdentifier =
ObjectIdentifier.of("catalog", "database", "table");
String definitionQuery = "SELECT * FROM source_table";
Map<String, String> dynamicOptions = new HashMap<>();
dynamicOptions.put("option1", "value1");
dynamicOptions.put("option2", "value2");
String expectedStatement =
"INSERT INTO `catalog`.`database`.`table` "
+ "/*+ OPTIONS('option1'='value1', 'option2'='value2') */\n"
+ "SELECT * FROM source_table";
String actualStatement =
MaterializedTableManager.getInsertStatement(
materializedTableIdentifier, definitionQuery, dynamicOptions);
assertThat(actualStatement).isEqualTo(expectedStatement);
} |
public void asyncSend(String destination, Message<?> message, SendCallback sendCallback, long timeout,
int delayLevel) {
if (Objects.isNull(message) || Objects.isNull(message.getPayload())) {
log.error("asyncSend failed. destination:{}, message is null ", destination);
throw new IllegalArgumentException("`message` and `message.payload` cannot be null");
}
try {
org.apache.rocketmq.common.message.Message rocketMsg = this.createRocketMqMessage(destination, message);
if (delayLevel > 0) {
rocketMsg.setDelayTimeLevel(delayLevel);
}
producer.send(rocketMsg, sendCallback, timeout);
} catch (Exception e) {
log.info("asyncSend failed. destination:{}, message:{} ", destination, message);
throw new MessagingException(e.getMessage(), e);
}
} | @Test
public void testAsyncBatchSendMessage() {
List<Message> messages = new ArrayList<>();
for (int i = 0; i < 3; i++) {
messages.add(MessageBuilder.withPayload("payload" + i).build());
}
try {
rocketMQTemplate.asyncSend(topic, messages, new SendCallback() {
@Override public void onSuccess(SendResult sendResult) {
}
@Override public void onException(Throwable e) {
}
});
} catch (MessagingException e) {
assertThat(e).hasMessageContaining("org.apache.rocketmq.remoting.exception.RemotingConnectException: connect to [127.0.0.1:9876] failed");
}
} |
@Override
public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
return builder.build(stream);
} | @Test
public void testUnmarshal() throws Exception {
InputStream stream = IOConverter.toInputStream(new File("src/test/resources/data.ics"));
MockEndpoint endpoint = getMockEndpoint("mock:result");
endpoint.expectedBodiesReceived(createTestCalendar());
template.sendBody("direct:unmarshal", stream);
endpoint.assertIsSatisfied();
} |
void start() throws TransientKinesisException {
ImmutableMap.Builder<String, ShardRecordsIterator> shardsMap = ImmutableMap.builder();
for (ShardCheckpoint checkpoint : initialCheckpoint) {
shardsMap.put(checkpoint.getShardId(), createShardIterator(kinesis, checkpoint));
}
shardIteratorsMap.set(shardsMap.build());
if (!shardIteratorsMap.get().isEmpty()) {
int capacityPerShard =
read.getMaxCapacityPerShard() != null
? read.getMaxCapacityPerShard()
: DEFAULT_CAPACITY_PER_SHARD;
recordsQueue = new ArrayBlockingQueue<>(capacityPerShard * shardIteratorsMap.get().size());
String streamName = initialCheckpoint.getStreamName();
startReadingShards(shardIteratorsMap.get().values(), streamName);
} else {
// There are no shards to handle when restoring from an empty checkpoint. Empty checkpoints
// are generated when the last shard handled by this pool was closed
recordsQueue = new ArrayBlockingQueue<>(1);
}
} | @Test
public void shouldStartReadingSuccessiveShardsAfterReceivingShardClosedException()
throws Exception {
when(firstIterator.readNextBatch()).thenThrow(KinesisShardClosedException.class);
when(firstIterator.findSuccessiveShardRecordIterators())
.thenReturn(ImmutableList.of(thirdIterator, fourthIterator));
shardReadersPool.start();
verify(thirdIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();
verify(fourthIterator, timeout(TIMEOUT_IN_MILLIS).atLeast(2)).readNextBatch();
} |
@Override
public ArchivedExecutionGraph getArchivedExecutionGraph(
JobStatus jobStatus, @Nullable Throwable cause) {
return ArchivedExecutionGraph.createSparseArchivedExecutionGraphWithJobVertices(
jobInformation.getJobID(),
jobInformation.getName(),
jobStatus,
jobGraph.getJobType(),
cause,
jobInformation.getCheckpointingSettings(),
initializationTimestamp,
jobGraph.getVertices(),
initialParallelismStore);
} | @Test
void testArchivedJobVerticesPresent() throws Exception {
final JobGraph jobGraph = createJobGraph();
jobGraph.setSnapshotSettings(
new JobCheckpointingSettings(
CheckpointCoordinatorConfiguration.builder().build(), null));
final ArchivedExecutionGraph archivedExecutionGraph =
new AdaptiveSchedulerBuilder(
jobGraph, mainThreadExecutor, EXECUTOR_RESOURCE.getExecutor())
.build()
.getArchivedExecutionGraph(JobStatus.INITIALIZING, null);
ArchivedExecutionJobVertex jobVertex =
archivedExecutionGraph.getJobVertex(JOB_VERTEX.getID());
assertThat(jobVertex)
.isNotNull()
.satisfies(
archived -> {
assertThat(archived.getParallelism())
.isEqualTo(JOB_VERTEX.getParallelism());
// JOB_VERTEX.maxP == -1, but we want the actual maxP determined by the
// scheduler
assertThat(archived.getMaxParallelism()).isEqualTo(128);
});
ArchivedExecutionGraphTest.assertContainsCheckpointSettings(archivedExecutionGraph);
} |
public final Map<DecodeHintType, Object> getReaderHintMap() {
return readerHintMap;
} | @Test
final void testGetReaderHintMap() throws IOException {
try (BarcodeDataFormat instance = new BarcodeDataFormat()) {
Map<DecodeHintType, Object> result = instance.getReaderHintMap();
assertNotNull(result);
}
} |
public boolean createDataConnection(DataConnectionCatalogEntry dl, boolean replace, boolean ifNotExists) {
if (replace) {
dataConnectionStorage.put(dl.name(), dl);
listeners.forEach(TableListener::onTableChanged);
return true;
} else {
boolean added = dataConnectionStorage.putIfAbsent(dl.name(), dl);
if (!added && !ifNotExists) {
throw QueryException.error("Data connection already exists: " + dl.name());
}
if (!added) {
// report only updates to listener
listeners.forEach(TableListener::onTableChanged);
}
return added;
}
} | @Test
public void when_createsDuplicateDataConnectionIfReplace_then_succeeds() {
// given
DataConnectionCatalogEntry dataConnectionCatalogEntry = dataConnection();
// when
dataConnectionResolver.createDataConnection(dataConnectionCatalogEntry, true, false);
// then
verify(relationsStorage).put(eq(dataConnectionCatalogEntry.name()), isA(DataConnectionCatalogEntry.class));
} |
@Override
public int ncol() {
return df.ncol();
} | @Test
public void testNcols() {
System.out.println("ncol");
assertEquals(5, df.ncol());
} |
@Override
public DataSourceProvenance getProvenance() {
return new AggregateDataSourceProvenance(this);
} | @Test
public void testACDSIterationOrder() {
MockOutputFactory factory = new MockOutputFactory();
String[] featureNames = new String[] {"X1","X2"};
double[] featureValues = new double[] {1.0, 2.0};
List<Example<MockOutput>> first = new ArrayList<>();
first.add(new ArrayExample<>(new MockOutput("A"),featureNames,featureValues));
first.add(new ArrayExample<>(new MockOutput("B"),featureNames,featureValues));
first.add(new ArrayExample<>(new MockOutput("C"),featureNames,featureValues));
first.add(new ArrayExample<>(new MockOutput("D"),featureNames,featureValues));
first.add(new ArrayExample<>(new MockOutput("E"),featureNames,featureValues));
MockListConfigurableDataSource<MockOutput> firstSource = new MockListConfigurableDataSource<>(first,factory,new SimpleDataSourceProvenance("First",factory));
List<Example<MockOutput>> second = new ArrayList<>();
second.add(new ArrayExample<>(new MockOutput("F"),featureNames,featureValues));
second.add(new ArrayExample<>(new MockOutput("G"),featureNames,featureValues));
MockListConfigurableDataSource<MockOutput> secondSource = new MockListConfigurableDataSource<>(second,factory,new SimpleDataSourceProvenance("Second",factory));
List<Example<MockOutput>> third = new ArrayList<>();
third.add(new ArrayExample<>(new MockOutput("H"),featureNames,featureValues));
third.add(new ArrayExample<>(new MockOutput("I"),featureNames,featureValues));
third.add(new ArrayExample<>(new MockOutput("J"),featureNames,featureValues));
third.add(new ArrayExample<>(new MockOutput("K"),featureNames,featureValues));
MockListConfigurableDataSource<MockOutput> thirdSource = new MockListConfigurableDataSource<>(third,factory,new SimpleDataSourceProvenance("Third",factory));
List<ConfigurableDataSource<MockOutput>> sources = new ArrayList<>();
sources.add(firstSource);
sources.add(secondSource);
sources.add(thirdSource);
AggregateConfigurableDataSource<MockOutput> acdsSeq = new AggregateConfigurableDataSource<>(sources, AggregateDataSource.IterationOrder.SEQUENTIAL);
String[] expectedSeq = new String[] {"A","B","C","D","E","F","G","H","I","J","K"};
String[] actualSeq = StreamSupport.stream(acdsSeq.spliterator(), false).map(Example::getOutput).map(MockOutput::toString).toArray(String[]::new);
Assertions.assertArrayEquals(expectedSeq,actualSeq);
Helpers.testProvenanceMarshalling(acdsSeq.getProvenance());
AggregateConfigurableDataSource<MockOutput> acdsRR = new AggregateConfigurableDataSource<>(sources, AggregateDataSource.IterationOrder.ROUNDROBIN);
String[] expectedRR = new String[] {"A","F","H","B","G","I","C","J","D","K","E"};
String[] actualRR = StreamSupport.stream(acdsRR.spliterator(), false).map(Example::getOutput).map(MockOutput::toString).toArray(String[]::new);
Assertions.assertArrayEquals(expectedRR,actualRR);
Helpers.testProvenanceMarshalling(acdsRR.getProvenance());
} |
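The ROUNDROBIN order asserted at the end takes one element from each source per pass, so shorter sources drain early while the remaining sources keep alternating. A minimal standalone sketch of that interleaving, independent of the Tribuo classes:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class RoundRobinSketch {
    static <T> List<T> roundRobin(List<List<T>> sources) {
        List<Iterator<T>> iterators = new ArrayList<>();
        for (List<T> source : sources) {
            iterators.add(source.iterator());
        }
        List<T> out = new ArrayList<>();
        boolean emitted = true;
        while (emitted) {
            emitted = false;
            for (Iterator<T> it : iterators) {
                if (it.hasNext()) {
                    out.add(it.next());
                    emitted = true;
                }
            }
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(roundRobin(List.of(
                List.of("A", "B", "C", "D", "E"),
                List.of("F", "G"),
                List.of("H", "I", "J", "K"))));
        // [A, F, H, B, G, I, C, J, D, K, E] -- matches the expectedRR array above
    }
}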
@Override
public boolean isValid() {
// Validate type/devices
type();
devices();
return super.isValid()
&& hasOnlyFields(ALLOWED, NAME, LATITUDE, LONGITUDE, UI_TYPE,
RACK_ADDRESS, OWNER, TYPE, DEVICES, LOC_IN_PEERS);
} | @Test
public void sampleValidConfig() {
ObjectNode node = new TmpJson()
.props(NAME, TYPE)
.arrays(DEVICES)
.node();
cfg = new BasicRegionConfig();
cfg.init(regionId(R1), BASIC, node, mapper, delegate);
assertTrue("not valid: " + cfg, cfg.isValid());
} |
@Override
@MethodNotAvailable
public void removeAll() {
throw new MethodNotAvailableException();
} | @Test(expected = MethodNotAvailableException.class)
public void testRemoveAll() {
adapter.removeAll();
} |
public synchronized boolean hasStartOfBlock() {
return startOfBlockIndex >= 0;
} | @Test
public void testHasStartOfBlock() {
assertFalse(instance.hasStartOfBlock(), "Unexpected initial value");
instance.write(MllpProtocolConstants.START_OF_BLOCK);
assertTrue(instance.hasStartOfBlock());
instance.reset();
assertFalse(instance.hasStartOfBlock());
instance.write(TEST_HL7_MESSAGE.getBytes());
assertFalse(instance.hasStartOfBlock());
instance.write(MllpProtocolConstants.START_OF_BLOCK);
assertTrue(instance.hasStartOfBlock());
instance.write(TEST_HL7_MESSAGE.getBytes());
assertTrue(instance.hasStartOfBlock());
} |
protected boolean isFactMappingValueToSkip(FactMappingValue factMappingValue) {
return factMappingValue.getRawValue() == null;
} | @Test
public void isFactMappingValueToSkip() {
FactIdentifier factIdentifier = FactIdentifier.create("MyInstance", String.class.getCanonicalName());
ExpressionIdentifier expressionIdentifier = ExpressionIdentifier.create("MyProperty", FactMappingType.GIVEN);
FactMappingValue factMappingValueWithValidValue = new FactMappingValue(factIdentifier, expressionIdentifier, VALUE);
assertThat(abstractRunnerHelper.isFactMappingValueToSkip(factMappingValueWithValidValue)).isFalse();
FactMappingValue factMappingValueWithoutValue = new FactMappingValue(factIdentifier, expressionIdentifier, null);
assertThat(abstractRunnerHelper.isFactMappingValueToSkip(factMappingValueWithoutValue)).isTrue();
} |
public Ticket add(long delay, TimerHandler handler, Object... args)
{
if (handler == null) {
return null;
}
Utils.checkArgument(delay > 0, "Delay of a ticket has to be strictly greater than 0");
final Ticket ticket = new Ticket(this, now(), delay, handler, args);
insert(ticket);
return ticket;
} | @Test
public void testAddFaultyHandler()
{
ZTicket.Ticket ticket = tickets.add(10, null);
assertThat(ticket, nullValue());
} |
public static String[] getTableNames(String statement) {
return splitTableNames(statement);
} | @Test
void getTableNames() {
String sql = "print VersionT";
assertArrayEquals(new String[] {"VersionT"}, PrintStatementExplainer.getTableNames(sql));
sql = "print VersionT, Buyers, r, rr, vvv";
assertArrayEquals(
new String[] {"VersionT", "Buyers", "r", "rr", "vvv"}, PrintStatementExplainer.getTableNames(sql));
} |
@Override
public boolean start() throws IOException {
LOG.info("Starting reader using {}", initCheckpoint);
try {
shardReadersPool = createShardReadersPool();
shardReadersPool.start();
} catch (TransientKinesisException e) {
throw new IOException(e);
}
return advance();
} | @Test
public void startReturnsFalseIfNoDataAtTheBeginning() throws IOException {
assertThat(reader.start()).isFalse();
} |
@Override
public <R> R eval(Mode mode, String luaScript, ReturnType returnType) {
return eval(mode, luaScript, returnType, Collections.emptyList());
} | @Test
public void testEvalResultMapping() {
testInCluster(redissonClient -> {
RScript script = redissonClient.getScript(StringCodec.INSTANCE);
Long res = script.eval(RScript.Mode.READ_ONLY, "return 1;", RScript.ReturnType.INTEGER,
integers -> integers.stream().mapToLong(r -> r).sum());
assertThat(res).isEqualTo(3);
});
} |
@InvokeOnHeader(Web3jConstants.SHH_NEW_IDENTITY)
void shhNewIdentity(Message message) throws IOException {
Request<?, ShhNewIdentity> request = web3j.shhNewIdentity();
setRequestId(message, request);
ShhNewIdentity response = request.send();
boolean hasError = checkForError(message, response);
if (!hasError) {
message.setBody(response.getAddress());
}
} | @Test
public void shhNewIdentityTest() throws Exception {
ShhNewIdentity response = Mockito.mock(ShhNewIdentity.class);
Mockito.when(mockWeb3j.shhNewIdentity()).thenReturn(request);
Mockito.when(request.send()).thenReturn(response);
Mockito.when(response.getAddress()).thenReturn("test");
Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.SHH_NEW_IDENTITY);
template.send(exchange);
String body = exchange.getIn().getBody(String.class);
assertEquals("test", body);
} |
public static boolean isMatch(URL consumerUrl, URL providerUrl) {
String consumerInterface = consumerUrl.getServiceInterface();
String providerInterface = providerUrl.getServiceInterface();
        // FIXME accept providerUrl with '*' as interface name; after careful thought about all possible scenarios,
        // it is OK to add this condition.
        // Return false if the consumer interface does not equal the provider interface,
        // unless one of the interface configurations equals '*' (i.e. any value).
if (!(ANY_VALUE.equals(consumerInterface)
|| ANY_VALUE.equals(providerInterface)
|| StringUtils.isEquals(consumerInterface, providerInterface))) {
return false;
}
        // Return false if the provider URL's category does not match the consumer URL's category.
        // Usually the provider URL carries no explicit category, so the default ('providers') applies,
        // making the provider URL's category 'providers'.
        // As observed while debugging, the consumer URL's category is
        // 'providers,configurators,routers'.
if (!isMatchCategory(providerUrl.getCategory(DEFAULT_CATEGORY), consumerUrl.getCategory(DEFAULT_CATEGORY))) {
return false;
}
// If the provider is not enabled, return false.
if (!providerUrl.getParameter(ENABLED_KEY, true) && !ANY_VALUE.equals(consumerUrl.getParameter(ENABLED_KEY))) {
return false;
}
// Obtain consumer's group, version and classifier.
String consumerGroup = consumerUrl.getGroup();
String consumerVersion = consumerUrl.getVersion();
String consumerClassifier = consumerUrl.getParameter(CLASSIFIER_KEY, ANY_VALUE);
// Obtain provider's group, version and classifier.
String providerGroup = providerUrl.getGroup();
String providerVersion = providerUrl.getVersion();
String providerClassifier = providerUrl.getParameter(CLASSIFIER_KEY, ANY_VALUE);
// If Group, Version, Classifier all matches, return true.
boolean groupMatches = ANY_VALUE.equals(consumerGroup)
|| StringUtils.isEquals(consumerGroup, providerGroup)
|| StringUtils.isContains(consumerGroup, providerGroup);
boolean versionMatches =
ANY_VALUE.equals(consumerVersion) || StringUtils.isEquals(consumerVersion, providerVersion);
boolean classifierMatches = consumerClassifier == null
|| ANY_VALUE.equals(consumerClassifier)
|| StringUtils.isEquals(consumerClassifier, providerClassifier);
return groupMatches && versionMatches && classifierMatches;
} | @Test
void testIsMatch() {
URL consumerUrl = URL.valueOf("dubbo://127.0.0.1:20880/com.xxx.XxxService?version=1.0.0&group=test");
URL providerUrl = URL.valueOf("http://127.0.0.1:8080/com.xxx.XxxService?version=1.0.0&group=test");
assertTrue(UrlUtils.isMatch(consumerUrl, providerUrl));
} |
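A small usage sketch of the matching rules above, assuming org.apache.dubbo.common.URL and UrlUtils are available: the interface path must match (or be '*') before category, enabled flag, group, version, and classifier are even considered.

import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.utils.UrlUtils;

public class UrlMatchSketch {
    public static void main(String[] args) {
        URL consumer = URL.valueOf("dubbo://127.0.0.1:20880/com.xxx.XxxService?version=1.0.0&group=test");
        URL sameService = URL.valueOf("http://127.0.0.1:8080/com.xxx.XxxService?version=1.0.0&group=test");
        URL otherService = URL.valueOf("http://127.0.0.1:8080/com.yyy.YyyService?version=1.0.0&group=test");
        System.out.println(UrlUtils.isMatch(consumer, sameService));  // true
        System.out.println(UrlUtils.isMatch(consumer, otherService)); // false: interface differs
    }
}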
Object getCellValue(Cell cell, Schema.FieldType type) {
ByteString cellValue = cell.getValue();
int valueSize = cellValue.size();
switch (type.getTypeName()) {
case BOOLEAN:
checkArgument(valueSize == 1, message("Boolean", 1));
return cellValue.toByteArray()[0] != 0;
case BYTE:
checkArgument(valueSize == 1, message("Byte", 1));
return cellValue.toByteArray()[0];
case INT16:
checkArgument(valueSize == 2, message("Int16", 2));
return Shorts.fromByteArray(cellValue.toByteArray());
case INT32:
checkArgument(valueSize == 4, message("Int32", 4));
return Ints.fromByteArray(cellValue.toByteArray());
case INT64:
checkArgument(valueSize == 8, message("Int64", 8));
return Longs.fromByteArray(cellValue.toByteArray());
case FLOAT:
checkArgument(valueSize == 4, message("Float", 4));
return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray()));
case DOUBLE:
checkArgument(valueSize == 8, message("Double", 8));
return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray()));
case DATETIME:
return DateTime.parse(cellValue.toStringUtf8());
case STRING:
return cellValue.toStringUtf8();
case BYTES:
return cellValue.toByteArray();
case LOGICAL_TYPE:
String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier();
throw new IllegalStateException("Unsupported logical type: " + identifier);
default:
throw new IllegalArgumentException(
String.format("Unsupported cell value type '%s'.", type.getTypeName()));
}
} | @Test
public void shouldFailTooLongByteValue() {
byte[] value = new byte[3];
IllegalArgumentException exception =
assertThrows(IllegalArgumentException.class, () -> PARSER.getCellValue(cell(value), BYTE));
checkMessage(exception.getMessage(), "Byte has to be 1-byte long bytearray");
} |
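The numeric branches above expect fixed-width big-endian byte arrays, which is exactly what Guava's primitive helpers produce. A minimal round-trip sketch for the INT32 case (Shorts and Longs work the same way for INT16 and INT64):

import com.google.common.primitives.Ints;

public class CellEncodingSketch {
    public static void main(String[] args) {
        byte[] encoded = Ints.toByteArray(42); // 4 bytes, big-endian
        System.out.println(encoded.length);              // 4 -- the size the INT32 branch checks
        System.out.println(Ints.fromByteArray(encoded)); // 42 -- the decode the parser performs
    }
}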
@Override
public void process(MetricsPacket.Builder builder) {
Set<DimensionId> dimensionsToRetain = builder.getDimensionIds();
dimensionsToRetain.removeAll(blocklistDimensions);
builder.retainDimensions(dimensionsToRetain);
} | @Test
public void public_dimensions_are_retained() {
var builder = new MetricsPacket.Builder(toServiceId("foo"))
.putDimension(toDimensionId(APPLICATION_ID), "app");
var processor = new PublicDimensionsProcessor();
processor.process(builder);
assertEquals(1, builder.getDimensionIds().size());
assertEquals(toDimensionId(APPLICATION_ID), builder.getDimensionIds().iterator().next());
} |
public static Object eval(String expression, Map<String, Object> context) {
return eval(expression, context, ListUtil.empty());
} | @Test
public void qlExpressTest(){
final ExpressionEngine engine = new QLExpressEngine();
final Dict dict = Dict.of()
.set("a", 100.3)
.set("b", 45)
.set("c", -199.100);
final Object eval = engine.eval("a-(b-c)", dict, null);
assertEquals(-143.8, (double)eval, 0);
} |
public static long getEpochValueInSeconds(String epoch) {
final String seconds;
if (epoch.length() >= 13) {
//this is in milliseconds - reduce to seconds
seconds = epoch.substring(0, 10);
} else {
seconds = epoch;
}
long results = 0;
try {
results = Long.parseLong(seconds);
} catch (NumberFormatException ex) {
LOGGER.debug(String.format("Error parsing '%s' property from the database - using zero", epoch), ex);
}
return results;
} | @Test
public void testGetEpochValueInSeconds() throws ParseException {
String milliseconds = "1550538553466";
long expected = 1550538553;
long result = DateUtil.getEpochValueInSeconds(milliseconds);
assertEquals(expected, result);
milliseconds = "blahblahblah";
expected = 0;
result = DateUtil.getEpochValueInSeconds(milliseconds);
assertEquals(expected, result);
milliseconds = "1550538553";
expected = 1550538553;
result = DateUtil.getEpochValueInSeconds(milliseconds);
assertEquals(expected, result);
} |
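The 13-digit case above is a millisecond epoch, so keeping the first 10 digits is equivalent to integer division by 1000. A standalone sketch of that equivalence; the class name is illustrative:

public class EpochSketch {
    public static void main(String[] args) {
        String epochMillis = "1550538553466"; // 13 digits -> milliseconds
        long viaSubstring = Long.parseLong(epochMillis.substring(0, 10));
        long viaDivision = Long.parseLong(epochMillis) / 1000L;
        System.out.println(viaSubstring);                 // 1550538553
        System.out.println(viaSubstring == viaDivision);  // true
    }
}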
@Override
public void stopTrackingAndReleaseAllClusterPartitions() {
clusterPartitions.values().stream()
.map(DataSetEntry::getPartitionIds)
.forEach(shuffleEnvironment::releasePartitionsLocally);
clusterPartitions.clear();
} | @Test
void stopTrackingAndReleaseAllClusterPartitions() throws Exception {
final TestingShuffleEnvironment testingShuffleEnvironment = new TestingShuffleEnvironment();
final CompletableFuture<Collection<ResultPartitionID>> shuffleReleaseFuture =
new CompletableFuture<>();
testingShuffleEnvironment.releasePartitionsLocallyFuture = shuffleReleaseFuture;
final ResultPartitionID resultPartitionId1 = new ResultPartitionID();
final ResultPartitionID resultPartitionId2 = new ResultPartitionID();
final TaskExecutorPartitionTracker partitionTracker =
new TaskExecutorPartitionTrackerImpl(testingShuffleEnvironment);
partitionTracker.startTrackingPartition(
new JobID(),
new TaskExecutorPartitionInfo(
new TestingShuffleDescriptor(resultPartitionId1),
new IntermediateDataSetID(),
1));
partitionTracker.startTrackingPartition(
new JobID(),
new TaskExecutorPartitionInfo(
new TestingShuffleDescriptor(resultPartitionId2),
new IntermediateDataSetID(),
1));
partitionTracker.promoteJobPartitions(Collections.singleton(resultPartitionId1));
partitionTracker.stopTrackingAndReleaseAllClusterPartitions();
assertThatFuture(shuffleReleaseFuture)
.eventuallySucceeds()
.satisfies(actual -> assertThat(actual).contains(resultPartitionId1));
} |
public void setResourceBase(String resourceBase) {
handler.setResourceBase(resourceBase);
} | @Test
void setsResourceBase() throws Exception {
environment.setResourceBase("/woo");
assertThat(handler.getResourceBase()).isEqualTo(handler.newResource("/woo").toString());
} |
@Override
public void createNamespace(Namespace namespace, Map<String, String> meta) {
Preconditions.checkArgument(
!namespace.isEmpty(), "Cannot create namespace with invalid name: %s", namespace);
Preconditions.checkArgument(
isValidateNamespace(namespace),
"Cannot support multi part namespace in Hive Metastore: %s",
namespace);
Preconditions.checkArgument(
meta.get(HMS_DB_OWNER_TYPE) == null || meta.get(HMS_DB_OWNER) != null,
"Create namespace setting %s without setting %s is not allowed",
HMS_DB_OWNER_TYPE,
HMS_DB_OWNER);
try {
clients.run(client -> {
client.createDatabase(convertToDatabase(namespace, meta));
return null;
});
LOG.info("Created namespace: {}", namespace);
} catch (AlreadyExistsException e) {
throw new org.apache.iceberg.exceptions.AlreadyExistsException(
e, "Namespace already exists: %s", namespace);
} catch (TException e) {
throw new RuntimeException("Failed to create namespace " + namespace + " in Hive Metastore", e);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(
"Interrupted in call to createDatabase(name) " + namespace + " in Hive Metastore", e);
}
} | @Test
public void testNamespaceExists() {
Namespace namespace = Namespace.of("dbname_exists");
catalog.createNamespace(namespace, meta);
    assertThat(catalog.namespaceExists(namespace)).as("Should be true when the namespace exists").isTrue();
    assertThat(catalog.namespaceExists(Namespace.of("db2", "db2", "ns2")))
        .as("Should be false when the namespace doesn't exist")
        .isFalse();
} |
@Override
public synchronized void editSchedule() {
updateConfigIfNeeded();
long startTs = clock.getTime();
CSQueue root = scheduler.getRootQueue();
Resource clusterResources = Resources.clone(scheduler.getClusterResource());
containerBasedPreemptOrKill(root, clusterResources);
if (LOG.isDebugEnabled()) {
LOG.debug("Total time used=" + (clock.getTime() - startTs) + " ms.");
}
} | @Test
public void testObserveOnly() {
int[][] qData = new int[][]{
// / A B C
{ 100, 40, 40, 20 }, // abs
{ 100, 100, 100, 100 }, // maxCap
{ 100, 90, 10, 0 }, // used
{ 80, 10, 20, 50 }, // pending
{ 0, 0, 0, 0 }, // reserved
{ 2, 1, 1, 0 }, // apps
{ -1, 1, 1, 0 }, // req granularity
{ 3, 0, 0, 0 }, // subqueues
};
conf.setBoolean(CapacitySchedulerConfiguration.PREEMPTION_OBSERVE_ONLY,
true);
when(mCS.getConfiguration()).thenReturn(
new CapacitySchedulerConfiguration(conf));
ProportionalCapacityPreemptionPolicy policy = buildPolicy(qData);
policy.editSchedule();
// verify even severe imbalance not affected
verify(mDisp, never()).handle(isA(ContainerPreemptEvent.class));
} |
public synchronized GpuDeviceInformation parseXml(String xmlContent)
throws YarnException {
InputSource inputSource = new InputSource(new StringReader(xmlContent));
SAXSource source = new SAXSource(xmlReader, inputSource);
try {
return (GpuDeviceInformation) unmarshaller.unmarshal(source);
} catch (JAXBException e) {
String msg = "Failed to parse XML output of " +
GPU_SCRIPT_REFERENCE + "!";
LOG.error(msg, e);
throw new YarnException(msg, e);
}
} | @Test
public void testParseMissingInnerTags() throws IOException, YarnException {
    File f = new File("src/test/resources/nvidia-smi-output-missing-tags2.xml");
String s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
GpuDeviceInformation info = parser.parseXml(s);
assertEquals("375.66", info.getDriverVersion());
assertEquals(2, info.getGpus().size());
PerGpuDeviceInformation gpu = info.getGpus().get(0);
assertEquals("Tesla P100-PCIE-12GB", gpu.getProductName());
assertEquals("GPU-28604e81-21ec-cc48-6759-bf2648b22e16", gpu.getUuid());
assertEquals(0, gpu.getMinorNumber());
assertEquals(-1, gpu.getGpuMemoryUsage().getTotalMemoryMiB());
assertEquals(-1, (long) gpu.getGpuMemoryUsage().getUsedMemoryMiB());
assertEquals(-1, (long) gpu.getGpuMemoryUsage().getAvailMemoryMiB());
assertEquals(0f,
gpu.getGpuUtilizations().getOverallGpuUtilization(), DELTA);
assertEquals(Float.MIN_VALUE,
gpu.getTemperature().getCurrentGpuTemp(), DELTA);
assertEquals(Float.MIN_VALUE,
gpu.getTemperature().getMaxGpuTemp(), DELTA);
assertEquals(Float.MIN_VALUE,
gpu.getTemperature().getSlowThresholdGpuTemp(),
DELTA);
assertSecondGpu(info.getGpus().get(1));
} |
public PrepareResult prepare(HostValidator hostValidator, DeployLogger logger, PrepareParams params,
Optional<ApplicationVersions> activeApplicationVersions, Instant now, File serverDbSessionDir,
ApplicationPackage applicationPackage, SessionZooKeeperClient sessionZooKeeperClient) {
ApplicationId applicationId = params.getApplicationId();
Preparation preparation = new Preparation(hostValidator, logger, params, activeApplicationVersions,
TenantRepository.getTenantPath(applicationId.tenant()),
serverDbSessionDir, applicationPackage, sessionZooKeeperClient,
onnxModelCost, endpointCertificateSecretStores);
preparation.preprocess();
try {
AllocatedHosts allocatedHosts = preparation.buildModels(now);
preparation.makeResult(allocatedHosts);
if ( ! params.isDryRun()) {
FileReference fileReference = preparation.triggerDistributionOfApplicationPackage();
preparation.writeStateZK(fileReference);
preparation.writeEndpointCertificateMetadataZK();
preparation.writeContainerEndpointsZK();
}
log.log(Level.FINE, () -> "time used " + params.getTimeoutBudget().timesUsed() + " : " + applicationId);
return preparation.result();
}
catch (IllegalArgumentException e) {
if (e instanceof InvalidApplicationException)
throw e;
throw new InvalidApplicationException("Invalid application package", e);
}
} | @Test
public void require_that_file_reference_of_application_package_is_written_to_zk() throws Exception {
prepare(testApp);
assertTrue(curator.exists(sessionPath(1).append(APPLICATION_PACKAGE_REFERENCE_PATH)));
} |
@Udf
public Long trunc(@UdfParameter final Long val) {
return val;
} | @Test
public void shouldTruncateSimpleBigDecimalPositive() {
assertThat(udf.trunc(new BigDecimal("0.0")), is(new BigDecimal("0")));
assertThat(udf.trunc(new BigDecimal("1.23")), is(new BigDecimal("1")));
assertThat(udf.trunc(new BigDecimal("1.0")), is(new BigDecimal("1")));
assertThat(udf.trunc(new BigDecimal("1.5")), is(new BigDecimal("1")));
assertThat(udf.trunc(new BigDecimal("1.75")), is(new BigDecimal("1")));
assertThat(udf.trunc(new BigDecimal("1530000")), is(new BigDecimal("1530000")));
assertThat(udf.trunc(new BigDecimal("10.1")), is(new BigDecimal("10")));
assertThat(udf.trunc(new BigDecimal("12345.5")), is(new BigDecimal("12345")));
assertThat(udf.trunc(new BigDecimal("9.99")), is(new BigDecimal("9")));
assertThat(udf.trunc(new BigDecimal("110.1")), is(new BigDecimal("110")));
assertThat(udf.trunc(new BigDecimal("1530000.01")), is(new BigDecimal("1530000")));
assertThat(udf.trunc(new BigDecimal("9999999.99")), is(new BigDecimal("9999999")));
} |
public TreeSelection[] getTreeObjects( final Tree tree, Tree selectionTree, Tree coreObjectsTree ) {
List<TreeSelection> objects = new ArrayList<TreeSelection>();
if ( selectionTree != null && !selectionTree.isDisposed() && tree.equals( selectionTree ) ) {
TreeItem[] selection = selectionTree.getSelection();
for ( int s = 0; s < selection.length; s++ ) {
TreeItem treeItem = selection[s];
String[] path = ConstUI.getTreeStrings( treeItem );
TreeSelection object = null;
switch ( path.length ) {
case 0:
break;
case 1: // ------complete-----
if ( path[0].equals( Spoon.STRING_TRANSFORMATIONS ) ) { // the top level Transformations entry
object = new TreeSelection( path[0], TransMeta.class );
}
if ( path[0].equals( Spoon.STRING_JOBS ) ) { // the top level Jobs entry
object = new TreeSelection( path[0], JobMeta.class );
}
break;
case 2: // ------complete-----
if ( path[0].equals( Spoon.STRING_BUILDING_BLOCKS ) ) { // the top level Transformations entry
if ( path[1].equals( Spoon.STRING_TRANS_BASE ) ) {
object = new TreeSelection( path[1], PluginInterface.class );
}
}
if ( path[0].equals( Spoon.STRING_TRANSFORMATIONS ) ) { // Transformation title
object = new TreeSelection( path[1], spoon.delegates.trans.getTransformation( path[1] ) );
}
if ( path[0].equals( Spoon.STRING_JOBS ) ) { // Jobs title
object = new TreeSelection( path[1], spoon.delegates.jobs.getJob( path[1] ) );
}
break;
case 3: // ------complete-----
if ( path[0].equals( Spoon.STRING_TRANSFORMATIONS ) ) { // Transformations title
TransMeta transMeta = spoon.delegates.trans.getTransformation( path[1] );
if ( path[2].equals( Spoon.STRING_CONNECTIONS ) ) {
object = new TreeSelection( path[2], DatabaseMeta.class, transMeta );
}
if ( path[2].equals( Spoon.STRING_STEPS ) ) {
object = new TreeSelection( path[2], StepMeta.class, transMeta );
}
if ( path[2].equals( Spoon.STRING_HOPS ) ) {
object = new TreeSelection( path[2], TransHopMeta.class, transMeta );
}
if ( path[2].equals( Spoon.STRING_PARTITIONS ) ) {
object = new TreeSelection( path[2], PartitionSchema.class, transMeta );
}
if ( path[2].equals( Spoon.STRING_SLAVES ) ) {
object = new TreeSelection( path[2], SlaveServer.class, transMeta );
}
if ( path[2].equals( Spoon.STRING_CLUSTERS ) ) {
object = new TreeSelection( path[2], ClusterSchema.class, transMeta );
}
executeExtensionPoint( new SpoonTreeDelegateExtension( transMeta, path, 3, objects ) );
}
if ( path[0].equals( Spoon.STRING_JOBS ) ) { // Jobs title
JobMeta jobMeta = spoon.delegates.jobs.getJob( path[1] );
if ( path[2].equals( Spoon.STRING_CONNECTIONS ) ) {
object = new TreeSelection( path[2], DatabaseMeta.class, jobMeta );
}
if ( path[2].equals( Spoon.STRING_JOB_ENTRIES ) ) {
object = new TreeSelection( path[2], JobEntryCopy.class, jobMeta );
}
if ( path[2].equals( Spoon.STRING_SLAVES ) ) {
object = new TreeSelection( path[2], SlaveServer.class, jobMeta );
}
executeExtensionPoint( new SpoonTreeDelegateExtension( jobMeta, path, 3, objects ) );
}
break;
case 4: // ------complete-----
if ( path[0].equals( Spoon.STRING_TRANSFORMATIONS ) ) { // The name of a transformation
final TransMeta transMeta = spoon.delegates.trans.getTransformation( path[1] );
if ( transMeta != null ) {
if ( path[2].equals( Spoon.STRING_CONNECTIONS ) ) {
String dbName = path[3];
DatabaseMeta databaseMeta = transMeta.findDatabase( dbName );
if ( databaseMeta != null ) {
dbName = databaseMeta.getName();
}
object = new TreeSelection( dbName, databaseMeta, transMeta );
}
if ( path[2].equals( Spoon.STRING_STEPS ) ) {
object = new TreeSelection( path[3], transMeta.findStep( path[3] ), transMeta );
}
if ( path[2].equals( Spoon.STRING_HOPS ) ) {
object = new TreeSelection( path[3], transMeta.findTransHop( path[3] ), transMeta );
}
if ( path[2].equals( Spoon.STRING_PARTITIONS ) ) {
object = new TreeSelection( path[3], transMeta.findPartitionSchema( path[3] ), transMeta );
}
if ( path[2].equals( Spoon.STRING_SLAVES ) ) {
object = new TreeSelection( path[3], transMeta.findSlaveServer( path[3] ), transMeta );
}
if ( path[2].equals( Spoon.STRING_CLUSTERS ) ) {
object = new TreeSelection( path[3], transMeta.findClusterSchema( path[3] ), transMeta );
}
executeExtensionPoint( new SpoonTreeDelegateExtension( transMeta, path, 4, objects ) );
}
}
if ( path[0].equals( Spoon.STRING_JOBS ) ) { // The name of a job
JobMeta jobMeta = spoon.delegates.jobs.getJob( path[1] );
if ( jobMeta != null && path[2].equals( Spoon.STRING_CONNECTIONS ) ) {
String dbName = path[3];
DatabaseMeta databaseMeta = jobMeta.findDatabase( dbName );
if ( databaseMeta != null ) {
dbName = databaseMeta.getName();
}
object = new TreeSelection( dbName, databaseMeta, jobMeta );
}
if ( jobMeta != null && path[2].equals( Spoon.STRING_JOB_ENTRIES ) ) {
object = new TreeSelection( path[3], jobMeta.findJobEntry( path[3] ), jobMeta );
}
if ( jobMeta != null && path[2].equals( Spoon.STRING_SLAVES ) ) {
object = new TreeSelection( path[3], jobMeta.findSlaveServer( path[3] ), jobMeta );
}
executeExtensionPoint( new SpoonTreeDelegateExtension( jobMeta, path, 4, objects ) );
}
break;
case 5:
if ( path[0].equals( Spoon.STRING_TRANSFORMATIONS ) ) { // The name of a transformation
TransMeta transMeta = spoon.delegates.trans.getTransformation( path[1] );
if ( transMeta != null && path[2].equals( Spoon.STRING_CLUSTERS ) ) {
ClusterSchema clusterSchema = transMeta.findClusterSchema( path[3] );
object =
new TreeSelection( path[4], clusterSchema.findSlaveServer( path[4] ), clusterSchema, transMeta );
}
}
break;
default:
break;
}
if ( object != null ) {
objects.add( object );
}
}
}
if ( tree != null && coreObjectsTree != null && tree.equals( coreObjectsTree ) ) {
TreeItem[] selection = coreObjectsTree.getSelection();
for ( int s = 0; s < selection.length; s++ ) {
TreeItem treeItem = selection[s];
String[] path = ConstUI.getTreeStrings( treeItem );
TreeSelection object = null;
switch ( path.length ) {
case 0:
break;
case 2: // Job entries
if ( spoon.showJob ) {
PluginRegistry registry = PluginRegistry.getInstance();
Class<? extends PluginTypeInterface> pluginType = JobEntryPluginType.class;
PluginInterface plugin = registry.findPluginWithName( pluginType, path[1] );
// Retry for Start
//
if ( plugin == null ) {
if ( path[1].equalsIgnoreCase( JobMeta.STRING_SPECIAL_START ) ) {
plugin = registry.findPluginWithId( pluginType, JobMeta.STRING_SPECIAL );
}
}
// Retry for Dummy
//
if ( plugin == null ) {
if ( path[1].equalsIgnoreCase( JobMeta.STRING_SPECIAL_DUMMY ) ) {
plugin = registry.findPluginWithId( pluginType, JobMeta.STRING_SPECIAL );
}
}
if ( plugin != null ) {
object = new TreeSelection( path[1], plugin );
}
}
if ( spoon.showTrans ) {
String stepId = (String) treeItem.getData( "StepId" );
if ( stepId != null ) {
object = new TreeSelection( path[1], PluginRegistry.getInstance().findPluginWithId( StepPluginType.class, stepId ) );
} else {
object = new TreeSelection( path[1], PluginRegistry.getInstance().findPluginWithName( StepPluginType.class, path[1] ) );
}
}
break;
default:
break;
}
if ( object != null ) {
objects.add( object );
}
}
}
return objects.toArray( new TreeSelection[objects.size()] );
} | @Test
public void getTreeObjects_getStepById() {
SpoonTreeDelegate std = spy( new SpoonTreeDelegate( spoon ) );
Tree selection = mock( Tree.class );
Tree core = mock( Tree.class );
TreeItem item = mock( TreeItem.class );
PluginInterface step = mock( PluginInterface.class );
PluginRegistry registry = mock( PluginRegistry.class );
TreeItem[] items = new TreeItem[] { item };
when( ConstUI.getTreeStrings( item ) ).thenReturn( new String[] { "Output", "Avro Output" } );
when( PluginRegistry.getInstance() ).thenReturn( registry );
doReturn( items ).when( core ).getSelection();
doReturn( "AvroOutputPlugin" ).when( item ).getData( anyString() );
doReturn( step ).when( registry ).findPluginWithId( StepPluginType.class, "AvroOutputPlugin" );
spoon.showJob = false;
spoon.showTrans = true;
TreeSelection[] ts = std.getTreeObjects( core, selection, core );
assertEquals( 1, ts.length );
assertEquals( step, ts[ 0 ].getSelection() );
} |
protected void patchLoadBalancerClass(Service current, Service desired) {
if (current.getSpec().getLoadBalancerClass() != null
&& desired.getSpec().getLoadBalancerClass() == null) {
desired.getSpec().setLoadBalancerClass(current.getSpec().getLoadBalancerClass());
}
} | @Test
public void testLoadBalancerClassPatching() {
KubernetesClient client = mock(KubernetesClient.class);
Service current = new ServiceBuilder()
.withNewMetadata()
.withNamespace(NAMESPACE)
.withName(RESOURCE_NAME)
.endMetadata()
.withNewSpec()
.withType("LoadBalancer")
.withPorts(new ServicePortBuilder()
.withName("port1")
.withPort(1234)
.withTargetPort(new IntOrString(1234))
.build())
.withLoadBalancerClass("service.k8s.aws/nlb")
.endSpec()
.build();
Service desired = new ServiceBuilder()
.withNewMetadata()
.withNamespace(NAMESPACE)
.withName(RESOURCE_NAME)
.endMetadata()
.withNewSpec()
.withType("LoadBalancer")
.withPorts(new ServicePortBuilder()
.withName("port1")
.withPort(1234)
.withTargetPort(new IntOrString(1234))
.build())
.endSpec()
.build();
ServiceOperator op = new ServiceOperator(vertx, client);
op.patchLoadBalancerClass(current, desired);
assertThat(current.getSpec().getLoadBalancerClass(), is(desired.getSpec().getLoadBalancerClass()));
} |
public static CommonsConfigurationRetryConfiguration of(final Configuration configuration) throws ConfigParseException {
CommonsConfigurationRetryConfiguration obj = new CommonsConfigurationRetryConfiguration();
try {
obj.getConfigs().putAll(obj.getProperties(configuration.subset(RETRY_CONFIGS_PREFIX)));
obj.getInstances().putAll(obj.getProperties(configuration.subset(RETRY_INSTANCES_PREFIX)));
return obj;
} catch (Exception ex) {
throw new ConfigParseException("Error creating retry configuration", ex);
}
} | @Test
public void testFromPropertiesFile() throws ConfigurationException {
Configuration config = CommonsConfigurationUtil.getConfiguration(PropertiesConfiguration.class, TestConstants.RESILIENCE_CONFIG_PROPERTIES_FILE_NAME);
CommonsConfigurationRetryConfiguration retryConfiguration = CommonsConfigurationRetryConfiguration.of(config);
assertConfigs(retryConfiguration.getConfigs());
assertInstances(retryConfiguration.getInstances());
} |
@Override
public Set<Network> networks() {
return osNetworkStore.networks();
} | @Test
public void testGetNetworks() {
createBasicNetworks();
assertEquals("Number of networks did not match", 1, target.networks().size());
} |
public static List<String> validateXml(InputStream schemaStream, String xmlString) throws Exception {
return validateXml(schemaStream, xmlString, null);
} | @Test
public void testValidXmlAgainstValidSchema() throws Exception {
InputStream schemaStream = new FileInputStream("target/test-classes/io/github/microcks/util/valid-schema.xsd");
String validXml = """
<note>
<to>Tove</to>
<from>Jani</from>
<heading>Reminder</heading>
<body>Don't forget me this weekend!</body>
</note>
""";
List<String> errors = XmlSchemaValidator.validateXml(schemaStream, validXml);
assertTrue(errors.isEmpty(), "Expected no validation errors, but got: " + errors);
} |
@Override
protected void analyzeDependency(final Dependency dependency, final Engine engine) throws AnalysisException {
// batch request component-reports for all dependencies
synchronized (FETCH_MUTIX) {
if (reports == null) {
try {
requestDelay();
reports = requestReports(engine.getDependencies());
} catch (TransportException ex) {
final String message = ex.getMessage();
final boolean warnOnly = getSettings().getBoolean(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS, false);
this.setEnabled(false);
if (StringUtils.endsWith(message, "401")) {
LOG.error("Invalid credentials for the OSS Index, disabling the analyzer");
throw new AnalysisException("Invalid credentials provided for OSS Index", ex);
} else if (StringUtils.endsWith(message, "403")) {
LOG.error("OSS Index access forbidden, disabling the analyzer");
throw new AnalysisException("OSS Index access forbidden", ex);
} else if (StringUtils.endsWith(message, "429")) {
if (warnOnly) {
LOG.warn("OSS Index rate limit exceeded, disabling the analyzer", ex);
} else {
throw new AnalysisException("OSS Index rate limit exceeded, disabling the analyzer", ex);
}
} else if (warnOnly) {
LOG.warn("Error requesting component reports, disabling the analyzer", ex);
} else {
LOG.debug("Error requesting component reports, disabling the analyzer", ex);
throw new AnalysisException("Failed to request component-reports", ex);
}
} catch (SocketTimeoutException e) {
final boolean warnOnly = getSettings().getBoolean(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS, false);
this.setEnabled(false);
if (warnOnly) {
LOG.warn("OSS Index socket timeout, disabling the analyzer", e);
} else {
LOG.debug("OSS Index socket timeout", e);
throw new AnalysisException("Failed to establish socket to OSS Index", e);
}
} catch (Exception e) {
LOG.debug("Error requesting component reports", e);
throw new AnalysisException("Failed to request component-reports", e);
}
}
// skip enrichment if we failed to fetch reports
if (reports != null) {
enrich(dependency);
}
}
} | @Test
public void should_analyzeDependency_only_warn_when_transport_error_from_sonatype() throws Exception {
// Given
OssIndexAnalyzer analyzer = new OssIndexAnalyzerThrowing502();
getSettings().setBoolean(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS, true);
analyzer.initialize(getSettings());
Identifier identifier = new PurlIdentifier("maven", "test", "test", "1.0",
Confidence.HIGHEST);
Dependency dependency = new Dependency();
dependency.addSoftwareIdentifier(identifier);
Settings settings = getSettings();
Engine engine = new Engine(settings);
engine.setDependencies(Collections.singletonList(dependency));
// When
try {
analyzer.analyzeDependency(dependency, engine);
} catch (AnalysisException e) {
Assert.fail("Analysis exception thrown upon remote error although only a warning should have been logged");
} finally {
analyzer.close();
engine.close();
}
} |
@Override
public ConfigOperateResult insertOrUpdateCas(String srcIp, String srcUser, ConfigInfo configInfo,
Map<String, Object> configAdvanceInfo) {
try {
ConfigInfoStateWrapper configInfoState = findConfigInfoState(configInfo.getDataId(), configInfo.getGroup(),
configInfo.getTenant());
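// No existing state means this is a first write (plain insert); otherwise attempt a
// compare-and-swap update guarded by the caller-supplied MD5.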
if (configInfoState == null) {
return addConfigInfo(srcIp, srcUser, configInfo, configAdvanceInfo);
} else {
return updateConfigInfoCas(configInfo, srcIp, srcUser, configAdvanceInfo);
}
} catch (Exception exception) {
LogUtil.FATAL_LOG.error("[db-error] try to update or add config failed, {}", exception.getMessage(),
exception);
throw exception;
}
} | @Test
void testInsertOrUpdateCasOfUpdateConfigSuccess() {
Map<String, Object> configAdvanceInfo = new HashMap<>();
configAdvanceInfo.put("config_tags", "tag1,tag2");
configAdvanceInfo.put("desc", "desc11");
configAdvanceInfo.put("use", "use2233");
configAdvanceInfo.put("effect", "effect222");
configAdvanceInfo.put("type", "type3");
configAdvanceInfo.put("schema", "schema");
String dataId = "dataId";
String group = "group";
String tenant = "tenant";
String content = "content132456";
String encryptedDataKey = "key34567";
String casMd5 = "casMd5..";
ConfigInfo configInfo = new ConfigInfo(dataId, group, tenant, null, content);
configInfo.setMd5(casMd5);
configInfo.setEncryptedDataKey(encryptedDataKey);
// mock get config state; the first and second lookups both return a non-null state
Mockito.when(jdbcTemplate.queryForObject(anyString(), eq(new Object[] {dataId, group, tenant}),
eq(CONFIG_INFO_STATE_WRAPPER_ROW_MAPPER))).thenReturn(new ConfigInfoStateWrapper(), new ConfigInfoStateWrapper());
//mock select config info before update
ConfigInfoWrapper configInfoWrapperOld = new ConfigInfoWrapper();
configInfoWrapperOld.setDataId(dataId);
configInfoWrapperOld.setGroup(group);
configInfoWrapperOld.setTenant(tenant);
configInfoWrapperOld.setAppName("old_app11");
configInfoWrapperOld.setMd5("old_md5");
configInfoWrapperOld.setId(123456799L);
Mockito.when(jdbcTemplate.queryForObject(anyString(), eq(new Object[]{dataId, group, tenant}), eq(CONFIG_INFO_WRAPPER_ROW_MAPPER)))
.thenReturn(configInfoWrapperOld);
String srcIp = "srcIp";
String srcUser = "srcUser";
//mock update config info cas
Mockito.when(jdbcTemplate.update(anyString(), eq(content), eq(MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE)),
eq(srcIp), eq(srcUser), eq(configInfoWrapperOld.getAppName()), eq(configAdvanceInfo.get("desc")),
eq(configAdvanceInfo.get("use")), eq(configAdvanceInfo.get("effect")), eq(configAdvanceInfo.get("type")),
eq(configAdvanceInfo.get("schema")), eq(encryptedDataKey), eq(dataId), eq(group), eq(tenant), eq(casMd5))).thenReturn(1);
//mock insert config tags.
Mockito.when(jdbcTemplate.update(eq(externalConfigInfoPersistService.mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_TAGS_RELATION)
.insert(Arrays.asList("id", "tag_name", "tag_type", "data_id", "group_id", "tenant_id"))), eq(configInfoWrapperOld.getId()),
anyString(), eq(StringUtils.EMPTY), eq(dataId), eq(group), eq(tenant))).thenReturn(1);
//mock insert his config info
Mockito.doNothing().when(historyConfigInfoPersistService)
.insertConfigHistoryAtomic(eq(configInfoWrapperOld.getId()), eq(configInfo), eq(srcIp), eq(srcUser), any(Timestamp.class),
eq("U"));
externalConfigInfoPersistService.insertOrUpdateCas(srcIp, srcUser, configInfo, configAdvanceInfo);
//expect update config cas
Mockito.verify(jdbcTemplate, times(1))
.update(anyString(), eq(content), eq(MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE)), eq(srcIp),
eq(srcUser), eq(configInfoWrapperOld.getAppName()), eq(configAdvanceInfo.get("desc")),
eq(configAdvanceInfo.get("use")), eq(configAdvanceInfo.get("effect")), eq(configAdvanceInfo.get("type")),
eq(configAdvanceInfo.get("schema")), eq(encryptedDataKey), eq(dataId), eq(group), eq(tenant), eq(casMd5));
//expect update config tags
Mockito.verify(jdbcTemplate, times(1)).update(eq(
externalConfigInfoPersistService.mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_TAGS_RELATION)
.insert(Arrays.asList("id", "tag_name", "tag_type", "data_id", "group_id", "tenant_id"))),
eq(configInfoWrapperOld.getId()), eq("tag1"), eq(StringUtils.EMPTY), eq(dataId), eq(group), eq(tenant));
Mockito.verify(jdbcTemplate, times(1)).update(eq(
externalConfigInfoPersistService.mapperManager.findMapper(dataSourceService.getDataSourceType(),
TableConstant.CONFIG_TAGS_RELATION)
.insert(Arrays.asList("id", "tag_name", "tag_type", "data_id", "group_id", "tenant_id"))),
eq(configInfoWrapperOld.getId()), eq("tag2"), eq(StringUtils.EMPTY), eq(dataId), eq(group), eq(tenant));
//expect insert history info
Mockito.verify(historyConfigInfoPersistService, times(1))
.insertConfigHistoryAtomic(eq(configInfoWrapperOld.getId()), any(ConfigInfo.class), eq(srcIp), eq(srcUser),
any(Timestamp.class), eq("U"));
} |
public static void updateDetailMessage(
@Nullable Throwable root, @Nullable Function<Throwable, String> throwableToMessage) {
if (throwableToMessage == null) {
return;
}
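// Walk the cause chain from the root, letting the mapper decide per throwable whether to
// replace its detail message (a null result leaves that message untouched).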
Throwable it = root;
while (it != null) {
String newMessage = throwableToMessage.apply(it);
if (newMessage != null) {
updateDetailMessageOfThrowable(it, newMessage);
}
it = it.getCause();
}
} | @Test
void testUpdateDetailMessageOfNullWithoutException() {
ExceptionUtils.updateDetailMessage(null, t -> "new message");
} |
public Command create(
final ConfiguredStatement<? extends Statement> statement,
final KsqlExecutionContext context) {
return create(statement, context.getServiceContext(), context);
} | @Test
public void shouldValidateTerminateQuery() {
// Given:
givenTerminate();
// When:
commandFactory.create(configuredStatement, executionContext);
// Then:
verify(executionContext).getPersistentQuery(QUERY_ID);
verify(query1).close();
} |
public static <T> RestResult<T> failed() {
return RestResult.<T>builder().withCode(500).build();
} | @Test
void testFailedWithCode() {
RestResult<String> restResult = RestResultUtils.failed(400, "content");
assertRestResult(restResult, 400, null, "content", false);
} |
@Override
public boolean expireEntry(K key, Duration ttl) {
return get(expireEntryAsync(key, ttl));
} | @Test
public void testExpireEntry() {
RMapCacheNative<String, String> testMap = redisson.getMapCacheNative("map");
testMap.put("key", "value");
testMap.expireEntry("key", Duration.ofMillis(20000));
assertThat(testMap.remainTimeToLive("key")).isBetween(19800L, 20000L);
} |
public int orCardinality(BitmapCollection bitmaps) {
ImmutableRoaringBitmap left = reduceInternal();
ImmutableRoaringBitmap right = bitmaps.reduceInternal();
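// With inverted operands the union size follows from De Morgan plus inclusion-exclusion:
//   |A ∪ ~B|  = numDocs - |~A ∩ B|  = numDocs - |B| + |A ∩ B|
//   |~A ∪ B|  = numDocs - |A ∩ ~B|  = numDocs - |A| + |A ∩ B|
//   |~A ∪ ~B| = numDocs - |A ∩ B|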
if (!_inverted) {
if (!bitmaps._inverted) {
return ImmutableRoaringBitmap.orCardinality(left, right);
}
return _numDocs - right.getCardinality() + ImmutableRoaringBitmap.andCardinality(left, right);
} else {
if (!bitmaps._inverted) {
return _numDocs - left.getCardinality()
+ ImmutableRoaringBitmap.andCardinality(right, left);
}
return _numDocs - ImmutableRoaringBitmap.andCardinality(left, right);
}
} | @Test(dataProvider = "orCardinalityTestCases")
public void testOrCardinality(int numDocs, ImmutableRoaringBitmap left, boolean leftInverted,
ImmutableRoaringBitmap right, boolean rightInverted, int expected) {
assertEquals(new BitmapCollection(numDocs, leftInverted, left).orCardinality(
new BitmapCollection(numDocs, rightInverted, right)), expected);
assertEquals(new BitmapCollection(numDocs, leftInverted, split(left)).orCardinality(
new BitmapCollection(numDocs, rightInverted, right)), expected);
assertEquals(new BitmapCollection(numDocs, leftInverted, left).orCardinality(
new BitmapCollection(numDocs, rightInverted, split(right))), expected);
assertEquals(new BitmapCollection(numDocs, leftInverted, split(left)).orCardinality(
new BitmapCollection(numDocs, rightInverted, split(right))), expected);
} |
public boolean eval(ContentFile<?> file) {
// TODO: detect the case where a column is missing from the file using file's max field id.
return new MetricsEvalVisitor().eval(file);
} | @Test
public void testIntegerNotIn() {
boolean shouldRead =
new StrictMetricsEvaluator(SCHEMA, notIn("id", INT_MIN_VALUE - 25, INT_MIN_VALUE - 24))
.eval(FILE);
assertThat(shouldRead).as("Should match: all values !=5 and !=6").isTrue();
shouldRead =
new StrictMetricsEvaluator(SCHEMA, notIn("id", INT_MIN_VALUE - 1, INT_MIN_VALUE))
.eval(FILE);
assertThat(shouldRead).as("Should not match: some values may be == 30").isFalse();
shouldRead =
new StrictMetricsEvaluator(SCHEMA, notIn("id", INT_MAX_VALUE - 4, INT_MAX_VALUE - 3))
.eval(FILE);
assertThat(shouldRead).as("Should not match: some value may be == 75 or == 76").isFalse();
shouldRead =
new StrictMetricsEvaluator(SCHEMA, notIn("id", INT_MAX_VALUE, INT_MAX_VALUE + 1))
.eval(FILE);
assertThat(shouldRead).as("Should not match: some value may be == 79").isFalse();
shouldRead =
new StrictMetricsEvaluator(SCHEMA, notIn("id", INT_MAX_VALUE + 1, INT_MAX_VALUE + 2))
.eval(FILE);
assertThat(shouldRead).as("Should match: no values == 80 or == 81").isTrue();
shouldRead = new StrictMetricsEvaluator(SCHEMA, notIn("always_5", 5, 6)).eval(FILE);
assertThat(shouldRead).as("Should not match: all values == 5").isFalse();
shouldRead = new StrictMetricsEvaluator(SCHEMA, notIn("all_nulls", "abc", "def")).eval(FILE);
assertThat(shouldRead).as("Should match: notIn on all nulls column").isTrue();
shouldRead = new StrictMetricsEvaluator(SCHEMA, notIn("some_nulls", "abc", "def")).eval(FILE_3);
assertThat(shouldRead)
.as("Should match: notIn on some nulls column, 'bbb' > 'abc' and 'bbb' < 'def'")
.isTrue();
shouldRead = new StrictMetricsEvaluator(SCHEMA, notIn("no_nulls", "abc", "def")).eval(FILE);
assertThat(shouldRead).as("Should not match: no_nulls field does not have bounds").isFalse();
} |
@Override
public Optional<ProfileDescription> compare(final ProfileDescription next) {
// Find the profile with a matching checksum
final Optional<ProfileDescription> found = repository.stream()
.filter(description -> Objects.equals(description.getChecksum(), next.getChecksum()))
.findFirst();
if(found.isPresent()) {
// Found matching checksum. Determine if latest version
if(found.get().isLatest()) {
// Latest version already installed
return Optional.empty();
}
else {
// Read last profile version from server as we found matching checksum for previous version
return found;
}
}
log.warn(String.format("Local only profile %s", next));
return Optional.empty();
} | @Test
public void testEqual() throws Exception {
// Managed profile
final ProfileDescription remote = new ProfileDescription(
ProtocolFactory.get(), new Checksum(HashAlgorithm.md5, "d41d8cd98f00b204e9800998ecf8427e"), null) {
@Override
public boolean isLatest() {
return true;
}
};
final ProfileDescription local = new ProfileDescription(
ProtocolFactory.get(), new Checksum(HashAlgorithm.md5, "d41d8cd98f00b204e9800998ecf8427e"), null);
assertFalse(new ChecksumProfileMatcher(Stream.of(remote).collect(Collectors.toSet())).compare(local).isPresent());
} |
@Override
public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) {
SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
if (sqlStatement instanceof ShowStatement) {
return Optional.of(new PostgreSQLShowVariableExecutor((ShowStatement) sqlStatement));
}
return Optional.empty();
} | @Test
void assertCreateWithSelectTablespace() {
SQLStatement sqlStatement = parseSQL(PSQL_SELECT_TABLESPACES);
SelectStatementContext selectStatementContext = mock(SelectStatementContext.class);
when(selectStatementContext.getSqlStatement()).thenReturn((SelectStatement) sqlStatement);
Optional<DatabaseAdminExecutor> actual = new PostgreSQLAdminExecutorCreator().create(selectStatementContext, PSQL_SELECT_TABLESPACES, "", Collections.emptyList());
assertTrue(actual.isPresent());
} |
public boolean isShallowClone() {
return shallowClone;
} | @Test
void byDefaultShallowCloneShouldBeOff() {
assertFalse(git("http://url", "foo").isShallowClone());
assertFalse(git("http://url", "foo", false).isShallowClone());
assertFalse(git("http://url", "foo", null).isShallowClone());
assertTrue(git("http://url", "foo", true).isShallowClone());
} |
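// Plain addition is commutative and associative, so the aggregate hash is independent of the
// order in which member hashes are folded in; that is the property the paired test checks.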
static int addHash(int originalHash, int addedHash) {
return originalHash + HUGE_PRIME * addedHash;
} | @Test
public void testAddHashIsAssociative() {
int hash = MerkleTreeUtil.addHash(0, 1);
hash = MerkleTreeUtil.addHash(hash, 2);
hash = MerkleTreeUtil.addHash(hash, 3);
int hash2 = MerkleTreeUtil.addHash(0, 3);
hash2 = MerkleTreeUtil.addHash(hash2, 1);
hash2 = MerkleTreeUtil.addHash(hash2, 2);
assertEquals(hash2, hash);
} |
@Override
public void execute(GraphModel graphModel) {
Graph graph = graphModel.getGraphVisible();
execute(graph);
} | @Test
public void testColumnReplace() {
GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
graphModel.getNodeTable().addColumn(WeightedDegree.WDEGREE, String.class);
WeightedDegree d = new WeightedDegree();
d.execute(graphModel);
} |
public static <T> List<T> parseArray(String text, Class<T> clazz) {
if (StringUtil.isBlank(text)) {
return Collections.emptyList();
}
return JSON_FACADE.parseArray(text, clazz);
} | @Test
public void assertParseArray() {
Assert.assertEquals(Collections.emptyList(), JSONUtil.parseArray(null, Foo.class));
Assert.assertEquals(Collections.emptyList(), JSONUtil.parseArray(" ", Foo.class));
Assert.assertEquals(
EXPECTED_FOO_ARRAY,
JSONUtil.parseArray(EXPECTED_FOO_JSON_ARRAY, Foo.class));
} |
@Override
public Output run(RunContext runContext) throws Exception {
URI from = new URI(runContext.render(this.from));
final PebbleExpressionPredicate predicate = getExpressionPredication(runContext);
final Path path = runContext.workingDir().createTempFile(".ion");
long processedItemsTotal = 0L;
long droppedItemsTotal = 0L;
try (final BufferedWriter writer = Files.newBufferedWriter(path);
final BufferedReader reader = newBufferedReader(runContext, from)) {
String item;
while ((item = reader.readLine()) != null) {
IllegalVariableEvaluationException exception = null;
Boolean match = null;
try {
match = predicate.apply(item);
} catch (IllegalVariableEvaluationException e) {
exception = e;
}
FilterType action = this.filterType;
if (match == null) {
switch (errorOrNullBehavior) {
case FAIL -> {
if (exception != null) {
throw exception;
} else {
throw new IllegalVariableEvaluationException(String.format(
"Expression `%s` return `null` on item `%s`",
filterCondition,
item
));
}
}
case INCLUDE -> action = FilterType.INCLUDE;
case EXCLUDE -> action = FilterType.EXCLUDE;
}
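// Treat the item as matched so the reversal below cannot flip the action forced for
// error/null results.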
match = true;
}
if (!match) {
action = action.reverse();
}
switch (action) {
case INCLUDE -> {
writer.write(item);
writer.newLine();
}
case EXCLUDE -> droppedItemsTotal++;
}
processedItemsTotal++;
}
}
URI uri = runContext.storage().putFile(path.toFile());
return Output.builder()
.uri(uri)
.processedItemsTotal(processedItemsTotal)
.droppedItemsTotal(droppedItemsTotal)
.build();
} | @Test
void shouldFilterGivenInvalidRecordsForInclude() throws Exception {
// Given
RunContext runContext = runContextFactory.of();
FilterItems task = FilterItems
.builder()
.from(generateKeyValueFile(TEST_INVALID_ITEMS, runContext).toString())
.filterCondition(" {{ value % 2 == 0 }}")
.filterType(FilterItems.FilterType.INCLUDE)
.errorOrNullBehavior(FilterItems.ErrorOrNullBehavior.INCLUDE)
.build();
// When
FilterItems.Output output = task.run(runContext);
// Then
Assertions.assertNotNull(output);
Assertions.assertNotNull(output.getUri());
Assertions.assertEquals(2, output.getDroppedItemsTotal());
Assertions.assertEquals(4, output.getProcessedItemsTotal());
assertFile(runContext, output, List.of(new KeyValue("k2", "dummy"), new KeyValue("k4", 4)), KeyValue.class);
} |
@CanIgnoreReturnValue
@Override
public V put(K key, V value) {
if (key == null) {
throw new NullPointerException("key == null");
}
if (value == null && !allowNullValues) {
throw new NullPointerException("value == null");
}
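// find(key, true) creates the node if absent; with the default natural-order comparator this
// is where a key that does not implement Comparable fails with a ClassCastException.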
Node<K, V> created = find(key, true);
V result = created.value;
created.value = value;
return result;
} | @Test
@SuppressWarnings("ModifiedButNotUsed")
public void testPutNonComparableKeyFails() {
LinkedTreeMap<Object, String> map = new LinkedTreeMap<>();
assertThrows(ClassCastException.class, () -> map.put(new Object(), "android"));
} |