focal_method (string, length 13 to 60.9k) | test_case (string, length 25 to 109k)
---|---|
public static Map<String, String> loadProperties(final File propertiesFile) {
return loadProperties(ImmutableList.of(propertiesFile));
}
|
@Test
public void shouldLoadPropsFromFile() {
// Given:
givenPropsFileContains(
"# Comment" + System.lineSeparator()
+ "some.prop=some value" + System.lineSeparator()
+ "some.other.prop=124" + System.lineSeparator()
);
// When:
final Map<String, String> result = PropertiesUtil.loadProperties(propsFile);
// Then:
assertThat(result.get("some.prop"), is("some value"));
assertThat(result.get("some.other.prop"), is("124"));
}
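The single-file overload above is pure delegation to the list-based overload. A minimal companion check, assuming the same givenPropsFileContains/propsFile fixture as the test above, could verify that both entry points agree:
@Test
public void shouldMatchListOverload() {
// Given:
givenPropsFileContains("some.prop=some value" + System.lineSeparator());
// When: load through both entry points
final Map<String, String> viaFile = PropertiesUtil.loadProperties(propsFile);
final Map<String, String> viaList = PropertiesUtil.loadProperties(ImmutableList.of(propsFile));
// Then: the single-file overload simply wraps the file in a list
assertThat(viaFile, is(viaList));
}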
|
public BlockLease flatten(Block block)
{
requireNonNull(block, "block is null");
if (block instanceof DictionaryBlock) {
return flattenDictionaryBlock((DictionaryBlock) block);
}
if (block instanceof RunLengthEncodedBlock) {
return flattenRunLengthEncodedBlock((RunLengthEncodedBlock) block);
}
return newLease(block);
}
|
@Test
public void testIntArrayIdentityDecode()
{
Block block = createIntArrayBlock(1, 2, 3, 4);
try (BlockLease blockLease = flattener.flatten(block)) {
Block flattenedBlock = blockLease.get();
assertSame(flattenedBlock, block);
}
}
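flatten() also decodes RunLengthEncodedBlock inputs. A hedged sketch of a companion test follows; that the flattened block is a different instance with the same position count is an assumption about the flattener's contract, not shown above:
@Test
public void testRunLengthEncodedDecode()
{
// wrap a single-value block in a run-length encoding of four positions
Block rle = new RunLengthEncodedBlock(createIntArrayBlock(7), 4);
try (BlockLease blockLease = flattener.flatten(rle)) {
Block flattenedBlock = blockLease.get();
// the RLE wrapper should be decoded rather than returned as-is
assertNotSame(flattenedBlock, rle);
assertEquals(flattenedBlock.getPositionCount(), rle.getPositionCount());
}
}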
|
public static KubernetesJobManagerSpecification buildKubernetesJobManagerSpecification(
FlinkPod podTemplate, KubernetesJobManagerParameters kubernetesJobManagerParameters)
throws IOException {
FlinkPod flinkPod = Preconditions.checkNotNull(podTemplate).copy();
List<HasMetadata> accompanyingResources = new ArrayList<>();
final List<KubernetesStepDecorator> stepDecorators =
new ArrayList<>(
Arrays.asList(
new InitJobManagerDecorator(kubernetesJobManagerParameters),
new EnvSecretsDecorator(kubernetesJobManagerParameters),
new MountSecretsDecorator(kubernetesJobManagerParameters),
new CmdJobManagerDecorator(kubernetesJobManagerParameters),
new InternalServiceDecorator(kubernetesJobManagerParameters),
new ExternalServiceDecorator(kubernetesJobManagerParameters)));
Configuration configuration = kubernetesJobManagerParameters.getFlinkConfiguration();
if (configuration.get(KUBERNETES_HADOOP_CONF_MOUNT_DECORATOR_ENABLED)) {
stepDecorators.add(new HadoopConfMountDecorator(kubernetesJobManagerParameters));
}
if (configuration.get(KUBERNETES_KERBEROS_MOUNT_DECORATOR_ENABLED)) {
stepDecorators.add(new KerberosMountDecorator(kubernetesJobManagerParameters));
}
stepDecorators.addAll(
Arrays.asList(
new FlinkConfMountDecorator(kubernetesJobManagerParameters),
new PodTemplateMountDecorator(kubernetesJobManagerParameters)));
for (KubernetesStepDecorator stepDecorator : stepDecorators) {
flinkPod = stepDecorator.decorateFlinkPod(flinkPod);
accompanyingResources.addAll(stepDecorator.buildAccompanyingKubernetesResources());
}
final Deployment deployment =
createJobManagerDeployment(flinkPod, kubernetesJobManagerParameters);
return new KubernetesJobManagerSpecification(deployment, accompanyingResources);
}
|
@Test
void testDeploymentSpec() throws IOException {
kubernetesJobManagerSpecification =
KubernetesJobManagerFactory.buildKubernetesJobManagerSpecification(
flinkPod, kubernetesJobManagerParameters);
final DeploymentSpec resultDeploymentSpec =
this.kubernetesJobManagerSpecification.getDeployment().getSpec();
assertThat(resultDeploymentSpec.getReplicas().intValue()).isEqualTo(1);
final Map<String, String> expectedLabels = new HashMap<>(getCommonLabels());
expectedLabels.put(Constants.LABEL_COMPONENT_KEY, Constants.LABEL_COMPONENT_JOB_MANAGER);
assertThat(resultDeploymentSpec.getSelector().getMatchLabels()).isEqualTo(expectedLabels);
expectedLabels.putAll(userLabels);
assertThat(resultDeploymentSpec.getTemplate().getMetadata().getLabels())
.isEqualTo(expectedLabels);
assertThat(resultDeploymentSpec.getTemplate().getMetadata().getAnnotations())
.isEqualTo(userAnnotations);
assertThat(resultDeploymentSpec.getTemplate().getSpec()).isNotNull();
}
|
@Override
public String route(final ReadwriteSplittingDataSourceGroupRule rule) {
switch (rule.getTransactionalReadQueryStrategy()) {
case FIXED:
if (!connectionContext.getTransactionContext().getReadWriteSplitReplicaRoute().isPresent()) {
connectionContext.getTransactionContext().setReadWriteSplitReplicaRoute(standardRouter.route(rule));
}
return connectionContext.getTransactionContext().getReadWriteSplitReplicaRoute().get();
case DYNAMIC:
return standardRouter.route(rule);
case PRIMARY:
default:
return rule.getWriteDataSource();
}
}
|
@Test
void assertRoute() {
ReadwriteSplittingDataSourceGroupRuleConfiguration dataSourceGroupConfig = new ReadwriteSplittingDataSourceGroupRuleConfiguration(
"test_config", "write_ds", Arrays.asList("read_ds_0", "read_ds_1"), null);
ReadwriteSplittingDataSourceGroupRule rule;
rule = new ReadwriteSplittingDataSourceGroupRule(dataSourceGroupConfig, TransactionalReadQueryStrategy.PRIMARY, null);
assertThat(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(new ConnectionContext(Collections::emptySet)).route(rule), is("write_ds"));
rule = new ReadwriteSplittingDataSourceGroupRule(dataSourceGroupConfig, TransactionalReadQueryStrategy.FIXED, new RoundRobinLoadBalanceAlgorithm());
assertThat(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(new ConnectionContext(Collections::emptySet)).route(rule), is("read_ds_0"));
rule = new ReadwriteSplittingDataSourceGroupRule(dataSourceGroupConfig, TransactionalReadQueryStrategy.DYNAMIC, new RoundRobinLoadBalanceAlgorithm());
assertThat(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(new ConnectionContext(Collections::emptySet)).route(rule), is("read_ds_0"));
}
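Since the FIXED branch caches the first routed replica in the transaction context, a follow-up sketch (reusing the configuration from the test above) can pin that behaviour down: routing twice through the same ConnectionContext must return the same replica, even with a round-robin balancer.
@Test
void assertFixedStrategyPinsReplica() {
ReadwriteSplittingDataSourceGroupRuleConfiguration dataSourceGroupConfig = new ReadwriteSplittingDataSourceGroupRuleConfiguration(
"test_config", "write_ds", Arrays.asList("read_ds_0", "read_ds_1"), null);
ReadwriteSplittingDataSourceGroupRule rule = new ReadwriteSplittingDataSourceGroupRule(dataSourceGroupConfig, TransactionalReadQueryStrategy.FIXED, new RoundRobinLoadBalanceAlgorithm());
QualifiedReadwriteSplittingTransactionalDataSourceRouter router = new QualifiedReadwriteSplittingTransactionalDataSourceRouter(new ConnectionContext(Collections::emptySet));
// the first route is cached in the transaction context, so the second call must match it
assertThat(router.route(rule), is(router.route(rule)));
}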
|
@SuppressWarnings("OptionalGetWithoutIsPresent") // Enforced by type
@Override
public StreamsMaterializedWindowedTable windowed() {
if (!windowInfo.isPresent()) {
throw new UnsupportedOperationException("Table has non-windowed key");
}
final WindowInfo wndInfo = windowInfo.get();
final WindowType wndType = wndInfo.getType();
switch (wndType) {
case SESSION:
return new KsMaterializedSessionTable(stateStore,
SessionStoreCacheBypass::fetch, SessionStoreCacheBypass::fetchRange);
case HOPPING:
case TUMBLING:
return new KsMaterializedWindowTable(stateStore, wndInfo.getSize().get(),
WindowStoreCacheBypass::fetch,
WindowStoreCacheBypass::fetchAll,
WindowStoreCacheBypass::fetchRange);
default:
throw new UnsupportedOperationException("Unknown window type: " + wndInfo);
}
}
|
@Test
public void shouldReturnWindowedForTumbling() {
// Given:
givenWindowType(Optional.of(WindowType.TUMBLING));
// When:
final StreamsMaterializedWindowedTable table = materialization.windowed();
// Then:
assertThat(table, is(instanceOf(KsMaterializedWindowTable.class)));
}
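The switch in windowed() has a SESSION outcome as well; a sketch of the companion test, assuming the same givenWindowType/materialization fixture as above:
@Test
public void shouldReturnWindowedForSession() {
// Given:
givenWindowType(Optional.of(WindowType.SESSION));
// When:
final StreamsMaterializedWindowedTable table = materialization.windowed();
// Then:
assertThat(table, is(instanceOf(KsMaterializedSessionTable.class)));
}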
|
@Override
public SofaResponse invoke(FilterInvoker invoker, SofaRequest request) throws SofaRpcException {
throw new UnsupportedOperationException();
}
|
@Test
public void invoke() throws Exception {
boolean error = false;
try {
new ExcludeFilter("*").invoke(null, null);
} catch (Exception e) {
error = e instanceof UnsupportedOperationException;
}
Assert.assertTrue(error);
}
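The flag-and-catch pattern above can be stated more directly with Assert.assertThrows (available in JUnit 4.13 and later); a sketch:
@Test
public void invokeViaAssertThrows() {
Assert.assertThrows(UnsupportedOperationException.class,
() -> new ExcludeFilter("*").invoke(null, null));
}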
|
protected void populateSettings(CliParser cli) throws InvalidSettingException {
final File propertiesFile = cli.getFileArgument(CliParser.ARGUMENT.PROP);
if (propertiesFile != null) {
try {
settings.mergeProperties(propertiesFile);
} catch (FileNotFoundException ex) {
throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex);
} catch (IOException ex) {
throw new InvalidSettingException("Error reading properties file '" + propertiesFile.getPath() + "'", ex);
}
}
final String dataDirectory = cli.getStringArgument(CliParser.ARGUMENT.DATA_DIRECTORY);
if (dataDirectory != null) {
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory);
} else if (System.getProperty("basedir") != null) {
final File dataDir = new File(System.getProperty("basedir"), "data");
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
} else {
final File jarPath = new File(App.class
.getProtectionDomain().getCodeSource().getLocation().getPath());
final File base = jarPath.getParentFile();
final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY);
final File dataDir = new File(base, sub);
settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath());
}
final Boolean autoUpdate = cli.hasOption(CliParser.ARGUMENT.DISABLE_AUTO_UPDATE) != null ? false : null;
settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate);
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER,
cli.getStringArgument(CliParser.ARGUMENT.PROXY_SERVER));
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT,
cli.getStringArgument(CliParser.ARGUMENT.PROXY_PORT));
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME,
cli.getStringArgument(CliParser.ARGUMENT.PROXY_USERNAME));
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD,
cli.getStringArgument(CliParser.ARGUMENT.PROXY_PASSWORD, Settings.KEYS.PROXY_PASSWORD));
settings.setStringIfNotEmpty(Settings.KEYS.PROXY_NON_PROXY_HOSTS,
cli.getStringArgument(CliParser.ARGUMENT.NON_PROXY_HOSTS));
settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT,
cli.getStringArgument(CliParser.ARGUMENT.CONNECTION_TIMEOUT));
settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_READ_TIMEOUT,
cli.getStringArgument(CliParser.ARGUMENT.CONNECTION_READ_TIMEOUT));
settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE,
cli.getStringArgument(CliParser.ARGUMENT.HINTS_FILE));
settings.setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE,
cli.getStringArguments(CliParser.ARGUMENT.SUPPRESSION_FILES));
//File Type Analyzer Settings
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED,
cli.hasOption(CliParser.ARGUMENT.EXPERIMENTAL));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIRED_ENABLED,
cli.hasOption(CliParser.ARGUMENT.RETIRED));
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_GOLANG_PATH,
cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_GO));
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_YARN_PATH,
cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_YARN));
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_PNPM_PATH,
cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_PNPM));
settings.setBooleanIfNotNull(Settings.KEYS.PRETTY_PRINT,
cli.hasOption(CliParser.ARGUMENT.PRETTY_PRINT));
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_REPO_JS_URL,
cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_URL));
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_REPO_JS_USER,
cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_URL_USER));
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_REPO_JS_PASSWORD,
cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_URL_PASSWORD));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_FORCEUPDATE,
cli.hasOption(CliParser.ARGUMENT.RETIRE_JS_FORCEUPDATE));
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_FILTERS,
cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_FILTERS));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_FILTER_NON_VULNERABLE,
cli.hasOption(CliParser.ARGUMENT.RETIREJS_FILTER_NON_VULNERABLE));
settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_JAR, Settings.KEYS.ANALYZER_JAR_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_MSBUILD_PROJECT_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_MSBUILD, Settings.KEYS.ANALYZER_MSBUILD_PROJECT_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_ARCHIVE, Settings.KEYS.ANALYZER_ARCHIVE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_KNOWN_EXPLOITED_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_KEV, Settings.KEYS.ANALYZER_KNOWN_EXPLOITED_ENABLED));
settings.setStringIfNotNull(Settings.KEYS.KEV_URL,
cli.getStringArgument(CliParser.ARGUMENT.KEV_URL));
settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_PY_DIST, Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_PY_PKG, Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_AUTOCONF, Settings.KEYS.ANALYZER_AUTOCONF_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_MAVEN_INSTALL_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_MAVEN_INSTALL, Settings.KEYS.ANALYZER_MAVEN_INSTALL_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_PIP_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_PIP, Settings.KEYS.ANALYZER_PIP_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_PIPFILE_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_PIPFILE, Settings.KEYS.ANALYZER_PIPFILE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_POETRY_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_POETRY, Settings.KEYS.ANALYZER_POETRY_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_CMAKE, Settings.KEYS.ANALYZER_CMAKE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_NUSPEC, Settings.KEYS.ANALYZER_NUSPEC_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_NUGETCONF_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_NUGETCONF, Settings.KEYS.ANALYZER_NUGETCONF_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_ASSEMBLY, Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_BUNDLE_AUDIT, Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_FILE_NAME_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_FILENAME, Settings.KEYS.ANALYZER_FILE_NAME_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_MIX_AUDIT_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_MIX_AUDIT, Settings.KEYS.ANALYZER_MIX_AUDIT_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_OPENSSL, Settings.KEYS.ANALYZER_OPENSSL_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_COMPOSER, Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CPANFILE_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_CPAN, Settings.KEYS.ANALYZER_CPANFILE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_GOLANG_DEP_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_GO_DEP, Settings.KEYS.ANALYZER_GOLANG_DEP_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_GOLANG_MOD_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_GOLANG_MOD, Settings.KEYS.ANALYZER_GOLANG_MOD_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_DART_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_DART, Settings.KEYS.ANALYZER_DART_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_NODE_JS, Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_AUDIT_ENABLED,
!cli.isNodeAuditDisabled());
settings.setBoolean(Settings.KEYS.ANALYZER_YARN_AUDIT_ENABLED,
!cli.isYarnAuditDisabled());
settings.setBoolean(Settings.KEYS.ANALYZER_PNPM_AUDIT_ENABLED,
!cli.isPnpmAuditDisabled());
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_AUDIT_USE_CACHE,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_NODE_AUDIT_CACHE, Settings.KEYS.ANALYZER_NODE_AUDIT_USE_CACHE));
settings.setBoolean(Settings.KEYS.ANALYZER_RETIREJS_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_RETIRE_JS, Settings.KEYS.ANALYZER_RETIREJS_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_SWIFT, Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_RESOLVED_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_SWIFT_RESOLVED, Settings.KEYS.ANALYZER_SWIFT_PACKAGE_RESOLVED_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_COCOAPODS, Settings.KEYS.ANALYZER_COCOAPODS_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CARTHAGE_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_CARTHAGE, Settings.KEYS.ANALYZER_CARTHAGE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_RUBYGEMS, Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_CENTRAL, Settings.KEYS.ANALYZER_CENTRAL_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_USE_CACHE,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_CENTRAL_CACHE, Settings.KEYS.ANALYZER_CENTRAL_USE_CACHE));
settings.setBoolean(Settings.KEYS.ANALYZER_OSSINDEX_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_OSSINDEX, Settings.KEYS.ANALYZER_OSSINDEX_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_OSSINDEX_USE_CACHE,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_OSSINDEX_CACHE, Settings.KEYS.ANALYZER_OSSINDEX_USE_CACHE));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_SKIPDEV,
cli.hasOption(CliParser.ARGUMENT.NODE_PACKAGE_SKIP_DEV_DEPENDENCIES));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_AUDIT_SKIPDEV,
cli.hasOption(CliParser.ARGUMENT.DISABLE_NODE_AUDIT_SKIPDEV));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED,
cli.hasOption(CliParser.ARGUMENT.ENABLE_NEXUS));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL,
cli.getStringArgument(CliParser.ARGUMENT.CENTRAL_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_URL,
cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_USER,
cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_USERNAME));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_PASSWORD,
cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_PASSWORD, Settings.KEYS.ANALYZER_OSSINDEX_PASSWORD));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS,
cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS,
Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS));
settings.setFloat(Settings.KEYS.JUNIT_FAIL_ON_CVSS,
cli.getFloatArgument(CliParser.ARGUMENT.FAIL_JUNIT_ON_CVSS, 0));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARTIFACTORY_ENABLED,
cli.hasOption(CliParser.ARGUMENT.ARTIFACTORY_ENABLED));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARTIFACTORY_PARALLEL_ANALYSIS,
cli.getBooleanArgument(CliParser.ARGUMENT.ARTIFACTORY_PARALLEL_ANALYSIS));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARTIFACTORY_USES_PROXY,
cli.getBooleanArgument(CliParser.ARGUMENT.ARTIFACTORY_USES_PROXY));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_URL,
cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_API_USERNAME,
cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_USERNAME));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_API_TOKEN,
cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_API_TOKEN));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_BEARER_TOKEN,
cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_BEARER_TOKEN));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_MIX_AUDIT_PATH,
cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_MIX_AUDIT));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH,
cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_BUNDLE_AUDIT));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_WORKING_DIRECTORY,
cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_BUNDLE_AUDIT_WORKING_DIRECTORY));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL,
cli.getStringArgument(CliParser.ARGUMENT.NEXUS_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_USER,
cli.getStringArgument(CliParser.ARGUMENT.NEXUS_USERNAME));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_PASSWORD,
cli.getStringArgument(CliParser.ARGUMENT.NEXUS_PASSWORD, Settings.KEYS.ANALYZER_NEXUS_PASSWORD));
//TODO deprecate this in favor of non-proxy host
final boolean nexusUsesProxy = cli.isNexusUsesProxy();
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME,
cli.getStringArgument(CliParser.ARGUMENT.DB_DRIVER));
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH,
cli.getStringArgument(CliParser.ARGUMENT.DB_DRIVER_PATH));
settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING,
cli.getStringArgument(CliParser.ARGUMENT.CONNECTION_STRING));
settings.setStringIfNotEmpty(Settings.KEYS.DB_USER,
cli.getStringArgument(CliParser.ARGUMENT.DB_NAME));
settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD,
cli.getStringArgument(CliParser.ARGUMENT.DB_PASSWORD, Settings.KEYS.DB_PASSWORD));
settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS,
cli.getStringArgument(CliParser.ARGUMENT.ADDITIONAL_ZIP_EXTENSIONS));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_DOTNET_PATH,
cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_CORE));
String key = cli.getStringArgument(CliParser.ARGUMENT.NVD_API_KEY);
if (key != null) {
if ((key.startsWith("\"") && key.endsWith("\"") || (key.startsWith("'") && key.endsWith("'")))) {
key = key.substring(1, key.length() - 1);
}
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_KEY, key);
}
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_ENDPOINT,
cli.getStringArgument(CliParser.ARGUMENT.NVD_API_ENDPOINT));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_DELAY, cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_DELAY));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_RESULTS_PER_PAGE, cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_RESULTS_PER_PAGE));
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_DATAFEED_URL, cli.getStringArgument(CliParser.ARGUMENT.NVD_API_DATAFEED_URL));
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_DATAFEED_USER, cli.getStringArgument(CliParser.ARGUMENT.NVD_API_DATAFEED_USER));
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_DATAFEED_PASSWORD, cli.getStringArgument(CliParser.ARGUMENT.NVD_API_DATAFEED_PASSWORD));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_MAX_RETRY_COUNT, cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_MAX_RETRY_COUNT));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_VALID_FOR_HOURS, cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_VALID_FOR_HOURS));
settings.setStringIfNotNull(Settings.KEYS.HOSTED_SUPPRESSIONS_URL,
cli.getStringArgument(CliParser.ARGUMENT.HOSTED_SUPPRESSIONS_URL));
settings.setBoolean(Settings.KEYS.HOSTED_SUPPRESSIONS_ENABLED,
!cli.isDisabled(CliParser.ARGUMENT.DISABLE_HOSTED_SUPPRESSIONS, Settings.KEYS.HOSTED_SUPPRESSIONS_ENABLED));
settings.setBooleanIfNotNull(Settings.KEYS.HOSTED_SUPPRESSIONS_FORCEUPDATE,
cli.hasOption(CliParser.ARGUMENT.HOSTED_SUPPRESSIONS_FORCEUPDATE));
settings.setIntIfNotNull(Settings.KEYS.HOSTED_SUPPRESSIONS_VALID_FOR_HOURS,
cli.getIntegerValue(CliParser.ARGUMENT.HOSTED_SUPPRESSIONS_VALID_FOR_HOURS));
}
|
@Test
public void testPopulatingSuppressionSettingsWithMultipleFiles() throws Exception {
// GIVEN CLI properties with the mandatory arguments
File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
// AND multiple suppression files
String[] args = {"-P", prop.getAbsolutePath(), "--suppression", "first-file.xml", "--suppression", "another-file.xml"};
// WHEN parsing the CLI arguments
final CliParser cli = new CliParser(getSettings());
cli.parse(args);
final App classUnderTest = new App(getSettings());
classUnderTest.populateSettings(cli);
// THEN the suppression files are set in the settings for use in the application core
assertThat("Expected the suppression files to be set in the Settings with a separator", getSettings().getString(KEYS.SUPPRESSION_FILE), is("[\"first-file.xml\",\"another-file.xml\"]"));
}
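populateSettings also strips a matching pair of surrounding quotes from the NVD API key before storing it. A hedged sketch of a companion test, mirroring the fixture above; the --nvdApiKey flag name is an assumption:
@Test
public void testNvdApiKeyQuoteStripping() throws Exception {
// hypothetical CLI flag for CliParser.ARGUMENT.NVD_API_KEY
String[] args = {"--nvdApiKey", "'my-secret-key'"};
final CliParser cli = new CliParser(getSettings());
cli.parse(args);
final App classUnderTest = new App(getSettings());
classUnderTest.populateSettings(cli);
// the surrounding single quotes must be removed before the key is stored
assertThat(getSettings().getString(KEYS.NVD_API_KEY), is("my-secret-key"));
}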
|
public void putMulti(MultiItem<T> items) throws Exception {
putMulti(items, 0, null);
}
|
@Test
public void testPutMulti() throws Exception {
final int itemQty = 100;
DistributedQueue<TestQueueItem> queue = null;
CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1));
client.start();
try {
BlockingQueueConsumer<TestQueueItem> consumer =
new BlockingQueueConsumer<>(new DummyConnectionStateListener());
queue = QueueBuilder.builder(client, consumer, serializer, QUEUE_PATH)
.buildQueue();
queue.start();
MultiItem<TestQueueItem> items = new MultiItem<TestQueueItem>() {
private int index = 0;
@Override
public TestQueueItem nextItem() throws Exception {
if (index >= itemQty) {
return null;
}
return new TestQueueItem(Integer.toString(index++));
}
};
queue.putMulti(items);
for (int i = 0; i < itemQty; ++i) {
TestQueueItem queueItem = consumer.take(1, TimeUnit.SECONDS);
assertNotNull(queueItem);
assertEquals(queueItem, new TestQueueItem(Integer.toString(i)));
}
} finally {
CloseableUtils.closeQuietly(queue);
CloseableUtils.closeQuietly(client);
}
}
|
@Override
public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException {
if ( stepData.hasDBCondition ) {
// in this case there was actually no point in executing the SELECT against the db;
// this should be reported as a potential improvement
return null;
}
SearchingContext context = new SearchingContext();
context.init( keys.length );
for ( Index index : indexes ) {
int column = index.getColumn();
// the IS (NOT) NULL operation does not require a second argument,
// hence lookupValue can be absent;
// the index ignores both meta and value in that case, so it is safe to pass whatever we have
Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null;
index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue );
if ( context.isEmpty() ) {
// if nothing matches, break the search
return null;
}
}
// iterate through all elements that survived the filtering stage
// and find the first match
BitSet candidates = context.getCandidates();
int candidate = candidates.nextSetBit( 0 );
while ( candidate != -1 ) {
Object[] dataKeys = keys[ candidate ];
boolean matches = true;
int lookupShift = 0;
for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) {
int[] columnConditionPair = otherConditions[ i ];
final int column = columnConditionPair[ 0 ];
Object keyData = dataKeys[ column ];
ValueMetaInterface keyMeta = keysMeta.getValueMeta( column );
int lookupIndex = column + lookupShift;
Object cmpData = lookupRow[ lookupIndex ];
ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex );
int condition = columnConditionPair[ 1 ];
if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) {
// BETWEEN is a special condition demanding two arguments;
// technically there are no obstacles to implementing it,
// as it is just a short form of (a <= b) && (b <= c);
// however, let it be so for now
matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 );
if ( matches ) {
lookupShift++;
lookupIndex++;
ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex );
Object cmpData2 = lookupRow[ lookupIndex ];
matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 );
}
} else {
// if not BETWEEN, then it is LIKE (or some new operator)
// for now, LIKE is not supported here
matches = false;
stepData.hasDBCondition = true;
}
}
if ( matches ) {
return data[ candidate ];
} else {
candidate = candidates.nextSetBit( candidate + 1 );
}
}
return null;
}
|
@Test
public void hasDbConditionStopsSearching() throws Exception {
stepData.hasDBCondition = true;
assertNull( buildCache( "" ).getRowFromCache( keysMeta.clone(), keys[ 0 ] ) );
}
|
@Override
public Result responseMessageForCheckConnectionToRepository(String responseBody) {
return jsonResultMessageHandler.toResult(responseBody);
}
|
@Test
public void shouldBuildFailureResultFromCheckRepositoryConnectionResponse() throws Exception {
String responseBody = "{\"status\":\"failure\",\"messages\":[\"message-one\",\"message-two\"]}";
Result result = messageHandler.responseMessageForCheckConnectionToRepository(responseBody);
assertFailureResult(result, List.of("message-one", "message-two"));
}
|
@Override
public Object adapt(final HttpAction action, final WebContext context) {
if (action != null) {
var code = action.getCode();
val response = ((JEEContext) context).getNativeResponse();
if (code < 400) {
response.setStatus(code);
} else {
try {
response.sendError(code);
} catch (final IOException e) {
throw new TechnicalException(e);
}
}
if (action instanceof WithLocationAction withLocationAction) {
context.setResponseHeader(HttpConstants.LOCATION_HEADER, withLocationAction.getLocation());
} else if (action instanceof WithContentAction withContentAction) {
val content = withContentAction.getContent();
if (content != null) {
try {
response.getWriter().write(content);
} catch (final IOException e) {
throw new TechnicalException(e);
}
}
}
return null;
}
throw new TechnicalException("No action provided");
}
|
@Test
public void testError500() throws IOException {
JEEHttpActionAdapter.INSTANCE.adapt(new StatusAction(500), context);
verify(response).sendError(500);
}
|
public void load() {
Set<CoreExtension> coreExtensions = serviceLoaderWrapper.load(getClass().getClassLoader());
ensureNoDuplicateName(coreExtensions);
coreExtensionRepository.setLoadedCoreExtensions(coreExtensions);
if (!coreExtensions.isEmpty()) {
LOG.info("Loaded core extensions: {}", coreExtensions.stream().map(CoreExtension::getName).collect(Collectors.joining(", ")));
}
}
|
@Test
public void load_sets_loaded_core_extensions_into_repository() {
Set<CoreExtension> coreExtensions = IntStream.range(0, 1 + new Random().nextInt(5))
.mapToObj(i -> newCoreExtension("core_ext_" + i))
.collect(Collectors.toSet());
when(serviceLoaderWrapper.load(any())).thenReturn(coreExtensions);
underTest.load();
verify(serviceLoaderWrapper).load(CoreExtensionsLoader.class.getClassLoader());
verify(coreExtensionRepository).setLoadedCoreExtensions(coreExtensions);
verifyNoMoreInteractions(serviceLoaderWrapper, coreExtensionRepository);
}
|
@Override
public boolean move(String destination, V member) {
return get(moveAsync(destination, member));
}
|
@Test
public void testMove() throws Exception {
RSet<Integer> set = redisson.getSet("set");
RSet<Integer> otherSet = redisson.getSet("otherSet");
set.add(1);
set.add(2);
assertThat(set.move("otherSet", 1)).isTrue();
assertThat(set.size()).isEqualTo(1);
assertThat(set).contains(2);
assertThat(otherSet.size()).isEqualTo(1);
assertThat(otherSet).contains(1);
}
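move() maps to the Redis SMOVE command, which returns false when the member is absent from the source set; a sketch of that negative path:
@Test
public void testMoveNonMember() throws Exception {
RSet<Integer> set = redisson.getSet("set");
set.add(1);
// moving an element that is not in the source set is a no-op returning false
assertThat(set.move("otherSet", 5)).isFalse();
assertThat(set).containsExactly(1);
}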
|
public static String stripSchemeAndOptions(Endpoint endpoint) {
int start = endpoint.getEndpointUri().indexOf(':');
start++;
// Remove any leading '/'
while (endpoint.getEndpointUri().charAt(start) == '/') {
start++;
}
int end = endpoint.getEndpointUri().indexOf('?');
return end == -1 ? endpoint.getEndpointUri().substring(start) : endpoint.getEndpointUri().substring(start, end);
}
|
@Test
public void testStripSchemeNoOptionsWithSlashes() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("direct://hello");
assertEquals("hello", AbstractSpanDecorator.stripSchemeAndOptions(endpoint));
}
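stripSchemeAndOptions also drops everything from the first '?' onward; a companion sketch in the same mocking style:
@Test
public void testStripSchemeAndOptions() {
Endpoint endpoint = Mockito.mock(Endpoint.class);
Mockito.when(endpoint.getEndpointUri()).thenReturn("direct://hello?synchronous=true");
assertEquals("hello", AbstractSpanDecorator.stripSchemeAndOptions(endpoint));
}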
|
@Override
public Local create(final Path file) {
return this.create(String.format("%s-%s", new AlphanumericRandomStringService().random(), file.getName()));
}
|
@Test
public void testCreateFile() {
final String temp = StringUtils.removeEnd(System.getProperty("java.io.tmpdir"), File.separator);
final String s = System.getProperty("file.separator");
assertEquals(String.format("%s%su%s1742810335-f", temp, s, s),
new FlatTemporaryFileService().create("u", new Path("/p/f", EnumSet.of(Path.Type.file))).getAbsolute());
final Path file = new Path("/p/f", EnumSet.of(Path.Type.file));
file.attributes().setRegion("region");
assertEquals(String.format("%s%su%s1742810335-f", temp, s, s),
new FlatTemporaryFileService().create("u", file).getAbsolute());
}
|
@VisibleForTesting
ImmutableList<AggregationKeyResult> extractValues(PivotResult pivotResult) throws EventProcessorException {
final ImmutableList.Builder<AggregationKeyResult> results = ImmutableList.builder();
// Example PivotResult structures. The row value "key" is composed of: "metric/<function>/<field>/<series-id>"
// The row "key" always contains the date range bucket value as first element.
//
// With group-by:
// {
// "rows": [
// {
// "key": ["2020-03-27T16:23:12Z", "php", "box2"],
// "values": [
// {
// "key": ["metric/count/source/abc123"],
// "value": 86,
// "rollup": true,
// "source": "row-leaf"
// },
// {
// "key": ["metric/card/source/abc123"],
// "value": 1,
// "rollup": true,
// "source": "row-leaf"
// }
// ],
// "source": "leaf"
// },
// {
// "key": ["2020-03-27T16:23:12Z", "php"],
// "values": [
// {
// "key": ["metric/count/source/abc123"],
// "value": 86,
// "rollup": true,
// "source": "row-inner"
// },
// {
// "key": ["metric/card/source/abc123"],
// "value": 1,
// "rollup": true,
// "source": "row-inner"
// }
// ],
// "source": "non-leaf"
// },
// {
// "key": ["2020-03-27T16:23:12Z", "sshd","box2"],
// "values": [
// {
// "key": ["metric/count/source/abc123"],
// "value": 5,
// "rollup": true,
// "source": "row-leaf"
// },
// {
// "key": ["metric/card/source/abc123"],
// "value": 1,
// "rollup": true,
// "source": "row-leaf"
// }
// ],
// "source": "leaf"
// }
// ]
//}
//
// Without group-by:
// {
// "rows": [
// {
// "key": ["2020-03-27T16:23:12Z"],
// "values": [
// {
// "key": ["metric/count/source/abc123"],
// "value": 18341,
// "rollup": true,
// "source": "row-leaf"
// },
// {
// "key": ["metric/card/source/abc123"],
// "value": 1,
// "rollup": true,
// "source": "row-leaf"
// }
// ],
// "source": "leaf"
// }
// ]
//}
for (final PivotResult.Row row : pivotResult.rows()) {
if (!"leaf".equals(row.source())) {
// "non-leaf" values can show up when the "rollup" feature is enabled in the pivot search type
continue;
}
// Safety guard against programming errors
if (row.key().size() == 0 || isNullOrEmpty(row.key().get(0))) {
throw new EventProcessorException("Invalid row key! Expected at least the date range timestamp value: " + row.key().toString(), true, eventDefinition);
}
// We always wrap aggregations in date range buckets so we can run aggregations for multiple ranges at once.
// The timestamp value of the date range bucket will be part of the result.
final String timeKey = row.key().get(0);
final ImmutableList<String> groupKey;
if (row.key().size() > 1) {
// The date range bucket value must not be exposed to consumers as part of the key so they
// don't have to unwrap the key all the time.
groupKey = row.key().subList(1, row.key().size());
} else {
groupKey = ImmutableList.of();
}
final ImmutableList.Builder<AggregationSeriesValue> values = ImmutableList.builder();
for (final PivotResult.Value value : row.values()) {
if (!"row-leaf".equals(value.source())) {
// "row-inner" values can show up when the "rollup" feature is enabled in the pivot search type
continue;
}
for (var series : config.series()) {
if (!value.key().isEmpty() && value.key().get(0).equals(metricName(series))) {
// Some Elasticsearch aggregations can return a "null" value. (e.g. avg on a non-existent field)
// We are using NaN in that case to make sure our conditions will work.
final Object maybeNumberValue = firstNonNull(value.value(), Double.NaN);
if (maybeNumberValue instanceof Number) {
final double numberValue = ((Number) maybeNumberValue).doubleValue();
final AggregationSeriesValue seriesValue = AggregationSeriesValue.builder()
.key(groupKey)
.value(numberValue)
.series(series)
.build();
values.add(seriesValue);
} else {
// Should not happen
throw new IllegalStateException("Got unexpected non-number value for " + series.toString() + " " + row.toString() + " " + value.toString());
}
}
}
}
DateTime resultTimestamp;
try {
resultTimestamp = DateTime.parse(timeKey).withZone(DateTimeZone.UTC);
} catch (IllegalArgumentException e) {
throw new IllegalStateException("Failed to create event for: " + eventDefinition.title() + " (possibly due to non-existing grouping fields)", e);
}
results.add(AggregationKeyResult.builder()
.key(groupKey)
.timestamp(resultTimestamp)
.seriesValues(values.build())
.build());
}
return results.build();
}
|
@Test
public void testExtractValuesWithNullValues() throws Exception {
final long WINDOW_LENGTH = 30000;
final AbsoluteRange timerange = AbsoluteRange.create(DateTime.now(DateTimeZone.UTC).minusSeconds(3600), DateTime.now(DateTimeZone.UTC));
final SeriesSpec seriesCount = Count.builder().id("abc123").field("source").build();
final SeriesSpec seriesAvg = Average.builder().id("abc123").field("some_field").build();
final AggregationEventProcessorConfig config = AggregationEventProcessorConfig.builder()
.query("")
.streams(Collections.emptySet())
.groupBy(Collections.emptyList())
.series(ImmutableList.of(seriesCount, seriesAvg))
.conditions(null)
.searchWithinMs(WINDOW_LENGTH)
.executeEveryMs(WINDOW_LENGTH)
.build();
final AggregationEventProcessorParameters parameters = AggregationEventProcessorParameters.builder()
.streams(Collections.emptySet())
.timerange(timerange)
.batchSize(500)
.build();
final PivotAggregationSearch pivotAggregationSearch = new PivotAggregationSearch(
config,
parameters,
new AggregationSearch.User("test", DateTimeZone.UTC),
eventDefinition,
Collections.emptyList(),
searchJobService,
queryEngine,
EventsConfigurationTestProvider.create(),
moreSearch,
permittedStreams,
notificationService,
new QueryStringDecorators(Optional.empty())
);
final PivotResult pivotResult = PivotResult.builder()
.id("test")
.effectiveTimerange(timerange)
.total(1)
.addRow(PivotResult.Row.builder()
.key(ImmutableList.of(timerange.getTo().toString()))
.addValue(PivotResult.Value.create(ImmutableList.of("metric/count(source)"), 42, true, "row-leaf"))
// A "null" value can happen with some Elasticsearch aggregations (e.g. avg on a non-existent field)
.addValue(PivotResult.Value.create(ImmutableList.of("metric/avg(some_field)"), null, true, "row-leaf"))
.source("leaf")
.build())
.build();
final ImmutableList<AggregationKeyResult> results = pivotAggregationSearch.extractValues(pivotResult);
assertThat(results.size()).isEqualTo(1);
assertThat(results.get(0)).isEqualTo(AggregationKeyResult.builder()
.key(ImmutableList.of())
.timestamp(timerange.getTo())
.seriesValues(ImmutableList.of(
AggregationSeriesValue.builder()
.key(ImmutableList.of())
.value(42.0)
.series(seriesCount)
.build(),
AggregationSeriesValue.builder()
.key(ImmutableList.of())
.value(Double.NaN) // For "null" we expect NaN
.series(seriesAvg)
.build()
))
.build());
}
|
protected ChannelPoolHandler handler() {
return handler;
}
|
@Test
public void testHandler() {
final ChannelPoolHandler handler = new CountingChannelPoolHandler();
final SimpleChannelPool pool = new SimpleChannelPool(new Bootstrap(), handler);
try {
assertSame(handler, pool.handler());
} finally {
pool.close();
}
}
|
@Override
public HttpResponse get() throws InterruptedException, ExecutionException {
try {
final Object result = process(0, null);
if (result instanceof Throwable) {
throw new ExecutionException((Throwable) result);
}
return (HttpResponse) result;
} finally {
isDone = true;
}
}
|
@Test(expected = TimeoutException.class)
public void errGetTimeoutEx() throws ExecutionException, InterruptedException, TimeoutException {
get(new TimeoutException(), true);
}
|
public static String getInterfaceName(Invoker invoker) {
return getInterfaceName(invoker, false);
}
|
@Test
public void testGetInterfaceNameWithPrefix() throws NoSuchMethodException {
URL url = URL.valueOf("dubbo://127.0.0.1:2181")
.addParameter(CommonConstants.VERSION_KEY, "1.0.0")
.addParameter(CommonConstants.GROUP_KEY, "grp1")
.addParameter(CommonConstants.INTERFACE_KEY, DemoService.class.getName());
Invoker invoker = mock(Invoker.class);
when(invoker.getUrl()).thenReturn(url);
when(invoker.getInterface()).thenReturn(DemoService.class);
//test with default prefix
String resourceName = DubboUtils.getInterfaceName(invoker, DubboAdapterGlobalConfig.getDubboProviderResNamePrefixKey());
assertEquals("dubbo:provider:com.alibaba.csp.sentinel.adapter.dubbo.provider.DemoService", resourceName);
resourceName = DubboUtils.getInterfaceName(invoker, DubboAdapterGlobalConfig.getDubboConsumerResNamePrefixKey());
assertEquals("dubbo:consumer:com.alibaba.csp.sentinel.adapter.dubbo.provider.DemoService", resourceName);
//test with custom prefix
SentinelConfig.setConfig(DubboAdapterGlobalConfig.DUBBO_PROVIDER_RES_NAME_PREFIX_KEY, "my:dubbo:provider:");
SentinelConfig.setConfig(DubboAdapterGlobalConfig.DUBBO_CONSUMER_RES_NAME_PREFIX_KEY, "my:dubbo:consumer:");
resourceName = DubboUtils.getInterfaceName(invoker, DubboAdapterGlobalConfig.getDubboProviderResNamePrefixKey());
assertEquals("my:dubbo:provider:com.alibaba.csp.sentinel.adapter.dubbo.provider.DemoService", resourceName);
resourceName = DubboUtils.getInterfaceName(invoker, DubboAdapterGlobalConfig.getDubboConsumerResNamePrefixKey());
assertEquals("my:dubbo:consumer:com.alibaba.csp.sentinel.adapter.dubbo.provider.DemoService", resourceName);
}
|
@Override
public boolean betterThan(Num criterionValue1, Num criterionValue2) {
return lessIsBetter ? criterionValue1.isLessThan(criterionValue2)
: criterionValue1.isGreaterThan(criterionValue2);
}
|
@Test
public void betterThanWithLessIsBetter() {
AnalysisCriterion criterion = getCriterion(new ProfitLossCriterion());
assertFalse(criterion.betterThan(numOf(5000), numOf(4500)));
assertTrue(criterion.betterThan(numOf(4500), numOf(5000)));
}
|
public String getQuery() throws Exception {
return getQuery(weatherConfiguration.getLocation());
}
|
@Test
public void testCurrentLocationHourlyQuery() throws Exception {
WeatherConfiguration weatherConfiguration = new WeatherConfiguration();
weatherConfiguration.setMode(WeatherMode.XML);
weatherConfiguration.setPeriod("3");
weatherConfiguration.setLanguage(WeatherLanguage.nl);
weatherConfiguration.setUnits(WeatherUnits.IMPERIAL);
weatherConfiguration.setAppid(APPID);
weatherConfiguration.setWeatherApi(WeatherApi.Hourly);
WeatherQuery weatherQuery = new WeatherQuery(weatherConfiguration);
weatherConfiguration.setGeoLocationProvider(geoLocationProvider);
String query = weatherQuery.getQuery();
assertThat(query, is(
"http://api.openweathermap.org/data/2.5/forecast?lat=51.98&lon=4.13&lang=nl&cnt=3&units=imperial&mode=xml&APPID=9162755b2efa555823cfe0451d7fff38"));
}
|
static int tableSizeFor(int cap) {
int n = ALL_BIT_IS_ONE >>> Integer.numberOfLeadingZeros(cap - 1);
return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
}
|
@Test
public void testTableSizeFor() {
int maxCap = 1 << 30;
for (int i = 0; i <= maxCap; i++) {
int tabSize1 = tabSizeFor_JDK8(i);
int tabSize2 = TaskTimeRecordPlugin.tableSizeFor(i);
Assert.assertTrue(tabSize1 == tabSize2);
}
}
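The oracle tabSizeFor_JDK8 referenced above is presumably the classic table-sizing routine from java.util.HashMap in JDK 8, which reaches the same next power of two by bit-smearing instead of counting leading zeros; for reference, a sketch (assuming the same MAXIMUM_CAPACITY constant as the focal method):
static int tabSizeFor_JDK8(int cap) {
int n = cap - 1;
// smear the highest set bit into all lower positions
n |= n >>> 1;
n |= n >>> 2;
n |= n >>> 4;
n |= n >>> 8;
n |= n >>> 16;
return (n < 0) ? 1 : (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
}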
|
@Override
public int run(String[] args) throws Exception {
YarnConfiguration yarnConf =
getConf() == null ? new YarnConfiguration() : new YarnConfiguration(
getConf());
boolean isHAEnabled =
yarnConf.getBoolean(YarnConfiguration.RM_HA_ENABLED,
YarnConfiguration.DEFAULT_RM_HA_ENABLED);
if (args.length < 1) {
printUsage("", isHAEnabled);
return -1;
}
int exitCode = -1;
int i = 0;
String cmd = args[i++];
exitCode = 0;
if ("-help".equals(cmd)) {
if (i < args.length) {
printUsage(args[i], isHAEnabled);
} else {
printHelp("", isHAEnabled);
}
return exitCode;
}
if (USAGE.containsKey(cmd)) {
if (isHAEnabled) {
return super.run(args);
}
System.out.println("Cannot run " + cmd
+ " when ResourceManager HA is not enabled");
return -1;
}
//
// verify that we have enough command line parameters
//
String subClusterId = StringUtils.EMPTY;
if ("-refreshAdminAcls".equals(cmd) || "-refreshQueues".equals(cmd) ||
"-refreshNodesResources".equals(cmd) ||
"-refreshServiceAcl".equals(cmd) ||
"-refreshUserToGroupsMappings".equals(cmd) ||
"-refreshSuperUserGroupsConfiguration".equals(cmd) ||
"-refreshClusterMaxPriority".equals(cmd)) {
subClusterId = parseSubClusterId(args, isHAEnabled);
// If we enable Federation mode, the number of args may be either one or three.
// Example: -refreshQueues or -refreshQueues -subClusterId SC-1
if (isYarnFederationEnabled(getConf()) && args.length != 1 && args.length != 3) {
printUsage(cmd, isHAEnabled);
return exitCode;
} else if (!isYarnFederationEnabled(getConf()) && args.length != 1) {
// If Federation mode is not enabled, then the number of args can only be one.
// Example: -refreshQueues
printUsage(cmd, isHAEnabled);
return exitCode;
}
}
// If it is federation mode, we will print federation mode information
if (isYarnFederationEnabled(getConf())) {
System.out.println("Using YARN Federation mode.");
}
try {
if ("-refreshQueues".equals(cmd)) {
exitCode = refreshQueues(subClusterId);
} else if ("-refreshNodes".equals(cmd)) {
exitCode = handleRefreshNodes(args, cmd, isHAEnabled);
} else if ("-refreshNodesResources".equals(cmd)) {
exitCode = refreshNodesResources(subClusterId);
} else if ("-refreshUserToGroupsMappings".equals(cmd)) {
exitCode = refreshUserToGroupsMappings(subClusterId);
} else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) {
exitCode = refreshSuperUserGroupsConfiguration(subClusterId);
} else if ("-refreshAdminAcls".equals(cmd)) {
exitCode = refreshAdminAcls(subClusterId);
} else if ("-refreshServiceAcl".equals(cmd)) {
exitCode = refreshServiceAcls(subClusterId);
} else if ("-refreshClusterMaxPriority".equals(cmd)) {
exitCode = refreshClusterMaxPriority(subClusterId);
} else if ("-getGroups".equals(cmd)) {
String[] usernames = Arrays.copyOfRange(args, i, args.length);
exitCode = getGroups(usernames);
} else if ("-updateNodeResource".equals(cmd)) {
exitCode = handleUpdateNodeResource(args, cmd, isHAEnabled, subClusterId);
} else if ("-addToClusterNodeLabels".equals(cmd)) {
exitCode = handleAddToClusterNodeLabels(args, cmd, isHAEnabled);
} else if ("-removeFromClusterNodeLabels".equals(cmd)) {
exitCode = handleRemoveFromClusterNodeLabels(args, cmd, isHAEnabled);
} else if ("-replaceLabelsOnNode".equals(cmd)) {
exitCode = handleReplaceLabelsOnNodes(args, cmd, isHAEnabled);
} else {
exitCode = -1;
System.err.println(cmd.substring(1) + ": Unknown command");
printUsage("", isHAEnabled);
}
} catch (IllegalArgumentException arge) {
exitCode = -1;
System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage());
printUsage(cmd, isHAEnabled);
} catch (RemoteException e) {
//
// This is an error returned by the hadoop server. Print
// out the first line of the error message, ignore the stack trace.
exitCode = -1;
try {
String[] content;
content = e.getLocalizedMessage().split("\n");
System.err.println(cmd.substring(1) + ": "
+ content[0]);
} catch (Exception ex) {
System.err.println(cmd.substring(1) + ": "
+ ex.getLocalizedMessage());
}
} catch (Exception e) {
exitCode = -1;
System.err.println(cmd.substring(1) + ": "
+ e.getLocalizedMessage());
}
if (null != localNodeLabelsManager) {
localNodeLabelsManager.stop();
}
return exitCode;
}
|
@Test
public void testCheckHealth() throws Exception {
String[] args = {"-checkHealth", "rm1"};
// RM HA is disabled.
// monitorHealth should not be executed
assertEquals(-1, rmAdminCLI.run(args));
verify(haadmin, never()).monitorHealth();
// Now RM HA is enabled.
// monitorHealth should be executed
assertEquals(0, rmAdminCLIWithHAEnabled.run(args));
verify(haadmin).monitorHealth();
}
|
public Optional<Boolean> getBasicConstraints() {
return getExtensions()
.map(BasicConstraints::fromExtensions)
.map(BasicConstraints::isCA);
}
|
@Test
void can_read_basic_constraints() {
X500Principal subject = new X500Principal("CN=subject");
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
Pkcs10Csr csr = Pkcs10CsrBuilder.fromKeypair(subject, keypair, SignatureAlgorithm.SHA512_WITH_ECDSA)
.setBasicConstraints(true, true)
.build();
assertTrue(csr.getBasicConstraints().isPresent());
assertTrue(csr.getBasicConstraints().get());
}
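Because getBasicConstraints() chains two Optional maps, a CSR built without the extension should yield an empty Optional; a sketch under the assumption that the builder adds no extensions unless asked:
@Test
void basic_constraints_absent_when_not_set() {
X500Principal subject = new X500Principal("CN=subject");
KeyPair keypair = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
Pkcs10Csr csr = Pkcs10CsrBuilder.fromKeypair(subject, keypair, SignatureAlgorithm.SHA512_WITH_ECDSA)
.build();
// no setBasicConstraints(...) call, so the extension chain maps to empty
assertFalse(csr.getBasicConstraints().isPresent());
}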
|
@NonNull public static GestureTypingPathDraw create(
@NonNull OnInvalidateCallback callback, @NonNull GestureTrailTheme theme) {
if (theme.maxTrailLength <= 0) return NO_OP;
return new GestureTypingPathDrawHelper(callback, theme);
}
|
@Test
public void testCreatesNoOp() {
final AtomicInteger invalidates = new AtomicInteger();
Assert.assertSame(
GestureTypingPathDrawHelper.NO_OP,
GestureTypingPathDrawHelper.create(
invalidates::incrementAndGet,
new GestureTrailTheme(
Color.argb(200, 60, 120, 240), Color.argb(100, 30, 240, 200), 100f, 20f, 0)));
}
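create() only short-circuits to NO_OP when maxTrailLength is non-positive. A sketch of the opposite branch, assuming the final GestureTrailTheme constructor argument is maxTrailLength:
@Test
public void testCreatesRealHelperForPositiveTrailLength() {
final AtomicInteger invalidates = new AtomicInteger();
// last constructor argument > 0 (assumed to be maxTrailLength), so a real helper is expected
Assert.assertNotSame(
GestureTypingPathDrawHelper.NO_OP,
GestureTypingPathDrawHelper.create(
invalidates::incrementAndGet,
new GestureTrailTheme(
Color.argb(200, 60, 120, 240), Color.argb(100, 30, 240, 200), 100f, 20f, 16)));
}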
|
@Override
public URL getResource(String name) {
ClassLoadingStrategy loadingStrategy = getClassLoadingStrategy(name);
log.trace("Received request to load resource '{}'", name);
for (ClassLoadingStrategy.Source classLoadingSource : loadingStrategy.getSources()) {
URL url = null;
switch (classLoadingSource) {
case APPLICATION:
url = super.getResource(name);
break;
case PLUGIN:
url = findResource(name);
break;
case DEPENDENCIES:
url = findResourceFromDependencies(name);
break;
}
if (url != null) {
log.trace("Found resource '{}' in {} classpath", name, classLoadingSource);
return url;
} else {
log.trace("Couldn't find resource '{}' in {}", name, classLoadingSource);
}
}
return null;
}
|
@Test
void parentLastGetResourceExistsInParent() throws IOException, URISyntaxException {
URL resource = parentLastPluginClassLoader.getResource("META-INF/file-only-in-parent");
assertFirstLine("parent", resource);
}
|
static boolean needWrap(MethodDescriptor methodDescriptor, Class<?>[] parameterClasses, Class<?> returnClass) {
String methodName = methodDescriptor.getMethodName();
// generic call must be wrapped
if (CommonConstants.$INVOKE.equals(methodName) || CommonConstants.$INVOKE_ASYNC.equals(methodName)) {
return true;
}
// echo must be wrapped
if ($ECHO.equals(methodName)) {
return true;
}
boolean returnClassProtobuf = isProtobufClass(returnClass);
// Response foo()
if (parameterClasses.length == 0) {
return !returnClassProtobuf;
}
int protobufParameterCount = 0;
int javaParameterCount = 0;
int streamParameterCount = 0;
boolean secondParameterStream = false;
// count normal and protobuf param
for (int i = 0; i < parameterClasses.length; i++) {
Class<?> parameterClass = parameterClasses[i];
if (isProtobufClass(parameterClass)) {
protobufParameterCount++;
} else {
if (isStreamType(parameterClass)) {
if (i == 1) {
secondParameterStream = true;
}
streamParameterCount++;
} else {
javaParameterCount++;
}
}
}
// more than one stream param
if (streamParameterCount > 1) {
throw new IllegalStateException("method params error: more than one Stream params. method=" + methodName);
}
// protobuf supports only one param
if (protobufParameterCount >= 2) {
throw new IllegalStateException("method params error: more than one protobuf params. method=" + methodName);
}
// server stream supports one normal param and one stream param
if (streamParameterCount == 1) {
if (javaParameterCount + protobufParameterCount > 1) {
throw new IllegalStateException(
"method params error: server stream does not support more than one normal param." + " method="
+ methodName);
}
// server stream: void foo(Request, StreamObserver<Response>)
if (!secondParameterStream) {
throw new IllegalStateException(
"method params error: server stream's second param must be StreamObserver." + " method="
+ methodName);
}
}
if (methodDescriptor.getRpcType() != MethodDescriptor.RpcType.UNARY) {
if (MethodDescriptor.RpcType.SERVER_STREAM == methodDescriptor.getRpcType()) {
if (!secondParameterStream) {
throw new IllegalStateException(
"method params error:server stream's second param must be StreamObserver." + " method="
+ methodName);
}
}
// param type must be consistent
if (returnClassProtobuf) {
if (javaParameterCount > 0) {
throw new IllegalStateException(
"method params error: both normal and protobuf param found. method=" + methodName);
}
} else {
if (protobufParameterCount > 0) {
throw new IllegalStateException("method params error method=" + methodName);
}
}
} else {
if (streamParameterCount > 0) {
throw new IllegalStateException(
"method params error: unary method should not contain any StreamObserver." + " method="
+ methodName);
}
if (protobufParameterCount > 0 && returnClassProtobuf) {
return false;
}
// handle reactor or rxjava; only consider types generated from proto
if (isMono(returnClass) || isRx(returnClass)) {
return false;
}
if (protobufParameterCount <= 0 && !returnClassProtobuf) {
return true;
}
// handle grpc stub; only consider types generated from proto
if (GRPC_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName()) && protobufParameterCount == 1) {
return false;
}
// handle dubbo generated method
if (TRI_ASYNC_RETURN_CLASS.equalsIgnoreCase(returnClass.getName())) {
Class<?> actualReturnClass = (Class<?>)
((ParameterizedType) methodDescriptor.getMethod().getGenericReturnType())
.getActualTypeArguments()[0];
boolean actualReturnClassProtobuf = isProtobufClass(actualReturnClass);
if (actualReturnClassProtobuf && protobufParameterCount == 1) {
return false;
}
if (!actualReturnClassProtobuf && protobufParameterCount == 0) {
return true;
}
}
// todo remove this in future
boolean ignore = checkNeedIgnore(returnClass);
if (ignore) {
return protobufParameterCount != 1;
}
throw new IllegalStateException("method params error method=" + methodName);
}
// java param should be wrapped
return javaParameterCount > 0;
}
|
@Test
void testBiStream() throws Exception {
Method method = DescriptorService.class.getMethod("bidirectionalStream", StreamObserver.class);
ReflectionMethodDescriptor descriptor = new ReflectionMethodDescriptor(method);
Assertions.assertEquals(1, descriptor.getParameterClasses().length);
assertSame(descriptor.getRpcType(), MethodDescriptor.RpcType.BI_STREAM);
assertFalse(needWrap(descriptor));
}
|
public static JSON parse(Object obj) {
return parse(obj, null);
}
|
@Test
public void parseTest() {
assertThrows(JSONException.class, () -> {
final JSONArray jsonArray = JSONUtil.parseArray("[{\"a\":\"a\\x]");
Console.log(jsonArray);
});
}
|
public static void executeWithRetries(
final Function function,
final RetryBehaviour retryBehaviour
) throws Exception {
executeWithRetries(() -> {
function.call();
return null;
}, retryBehaviour);
}
|
@Test
public void shouldRetryAndEventuallyThrowIfNeverSucceeds() throws Exception {
// Given:
final Callable<Object> neverSucceeds = () -> {
throw new ExecutionException(new TestRetriableException("I will never succeed"));
};
// When:
final ExecutionException e = assertThrows(
ExecutionException.class,
() -> ExecutorUtil.executeWithRetries(neverSucceeds, ON_RETRYABLE, () -> SMALL_RETRY_BACKOFF)
);
// Then:
assertThat(e.getMessage(), containsString("I will never succeed"));
}
|
@Override
public Map<String, NoteInfo> list(AuthenticationInfo subject) throws IOException {
// We must recreate rootNotebookFileObject on every call to list(); otherwise we cannot
// see the updated data under this folder.
this.rootNotebookFileObject = fsManager.resolveFile(this.rootNotebookFolder);
return listFolder(rootNotebookFileObject);
}
|
@Test
void testSkipInvalidFileName() throws IOException {
assertEquals(0, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size());
createNewNote("{}", "hidden_note", "my_project/.hidden_note");
Map<String, NoteInfo> noteInfos = notebookRepo.list(AuthenticationInfo.ANONYMOUS);
assertEquals(0, noteInfos.size());
}
|
@Override
public List<SnowflakeIdentifier> listDatabases() {
List<SnowflakeIdentifier> databases;
try {
databases =
connectionPool.run(
conn ->
queryHarness.query(
conn, "SHOW DATABASES IN ACCOUNT", DATABASE_RESULT_SET_HANDLER));
} catch (SQLException e) {
throw snowflakeExceptionToIcebergException(
SnowflakeIdentifier.ofRoot(), e, "Failed to list databases");
} catch (InterruptedException e) {
throw new UncheckedInterruptedException(e, "Interrupted while listing databases");
}
databases.forEach(
db ->
Preconditions.checkState(
db.type() == SnowflakeIdentifier.Type.DATABASE,
"Expected DATABASE, got identifier '%s'",
db));
return databases;
}
|
@SuppressWarnings("unchecked")
@Test
public void testListDatabasesSQLExceptionAtRootLevel() throws SQLException, InterruptedException {
Exception injectedException =
new SQLException(String.format("SQL exception with Error Code %d", 0), "2000", 0, null);
when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
assertThatExceptionOfType(UncheckedSQLException.class)
.isThrownBy(() -> snowflakeClient.listDatabases())
.withMessageContaining("Failed to list databases")
.withCause(injectedException);
}
|
@Override
public Base64BinaryChunk parse(XmlPullParser parser, int initialDepth, XmlEnvironment xmlEnvironment) throws XmlPullParserException, IOException {
String streamId = parser.getAttributeValue("", Base64BinaryChunk.ATTRIBUTE_STREAM_ID);
String nrString = parser.getAttributeValue("", Base64BinaryChunk.ATTRIBUTE_NR);
String lastString = parser.getAttributeValue("", Base64BinaryChunk.ATTRIBUTE_LAST);
boolean last = false;
int nr = Integer.parseInt(nrString);
if (lastString != null) {
last = Boolean.parseBoolean(lastString);
}
String text = null;
boolean done = false;
while (!done) {
XmlPullParser.Event eventType = parser.next();
if (eventType == XmlPullParser.Event.END_ELEMENT) {
if (parser.getName().equals(Base64BinaryChunk.ELEMENT_CHUNK)) {
done = true;
} else {
throw new IllegalArgumentException("unexpected end tag of: " + parser.getName());
}
} else if (eventType == XmlPullParser.Event.TEXT_CHARACTERS) {
text = parser.getText();
} else {
throw new IllegalArgumentException("unexpected eventType: " + eventType);
}
}
return new Base64BinaryChunk(text, streamId, nr, last);
}
|
@Test
public void isLastChunkParsedCorrectly() throws Exception {
String base64Text = "2uPzi9u+tVWJd+e+y1AAAAABJRU5ErkJggg==";
String string = "<chunk xmlns='urn:xmpp:http' streamId='Stream0001' nr='1' last='true'>" + base64Text + "</chunk>";
Base64BinaryChunkProvider provider = new Base64BinaryChunkProvider();
XmlPullParser parser = PacketParserUtils.getParserFor(string);
ExtensionElement extension = provider.parse(parser);
assertTrue(extension instanceof Base64BinaryChunk);
Base64BinaryChunk chunk = (Base64BinaryChunk) extension;
assertEquals("Stream0001", chunk.getStreamId());
assertTrue(chunk.isLast());
assertEquals(base64Text, chunk.getText());
assertEquals(1, chunk.getNr());
}
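// A complementary sketch, not from the original suite: it assumes the same imports as the
// test above plus assertFalse, and checks that a chunk without the optional 'last'
// attribute is parsed as a non-final chunk.
@Test
public void isNonLastChunkParsedCorrectly() throws Exception {
    String base64Text = "2uPzi9u+tVWJd+e+y1AAAAABJRU5ErkJggg==";
    String string = "<chunk xmlns='urn:xmpp:http' streamId='Stream0001' nr='0'>" + base64Text + "</chunk>";
    Base64BinaryChunkProvider provider = new Base64BinaryChunkProvider();
    XmlPullParser parser = PacketParserUtils.getParserFor(string);
    ExtensionElement extension = provider.parse(parser);
    assertTrue(extension instanceof Base64BinaryChunk);
    Base64BinaryChunk chunk = (Base64BinaryChunk) extension;
    assertFalse(chunk.isLast());
    assertEquals(0, chunk.getNr());
    assertEquals(base64Text, chunk.getText());
}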
|
protected static List<String> getParameters(String path) {
List<String> parameters = null;
int startIndex = path.indexOf('{');
while (startIndex != -1) {
int endIndex = path.indexOf('}', startIndex);
if (endIndex != -1) {
if (parameters == null) {
parameters = new ArrayList<>();
}
parameters.add(path.substring(startIndex + 1, endIndex));
startIndex = path.indexOf('{', endIndex);
} else {
// Break out of loop as no valid end token
startIndex = -1;
}
}
return parameters == null ? Collections.emptyList() : parameters;
}
|
@Test
public void testGetParameters() {
assertEquals(Arrays.asList("id1", "id2"), RestSpanDecorator.getParameters("/context/{id1}/{id2}"));
}
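// A small supplementary check, not part of the original suite: it exercises the
// no-placeholder and unterminated-brace paths of getParameters and only relies on the
// assertEquals import already used above.
@Test
public void testGetParametersWithoutPlaceholders() {
    assertEquals(0, RestSpanDecorator.getParameters("/context/no/params").size());
    // an unterminated '{' never finds a closing '}', so nothing is extracted
    assertEquals(0, RestSpanDecorator.getParameters("/context/{unterminated").size());
}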
|
public static boolean isRumResource(String resourceName) {
return BOOMERANG_FILENAME.equals(resourceName);
}
|
@Test
public void testIsRumResource() {
assertTrue("isRumResource", RumInjector.isRumResource("boomerang.min.js"));
assertFalse("isRumResource", RumInjector.isRumResource("notboomerang"));
}
|
public boolean hasProjectSubscribersForTypes(String projectUuid, Set<Class<? extends Notification>> notificationTypes) {
Set<String> dispatcherKeys = handlers.stream()
.filter(handler -> notificationTypes.stream().anyMatch(notificationType -> handler.getNotificationClass() == notificationType))
.map(NotificationHandler::getMetadata)
.filter(Optional::isPresent)
.map(Optional::get)
.map(NotificationDispatcherMetadata::getDispatcherKey)
.collect(Collectors.toSet());
return dbClient.propertiesDao().hasProjectNotificationSubscribersForDispatchers(projectUuid, dispatcherKeys);
}
|
@Test
public void hasProjectSubscribersForType_returns_false_if_set_is_empty() {
String dispatcherKey1A = randomAlphabetic(5);
String dispatcherKey1B = randomAlphabetic(6);
String projectUuid = randomAlphabetic(7);
NotificationHandler<Notification1> handler1A = getMockOfNotificationHandlerForType(Notification1.class);
when(handler1A.getMetadata()).thenReturn(Optional.of(NotificationDispatcherMetadata.create(dispatcherKey1A)));
NotificationHandler<Notification1> handler1B = getMockOfNotificationHandlerForType(Notification1.class);
when(handler1B.getMetadata()).thenReturn(Optional.of(NotificationDispatcherMetadata.create(dispatcherKey1B)));
NotificationHandler<Notification2> handler2 = getMockOfNotificationHandlerForType(Notification2.class);
when(handler2.getMetadata()).thenReturn(Optional.empty());
NotificationService underTest = new NotificationService(dbClient, new NotificationHandler[]{handler1A, handler1B, handler2});
boolean flag = underTest.hasProjectSubscribersForTypes(projectUuid, ImmutableSet.of());
assertThat(flag).isFalse();
verify(propertiesDao).hasProjectNotificationSubscribersForDispatchers(projectUuid, ImmutableSet.of());
verifyNoMoreInteractions(propertiesDao);
}
|
public double getRelevance() { return relevance; }
|
@Test
void testRelevanceIsKeptEvenWithBySortData() {
assertEquals(1.3, new LeanHit(gidA, 0, 0, 1.3, gidA).getRelevance(), 0.0);
}
|
@Override
public void merge(ColumnStatisticsObj aggregateColStats, ColumnStatisticsObj newColStats) {
LOG.debug("Merging statistics: [aggregateColStats:{}, newColStats: {}]", aggregateColStats, newColStats);
DoubleColumnStatsDataInspector aggregateData = doubleInspectorFromStats(aggregateColStats);
DoubleColumnStatsDataInspector newData = doubleInspectorFromStats(newColStats);
Double lowValue = mergeLowValue(getLowValue(aggregateData), getLowValue(newData));
if (lowValue != null) {
aggregateData.setLowValue(lowValue);
}
Double highValue = mergeHighValue(getHighValue(aggregateData), getHighValue(newData));
if (highValue != null) {
aggregateData.setHighValue(highValue);
}
aggregateData.setNumNulls(mergeNumNulls(aggregateData.getNumNulls(), newData.getNumNulls()));
NumDistinctValueEstimator oldNDVEst = aggregateData.getNdvEstimator();
NumDistinctValueEstimator newNDVEst = newData.getNdvEstimator();
List<NumDistinctValueEstimator> ndvEstimatorsList = Arrays.asList(oldNDVEst, newNDVEst);
aggregateData.setNumDVs(mergeNumDistinctValueEstimator(aggregateColStats.getColName(),
ndvEstimatorsList, aggregateData.getNumDVs(), newData.getNumDVs()));
aggregateData.setNdvEstimator(ndvEstimatorsList.get(0));
KllHistogramEstimator oldKllEst = aggregateData.getHistogramEstimator();
KllHistogramEstimator newKllEst = newData.getHistogramEstimator();
aggregateData.setHistogramEstimator(mergeHistogramEstimator(aggregateColStats.getColName(), oldKllEst, newKllEst));
aggregateColStats.getStatsData().setDoubleStats(aggregateData);
}
|
@Test
public void testMergeNullValues() {
ColumnStatisticsObj aggrObj = createColumnStatisticsObj(new ColStatsBuilder<>(double.class)
.low(null)
.high(null)
.numNulls(1)
.numDVs(0)
.build());
merger.merge(aggrObj, aggrObj);
ColumnStatisticsData expectedColumnStatisticsData = new ColStatsBuilder<>(double.class)
.low(null)
.high(null)
.numNulls(2)
.numDVs(0)
.build();
assertEquals(expectedColumnStatisticsData, aggrObj.getStatsData());
}
|
@Override
@TpsControl(pointName = "ConfigQuery")
@Secured(action = ActionTypes.READ, signType = SignType.CONFIG)
@ExtractorManager.Extractor(rpcExtractor = ConfigRequestParamExtractor.class)
public ConfigQueryResponse handle(ConfigQueryRequest request, RequestMeta meta) throws NacosException {
try {
return getContext(request, meta, request.isNotify());
} catch (Exception e) {
return ConfigQueryResponse.buildFailResponse(ResponseCode.FAIL.getCode(), e.getMessage());
}
}
|
@Test
void testGetTagNotFound() throws Exception {
final String groupKey = GroupKey2.getKey(dataId, group, "");
String content = "content_from_tag_withtag你好" + System.currentTimeMillis();
ConfigRocksDbDiskService configRocksDbDiskService = Mockito.mock(ConfigRocksDbDiskService.class);
when(ConfigDiskServiceFactory.getInstance()).thenReturn(configRocksDbDiskService);
CacheItem cacheItem = new CacheItem(groupKey);
cacheItem.getConfigCache().setMd5Gbk(MD5Utils.md5Hex(content, "GBK"));
cacheItem.getConfigCache().setMd5Utf8(MD5Utils.md5Hex(content, "UTF-8"));
cacheItem.getConfigCache().setEncryptedDataKey("key_testGetTag_NotFound");
when(ConfigCacheService.getContentCache(eq(groupKey))).thenReturn(cacheItem);
ConfigQueryRequest configQueryRequest = new ConfigQueryRequest();
configQueryRequest.setDataId(dataId);
configQueryRequest.setGroup(group);
String specificTag = "specific_tag";
configQueryRequest.setTag(specificTag);
String autoTag = "auto_tag111";
configQueryRequest.putHeader(VIPSERVER_TAG, autoTag);
RequestMeta requestMeta = new RequestMeta();
requestMeta.setClientIp("127.0.0.1");
ConfigQueryResponse response = configQueryRequestHandler.handle(configQueryRequest, requestMeta);
//check content&md5
assertNull(response.getContent());
assertNull(response.getMd5());
assertEquals(CONFIG_NOT_FOUND, response.getErrorCode());
assertNull(response.getEncryptedDataKey());
//check flags.
assertFalse(response.isBeta());
assertEquals(specificTag, response.getTag());
}
|
@Override
public BaseCombineOperator run() {
try (InvocationScope ignored = Tracing.getTracer().createScope(CombinePlanNode.class)) {
return getCombineOperator();
}
}
|
@Test
public void testSlowPlanNode() {
AtomicBoolean notInterrupted = new AtomicBoolean();
List<PlanNode> planNodes = new ArrayList<>();
for (int i = 0; i < 20; i++) {
planNodes.add(() -> {
try {
Thread.sleep(10000);
} catch (InterruptedException e) {
// Thread should be interrupted
throw new RuntimeException(e);
}
notInterrupted.set(true);
return null;
});
}
_queryContext.setEndTimeMs(System.currentTimeMillis() + 100);
CombinePlanNode combinePlanNode = new CombinePlanNode(planNodes, _queryContext, _executorService, null);
try {
combinePlanNode.run();
} catch (RuntimeException e) {
Assert.assertTrue(e.getCause() instanceof TimeoutException);
Assert.assertFalse(notInterrupted.get());
return;
}
// Fail.
Assert.fail();
}
|
private boolean isContainsEnhancedTable(final ShardingSphereMetaData metaData, final Collection<String> databaseNames, final String currentDatabaseName) {
for (String each : databaseNames) {
if (isContainsEnhancedTable(metaData, each, getTablesContext().getTableNames())) {
return true;
}
}
return null != currentDatabaseName && isContainsEnhancedTable(metaData, currentDatabaseName, getTablesContext().getTableNames());
}
|
@Test
void assertIsContainsEnhancedTable() {
ProjectionsSegment projectionsSegment = new ProjectionsSegment(0, 0);
projectionsSegment.getProjections().add(new ColumnProjectionSegment(new ColumnSegment(0, 0, new IdentifierValue("order_id"))));
SelectStatement selectStatement = new MySQLSelectStatement();
selectStatement.setProjections(projectionsSegment);
selectStatement.setFrom(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("t_order"))));
ShardingSphereMetaData metaData = new ShardingSphereMetaData(Collections.singletonMap(DefaultDatabase.LOGIC_NAME, mockDatabase()), mock(ResourceMetaData.class),
mock(RuleMetaData.class), mock(ConfigurationProperties.class));
SelectStatementContext actual = new SelectStatementContext(metaData, Collections.emptyList(), selectStatement, DefaultDatabase.LOGIC_NAME, Collections.emptyList());
assertTrue(actual.isContainsEnhancedTable());
}
|
public DropSourceCommand create(final DropStream statement) {
return create(
statement.getName(),
statement.getIfExists(),
statement.isDeleteTopic(),
DataSourceType.KSTREAM
);
}
|
@Test
public void shouldFailDropSourceOnMissingSourceWithNoIfExistsForTable() {
// Given:
final DropTable dropTable = new DropTable(SOME_NAME, false, true);
when(metaStore.getSource(SOME_NAME)).thenReturn(null);
// When:
final Exception e = assertThrows(
KsqlException.class,
() -> dropSourceFactory.create(dropTable)
);
// Then:
assertThat(e.getMessage(), containsString("Table bob does not exist."));
}
|
@SuppressWarnings("ConstantConditions")
public boolean removeActions(@NonNull Class<? extends Action> clazz) {
if (clazz == null) {
throw new IllegalArgumentException("Action type must be non-null");
}
// CopyOnWriteArrayList does not support Iterator.remove, so need to do it this way:
List<Action> old = new ArrayList<>();
List<Action> current = getActions();
for (Action a : current) {
if (clazz.isInstance(a)) {
old.add(a);
}
}
return current.removeAll(old);
}
|
@Test
public void removeActions_null() {
assertThrows(IllegalArgumentException.class, () -> thing.removeActions(null));
}
|
static void validateFileExtension(Path jarPath) {
String fileName = jarPath.getFileName().toString();
if (!fileName.endsWith(".jar")) {
throw new JetException("File name extension should be .jar");
}
}
|
@Test
public void invalidFileExtension() {
Path jarPath1 = Paths.get("/mnt/foo");
assertThatThrownBy(() -> SubmitJobParametersValidator.validateFileExtension(jarPath1))
.isInstanceOf(JetException.class)
.hasMessageContaining("File name extension should be .jar");
Path jarPath2 = Paths.get("foo");
assertThatThrownBy(() -> SubmitJobParametersValidator.validateFileExtension(jarPath2))
.isInstanceOf(JetException.class)
.hasMessageContaining("File name extension should be .jar");
}
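// A minimal complementary sketch, not from the original suite: a path ending in .jar is
// expected to pass validation without throwing; the Paths import is the same as above.
@Test
public void validFileExtension() {
    Path jarPath = Paths.get("/mnt/foo.jar");
    SubmitJobParametersValidator.validateFileExtension(jarPath);
}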
|
public void validateDocumentGraph(List<SDDocumentType> documents) {
for (SDDocumentType document : documents) {
validateRoot(document);
}
}
|
@Test
void self_reference_is_forbidden() {
Throwable exception = assertThrows(DocumentGraphValidator.DocumentGraphException.class, () -> {
Schema adSchema = createSearchWithName("ad");
createDocumentReference(adSchema, adSchema, "ad_ref");
DocumentGraphValidator validator = new DocumentGraphValidator();
validator.validateDocumentGraph(documentListOf(adSchema));
});
assertTrue(exception.getMessage().contains("Document dependency cycle detected: ad->ad."));
}
|
public static String prepareUrl(@NonNull String url) {
url = url.trim();
String lowerCaseUrl = url.toLowerCase(Locale.ROOT); // protocol names are case insensitive
if (lowerCaseUrl.startsWith("feed://")) {
Log.d(TAG, "Replacing feed:// with http://");
return prepareUrl(url.substring("feed://".length()));
} else if (lowerCaseUrl.startsWith("pcast://")) {
Log.d(TAG, "Removing pcast://");
return prepareUrl(url.substring("pcast://".length()));
} else if (lowerCaseUrl.startsWith("pcast:")) {
Log.d(TAG, "Removing pcast:");
return prepareUrl(url.substring("pcast:".length()));
} else if (lowerCaseUrl.startsWith("itpc")) {
Log.d(TAG, "Replacing itpc:// with http://");
return prepareUrl(url.substring("itpc://".length()));
} else if (lowerCaseUrl.startsWith(AP_SUBSCRIBE)) {
Log.d(TAG, "Removing antennapod-subscribe://");
return prepareUrl(url.substring(AP_SUBSCRIBE.length()));
} else if (lowerCaseUrl.contains(AP_SUBSCRIBE_DEEPLINK)) {
Log.d(TAG, "Removing " + AP_SUBSCRIBE_DEEPLINK);
String query = Uri.parse(url).getQueryParameter("url");
try {
return prepareUrl(URLDecoder.decode(query, "UTF-8"));
} catch (UnsupportedEncodingException e) {
return prepareUrl(query);
}
} else if (!(lowerCaseUrl.startsWith("http://") || lowerCaseUrl.startsWith("https://"))) {
Log.d(TAG, "Adding http:// at the beginning of the URL");
return "http://" + url;
} else {
return url;
}
}
|
@Test
public void testAntennaPodSubscribeProtocolWithScheme() {
final String in = "antennapod-subscribe://https://example.com";
final String out = UrlChecker.prepareUrl(in);
assertEquals("https://example.com", out);
}
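// A supplementary sketch, not from the original suite: it assumes only the assertEquals
// import used above and shows that scheme-less and feed:// URLs end up with http://.
@Test
public void testSchemeAddedWhenMissing() {
    // surrounding whitespace is trimmed before the scheme check
    assertEquals("http://example.com", UrlChecker.prepareUrl(" example.com "));
    // feed:// is stripped and, lacking a scheme, http:// is prepended
    assertEquals("http://example.com", UrlChecker.prepareUrl("feed://example.com"));
}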
|
@Override
public AuthorizationPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) {
Capabilities capabilities = capabilities(descriptor.id());
PluggableInstanceSettings authConfigSettings = authConfigSettings(descriptor.id());
PluggableInstanceSettings roleSettings = roleSettings(descriptor.id(), capabilities);
Image image = image(descriptor.id());
return new AuthorizationPluginInfo(descriptor, authConfigSettings, roleSettings, image, capabilities);
}
|
@Test
public void shouldBuildPluginInfoWithCapablities() {
GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build();
Capabilities capabilities = new Capabilities(SupportedAuthType.Password, true, true, false);
when(extension.getCapabilities(descriptor.id())).thenReturn(capabilities);
AuthorizationPluginInfo pluginInfo = new AuthorizationPluginInfoBuilder(extension).pluginInfoFor(descriptor);
assertThat(pluginInfo.getCapabilities(), is(capabilities));
}
|
static QueryId buildId(
final Statement statement,
final EngineContext engineContext,
final QueryIdGenerator idGenerator,
final OutputNode outputNode,
final boolean createOrReplaceEnabled,
final Optional<String> withQueryId) {
if (withQueryId.isPresent()) {
final String queryId = withQueryId.get().toUpperCase();
validateWithQueryId(queryId);
return new QueryId(queryId);
}
if (statement instanceof CreateTable && ((CreateTable) statement).isSource()) {
// Use the CST name as part of the QueryID
final String suffix = ((CreateTable) statement).getName().text().toUpperCase()
+ "_" + idGenerator.getNext().toUpperCase();
return new QueryId(ReservedQueryIdsPrefixes.CST + suffix);
}
if (!outputNode.getSinkName().isPresent()) {
final String prefix =
"transient_" + outputNode.getSource().getLeftmostSourceNode().getAlias().text() + "_";
return new QueryId(prefix + Math.abs(ThreadLocalRandom.current().nextLong()));
}
final KsqlStructuredDataOutputNode structured = (KsqlStructuredDataOutputNode) outputNode;
if (!structured.createInto()) {
return new QueryId(ReservedQueryIdsPrefixes.INSERT + idGenerator.getNext());
}
final SourceName sink = outputNode.getSinkName().get();
final Set<QueryId> queriesForSink = engineContext.getQueryRegistry().getQueriesWithSink(sink);
if (queriesForSink.size() > 1) {
throw new KsqlException("REPLACE for sink " + sink + " is not supported because there are "
+ "multiple queries writing into it: " + queriesForSink);
} else if (!queriesForSink.isEmpty()) {
if (!createOrReplaceEnabled) {
final String type = outputNode.getNodeOutputType().getKsqlType().toLowerCase();
throw new UnsupportedOperationException(
String.format(
"Cannot add %s '%s': A %s with the same name already exists",
type,
sink.text(),
type));
}
return Iterables.getOnlyElement(queriesForSink);
}
final String suffix = outputNode.getId().toString().toUpperCase()
+ "_" + idGenerator.getNext().toUpperCase();
return new QueryId(
outputNode.getNodeOutputType() == DataSourceType.KTABLE
? ReservedQueryIdsPrefixes.CTAS + suffix
: ReservedQueryIdsPrefixes.CSAS + suffix
);
}
|
@Test
public void shouldGenerateUniqueRandomIdsForTransientQueries() {
// Given:
when(transientPlan.getSinkName()).thenReturn(Optional.empty());
when(transientPlan.getSource()).thenReturn(planNode);
when(planNode.getLeftmostSourceNode()).thenReturn(dataSourceNode);
when(dataSourceNode.getAlias()).thenReturn(sourceName);
when(sourceName.text()).thenReturn(SOURCE);
// When:
long numUniqueIds = IntStream.range(0, 100)
.mapToObj(i -> QueryIdUtil.buildId(statement, engineContext, idGenerator, transientPlan,
false, Optional.empty()))
.distinct()
.count();
// Then:
assertThat(numUniqueIds, is(100L));
}
|
public Optional<Long> validateAndGetTimestamp(final ExternalServiceCredentials credentials) {
final String[] parts = requireNonNull(credentials).password().split(DELIMITER);
final String timestampSeconds;
final String actualSignature;
// making sure password format matches our expectations based on the generator configuration
if (parts.length == 3 && prependUsername) {
final String username = usernameIsTimestamp() ? parts[0] + DELIMITER + parts[1] : parts[0];
// username has to match the one from `credentials`
if (!credentials.username().equals(username)) {
return Optional.empty();
}
timestampSeconds = parts[1];
actualSignature = parts[2];
} else if (parts.length == 2 && !prependUsername) {
timestampSeconds = parts[0];
actualSignature = parts[1];
} else {
// unexpected password format
return Optional.empty();
}
final String signedData = usernameIsTimestamp() ? credentials.username() : credentials.username() + DELIMITER + timestampSeconds;
final String expectedSignature = truncateSignature
? hmac256TruncatedToHexString(key, signedData, TRUNCATED_SIGNATURE_LENGTH)
: hmac256ToHexString(key, signedData);
// if the signature is valid it's safe to parse the `timestampSeconds` string into Long
return hmacHexStringsEqual(expectedSignature, actualSignature)
? Optional.of(Long.valueOf(timestampSeconds))
: Optional.empty();
}
|
@Test
public void testValidateInvalid() throws Exception {
final ExternalServiceCredentials corruptedStandardUsername = new ExternalServiceCredentials(
standardCredentials.username(), standardCredentials.password().replace(E164, E164 + "0"));
final ExternalServiceCredentials corruptedStandardTimestamp = new ExternalServiceCredentials(
standardCredentials.username(), standardCredentials.password().replace(TIME_SECONDS_STRING, TIME_SECONDS_STRING + "0"));
final ExternalServiceCredentials corruptedStandardPassword = new ExternalServiceCredentials(
standardCredentials.username(), standardCredentials.password() + "0");
final ExternalServiceCredentials corruptedUsernameTimestamp = new ExternalServiceCredentials(
usernameIsTimestampCredentials.username(), usernameIsTimestampCredentials.password().replace(USERNAME_TIMESTAMP, USERNAME_TIMESTAMP
+ "0"));
final ExternalServiceCredentials corruptedUsernameTimestampPassword = new ExternalServiceCredentials(
usernameIsTimestampCredentials.username(), usernameIsTimestampCredentials.password() + "0");
assertTrue(standardGenerator.validateAndGetTimestamp(corruptedStandardUsername).isEmpty());
assertTrue(standardGenerator.validateAndGetTimestamp(corruptedStandardTimestamp).isEmpty());
assertTrue(standardGenerator.validateAndGetTimestamp(corruptedStandardPassword).isEmpty());
assertTrue(usernameIsTimestampGenerator.validateAndGetTimestamp(corruptedUsernameTimestamp).isEmpty());
assertTrue(usernameIsTimestampGenerator.validateAndGetTimestamp(corruptedUsernameTimestampPassword).isEmpty());
}
|
@Override
public URL getLocalArtifactUrl(DependencyJar dependency) {
return delegate.getLocalArtifactUrl(dependency);
}
|
@Test
public void whenRobolectricDepsPropertiesResource() throws Exception {
Path depsPath =
tempDirectory.createFile(
"deps.properties", "org.robolectric\\:android-all\\:" + VERSION + ": file-123.jar");
when(mockClassLoader.getResource("robolectric-deps.properties")).thenReturn(meh(depsPath));
DependencyResolver resolver = new LegacyDependencyResolver(properties, mockClassLoader);
URL jarUrl = resolver.getLocalArtifactUrl(DEPENDENCY_COORDS);
assertThat(Fs.fromUrl(jarUrl).toString()).endsWith("file-123.jar");
}
|
public static UserGroupInformation getUGI(HttpServletRequest request,
Configuration conf) throws IOException {
return getUGI(null, request, conf);
}
|
@Test
public void testGetUgiFromToken() throws IOException {
conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
ServletContext context = mock(ServletContext.class);
String realUser = "TheDoctor";
String user = "TheNurse";
conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
UserGroupInformation.setConfiguration(conf);
UserGroupInformation ugi;
HttpServletRequest request;
Text ownerText = new Text(user);
DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(
ownerText, ownerText, new Text(realUser));
Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
dtId, new DummySecretManager(0, 0, 0, 0));
String tokenString = token.encodeToUrlString();
// token with no auth-ed user
request = getMockRequest(null, null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// token with auth-ed user
request = getMockRequest(realUser, null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// completely different user, token trumps auth
request = getMockRequest("rogue", null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// expected case
request = getMockRequest(null, user, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// if present token, ignore doas parameter
request = getMockRequest(null, null, "rogue");
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// if present token, ignore user.name parameter
request = getMockRequest(null, "rogue", null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// if present token, ignore user.name and doas parameter
request = getMockRequest(null, user, "rogue");
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
}
|
public void setSendFullErrorException(boolean sendFullErrorException) {
this.sendFullErrorException = sendFullErrorException;
}
|
@Test
void handleFlowableIllegalArgumentExceptionWithoutSendFullErrorException() throws Exception {
testController.exceptionSupplier = () -> new FlowableIllegalArgumentException("task name is mandatory");
handlerAdvice.setSendFullErrorException(false);
String body = mockMvc.perform(get("/"))
.andExpect(status().isBadRequest())
.andReturn()
.getResponse()
.getContentAsString();
assertThatJson(body)
.isEqualTo("{"
+ " message: 'Bad request',"
+ " exception: 'task name is mandatory'"
+ "}");
}
|
@Override
public TypeInformation<T> getProducedType() {
return type;
}
|
@Test
void testTypeExtractionGeneric() {
TypeInformation<JSONPObject> type = new JsonSchema().getProducedType();
TypeInformation<JSONPObject> expected = TypeInformation.of(new TypeHint<JSONPObject>() {});
assertThat(type).isEqualTo(expected);
}
|
public static short translateBucketAcl(GSAccessControlList acl, String userId) {
short mode = (short) 0;
for (GrantAndPermission gp : acl.getGrantAndPermissions()) {
Permission perm = gp.getPermission();
GranteeInterface grantee = gp.getGrantee();
if (perm.equals(Permission.PERMISSION_READ)) {
if (isUserIdInGrantee(grantee, userId)) {
// If the bucket is readable by the user, add r and x to the owner mode.
mode |= (short) 0500;
}
} else if (perm.equals(Permission.PERMISSION_WRITE)) {
if (isUserIdInGrantee(grantee, userId)) {
// If the bucket is writable by the user, +w to the owner mode.
mode |= (short) 0200;
}
} else if (perm.equals(Permission.PERMISSION_FULL_CONTROL)) {
if (isUserIdInGrantee(grantee, userId)) {
// If the user has full control to the bucket, +rwx to the owner mode.
mode |= (short) 0700;
}
}
}
return mode;
}
|
@Test
public void translateUserWritePermission() {
mAcl.grantPermission(mUserGrantee, Permission.PERMISSION_WRITE);
assertEquals((short) 0200, GCSUtils.translateBucketAcl(mAcl, ID));
mAcl.grantPermission(mUserGrantee, Permission.PERMISSION_READ);
assertEquals((short) 0700, GCSUtils.translateBucketAcl(mAcl, ID));
}
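// A complementary sketch, not from the original suite: it reuses the mAcl, mUserGrantee and
// ID fixtures from the test above and checks the FULL_CONTROL branch, which grants rwx.
@Test
public void translateUserFullControlPermission() {
    mAcl.grantPermission(mUserGrantee, Permission.PERMISSION_FULL_CONTROL);
    assertEquals((short) 0700, GCSUtils.translateBucketAcl(mAcl, ID));
}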
|
public static void main(String[] args) {
String relDir = args.length == 1 ? args[0] : "";
graphhopperLocationIndex(relDir);
lowLevelLocationIndex();
}
|
@Test
public void main() {
LocationIndexExample.main(new String[]{"../"});
}
|
@Override
public void collect(long elapsedTime, StatementContext ctx) {
final Timer timer = getTimer(ctx);
timer.update(elapsedTime, TimeUnit.NANOSECONDS);
}
|
@Test
public void updatesTimerForContextGroupAndName() throws Exception {
final StatementNameStrategy strategy = new SmartNameStrategy();
final InstrumentedTimingCollector collector = new InstrumentedTimingCollector(registry,
strategy);
final StatementContext ctx = mock(StatementContext.class);
doReturn("SELECT 1").when(ctx).getRawSql();
doReturn("my-group").when(ctx).getAttribute(NameStrategies.STATEMENT_GROUP);
doReturn("updatesTimerForContextGroupAndName").when(ctx)
.getAttribute(NameStrategies.STATEMENT_NAME);
collector.collect(TimeUnit.SECONDS.toNanos(4), ctx);
final String name = strategy.getStatementName(ctx);
final Timer timer = registry.timer(name);
assertThat(name)
.isEqualTo(name("my-group", "updatesTimerForContextGroupAndName", ""));
assertThat(timer.getSnapshot().getMax())
.isEqualTo(4000000000L);
}
|
public static FusedPipeline fuse(Pipeline p) {
return new GreedyPipelineFuser(p).fusedPipeline;
}
|
@Test
public void parDoWithStateAndTimerRootsStage() {
PTransform timerTransform =
PTransform.newBuilder()
.setUniqueName("TimerParDo")
.putInputs("input", "impulse.out")
.putInputs("timer", "timer.out")
.putOutputs("timer", "timer.out")
.putOutputs("output", "output.out")
.setSpec(
FunctionSpec.newBuilder()
.setUrn(PTransformTranslation.PAR_DO_TRANSFORM_URN)
.setPayload(
ParDoPayload.newBuilder()
.setDoFn(FunctionSpec.newBuilder())
.putStateSpecs("state", StateSpec.getDefaultInstance())
.putTimerFamilySpecs("timer", TimerFamilySpec.getDefaultInstance())
.build()
.toByteString()))
.setEnvironmentId("common")
.build();
Components components =
partialComponents
.toBuilder()
.putTransforms("timer", timerTransform)
.putPcollections("timer.out", pc("timer.out"))
.putPcollections("output.out", pc("output.out"))
.putEnvironments("common", Environments.createDockerEnvironment("common"))
.build();
FusedPipeline fused =
GreedyPipelineFuser.fuse(
Pipeline.newBuilder()
.setComponents(components)
.addRequirements(ParDoTranslation.REQUIRES_STATEFUL_PROCESSING_URN)
.build());
assertThat(
fused.getRunnerExecutedTransforms(),
containsInAnyOrder(
PipelineNode.pTransform("impulse", components.getTransformsOrThrow("impulse"))));
assertThat(
fused.getFusedStages(),
contains(
ExecutableStageMatcher.withInput("impulse.out")
.withNoOutputs()
.withTransforms("timer")));
}
|
public List<String> toCollection() {
return new ArrayList<>(matchers);
}
|
@Test
void shouldReturnMatchersAsArray() {
assertThat(new Matcher("JH,Pavan").toCollection()).isEqualTo(Arrays.asList("JH", "Pavan"));
}
|
@Override
protected String ruleHandler() {
return "";
}
|
@Test
public void testRuleHandler() {
assertEquals(StringUtils.EMPTY, shenyuClientRegisterMotanService.ruleHandler());
}
|
public String getUser() {
return user;
}
|
@Test
public void getUser() {
assertEquals(USER, context.getUser());
}
|
public static boolean parse(final String str, ResTable_config out) {
return parse(str, out, true);
}
|
@Test
public void parse_screenLayoutLong_notlong() {
ResTable_config config = new ResTable_config();
ConfigDescription.parse("notlong", config);
assertThat(config.screenLayout).isEqualTo(SCREENLONG_NO);
}
|
@Override
public int hashCode() {
return Objects.hash(uuid);
}
|
@Test
void hashCode_whenEmptyObjects_shouldBeTheSame() {
PortfolioDto p1 = new PortfolioDto();
PortfolioDto p2 = new PortfolioDto();
int hash1 = p1.hashCode();
int hash2 = p2.hashCode();
assertThat(hash1).isEqualTo(hash2);
}
|
public static int timeToSecond(String timeStr) {
if (StrUtil.isEmpty(timeStr)) {
return 0;
}
final List<String> hms = StrUtil.splitTrim(timeStr, StrUtil.C_COLON, 3);
int lastIndex = hms.size() - 1;
int result = 0;
for (int i = lastIndex; i >= 0; i--) {
result += Integer.parseInt(hms.get(i)) * Math.pow(60, (lastIndex - i));
}
return result;
}
|
@Test
public void timeToSecondTest() {
int second = DateUtil.timeToSecond("00:01:40");
assertEquals(100, second);
second = DateUtil.timeToSecond("00:00:40");
assertEquals(40, second);
second = DateUtil.timeToSecond("01:00:00");
assertEquals(3600, second);
second = DateUtil.timeToSecond("00:00:00");
assertEquals(0, second);
}
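// A supplementary sketch, not from the original suite: parts are consumed from the right,
// so shorter "mm:ss" and bare-seconds inputs still work; only assertEquals is assumed.
@Test
public void timeToSecondShortFormatTest() {
    assertEquals(100, DateUtil.timeToSecond("01:40"));
    assertEquals(40, DateUtil.timeToSecond("40"));
    // empty input falls back to 0
    assertEquals(0, DateUtil.timeToSecond(""));
}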
|
public Integer doCall() throws Exception {
// Operator id must be set
if (ObjectHelper.isEmpty(operatorId)) {
printer().println("Operator id must be set");
return -1;
}
List<String> integrationSources
= Stream.concat(Arrays.stream(Optional.ofNullable(filePaths).orElseGet(() -> new String[] {})),
Arrays.stream(Optional.ofNullable(sources).orElseGet(() -> new String[] {}))).toList();
Integration integration = new Integration();
integration.setSpec(new IntegrationSpec());
integration.getMetadata()
.setName(getIntegrationName(integrationSources));
if (dependencies != null && dependencies.length > 0) {
List<String> deps = new ArrayList<>();
for (String dependency : dependencies) {
String normalized = normalizeDependency(dependency);
validateDependency(normalized, printer());
deps.add(normalized);
}
integration.getSpec().setDependencies(deps);
}
if (kit != null) {
IntegrationKit integrationKit = new IntegrationKit();
integrationKit.setName(kit);
integration.getSpec().setIntegrationKit(integrationKit);
}
if (traitProfile != null) {
TraitProfile p = TraitProfile.valueOf(traitProfile.toUpperCase(Locale.US));
integration.getSpec().setProfile(p.name().toLowerCase(Locale.US));
}
if (repositories != null && repositories.length > 0) {
integration.getSpec().setRepositories(List.of(repositories));
}
if (annotations != null && annotations.length > 0) {
integration.getMetadata().setAnnotations(Arrays.stream(annotations)
.filter(it -> it.contains("="))
.map(it -> it.split("="))
.filter(it -> it.length == 2)
.collect(Collectors.toMap(it -> it[0].trim(), it -> it[1].trim())));
}
if (integration.getMetadata().getAnnotations() == null) {
integration.getMetadata().setAnnotations(new HashMap<>());
}
// --operator-id={id} is a syntax sugar for '--annotation camel.apache.org/operator.id={id}'
integration.getMetadata().getAnnotations().put(CamelKCommand.OPERATOR_ID_LABEL, operatorId);
// --integration-profile={id} is a syntax sugar for '--annotation camel.apache.org/integration-profile.id={id}'
if (integrationProfile != null) {
if (integrationProfile.contains("/")) {
String[] namespacedName = integrationProfile.split("/", 2);
integration.getMetadata().getAnnotations().put(CamelKCommand.INTEGRATION_PROFILE_NAMESPACE_ANNOTATION,
namespacedName[0]);
integration.getMetadata().getAnnotations().put(CamelKCommand.INTEGRATION_PROFILE_ANNOTATION, namespacedName[1]);
} else {
integration.getMetadata().getAnnotations().put(CamelKCommand.INTEGRATION_PROFILE_ANNOTATION,
integrationProfile);
}
}
if (labels != null && labels.length > 0) {
integration.getMetadata().setLabels(Arrays.stream(labels)
.filter(it -> it.contains("="))
.map(it -> it.split("="))
.filter(it -> it.length == 2)
.collect(Collectors.toMap(it -> it[0].trim(), it -> it[1].trim())));
}
Traits traitsSpec = TraitHelper.parseTraits(traits);
if (image != null) {
TraitHelper.configureContainerImage(traitsSpec, image, null, null, null, null);
} else {
List<Source> resolvedSources = SourceHelper.resolveSources(integrationSources, compression);
List<Flows> flows = new ArrayList<>();
List<Sources> sources = new ArrayList<>();
for (Source source : resolvedSources) {
if (useFlows && source.isYaml() && !source.compressed()) {
JsonNode json = KubernetesHelper.json().convertValue(
KubernetesHelper.yaml().load(source.content()), JsonNode.class);
if (json.isArray()) {
for (JsonNode item : json) {
Flows flowSpec = new Flows();
flowSpec.setAdditionalProperties(KubernetesHelper.json().readerFor(Map.class).readValue(item));
flows.add(flowSpec);
}
} else {
Flows flowSpec = new Flows();
flowSpec.setAdditionalProperties(KubernetesHelper.json().readerFor(Map.class).readValue(json));
flows.add(flowSpec);
}
} else {
Sources sourceSpec = new Sources();
sourceSpec.setName(source.name());
sourceSpec.setLanguage(source.language());
sourceSpec.setContent(source.content());
sourceSpec.setCompression(source.compressed());
sources.add(sourceSpec);
}
}
if (!flows.isEmpty()) {
integration.getSpec().setFlows(flows);
}
if (!sources.isEmpty()) {
integration.getSpec().setSources(sources);
}
}
if (podTemplate != null) {
Source templateSource = SourceHelper.resolveSource(podTemplate);
if (!templateSource.isYaml()) {
throw new RuntimeCamelException(
("Unsupported pod template %s - " +
"please use proper YAML source").formatted(templateSource.extension()));
}
Spec podSpec = KubernetesHelper.yaml().loadAs(templateSource.content(), Spec.class);
Template template = new Template();
template.setSpec(podSpec);
integration.getSpec().setTemplate(template);
}
convertOptionsToTraits(traitsSpec);
integration.getSpec().setTraits(traitsSpec);
if (serviceAccount != null) {
integration.getSpec().setServiceAccountName(serviceAccount);
}
if (output != null) {
switch (output) {
case "k8s" -> {
List<Source> sources = SourceHelper.resolveSources(integrationSources);
TraitContext context
= new TraitContext(integration.getMetadata().getName(), "1.0-SNAPSHOT", printer(), sources);
TraitHelper.configureContainerImage(traitsSpec, image, "quay.io", null, integration.getMetadata().getName(),
"1.0-SNAPSHOT");
new TraitCatalog().apply(traitsSpec, context, traitProfile);
printer().println(
context.buildItems().stream().map(KubernetesHelper::dumpYaml).collect(Collectors.joining("---")));
}
case "yaml" -> printer().println(KubernetesHelper.dumpYaml(integration));
case "json" -> printer().println(
JSonHelper.prettyPrint(KubernetesHelper.json().writer().writeValueAsString(integration), 2));
default -> {
printer().printf("Unsupported output format '%s' (supported: yaml, json)%n", output);
return -1;
}
}
return 0;
}
final AtomicBoolean updated = new AtomicBoolean(false);
client(Integration.class).resource(integration).createOr(it -> {
updated.set(true);
return it.update();
});
if (updated.get()) {
printer().printf("Integration %s updated%n", integration.getMetadata().getName());
} else {
printer().printf("Integration %s created%n", integration.getMetadata().getName());
}
if (wait || logs) {
client(Integration.class).withName(integration.getMetadata().getName())
.waitUntilCondition(it -> "Running".equals(it.getStatus().getPhase()), 10, TimeUnit.MINUTES);
}
if (logs) {
IntegrationLogs logsCommand = new IntegrationLogs(getMain());
logsCommand.withClient(client());
logsCommand.withName(integration.getMetadata().getName());
logsCommand.doCall();
}
return 0;
}
|
@Test
public void shouldAddTraits() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.traits = new String[] { "logging.level=DEBUG", "container.image-pull-policy=Always" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
container:
imagePullPolicy: Always
logging:
level: DEBUG""", printer.getOutput());
}
@Test
public void shouldAddTraitAddons() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.traits = new String[] {
"container.port=8080", "telemetry.enabled=true",
"telemetry.endpoint=http://opentelemetrycollector.ns.svc.cluster.local:8080" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
addons:
telemetry:
endpoint: http://opentelemetrycollector.ns.svc.cluster.local:8080
enabled: true
container:
port: 8080""", printer.getOutput());
}
@Test
public void shouldSpecFromOptions() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.name = "custom";
command.operatorId = "custom-operator";
command.serviceAccount = "service-account-name";
command.labels = new String[] { "custom-label=enabled" };
command.annotations = new String[] { "custom-annotation=enabled" };
command.repositories = new String[] { "http://custom-repository" };
command.traitProfile = "knative";
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
custom-annotation: enabled
camel.apache.org/operator.id: custom-operator
labels:
custom-label: enabled
name: custom
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
profile: knative
repositories:
- http://custom-repository
serviceAccountName: service-account-name
traits: {}""", printer.getOutput());
}
@Test
public void shouldAddVolumes() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.volumes = new String[] { "/foo", "/bar" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
mount:
volumes:
- /foo
- /bar""", printer.getOutput());
}
@Test
public void shouldAddDependencies() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.dependencies = new String[] { "camel-jackson", "camel-quarkus-jms", "mvn:foo:bar:1.0" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
dependencies:
- camel:jackson
- camel:jms
- mvn:foo:bar:1.0
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits: {}""", printer.getOutput());
}
@Test
public void shouldAddEnvVars() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.envVars = new String[] { "CAMEL_FOO=bar" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
environment:
vars:
- CAMEL_FOO=bar""", printer.getOutput());
}
@Test
public void shouldAddProperties() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.properties = new String[] { "camel.foo=bar" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
camel:
properties:
- camel.foo=bar""", printer.getOutput());
}
@Test
public void shouldAddBuildProperties() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.buildProperties = new String[] { "camel.foo=bar" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
builder:
properties:
- camel.foo=bar""", printer.getOutput());
}
@Test
public void shouldUseKit() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.kit = "kit-123456789";
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
integrationKit:
name: kit-123456789
traits: {}""", printer.getOutput());
}
@Test
public void shouldUseIntegrationProfile() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.integrationProfile = "my-profile";
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
camel.apache.org/integration-profile.id: my-profile
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits: {}""", printer.getOutput());
}
@Test
public void shouldUseNamespacedIntegrationProfile() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.integrationProfile = "my-namespace/my-profile";
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
camel.apache.org/integration-profile.namespace: my-namespace
camel.apache.org/integration-profile.id: my-profile
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits: {}""", printer.getOutput());
}
@Test
public void shouldAddSources() throws Exception {
IntegrationRun command = createCommand();
command.sources = new String[] { "classpath:route.yaml" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits: {}""", printer.getOutput());
}
@Test
public void shouldAddConnects() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.connects = new String[] { "serving.knative.dev/v1:Service:foo" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
service-binding:
services:
- serving.knative.dev/v1:Service:foo""", printer.getOutput());
}
@Test
public void shouldUsePodTemplate() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.podTemplate = "classpath:pod.yaml";
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
template:
spec:
containers:
- env:
- name: TEST
value: TEST
name: integration
volumeMounts:
- mountPath: /var/log
name: var-logs
volumes:
- emptyDir: {}
name: var-logs
traits: {}""", printer.getOutput());
}
@Test
public void shouldAddConfigs() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.configs = new String[] { "secret:foo", "configmap:bar" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
mount:
configs:
- secret:foo
- configmap:bar""", printer.getOutput());
}
@Test
public void shouldAddResources() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.resources = new String[] { "configmap:foo/file.txt" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
mount:
resources:
- configmap:foo/file.txt""", printer.getOutput());
}
@Test
public void shouldAddOpenApis() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.openApis = new String[] { "configmap:openapi/spec.yaml" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
openapi:
configmaps:
- configmap:openapi/spec.yaml""", printer.getOutput());
}
@Test
public void shouldUseImage() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.image = "quay.io/camel/demo-app:1.0";
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: demo-app-v1
spec:
traits:
container:
image: quay.io/camel/demo-app:1.0""", printer.getOutput());
}
@Test
public void shouldUseTraitWithListItem() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.traits
= new String[] {
"toleration.taints=camel.apache.org/master:NoExecute:300", "camel.properties=camel.foo=bar",
"affinity.node-affinity-labels=kubernetes.io/hostname" };
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
flows:
- from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
traits:
affinity:
nodeAffinityLabels:
- kubernetes.io/hostname
camel:
properties:
- camel.foo=bar
toleration:
taints:
- camel.apache.org/master:NoExecute:300""", printer.getOutput());
}
@Test
public void shouldUseCompression() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.compression = true;
command.output = "yaml";
command.doCall();
Assertions.assertEquals(
"""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
sources:
- compression: true
content: ZFPBbtswDL3nK16TSwukybCjd3LTBDVWOECcrshRsWlbqC15Ej03fz/KSdYA08WgRD6+90jPJjO86pyMpwJswTUh7lQun8yWPChH2NjeFIq1NbiPs80DJCQHawjWobWOBCS3hp0+9ixXzRkQqnJELRn2CyAjGtHT7T5ZrVHqhlBofy6S5oPmWnC41h6DdR8oBUkVhQ6NVQNt5KI903BUKVdoU0nb7uR0VTPsYMj5WncLQdkHGdnmysSfYceeIvJg+4uGG7kXF+b4JTChyffFN0G6DynTy+P04QdOUtyqE4xl9J5ukOkzp46FqLBqu0Yrk9OXrH8dxIvDBcMeWUm6GmXAlrdpUDyZSeV4auYuWi6HYVioke7Cump5Vbd8FUfTbP04UpaaN9OQ92LT71478fZ4guqEUa6OwrNRQxjcOJ1x6EJhcOKzqebwl6kLyu10vuy60hPVtwlimDKYxhmSbIqnOEuyuWC8J/uX7dse7/FuF6f7ZJ1hu8Nqmz4n+2SbSrRBnB7wM0mf5yAxS9rQZ+cCfyGpg5FUhJleF+hKIOxHiH1HuS51LrpM1auKUNk/5ExYj45cq30Ypxd6haA0utU8bpH/X5S0mUxKZ9toAvROR2DdkotY5x9y45k6H56AR3jiJ1uczmE48gd4VoYjvFDTWKxUSw3u7u4uBWwjNLaKwiJP/gIAAP//
language: yaml
name: route.yaml
traits: {}""",
removeLicenseHeader(printer.getOutput()));
}
private final Pattern comments = Pattern.compile("^\\s*#.*$", Pattern.MULTILINE);
private final Pattern emptyLine = Pattern.compile("^[\\r?\\n]$", Pattern.MULTILINE);
private String removeLicenseHeader(String yaml) {
return emptyLine.matcher(comments.matcher(yaml).replaceAll("")).replaceAll("");
}
@Test
public void shouldHandleUseFlowsDisabledOption() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.useFlows = false;
command.output = "yaml";
command.doCall();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Integration
metadata:
annotations:
camel.apache.org/operator.id: camel-k
name: route
spec:
sources:
- compression: false
content: |
from:
uri: timer:tick
steps:
- setBody:
constant: Hello Camel !!!
- to: log:info
language: yaml
name: route.yaml
traits: {}""", removeLicenseHeader(printer.getOutput()));
}
@Test
public void shouldFailWithMissingOperatorId() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.useFlows = false;
command.output = "yaml";
command.operatorId = "";
Assertions.assertEquals(-1, command.doCall());
Assertions.assertEquals("Operator id must be set", printer.getOutput());
}
@Test
public void shouldHandleUnsupportedOutputFormat() throws Exception {
IntegrationRun command = createCommand();
command.filePaths = new String[] { "classpath:route.yaml" };
command.useFlows = false;
command.output = "wrong";
Assertions.assertEquals(-1, command.doCall());
Assertions.assertEquals("Unsupported output format 'wrong' (supported: yaml, json)", printer.getOutput());
}
private IntegrationRun createCommand() {
IntegrationRun command = new IntegrationRun(new CamelJBangMain().withPrinter(printer));
command.withClient(kubernetesClient);
return command;
}
|
@Deprecated
public static FileInputList createFolderList( VariableSpace space, String[] folderName,
String[] folderRequired ) {
return createFolderList( DefaultBowl.getInstance(), space, folderName, folderRequired );
}
|
@Test
public void testCreateFolderList() throws Exception {
buildTestFolderTree();
String[] folderNameList = { tempFolder.getRoot().getPath() };
String[] folderRequiredList = { "N" };
VariableSpace spaceMock = mock( VariableSpace.class );
when( spaceMock.environmentSubstitute( any( String[].class ) ) ).thenAnswer(
(Answer<String[]>) invocationOnMock -> (String[]) invocationOnMock.getArguments()[ 0 ] );
FileInputList fileInputList = FileInputList.
createFolderList( spaceMock, folderNameList, folderRequiredList );
assertNotNull( fileInputList );
assertEquals( TOTAL_NUMBER_OF_FOLDERS_NO_ROOT, fileInputList.nrOfFiles() );
}
|
public static String getLogicIndexName(final String actualIndexName, final String actualTableName) {
String indexNameSuffix = UNDERLINE + actualTableName;
return actualIndexName.endsWith(indexNameSuffix) ? actualIndexName.substring(0, actualIndexName.lastIndexOf(indexNameSuffix)) : actualIndexName;
}
|
@Test
void assertGetLogicIndexNameWithIndexNameSuffix() {
assertThat(IndexMetaDataUtils.getLogicIndexName("order_index_t_order", "t_order"), is("order_index"));
}
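// A complementary check, not from the original suite: when the actual index name does not
// end with "_<actualTableName>", it is returned unchanged; it uses the same assertThat/is
// style as the test above.
@Test
void assertGetLogicIndexNameWithoutIndexNameSuffix() {
    assertThat(IndexMetaDataUtils.getLogicIndexName("order_index", "t_order"), is("order_index"));
}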
|
@Override
public Object getNativeDataType( Object object ) throws KettleValueException {
return getInternetAddress( object );
}
|
@Test
public void testGetNativeDataType() throws UnknownHostException, KettleValueException {
ValueMetaInterface vmi = new ValueMetaInternetAddress( "Test" );
InetAddress expected = InetAddress.getByAddress( new byte[] { (byte) 192, (byte) 168, 1, 1 } );
assertEquals( ValueMetaInterface.TYPE_INET, vmi.getType() );
assertEquals( ValueMetaInterface.STORAGE_TYPE_NORMAL, vmi.getStorageType() );
assertSame( expected, vmi.getNativeDataType( expected ) );
}
|
@Deprecated
@Restricted(DoNotUse.class)
public static String resolve(ConfigurationContext context, String toInterpolate) {
return context.getSecretSourceResolver().resolve(toInterpolate);
}
|
@Test
public void resolve_Json() {
String input = "{ \"a\": 1, \"b\": 2 }";
environment.set("FOO", input);
String output = resolve("${json:a:${FOO}}");
assertThat(output, equalTo("1"));
}
|
public List<String> splitSql(String text) {
List<String> queries = new ArrayList<>();
StringBuilder query = new StringBuilder();
char character;
boolean multiLineComment = false;
boolean singleLineComment = false;
boolean singleQuoteString = false;
boolean doubleQuoteString = false;
for (int index = 0; index < text.length(); index++) {
character = text.charAt(index);
// end of single line comment
if (singleLineComment && (character == '\n')) {
singleLineComment = false;
query.append(character);
if (index == (text.length() - 1) && !query.toString().trim().isEmpty()) {
// add the query when the end of the sql text is reached.
queries.add(query.toString());
}
continue;
}
// end of multiple line comment
if (multiLineComment && (index - 1) >= 0 && text.charAt(index - 1) == '/'
&& (index - 2) >= 0 && text.charAt(index - 2) == '*') {
multiLineComment = false;
}
if (character == '\'' && !(singleLineComment || multiLineComment)) {
if (singleQuoteString) {
singleQuoteString = false;
} else if (!doubleQuoteString) {
singleQuoteString = true;
}
}
if (character == '"' && !(singleLineComment || multiLineComment)) {
if (doubleQuoteString && index > 0) {
doubleQuoteString = false;
} else if (!singleQuoteString) {
doubleQuoteString = true;
}
}
if (!singleQuoteString && !doubleQuoteString && !multiLineComment && !singleLineComment
&& text.length() > (index + 1)) {
if (isSingleLineComment(text.charAt(index), text.charAt(index + 1))) {
singleLineComment = true;
} else if (text.charAt(index) == '/' && text.length() > (index + 2)
&& text.charAt(index + 1) == '*' && text.charAt(index + 2) != '+') {
multiLineComment = true;
}
}
if (character == ';' && !singleQuoteString && !doubleQuoteString && !multiLineComment && !singleLineComment) {
// reached a statement-terminating semicolon
if (!query.toString().trim().isEmpty()) {
queries.add(query.toString());
query = new StringBuilder();
}
} else if (index == (text.length() - 1)) {
// reached the last character of the text
if ((!singleLineComment && !multiLineComment)) {
query.append(character);
}
if (!query.toString().trim().isEmpty()) {
queries.add(query.toString());
query = new StringBuilder();
}
} else if (!singleLineComment && !multiLineComment) {
// normal case, not in single line comment and not in multiple line comment
query.append(character);
} else if (character == '\n') {
query.append(character);
}
}
List<String> refinedQueries = new ArrayList<>();
for (int i = 0; i < queries.size(); ++i) {
String emptyLine = "";
if (i > 0) {
emptyLine = createEmptyLine(refinedQueries.get(i - 1));
}
if (isSingleLineComment(queries.get(i)) || isMultipleLineComment(queries.get(i))) {
// refine the last refinedQuery
if (refinedQueries.size() > 0) {
String lastRefinedQuery = refinedQueries.get(refinedQueries.size() - 1);
refinedQueries.set(refinedQueries.size() - 1,
lastRefinedQuery + createEmptyLine(queries.get(i)));
}
} else {
String refinedQuery = emptyLine + queries.get(i);
refinedQueries.add(refinedQuery);
}
}
return refinedQueries;
}
|
@Test
void testCommentAtEnd() {
String sql = "\n" +
"select\n" +
" 'one'\n" +
" , 'two' -- comment\n";
SqlSplitter sqlSplitter = new SqlSplitter();
List<String> sqls = sqlSplitter.splitSql(sql);
assertEquals(1, sqls.size());
assertEquals("\n" +
"select\n" +
" 'one'\n" +
" , 'two' \n", sqls.get(0));
}
|
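A hedged usage sketch, assuming the splitter above is available as SqlSplitter with a no-arg constructor (as in the test); semicolons inside quotes or line comments must not end a statement, so this input yields exactly two queries.
import java.util.List;

public class SqlSplitterDemo {
    public static void main(String[] args) {
        SqlSplitter splitter = new SqlSplitter();
        // The first ';' ends a statement; the quoted ';' and the comment text do not.
        List<String> queries = splitter.splitSql("select 1; -- trailing comment\nselect ';';");
        System.out.println(queries.size());                 // 2
        System.out.println(queries.get(1).contains("';'")); // true: quoted ';' is kept
    }
}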
ClassicGroup getOrMaybeCreateClassicGroup(
String groupId,
boolean createIfNotExists
) throws GroupIdNotFoundException {
Group group = groups.get(groupId);
if (group == null && !createIfNotExists) {
throw new GroupIdNotFoundException(String.format("Classic group %s not found.", groupId));
}
if (group == null) {
ClassicGroup classicGroup = new ClassicGroup(logContext, groupId, ClassicGroupState.EMPTY, time, metrics);
groups.put(groupId, classicGroup);
metrics.onClassicGroupStateTransition(null, classicGroup.currentState());
return classicGroup;
} else {
if (group.type() == CLASSIC) {
return (ClassicGroup) group;
} else {
// We don't support upgrading/downgrading between protocols at the moment so
// we throw an exception if a group exists with the wrong type.
throw new GroupIdNotFoundException(String.format("Group %s is not a classic group.",
groupId));
}
}
}
|
@Test
public void testStaticMemberRejoinAsFollowerWithKnownMemberIdAndNoProtocolChange() throws Exception {
GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
.build();
GroupMetadataManagerTestContext.RebalanceResult rebalanceResult = context.staticMembersJoinAndRebalance(
"group-id",
"leader-instance-id",
"follower-instance-id"
);
ClassicGroup group = context.groupMetadataManager.getOrMaybeCreateClassicGroup("group-id", false);
// A static follower rejoin with no protocol change will not trigger rebalance.
JoinGroupRequestData request = new GroupMetadataManagerTestContext.JoinGroupRequestBuilder()
.withGroupId("group-id")
.withGroupInstanceId("follower-instance-id")
.withMemberId(rebalanceResult.followerId)
.withProtocolSuperset()
.build();
GroupMetadataManagerTestContext.JoinResult followerJoinResult = context.sendClassicGroupJoin(
request,
true,
true
);
// No records to write because no metadata changed.
assertTrue(followerJoinResult.records.isEmpty());
assertTrue(followerJoinResult.joinFuture.isDone());
// Old leader shouldn't be timed out.
assertTrue(group.hasStaticMember("leader-instance-id"));
JoinGroupResponseData expectedFollowerResponse = new JoinGroupResponseData()
.setErrorCode(Errors.NONE.code())
.setGenerationId(rebalanceResult.generationId) // The group has not changed.
.setMemberId(rebalanceResult.followerId)
.setLeader(rebalanceResult.leaderId)
.setProtocolName("range")
.setProtocolType("consumer")
.setSkipAssignment(false)
.setMembers(Collections.emptyList());
checkJoinGroupResponse(
expectedFollowerResponse,
followerJoinResult.joinFuture.get(),
group,
STABLE,
Collections.emptySet()
);
}
|
public int filterEntriesForConsumer(List<? extends Entry> entries, EntryBatchSizes batchSizes,
SendMessageInfo sendMessageInfo, EntryBatchIndexesAcks indexesAcks,
ManagedCursor cursor, boolean isReplayRead, Consumer consumer) {
return filterEntriesForConsumer(null, 0, entries, batchSizes,
sendMessageInfo, indexesAcks, cursor,
isReplayRead, consumer);
}
|
@Test
public void testFilterEntriesForConsumerOfTxnBufferAbort() {
PersistentTopic mockTopic = mock(PersistentTopic.class);
when(this.subscriptionMock.getTopic()).thenReturn(mockTopic);
when(mockTopic.isTxnAborted(any(TxnID.class), any())).thenReturn(true);
List<Entry> entries = new ArrayList<>();
entries.add(EntryImpl.create(1, 1, createTnxMessage("message1", 1)));
SendMessageInfo sendMessageInfo = SendMessageInfo.getThreadLocal();
EntryBatchSizes batchSizes = EntryBatchSizes.get(entries.size());
int size = this.helper.filterEntriesForConsumer(entries, batchSizes, sendMessageInfo, null, null, false, null);
assertEquals(size, 0);
}
|
@Override
public void createService(Service service, AbstractSelector selector) throws NacosException {
}
|
@Test
void testCreateService() throws Exception {
//TODO thrown.expect(UnsupportedOperationException.class);
Service service = new Service();
AbstractSelector selector = new NoneSelector();
client.createService(service, selector);
}
|
public String sendPostData(URLConnection connection, HTTPSamplerBase sampler) throws IOException {
// Buffer to hold the post body, except file content
StringBuilder postedBody = new StringBuilder(1000);
HTTPFileArg[] files = sampler.getHTTPFiles();
String contentEncoding = sampler.getContentEncoding();
// Check if we should do a multipart/form-data or an
// application/x-www-form-urlencoded post request
if(sampler.getUseMultipart()) {
OutputStream out = connection.getOutputStream();
// Write the form data post body, which we have constructed
// in the setHeaders. This contains the multipart start divider
// and any form data, i.e. arguments
out.write(formDataPostBody);
// Retrieve the formatted data using the same encoding used to create it
postedBody.append(new String(formDataPostBody, contentEncoding));
// Add any files
for (HTTPFileArg file : files) {
out.write(multipartDividerBytes);
out.write(CRLF);
postedBody.append(multipartDivider);
postedBody.append("\r\n");
// First write the start multipart file
final String headerValue = file.getHeader();
// TODO: reuse the bytes prepared in org.apache.jmeter.protocol.http.sampler.PostWriter.setHeaders
byte[] header = headerValue.getBytes(contentEncoding);
out.write(header);
// Retrieve the formatted data using the same encoding used to create it
postedBody.append(headerValue);
// Write the actual file content
writeFileToStream(file.getPath(), out);
// We just add placeholder text for file content
postedBody.append("<actual file content, not shown here>"); // $NON-NLS-1$
out.write(CRLF);
postedBody.append(CRLF_STRING);
}
// Write end of multipart: --, boundary, --, CRLF
out.write(multipartDividerBytes);
out.write(DASH_DASH_BYTES);
out.write(CRLF);
postedBody.append(multipartDivider);
postedBody.append("--\r\n");
out.close();
}
else {
// If there are no arguments, we can send a file as the body of the request
if(sampler.getArguments() != null && !sampler.hasArguments() && sampler.getSendFileAsPostBody()) {
OutputStream out = connection.getOutputStream();
// we're sure that there is at least one file because of
// getSendFileAsPostBody method's return value.
HTTPFileArg file = files[0];
writeFileToStream(file.getPath(), out);
out.flush();
out.close();
// We just add placeholder text for file content
postedBody.append("<actual file content, not shown here>"); // $NON-NLS-1$
}
else if (formDataUrlEncoded != null){ // may be null for PUT
// In an application/x-www-form-urlencoded request, we only support
// parameters, no file upload is allowed
OutputStream out = connection.getOutputStream();
out.write(formDataUrlEncoded);
out.flush();
out.close();
postedBody.append(new String(formDataUrlEncoded, contentEncoding));
}
}
return postedBody.toString();
}
|
@Test
public void testSendPostData() throws IOException {
sampler.setMethod(HTTPConstants.POST);
setupFilepart(sampler);
String titleValue = "mytitle";
String descriptionValue = "mydescription";
setupFormData(sampler, titleValue, descriptionValue);
// Test sending data with default encoding
String contentEncoding = "";
sampler.setContentEncoding(contentEncoding);
postWriter.setHeaders(connection, sampler);
postWriter.sendPostData(connection, sampler);
checkContentTypeMultipart(connection, PostWriter.BOUNDARY);
byte[] expectedFormBody = createExpectedOutput(PostWriter.BOUNDARY, null, titleValue, descriptionValue, TEST_FILE_CONTENT);
checkArraysHaveSameContent(expectedFormBody, connection.getOutputStreamContent());
checkContentLength(connection, expectedFormBody.length);
connection.disconnect();
// Test sending data as ISO-8859-1
establishConnection();
contentEncoding = "ISO-8859-1";
sampler.setContentEncoding(contentEncoding);
postWriter.setHeaders(connection, sampler);
postWriter.sendPostData(connection, sampler);
checkContentTypeMultipart(connection, PostWriter.BOUNDARY);
expectedFormBody = createExpectedOutput(PostWriter.BOUNDARY, contentEncoding, titleValue, descriptionValue, TEST_FILE_CONTENT);
checkContentLength(connection, expectedFormBody.length);
checkArraysHaveSameContent(expectedFormBody, connection.getOutputStreamContent());
connection.disconnect();
// Test sending data as UTF-8
establishConnection();
titleValue = "mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5";
contentEncoding = UTF_8;
sampler.setContentEncoding(contentEncoding);
setupFormData(sampler, titleValue, descriptionValue);
postWriter.setHeaders(connection, sampler);
postWriter.sendPostData(connection, sampler);
checkContentTypeMultipart(connection, PostWriter.BOUNDARY);
expectedFormBody = createExpectedOutput(PostWriter.BOUNDARY, contentEncoding, titleValue, descriptionValue, TEST_FILE_CONTENT);
checkContentLength(connection, expectedFormBody.length);
checkArraysHaveSameContent(expectedFormBody, connection.getOutputStreamContent());
connection.disconnect();
// Test sending UTF-8 data with ISO-8859-1 content encoding
establishConnection();
contentEncoding = UTF_8;
sampler.setContentEncoding("ISO-8859-1");
postWriter.setHeaders(connection, sampler);
postWriter.sendPostData(connection, sampler);
checkContentTypeMultipart(connection, PostWriter.BOUNDARY);
expectedFormBody = createExpectedOutput(PostWriter.BOUNDARY, contentEncoding, titleValue, descriptionValue, TEST_FILE_CONTENT);
checkContentLength(connection, expectedFormBody.length);
checkArraysHaveDifferentContent(expectedFormBody, connection.getOutputStreamContent());
connection.disconnect();
}
|
List<StatisticsEntry> takeStatistics() {
if (reporterEnabled)
throw new IllegalStateException("Cannot take consistent snapshot while reporter is enabled");
var ret = new ArrayList<StatisticsEntry>();
consume((metric, value) -> ret.add(new StatisticsEntry(metric, value)));
return ret;
}
|
@Test
void statistics_include_grouped_and_single_statuscodes() {
testRequest("http", 401, "GET");
testRequest("http", 404, "GET");
testRequest("http", 403, "GET");
var stats = collector.takeStatistics();
assertStatisticsEntry(stats, "http", "GET", MetricDefinitions.RESPONSES_4XX, "read", 401, 1L);
assertStatisticsEntry(stats, "http", "GET", MetricDefinitions.RESPONSES_4XX, "read", 403, 1L);
assertStatisticsEntry(stats, "http", "GET", MetricDefinitions.RESPONSES_4XX, "read", 404, 1L);
}
|
public static InboundEdgeStream create(
@Nonnull ConcurrentConveyor<Object> conveyor,
int ordinal,
int priority,
boolean waitForAllBarriers,
@Nonnull String debugName,
@Nullable ComparatorEx<?> comparator
) {
if (comparator == null) {
return new RoundRobinDrain(conveyor, ordinal, priority, debugName, waitForAllBarriers);
} else {
return new OrderedDrain(conveyor, ordinal, priority, debugName, comparator);
}
}
|
@Test
public void when_receivingBarriersWhileDone_then_coalesce() {
stream = ConcurrentInboundEdgeStream.create(conveyor, 0, 0, true, "cies", null);
add(q1, 1, barrier(0));
add(q2, DONE_ITEM);
drainAndAssert(MADE_PROGRESS, 1);
drainAndAssert(MADE_PROGRESS, barrier(0));
add(q1, DONE_ITEM);
drainAndAssert(DONE);
}
|
@Override
public KsMaterializedQueryResult<Row> get(
final GenericKey key,
final int partition,
final Optional<Position> position
) {
try {
final ReadOnlyKeyValueStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore
.store(QueryableStoreTypes.timestampedKeyValueStore(), partition);
final ValueAndTimestamp<GenericRow> row = store.get(key);
if (row == null) {
return KsMaterializedQueryResult.rowIterator(Collections.emptyIterator());
} else {
return KsMaterializedQueryResult.rowIterator(ImmutableList.of(Row.of(
stateStore.schema(), key, row.value(), row.timestamp())).iterator());
}
} catch (final Exception e) {
throw new MaterializationException("Failed to get value from materialized table", e);
}
}
|
@Test
public void shouldGetWithCorrectParams() {
// When:
table.get(A_KEY, PARTITION);
// Then:
verify(tableStore).get(A_KEY);
}
|
protected static PrivateKey toPrivateKey(File keyFile, String keyPassword) throws NoSuchAlgorithmException,
NoSuchPaddingException, InvalidKeySpecException,
InvalidAlgorithmParameterException,
KeyException, IOException {
return toPrivateKey(keyFile, keyPassword, true);
}
|
@Test
public void testPkcs1Des3EncryptedRsaNoPassword() throws Exception {
assertThrows(InvalidKeySpecException.class, new Executable() {
@Override
public void execute() throws Throwable {
SslContext.toPrivateKey(new File(getClass().getResource("rsa_pkcs1_des3_encrypted.key")
.getFile()), null);
}
});
}
|
@Override
public void deleteTenantPackage(Long id) {
// 校验存在
validateTenantPackageExists(id);
// 校验正在使用
validateTenantUsed(id);
// 删除
tenantPackageMapper.deleteById(id);
}
|
@Test
public void testDeleteTenantPackage_success() {
// mock 数据
TenantPackageDO dbTenantPackage = randomPojo(TenantPackageDO.class);
tenantPackageMapper.insert(dbTenantPackage);// @Sql: 先插入出一条存在的数据
// 准备参数
Long id = dbTenantPackage.getId();
// mock 租户未使用该套餐
when(tenantService.getTenantCountByPackageId(eq(id))).thenReturn(0L);
// 调用
tenantPackageService.deleteTenantPackage(id);
// 校验数据不存在了
assertNull(tenantPackageMapper.selectById(id));
}
|
@Override
public String getWelcomeMessage(final User user) {
if (isEnhanced()) {
return "Welcome " + user + ". You're using the enhanced welcome message.";
}
return "Welcome to the application.";
}
|
@Test
void testFeatureTurnedOn() {
final var properties = new Properties();
properties.put("enhancedWelcome", true);
var service = new PropertiesFeatureToggleVersion(properties);
assertTrue(service.isEnhanced());
final var welcomeMessage = service.getWelcomeMessage(new User("Jamie No Code"));
assertEquals("Welcome Jamie No Code. You're using the enhanced welcome message.", welcomeMessage);
}
|
public Arguments parse(String[] args) {
JCommander jCommander = new JCommander(this);
jCommander.setProgramName("jsonschema2pojo");
try {
jCommander.parse(args);
if (this.showHelp) {
jCommander.usage();
exit(EXIT_OKAY);
} else if (printVersion) {
Properties properties = new Properties();
properties.load(getClass().getResourceAsStream("version.properties"));
jCommander.getConsole().println(jCommander.getProgramName() + " version " + properties.getProperty("version"));
exit(EXIT_OKAY);
}
} catch (IOException | ParameterException e) {
System.err.println(e.getMessage());
jCommander.usage();
exit(EXIT_ERROR);
}
return this;
}
|
@Test
public void requestingHelpCausesHelp() {
ArgsForTest args = (ArgsForTest) new ArgsForTest().parse(new String[] { "--help" });
assertThat(args.status, is(notNullValue()));
assertThat(new String(systemOutCapture.toByteArray(), StandardCharsets.UTF_8), is(containsString("Usage: jsonschema2pojo")));
}
|
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
ReflectionUtils.doWithMethods(bean.getClass(), recurringJobFinderMethodCallback);
return bean;
}
|
@Test
void beansWithMethodsUsingJobContextAnnotatedWithRecurringCronAnnotationWillAutomaticallyBeRegistered() {
// GIVEN
final RecurringJobPostProcessor recurringJobPostProcessor = getRecurringJobPostProcessor();
// WHEN
recurringJobPostProcessor.postProcessAfterInitialization(new MyServiceWithRecurringCronJobUsingJobContext(), "not important");
// THEN
verify(jobScheduler).scheduleRecurrently(eq("my-recurring-job"), jobDetailsArgumentCaptor.capture(), eq(CronExpression.create("0 0/15 * * *")), any(ZoneId.class));
final JobDetails actualJobDetails = jobDetailsArgumentCaptor.getValue();
assertThat(actualJobDetails)
.isCacheable()
.hasClassName(MyServiceWithRecurringCronJobUsingJobContext.class.getName())
.hasMethodName("myRecurringMethod")
.hasJobContextArg();
}
|
public static <N> void merge(MutableGraph<N> graph1, Graph<N> graph2) {
for (N node : graph2.nodes()) {
graph1.addNode(node);
}
for (EndpointPair<N> edge : graph2.edges()) {
graph1.putEdge(edge.nodeU(), edge.nodeV());
}
}
|
@Test
public void mergeDistinctGraphs() {
final MutableGraph<String> graph1 = GraphBuilder.directed().build();
graph1.addNode("Test1");
graph1.addNode("Test2");
graph1.putEdge("Test1", "Test2");
final MutableGraph<String> graph2 = GraphBuilder.directed().build();
graph2.addNode("Test3");
graph2.addNode("Test4");
graph2.putEdge("Test3", "Test4");
final MutableGraph<String> expectedGraph = GraphBuilder.directed().build();
expectedGraph.addNode("Test1");
expectedGraph.addNode("Test2");
expectedGraph.addNode("Test3");
expectedGraph.addNode("Test4");
expectedGraph.putEdge("Test1", "Test2");
expectedGraph.putEdge("Test3", "Test4");
Graphs.merge(graph1, graph2);
assertThat(graph1).isEqualTo(expectedGraph);
}
|
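A hedged sketch of merging overlapping graphs with the static merge above (assumed reachable as Graphs.merge); because MutableGraph#addNode and #putEdge are idempotent, the shared node "b" is not duplicated.
import com.google.common.graph.GraphBuilder;
import com.google.common.graph.MutableGraph;

public class MergeOverlapDemo {
    public static void main(String[] args) {
        MutableGraph<String> g1 = GraphBuilder.directed().build();
        g1.putEdge("a", "b"); // putEdge adds missing endpoints implicitly
        MutableGraph<String> g2 = GraphBuilder.directed().build();
        g2.putEdge("b", "c");
        Graphs.merge(g1, g2);
        System.out.println(g1.nodes().size()); // 3 -- "b" merged, not duplicated
        System.out.println(g1.edges().size()); // 2
    }
}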
public static List<FieldInfo> buildSourceSchemaEntity(final LogicalSchema schema) {
final List<FieldInfo> allFields = schema.columns().stream()
.map(EntityUtil::toFieldInfo)
.collect(Collectors.toList());
if (allFields.isEmpty()) {
throw new IllegalArgumentException("Root schema should contain columns: " + schema);
}
return allFields;
}
|
@Test
public void shouldSupportSchemasWithKeyColumns() {
// Given:
final LogicalSchema schema = LogicalSchema.builder()
.keyColumn(ColumnName.of("field1"), SqlTypes.INTEGER)
.build();
// When:
final List<FieldInfo> fields = EntityUtil.buildSourceSchemaEntity(schema);
// Then:
assertThat(fields, hasSize(1));
assertThat(fields.get(0).getName(), equalTo("field1"));
assertThat(fields.get(0).getSchema().getTypeName(), equalTo("INTEGER"));
assertThat(fields.get(0).getType(), equalTo(Optional.of(FieldType.KEY)));
}
|
@Override
protected String getInstanceClassName() {
return "";
}
|
@Test
public void getInstanceClassName() {
}
|
@Override
public Credentials configure(final Host host) {
if(StringUtils.isNotBlank(host.getHostname())) {
final Credentials credentials = new Credentials(host.getCredentials());
configuration.refresh();
// Update this host credentials from the OpenSSH configuration file in ~/.ssh/config
final OpenSshConfig.Host entry = configuration.lookup(host.getHostname());
if(StringUtils.isNotBlank(entry.getUser())) {
if(!credentials.validate(host.getProtocol(), new LoginOptions(host.getProtocol()).password(false))) {
if(log.isInfoEnabled()) {
log.info(String.format("Using username %s from %s", entry, configuration));
}
credentials.setUsername(entry.getUser());
}
}
if(!credentials.isPublicKeyAuthentication()) {
if(null != entry.getIdentityFile()) {
if(log.isInfoEnabled()) {
log.info(String.format("Using identity %s from %s", entry, configuration));
}
credentials.setIdentity(entry.getIdentityFile());
}
else {
// No custom public key authentication configuration
if(new HostPreferences(host).getBoolean("ssh.authentication.publickey.default.enable")) {
final Local rsa = LocalFactory.get(new HostPreferences(host).getProperty("ssh.authentication.publickey.default.rsa"));
if(rsa.exists()) {
if(log.isInfoEnabled()) {
log.info(String.format("Using RSA default host key %s from %s", rsa, configuration));
}
credentials.setIdentity(rsa);
}
else {
final Local dsa = LocalFactory.get(new HostPreferences(host).getProperty("ssh.authentication.publickey.default.dsa"));
if(dsa.exists()) {
if(log.isInfoEnabled()) {
log.info(String.format("Using DSA default host key %s from %s", dsa, configuration));
}
credentials.setIdentity(dsa);
}
}
}
}
}
return credentials;
}
return CredentialsConfigurator.DISABLED.configure(host);
}
|
@Test
public void testNoConfigure() {
OpenSSHCredentialsConfigurator c = new OpenSSHCredentialsConfigurator(
new OpenSshConfig(
new Local("src/main/test/resources", "openssh/config")));
Credentials credentials = new Credentials("user", " ");
credentials.setIdentity(new NullLocal("t"));
assertEquals("t", c.configure(new Host(new TestProtocol(Scheme.sftp), "t", credentials)).getIdentity().getName());
}
|
static Object actualCoerceParameter(Type requiredType, Object valueToCoerce) {
Object toReturn = valueToCoerce;
if (valueToCoerce instanceof LocalDate localDate &&
requiredType == BuiltInType.DATE_TIME) {
return DateTimeEvalHelper.coerceDateTime(localDate);
}
return toReturn;
}
|
@Test
void actualCoerceParameterToDateTimeConverted() {
Object value = LocalDate.now();
Object retrieved = CoerceUtil.actualCoerceParameter(BuiltInType.DATE_TIME, value);
assertNotNull(retrieved);
assertTrue(retrieved instanceof ZonedDateTime);
ZonedDateTime zdtRetrieved = (ZonedDateTime) retrieved;
assertEquals(value, zdtRetrieved.toLocalDate());
assertEquals(ZoneOffset.UTC, zdtRetrieved.getOffset());
assertEquals(0, zdtRetrieved.getHour());
assertEquals(0, zdtRetrieved.getMinute());
assertEquals(0, zdtRetrieved.getSecond());
}
|
public static DataSource createDataSource(final ModeConfiguration modeConfig) throws SQLException {
return createDataSource(DefaultDatabase.LOGIC_NAME, modeConfig);
}
|
@Test
void assertCreateDataSourceWithAllParametersForSingleDataSource() throws SQLException {
assertDataSource(ShardingSphereDataSourceFactory.createDataSource("test_db",
new ModeConfiguration("Standalone", null), new MockedDataSource(), new LinkedList<>(), new Properties()), "test_db");
}
|
@Override
public Set<EntityExcerpt> listEntityExcerpts() {
return inputService.all().stream()
.map(InputWithExtractors::create)
.map(this::createExcerpt)
.collect(Collectors.toSet());
}
|
@Test
@MongoDBFixtures("InputFacadeTest.json")
public void listEntityExcerpts() {
final EntityExcerpt expectedEntityExcerpt1 = EntityExcerpt.builder()
.id(ModelId.of("5adf25294b900a0fdb4e5365"))
.type(ModelTypes.INPUT_V1)
.title("Global Random HTTP")
.build();
final EntityExcerpt expectedEntityExcerpt2 = EntityExcerpt.builder()
.id(ModelId.of("5acc84f84b900a4ff290d9a7"))
.type(ModelTypes.INPUT_V1)
.title("Local Raw UDP")
.build();
final EntityExcerpt expectedEntityExcerpt3 = EntityExcerpt.builder()
.id(ModelId.of("5ae2eb0a3d27464477f0fd8b"))
.type(ModelTypes.INPUT_V1)
.title("TEST PLAIN TEXT")
.build();
final EntityExcerpt expectedEntityExcerpt4 = EntityExcerpt.builder()
.id(ModelId.of("5ae2ebbeef27464477f0fd8b"))
.type(ModelTypes.INPUT_V1)
.title("TEST PLAIN TEXT")
.build();
final Set<EntityExcerpt> entityExcerpts = facade.listEntityExcerpts();
assertThat(entityExcerpts).containsOnly(expectedEntityExcerpt1,
expectedEntityExcerpt2, expectedEntityExcerpt3, expectedEntityExcerpt4);
}
|
@Override
public void renameTable(TableIdentifier from, TableIdentifier to) {
if (!namespaceExists(to.namespace())) {
throw new NoSuchNamespaceException(
"Cannot rename %s to %s because namespace %s does not exist", from, to, to.namespace());
}
if (tableExists(to)) {
throw new AlreadyExistsException(
"Cannot rename %s because destination table %s exists", from, to);
}
EcsURI fromURI = tableURI(from);
if (!objectMetadata(fromURI).isPresent()) {
throw new NoSuchTableException("Cannot rename table because table %s does not exist", from);
}
Properties properties = loadProperties(fromURI);
EcsURI toURI = tableURI(to);
if (!putNewProperties(toURI, properties.content())) {
throw new AlreadyExistsException(
"Cannot rename %s because destination table %s exists", from, to);
}
client.deleteObject(fromURI.bucket(), fromURI.name());
LOG.info("Rename table {} to {}", from, to);
}
|
@Test
public void testRenameTable() {
ecsCatalog.createNamespace(Namespace.of("a"));
ecsCatalog.createTable(TableIdentifier.of("a", "t1"), SCHEMA);
ecsCatalog.createNamespace(Namespace.of("b"));
assertThatThrownBy(
() ->
ecsCatalog.renameTable(
TableIdentifier.of("unknown"), TableIdentifier.of("b", "t2")))
.isInstanceOf(NoSuchTableException.class)
.hasMessage("Cannot rename table because table unknown does not exist");
assertThatThrownBy(
() ->
ecsCatalog.renameTable(
TableIdentifier.of("a", "t1"), TableIdentifier.of("unknown", "t2")))
.isInstanceOf(NoSuchNamespaceException.class)
.hasMessage("Cannot rename a.t1 to unknown.t2 because namespace unknown does not exist");
ecsCatalog.renameTable(TableIdentifier.of("a", "t1"), TableIdentifier.of("b", "t2"));
assertThat(ecsCatalog.tableExists(TableIdentifier.of("a", "t1")))
.as("Old table does not exist")
.isFalse();
assertThat(ecsCatalog.tableExists(TableIdentifier.of("b", "t2")))
.as("New table exists")
.isTrue();
}
|
public void logAndProcessFailure(
String computationId,
ExecutableWork executableWork,
Throwable t,
Consumer<Work> onInvalidWork) {
if (shouldRetryLocally(computationId, executableWork.work(), t)) {
// Try again after some delay and at the end of the queue to avoid a tight loop.
executeWithDelay(retryLocallyDelayMs, executableWork);
} else {
// Consider the item invalid. It will eventually be retried by Windmill if it still needs to
// be processed.
onInvalidWork.accept(executableWork.work());
}
}
|
@Test
public void logAndProcessFailure_doesNotRetryKeyTokenInvalidException() {
Set<Work> executedWork = new HashSet<>();
ExecutableWork work = createWork(executedWork::add);
WorkFailureProcessor workFailureProcessor =
createWorkFailureProcessor(streamingEngineFailureReporter());
Set<Work> invalidWork = new HashSet<>();
workFailureProcessor.logAndProcessFailure(
DEFAULT_COMPUTATION_ID, work, new KeyTokenInvalidException("key"), invalidWork::add);
assertThat(executedWork).isEmpty();
assertThat(invalidWork).containsExactly(work.work());
}
|
public Iterator<ReadRowsResponse> readRows()
{
List<ReadRowsResponse> readRowResponses = new ArrayList<>();
long readRowsCount = 0;
int retries = 0;
Iterator<ReadRowsResponse> serverResponses = fetchResponses(request);
while (serverResponses.hasNext()) {
try {
ReadRowsResponse response = serverResponses.next();
readRowsCount += response.getRowCount();
readRowResponses.add(response);
}
catch (RuntimeException e) {
// if relevant, retry the read, from the last read position
if (BigQueryUtil.isRetryable(e) && retries < maxReadRowsRetries) {
request.getReadPositionBuilder().setOffset(readRowsCount);
serverResponses = fetchResponses(request);
retries++;
}
else {
// to safely close the client
try (BigQueryStorageClient ignored = client) {
throw e;
}
}
}
}
return readRowResponses.iterator();
}
|
@Test
void testRetryOfSingleFailure()
{
MockResponsesBatch batch1 = new MockResponsesBatch();
batch1.addResponse(Storage.ReadRowsResponse.newBuilder().setRowCount(10).build());
batch1.addException(new StatusRuntimeException(Status.INTERNAL.withDescription(
"Received unexpected EOS on DATA frame from server.")));
MockResponsesBatch batch2 = new MockResponsesBatch();
batch2.addResponse(Storage.ReadRowsResponse.newBuilder().setRowCount(11).build());
ImmutableList<Storage.ReadRowsResponse> responses = ImmutableList.copyOf(
new MockReadRowsHelper(client, request, 3, ImmutableList.of(batch1, batch2))
.readRows());
assertThat(responses.size()).isEqualTo(2);
assertThat(responses.stream().mapToLong(Storage.ReadRowsResponse::getRowCount).sum()).isEqualTo(21);
}
|
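The retry path above resumes the stream from the count of rows already consumed. Below is a hypothetical, self-contained sketch of that resume-from-offset pattern; openStream, the row-count elements, and all names are illustrative stand-ins, not the BigQuery storage API.
import java.util.Iterator;
import java.util.function.LongFunction;

public class ResumableReadDemo {
    // openStream stands in for fetchResponses(request); the long argument is the
    // read offset, and each element reports how many rows that response carried.
    static long drainWithRetry(LongFunction<Iterator<Long>> openStream, int maxRetries) {
        long consumed = 0;
        int retries = 0;
        Iterator<Long> it = openStream.apply(consumed);
        while (it.hasNext()) {
            try {
                consumed += it.next();
            } catch (RuntimeException e) {
                if (retries++ >= maxRetries) {
                    throw e;
                }
                it = openStream.apply(consumed); // reopen from the last good offset
            }
        }
        return consumed;
    }

    public static void main(String[] args) {
        // A stream of row counts that fails once mid-read, then succeeds on resume.
        LongFunction<Iterator<Long>> stream = offset -> new Iterator<Long>() {
            long emitted = offset;
            @Override public boolean hasNext() { return emitted < 21; }
            @Override public Long next() {
                if (emitted == 10 && offset == 0) {
                    throw new RuntimeException("Received unexpected EOS on DATA frame");
                }
                long batch = (emitted == 0) ? 10 : 11;
                emitted += batch;
                return batch;
            }
        };
        System.out.println(drainWithRetry(stream, 3)); // 21
    }
}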
ControllerResult<Map<ConfigResource, ApiError>> incrementalAlterConfigs(
Map<ConfigResource, Map<String, Entry<OpType, String>>> configChanges,
boolean newlyCreatedResource
) {
List<ApiMessageAndVersion> outputRecords =
BoundedList.newArrayBacked(MAX_RECORDS_PER_USER_OP);
Map<ConfigResource, ApiError> outputResults = new HashMap<>();
for (Entry<ConfigResource, Map<String, Entry<OpType, String>>> resourceEntry :
configChanges.entrySet()) {
ApiError apiError = incrementalAlterConfigResource(resourceEntry.getKey(),
resourceEntry.getValue(),
newlyCreatedResource,
outputRecords);
outputResults.put(resourceEntry.getKey(), apiError);
}
return ControllerResult.atomicOf(outputRecords, outputResults);
}
|
@Test
public void testIncrementalAlterConfigs() {
ConfigurationControlManager manager = new ConfigurationControlManager.Builder().
setKafkaConfigSchema(SCHEMA).
build();
ControllerResult<Map<ConfigResource, ApiError>> result = manager.
incrementalAlterConfigs(toMap(entry(BROKER0, toMap(
entry("baz", entry(SUBTRACT, "abc")),
entry("quux", entry(SET, "abc")))),
entry(MYTOPIC, toMap(entry("abc", entry(APPEND, "123"))))),
true);
assertEquals(ControllerResult.atomicOf(Collections.singletonList(new ApiMessageAndVersion(
new ConfigRecord().setResourceType(TOPIC.id()).setResourceName("mytopic").
setName("abc").setValue("123"), CONFIG_RECORD.highestSupportedVersion())),
toMap(entry(BROKER0, new ApiError(Errors.INVALID_CONFIG,
"Can't SUBTRACT to key baz because its type is not LIST.")),
entry(MYTOPIC, ApiError.NONE))), result);
RecordTestUtils.replayAll(manager, result.records());
assertEquals(ControllerResult.atomicOf(Collections.singletonList(new ApiMessageAndVersion(
new ConfigRecord().setResourceType(TOPIC.id()).setResourceName("mytopic").
setName("abc").setValue(null), CONFIG_RECORD.highestSupportedVersion())),
toMap(entry(MYTOPIC, ApiError.NONE))),
manager.incrementalAlterConfigs(toMap(entry(MYTOPIC, toMap(
entry("abc", entry(DELETE, "xyz"))))),
true));
}
|
@Override
public T deserialize(final String topic, final byte[] bytes) {
try {
if (bytes == null) {
return null;
}
// don't use the JsonSchemaConverter to read this data because
// we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS,
// which is not currently available in the standard converters
final JsonNode value = isJsonSchema
? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class)
: MAPPER.readTree(bytes);
final Object coerced = enforceFieldType(
"$",
new JsonValueContext(value, schema)
);
if (LOG.isTraceEnabled()) {
LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced);
}
return SerdeUtils.castToTargetType(coerced, targetType);
} catch (final Exception e) {
// Clear location in order to avoid logging data, for security reasons
if (e instanceof JsonParseException) {
((JsonParseException) e).clearLocation();
}
throw new SerializationException(
"Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e);
}
}
|
@Test
public void shouldDeserializedJsonNumberAsBigDecimal() {
// Given:
final KsqlJsonDeserializer<BigDecimal> deserializer =
givenDeserializerForSchema(DecimalUtil.builder(20, 19).build(), BigDecimal.class);
final List<String> validCoercions = ImmutableList.of(
"1.1234512345123451234",
"\"1.1234512345123451234\""
);
validCoercions.forEach(value -> {
final byte[] bytes = addMagic(value.getBytes(StandardCharsets.UTF_8));
// When:
final Object result = deserializer.deserialize(SOME_TOPIC, bytes);
// Then:
assertThat(result, is(new BigDecimal("1.1234512345123451234")));
});
}
|
@Override
public void execute(ComputationStep.Context context) {
// no notification on pull requests as there is no real Quality Gate on those
if (analysisMetadataHolder.isPullRequest()) {
return;
}
executeForProject(treeRootHolder.getRoot());
}
|
@Test
public void no_event_created_if_raw_ALERT_STATUS_measure_is_unsupported_value() {
when(measureRepository.getRawMeasure(PROJECT_COMPONENT, alertStatusMetric)).thenReturn(of(Measure.newMeasureBuilder().create(INVALID_ALERT_STATUS)));
underTest.execute(new TestComputationStepContext());
verify(measureRepository).getRawMeasure(PROJECT_COMPONENT, alertStatusMetric);
verifyNoMoreInteractions(measureRepository, eventRepository);
}
|
public LinkedHashMap<String, String> getKeyPropertyList(ObjectName mbeanName) {
LinkedHashMap<String, String> keyProperties = keyPropertiesPerBean.get(mbeanName);
if (keyProperties == null) {
keyProperties = new LinkedHashMap<>();
String properties = mbeanName.getKeyPropertyListString();
Matcher match = PROPERTY_PATTERN.matcher(properties);
while (match.lookingAt()) {
keyProperties.put(match.group(1), match.group(2));
properties = properties.substring(match.end());
if (properties.startsWith(",")) {
properties = properties.substring(1);
}
match.reset(properties);
}
keyPropertiesPerBean.put(mbeanName, keyProperties);
}
return keyProperties;
}
|
@Test
public void testQuotedObjectName() throws Throwable {
JmxMBeanPropertyCache testCache = new JmxMBeanPropertyCache();
LinkedHashMap<String, String> parameterList =
testCache.getKeyPropertyList(
new ObjectName("com.organisation:name=value,name2=\"value2\""));
assertSameElementsAndOrder(parameterList, "name", "value", "name2", "\"value2\"");
}
|
@Override
public void alert(Anomaly anomaly, boolean autoFixTriggered, long selfHealingStartTime, AnomalyType anomalyType) {
super.alert(anomaly, autoFixTriggered, selfHealingStartTime, anomalyType);
if (_alertaApiUrl == null) {
LOG.warn("Alerta API URL is null, can't send Alerta.io self healing notification");
return;
}
if (_alertaApiKey == null) {
LOG.warn("Alerta API key is null, can't send Alerta.io self healing notification");
return;
}
String text = String.format("%s detected %s. Self healing %s.%s", anomalyType, anomaly,
_selfHealingEnabled.get(anomalyType) ? String.format("start time %s", utcDateFor(selfHealingStartTime))
: "is disabled",
autoFixTriggered ? "%nSelf-healing has been triggered." : "");
String tmpLocalHostname;
try {
tmpLocalHostname = InetAddress.getLocalHost().getCanonicalHostName();
} catch (UnknownHostException e) {
LOG.warn("Unable to get the hostname of the Cruise Control server", e);
tmpLocalHostname = ALERT_CRUISE_CONTROL;
}
final String localHostname = tmpLocalHostname;
List<AlertaMessage> alertaMessages = new ArrayList<>();
switch ((KafkaAnomalyType) anomalyType) {
case GOAL_VIOLATION:
GoalViolations goalViolations = (GoalViolations) anomaly;
alertGoalViolation(anomalyType, localHostname, alertaMessages, goalViolations);
break;
case BROKER_FAILURE:
BrokerFailures brokerFailures = (BrokerFailures) anomaly;
alertBrokerFailure(anomalyType, localHostname, alertaMessages, brokerFailures);
break;
case METRIC_ANOMALY:
KafkaMetricAnomaly metricAnomaly = (KafkaMetricAnomaly) anomaly;
alertMetricAnomaly(anomalyType, localHostname, alertaMessages, metricAnomaly);
break;
case DISK_FAILURE:
DiskFailures diskFailures = (DiskFailures) anomaly;
alertDiskFailure(anomalyType, localHostname, alertaMessages, diskFailures);
break;
case TOPIC_ANOMALY:
TopicAnomaly topicAnomaly = (TopicAnomaly) anomaly;
alertTopicAnomaly(anomalyType, localHostname, alertaMessages, topicAnomaly);
break;
case MAINTENANCE_EVENT:
MaintenanceEvent maintenanceEvent = (MaintenanceEvent) anomaly;
alertMaintenanceEvent(anomalyType, localHostname, alertaMessages, maintenanceEvent);
break;
default:
throw new IllegalStateException("Unrecognized anomaly type.");
}
for (AlertaMessage alertaMessage : alertaMessages) {
alertaMessage.setEnvironment(_alertaEnvironment);
alertaMessage.setService(Collections.singletonList(ALERT_CRUISE_CONTROL));
alertaMessage.setText(text);
alertaMessage.setOrigin(ALERT_CRUISE_CONTROL + "/" + localHostname);
alertaMessage.setType(ALERT_CRUISE_CONTROL_ALARM);
alertaMessage.setRawData(anomaly.toString());
alertaMessage.setTags(Collections.singletonList(ALERT_ALARM_ID_TAG_KEY + ":" + anomaly.anomalyId()));
try {
sendAlertaMessage(alertaMessage);
} catch (IOException e) {
LOG.warn("ERROR sending alert to Alerta.io", e);
}
}
}
|
@Test
public void testAlertaAlertWithNoWebhook() {
_notifier = new MockAlertaSelfHealingNotifier(mockTime);
_notifier.alert(failures, false, 1L, KafkaAnomalyType.BROKER_FAILURE);
assertEquals(0, _notifier.getAlertaMessageList().size());
}
|
public static List<Metadata> fromJson(Reader reader) throws IOException {
List<Metadata> ms = null;
if (reader == null) {
return ms;
}
ms = new ArrayList<>();
try (JsonParser jParser = new JsonFactory()
.setStreamReadConstraints(StreamReadConstraints
.builder()
.maxStringLength(TikaConfig.getMaxJsonStringFieldLength())
.build())
.createParser(CloseShieldReader.wrap(reader))) {
JsonToken token = jParser.nextToken();
if (token != JsonToken.START_ARRAY) {
throw new IOException("metadata list must start with an array, but I see: " + token.name());
}
token = jParser.nextToken();
while (token != JsonToken.END_ARRAY) {
Metadata m = JsonMetadata.readMetadataObject(jParser);
ms.add(m);
token = jParser.nextToken();
}
}
if (ms == null) {
return null;
}
//if the last object is the main document,
//as happens with the streaming serializer,
//flip it to be the first element.
if (ms.size() > 1) {
Metadata last = ms.get(ms.size() - 1);
String embResourcePath = last.get(TikaCoreProperties.EMBEDDED_RESOURCE_PATH);
if (embResourcePath == null && ms
.get(0)
.get(TikaCoreProperties.EMBEDDED_RESOURCE_PATH) != null) {
ms.add(0, ms.remove(ms.size() - 1));
}
}
return ms;
}
|
@Test
public void testSwitchingOrderOfMainDoc() throws Exception {
Metadata m1 = new Metadata();
m1.add("k1", "v1");
m1.add("k1", "v2");
m1.add("k1", "v3");
m1.add("k1", "v4");
m1.add("k1", "v4");
m1.add("k2", "v1");
m1.add(TikaCoreProperties.EMBEDDED_RESOURCE_PATH, "/embedded-1");
Metadata m2 = new Metadata();
m2.add("k3", "v1");
m2.add("k3", "v2");
m2.add("k3", "v3");
m2.add("k3", "v4");
m2.add("k3", "v4");
m2.add("k4", "v1");
List<Metadata> truth = new ArrayList<>();
truth.add(m2);
truth.add(m1);
StringWriter stringWriter = new StringWriter();
try (JsonStreamingSerializer serializer = new JsonStreamingSerializer(stringWriter)) {
serializer.add(m1);
serializer.add(m2);
}
Reader reader = new StringReader(stringWriter.toString());
List<Metadata> deserialized = JsonMetadataList.fromJson(reader);
assertEquals(truth, deserialized);
}
|
public FileSystem get(Key key) {
synchronized (mLock) {
Value value = mCacheMap.get(key);
FileSystem fs;
if (value == null) {
// On cache miss, create and insert a new FileSystem instance,
fs = FileSystem.Factory.create(FileSystemContext.create(key.mSubject, key.mConf));
mCacheMap.put(key, new Value(fs, 1));
} else {
fs = value.mFileSystem;
value.mRefCount.getAndIncrement();
}
return new InstanceCachingFileSystem(fs, key);
}
}
|
@Test
public void getTwiceThenClose() throws IOException {
Key key1 = createTestFSKey("user1");
FileSystem fs1 = mFileSystemCache.get(key1);
FileSystem fs2 = mFileSystemCache.get(key1);
fs1.close();
FileSystem fs3 = mFileSystemCache.get(key1);
assertSame(getDelegatedFileSystem(fs2), getDelegatedFileSystem(fs3));
assertTrue(fs1.isClosed());
assertFalse(fs2.isClosed());
assertFalse(fs3.isClosed());
}
|
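A hypothetical, generic sketch of the reference-counting idea behind this cache (names are illustrative, not the Alluxio API): get() bumps a per-key count and release() only evicts once the count drops to zero, which is why fs2 and fs3 above still share an instance after fs1.close().
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class RefCountingCache<K, V> {
    private final Map<K, V> values = new HashMap<>();
    private final Map<K, Integer> refs = new HashMap<>();

    public synchronized V get(K key, Function<K, V> factory) {
        refs.merge(key, 1, Integer::sum);
        return values.computeIfAbsent(key, factory);
    }

    public synchronized void release(K key) {
        Integer count = refs.get(key);
        if (count == null) {
            return;
        }
        if (count == 1) {
            refs.remove(key);
            values.remove(key); // last reference gone: drop the shared instance
        } else {
            refs.put(key, count - 1);
        }
    }
}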
boolean isEnabled() {
return enabled;
}
|
@Test
public void testBackPressureDisabledByDefault() {
Config config = new Config();
HazelcastProperties hazelcastProperties = new HazelcastProperties(config);
BackpressureRegulator regulator = new BackpressureRegulator(hazelcastProperties, logger);
assertFalse(regulator.isEnabled());
}
|
public boolean isInvalid() {
return getPathComponents().length <= 1 && !isRoot();
}
|
@Test
public void testInvalidPath() {
Assert.assertFalse(TEST_QUEUE_PATH.isInvalid());
Assert.assertFalse(ROOT_PATH.isInvalid());
Assert.assertTrue(EMPTY_PATH.isInvalid());
Assert.assertTrue(new QueuePath("invalidPath").isInvalid());
}
|
static Object parseCell(String cell, Schema.Field field) {
Schema.FieldType fieldType = field.getType();
try {
switch (fieldType.getTypeName()) {
case STRING:
return cell;
case INT16:
return Short.parseShort(cell);
case INT32:
return Integer.parseInt(cell);
case INT64:
return Long.parseLong(cell);
case BOOLEAN:
return Boolean.parseBoolean(cell);
case BYTE:
return Byte.parseByte(cell);
case DECIMAL:
return new BigDecimal(cell);
case DOUBLE:
return Double.parseDouble(cell);
case FLOAT:
return Float.parseFloat(cell);
case DATETIME:
return Instant.parse(cell);
default:
throw new UnsupportedOperationException(
"Unsupported type: " + fieldType + ", consider using withCustomRecordParsing");
}
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException(
e.getMessage() + " field " + field.getName() + " was received -- type mismatch");
}
}
|
@Test
public void givenShortWithSurroundingSpaces_throws() {
Short shortNum = Short.parseShort("12");
DefaultMapEntry cellToExpectedValue = new DefaultMapEntry(" 12 ", shortNum);
Schema schema =
Schema.builder()
.addInt16Field("a_short")
.addInt32Field("an_integer")
.addInt64Field("a_long")
.build();
IllegalArgumentException e =
assertThrows(
IllegalArgumentException.class,
() ->
CsvIOParseHelpers.parseCell(
cellToExpectedValue.getKey().toString(), schema.getField("a_short")));
assertEquals(
"For input string: \""
+ cellToExpectedValue.getKey()
+ "\" field "
+ schema.getField("a_short").getName()
+ " was received -- type mismatch",
e.getMessage());
}
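A hedged illustration of the failure mode exercised above: the java.lang numeric parsers reject untrimmed input, so parseCell rethrows with the field name appended to the parser's message.
public class ParseCellSpacesDemo {
    public static void main(String[] args) {
        System.out.println(Short.parseShort("12")); // 12
        try {
            Short.parseShort(" 12 ");
        } catch (NumberFormatException e) {
            System.out.println(e.getMessage()); // For input string: " 12 "
        }
    }
}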
|