Columns: focal_method (string, 13 to 60.9k chars), test_case (string, 25 to 109k chars)
@Asn1Property(tagNo = 0x30, converter = DigestsConverter.class) public Map<Integer, byte[]> getDigests() { return digests; }
@Test public void readPcaRvigCms() throws Exception { final LdsSecurityObject ldsSecurityObject = mapper.read( readFromCms("pca-rvig"), LdsSecurityObject.class); assertEquals(ImmutableSet.of(14), ldsSecurityObject.getDigests().keySet()); }
@Override @SuppressWarnings("DuplicatedCode") public Integer cleanAccessLog(Integer exceedDay, Integer deleteLimit) { int count = 0; LocalDateTime expireDate = LocalDateTime.now().minusDays(exceedDay); // delete in a loop until no rows match for (int i = 0; i < Short.MAX_VALUE; i++) { int deleteCount = apiAccessLogMapper.deleteByCreateTimeLt(expireDate, deleteLimit); count += deleteCount; // fewer rows deleted than the limit means there is nothing left if (deleteCount < deleteLimit) { break; } } return count; }
@Test public void testCleanJobLog() { // mock data ApiAccessLogDO log01 = randomPojo(ApiAccessLogDO.class, o -> o.setCreateTime(addTime(Duration.ofDays(-3)))); apiAccessLogMapper.insert(log01); ApiAccessLogDO log02 = randomPojo(ApiAccessLogDO.class, o -> o.setCreateTime(addTime(Duration.ofDays(-1)))); apiAccessLogMapper.insert(log02); // prepare parameters Integer exceedDay = 2; Integer deleteLimit = 1; // invoke Integer count = apiAccessLogService.cleanAccessLog(exceedDay, deleteLimit); // assert assertEquals(1, count); List<ApiAccessLogDO> logs = apiAccessLogMapper.selectList(); assertEquals(1, logs.size()); assertEquals(log02, logs.get(0)); }
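The pair above relies on a bounded batch-delete pattern: delete up to deleteLimit rows per round and stop when a round comes up short. A minimal standalone sketch of the same loop, with a hypothetical deleteBatch callback standing in for the mapper call:

import java.util.function.IntUnaryOperator;

public class BatchDeleteSketch {
    // deleteBatch is a hypothetical stand-in for apiAccessLogMapper.deleteByCreateTimeLt.
    static int cleanInBatches(IntUnaryOperator deleteBatch, int deleteLimit) {
        int count = 0;
        // Short.MAX_VALUE bounds the loop so a misbehaving delete cannot spin forever.
        for (int i = 0; i < Short.MAX_VALUE; i++) {
            int deleted = deleteBatch.applyAsInt(deleteLimit);
            count += deleted;
            if (deleted < deleteLimit) { // a short batch means no rows remain
                break;
            }
        }
        return count;
    }

    public static void main(String[] args) {
        int[] remaining = {5}; // pretend five expired rows exist
        int total = cleanInBatches(limit -> {
            int d = Math.min(limit, remaining[0]);
            remaining[0] -= d;
            return d;
        }, 2);
        System.out.println(total); // 5, across three batches (2 + 2 + 1)
    }
}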
@LiteralParameters("x") @ScalarOperator(BETWEEN) @SqlType(StandardTypes.BOOLEAN) public static boolean between(@SqlType("varchar(x)") Slice value, @SqlType("varchar(x)") Slice min, @SqlType("varchar(x)") Slice max) { return min.compareTo(value) <= 0 && value.compareTo(max) <= 0; }
@Test public void testBetween() { assertFunction("'foo' BETWEEN 'foo' AND 'foo'", BOOLEAN, true); assertFunction("'foo' BETWEEN 'foo' AND 'bar'", BOOLEAN, false); assertFunction("'foo' BETWEEN 'bar' AND 'foo'", BOOLEAN, true); assertFunction("'foo' BETWEEN 'bar' AND 'bar'", BOOLEAN, false); assertFunction("'bar' BETWEEN 'foo' AND 'foo'", BOOLEAN, false); assertFunction("'bar' BETWEEN 'foo' AND 'bar'", BOOLEAN, false); assertFunction("'bar' BETWEEN 'bar' AND 'foo'", BOOLEAN, true); assertFunction("'bar' BETWEEN 'bar' AND 'bar'", BOOLEAN, true); }
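A quick way to see why the test's truth table holds: BETWEEN here is just two lexicographic compareTo checks, so any range whose min sorts after its max is empty. A plain-String sketch (String standing in for Slice, which compares the same way for ASCII bytes):

public class BetweenSketch {
    static boolean between(String value, String min, String max) {
        return min.compareTo(value) <= 0 && value.compareTo(max) <= 0;
    }

    public static void main(String[] args) {
        System.out.println(between("foo", "bar", "foo")); // true
        System.out.println(between("foo", "foo", "bar")); // false: min sorts after max, so the range is empty
        System.out.println(between("bar", "bar", "bar")); // true: degenerate single-value range
    }
}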
public static boolean checkpointsMatch( Collection<CompletedCheckpoint> first, Collection<CompletedCheckpoint> second) { if (first.size() != second.size()) { return false; } List<Tuple2<Long, JobID>> firstInterestingFields = new ArrayList<>(first.size()); for (CompletedCheckpoint checkpoint : first) { firstInterestingFields.add( new Tuple2<>(checkpoint.getCheckpointID(), checkpoint.getJobId())); } List<Tuple2<Long, JobID>> secondInterestingFields = new ArrayList<>(second.size()); for (CompletedCheckpoint checkpoint : second) { secondInterestingFields.add( new Tuple2<>(checkpoint.getCheckpointID(), checkpoint.getJobId())); } return firstInterestingFields.equals(secondInterestingFields); }
@Test void testCompareCheckpointsWithDifferentOrder() { CompletedCheckpoint checkpoint1 = new CompletedCheckpoint( new JobID(), 0, 0, 1, new HashMap<>(), Collections.emptyList(), CheckpointProperties.forCheckpoint( CheckpointRetentionPolicy.RETAIN_ON_FAILURE), new TestCompletedCheckpointStorageLocation(), null); CompletedCheckpoint checkpoint2 = new CompletedCheckpoint( new JobID(), 1, 0, 1, new HashMap<>(), Collections.emptyList(), CheckpointProperties.forCheckpoint( CheckpointRetentionPolicy.RETAIN_ON_FAILURE), new TestCompletedCheckpointStorageLocation(), null); List<CompletedCheckpoint> checkpoints1 = new ArrayList<>(); checkpoints1.add(checkpoint1); checkpoints1.add(checkpoint2); checkpoints1.add(checkpoint1); List<CompletedCheckpoint> checkpoints2 = new ArrayList<>(); checkpoints2.add(checkpoint2); checkpoints2.add(checkpoint1); checkpoints2.add(checkpoint2); assertThat(CompletedCheckpoint.checkpointsMatch(checkpoints1, checkpoints2)).isFalse(); }
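Note that checkpointsMatch compares the projected (checkpointID, jobId) tuples as Lists, so the comparison is positional; that is why the test's reordered collections do not match. A tiny illustration of positional List equality:

import java.util.Arrays;
import java.util.List;

public class PositionalEqualitySketch {
    public static void main(String[] args) {
        // Mirrors the test's [c1, c2, c1] vs [c2, c1, c2] using checkpoint IDs.
        List<Long> first = Arrays.asList(0L, 1L, 0L);
        List<Long> second = Arrays.asList(1L, 0L, 1L);
        System.out.println(first.equals(second)); // false: List.equals is positional
    }
}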
public static Expression convert(Predicate[] predicates) { Expression expression = Expressions.alwaysTrue(); for (Predicate predicate : predicates) { Expression converted = convert(predicate); Preconditions.checkArgument( converted != null, "Cannot convert Spark predicate to Iceberg expression: %s", predicate); expression = Expressions.and(expression, converted); } return expression; }
@Test public void testEqualToNull() { String col = "col"; NamedReference namedReference = FieldReference.apply(col); LiteralValue value = new LiteralValue(null, DataTypes.IntegerType); org.apache.spark.sql.connector.expressions.Expression[] attrAndValue = new org.apache.spark.sql.connector.expressions.Expression[] {namedReference, value}; org.apache.spark.sql.connector.expressions.Expression[] valueAndAttr = new org.apache.spark.sql.connector.expressions.Expression[] {value, namedReference}; Predicate eq1 = new Predicate("=", attrAndValue); assertThatThrownBy(() -> SparkV2Filters.convert(eq1)) .isInstanceOf(NullPointerException.class) .hasMessageContaining("Expression is always false"); Predicate eq2 = new Predicate("=", valueAndAttr); assertThatThrownBy(() -> SparkV2Filters.convert(eq2)) .isInstanceOf(NullPointerException.class) .hasMessageContaining("Expression is always false"); Predicate eqNullSafe1 = new Predicate("<=>", attrAndValue); Expression expectedEqNullSafe = Expressions.isNull(col); Expression actualEqNullSafe1 = SparkV2Filters.convert(eqNullSafe1); assertThat(actualEqNullSafe1.toString()).isEqualTo(expectedEqNullSafe.toString()); Predicate eqNullSafe2 = new Predicate("<=>", valueAndAttr); Expression actualEqNullSafe2 = SparkV2Filters.convert(eqNullSafe2); assertThat(actualEqNullSafe2.toString()).isEqualTo(expectedEqNullSafe.toString()); }
@Override protected Class<? extends HttpApiV2ProxyRequest> getRequestClass() { return HttpApiV2ProxyRequest.class; }
@Test void reflection_getRequestClass_returnsCorrectType() { assertSame(HttpApiV2ProxyRequest.class, reader.getRequestClass()); }
public CloseableHttpClient build(String name) { final CloseableHttpClient client = buildWithDefaultRequestConfiguration(name).getClient(); // If the environment is present, we tie the client with the server lifecycle if (environment != null) { environment.lifecycle().manage(new Managed() { @Override public void stop() throws Exception { client.close(); } }); } return client; }
@Test void buildWithAnotherBuilder() { CustomBuilder builder = new CustomBuilder(new MetricRegistry(), anotherApacheBuilder); builder.build("test"); assertThat(anotherApacheBuilder).extracting("requestExec") .isInstanceOf(CustomRequestExecutor.class); }
@Override public Mono<GetExpiringProfileKeyCredentialResponse> getExpiringProfileKeyCredential( final GetExpiringProfileKeyCredentialAnonymousRequest request) { final ServiceIdentifier targetIdentifier = ServiceIdentifierUtil.fromGrpcServiceIdentifier(request.getRequest().getAccountIdentifier()); if (targetIdentifier.identityType() != IdentityType.ACI) { throw Status.INVALID_ARGUMENT.withDescription("Expected ACI service identifier").asRuntimeException(); } if (request.getRequest().getCredentialType() != CredentialType.CREDENTIAL_TYPE_EXPIRING_PROFILE_KEY) { throw Status.INVALID_ARGUMENT.withDescription("Expected expiring profile key credential type").asRuntimeException(); } return getTargetAccountAndValidateUnidentifiedAccess(targetIdentifier, request.getUnidentifiedAccessKey().toByteArray()) .flatMap(account -> ProfileGrpcHelper.getExpiringProfileKeyCredentialResponse(account.getUuid(), request.getRequest().getVersion(), request.getRequest().getCredentialRequest().toByteArray(), profilesManager, zkProfileOperations)); }
@Test void getExpiringProfileKeyCredential() throws InvalidInputException, VerificationFailedException { final byte[] unidentifiedAccessKey = TestRandomUtil.nextBytes(UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH); final UUID targetUuid = UUID.randomUUID(); final ClientZkProfileOperations clientZkProfile = new ClientZkProfileOperations(SERVER_SECRET_PARAMS.getPublicParams()); final byte[] profileKeyBytes = TestRandomUtil.nextBytes(32); final ProfileKey profileKey = new ProfileKey(profileKeyBytes); final ProfileKeyCommitment profileKeyCommitment = profileKey.getCommitment(new ServiceId.Aci(targetUuid)); final ProfileKeyCredentialRequestContext profileKeyCredentialRequestContext = clientZkProfile.createProfileKeyCredentialRequestContext(new ServiceId.Aci(targetUuid), profileKey); final VersionedProfile profile = mock(VersionedProfile.class); when(profile.commitment()).thenReturn(profileKeyCommitment.serialize()); when(account.getUuid()).thenReturn(targetUuid); when(account.getUnidentifiedAccessKey()).thenReturn(Optional.of(unidentifiedAccessKey)); when(accountsManager.getByServiceIdentifierAsync(new AciServiceIdentifier(targetUuid))).thenReturn(CompletableFuture.completedFuture(Optional.of(account))); when(profilesManager.getAsync(targetUuid, "someVersion")).thenReturn(CompletableFuture.completedFuture(Optional.of(profile))); final ProfileKeyCredentialRequest credentialRequest = profileKeyCredentialRequestContext.getRequest(); final Instant expiration = Instant.now().plus(org.whispersystems.textsecuregcm.util.ProfileHelper.EXPIRING_PROFILE_KEY_CREDENTIAL_EXPIRATION) .truncatedTo(ChronoUnit.DAYS); final GetExpiringProfileKeyCredentialAnonymousRequest request = GetExpiringProfileKeyCredentialAnonymousRequest.newBuilder() .setRequest(GetExpiringProfileKeyCredentialRequest.newBuilder() .setAccountIdentifier(ServiceIdentifier.newBuilder() .setIdentityType(IdentityType.IDENTITY_TYPE_ACI) .setUuid(ByteString.copyFrom(UUIDUtil.toBytes(targetUuid))) .build()) .setCredentialRequest(ByteString.copyFrom(credentialRequest.serialize())) .setCredentialType(CredentialType.CREDENTIAL_TYPE_EXPIRING_PROFILE_KEY) .setVersion("someVersion") .build()) .setUnidentifiedAccessKey(ByteString.copyFrom(unidentifiedAccessKey)) .build(); final GetExpiringProfileKeyCredentialResponse response = unauthenticatedServiceStub().getExpiringProfileKeyCredential(request); assertThatNoException().isThrownBy(() -> clientZkProfile.receiveExpiringProfileKeyCredential(profileKeyCredentialRequestContext, new ExpiringProfileKeyCredentialResponse(response.getProfileKeyCredential().toByteArray()))); }
@VisibleForTesting public static RemoteIterator<S3AFileStatus> toProvidedFileStatusIterator( S3AFileStatus[] fileStatuses) { return filteringRemoteIterator( remoteIteratorFromArray(fileStatuses), Listing.ACCEPT_ALL_BUT_S3N::accept); }
@Test public void testProvidedFileStatusIteratorEnd() throws Exception { S3AFileStatus s3aStatus = new S3AFileStatus( 100, 0, new Path("s3a://blah/blah"), 8192, null, null, null); S3AFileStatus[] statuses = { s3aStatus }; RemoteIterator<S3AFileStatus> it = Listing.toProvidedFileStatusIterator( statuses); Assert.assertTrue("hasNext() should return true first time", it.hasNext()); Assert.assertEquals("first element from iterator", s3aStatus, it.next()); Assert.assertFalse("hasNext() should now be false", it.hasNext()); intercept(NoSuchElementException.class, it::next); }
public static RepositoryMetadataStore getInstance() { return repositoryMetadataStore; }
@Test public void shouldReturnNullForMetadataIfPluginIdIsNotProvided() { assertNull(RepositoryMetadataStore.getInstance().getMetadata("")); }
@Override @Deprecated public <KR, VR> KStream<KR, VR> transform(final org.apache.kafka.streams.kstream.TransformerSupplier<? super K, ? super V, KeyValue<KR, VR>> transformerSupplier, final String... stateStoreNames) { Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null"); final String name = builder.newProcessorName(TRANSFORM_NAME); return flatTransform(new TransformerSupplierAdapter<>(transformerSupplier), Named.as(name), stateStoreNames); }
@Test @SuppressWarnings("deprecation") public void shouldNotAllowNullStoreNamesOnTransformWithNamed() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.transform(transformerSupplier, Named.as("transform"), (String[]) null)); assertThat(exception.getMessage(), equalTo("stateStoreNames can't be a null array")); }
public void unregisterStream(String id) { removeGrantsForTarget(grnRegistry.newGRN(GRNTypes.STREAM, id)); }
@Test void unregisterStream() { entityOwnershipService.unregisterStream("123"); assertGrantRemoval(GRNTypes.STREAM, "123"); }
public static String substVars(String val, PropertyContainer pc1) throws ScanException { return substVars(val, pc1, null); }
@Test public void defaultValueReferencingAVariable() throws ScanException { context.putProperty("v1", "k1"); String result = OptionHelper.substVars("${undef:-${v1}}", context); assertEquals("k1", result); }
@Override public void debug(String msg) { logger.debug(msg); }
@Test void testDebugWithException() { Exception exception = new Exception(); jobRunrDashboardLogger.debug("Debug", exception); verify(slfLogger).debug("Debug", exception); }
public AccessPrivilege getAccessPrivilege(InetAddress addr) { return getAccessPrivilege(addr.getHostAddress(), addr.getCanonicalHostName()); }
@Test public void testCidrLongRW() { NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod, "192.168.0.0/255.255.252.0 rw"); Assert.assertEquals(AccessPrivilege.READ_WRITE, matcher.getAccessPrivilege(address1, hostname1)); Assert.assertEquals(AccessPrivilege.NONE, matcher.getAccessPrivilege(address2, hostname1)); }
public String queryParam(String queryParamName) { return optionalQueryParam(queryParamName).orElse(null); }
@Test void testRequestUrlQueryParam() { RequestUrl requestUrl = new MatchUrl("/api/jobs/enqueued?present=2").toRequestUrl("/api/jobs/:state"); assertThat(requestUrl.queryParam("present")).isEqualTo("2"); }
@Override public AwsProxyResponse handle(Throwable ex) { log.error("Called exception handler for:", ex); // adding a print stack trace in case we have no appender or we are running inside SAM local, where we need the // output to go to stderr. ex.printStackTrace(); if (ex instanceof InvalidRequestEventException || ex instanceof InternalServerErrorException) { return new AwsProxyResponse(500, HEADERS, getErrorJson(INTERNAL_SERVER_ERROR)); } else { return new AwsProxyResponse(502, HEADERS, getErrorJson(GATEWAY_TIMEOUT_ERROR)); } }
@Test void streamHandle_InvalidResponseObjectException_502State() throws IOException { ByteArrayOutputStream respStream = new ByteArrayOutputStream(); exceptionHandler.handle(new InvalidResponseObjectException(INVALID_RESPONSE_MESSAGE, null), respStream); assertNotNull(respStream); assertTrue(respStream.size() > 0); AwsProxyResponse resp = objectMapper.readValue(new ByteArrayInputStream(respStream.toByteArray()), AwsProxyResponse.class); assertNotNull(resp); assertEquals(502, resp.getStatusCode()); }
public Optional<ResT> waitForComplete(long timeoutMs) throws IOException { if (mCompleted || mCanceled) { return Optional.empty(); } ResT prevResponse; ResT response = null; do { // wait until the inbound stream is closed by the server. prevResponse = response; response = receive(timeoutMs); } while (response != null); return Optional.ofNullable(prevResponse); }
@Test public void waitForComplete() throws Exception { WriteResponse[] responses = Stream.generate(() -> WriteResponse.newBuilder().build()) .limit(BUFFER_SIZE * 2).toArray(WriteResponse[]::new); EXECUTOR.submit(() -> { for (WriteResponse response : responses) { mResponseObserver.onNext(response); } try { Thread.sleep(SHORT_TIMEOUT); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } mResponseObserver.onCompleted(); }); WriteResponse actualResponse = mStream.receive(TIMEOUT); assertEquals(responses[0], actualResponse); mStream.waitForComplete(TIMEOUT); actualResponse = mStream.receive(TIMEOUT); assertEquals(null, actualResponse); }
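waitForComplete drains the stream by remembering the previous response and returning it once receive yields null. The same keep-the-last-non-null loop in isolation, with an iterator standing in for receive:

import java.util.Iterator;
import java.util.Optional;

public class DrainSketch {
    // receive() is modeled by an iterator that yields null once exhausted.
    static <T> Optional<T> drain(Iterator<T> stream) {
        T prev;
        T current = null;
        do {
            prev = current;
            current = stream.hasNext() ? stream.next() : null;
        } while (current != null);
        return Optional.ofNullable(prev); // the last element seen, if any
    }

    public static void main(String[] args) {
        System.out.println(drain(java.util.Arrays.asList("a", "b", "c").iterator())); // Optional[c]
        System.out.println(drain(java.util.Collections.<String>emptyList().iterator())); // Optional.empty
    }
}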
public String getValueForDisplay(){ return getValue(); }
@Test public void shouldReturnValueForDisplay() { ParamConfig paramConfig = new ParamConfig("foo", "bar"); assertThat(paramConfig.getValueForDisplay(), is("bar")); }
@Override public <T> T get(String key, Class<T> type) { ClusterConfig config = findClusterConfig(key); if (config == null) { LOG.debug("Couldn't find cluster config of type {}", key); return null; } T result = extractPayload(config.payload(), type); if (result == null) { LOG.error("Couldn't extract payload from cluster config (type: {})", key); } return result; }
@Test public void getWithKeyReturnsExistingConfig() throws Exception { DBObject dbObject = new BasicDBObjectBuilder() .add("type", "foo") .add("payload", Collections.singletonMap("text", "TEST")) .add("last_updated", TIME.toString()) .add("last_updated_by", "ID") .get(); @SuppressWarnings("deprecation") final DBCollection collection = mongoConnection.getDatabase().getCollection(COLLECTION_NAME); collection.save(dbObject); assertThat(collection.count()).isEqualTo(1L); CustomConfig customConfig = clusterConfigService.get("foo", CustomConfig.class); assertThat(customConfig).isInstanceOf(CustomConfig.class); assertThat(customConfig.text).isEqualTo("TEST"); }
@Override public BasicTypeDefine reconvert(Column column) { try { return super.reconvert(column); } catch (SeaTunnelRuntimeException e) { throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.KINGBASE, column.getDataType().getSqlType().name(), column.getName()); } }
@Test public void testReconvertString() { Column column = PhysicalColumn.builder() .name("test") .dataType(BasicType.STRING_TYPE) .columnLength(null) .build(); BasicTypeDefine typeDefine = KingbaseTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals(KingbaseTypeConverter.PG_TEXT, typeDefine.getColumnType()); Assertions.assertEquals(KingbaseTypeConverter.PG_TEXT, typeDefine.getDataType()); column = PhysicalColumn.builder() .name("test") .dataType(BasicType.STRING_TYPE) .columnLength(1L) .build(); typeDefine = KingbaseTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals( String.format("%s(%s)", KingbaseTypeConverter.PG_VARCHAR, column.getColumnLength()), typeDefine.getColumnType()); Assertions.assertEquals(KingbaseTypeConverter.PG_VARCHAR, typeDefine.getDataType()); column = PhysicalColumn.builder() .name("test") .dataType(BasicType.STRING_TYPE) .columnLength(10485761L) .build(); typeDefine = KingbaseTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals(KingbaseTypeConverter.PG_TEXT, typeDefine.getColumnType()); Assertions.assertEquals(KingbaseTypeConverter.PG_TEXT, typeDefine.getDataType()); }
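The three assertions sketch the length regimes the converter appears to use: no length or a very large length maps to TEXT, anything in between to VARCHAR(n). A sketch of that threshold logic; the 10485760 cutoff is inferred from the test's 10485761 boundary value, not confirmed from the converter source:

public class StringTypeSketch {
    static final long VARCHAR_CAP = 10485760L; // assumed cap, inferred from the test

    static String columnType(Long length) {
        if (length == null || length > VARCHAR_CAP) {
            return "TEXT"; // unbounded or oversized strings fall back to TEXT
        }
        return String.format("VARCHAR(%d)", length);
    }

    public static void main(String[] args) {
        System.out.println(columnType(null));      // TEXT
        System.out.println(columnType(1L));        // VARCHAR(1)
        System.out.println(columnType(10485761L)); // TEXT
    }
}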
@Override public Set<String> getClusterList(String topic) throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, MQClientException, InterruptedException { return this.defaultMQAdminExtImpl.getClusterList(topic); }
@Test public void testGetClusterList() throws InterruptedException, RemotingTimeoutException, MQClientException, RemotingSendRequestException, RemotingConnectException { Set<String> clusterlist = defaultMQAdminExt.getClusterList("UnitTest"); assertThat(clusterlist.contains("default-cluster-one")).isTrue(); assertThat(clusterlist.contains("default-cluster-two")).isTrue(); }
public static void register(Class<? extends Event> eventClass, Subscriber subscriber) { CopyOnWriteArraySet<Subscriber> set = SUBSCRIBER_MAP.get(eventClass); if (set == null) { set = new CopyOnWriteArraySet<Subscriber>(); CopyOnWriteArraySet<Subscriber> old = SUBSCRIBER_MAP.putIfAbsent(eventClass, set); if (old != null) { set = old; } } set.add(subscriber); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Register subscriber: {} of event: {}.", subscriber, eventClass); } }
@Test public void register() throws Exception { Subscriber subscriber = new TestSubscriber(); try { Assert.assertEquals(EventBus.isEnable(TestEvent.class), false); EventBus.register(TestEvent.class, subscriber); Assert.assertEquals(EventBus.isEnable(TestEvent.class), true); } finally { EventBus.unRegister(TestEvent.class, subscriber); } Assert.assertEquals(EventBus.isEnable(TestEvent.class), false); }
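register's get/putIfAbsent dance creates the subscriber set exactly once under contention. A self-contained sketch of the idiom, with the Java 8 computeIfAbsent equivalent noted in a comment:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;

public class RegistrySketch {
    static final ConcurrentHashMap<Class<?>, CopyOnWriteArraySet<Object>> MAP =
            new ConcurrentHashMap<>();

    static void register(Class<?> eventClass, Object subscriber) {
        CopyOnWriteArraySet<Object> set = MAP.get(eventClass);
        if (set == null) {
            set = new CopyOnWriteArraySet<>();
            CopyOnWriteArraySet<Object> old = MAP.putIfAbsent(eventClass, set);
            if (old != null) {
                set = old; // another thread won the race; use its set
            }
        }
        set.add(subscriber);
        // Equivalent one-liner on Java 8+:
        // MAP.computeIfAbsent(eventClass, k -> new CopyOnWriteArraySet<>()).add(subscriber);
    }

    public static void main(String[] args) {
        register(String.class, "subscriberA");
        register(String.class, "subscriberB");
        System.out.println(MAP.get(String.class).size()); // 2
    }
}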
public String asPairsWithComment(String comment) { return new PropertiesWriter(pairs).writeString(comment); }
@Test public void pairsWithComment() { assertThat(createTestKeyValues().asPairsWithComment("this is a comment"), is("# this is a comment\n" + "first=1\n" + "second=2\n" + "third=3\n" + "FOURTH=4\n")); }
@Override public void run() { try { interceptorChain.doInterceptor(task); } catch (Exception e) { Loggers.SRV_LOG.info("Interceptor health check task {} failed", task.getTaskId(), e); } }
@Test void testRunHealthyInstanceWithHeartBeat() { injectInstance(true, System.currentTimeMillis()); when(globalConfig.isExpireInstance()).thenReturn(true); taskWrapper.run(); assertFalse(client.getAllInstancePublishInfo().isEmpty()); assertTrue(client.getInstancePublishInfo(Service.newService(NAMESPACE, GROUP_NAME, SERVICE_NAME)).isHealthy()); }
public static <T> RetryTransformer<T> of(Retry retry) { return new RetryTransformer<>(retry); }
@Test public void doNotRetryFromPredicateUsingMaybe() { RetryConfig config = RetryConfig.custom() .retryOnException(t -> t instanceof IOException) .waitDuration(Duration.ofMillis(50)) .maxAttempts(3).build(); Retry retry = Retry.of("testName", config); given(helloWorldService.returnHelloWorld()) .willThrow(new HelloWorldException()); Maybe.fromCallable(helloWorldService::returnHelloWorld) .compose(RetryTransformer.of(retry)) .test() .assertError(HelloWorldException.class) .assertNotComplete() .assertSubscribed(); then(helloWorldService).should().returnHelloWorld(); Retry.Metrics metrics = retry.getMetrics(); assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isEqualTo(1); assertThat(metrics.getNumberOfFailedCallsWithRetryAttempt()).isZero(); }
public void rollbackVersions(List<Job> jobsThatFailed) { Set<UUID> jobIdsThatFailed = jobsThatFailed.stream().map(Job::getId).collect(toSet()); this.jobVersioners.stream() .filter(jobVersioner -> !jobIdsThatFailed.contains(jobVersioner.getJob().getId())) .forEach(JobVersioner::commitVersion); }
@Test void testJobListVersionerInTryWithResourcesOnRollbackOfSomeJobsVersionIsDecreasedForThoseJobs() { // GIVEN Job job1 = aScheduledJob().withVersion(5).build(); Job job2 = aScheduledJob().withVersion(5).build(); // WHEN try (JobListVersioner jobListVersioner = new JobListVersioner(asList(job1, job2))) { jobListVersioner.rollbackVersions(asList(job2)); } // THEN assertThat(job1).hasVersion(6); assertThat(job2).hasVersion(5); }
public static int doubleCompare(double a, double b) { // these three ifs can only be true if neither value is NaN if (a < b) { return -1; } if (a > b) { return 1; } // this check ensures doubleCompare(+0, -0) will return 0 // if we just did a doubleToLongBits comparison, then they // would not compare as equal if (a == b) { return 0; } // this ensures that doubleCompare(NaN, NaN) will return 0 // doubleToLongBits converts all NaNs to the same representation long aBits = doubleToLongBits(a); long bBits = doubleToLongBits(b); return Long.compare(aBits, bBits); }
@Test public void testDoubleCompare() { assertEquals(doubleCompare(0, Double.parseDouble("-0")), 0); assertEquals(doubleCompare(Double.NaN, Double.NaN), 0); //0x7ff8123412341234L is a different representation of NaN assertEquals(doubleCompare(Double.NaN, longBitsToDouble(0x7ff8123412341234L)), 0); }
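For comparison, java.lang.Double.compare already collapses all NaN bit patterns (doubleToLongBits canonicalizes them) but orders -0.0 before +0.0; the extra a == b check above is what makes the two zeros compare equal. A quick check of those corner cases:

public class DoubleCompareSketch {
    public static void main(String[] args) {
        double weirdNaN = Double.longBitsToDouble(0x7ff8123412341234L);
        System.out.println(Double.compare(Double.NaN, weirdNaN)); // 0: doubleToLongBits canonicalizes NaN
        System.out.println(Double.compare(0.0, -0.0));            // 1: the zeros have different bit patterns
        // doubleCompare(0.0, -0.0) above returns 0 because of its a == b check.
    }
}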
@Override @Private public boolean isApplicationActive(ApplicationId id) throws YarnException { ApplicationReport report = null; try { report = client.getApplicationReport(id); } catch (ApplicationNotFoundException e) { // the app does not exist return false; } catch (IOException e) { throw new YarnException(e); } if (report == null) { // the app does not exist return false; } return ACTIVE_STATES.contains(report.getYarnApplicationState()); }
@Test void testNonExistentApp() throws Exception { YarnClient client = createCheckerWithMockedClient(); ApplicationId id = ApplicationId.newInstance(1, 1); // test for null doReturn(null).when(client).getApplicationReport(id); assertFalse(checker.isApplicationActive(id)); // test for ApplicationNotFoundException doThrow(new ApplicationNotFoundException("Throw!")).when(client) .getApplicationReport(id); assertFalse(checker.isApplicationActive(id)); }
public List<InetAddress> addresses(String inetHost, ResolvedAddressTypes resolvedAddressTypes) { String normalized = normalize(inetHost); ensureHostsFileEntriesAreFresh(); switch (resolvedAddressTypes) { case IPV4_ONLY: return inet4Entries.get(normalized); case IPV6_ONLY: return inet6Entries.get(normalized); case IPV4_PREFERRED: List<InetAddress> allInet4Addresses = inet4Entries.get(normalized); return allInet4Addresses != null ? allAddresses(allInet4Addresses, inet6Entries.get(normalized)) : inet6Entries.get(normalized); case IPV6_PREFERRED: List<InetAddress> allInet6Addresses = inet6Entries.get(normalized); return allInet6Addresses != null ? allAddresses(allInet6Addresses, inet4Entries.get(normalized)) : inet4Entries.get(normalized); default: throw new IllegalArgumentException("Unknown ResolvedAddressTypes " + resolvedAddressTypes); } }
@Test public void shouldntFindWhenAddressesTypeDoesntMatch() { HostsFileEntriesProvider.Parser parser = givenHostsParserWith( LOCALHOST_V4_ADDRESSES, Collections.<String, List<InetAddress>>emptyMap() ); DefaultHostsFileEntriesResolver resolver = new DefaultHostsFileEntriesResolver(parser, ENTRIES_TTL); List<InetAddress> addresses = resolver.addresses("localhost", ResolvedAddressTypes.IPV6_ONLY); assertNull(addresses, "Should not resolve an address when only IPv4 entries exist and IPv6 is requested"); }
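The *_PREFERRED branches of addresses implement a preferred-then-fallback lookup; assuming allAddresses simply concatenates the two lists, the logic reduces to the sketch below (plain String maps, IPv4 preferred):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public class PreferredLookupSketch {
    // If the preferred family has entries, list them first and append the other
    // family's entries; otherwise fall back to the other family alone (possibly null).
    static List<String> lookup(Map<String, List<String>> preferred,
                               Map<String, List<String>> other, String host) {
        List<String> first = preferred.get(host);
        if (first == null) {
            return other.get(host);
        }
        List<String> result = new ArrayList<>(first);
        List<String> second = other.get(host);
        if (second != null) {
            result.addAll(second);
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, List<String>> v4 = Collections.singletonMap("localhost", Arrays.asList("127.0.0.1"));
        Map<String, List<String>> v6 = Collections.singletonMap("localhost", Arrays.asList("::1"));
        System.out.println(lookup(v4, v6, "localhost")); // [127.0.0.1, ::1]
        System.out.println(lookup(Collections.emptyMap(), v6, "localhost")); // [::1]
    }
}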
@PUT @Path("/{pluginName}/config/validate") @Operation(summary = "Validate the provided configuration against the configuration definition for the specified pluginName") public ConfigInfos validateConfigs( final @PathParam("pluginName") String pluginName, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(pluginName))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + pluginName ); } // the validated configs don't need to be logged FutureCallback<ConfigInfos> validationCallback = new FutureCallback<>(); herder.validateConnectorConfig(connectorConfig, validationCallback, false); try { return validationCallback.get(requestTimeout.timeoutMs(), TimeUnit.MILLISECONDS); } catch (StagedTimeoutException e) { Stage stage = e.stage(); String message; if (stage.completed() != null) { message = "Request timed out. The last operation the worker completed was " + stage.description() + ", which began at " + Instant.ofEpochMilli(stage.started()) + " and completed at " + Instant.ofEpochMilli(stage.completed()); } else { message = "Request timed out. The worker is currently " + stage.description() + ", which began at " + Instant.ofEpochMilli(stage.started()); } // This timeout is for the operation itself. None of the timeout error codes are relevant, so internal server // error is the best option throw new ConnectRestException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), message); } catch (TimeoutException e) { // This timeout is for the operation itself. None of the timeout error codes are relevant, so internal server // error is the best option throw new ConnectRestException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), "Request timed out"); } catch (InterruptedException e) { throw new ConnectRestException(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), "Request interrupted"); } }
@Test public void testValidateConfigWithSimpleName() throws Throwable { @SuppressWarnings("unchecked") ArgumentCaptor<Callback<ConfigInfos>> configInfosCallback = ArgumentCaptor.forClass(Callback.class); doAnswer(invocation -> { ConfigDef connectorConfigDef = ConnectorConfig.configDef(); List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(PROPS); Connector connector = new ConnectorPluginsResourceTestConnector(); Config config = connector.validate(PROPS); ConfigDef configDef = connector.config(); Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys(); List<ConfigValue> configValues = config.configValues(); Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys); resultConfigKeys.putAll(connectorConfigDef.configKeys()); configValues.addAll(connectorConfigValues); ConfigInfos configInfos = AbstractHerder.generateResult( ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test") ); configInfosCallback.getValue().onCompletion(null, configInfos); return null; }).when(herder).validateConnectorConfig(eq(PROPS), configInfosCallback.capture(), anyBoolean()); // make a request to connector-plugins resource using just the simple class name. ConfigInfos configInfos = connectorPluginsResource.validateConfigs( ConnectorPluginsResourceTestConnector.class.getSimpleName(), PROPS ); assertEquals(CONFIG_INFOS.name(), configInfos.name()); assertEquals(0, configInfos.errorCount()); assertEquals(CONFIG_INFOS.groups(), configInfos.groups()); assertEquals(new HashSet<>(CONFIG_INFOS.values()), new HashSet<>(configInfos.values())); verify(herder).validateConnectorConfig(eq(PROPS), any(), anyBoolean()); }
public HollowOrdinalIterator findKeysWithPrefix(String prefix) { TST current; HollowOrdinalIterator it; do { current = prefixIndexVolatile; it = current.findKeysWithPrefix(prefix); } while (current != this.prefixIndexVolatile); return it; }
@Test public void testAutoExpandFieldPath() throws Exception { for (Movie movie : getReferenceList()) { objectMapper.add(movie); } StateEngineRoundTripper.roundTripSnapshot(writeStateEngine, readStateEngine); HollowPrefixIndex index = new HollowPrefixIndex(readStateEngine, "MovieWithReferenceName", "name.n"); // no .value appended; field-path auto-expand should resolve it Set<Integer> ordinals = toSet(index.findKeysWithPrefix("the")); Assert.assertTrue(ordinals.size() == 3); }
public void detectAndResolve() { createTopologyGraph(); }
@TestTemplate void testDeadlockCausedByExchange() { // P0/P1 = PIPELINED edges with priority 0/1: node 0 feeds node 1 through a single exchange over two edges, one P0 and one P1 TestingBatchExecNode[] nodes = new TestingBatchExecNode[2]; for (int i = 0; i < nodes.length; i++) { nodes[i] = new TestingBatchExecNode("TestingBatchExecNode" + i); } Configuration configuration = new Configuration(); BatchShuffleMode batchShuffleMode = batchShuffleModeAndStreamExchangeMode.f0; StreamExchangeMode streamExchangeMode = batchShuffleModeAndStreamExchangeMode.f1; configuration.set(ExecutionOptions.BATCH_SHUFFLE_MODE, batchShuffleMode); BatchExecExchange exchange = new BatchExecExchange( configuration, InputProperty.builder() .requiredDistribution(InputProperty.ANY_DISTRIBUTION) .build(), (RowType) nodes[0].getOutputType(), "Exchange"); exchange.setRequiredExchangeMode(streamExchangeMode); ExecEdge execEdge = ExecEdge.builder().source(nodes[0]).target(exchange).build(); exchange.setInputEdges(Collections.singletonList(execEdge)); nodes[1].addInput(exchange, InputProperty.builder().priority(0).build()); nodes[1].addInput(exchange, InputProperty.builder().priority(1).build()); InputPriorityConflictResolver resolver = new InputPriorityConflictResolver( Collections.singletonList(nodes[1]), InputProperty.DamBehavior.END_INPUT, streamExchangeMode, configuration); resolver.detectAndResolve(); ExecNode<?> input0 = nodes[1].getInputNodes().get(0); ExecNode<?> input1 = nodes[1].getInputNodes().get(1); assertThat(input1).isNotSameAs(input0); Consumer<ExecNode<?>> checkExchange = execNode -> { assertThat(execNode).isInstanceOf(BatchExecExchange.class); BatchExecExchange e = (BatchExecExchange) execNode; assertThat(e.getRequiredExchangeMode()) .isEqualTo(Optional.of(streamExchangeMode)); assertThat(e.getInputEdges().get(0).getSource()).isEqualTo(nodes[0]); }; checkExchange.accept(input0); checkExchange.accept(input1); }
@Override public void createApiConfig(K8sApiConfig config) { checkNotNull(config, ERR_NULL_CONFIG); configStore.createApiConfig(config); log.info(String.format(MSG_CONFIG, endpoint(config), MSG_CREATED)); }
@Test(expected = IllegalArgumentException.class) public void testCreateDuplicateConfig() { target.createApiConfig(apiConfig1); target.createApiConfig(apiConfig1); }
public static int sum(byte[] x) { int sum = 0; for (int n : x) { sum += n; } return sum; }
@Test public void testSum_doubleArr() { System.out.println("sum"); double[] data = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0}; assertEquals(45, MathEx.sum(data), 1E-6); }
Configuration getEffectiveConfiguration(String[] args) throws CliArgsException { final CommandLine commandLine = cli.parseCommandLineOptions(args, true); final Configuration effectiveConfiguration = new Configuration(baseConfiguration); effectiveConfiguration.addAll(cli.toConfiguration(commandLine)); effectiveConfiguration.set(DeploymentOptions.TARGET, KubernetesSessionClusterExecutor.NAME); return effectiveConfiguration; }
@Test void testHeapMemoryPropertyWithOldConfigKey() throws Exception { Configuration configuration = new Configuration(); configuration.set(DeploymentOptions.TARGET, KubernetesSessionClusterExecutor.NAME); configuration.set(JobManagerOptions.JOB_MANAGER_HEAP_MEMORY_MB, 2048); configuration.set(TaskManagerOptions.TASK_MANAGER_HEAP_MEMORY_MB, 4096); final KubernetesSessionCli cli = new KubernetesSessionCli(configuration, confDirPath.toAbsolutePath().toString()); final Configuration executorConfig = cli.getEffectiveConfiguration(new String[] {}); final ClusterClientFactory<String> clientFactory = getClusterClientFactory(executorConfig); final ClusterSpecification clusterSpecification = clientFactory.getClusterSpecification(executorConfig); assertThat(clusterSpecification.getMasterMemoryMB()).isEqualTo(2048); assertThat(clusterSpecification.getTaskManagerMemoryMB()).isEqualTo(4096); }
public double d(int[] x, int[] y) { if (x.length != y.length) { throw new IllegalArgumentException(String.format("Arrays have different length: x[%d], y[%d]", x.length, y.length)); } double dist = 0.0; if (weight == null) { for (int i = 0; i < x.length; i++) { double d = x[i] - y[i]; dist += d * d; } } else { if (x.length != weight.length) throw new IllegalArgumentException(String.format("Input vectors and weight vector have different length: %d, %d", x.length, weight.length)); for (int i = 0; i < x.length; i++) { double d = x[i] - y[i]; dist += weight[i] * d * d; } } return Math.sqrt(dist); }
@Test public void testDistance() { System.out.println("distance"); double[] x = {1.0, 2.0, 3.0, 4.0}; double[] y = {4.0, 3.0, 2.0, 1.0}; assertEquals(4.472136, new EuclideanDistance().d(x, y), 1E-6); double[] w = {-2.1968219, -0.9559913, -0.0431738, 1.0567679, 0.3853515}; double[] v = {-1.7781325, -0.6659839, 0.9526148, -0.9460919, -0.3925300}; assertEquals(2.422302, new EuclideanDistance().d(w, v), 1E-6); SparseArray s = new SparseArray(); s.append(1, 1.0); s.append(2, 2.0); s.append(3, 3.0); s.append(4, 4.0); SparseArray t = new SparseArray(); t.append(1, 4.0); t.append(2, 3.0); t.append(3, 2.0); t.append(4, 1.0); assertEquals(4.472136, new SparseEuclideanDistance().d(s, t), 1E-6); s = new SparseArray(); s.append(2, 2.0); s.append(3, 3.0); s.append(4, 4.0); t = new SparseArray(); t.append(1, 4.0); t.append(2, 3.0); t.append(3, 2.0); assertEquals(5.830951, new SparseEuclideanDistance().d(s, t), 1E-6); s = new SparseArray(); s.append(1, 1.0); t = new SparseArray(); t.append(3, 2.0); assertEquals(2.236067, new SparseEuclideanDistance().d(s, t), 1E-6); }
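The unweighted branch is the familiar square root of summed squared differences; a tiny self-contained check reproducing the first asserted value:

public class EuclideanSketch {
    // Plain Euclidean distance, matching the unweighted branch of d():
    // sqrt of the sum of (x_i - y_i)^2.
    static double d(double[] x, double[] y) {
        double dist = 0.0;
        for (int i = 0; i < x.length; i++) {
            double diff = x[i] - y[i];
            dist += diff * diff;
        }
        return Math.sqrt(dist);
    }

    public static void main(String[] args) {
        double[] x = {1.0, 2.0, 3.0, 4.0};
        double[] y = {4.0, 3.0, 2.0, 1.0};
        System.out.println(d(x, y)); // 4.4721..., the value asserted in the test
    }
}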
public static boolean hasIllegalNodeAddress(List<RemoteInstance> remoteInstances) { if (CollectionUtils.isEmpty(remoteInstances)) { return false; } Set<String> remoteAddressSet = remoteInstances.stream().map(remoteInstance -> remoteInstance.getAddress().getHost()).collect(Collectors.toSet()); return !Sets.intersection(ILLEGAL_NODE_ADDRESS_IN_CLUSTER_MODE, remoteAddressSet).isEmpty(); }
@Test public void hasIllegalNodeAddressFalse() { List<RemoteInstance> remoteInstances = new ArrayList<>(); remoteInstances.add(new RemoteInstance(new Address("123.23.4.2", 8899, true))); boolean flag = OAPNodeChecker.hasIllegalNodeAddress(remoteInstances); Assertions.assertFalse(flag); }
@SuppressWarnings("unchecked") public DATA_TYPE getData() { Object data = this.execution.taskInstance.getData(); if (data == null) { return null; } else if (dataClass.isInstance(data)) { return (DATA_TYPE) data; } throw new DataClassMismatchException(dataClass, data.getClass()); }
@Test public void test_data_class_type_not_equals() { DataClassMismatchException dataClassMismatchException = assertThrows( DataClassMismatchException.class, () -> { Instant now = Instant.now(); OneTimeTask<Integer> task = TestTasks.oneTime("OneTime", Integer.class, (instance, executionContext) -> {}); Execution execution = new Execution( now, task.instance("id1", new Integer(1))); // Data class is an integer new ScheduledExecution<>(String.class, execution) .getData(); // Instantiate with incorrect type }); assertThat( dataClassMismatchException.getMessage(), CoreMatchers.containsString("Task data mismatch")); }
@Override public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return true; } try { new SwiftAttributesFinderFeature(session).find(file, listener); return true; } catch(NotfoundException e) { return false; } }
@Test public void testFindKeyWithSameSuffix() throws Exception { final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)); container.attributes().setRegion("IAD"); final String suffix = new AlphanumericRandomStringService().random(); final Path other = new Path(container, String.format("%s.%s", new AlphanumericRandomStringService().random(), suffix), EnumSet.of(Path.Type.file)); new SwiftTouchFeature(session, new SwiftRegionService(session)).touch(other, new TransferStatus()); final Path file = new Path(container, suffix, EnumSet.of(Path.Type.file)); final SwiftFindFeature feature = new SwiftFindFeature(session); assertFalse(feature.find(file)); assertFalse(feature.find(new Path(file).withType(EnumSet.of(Path.Type.directory)))); new SwiftTouchFeature(session, new SwiftRegionService(session)).touch(file, new TransferStatus()); assertTrue(feature.find(file)); assertFalse(feature.find(new Path(file).withType(EnumSet.of(Path.Type.directory)))); assertFalse(feature.find(new Path(String.format("%s-", file.getAbsolute()), EnumSet.of(Path.Type.file)))); assertFalse(feature.find(new Path(String.format("%s-", file.getAbsolute()), EnumSet.of(Path.Type.directory)))); assertFalse(feature.find(new Path(String.format("-%s", file.getAbsolute()), EnumSet.of(Path.Type.file)))); assertFalse(feature.find(new Path(String.format("-%s", file.getAbsolute()), EnumSet.of(Path.Type.directory)))); assertNotNull(new SwiftAttributesFinderFeature(session).find(file)); new SwiftDeleteFeature(session).delete(Arrays.asList(file, other), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public static String validIdentifier(String value, int maxLen, String name) { Check.notEmpty(value, name); if (value.length() > maxLen) { throw new IllegalArgumentException( MessageFormat.format("[{0}] = [{1}] exceeds max len [{2}]", name, value, maxLen)); } if (!IDENTIFIER_PATTERN.matcher(value).find()) { throw new IllegalArgumentException( MessageFormat.format("[{0}] = [{1}] must be \"{2}\"", name, value, IDENTIFIER_PATTERN_STR)); } return value; }
@Test(expected = IllegalArgumentException.class) public void validIdentifierInvalid3() throws Exception { Check.validIdentifier("1", 1, ""); }
@Override public void initialize(String name, Map<String, String> properties) { String uri = properties.get(CatalogProperties.URI); Preconditions.checkArgument(null != uri, "JDBC connection URI is required"); try { // We'll ensure the expected JDBC driver implementation class is initialized through // reflection regardless of which classloader ends up using this JdbcSnowflakeClient, but // we'll only warn if the expected driver fails to load, since users may use repackaged or // custom JDBC drivers for Snowflake communication. Class.forName(JdbcSnowflakeClient.EXPECTED_JDBC_IMPL); } catch (ClassNotFoundException cnfe) { LOG.warn( "Failed to load expected JDBC SnowflakeDriver - if queries fail by failing" + " to find a suitable driver for jdbc:snowflake:// URIs, you must add the Snowflake " + " JDBC driver to your jars/packages", cnfe); } // The uniqueAppIdentifier should be less than 50 characters, so the GUID is trimmed. String uniqueId = UUID.randomUUID().toString().replace("-", "").substring(0, UNIQUE_ID_LENGTH); String uniqueAppIdentifier = APP_IDENTIFIER + "_" + uniqueId; String userAgentSuffix = IcebergBuild.fullVersion() + " " + uniqueAppIdentifier; // Populate the application identifier in the JDBC client properties properties.put(JdbcCatalog.PROPERTY_PREFIX + JDBC_APPLICATION_PROPERTY, uniqueAppIdentifier); // Add the application identifier to the user agent header of the JDBC requests. properties.put(JdbcCatalog.PROPERTY_PREFIX + JDBC_USER_AGENT_SUFFIX_PROPERTY, userAgentSuffix); JdbcClientPool connectionPool = new JdbcClientPool(uri, properties); initialize(name, new JdbcSnowflakeClient(connectionPool), new FileIOFactory(), properties); }
@Test public void testInitializeNullFileIO() { assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> catalog.initialize(TEST_CATALOG_NAME, fakeClient, null, properties)) .withMessageContaining("fileIOFactory must be non-null"); }
public boolean execute( WorkflowSummary workflowSummary, Step step, StepRuntimeSummary runtimeSummary) { StepRuntime.Result result = getStepRuntime(runtimeSummary.getType()) .execute(workflowSummary, step, cloneSummary(runtimeSummary)); runtimeSummary.mergeRuntimeUpdate(result.getTimeline(), result.getArtifacts()); switch (result.getState()) { case CONTINUE: return true; case DONE: runtimeSummary.markFinishing(tracingManager); return result.shouldPersist(); case USER_ERROR: markTerminatedWithMetric( runtimeSummary, result.getState(), getUserErrorStatus(runtimeSummary)); return false; case PLATFORM_ERROR: markTerminatedWithMetric( runtimeSummary, result.getState(), getPlatformErrorStatus(runtimeSummary)); return false; case FATAL_ERROR: markTerminatedWithMetric( runtimeSummary, result.getState(), StepInstance.Status.FATALLY_FAILED); return false; case STOPPED: markTerminatedWithMetric(runtimeSummary, result.getState(), StepInstance.Status.STOPPED); return false; case TIMED_OUT: markTerminatedWithMetric(runtimeSummary, result.getState(), StepInstance.Status.TIMED_OUT); return false; default: throw new MaestroInternalError( "Entered an unexpected result state [%s] for step %s when executing", result.getState(), runtimeSummary.getIdentity()); } }
@Test public void testExecuteFailure() { StepInstance.StepRetry stepRetry = new StepInstance.StepRetry(); stepRetry.setErrorRetryLimit(1); stepRetry.setPlatformRetryLimit(1); stepRetry.incrementByStatus(StepInstance.Status.USER_FAILED); count.incrementAndGet(); StepRuntimeSummary summary = StepRuntimeSummary.builder().type(StepType.NOOP).stepRetry(stepRetry).build(); boolean ret = runtimeManager.execute(workflowSummary, null, summary); assertFalse(ret); assertEquals(StepInstance.Status.PLATFORM_FAILED, summary.getRuntimeState().getStatus()); assertNotNull(summary.getRuntimeState().getEndTime()); assertNotNull(summary.getRuntimeState().getModifyTime()); assertEquals(1, summary.getPendingRecords().size()); assertEquals( StepInstance.Status.NOT_CREATED, summary.getPendingRecords().get(0).getOldStatus()); assertEquals( StepInstance.Status.PLATFORM_FAILED, summary.getPendingRecords().get(0).getNewStatus()); assertTrue(summary.getArtifacts().isEmpty()); stepRetry.incrementByStatus(StepInstance.Status.PLATFORM_FAILED); ret = runtimeManager.execute(workflowSummary, null, summary); assertFalse(ret); assertEquals(StepInstance.Status.FATALLY_FAILED, summary.getRuntimeState().getStatus()); assertNotNull(summary.getRuntimeState().getEndTime()); assertNotNull(summary.getRuntimeState().getModifyTime()); assertEquals(2, summary.getPendingRecords().size()); assertEquals( StepInstance.Status.PLATFORM_FAILED, summary.getPendingRecords().get(1).getOldStatus()); assertEquals( StepInstance.Status.FATALLY_FAILED, summary.getPendingRecords().get(1).getNewStatus()); assertTrue(summary.getArtifacts().isEmpty()); }
@Override public int hashCode() { return msgId.hashCode(); }
@Test public void hashCodeTest() { MessageIdImpl msgId1 = new MessageIdImpl(0, 0, 0); MessageIdImpl msgId2 = new BatchMessageIdImpl(1, 1, 1, 1); TopicMessageIdImpl topicMsgId1 = new TopicMessageIdImpl("topic-partition-1", msgId1); TopicMessageIdImpl topic2MsgId1 = new TopicMessageIdImpl("topic2-partition-1", msgId1); TopicMessageIdImpl topicMsgId2 = new TopicMessageIdImpl("topic-partition-2", msgId2); assertEquals(topicMsgId1.hashCode(), topicMsgId1.hashCode()); assertEquals(topic2MsgId1.hashCode(), topic2MsgId1.hashCode()); assertEquals(topicMsgId1.hashCode(), msgId1.hashCode()); assertNotEquals(topicMsgId1.hashCode(), topicMsgId2.hashCode()); assertEquals(topicMsgId2.hashCode(), msgId2.hashCode()); }
public BackOffTimer(ScheduledExecutorService scheduler) { this.scheduler = scheduler; }
@Test public void testBackOffTimer() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final AtomicInteger counter = new AtomicInteger(); final ScheduledExecutorService executor = Executors.newScheduledThreadPool(3); final BackOff backOff = BackOff.builder().delay(100).build(); final BackOffTimer timer = new BackOffTimer(executor); final AtomicLong first = new AtomicLong(); BackOffTimer.Task task = timer.schedule( backOff, context -> { assertEquals(counter.incrementAndGet(), context.getCurrentAttempts()); assertEquals(100, context.getCurrentDelay()); assertEquals(100L * counter.get(), context.getCurrentElapsedTime()); if (first.get() == 0) { first.set(context.getFirstAttemptTime()); } else { assertEquals(first.get(), context.getFirstAttemptTime()); } return counter.get() < 5; }); task.whenComplete( (context, throwable) -> { assertEquals(5, counter.get()); latch.countDown(); }); latch.await(5, TimeUnit.SECONDS); executor.shutdownNow(); }
@Override public boolean add(V value) { lock.lock(); try { checkComparator(); BinarySearchResult<V> res = binarySearch(value); int index = 0; if (res.getIndex() < 0) { index = -(res.getIndex() + 1); } else { index = res.getIndex() + 1; } get(commandExecutor.evalWriteNoRetryAsync(getRawName(), codec, RedisCommands.EVAL_VOID, "local len = redis.call('llen', KEYS[1]);" + "if tonumber(ARGV[1]) < len then " + "local pivot = redis.call('lindex', KEYS[1], ARGV[1]);" + "redis.call('linsert', KEYS[1], 'before', pivot, ARGV[2]);" + "return;" + "end;" + "redis.call('rpush', KEYS[1], ARGV[2]);", Arrays.asList(getRawName()), index, encode(value))); return true; } finally { lock.unlock(); } }
@Test public void testIteratorSequence() { RPriorityQueue<Integer> set = redisson.getPriorityQueue("set"); for (int i = 0; i < 1000; i++) { set.add(Integer.valueOf(i)); } Queue<Integer> setCopy = new PriorityQueue<Integer>(); for (int i = 0; i < 1000; i++) { setCopy.add(Integer.valueOf(i)); } checkIterator(set, setCopy); }
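add turns the binary-search result into an insertion index using the Collections.binarySearch convention: a negative result encodes -(insertionPoint) - 1, and a hit inserts after the existing equal element. The same arithmetic in isolation (assuming Redisson's BinarySearchResult follows that convention):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class SortedInsertSketch {
    static void insertSorted(List<Integer> list, int value) {
        int res = Collections.binarySearch(list, value);
        // Miss: decode the insertion point. Hit: insert after the existing element.
        int index = res < 0 ? -(res + 1) : res + 1;
        list.add(index, value);
    }

    public static void main(String[] args) {
        List<Integer> list = new ArrayList<>(Arrays.asList(1, 3, 3, 5));
        insertSorted(list, 4); // miss: insertion point keeps the order
        insertSorted(list, 3); // hit: lands after an existing 3
        System.out.println(list); // [1, 3, 3, 3, 4, 5]
    }
}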
@GetMapping("/secretInfo") public ShenyuAdminResult info() { return ShenyuAdminResult.success(null, secretService.info()); }
@Test public void testQuerySecretInfo() throws Exception { final String querySecretInfoUri = "/platform/secretInfo"; this.mockMvc.perform(MockMvcRequestBuilders.request(HttpMethod.GET, querySecretInfoUri)) .andExpect(status().isOk()) .andExpect(jsonPath("$.code", is(CommonErrorCode.SUCCESSFUL))) .andExpect(jsonPath("$.data", is(secretService.info()))) .andReturn(); }
protected void populateSettings(CliParser cli) throws InvalidSettingException { final File propertiesFile = cli.getFileArgument(CliParser.ARGUMENT.PROP); if (propertiesFile != null) { try { settings.mergeProperties(propertiesFile); } catch (FileNotFoundException ex) { throw new InvalidSettingException("Unable to find properties file '" + propertiesFile.getPath() + "'", ex); } catch (IOException ex) { throw new InvalidSettingException("Error reading properties file '" + propertiesFile.getPath() + "'", ex); } } final String dataDirectory = cli.getStringArgument(CliParser.ARGUMENT.DATA_DIRECTORY); if (dataDirectory != null) { settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDirectory); } else if (System.getProperty("basedir") != null) { final File dataDir = new File(System.getProperty("basedir"), "data"); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath()); } else { final File jarPath = new File(App.class .getProtectionDomain().getCodeSource().getLocation().getPath()); final File base = jarPath.getParentFile(); final String sub = settings.getString(Settings.KEYS.DATA_DIRECTORY); final File dataDir = new File(base, sub); settings.setString(Settings.KEYS.DATA_DIRECTORY, dataDir.getAbsolutePath()); } final Boolean autoUpdate = cli.hasOption(CliParser.ARGUMENT.DISABLE_AUTO_UPDATE) != null ? false : null; settings.setBooleanIfNotNull(Settings.KEYS.AUTO_UPDATE, autoUpdate); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_SERVER, cli.getStringArgument(CliParser.ARGUMENT.PROXY_SERVER)); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PORT, cli.getStringArgument(CliParser.ARGUMENT.PROXY_PORT)); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_USERNAME, cli.getStringArgument(CliParser.ARGUMENT.PROXY_USERNAME)); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_PASSWORD, cli.getStringArgument(CliParser.ARGUMENT.PROXY_PASSWORD, Settings.KEYS.PROXY_PASSWORD)); settings.setStringIfNotEmpty(Settings.KEYS.PROXY_NON_PROXY_HOSTS, cli.getStringArgument(CliParser.ARGUMENT.NON_PROXY_HOSTS)); settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_TIMEOUT, cli.getStringArgument(CliParser.ARGUMENT.CONNECTION_TIMEOUT)); settings.setStringIfNotEmpty(Settings.KEYS.CONNECTION_READ_TIMEOUT, cli.getStringArgument(CliParser.ARGUMENT.CONNECTION_READ_TIMEOUT)); settings.setStringIfNotEmpty(Settings.KEYS.HINTS_FILE, cli.getStringArgument(CliParser.ARGUMENT.HINTS_FILE)); settings.setArrayIfNotEmpty(Settings.KEYS.SUPPRESSION_FILE, cli.getStringArguments(CliParser.ARGUMENT.SUPPRESSION_FILES)); //File Type Analyzer Settings settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_EXPERIMENTAL_ENABLED, cli.hasOption(CliParser.ARGUMENT.EXPERIMENTAL)); settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIRED_ENABLED, cli.hasOption(CliParser.ARGUMENT.RETIRED)); settings.setStringIfNotNull(Settings.KEYS.ANALYZER_GOLANG_PATH, cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_GO)); settings.setStringIfNotNull(Settings.KEYS.ANALYZER_YARN_PATH, cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_YARN)); settings.setStringIfNotNull(Settings.KEYS.ANALYZER_PNPM_PATH, cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_PNPM)); settings.setBooleanIfNotNull(Settings.KEYS.PRETTY_PRINT, cli.hasOption(CliParser.ARGUMENT.PRETTY_PRINT)); settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_REPO_JS_URL, cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_URL)); settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_REPO_JS_USER, cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_URL_USER)); 
settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_REPO_JS_PASSWORD, cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_URL_PASSWORD)); settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_FORCEUPDATE, cli.hasOption(CliParser.ARGUMENT.RETIRE_JS_FORCEUPDATE)); settings.setStringIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_FILTERS, cli.getStringArgument(CliParser.ARGUMENT.RETIREJS_FILTERS)); settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_RETIREJS_FILTER_NON_VULNERABLE, cli.hasOption(CliParser.ARGUMENT.RETIREJS_FILTER_NON_VULNERABLE)); settings.setBoolean(Settings.KEYS.ANALYZER_JAR_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_JAR, Settings.KEYS.ANALYZER_JAR_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_MSBUILD_PROJECT_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_MSBUILD, Settings.KEYS.ANALYZER_MSBUILD_PROJECT_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_ARCHIVE, Settings.KEYS.ANALYZER_ARCHIVE_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_KNOWN_EXPLOITED_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_KEV, Settings.KEYS.ANALYZER_KNOWN_EXPLOITED_ENABLED)); settings.setStringIfNotNull(Settings.KEYS.KEV_URL, cli.getStringArgument(CliParser.ARGUMENT.KEV_URL)); settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_PY_DIST, Settings.KEYS.ANALYZER_PYTHON_DISTRIBUTION_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_PY_PKG, Settings.KEYS.ANALYZER_PYTHON_PACKAGE_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_AUTOCONF_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_AUTOCONF, Settings.KEYS.ANALYZER_AUTOCONF_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_MAVEN_INSTALL_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_MAVEN_INSTALL, Settings.KEYS.ANALYZER_MAVEN_INSTALL_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_PIP_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_PIP, Settings.KEYS.ANALYZER_PIP_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_PIPFILE_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_PIPFILE, Settings.KEYS.ANALYZER_PIPFILE_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_POETRY_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_POETRY, Settings.KEYS.ANALYZER_POETRY_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_CMAKE_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_CMAKE, Settings.KEYS.ANALYZER_CMAKE_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_NUSPEC_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_NUSPEC, Settings.KEYS.ANALYZER_NUSPEC_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_NUGETCONF_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_NUGETCONF, Settings.KEYS.ANALYZER_NUGETCONF_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_ASSEMBLY, Settings.KEYS.ANALYZER_ASSEMBLY_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_BUNDLE_AUDIT, Settings.KEYS.ANALYZER_BUNDLE_AUDIT_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_FILE_NAME_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_FILENAME, Settings.KEYS.ANALYZER_FILE_NAME_ENABLED)); settings.setBoolean(Settings.KEYS.ANALYZER_MIX_AUDIT_ENABLED, !cli.isDisabled(CliParser.ARGUMENT.DISABLE_MIX_AUDIT, Settings.KEYS.ANALYZER_MIX_AUDIT_ENABLED)); 
settings.setBoolean(Settings.KEYS.ANALYZER_OPENSSL_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_OPENSSL, Settings.KEYS.ANALYZER_OPENSSL_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_COMPOSER, Settings.KEYS.ANALYZER_COMPOSER_LOCK_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CPANFILE_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_CPAN, Settings.KEYS.ANALYZER_CPANFILE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_GOLANG_DEP_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_GO_DEP, Settings.KEYS.ANALYZER_GOLANG_DEP_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_GOLANG_MOD_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_GOLANG_MOD, Settings.KEYS.ANALYZER_GOLANG_MOD_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_DART_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_DART, Settings.KEYS.ANALYZER_DART_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_NODE_JS, Settings.KEYS.ANALYZER_NODE_PACKAGE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_AUDIT_ENABLED, !cli.isNodeAuditDisabled());
settings.setBoolean(Settings.KEYS.ANALYZER_YARN_AUDIT_ENABLED, !cli.isYarnAuditDisabled());
settings.setBoolean(Settings.KEYS.ANALYZER_PNPM_AUDIT_ENABLED, !cli.isPnpmAuditDisabled());
settings.setBoolean(Settings.KEYS.ANALYZER_NODE_AUDIT_USE_CACHE,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_NODE_AUDIT_CACHE, Settings.KEYS.ANALYZER_NODE_AUDIT_USE_CACHE));
settings.setBoolean(Settings.KEYS.ANALYZER_RETIREJS_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_RETIRE_JS, Settings.KEYS.ANALYZER_RETIREJS_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_SWIFT, Settings.KEYS.ANALYZER_SWIFT_PACKAGE_MANAGER_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_SWIFT_PACKAGE_RESOLVED_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_SWIFT_RESOLVED, Settings.KEYS.ANALYZER_SWIFT_PACKAGE_RESOLVED_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_COCOAPODS_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_COCOAPODS, Settings.KEYS.ANALYZER_COCOAPODS_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CARTHAGE_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_CARTHAGE, Settings.KEYS.ANALYZER_CARTHAGE_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_RUBYGEMS, Settings.KEYS.ANALYZER_RUBY_GEMSPEC_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_CENTRAL, Settings.KEYS.ANALYZER_CENTRAL_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_CENTRAL_USE_CACHE,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_CENTRAL_CACHE, Settings.KEYS.ANALYZER_CENTRAL_USE_CACHE));
settings.setBoolean(Settings.KEYS.ANALYZER_OSSINDEX_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_OSSINDEX, Settings.KEYS.ANALYZER_OSSINDEX_ENABLED));
settings.setBoolean(Settings.KEYS.ANALYZER_OSSINDEX_USE_CACHE,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_OSSINDEX_CACHE, Settings.KEYS.ANALYZER_OSSINDEX_USE_CACHE));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_PACKAGE_SKIPDEV,
        cli.hasOption(CliParser.ARGUMENT.NODE_PACKAGE_SKIP_DEV_DEPENDENCIES));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NODE_AUDIT_SKIPDEV,
        cli.hasOption(CliParser.ARGUMENT.DISABLE_NODE_AUDIT_SKIPDEV));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_NEXUS_ENABLED,
        cli.hasOption(CliParser.ARGUMENT.ENABLE_NEXUS));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_CENTRAL_URL,
        cli.getStringArgument(CliParser.ARGUMENT.CENTRAL_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_URL,
        cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_USER,
        cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_USERNAME));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_PASSWORD,
        cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_PASSWORD, Settings.KEYS.ANALYZER_OSSINDEX_PASSWORD));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS,
        cli.getStringArgument(CliParser.ARGUMENT.OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS,
                Settings.KEYS.ANALYZER_OSSINDEX_WARN_ONLY_ON_REMOTE_ERRORS));
settings.setFloat(Settings.KEYS.JUNIT_FAIL_ON_CVSS,
        cli.getFloatArgument(CliParser.ARGUMENT.FAIL_JUNIT_ON_CVSS, 0));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARTIFACTORY_ENABLED,
        cli.hasOption(CliParser.ARGUMENT.ARTIFACTORY_ENABLED));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARTIFACTORY_PARALLEL_ANALYSIS,
        cli.getBooleanArgument(CliParser.ARGUMENT.ARTIFACTORY_PARALLEL_ANALYSIS));
settings.setBooleanIfNotNull(Settings.KEYS.ANALYZER_ARTIFACTORY_USES_PROXY,
        cli.getBooleanArgument(CliParser.ARGUMENT.ARTIFACTORY_USES_PROXY));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_URL,
        cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_API_USERNAME,
        cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_USERNAME));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_API_TOKEN,
        cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_API_TOKEN));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ARTIFACTORY_BEARER_TOKEN,
        cli.getStringArgument(CliParser.ARGUMENT.ARTIFACTORY_BEARER_TOKEN));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_MIX_AUDIT_PATH,
        cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_MIX_AUDIT));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_PATH,
        cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_BUNDLE_AUDIT));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_BUNDLE_AUDIT_WORKING_DIRECTORY,
        cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_BUNDLE_AUDIT_WORKING_DIRECTORY));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_URL,
        cli.getStringArgument(CliParser.ARGUMENT.NEXUS_URL));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_USER,
        cli.getStringArgument(CliParser.ARGUMENT.NEXUS_USERNAME));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_NEXUS_PASSWORD,
        cli.getStringArgument(CliParser.ARGUMENT.NEXUS_PASSWORD, Settings.KEYS.ANALYZER_NEXUS_PASSWORD));
//TODO deprecate this in favor of non-proxy host
final boolean nexusUsesProxy = cli.isNexusUsesProxy();
settings.setBoolean(Settings.KEYS.ANALYZER_NEXUS_USES_PROXY, nexusUsesProxy);
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_NAME,
        cli.getStringArgument(CliParser.ARGUMENT.DB_DRIVER));
settings.setStringIfNotEmpty(Settings.KEYS.DB_DRIVER_PATH,
        cli.getStringArgument(CliParser.ARGUMENT.DB_DRIVER_PATH));
settings.setStringIfNotEmpty(Settings.KEYS.DB_CONNECTION_STRING,
        cli.getStringArgument(CliParser.ARGUMENT.CONNECTION_STRING));
settings.setStringIfNotEmpty(Settings.KEYS.DB_USER,
        cli.getStringArgument(CliParser.ARGUMENT.DB_NAME));
settings.setStringIfNotEmpty(Settings.KEYS.DB_PASSWORD,
        cli.getStringArgument(CliParser.ARGUMENT.DB_PASSWORD, Settings.KEYS.DB_PASSWORD));
settings.setStringIfNotEmpty(Settings.KEYS.ADDITIONAL_ZIP_EXTENSIONS,
        cli.getStringArgument(CliParser.ARGUMENT.ADDITIONAL_ZIP_EXTENSIONS));
settings.setStringIfNotEmpty(Settings.KEYS.ANALYZER_ASSEMBLY_DOTNET_PATH,
        cli.getStringArgument(CliParser.ARGUMENT.PATH_TO_CORE));
String key = cli.getStringArgument(CliParser.ARGUMENT.NVD_API_KEY);
if (key != null) {
    if ((key.startsWith("\"") && key.endsWith("\"")) || (key.startsWith("'") && key.endsWith("'"))) {
        key = key.substring(1, key.length() - 1);
    }
    settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_KEY, key);
}
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_ENDPOINT,
        cli.getStringArgument(CliParser.ARGUMENT.NVD_API_ENDPOINT));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_DELAY,
        cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_DELAY));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_RESULTS_PER_PAGE,
        cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_RESULTS_PER_PAGE));
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_DATAFEED_URL,
        cli.getStringArgument(CliParser.ARGUMENT.NVD_API_DATAFEED_URL));
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_DATAFEED_USER,
        cli.getStringArgument(CliParser.ARGUMENT.NVD_API_DATAFEED_USER));
settings.setStringIfNotEmpty(Settings.KEYS.NVD_API_DATAFEED_PASSWORD,
        cli.getStringArgument(CliParser.ARGUMENT.NVD_API_DATAFEED_PASSWORD));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_MAX_RETRY_COUNT,
        cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_MAX_RETRY_COUNT));
settings.setIntIfNotNull(Settings.KEYS.NVD_API_VALID_FOR_HOURS,
        cli.getIntegerValue(CliParser.ARGUMENT.NVD_API_VALID_FOR_HOURS));
settings.setStringIfNotNull(Settings.KEYS.HOSTED_SUPPRESSIONS_URL,
        cli.getStringArgument(CliParser.ARGUMENT.HOSTED_SUPPRESSIONS_URL));
settings.setBoolean(Settings.KEYS.HOSTED_SUPPRESSIONS_ENABLED,
        !cli.isDisabled(CliParser.ARGUMENT.DISABLE_HOSTED_SUPPRESSIONS, Settings.KEYS.HOSTED_SUPPRESSIONS_ENABLED));
settings.setBooleanIfNotNull(Settings.KEYS.HOSTED_SUPPRESSIONS_FORCEUPDATE,
        cli.hasOption(CliParser.ARGUMENT.HOSTED_SUPPRESSIONS_FORCEUPDATE));
settings.setIntIfNotNull(Settings.KEYS.HOSTED_SUPPRESSIONS_VALID_FOR_HOURS,
        cli.getIntegerValue(CliParser.ARGUMENT.HOSTED_SUPPRESSIONS_VALID_FOR_HOURS));
}
@Test
public void testPopulateSettings() throws Exception {
    File prop = new File(this.getClass().getClassLoader().getResource("sample.properties").toURI().getPath());
    String[] args = {"-P", prop.getAbsolutePath()};
    Map<String, Boolean> expected = new HashMap<>();
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
    assertTrue(testBooleanProperties(args, expected));

    String[] args2 = {"-n"};
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
    assertTrue(testBooleanProperties(args2, expected));

    String[] args3 = {"-h"};
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.TRUE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.TRUE);
    assertTrue(testBooleanProperties(args3, expected));

    String[] args4 = {"--disableArchive"};
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.TRUE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
    assertTrue(testBooleanProperties(args4, expected));

    String[] args5 = {"-P", prop.getAbsolutePath(), "--disableArchive"};
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
    assertTrue(testBooleanProperties(args5, expected));

    prop = new File(this.getClass().getClassLoader().getResource("sample2.properties").toURI().getPath());
    String[] args6 = {"-P", prop.getAbsolutePath(), "--disableArchive"};
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.TRUE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
    assertTrue(testBooleanProperties(args6, expected));

    String[] args7 = {"-P", prop.getAbsolutePath(), "--noupdate"};
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
    assertTrue(testBooleanProperties(args7, expected));

    String[] args8 = {"-P", prop.getAbsolutePath(), "--noupdate", "--disableArchive"};
    expected.put(Settings.KEYS.AUTO_UPDATE, Boolean.FALSE);
    expected.put(Settings.KEYS.ANALYZER_ARCHIVE_ENABLED, Boolean.FALSE);
    assertTrue(testBooleanProperties(args8, expected));
}
public String asPairsWithComment(String comment) {
    return new PropertiesWriter(pairs).writeString(comment);
}
@Test
public void pairsWithMultineComment() {
    assertThat(new OrderedProperties().asPairsWithComment("this\nis\n\ra\rcomment\\"),
            is("# this\n"
                    + "# is\n"
                    + "# a\n"
                    + "# comment\\\n"));
}
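The expected string above prefixes every comment line with "# " and collapses the mixed \n, \r, and \n\r line breaks into single newlines. A minimal, self-contained sketch of that normalization (the helper name is hypothetical; it is not the library's PropertiesWriter):

static String formatComment(String comment) {
    StringBuilder out = new StringBuilder();
    // Treat \r\n, \n\r, \n, and \r all as a single line break.
    for (String line : comment.split("\r\n|\n\r|\n|\r")) {
        out.append("# ").append(line).append('\n');
    }
    return out.toString();
}

// formatComment("this\nis\n\ra\rcomment\\") produces exactly the expected string in the test above.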
@VisibleForTesting
boolean checkSymlink(File jar) {
    if (Files.isSymbolicLink(jar.toPath())) {
        try {
            java.nio.file.Path link = Files.readSymbolicLink(jar.toPath());
            java.nio.file.Path jarPath = Paths.get(jar.getAbsolutePath());
            String linkString = link.toString();
            java.nio.file.Path jarParent = jarPath.getParent();
            java.nio.file.Path linkPath =
                    jarParent == null ? null : jarParent.resolve(linkString);
            java.nio.file.Path linkPathParent =
                    linkPath == null ? null : linkPath.getParent();
            java.nio.file.Path normalizedLinkPath =
                    linkPathParent == null ? null : linkPathParent.normalize();
            if (normalizedLinkPath != null
                    && jarParent.normalize().equals(normalizedLinkPath)) {
                LOG.info(String.format("Ignoring same directory link %s to %s",
                        jarPath.toString(), link.toString()));
                return true;
            }
        } catch (NotLinkException ex) {
            LOG.debug("Not a link {}", jar);
        } catch (IOException ex) {
            LOG.warn("Cannot read symbolic link on {}", jar);
        }
    }
    return false;
}
@Test
void testNativeIO() throws IOException {
    FrameworkUploader uploader = new FrameworkUploader();
    File parent = new File(testDir);
    try {
        // Create a parent directory
        parent.deleteOnExit();
        assertTrue(parent.mkdirs());

        // Create a target file
        File targetFile = new File(parent, "a.txt");
        try (FileOutputStream os = new FileOutputStream(targetFile)) {
            IOUtils.writeLines(Lists.newArrayList("a", "b"), null, os, StandardCharsets.UTF_8);
        }
        assertFalse(uploader.checkSymlink(targetFile));

        // Create a symlink to the target
        File symlinkToTarget = new File(parent, "symlinkToTarget.txt");
        try {
            Files.createSymbolicLink(
                    Paths.get(symlinkToTarget.getAbsolutePath()),
                    Paths.get(targetFile.getAbsolutePath()));
        } catch (UnsupportedOperationException e) {
            // Symlinks are not supported, so ignore the test
            Assumptions.assumeTrue(false);
        }
        assertTrue(uploader.checkSymlink(symlinkToTarget));

        // Create a symlink to the target with /./ in the path
        symlinkToTarget = new File(parent.getAbsolutePath() + "/./symlinkToTarget2.txt");
        try {
            Files.createSymbolicLink(
                    Paths.get(symlinkToTarget.getAbsolutePath()),
                    Paths.get(targetFile.getAbsolutePath()));
        } catch (UnsupportedOperationException e) {
            // Symlinks are not supported, so ignore the test
            Assumptions.assumeTrue(false);
        }
        assertTrue(uploader.checkSymlink(symlinkToTarget));

        // Create a symlink outside the current directory
        File symlinkOutside = new File(parent, "symlinkToParent.txt");
        try {
            Files.createSymbolicLink(
                    Paths.get(symlinkOutside.getAbsolutePath()),
                    Paths.get(parent.getAbsolutePath()));
        } catch (UnsupportedOperationException e) {
            // Symlinks are not supported, so ignore the test
            Assumptions.assumeTrue(false);
        }
        assertFalse(uploader.checkSymlink(symlinkOutside));
    } finally {
        FileUtils.forceDelete(parent);
    }
}
@Override
public boolean isSecure() {
    return delegate.isSecure();
}
@Test
public void delegate_methods_for_cookie() {
    javax.servlet.http.Cookie mockCookie = new javax.servlet.http.Cookie("name", "value");
    mockCookie.setSecure(true);
    mockCookie.setPath("path");
    mockCookie.setHttpOnly(true);
    mockCookie.setMaxAge(100);

    Cookie cookie = new JavaxHttpRequest.JavaxCookie(mockCookie);

    assertThat(cookie.getName()).isEqualTo("name");
    assertThat(cookie.getValue()).isEqualTo("value");
    assertThat(cookie.getPath()).isEqualTo("path");
    assertThat(cookie.isSecure()).isTrue();
    assertThat(cookie.isHttpOnly()).isTrue();
    assertThat(cookie.getMaxAge()).isEqualTo(100);
}
static void dissectReplaySessionError(
        final MutableDirectBuffer buffer, final int offset, final StringBuilder builder) {
    int absoluteOffset = offset;
    absoluteOffset += dissectLogHeader(CONTEXT, REPLAY_SESSION_ERROR, buffer, absoluteOffset, builder);

    final long sessionId = buffer.getLong(absoluteOffset, LITTLE_ENDIAN);
    absoluteOffset += SIZE_OF_LONG;

    final long recordingId = buffer.getLong(absoluteOffset, LITTLE_ENDIAN);
    absoluteOffset += SIZE_OF_LONG;

    builder.append(": sessionId=").append(sessionId);
    builder.append(" recordingId=").append(recordingId);
    builder.append(" errorMessage=");
    buffer.getStringAscii(absoluteOffset, builder);
}
@Test
void replaySessionError() {
    internalEncodeLogHeader(buffer, 0, 6, 100, () -> 5_600_000_000L);
    buffer.putLong(LOG_HEADER_LENGTH, -8, LITTLE_ENDIAN);
    buffer.putLong(LOG_HEADER_LENGTH + SIZE_OF_LONG, 42, LITTLE_ENDIAN);
    buffer.putStringAscii(LOG_HEADER_LENGTH + SIZE_OF_LONG * 2, "something went wrong");

    dissectReplaySessionError(buffer, 0, builder);

    assertEquals("[5.600000000] " + CONTEXT + ": " + REPLAY_SESSION_ERROR.name() + " [6/100]:"
            + " sessionId=-8 recordingId=42 errorMessage=something went wrong", builder.toString());
}
private Function<KsqlConfig, Kudf> getUdfFactory(
        final Method method,
        final UdfDescription udfDescriptionAnnotation,
        final String functionName,
        final FunctionInvoker invoker,
        final String sensorName
) {
    return ksqlConfig -> {
        final Object actualUdf = FunctionLoaderUtils.instantiateFunctionInstance(
                method.getDeclaringClass(), udfDescriptionAnnotation.name());
        if (actualUdf instanceof Configurable) {
            ExtensionSecurityManager.INSTANCE.pushInUdf();
            try {
                ((Configurable) actualUdf)
                        .configure(ksqlConfig.getKsqlFunctionsConfigProps(functionName));
            } finally {
                ExtensionSecurityManager.INSTANCE.popOutUdf();
            }
        }
        final PluggableUdf theUdf = new PluggableUdf(invoker, actualUdf);
        return metrics.<Kudf>map(m -> new UdfMetricProducer(
                m.getSensor(sensorName),
                theUdf,
                Time.SYSTEM
        )).orElse(theUdf);
    };
}
@Test
public void shouldLoadFunctionWithStructReturnType() {
    // Given:
    final UdfFactory toStruct = FUNC_REG.getUdfFactory(FunctionName.of("tostruct"));

    // When:
    final List<SqlArgument> args = Collections.singletonList(SqlArgument.of(SqlTypes.STRING));
    final KsqlScalarFunction function = toStruct.getFunction(args);

    // Then:
    assertThat(function.getReturnType(args), equalTo(SqlTypes.struct()
            .field("A", SqlTypes.STRING)
            .build())
    );
}
public static NetworkEndpoint forHostnameAndPort(String hostname, int port) {
    checkArgument(
            0 <= port && port <= MAX_PORT_NUMBER,
            "Port out of range. Expected [0, %s], actual %s.",
            MAX_PORT_NUMBER,
            port);
    return forHostname(hostname).toBuilder()
            .setType(NetworkEndpoint.Type.HOSTNAME_PORT)
            .setPort(Port.newBuilder().setPortNumber(port))
            .build();
}
@Test
public void forHostnameAndPort_withHostnameAndPort_returnsHostnameAndPortNetworkEndpoint() {
    assertThat(NetworkEndpointUtils.forHostnameAndPort("localhost", 8888))
            .isEqualTo(
                    NetworkEndpoint.newBuilder()
                            .setType(NetworkEndpoint.Type.HOSTNAME_PORT)
                            .setPort(Port.newBuilder().setPortNumber(8888))
                            .setHostname(Hostname.newBuilder().setName("localhost"))
                            .build());
}
void updateSlobrokList(ApplicationInfo application) {
    List<String> slobrokSpecs = getSlobrokSpecs(application);
    slobrokList.setup(slobrokSpecs.toArray(new String[0]));
}
@Test
public void testUpdateSlobrokList() {
    ApplicationInfo applicationInfo = ExampleModel.createApplication(
            "tenant",
            "application-name")
            .build();
}
public static <T> ExtensionLoader<T> getExtensionLoader(final Class<T> clazz, final ClassLoader cl) {
    Objects.requireNonNull(clazz, "extension clazz is null");
    if (!clazz.isInterface()) {
        throw new IllegalArgumentException("extension clazz (" + clazz + ") is not interface!");
    }
    if (!clazz.isAnnotationPresent(SPI.class)) {
        throw new IllegalArgumentException("extension clazz (" + clazz + ") without @" + SPI.class + " Annotation");
    }
    ExtensionLoader<T> extensionLoader = (ExtensionLoader<T>) LOADERS.get(clazz);
    if (Objects.nonNull(extensionLoader)) {
        return extensionLoader;
    }
    LOADERS.putIfAbsent(clazz, new ExtensionLoader<>(clazz, cl));
    return (ExtensionLoader<T>) LOADERS.get(clazz);
}
@Test
public void loadResourcesIOException() throws NoSuchMethodException, MalformedURLException, IllegalAccessException {
    Method loadResourcesMethod = getLoadResources();
    ExtensionLoader<JdbcSPI> extensionLoader = ExtensionLoader.getExtensionLoader(JdbcSPI.class);
    try {
        loadResourcesMethod.invoke(extensionLoader, new HashMap<>(),
                new URL("file:/org.apache.shenyu.spi.fixture.NoExistSPI"));
        fail();
    } catch (InvocationTargetException expect) {
        assertThat(expect.getTargetException().getMessage(),
                containsString("load extension resources error"));
    }
}
@Nullable
static String route(ContainerRequest request) {
    ExtendedUriInfo uriInfo = request.getUriInfo();
    List<UriTemplate> templates = uriInfo.getMatchedTemplates();
    int templateCount = templates.size();
    if (templateCount == 0) return "";

    StringBuilder builder = null; // don't allocate unless you need it!
    String basePath = uriInfo.getBaseUri().getPath();
    String result = null;

    if (!"/".equals(basePath)) { // skip empty base paths
        result = basePath;
    }

    for (int i = templateCount - 1; i >= 0; i--) {
        String template = templates.get(i).getTemplate();
        if ("/".equals(template)) continue; // skip allocation
        if (builder != null) {
            builder.append(template);
        } else if (result != null) {
            builder = new StringBuilder(result).append(template);
            result = null;
        } else {
            result = template;
        }
    }

    return result != null ? result : builder != null ? builder.toString() : "";
}
@Test
void route_basePath() {
    setBaseUri("/base");
    when(uriInfo.getMatchedTemplates()).thenReturn(Arrays.asList(
            new PathTemplate("/"),
            new PathTemplate("/items/{itemId}")
    ));

    assertThat(SpanCustomizingApplicationEventListener.route(request))
            .isEqualTo("/base/items/{itemId}");
}
public MemoryLRUCacheBytesIterator reverseRange(final String namespace, final Bytes from, final Bytes to) {
    final NamedCache cache = getCache(namespace);
    if (cache == null) {
        return new MemoryLRUCacheBytesIterator(Collections.emptyIterator(),
                new NamedCache(namespace, this.metrics));
    }
    return new MemoryLRUCacheBytesIterator(cache.reverseKeyRange(from, to), cache);
}
@Test
public void shouldGetSameKeyAsPeekNextReverseRange() {
    final ThreadCache cache = setupThreadCache(1, 1, 10000L, true);
    final Bytes theByte = Bytes.wrap(new byte[]{1});
    final ThreadCache.MemoryLRUCacheBytesIterator iterator =
            cache.reverseRange(namespace, Bytes.wrap(new byte[]{0}), theByte);
    assertThat(iterator.peekNextKey(), is(iterator.next().key));
}
private static GuardedByExpression bind(JCTree.JCExpression exp, BinderContext context) {
    GuardedByExpression expr = BINDER.visit(exp, context);
    checkGuardedBy(expr != null, String.valueOf(exp));
    checkGuardedBy(expr.kind() != Kind.TYPE_LITERAL, "Raw type literal: %s", exp);
    return expr;
}
@Test
public void implicitThisOuterClassMethod() {
    assertThat(
            bind(
                    "Inner",
                    "endpoint().lock()",
                    forSourceLines(
                            "threadsafety/Test.java",
                            "package threadsafety;",
                            "class Outer {",
                            "  class Endpoint {",
                            "    Object lock() { return null; }",
                            "  }",
                            "  Endpoint endpoint() { return null; }",
                            "  class Inner {",
                            "    int x;",
                            "  }",
                            "}")))
            .isEqualTo("(SELECT (SELECT (SELECT (THIS) outer$threadsafety.Outer) endpoint()) lock())");
}
@Override
public PipelineConfigs getLocal() {
    for (PipelineConfigs part : this.parts) {
        if (part.isLocal()) {
            return part;
        }
    }
    return null;
}
@Test
public void shouldReturnFilePartForGetLocalWhenHasRemoteAndFilePart() {
    BasicPipelineConfigs filePart = new BasicPipelineConfigs();
    filePart.setOrigin(new FileConfigOrigin());

    BasicPipelineConfigs secondPart = new BasicPipelineConfigs();
    secondPart.setOrigin(new RepoConfigOrigin());

    MergePipelineConfigs merge = new MergePipelineConfigs(filePart, secondPart);
    assertThat(merge.getLocal(), Matchers.is(filePart));
}
@Override
public JreInfoRestResponse getJreMetadata(String id) {
    return jresHandler.getJreMetadata(id);
}
@Test
void getJre_shouldReturnMetadataAsJson() throws Exception {
    String anyId = "anyId";
    JreInfoRestResponse jreInfoRestResponse =
            new JreInfoRestResponse(anyId, "filename", "sha256", "javaPath", "os", "arch");
    when(jresHandler.getJreMetadata(anyId)).thenReturn(jreInfoRestResponse);
    String expectedJson = "{\"id\":\"" + anyId + "\",\"filename\":\"filename\",\"sha256\":\"sha256\","
            + "\"javaPath\":\"javaPath\",\"os\":\"os\",\"arch\":\"arch\"}";
    mockMvc.perform(get(JRE_ENDPOINT + "/" + anyId))
            .andExpect(status().isOk())
            .andExpect(content().json(expectedJson));
}
@UdafFactory(description = "Compute average of column with type Long.",
        aggregateSchema = "STRUCT<SUM bigint, COUNT bigint>")
public static TableUdaf<Long, Struct, Double> averageLong() {
    return getAverageImplementation(
            0L,
            STRUCT_LONG,
            (sum, newValue) -> sum.getInt64(SUM) + newValue,
            (sum, count) -> ((double) sum.getInt64(SUM)) / count,
            (sum1, sum2) -> sum1.getInt64(SUM) + sum2.getInt64(SUM),
            (sum, valueToUndo) -> sum.getInt64(SUM) - valueToUndo);
}
@Test
public void shouldAggregateLongs() {
    final TableUdaf<Long, Struct, Double> udaf = AverageUdaf.averageLong();
    Struct agg = udaf.initialize();
    final Long[] values = new Long[] {1L, 1L, 1L, 1L, 1L};
    for (final Long thisValue : values) {
        agg = udaf.aggregate(thisValue, agg);
    }
    assertThat(5L, equalTo(agg.getInt64("COUNT")));
    assertThat(5L, equalTo(agg.getInt64("SUM")));
}
@Override
public SelResult childrenAccept(SelParserVisitor visitor, Object data) {
    SelResult res = SelResult.NONE;
    if (children != null) {
        for (int i = 0; i < children.length; ++i) {
            res = (SelResult) children[i].jjtAccept(visitor, data);
            switch (res) {
                case BREAK:
                    return SelResult.BREAK;
                case CONTINUE:
                    return SelResult.CONTINUE;
                case RETURN:
                    return SelResult.RETURN;
            }
        }
    }
    return res;
}
@Test
public void testVisitedReturnNode() {
    root.jjtAddChild(returnNode, 2);
    root.jjtAddChild(returnNode, 1);
    root.jjtAddChild(returnNode, 0);
    SelResult res = root.childrenAccept(null, null);
    assertEquals(SelResult.RETURN, res);
    assertArrayEquals(new int[] {0, 0, 1, 0, 0}, visited);
}
public static TimeUnit getMetricsDurationUnit(Map<String, Object> daemonConf) {
    return getTimeUnitForConfig(daemonConf, Config.STORM_DAEMON_METRICS_REPORTER_PLUGIN_DURATION_UNIT);
}
@Test
public void getMetricsDurationUnit() {
    Map<String, Object> daemonConf = new HashMap<>();
    assertNull(MetricsUtils.getMetricsDurationUnit(daemonConf));

    daemonConf.put(Config.STORM_DAEMON_METRICS_REPORTER_PLUGIN_DURATION_UNIT, "SECONDS");
    assertEquals(TimeUnit.SECONDS, MetricsUtils.getMetricsDurationUnit(daemonConf));

    daemonConf.put(Config.STORM_DAEMON_METRICS_REPORTER_PLUGIN_DURATION_UNIT, "MINUTES");
    assertEquals(TimeUnit.MINUTES, MetricsUtils.getMetricsDurationUnit(daemonConf));
}
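The test pins down the contract: a missing key yields null, and a configured string is parsed as a TimeUnit constant. A plausible sketch of such a lookup (hedged: this is not Storm's actual getTimeUnitForConfig, just one way to satisfy the test):

import java.util.Locale;
import java.util.Map;
import java.util.concurrent.TimeUnit;

final class TimeUnitConfig {
    // Absent key -> null; otherwise rely on the enum's own parser.
    static TimeUnit timeUnitForConfig(Map<String, Object> conf, String key) {
        Object value = conf.get(key);
        return value == null ? null : TimeUnit.valueOf(value.toString().toUpperCase(Locale.ROOT));
    }
}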
public static ExecutorService getSingleThreadExecutor() {
    setup();
    return singleThreadExecutor;
}
@Test
public void singleThread() {
    ExecutorService a = SharedExecutors.getSingleThreadExecutor();
    assertNotNull("ExecutorService must not be null", a);
    ExecutorService b = SharedExecutors.getSingleThreadExecutor();
    assertSame("factories should be same", a, b);
}
static <T, W extends BoundedWindow>
        ThrowingFunction<KV<T, Iterable<W>>, KV<T, KV<Iterable<W>, Iterable<KV<W, Iterable<W>>>>>>
        createMapFunctionForPTransform(String ptransformId, PTransform ptransform) throws IOException {
    RunnerApi.FunctionSpec payload =
            RunnerApi.FunctionSpec.parseFrom(ptransform.getSpec().getPayload());
    WindowFn<?, W> windowFn =
            (WindowFn<?, W>) WindowingStrategyTranslation.windowFnFromProto(payload);
    return WindowMergingFnRunner.<T, W>create(windowFn)::mergeWindows;
}
@Test
public void testWindowMergingWithMergingWindowFn() throws Exception {
    ThrowingFunction<
            KV<Object, Iterable<BoundedWindow>>,
            KV<Object, KV<Iterable<BoundedWindow>, Iterable<KV<BoundedWindow, Iterable<BoundedWindow>>>>>>
            mapFunction =
                    WindowMergingFnRunner.createMapFunctionForPTransform(
                            "ptransformId",
                            createMergeTransformForWindowFn(Sessions.withGapDuration(Duration.millis(5L))));

    // 7, 8 and 10 should all be merged. 1 and 20 should remain in the original set.
    BoundedWindow[] expectedToBeMerged =
            new BoundedWindow[] {
                new IntervalWindow(new Instant(9L), new Instant(11L)),
                new IntervalWindow(new Instant(10L), new Instant(10L)),
                new IntervalWindow(new Instant(7L), new Instant(10L))
            };
    Iterable<BoundedWindow> expectedToBeUnmerged =
            Sets.newHashSet(
                    new IntervalWindow(new Instant(1L), new Instant(1L)),
                    new IntervalWindow(new Instant(20L), new Instant(20L)));
    KV<Object, Iterable<BoundedWindow>> input =
            KV.of(
                    "abc",
                    ImmutableList.<BoundedWindow>builder()
                            .add(expectedToBeMerged)
                            .addAll(expectedToBeUnmerged)
                            .build());
    KV<Object, KV<Iterable<BoundedWindow>, Iterable<KV<BoundedWindow, Iterable<BoundedWindow>>>>> output =
            mapFunction.apply(input);

    assertEquals(input.getKey(), output.getKey());
    assertEquals(expectedToBeUnmerged, output.getValue().getKey());
    KV<BoundedWindow, Iterable<BoundedWindow>> mergedOutput =
            Iterables.getOnlyElement(output.getValue().getValue());
    assertEquals(new IntervalWindow(new Instant(7L), new Instant(11L)), mergedOutput.getKey());
    assertThat(mergedOutput.getValue(), containsInAnyOrder(expectedToBeMerged));

    // Process a new group of windows, make sure that previous result has been cleaned up.
    BoundedWindow[] expectedToBeMergedGroup2 =
            new BoundedWindow[] {
                new IntervalWindow(new Instant(15L), new Instant(17L)),
                new IntervalWindow(new Instant(16L), new Instant(18L))
            };
    input =
            KV.of(
                    "abc",
                    ImmutableList.<BoundedWindow>builder()
                            .add(expectedToBeMergedGroup2)
                            .addAll(expectedToBeUnmerged)
                            .build());
    output = mapFunction.apply(input);

    assertEquals(input.getKey(), output.getKey());
    assertEquals(expectedToBeUnmerged, output.getValue().getKey());
    mergedOutput = Iterables.getOnlyElement(output.getValue().getValue());
    assertEquals(new IntervalWindow(new Instant(15L), new Instant(18L)), mergedOutput.getKey());
    assertThat(mergedOutput.getValue(), containsInAnyOrder(expectedToBeMergedGroup2));
}
@Override
public final String toString() {
    StringBuilder out = new StringBuilder();
    appendTo(out);
    return out.toString();
}
@Test
void requireThatPredicatesCanBeBuiltUsingSeparateMethodCalls() {
    Conjunction conjunction = new Conjunction();

    FeatureSet countrySet = new FeatureSet("country");
    countrySet.addValue("no");
    countrySet.addValue("se");
    conjunction.addOperand(countrySet);

    FeatureRange ageRange = new FeatureRange("age");
    ageRange.setFromInclusive(20L);
    conjunction.addOperand(ageRange);

    FeatureRange heightRange = new FeatureRange("height");
    heightRange.setToInclusive(160L);
    conjunction.addOperand(heightRange);

    assertEquals("country in [no, se] and age in [20..] and height in [..160]", conjunction.toString());
}
public boolean compareAndSet(T expectedState, T newState) {
    checkState(!Thread.holdsLock(lock), "Can not set state while holding the lock");
    requireNonNull(expectedState, "expectedState is null");
    requireNonNull(newState, "newState is null");

    FutureStateChange<T> futureStateChange;
    ImmutableList<StateChangeListener<T>> stateChangeListeners;
    synchronized (lock) {
        if (!state.equals(expectedState)) {
            return false;
        }

        // change to same state is not a change, and does not notify the listeners
        if (state.equals(newState)) {
            return false;
        }

        checkState(!isTerminalState(state), "%s can not transition from %s to %s", name, state, newState);

        state = newState;

        futureStateChange = this.futureStateChange.getAndSet(new FutureStateChange<>());
        stateChangeListeners = ImmutableList.copyOf(this.stateChangeListeners);

        // if we are now in a terminal state, free the listeners since this will be the last notification
        if (isTerminalState(state)) {
            this.stateChangeListeners.clear();
        }
    }

    fireStateChanged(newState, futureStateChange, stateChangeListeners);
    return true;
}
@Test
public void testCompareAndSet() throws Exception {
    StateMachine<State> stateMachine =
            new StateMachine<>("test", executor, State.BREAKFAST, ImmutableSet.of(State.DINNER));
    assertEquals(stateMachine.get(), State.BREAKFAST);

    // no match with new state
    assertNoStateChange(stateMachine, () -> stateMachine.compareAndSet(State.DINNER, State.LUNCH));

    // match with new state
    assertStateChange(stateMachine, () -> stateMachine.compareAndSet(State.BREAKFAST, State.LUNCH), State.LUNCH);

    // no match with same state
    assertNoStateChange(stateMachine, () -> stateMachine.compareAndSet(State.BREAKFAST, State.LUNCH));

    // match with same state
    assertNoStateChange(stateMachine, () -> stateMachine.compareAndSet(State.LUNCH, State.LUNCH));

    // transition to a final state
    assertStateChange(stateMachine, () -> stateMachine.compareAndSet(State.LUNCH, State.DINNER), State.DINNER);

    // attempt transition from a final state
    assertNoStateChange(stateMachine, () -> {
        try {
            stateMachine.compareAndSet(State.DINNER, State.LUNCH);
            fail("expected IllegalStateException");
        } catch (IllegalStateException expected) {
        }
    });
    assertNoStateChange(stateMachine, () -> stateMachine.compareAndSet(State.DINNER, State.DINNER));
}
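The focal compareAndSet adds one wrinkle over a plain CAS: swapping a state for itself is reported as "no change" (false), so listeners never fire on no-ops. A stripped-down sketch of that rule using java.util.concurrent's AtomicReference (assumptions: no listeners, no terminal-state check; enum states, where reference identity and equality coincide):

import java.util.concurrent.atomic.AtomicReference;

final class DistinctCas {
    // Returns true only for a real transition: expected matched AND the value actually changed.
    static <T> boolean compareAndSetDistinct(AtomicReference<T> ref, T expected, T next) {
        if (expected.equals(next)) {
            return false; // a transition to the same state is not a change
        }
        return ref.compareAndSet(expected, next);
    }
}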
@Override
public void createDataStream(String dataStreamName, String timestampField,
        Map<String, Map<String, String>> mappings, Policy ismPolicy) {
    updateDataStreamTemplate(dataStreamName, timestampField, mappings);
    dataStreamAdapter.createDataStream(dataStreamName);
    dataStreamAdapter.applyIsmPolicy(dataStreamName, ismPolicy);
    dataStreamAdapter.setNumberOfReplicas(dataStreamName, replicas);
}
@SuppressWarnings("unchecked") @Test public void templateDoesNotOverwriteTimestampMapping() { final Map<String, Map<String, String>> mappings = new HashMap<>(); String ts = "ts"; mappings.put(ts, Map.of("type", "date", "format", "mycustomformat")); dataStreamService.createDataStream("teststream", ts, mappings, mock(Policy.class)); ArgumentCaptor<Template> templateCaptor = ArgumentCaptor.forClass(Template.class); verify(dataStreamAdapter).ensureDataStreamTemplate(anyString(), templateCaptor.capture(), anyString()); Map<String, Object> fieldMappings = (Map<String, Object>) templateCaptor.getValue().mappings().get("properties"); Map<String, String> timestampMapping = (Map<String, String>) fieldMappings.get(ts); assertThat(timestampMapping).isNotNull(); assertThat(timestampMapping.get("type")).isEqualTo("date"); assertThat(timestampMapping.get("format")).isEqualTo("mycustomformat"); }
@Override
public ScalarOperator visitMultiInPredicate(MultiInPredicateOperator operator, Void context) {
    return shuttleIfUpdate(operator);
}
@Test
void testMultiInPredicateOperator() {
    ColumnRefOperator column1 = new ColumnRefOperator(1, INT, "id", true);
    MultiInPredicateOperator operator = new MultiInPredicateOperator(false,
            Lists.newArrayList(column1, column1), Lists.newArrayList(column1, column1));
    {
        ScalarOperator newOperator = shuttle.visitMultiInPredicate(operator, null);
        assertEquals(operator, newOperator);
    }
    {
        ScalarOperator newOperator = shuttle2.visitMultiInPredicate(operator, null);
        assertEquals(operator, newOperator);
    }
}
public double[] rowMeans() {
    double[] x = rowSums();
    for (int i = 0; i < m; i++) {
        x[i] /= n;
    }
    return x;
}
@Test
public void testRowMeans() {
    System.out.println("rowMeans");
    double[][] A = {
        { 0.7220180,  0.07121225, 0.6881997f},
        {-0.2648886, -0.89044952, 0.3700456f},
        {-0.6391588,  0.44947578, 0.6240573f}
    };
    double[] r = {0.4938100, -0.2617642, 0.1447914f};

    double[] result = Matrix.of(A).rowMeans();
    for (int i = 0; i < r.length; i++) {
        assertEquals(result[i], r[i], 1E-7);
    }
}
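rowMeans is just rowSums scaled by the column count n. For readers without the Matrix class at hand, the same computation over a plain double[][] (a sketch, not smile's implementation):

static double[] rowMeans(double[][] a) {
    double[] means = new double[a.length];
    for (int i = 0; i < a.length; i++) {
        double sum = 0.0;
        for (double v : a[i]) {
            sum += v; // accumulate the row sum
        }
        means[i] = sum / a[i].length; // divide by the number of columns
    }
    return means;
}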
public static void print(final Options options) {
    print(options, new TerminalHelpFormatter());
}
@Test
@Ignore
public void testPrintWidth20DefaultFormatter() {
    final HelpFormatter f = new TerminalHelpFormatter();
    f.setWidth(20);
    TerminalHelpPrinter.print(TerminalOptionsBuilder.options(), f);
}
public void checkIfAnyComponentsNeedIssueSync(DbSession dbSession, List<String> componentKeys) {
    boolean isAppOrViewOrSubview = dbClient.componentDao()
            .existAnyOfComponentsWithQualifiers(dbSession, componentKeys, APP_VIEW_OR_SUBVIEW);
    boolean needIssueSync;
    if (isAppOrViewOrSubview) {
        needIssueSync = dbClient.branchDao().hasAnyBranchWhereNeedIssueSync(dbSession, true);
    } else {
        needIssueSync = dbClient.branchDao().doAnyOfComponentsNeedIssueSync(dbSession, componentKeys);
    }
    if (needIssueSync) {
        throw new EsIndexSyncInProgressException(IssueIndexDefinition.TYPE_ISSUE.getMainType(),
                "Results are temporarily unavailable. Indexing of issues is in progress.");
    }
}
@Test
public void checkIfAnyComponentsNeedIssueSync_throws_exception_if_at_least_one_component_has_need_issue_sync_TRUE() {
    ProjectData projectData1 = insertProjectWithBranches(false, 0);
    ProjectData projectData2 = insertProjectWithBranches(true, 0);

    DbSession session = db.getSession();
    List<String> projectKeys = Arrays.asList(projectData1.getProjectDto().getKey(),
            projectData2.getProjectDto().getKey());

    assertThatThrownBy(() -> underTest.checkIfAnyComponentsNeedIssueSync(session, projectKeys))
            .isInstanceOf(EsIndexSyncInProgressException.class)
            .hasFieldOrPropertyWithValue("httpCode", 503)
            .hasMessage("Results are temporarily unavailable. Indexing of issues is in progress.");
}
public void connect() throws ConnectException {
    connect(s -> {}, t -> {}, () -> {});
}
@Test
public void testConnectViaWebSocketClient() throws Exception {
    service.connect();
    verify(webSocketClient).connectBlocking();
}
protected synchronized Timestamp convertStringToTimestamp( String string ) throws KettleValueException {
    // See if trimming needs to be performed before conversion
    // string = Const.trimToType( string, getTrimType() );
    if ( Utils.isEmpty( string ) ) {
        return null;
    }
    Timestamp returnValue;
    try {
        returnValue = Timestamp.valueOf( string );
    } catch ( IllegalArgumentException e ) {
        try {
            returnValue = (Timestamp) getDateFormat().parse( string );
        } catch ( ParseException ex ) {
            throw new KettleValueException( toString() + " : couldn't convert string [" + string
                + "] to a timestamp, expecting format [yyyy-mm-dd hh:mm:ss.ffffff]", e );
        }
    }
    return returnValue;
}
@Test
public void testConvertStringToTimestamp() throws Exception {
    ValueMetaTimestamp valueMetaTimestamp = new ValueMetaTimestamp();
    assertEquals( Timestamp.valueOf( "2012-04-05 04:03:02.123456" ),
        valueMetaTimestamp.convertStringToTimestamp( "2012/4/5 04:03:02.123456" ) );
    assertEquals( Timestamp.valueOf( "2012-04-05 04:03:02.123" ),
        valueMetaTimestamp.convertStringToTimestamp( "2012/4/5 04:03:02.123" ) );
    assertEquals( Timestamp.valueOf( "2012-04-05 04:03:02.123456789" ),
        valueMetaTimestamp.convertStringToTimestamp( "2012/4/5 04:03:02.123456789" ) );
}
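The first branch of convertStringToTimestamp relies on java.sql.Timestamp.valueOf, whose JDBC escape format is yyyy-[m]m-[d]d hh:mm:ss[.f...] with up to nine fractional digits. A quick, self-contained check of that JDK behavior:

import java.sql.Timestamp;

public class TimestampValueOfDemo {
    public static void main(String[] args) {
        // Nanosecond precision survives the round trip through the JDBC escape format.
        Timestamp ts = Timestamp.valueOf("2012-04-05 04:03:02.123456789");
        System.out.println(ts.getNanos()); // prints 123456789
    }
}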
@Override
public boolean retainAll(Collection<?> c) {
    throw new UnsupportedOperationException("LazySet is not modifiable");
}
@Test(expected = UnsupportedOperationException.class)
public void testRetainAll_throwsException() {
    set.retainAll(Collections.emptyList());
}
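The JDK's unmodifiable views enforce the same contract, which makes it easy to try outside the test harness:

import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class UnmodifiableSetDemo {
    public static void main(String[] args) {
        Set<String> set = Collections.unmodifiableSet(new HashSet<>(List.of("a", "b")));
        try {
            set.retainAll(List.of("a")); // all mutators throw on an unmodifiable view
        } catch (UnsupportedOperationException expected) {
            System.out.println("retainAll rejected, matching LazySet's contract");
        }
    }
}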
public static RowRanges calculateRowRanges(
        FilterCompat.Filter filter, ColumnIndexStore columnIndexStore, Set<ColumnPath> paths, long rowCount) {
    return filter.accept(new FilterCompat.Visitor<RowRanges>() {
        @Override
        public RowRanges visit(FilterPredicateCompat filterPredicateCompat) {
            try {
                return filterPredicateCompat
                        .getFilterPredicate()
                        .accept(new ColumnIndexFilter(columnIndexStore, paths, rowCount));
            } catch (MissingOffsetIndexException e) {
                LOGGER.info(e.getMessage());
                return RowRanges.createSingle(rowCount);
            }
        }

        @Override
        public RowRanges visit(UnboundRecordFilterCompat unboundRecordFilterCompat) {
            return RowRanges.createSingle(rowCount);
        }

        @Override
        public RowRanges visit(NoOpFilter noOpFilter) {
            return RowRanges.createSingle(rowCount);
        }
    });
}
@Test
public void testFilteringOnMissingColumns() {
    Set<ColumnPath> paths = paths("column1", "column2", "column3", "column4");

    // Missing column filter is always true
    assertAllRows(
            calculateRowRanges(
                    FilterCompat.get(notEq(intColumn("missing_column"), 0)),
                    STORE, paths, TOTAL_ROW_COUNT),
            TOTAL_ROW_COUNT);
    assertRows(
            calculateRowRanges(
                    FilterCompat.get(and(
                            and(gtEq(intColumn("column1"), 7), lt(intColumn("column1"), 11)),
                            eq(binaryColumn("missing_column"), null))),
                    STORE, paths, TOTAL_ROW_COUNT),
            7, 8, 9, 10, 11, 12, 13);

    // Missing column filter is always false
    assertRows(
            calculateRowRanges(
                    FilterCompat.get(or(
                            and(gtEq(intColumn("column1"), 7), lt(intColumn("column1"), 11)),
                            notEq(binaryColumn("missing_column"), null))),
                    STORE, paths, TOTAL_ROW_COUNT),
            7, 8, 9, 10, 11, 12, 13);
    assertRows(calculateRowRanges(
            FilterCompat.get(gt(intColumn("missing_column"), 0)),
            STORE, paths, TOTAL_ROW_COUNT));
}
private void processData() {
    Map<String, InstanceInfo> instanceInfoMap = InstanceCache.INSTANCE_MAP;
    if (instanceInfoMap.isEmpty()) {
        LOGGER.log(Level.FINE, "Instance information is empty");
        return;
    }
    for (Iterator<Map.Entry<String, InstanceInfo>> iterator = instanceInfoMap.entrySet().iterator();
            iterator.hasNext(); ) {
        InstanceInfo info = iterator.next().getValue();
        if (System.currentTimeMillis() - info.getLastInvokeTime() >= removalConfig.getExpireTime()) {
            iterator.remove();
            if (info.getRemovalStatus().get()) {
                removalEventService.reportRemovalEvent(info);
            }
            LOGGER.info("Instance information expires, remove instance information");
            continue;
        }
        saveRequestCountData(info);
        LOGGER.log(Level.FINE, "The Instance information is {0}", info);
    }
}
@Test
public void testProcessData() throws InterruptedException {
    requestDataCountTask = new RequestDataCountTask();
    requestDataCountTask.start();
    Thread.sleep(15000);
    InstanceInfo info = InstanceCache.INSTANCE_MAP.get(KEY);
    Assert.assertTrue(info != null && info.getCountDataList().size() > 0);
    Iterator<RequestCountData> requestCountDataIterator = info.getCountDataList().iterator();
    while (requestCountDataIterator.hasNext()) {
        RequestCountData requestCountData = requestCountDataIterator.next();
        Assert.assertEquals(REQ_NUM, requestCountData.getRequestNum());
        Assert.assertEquals(REQ_FAIL_NUM, requestCountData.getRequestFailNum());
    }
}
public boolean putKey(final String key, final long phyOffset, final long storeTimestamp) {
    if (this.indexHeader.getIndexCount() < this.indexNum) {
        int keyHash = indexKeyHashMethod(key);
        int slotPos = keyHash % this.hashSlotNum;
        int absSlotPos = IndexHeader.INDEX_HEADER_SIZE + slotPos * hashSlotSize;

        try {
            int slotValue = this.mappedByteBuffer.getInt(absSlotPos);
            if (slotValue <= invalidIndex || slotValue > this.indexHeader.getIndexCount()) {
                slotValue = invalidIndex;
            }

            long timeDiff = storeTimestamp - this.indexHeader.getBeginTimestamp();
            timeDiff = timeDiff / 1000;

            if (this.indexHeader.getBeginTimestamp() <= 0) {
                timeDiff = 0;
            } else if (timeDiff > Integer.MAX_VALUE) {
                timeDiff = Integer.MAX_VALUE;
            } else if (timeDiff < 0) {
                timeDiff = 0;
            }

            int absIndexPos = IndexHeader.INDEX_HEADER_SIZE + this.hashSlotNum * hashSlotSize
                    + this.indexHeader.getIndexCount() * indexSize;

            this.mappedByteBuffer.putInt(absIndexPos, keyHash);
            this.mappedByteBuffer.putLong(absIndexPos + 4, phyOffset);
            this.mappedByteBuffer.putInt(absIndexPos + 4 + 8, (int) timeDiff);
            this.mappedByteBuffer.putInt(absIndexPos + 4 + 8 + 4, slotValue);

            this.mappedByteBuffer.putInt(absSlotPos, this.indexHeader.getIndexCount());

            if (this.indexHeader.getIndexCount() <= 1) {
                this.indexHeader.setBeginPhyOffset(phyOffset);
                this.indexHeader.setBeginTimestamp(storeTimestamp);
            }

            if (invalidIndex == slotValue) {
                this.indexHeader.incHashSlotCount();
            }
            this.indexHeader.incIndexCount();
            this.indexHeader.setEndPhyOffset(phyOffset);
            this.indexHeader.setEndTimestamp(storeTimestamp);

            return true;
        } catch (Exception e) {
            log.error("putKey exception, Key: " + key + " KeyHashCode: " + key.hashCode(), e);
        }
    } else {
        log.warn("Over index file capacity: index count = " + this.indexHeader.getIndexCount()
                + "; index max num = " + this.indexNum);
    }

    return false;
}
@Test
public void testPutKey() throws Exception {
    IndexFile indexFile = new IndexFile("100", HASH_SLOT_NUM, INDEX_NUM, 0, 0);
    for (long i = 0; i < (INDEX_NUM - 1); i++) {
        boolean putResult = indexFile.putKey(Long.toString(i), i, System.currentTimeMillis());
        assertThat(putResult).isTrue();
    }

    // put over index file capacity.
    boolean putResult = indexFile.putKey(Long.toString(400), 400, System.currentTimeMillis());
    assertThat(putResult).isFalse();

    indexFile.destroy(0);
    File file = new File("100");
    UtilAll.deleteFile(file);
}
@CanIgnoreReturnValue
public final Ordered containsAtLeastElementsIn(@Nullable Iterable<?> expectedIterable) {
    List<?> actual = Lists.newLinkedList(checkNotNull(this.actual));
    Collection<?> expected = iterableToCollection(expectedIterable);

    List<@Nullable Object> missing = newArrayList();
    List<@Nullable Object> actualNotInOrder = newArrayList();

    boolean ordered = true;
    // step through the expected elements...
    for (Object e : expected) {
        int index = actual.indexOf(e);
        if (index != -1) { // if we find the element in the actual list...
            // drain all the elements that come before that element into actualNotInOrder
            moveElements(actual, actualNotInOrder, index);
            // and remove the element from the actual list
            actual.remove(0);
        } else { // otherwise try removing it from actualNotInOrder...
            if (actualNotInOrder.remove(e)) {
                // if it was in actualNotInOrder, we're not in order
                ordered = false;
            } else {
                // if it's not in actualNotInOrder, we're missing an expected element
                missing.add(e);
            }
        }
    }
    // if we have any missing expected elements, fail
    if (!missing.isEmpty()) {
        return failAtLeast(expected, missing);
    }

    return ordered
            ? IN_ORDER
            : new Ordered() {
                @Override
                public void inOrder() {
                    ImmutableList.Builder<Fact> facts = ImmutableList.builder();
                    facts.add(simpleFact("required elements were all found, but order was wrong"));
                    facts.add(fact("expected order for required elements", expected));
                    List<Object> actualOrder = Lists.newArrayList(checkNotNull(IterableSubject.this.actual));
                    if (actualOrder.retainAll(expected)) {
                        facts.add(fact("but order was", actualOrder));
                        facts.add(fullContents());
                        failWithoutActual(facts.build());
                    } else {
                        failWithActual(facts.build());
                    }
                }
            };
}
@Test @SuppressWarnings("ContainsAllElementsInWithVarArgsToContainsAtLeast") public void iterableContainsAtLeastElementsInIterable() { assertThat(asList(1, 2, 3)).containsAtLeastElementsIn(asList(1, 2)); expectFailureWhenTestingThat(asList(1, 2, 3)).containsAtLeastElementsIn(asList(1, 2, 4)); assertFailureKeys("missing (1)", "---", "expected to contain at least", "but was"); assertFailureValue("missing (1)", "4"); assertFailureValue("expected to contain at least", "[1, 2, 4]"); }
public static boolean pathExists(Path path, Configuration conf) {
    try {
        FileSystem fileSystem = FileSystem.get(path.toUri(), conf);
        return fileSystem.exists(path);
    } catch (Exception e) {
        LOG.error("Failed to check path {}", path, e);
        throw new StarRocksConnectorException("Failed to check path: " + path + ". msg: " + e.getMessage());
    }
}
@Test
public void testPathExists() {
    Path path = new Path("hdfs://127.0.0.1:9000/user/hive/warehouse/db");
    ExceptionChecker.expectThrowsWithMsg(StarRocksConnectorException.class,
            "Failed to check path",
            () -> HiveWriteUtils.pathExists(path, new Configuration()));

    new MockUp<FileSystem>() {
        @Mock
        public FileSystem get(URI uri, Configuration conf) {
            return new MockedRemoteFileSystem(HDFS_HIVE_TABLE);
        }
    };
    Assert.assertFalse(HiveWriteUtils.pathExists(path, new Configuration()));
}
public Location setZ(double z) {
    return new Location(extent, position.withZ(z), yaw, pitch);
}
@Test
public void testSetZ() throws Exception {
    World world = mock(World.class);
    Location location1 = new Location(world, Vector3.ZERO);
    Location location2 = location1.setZ(TEST_VALUE);
    assertEquals(0, location1.getZ(), EPSILON);
    assertEquals(0, location2.getX(), EPSILON);
    assertEquals(0, location2.getY(), EPSILON);
    assertEquals(TEST_VALUE, location2.getZ(), EPSILON);
}
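setZ follows the copy-on-write ("wither") pattern: the receiver is never mutated, and a modified copy is returned, which is exactly what the two assertions on location1 and location2 verify. The same idea in miniature, with a hypothetical record (not WorldEdit's Location; requires Java 16+):

record Point(double x, double y, double z) {
    Point withZ(double newZ) {
        return new Point(x, y, newZ); // the original Point is left untouched
    }
}

// Point p1 = new Point(0, 0, 0);
// Point p2 = p1.withZ(2.0);   // p1.z() is still 0, p2.z() is 2.0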
int retrieveEdits(long requestedStartTxn, int maxTxns, List<ByteBuffer> outputBuffers) throws IOException {
    int txnCount = 0;
    try (AutoCloseableLock l = readLock.acquire()) {
        if (lowestTxnId == INVALID_TXN_ID || requestedStartTxn < lowestTxnId) {
            throw getCacheMissException(requestedStartTxn);
        } else if (requestedStartTxn > highestTxnId) {
            return 0;
        }
        outputBuffers.add(layoutHeader);
        Iterator<Map.Entry<Long, byte[]>> incrBuffIter =
                dataMap.tailMap(dataMap.floorKey(requestedStartTxn), true).entrySet().iterator();
        long prevTxn = requestedStartTxn;
        byte[] prevBuf = null;
        // Stop when maximum transactions reached...
        while ((txnCount < maxTxns)
                // ... or there are no more entries ...
                && (incrBuffIter.hasNext() || prevBuf != null)) {
            long currTxn;
            byte[] currBuf;
            if (incrBuffIter.hasNext()) {
                Map.Entry<Long, byte[]> ent = incrBuffIter.next();
                currTxn = ent.getKey();
                currBuf = ent.getValue();
            } else {
                // This accounts for the trailing entry
                currTxn = highestTxnId + 1;
                currBuf = null;
            }
            if (prevBuf != null) { // True except for the first loop iteration
                outputBuffers.add(ByteBuffer.wrap(prevBuf));
                // if prevTxn < requestedStartTxn, the extra transactions will get
                // removed after the loop, so don't include them in the txn count
                txnCount += currTxn - Math.max(requestedStartTxn, prevTxn);
            }
            prevTxn = currTxn;
            prevBuf = currBuf;
        }
        // Release the lock before doing operations on the buffers (deserializing
        // to find transaction boundaries, and copying into an output buffer)
    }
    // Remove extra leading transactions in the first buffer
    ByteBuffer firstBuf = outputBuffers.get(1); // 0th is the header
    firstBuf.position(findTransactionPosition(firstBuf.array(), requestedStartTxn));
    // Remove trailing transactions in the last buffer if necessary
    if (txnCount > maxTxns) {
        ByteBuffer lastBuf = outputBuffers.get(outputBuffers.size() - 1);
        int limit = findTransactionPosition(lastBuf.array(), requestedStartTxn + maxTxns);
        lastBuf.limit(limit);
        txnCount = maxTxns;
    }
    return txnCount;
}
@Test(expected = JournaledEditsCache.CacheMissException.class)
public void testReadUninitializedCache() throws Exception {
    cache.retrieveEdits(1, 10, new ArrayList<>());
}
private RemotingCommand deleteAcl(ChannelHandlerContext ctx,
        RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    DeleteAclRequestHeader requestHeader = request.decodeCommandCustomHeader(DeleteAclRequestHeader.class);

    Subject subject = Subject.of(requestHeader.getSubject());
    PolicyType policyType = PolicyType.getByName(requestHeader.getPolicyType());
    Resource resource = Resource.of(requestHeader.getResource());

    this.brokerController.getAuthorizationMetadataManager().deleteAcl(subject, policyType, resource)
            .thenAccept(nil -> {
                response.setCode(ResponseCode.SUCCESS);
            })
            .exceptionally(ex -> {
                LOGGER.error("delete acl for {} error", requestHeader.getSubject(), ex);
                return handleAuthException(response, ex);
            })
            .join();
    return response;
}
@Test
public void testDeleteAcl() throws RemotingCommandException {
    when(authorizationMetadataManager.deleteAcl(any(), any(), any()))
            .thenReturn(CompletableFuture.completedFuture(null));

    DeleteAclRequestHeader deleteAclRequestHeader = new DeleteAclRequestHeader();
    deleteAclRequestHeader.setSubject("User:abc");

    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.AUTH_DELETE_ACL, deleteAclRequestHeader);
    request.setVersion(441);
    request.addExtField("AccessKey", "rocketmq");
    request.makeCustomHeaderToNet();

    RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
    assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
}
@Udf
public String elt(
        @UdfParameter(description = "the nth element to extract") final int n,
        @UdfParameter(description = "the strings of which to extract the nth") final String... args
) {
    if (args == null) {
        return null;
    }

    if (n < 1 || n > args.length) {
        return null;
    }

    return args[n - 1];
}
@Test
public void shouldHandleNulls() {
    // When:
    final String el = elt.elt(2, "a", null);

    // Then:
    assertThat(el, is(nullValue()));
}
public static DeploymentDescriptor merge(List<DeploymentDescriptor> descriptorHierarchy, MergeMode mode) {
    if (descriptorHierarchy == null || descriptorHierarchy.isEmpty()) {
        throw new IllegalArgumentException("Descriptor hierarchy list cannot be empty");
    }
    if (descriptorHierarchy.size() == 1) {
        return descriptorHierarchy.get(0);
    }
    Deque<DeploymentDescriptor> stack = new ArrayDeque<>();
    descriptorHierarchy.forEach(stack::push);
    while (stack.size() > 1) {
        stack.push(merge(stack.pop(), stack.pop(), mode));
    }
    // last element from the stack is the one that contains all merged descriptors
    return stack.pop();
}
@Test
public void testDeploymentDesciptorMergeKeepAll() {
    DeploymentDescriptor primary = new DeploymentDescriptorImpl("org.jbpm.domain");
    primary.getBuilder()
            .addMarshalingStrategy(new ObjectModel("org.jbpm.test.CustomStrategy", new Object[]{"param2"}))
            .setLimitSerializationClasses(true);

    assertThat(primary).isNotNull();
    assertThat(primary.getPersistenceUnit()).isEqualTo("org.jbpm.domain");
    assertThat(primary.getAuditPersistenceUnit()).isEqualTo("org.jbpm.domain");
    assertThat(primary.getAuditMode()).isEqualTo(AuditMode.JPA);
    assertThat(primary.getPersistenceMode()).isEqualTo(PersistenceMode.JPA);
    assertThat(primary.getRuntimeStrategy()).isEqualTo(RuntimeStrategy.SINGLETON);
    assertThat(primary.getMarshallingStrategies().size()).isEqualTo(1);
    assertThat(primary.getConfiguration().size()).isEqualTo(0);
    assertThat(primary.getEnvironmentEntries().size()).isEqualTo(0);
    assertThat(primary.getEventListeners().size()).isEqualTo(0);
    assertThat(primary.getGlobals().size()).isEqualTo(0);
    assertThat(primary.getTaskEventListeners().size()).isEqualTo(0);
    assertThat(primary.getWorkItemHandlers().size()).isEqualTo(0);
    assertThat(primary.getLimitSerializationClasses()).isTrue();

    DeploymentDescriptor secondary = new DeploymentDescriptorImpl("org.jbpm.domain");
    secondary.getBuilder()
            .auditMode(AuditMode.JMS)
            .persistenceMode(PersistenceMode.JPA)
            .persistenceUnit("my.custom.unit")
            .auditPersistenceUnit("my.custom.unit2")
            .setLimitSerializationClasses(false);

    assertThat(secondary).isNotNull();
    assertThat(secondary.getPersistenceUnit()).isEqualTo("my.custom.unit");
    assertThat(secondary.getAuditPersistenceUnit()).isEqualTo("my.custom.unit2");
    assertThat(secondary.getAuditMode()).isEqualTo(AuditMode.JMS);
    assertThat(secondary.getPersistenceMode()).isEqualTo(PersistenceMode.JPA);
    assertThat(secondary.getRuntimeStrategy()).isEqualTo(RuntimeStrategy.SINGLETON);
    assertThat(secondary.getMarshallingStrategies().size()).isEqualTo(0);
    assertThat(secondary.getConfiguration().size()).isEqualTo(0);
    assertThat(secondary.getEnvironmentEntries().size()).isEqualTo(0);
    assertThat(secondary.getEventListeners().size()).isEqualTo(0);
    assertThat(secondary.getGlobals().size()).isEqualTo(0);
    assertThat(secondary.getTaskEventListeners().size()).isEqualTo(0);
    assertThat(secondary.getWorkItemHandlers().size()).isEqualTo(0);
    assertThat(secondary.getLimitSerializationClasses()).isFalse();

    // and now let's merge them
    DeploymentDescriptor outcome = DeploymentDescriptorMerger.merge(primary, secondary, MergeMode.KEEP_ALL);
    assertThat(outcome).isNotNull();
    assertThat(outcome.getPersistenceUnit()).isEqualTo("org.jbpm.domain");
    assertThat(outcome.getAuditPersistenceUnit()).isEqualTo("org.jbpm.domain");
    assertThat(outcome.getAuditMode()).isEqualTo(AuditMode.JPA);
    assertThat(outcome.getPersistenceMode()).isEqualTo(PersistenceMode.JPA);
    assertThat(outcome.getRuntimeStrategy()).isEqualTo(RuntimeStrategy.SINGLETON);
    assertThat(outcome.getMarshallingStrategies().size()).isEqualTo(1);
    assertThat(outcome.getConfiguration().size()).isEqualTo(0);
    assertThat(outcome.getEnvironmentEntries().size()).isEqualTo(0);
    assertThat(outcome.getEventListeners().size()).isEqualTo(0);
    assertThat(outcome.getGlobals().size()).isEqualTo(0);
    assertThat(outcome.getTaskEventListeners().size()).isEqualTo(0);
    assertThat(outcome.getWorkItemHandlers().size()).isEqualTo(0);
    assertThat(outcome.getLimitSerializationClasses()).isTrue();
}
public String getLabel() {
    if (period == null) {
        final DateFormat dateFormat = I18N.createDateFormat();
        return dateFormat.format(startDate) + " - " + dateFormat.format(endDate);
    }
    return period.getLabel();
}
@Test
public void testGetLabel() {
    assertNotNull("getLabel", periodRange.getLabel());
    assertNotNull("getLabel", customRange.getLabel());
}
@Override
public V poll() {
    return get(pollAsync());
}
@Test
public void testPoll() throws InterruptedException {
    RBlockingQueue<String> queue = redisson.getBlockingQueue("test");
    RDelayedQueue<String> delayedQueue = redisson.getDelayedQueue(queue);
    delayedQueue.offer("1", 1, TimeUnit.SECONDS);
    delayedQueue.offer("2", 2, TimeUnit.SECONDS);
    delayedQueue.offer("3", 3, TimeUnit.SECONDS);
    delayedQueue.offer("4", 4, TimeUnit.SECONDS);

    assertThat(delayedQueue.poll()).isEqualTo("1");
    assertThat(delayedQueue.poll()).isEqualTo("2");
    assertThat(delayedQueue.poll()).isEqualTo("3");
    assertThat(delayedQueue.poll()).isEqualTo("4");

    Thread.sleep(3000);

    assertThat(queue.isEmpty()).isTrue();
    assertThat(queue.poll()).isNull();
    assertThat(queue.poll()).isNull();

    delayedQueue.destroy();
}
public Date getStartDate() {
    return startDate;
}
@Test
public void testGetStartDate() {
    assertNull("getStartDate", periodRange.getStartDate());
    assertNotNull("getStartDate", customRange.getStartDate());
}
@Override
public Set<Path> getPaths(ElementId src, ElementId dst, LinkWeigher weigher) {
    checkNotNull(src, ELEMENT_ID_NULL);
    checkNotNull(dst, ELEMENT_ID_NULL);
    LinkWeigher internalWeigher = weigher != null ? weigher : DEFAULT_WEIGHER;

    // Get the source and destination edge locations
    EdgeLink srcEdge = getEdgeLink(src, true);
    EdgeLink dstEdge = getEdgeLink(dst, false);

    // If either edge is null, bail with no paths.
    if (srcEdge == null || dstEdge == null) {
        return ImmutableSet.of();
    }

    DeviceId srcDevice = srcEdge != NOT_HOST ? srcEdge.dst().deviceId() : (DeviceId) src;
    DeviceId dstDevice = dstEdge != NOT_HOST ? dstEdge.src().deviceId() : (DeviceId) dst;

    // If the source and destination are on the same edge device, there
    // is just one path, so build it and return it.
    if (srcDevice.equals(dstDevice)) {
        return edgeToEdgePaths(srcEdge, dstEdge, internalWeigher);
    }

    // Otherwise get all paths between the source and destination edge
    // devices.
    Topology topology = topologyService.currentTopology();
    Set<Path> paths = topologyService.getPaths(topology, srcDevice, dstDevice, internalWeigher);

    return edgeToEdgePaths(srcEdge, dstEdge, paths, internalWeigher);
}
@Test
public void testDevicePaths() {
    topoMgr.definePaths(ImmutableSet.of(path1));
    Set<Path> pathsAC = service.getPaths(did("A"), did("C"), new TestWeigher());
    checkPaths(pathsAC);
}
@Override
public boolean equals(Object object) {
    if (object != null && getClass() == object.getClass()) {
        if (!super.equals(object)) {
            return false;
        }
        DefaultPortDescription that = (DefaultPortDescription) object;
        return Objects.equal(this.number, that.number)
                && Objects.equal(this.isEnabled, that.isEnabled)
                && Objects.equal(this.type, that.type)
                && Objects.equal(this.portSpeed, that.portSpeed);
    }
    return false;
}
@Test
public void testEquals() {
    new EqualsTester()
            .addEqualityGroup(portDescription1, sameAsPortDescription1)
            .addEqualityGroup(portDescription2)
            .addEqualityGroup(portDescription3)
            .testEquals();
}
@Override
public void restRequest(RestRequest request, Callback<RestResponse> callback, String routeKey) {
    this.restRequest(request, new RequestContext(), callback, routeKey);
}
@Test
public void testBadRequest() {
    RouteLookup routeLookup = new SimpleTestRouteLookup();

    final D2Client d2Client = new D2ClientBuilder().setZkHosts("localhost:2121").build();
    d2Client.start(new FutureCallback<>());
    RouteLookupClient routeLookupClient = new RouteLookupClient(d2Client, routeLookup, "WestCoast");
    RestRequest dummyRestRequest = new RestRequestBuilder(URI.create("http://simple_uri")).build();
    try {
        Future<RestResponse> future = routeLookupClient.restRequest(dummyRestRequest, "5436");
        future.get();
        Assert.fail("Unexpected success, request should have thrown an Exception");
    } catch (Exception e) {
        Assert.assertTrue(e instanceof IllegalArgumentException);
        String message = e.getMessage();
        if (!message.contains("Unsupported scheme in URI: http://simple_uri")) {
            Assert.fail("request was sent using http instead of d2, but we didn't get Unsupported scheme");
        }
    }
}
public InetSocketAddress getManagedPort(
        final UdpChannel udpChannel, final InetSocketAddress bindAddress) throws BindException {
    InetSocketAddress address = bindAddress;

    if (bindAddress.getPort() != 0) {
        portSet.add(bindAddress.getPort());
    } else if (!isOsWildcard) {
        // do not map if not a subscription and does not have a control address. We want to use an ephemeral port
        // for the control channel on publications.
        if (!isSender || udpChannel.hasExplicitControl()) {
            address = new InetSocketAddress(bindAddress.getAddress(), allocateOpenPort());
        }
    }

    return address;
}
@Test
void shouldPassThroughWithExplicitBindAddressOutSideRange() throws BindException {
    final InetSocketAddress bindAddress = new InetSocketAddress("localhost", 1000);
    final WildcardPortManager manager = new WildcardPortManager(portRange, true);

    assertThat(manager.getManagedPort(udpChannelPort0, bindAddress),
            is(new InetSocketAddress("localhost", 1000)));
}
public static AttributesBuilder newAttributesBuilder() {
    AttributesBuilder attributesBuilder;
    if (attributesBuilderSupplier == null) {
        attributesBuilderSupplier = Attributes::builder;
    }
    attributesBuilder = attributesBuilderSupplier.get();
    LABEL_MAP.forEach(attributesBuilder::put);
    return attributesBuilder;
}
@Test
public void testNewAttributesBuilder() {
    Attributes attributes = BrokerMetricsManager.newAttributesBuilder()
        .put("a", "b")
        .build();
    assertThat(attributes.get(AttributeKey.stringKey("a"))).isEqualTo("b");
}
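// Hedged usage sketch (not from the source): the builder comes back pre-populated
// from LABEL_MAP, and caller-supplied attributes are layered on top. The key
// name "cluster" is illustrative only.
Attributes attrs = BrokerMetricsManager.newAttributesBuilder()
    .put(AttributeKey.stringKey("cluster"), "test-cluster")
    .build();
assertThat(attrs.get(AttributeKey.stringKey("cluster"))).isEqualTo("test-cluster");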
@Override
public InputStream getInputStream(final int columnIndex, final String type) throws SQLException {
    return queryResult.getInputStream(columnIndex, type);
}
@Test
void assertGetInputStream() throws SQLException {
    QueryResult queryResult = mock(QueryResult.class);
    InputStream value = mock(InputStream.class);
    when(queryResult.getInputStream(1, "Ascii")).thenReturn(value);
    TransparentMergedResult actual = new TransparentMergedResult(queryResult);
    assertThat(actual.getInputStream(1, "Ascii"), is(value));
}
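// Hedged companion sketch (not from the source): assuming the decorator delegates
// getValue(columnIndex, type) the same way it delegates getInputStream.
@Test
void assertGetValue() throws SQLException {
    QueryResult queryResult = mock(QueryResult.class);
    when(queryResult.getValue(1, Object.class)).thenReturn("foo");
    TransparentMergedResult actual = new TransparentMergedResult(queryResult);
    assertThat(actual.getValue(1, Object.class), is("foo"));
}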
public void executeTransformation( final TransMeta transMeta, final boolean local, final boolean remote,
    final boolean cluster, final boolean preview, final boolean debug, final Date replayDate,
    final boolean safe, LogLevel logLevel ) throws KettleException {

  if ( transMeta == null ) {
    return;
  }

  // See if we need to ask for debugging information...
  //
  TransDebugMeta transDebugMeta = null;
  TransExecutionConfiguration executionConfiguration = null;

  if ( preview ) {
    executionConfiguration = spoon.getTransPreviewExecutionConfiguration();
  } else if ( debug ) {
    executionConfiguration = spoon.getTransDebugExecutionConfiguration();
  } else {
    executionConfiguration = spoon.getTransExecutionConfiguration();
  }

  // Set defaults so the run configuration can set it up correctly
  executionConfiguration.setExecutingLocally( true );
  executionConfiguration.setExecutingRemotely( false );
  executionConfiguration.setExecutingClustered( false );

  // Set repository and safe mode information in both the exec config and the metadata
  transMeta.setRepository( spoon.rep );
  transMeta.setMetaStore( spoon.getMetaStore() );

  executionConfiguration.setRepository( spoon.rep );
  executionConfiguration.setSafeModeEnabled( safe );

  if ( debug ) {
    // See if we have debugging information stored somewhere?
    //
    transDebugMeta = transDebugMetaMap.get( transMeta );
    if ( transDebugMeta == null ) {
      transDebugMeta = new TransDebugMeta( transMeta );
      transDebugMetaMap.put( transMeta, transDebugMeta );
    }

    // Set the default number of rows to retrieve on all selected steps...
    //
    List<StepMeta> selectedSteps = transMeta.getSelectedSteps();
    if ( selectedSteps != null && selectedSteps.size() > 0 ) {
      transDebugMeta.getStepDebugMetaMap().clear();
      for ( StepMeta stepMeta : transMeta.getSelectedSteps() ) {
        StepDebugMeta stepDebugMeta = new StepDebugMeta( stepMeta );
        stepDebugMeta.setRowCount( PropsUI.getInstance().getDefaultPreviewSize() );
        stepDebugMeta.setPausingOnBreakPoint( true );
        stepDebugMeta.setReadingFirstRows( false );
        transDebugMeta.getStepDebugMetaMap().put( stepMeta, stepDebugMeta );
      }
    }
  } else if ( preview ) {
    // See if we have preview information stored somewhere?
    //
    transDebugMeta = transPreviewMetaMap.get( transMeta );
    if ( transDebugMeta == null ) {
      transDebugMeta = new TransDebugMeta( transMeta );
      transPreviewMetaMap.put( transMeta, transDebugMeta );
    }

    // Set the default number of preview rows on all selected steps...
    //
    List<StepMeta> selectedSteps = transMeta.getSelectedSteps();
    if ( selectedSteps != null && selectedSteps.size() > 0 ) {
      transDebugMeta.getStepDebugMetaMap().clear();
      for ( StepMeta stepMeta : transMeta.getSelectedSteps() ) {
        StepDebugMeta stepDebugMeta = new StepDebugMeta( stepMeta );
        stepDebugMeta.setRowCount( PropsUI.getInstance().getDefaultPreviewSize() );
        stepDebugMeta.setPausingOnBreakPoint( false );
        stepDebugMeta.setReadingFirstRows( true );
        transDebugMeta.getStepDebugMetaMap().put( stepMeta, stepDebugMeta );
      }
    }
  }

  int debugAnswer = TransDebugDialog.DEBUG_CONFIG;
  if ( debug || preview ) {
    transDebugMeta.getTransMeta().setRepository( spoon.rep ); // pass repository for mappings
    TransDebugDialog transDebugDialog = new TransDebugDialog( spoon.getShell(), transDebugMeta );
    debugAnswer = transDebugDialog.open();
    if ( debugAnswer != TransDebugDialog.DEBUG_CANCEL ) {
      executionConfiguration.setExecutingLocally( true );
      executionConfiguration.setExecutingRemotely( false );
      executionConfiguration.setExecutingClustered( false );
    } else {
      // If we cancel the debug dialog, we don't go further with the execution either.
      //
      return;
    }
  }

  Object[] data = spoon.variables.getData();
  String[] fields = spoon.variables.getRowMeta().getFieldNames();
  Map<String, String> variableMap = new HashMap<String, String>();
  variableMap.putAll( executionConfiguration.getVariables() ); // the default

  for ( int idx = 0; idx < fields.length; idx++ ) {
    String value = executionConfiguration.getVariables().get( fields[idx] );
    if ( Utils.isEmpty( value ) ) {
      value = data[idx].toString();
    }
    variableMap.put( fields[idx], value );
  }

  executionConfiguration.setVariables( variableMap );
  executionConfiguration.getUsedVariables( transMeta );
  executionConfiguration.getUsedArguments( transMeta, spoon.getArguments() );
  executionConfiguration.setReplayDate( replayDate );
  executionConfiguration.setLogLevel( logLevel );

  boolean execConfigAnswer = true;

  if ( debugAnswer == TransDebugDialog.DEBUG_CONFIG && replayDate == null && transMeta.isShowDialog() ) {
    TransExecutionConfigurationDialog dialog =
        new TransExecutionConfigurationDialog( spoon.getShell(), executionConfiguration, transMeta );
    execConfigAnswer = dialog.open();
  }

  if ( execConfigAnswer ) {
    TransGraph activeTransGraph = spoon.getActiveTransGraph();
    activeTransGraph.transLogDelegate.addTransLog();

    // Set the named parameters
    Map<String, String> paramMap = executionConfiguration.getParams();
    for ( String key : paramMap.keySet() ) {
      transMeta.setParameterValue( key, Const.NVL( paramMap.get( key ), "" ) );
    }
    transMeta.activateParameters();

    // Set the log level
    //
    if ( executionConfiguration.getLogLevel() != null ) {
      transMeta.setLogLevel( executionConfiguration.getLogLevel() );
    }

    // Set the run options
    transMeta.setClearingLog( executionConfiguration.isClearingLog() );
    transMeta.setSafeModeEnabled( executionConfiguration.isSafeModeEnabled() );
    transMeta.setGatheringMetrics( executionConfiguration.isGatheringMetrics() );

    ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.SpoonTransMetaExecutionStart.id, transMeta );
    ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.SpoonTransExecutionConfiguration.id,
        executionConfiguration );

    try {
      ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.SpoonTransBeforeStart.id,
          new Object[] { executionConfiguration, transMeta, transMeta, spoon.getRepository() } );
    } catch ( KettleException e ) {
      log.logError( e.getMessage(), transMeta.getFilename() );
      return;
    }

    if ( !executionConfiguration.isExecutingLocally() && !executionConfiguration.isExecutingRemotely() ) {
      if ( transMeta.hasChanged() ) {
        activeTransGraph.showSaveFileMessage();
      }
    }

    // Verify if there is at least one step specified to debug or preview...
    //
    if ( debug || preview ) {
      if ( transDebugMeta.getNrOfUsedSteps() == 0 ) {
        MessageBox box = new MessageBox( spoon.getShell(), SWT.ICON_WARNING | SWT.YES | SWT.NO );
        box.setText( BaseMessages.getString( PKG, "Spoon.Dialog.Warning.NoPreviewOrDebugSteps.Title" ) );
        box.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.Warning.NoPreviewOrDebugSteps.Message" ) );
        int answer = box.open();
        if ( answer != SWT.YES ) {
          return;
        }
      }
    }

    // Is this a local execution?
    //
    if ( executionConfiguration.isExecutingLocally() ) {
      if ( debug || preview ) {
        activeTransGraph.debug( executionConfiguration, transDebugMeta );
      } else {
        activeTransGraph.start( executionConfiguration );
      }

      // Are we executing remotely?
      //
    } else if ( executionConfiguration.isExecutingRemotely() ) {
      activeTransGraph.handleTransMetaChanges( transMeta );
      if ( transMeta.hasChanged() ) {
        showSaveTransformationBeforeRunningDialog( spoon.getShell() );
      } else if ( executionConfiguration.getRemoteServer() != null ) {
        String carteObjectId =
            Trans.sendToSlaveServer( transMeta, executionConfiguration, spoon.rep, spoon.getMetaStore() );
        monitorRemoteTrans( transMeta, carteObjectId, executionConfiguration.getRemoteServer() );
        spoon.delegates.slaves.addSpoonSlave( executionConfiguration.getRemoteServer() );
      } else {
        MessageBox mb = new MessageBox( spoon.getShell(), SWT.OK | SWT.ICON_INFORMATION );
        mb.setMessage( BaseMessages.getString( PKG, "Spoon.Dialog.NoRemoteServerSpecified.Message" ) );
        mb.setText( BaseMessages.getString( PKG, "Spoon.Dialog.NoRemoteServerSpecified.Title" ) );
        mb.open();
      }

      // Are we executing clustered?
      //
    } else if ( executionConfiguration.isExecutingClustered() ) {
      activeTransGraph.handleTransMetaChanges( transMeta );
      if ( transMeta.hasChanged() ) {
        showSaveTransformationBeforeRunningDialog( spoon.getShell() );
      } else {
        splitTrans( transMeta, executionConfiguration );
      }
    }
  }
}
@Test @SuppressWarnings( "ResultOfMethodCallIgnored" ) public void testSetParamsIntoMetaInExecuteTransformation() throws KettleException { doCallRealMethod().when( delegate ).executeTransformation( transMeta, true, false, false, false, false, null, false, LogLevel.BASIC ); RowMetaInterface rowMeta = mock( RowMetaInterface.class ); TransExecutionConfiguration transExecutionConfiguration = mock( TransExecutionConfiguration.class ); TransGraph activeTransGraph = mock( TransGraph.class ); activeTransGraph.transLogDelegate = mock( TransLogDelegate.class ); doReturn( rowMeta ).when( spoon.variables ).getRowMeta(); doReturn( EMPTY_STRING_ARRAY ).when( rowMeta ).getFieldNames(); doReturn( transExecutionConfiguration ).when( spoon ).getTransExecutionConfiguration(); doReturn( MAP_WITH_TEST_PARAM ).when( transExecutionConfiguration ).getParams(); doReturn( activeTransGraph ).when( spoon ).getActiveTransGraph(); doReturn( TEST_LOG_LEVEL ).when( transExecutionConfiguration ).getLogLevel(); doReturn( TEST_BOOLEAN_PARAM ).when( transExecutionConfiguration ).isClearingLog(); doReturn( TEST_BOOLEAN_PARAM ).when( transExecutionConfiguration ).isSafeModeEnabled(); doReturn( TEST_BOOLEAN_PARAM ).when( transExecutionConfiguration ).isGatheringMetrics(); delegate.executeTransformation( transMeta, true, false, false, false, false, null, false, LogLevel.BASIC ); verify( transMeta ).setParameterValue( TEST_PARAM_KEY, TEST_PARAM_VALUE ); verify( transMeta ).activateParameters(); verify( transMeta ).setLogLevel( TEST_LOG_LEVEL ); verify( transMeta ).setClearingLog( TEST_BOOLEAN_PARAM ); verify( transMeta ).setSafeModeEnabled( TEST_BOOLEAN_PARAM ); verify( transMeta ).setGatheringMetrics( TEST_BOOLEAN_PARAM ); }
@Override
public WorkProcessor<Page> buildResult() {
    if (groupByHash.getGroupCount() == 0) {
        return WorkProcessor.fromIterator(emptyIterator());
    }
    return WorkProcessor.fromIterator(
            new ResultIterator(IntStream.range(0, groupByHash.getGroupCount()).iterator(), false));
}
@Test
public void testEmptyInput() {
    InMemoryGroupedTopNBuilder groupedTopNBuilder = new InMemoryGroupedTopNBuilder(
            ImmutableList.of(BIGINT),
            (left, leftPosition, right, rightPosition) -> {
                throw new UnsupportedOperationException();
            },
            5,
            false,
            new TestingMemoryContext(100L),
            new NoChannelGroupByHash());
    assertFalse(groupedTopNBuilder.buildResult().iterator().hasNext());
}
Set<String> getConvertedUserIds() {
    return Collections.unmodifiableSet(idToDirectoryNameMap.keySet());
}
@Test
public void testInitialUserIds() throws IOException {
    UserIdMapper mapper = createUserIdMapper(IdStrategy.CASE_INSENSITIVE);
    assertThat(mapper.getConvertedUserIds(), empty());
}
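// Hedged companion sketch (not from the source): the focal method wraps the key
// set in Collections.unmodifiableSet, so mutation attempts must fail. Assumes
// JUnit's assertThrows is available to the test class.
@Test
public void testConvertedUserIdsViewIsUnmodifiable() throws IOException {
    UserIdMapper mapper = createUserIdMapper(IdStrategy.CASE_INSENSITIVE);
    assertThrows(UnsupportedOperationException.class,
        () -> mapper.getConvertedUserIds().add("alice"));
}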
@Override
public CompletableFuture<Void> offload(ReadHandle readHandle,
                                       UUID uuid,
                                       Map<String, String> extraMetadata) {
    final String managedLedgerName = extraMetadata.get(MANAGED_LEDGER_NAME);
    final String topicName = TopicName.fromPersistenceNamingEncoding(managedLedgerName);
    CompletableFuture<Void> promise = new CompletableFuture<>();
    scheduler.chooseThread(readHandle.getId()).execute(() -> {
        final BlobStore writeBlobStore = getBlobStore(config.getBlobStoreLocation());
        log.info("offload {} uuid {} extraMetadata {} to {} {}", readHandle.getId(), uuid, extraMetadata,
                config.getBlobStoreLocation(), writeBlobStore);
        if (readHandle.getLength() == 0 || !readHandle.isClosed() || readHandle.getLastAddConfirmed() < 0) {
            promise.completeExceptionally(
                    new IllegalArgumentException("An empty or open ledger should never be offloaded"));
            return;
        }
        OffloadIndexBlockBuilder indexBuilder = OffloadIndexBlockBuilder.create()
                .withLedgerMetadata(readHandle.getLedgerMetadata())
                .withDataBlockHeaderLength(BlockAwareSegmentInputStreamImpl.getHeaderSize());
        String dataBlockKey = DataBlockUtils.dataBlockOffloadKey(readHandle.getId(), uuid);
        String indexBlockKey = DataBlockUtils.indexBlockOffloadKey(readHandle.getId(), uuid);
        log.info("ledger {} dataBlockKey {} indexBlockKey {}", readHandle.getId(), dataBlockKey, indexBlockKey);

        MultipartUpload mpu = null;
        List<MultipartPart> parts = Lists.newArrayList();

        // init multi part upload for data block.
        try {
            BlobBuilder blobBuilder = writeBlobStore.blobBuilder(dataBlockKey);
            Map<String, String> objectMetadata = new HashMap<>(userMetadata);
            objectMetadata.put("role", "data");
            if (extraMetadata != null) {
                objectMetadata.putAll(extraMetadata);
            }
            DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata);
            Blob blob = blobBuilder.build();
            log.info("initiateMultipartUpload bucket {}, metadata {} ", config.getBucket(), blob.getMetadata());
            mpu = writeBlobStore.initiateMultipartUpload(config.getBucket(), blob.getMetadata(), new PutOptions());
        } catch (Throwable t) {
            promise.completeExceptionally(t);
            return;
        }

        long dataObjectLength = 0;
        // start multi part upload for data block.
        try {
            long startEntry = 0;
            int partId = 1;
            long start = System.nanoTime();
            long entryBytesWritten = 0;
            while (startEntry <= readHandle.getLastAddConfirmed()) {
                int blockSize = BlockAwareSegmentInputStreamImpl
                        .calculateBlockSize(config.getMaxBlockSizeInBytes(), readHandle, startEntry,
                                entryBytesWritten);

                try (BlockAwareSegmentInputStream blockStream = new BlockAwareSegmentInputStreamImpl(
                        readHandle, startEntry, blockSize, this.offloaderStats, managedLedgerName)) {

                    Payload partPayload = Payloads.newInputStreamPayload(blockStream);
                    partPayload.getContentMetadata().setContentLength((long) blockSize);
                    partPayload.getContentMetadata().setContentType("application/octet-stream");
                    parts.add(writeBlobStore.uploadMultipartPart(mpu, partId, partPayload));
                    log.debug("UploadMultipartPart. container: {}, blobName: {}, partId: {}, mpu: {}",
                            config.getBucket(), dataBlockKey, partId, mpu.id());

                    indexBuilder.addBlock(startEntry, partId, blockSize);

                    if (blockStream.getEndEntryId() != -1) {
                        startEntry = blockStream.getEndEntryId() + 1;
                    } else {
                        // could not read entry from ledger.
                        break;
                    }
                    entryBytesWritten += blockStream.getBlockEntryBytesCount();
                    partId++;
                    this.offloaderStats.recordOffloadBytes(topicName, blockStream.getBlockEntryBytesCount());
                }

                dataObjectLength += blockSize;
            }

            String etag = writeBlobStore.completeMultipartUpload(mpu, parts);
            log.info("Ledger {}, upload finished, etag {}", readHandle.getId(), etag);
            mpu = null;
        } catch (Throwable t) {
            try {
                if (mpu != null) {
                    writeBlobStore.abortMultipartUpload(mpu);
                }
            } catch (Throwable throwable) {
                log.error("Failed abortMultipartUpload in bucket - {} with key - {}, uploadId - {}.",
                        config.getBucket(), dataBlockKey, mpu.id(), throwable);
            }
            this.offloaderStats.recordWriteToStorageError(topicName);
            this.offloaderStats.recordOffloadError(topicName);
            promise.completeExceptionally(t);
            return;
        }

        // upload index block
        try (OffloadIndexBlock index = indexBuilder.withDataObjectLength(dataObjectLength).build();
             IndexInputStream indexStream = index.toStream()) {
            // write the index block
            BlobBuilder blobBuilder = writeBlobStore.blobBuilder(indexBlockKey);
            Map<String, String> objectMetadata = new HashMap<>(userMetadata);
            objectMetadata.put("role", "index");
            if (extraMetadata != null) {
                objectMetadata.putAll(extraMetadata);
            }
            DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata);
            Payload indexPayload = Payloads.newInputStreamPayload(indexStream);
            indexPayload.getContentMetadata().setContentLength((long) indexStream.getStreamSize());
            indexPayload.getContentMetadata().setContentType("application/octet-stream");

            Blob blob = blobBuilder
                    .payload(indexPayload)
                    .contentLength((long) indexStream.getStreamSize())
                    .build();

            writeBlobStore.putBlob(config.getBucket(), blob);
            promise.complete(null);
        } catch (Throwable t) {
            try {
                writeBlobStore.removeBlob(config.getBucket(), dataBlockKey);
            } catch (Throwable throwable) {
                log.error("Failed deleteObject in bucket - {} with key - {}.",
                        config.getBucket(), dataBlockKey, throwable);
            }
            this.offloaderStats.recordWriteToStorageError(topicName);
            this.offloaderStats.recordOffloadError(topicName);
            promise.completeExceptionally(t);
            return;
        }
    });
    return promise;
}
@Test
public void testOffloadEmpty() throws Exception {
    CompletableFuture<LedgerEntries> noEntries = new CompletableFuture<>();
    noEntries.completeExceptionally(new BKException.BKReadException());

    ReadHandle readHandle = Mockito.mock(ReadHandle.class);
    Mockito.doReturn(-1L).when(readHandle).getLastAddConfirmed();
    Mockito.doReturn(noEntries).when(readHandle).readAsync(anyLong(), anyLong());
    Mockito.doReturn(0L).when(readHandle).getLength();
    Mockito.doReturn(true).when(readHandle).isClosed();
    Mockito.doReturn(1234L).when(readHandle).getId();

    UUID uuid = UUID.randomUUID();
    LedgerOffloader offloader = getOffloader();
    try {
        offloader.offload(readHandle, uuid, new HashMap<>()).get();
        Assert.fail("Shouldn't have been able to offload");
    } catch (ExecutionException e) {
        assertEquals(e.getCause().getClass(), IllegalArgumentException.class);
    }
}
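// Hedged companion sketch (not from the source): the focal method's guard also
// rejects a ledger that is still open (isClosed() == false), so the same failure
// shape should apply; the mocking mirrors the test above.
@Test
public void testOffloadOpenLedger() throws Exception {
    ReadHandle readHandle = Mockito.mock(ReadHandle.class);
    Mockito.doReturn(9L).when(readHandle).getLastAddConfirmed();
    Mockito.doReturn(100L).when(readHandle).getLength();
    Mockito.doReturn(false).when(readHandle).isClosed();
    Mockito.doReturn(1234L).when(readHandle).getId();

    LedgerOffloader offloader = getOffloader();
    try {
        offloader.offload(readHandle, UUID.randomUUID(), new HashMap<>()).get();
        Assert.fail("Open ledgers must not be offloadable");
    } catch (ExecutionException e) {
        assertEquals(e.getCause().getClass(), IllegalArgumentException.class);
    }
}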
private CoordinatorResult<ConsumerGroupHeartbeatResponseData, CoordinatorRecord> consumerGroupHeartbeat(
    String groupId,
    String memberId,
    int memberEpoch,
    String instanceId,
    String rackId,
    int rebalanceTimeoutMs,
    String clientId,
    String clientHost,
    List<String> subscribedTopicNames,
    String assignorName,
    List<ConsumerGroupHeartbeatRequestData.TopicPartitions> ownedTopicPartitions
) throws ApiException {
    final long currentTimeMs = time.milliseconds();
    final List<CoordinatorRecord> records = new ArrayList<>();

    // Get or create the consumer group.
    boolean createIfNotExists = memberEpoch == 0;
    final ConsumerGroup group = getOrMaybeCreateConsumerGroup(groupId, createIfNotExists, records);
    throwIfConsumerGroupIsFull(group, memberId);

    // Get or create the member.
    if (memberId.isEmpty()) memberId = Uuid.randomUuid().toString();
    final ConsumerGroupMember member;
    if (instanceId == null) {
        member = getOrMaybeSubscribeDynamicConsumerGroupMember(
            group,
            memberId,
            memberEpoch,
            ownedTopicPartitions,
            createIfNotExists,
            false
        );
    } else {
        member = getOrMaybeSubscribeStaticConsumerGroupMember(
            group,
            memberId,
            memberEpoch,
            instanceId,
            ownedTopicPartitions,
            createIfNotExists,
            false,
            records
        );
    }

    // 1. Create or update the member. If the member is new or has changed, a ConsumerGroupMemberMetadataValue
    // record is written to the __consumer_offsets partition to persist the change. If the subscriptions have
    // changed, the subscription metadata is updated and persisted by writing a ConsumerGroupPartitionMetadataValue
    // record to the __consumer_offsets partition. Finally, the group epoch is bumped if the subscriptions have
    // changed, and persisted by writing a ConsumerGroupMetadataValue record to the partition.
    ConsumerGroupMember updatedMember = new ConsumerGroupMember.Builder(member)
        .maybeUpdateInstanceId(Optional.ofNullable(instanceId))
        .maybeUpdateRackId(Optional.ofNullable(rackId))
        .maybeUpdateRebalanceTimeoutMs(ofSentinel(rebalanceTimeoutMs))
        .maybeUpdateServerAssignorName(Optional.ofNullable(assignorName))
        .maybeUpdateSubscribedTopicNames(Optional.ofNullable(subscribedTopicNames))
        .setClientId(clientId)
        .setClientHost(clientHost)
        .setClassicMemberMetadata(null)
        .build();

    boolean bumpGroupEpoch = hasMemberSubscriptionChanged(
        groupId,
        member,
        updatedMember,
        records
    );

    int groupEpoch = group.groupEpoch();
    Map<String, TopicMetadata> subscriptionMetadata = group.subscriptionMetadata();
    Map<String, Integer> subscribedTopicNamesMap = group.subscribedTopicNames();
    SubscriptionType subscriptionType = group.subscriptionType();

    if (bumpGroupEpoch || group.hasMetadataExpired(currentTimeMs)) {
        // The subscription metadata is updated in two cases:
        // 1) The member has updated its subscriptions;
        // 2) The refresh deadline has been reached.
        subscribedTopicNamesMap = group.computeSubscribedTopicNames(member, updatedMember);
        subscriptionMetadata = group.computeSubscriptionMetadata(
            subscribedTopicNamesMap,
            metadataImage.topics(),
            metadataImage.cluster()
        );

        int numMembers = group.numMembers();
        if (!group.hasMember(updatedMember.memberId()) && !group.hasStaticMember(updatedMember.instanceId())) {
            numMembers++;
        }

        subscriptionType = ModernGroup.subscriptionType(
            subscribedTopicNamesMap,
            numMembers
        );

        if (!subscriptionMetadata.equals(group.subscriptionMetadata())) {
            log.info("[GroupId {}] Computed new subscription metadata: {}.",
                groupId, subscriptionMetadata);
            bumpGroupEpoch = true;
            records.add(newConsumerGroupSubscriptionMetadataRecord(groupId, subscriptionMetadata));
        }

        if (bumpGroupEpoch) {
            groupEpoch += 1;
            records.add(newConsumerGroupEpochRecord(groupId, groupEpoch));
            log.info("[GroupId {}] Bumped group epoch to {}.", groupId, groupEpoch);
            metrics.record(CONSUMER_GROUP_REBALANCES_SENSOR_NAME);
        }

        group.setMetadataRefreshDeadline(currentTimeMs + consumerGroupMetadataRefreshIntervalMs, groupEpoch);
    }

    // 2. Update the target assignment if the group epoch is larger than the target assignment epoch. The delta between
    // the existing and the new target assignment is persisted to the partition.
    final int targetAssignmentEpoch;
    final Assignment targetAssignment;

    if (groupEpoch > group.assignmentEpoch()) {
        targetAssignment = updateTargetAssignment(
            group,
            groupEpoch,
            member,
            updatedMember,
            subscriptionMetadata,
            subscriptionType,
            records
        );
        targetAssignmentEpoch = groupEpoch;
    } else {
        targetAssignmentEpoch = group.assignmentEpoch();
        targetAssignment = group.targetAssignment(updatedMember.memberId(), updatedMember.instanceId());
    }

    // 3. Reconcile the member's assignment with the target assignment if the member is not
    // fully reconciled yet.
    updatedMember = maybeReconcile(
        groupId,
        updatedMember,
        group::currentPartitionEpoch,
        targetAssignmentEpoch,
        targetAssignment,
        ownedTopicPartitions,
        records
    );

    scheduleConsumerGroupSessionTimeout(groupId, memberId);

    // Prepare the response.
    ConsumerGroupHeartbeatResponseData response = new ConsumerGroupHeartbeatResponseData()
        .setMemberId(updatedMember.memberId())
        .setMemberEpoch(updatedMember.memberEpoch())
        .setHeartbeatIntervalMs(consumerGroupHeartbeatIntervalMs(groupId));

    // The assignment is only provided in the following cases:
    // 1. The member sent a full request. It does so when joining or rejoining the group with zero
    //    as the member epoch; or on any errors (e.g. timeout). We use all the non-optional fields
    //    (rebalanceTimeoutMs, subscribedTopicNames and ownedTopicPartitions) to detect a full request
    //    as those must be set in a full request.
    // 2. The member's assignment has been updated.
    boolean isFullRequest = memberEpoch == 0
        || (rebalanceTimeoutMs != -1 && subscribedTopicNames != null && ownedTopicPartitions != null);
    if (isFullRequest || hasAssignedPartitionsChanged(member, updatedMember)) {
        response.setAssignment(createConsumerGroupResponseAssignment(updatedMember));
    }

    return new CoordinatorResult<>(records, response);
}
@Test
public void testConsumerGroupRebalanceSensor() {
    String groupId = "fooup";
    // Use a static member id as it makes the test easier.
    String memberId = Uuid.randomUuid().toString();

    Uuid fooTopicId = Uuid.randomUuid();
    String fooTopicName = "foo";
    Uuid barTopicId = Uuid.randomUuid();
    String barTopicName = "bar";

    MockPartitionAssignor assignor = new MockPartitionAssignor("range");
    GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder()
        .withConsumerGroupAssignors(Collections.singletonList(assignor))
        .withMetadataImage(new MetadataImageBuilder()
            .addTopic(fooTopicId, fooTopicName, 6)
            .addTopic(barTopicId, barTopicName, 3)
            .addRacks()
            .build())
        .build();

    assignor.prepareGroupAssignment(new GroupAssignment(
        Collections.singletonMap(memberId, new MemberAssignmentImpl(mkAssignment(
            mkTopicAssignment(fooTopicId, 0, 1, 2, 3, 4, 5),
            mkTopicAssignment(barTopicId, 0, 1, 2)
        )))
    ));

    context.consumerGroupHeartbeat(
        new ConsumerGroupHeartbeatRequestData()
            .setGroupId(groupId)
            .setMemberId(memberId)
            .setMemberEpoch(0)
            .setServerAssignor("range")
            .setRebalanceTimeoutMs(5000)
            .setSubscribedTopicNames(Arrays.asList("foo", "bar"))
            .setTopicPartitions(Collections.emptyList()));
    verify(context.metrics).record(CONSUMER_GROUP_REBALANCES_SENSOR_NAME);
}
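// Hedged illustration (hypothetical helper, predicate restated from the focal
// method): a heartbeat counts as a "full request" when joining with epoch 0 or
// when every non-optional field is set; only then is the assignment always echoed.
static boolean isFullRequest(int memberEpoch, int rebalanceTimeoutMs,
                             List<String> subscribedTopicNames,
                             List<ConsumerGroupHeartbeatRequestData.TopicPartitions> ownedTopicPartitions) {
    return memberEpoch == 0
        || (rebalanceTimeoutMs != -1 && subscribedTopicNames != null && ownedTopicPartitions != null);
}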