Columns: focal_method (string, 13 to 60.9k chars) · test_case (string, 25 to 109k chars)
@Override public void writeFloat(final float v) throws IOException { writeInt(Float.floatToIntBits(v)); }
@Test public void testWriteFloatForPositionVByteOrder() throws Exception { float v = 1.1f; out.writeFloat(1, v, LITTLE_ENDIAN); int expected = Float.floatToIntBits(v); int actual = Bits.readIntL(out.buffer, 1); assertEquals(actual, expected); }
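A minimal, self-contained sketch (plain JDK, class name hypothetical) of the round trip the pair above relies on: writeFloat persists the IEEE-754 bit pattern of the float, so converting to int bits and back is exact.

public class FloatBitsRoundTrip {
    public static void main(String[] args) {
        float v = 1.1f;
        int bits = Float.floatToIntBits(v);       // 0x3F8CCCCD, the IEEE-754 encoding of 1.1f
        float back = Float.intBitsToFloat(bits);  // exactly 1.1f again
        System.out.println(Float.compare(v, back) == 0); // true
    }
}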
@Override public ExportResult<PhotosContainerResource> export( UUID jobId, TokensAndUrlAuthData authData, Optional<ExportInformation> exportInformation) throws CopyExceptionWithFailureReason { Preconditions.checkNotNull(authData); if (!exportInformation.isPresent()) { // No export information if at the start of a bulk export // Start by getting the list of albums to export return exportAlbums(authData, Optional.empty()); } StringPaginationToken paginationToken = (StringPaginationToken) exportInformation.get().getPaginationData(); ContainerResource containerResource = exportInformation.get().getContainerResource(); boolean containerResourcePresent = containerResource != null; boolean paginationDataPresent = paginationToken != null; if (!containerResourcePresent && paginationDataPresent && paginationToken.getToken().startsWith(ALBUM_TOKEN_PREFIX)) { // Continue exporting albums return exportAlbums(authData, Optional.of(paginationToken)); } else if (containerResourcePresent && containerResource instanceof PhotosContainerResource) { // We have had albums specified from the front end so process them for import PhotosContainerResource photosContainerResource = (PhotosContainerResource) containerResource; Preconditions.checkNotNull(photosContainerResource.getAlbums()); ContinuationData continuationData = new ContinuationData(null); for (PhotoAlbum album : photosContainerResource.getAlbums()) { continuationData.addContainerResource(new IdOnlyContainerResource(album.getId())); } return new ExportResult<>( ExportResult.ResultType.CONTINUE, photosContainerResource, continuationData); } else if (containerResourcePresent && containerResource instanceof IdOnlyContainerResource) { // Export photos return exportPhotos( jobId, authData, (IdOnlyContainerResource) containerResource, Optional.ofNullable(paginationToken)); } else { throw new IllegalStateException( String.format( "Invalid state passed into FacebookPhotosExporter. ExportInformation: %s", exportInformation)); } }
@Test public void testExportAlbum() throws CopyExceptionWithFailureReason { ExportResult<PhotosContainerResource> result = facebookPhotosExporter.export( uuid, new TokensAndUrlAuthData("accessToken", null, null), Optional.empty()); assertEquals(ExportResult.ResultType.CONTINUE, result.getType()); PhotosContainerResource exportedData = result.getExportedData(); assertEquals(1, exportedData.getAlbums().size()); assertEquals( new PhotoAlbum(ALBUM_ID, ALBUM_NAME, ALBUM_DESCRIPTION), exportedData.getAlbums().toArray()[0]); assertNull(result.getContinuationData().getPaginationData()); assertThat(result.getContinuationData().getContainerResources()) .contains(new IdOnlyContainerResource(ALBUM_ID)); }
<T extends PipelineOptions> T as(Class<T> iface) { checkNotNull(iface); checkArgument(iface.isInterface(), "Not an interface: %s", iface); T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { synchronized (this) { // double check existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { Registration<T> registration = PipelineOptionsFactory.CACHE .get() .validateWellFormed(iface, computedProperties.knownInterfaces); List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors(); Class<T> proxyClass = registration.getProxyClass(); existingOption = InstanceBuilder.ofType(proxyClass) .fromClass(proxyClass) .withArg(InvocationHandler.class, this) .build(); computedProperties = computedProperties.updated(iface, existingOption, propertyDescriptors); } } } return existingOption; }
@Test public void testDisplayDataIncludedForDisjointInterfaceHierarchies() { FooOptions fooOptions = PipelineOptionsFactory.as(FooOptions.class); fooOptions.setFoo("foo"); BarOptions barOptions = fooOptions.as(BarOptions.class); barOptions.setBar("bar"); DisplayData data = DisplayData.from(barOptions); assertThat(data, hasDisplayItem(allOf(hasKey("foo"), hasNamespace(FooOptions.class)))); assertThat(data, hasDisplayItem(allOf(hasKey("bar"), hasNamespace(BarOptions.class)))); }
void wakeup() { wokenup.set(true); lock.lock(); try { notEmptyCondition.signalAll(); } finally { lock.unlock(); } }
@Test public void testWakeup() throws Exception { try (FetchBuffer fetchBuffer = new FetchBuffer(logContext)) { final Thread waitingThread = new Thread(() -> { final Timer timer = time.timer(Duration.ofMinutes(1)); fetchBuffer.awaitNotEmpty(timer); }); waitingThread.start(); fetchBuffer.wakeup(); waitingThread.join(Duration.ofSeconds(30).toMillis()); assertFalse(waitingThread.isAlive()); } }
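The focal method above follows the standard signal-under-lock idiom. Here is a hedged, self-contained sketch (class and field names hypothetical, not the Kafka implementation) showing why the flag is set before the lock is taken: a waiter that re-checks the flag under the same lock can never miss a wakeup issued before it called await().

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

public class WakeupSketch {
    private final ReentrantLock lock = new ReentrantLock();
    private final Condition notEmpty = lock.newCondition();
    private final AtomicBoolean wokenup = new AtomicBoolean(false);

    void awaitNotEmpty() throws InterruptedException {
        lock.lock();
        try {
            while (!wokenup.get()) {   // guards against spurious wakeups
                notEmpty.await();
            }
        } finally {
            lock.unlock();
        }
    }

    void wakeup() {
        wokenup.set(true);             // visible to waiters before the signal
        lock.lock();
        try {
            notEmpty.signalAll();
        } finally {
            lock.unlock();
        }
    }
}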
@Restricted(NoExternalUse.class) public static Icon tryGetIcon(String iconGuess) { // Jenkins Symbols don't have metadata so return null if (iconGuess == null || iconGuess.startsWith("symbol-")) { return null; } Icon iconMetadata = IconSet.icons.getIconByClassSpec(iconGuess); // If `iconGuess` contains whitespace it is a list of class names. // It may contain extra CSS classes unrelated to icons, // so keep only the classes with the `icon-` prefix. if (iconMetadata == null && iconGuess.contains(" ")) { iconMetadata = IconSet.icons.getIconByClassSpec(filterIconNameClasses(iconGuess)); } if (iconMetadata == null) { // Icon could be provided as a simple iconFileName e.g. "help.svg" iconMetadata = IconSet.icons.getIconByClassSpec(IconSet.toNormalizedIconNameClass(iconGuess) + " icon-md"); } if (iconMetadata == null) { // Icon could be provided as an absolute iconFileName e.g. "/plugin/foo/abc.png" iconMetadata = IconSet.icons.getIconByUrl(iconGuess); } return iconMetadata; }
@Test public void tryGetIcon_shouldReturnNullForUnknown() throws Exception { assertThat(Functions.tryGetIcon("icon-nosuchicon"), is(nullValue())); }
@Override public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws IOException, ServletException { doHttpFilter((HttpServletRequest) req, (HttpServletResponse) resp, chain); }
@Test public void do_not_set_frame_protection_on_integration_resources_with_context() throws Exception { HttpServletRequest request = mock(HttpServletRequest.class); when(request.getMethod()).thenReturn("GET"); when(request.getRequestURI()).thenReturn("/sonarqube/integration/github"); when(request.getContextPath()).thenReturn("/sonarqube"); underTest.doFilter(request, response, chain); verify(response, never()).setHeader(eq("X-Frame-Options"), anyString()); verify(response).setHeader("X-XSS-Protection", "1; mode=block"); verify(response).setHeader("X-Content-Type-Options", "nosniff"); }
@Override public KsMaterializedQueryResult<WindowedRow> get( final GenericKey key, final int partition, final Range<Instant> windowStartBounds, final Range<Instant> windowEndBounds, final Optional<Position> position ) { try { final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds); final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds); final WindowKeyQuery<GenericKey, ValueAndTimestamp<GenericRow>> query = WindowKeyQuery.withKeyAndWindowStartRange(key, lower, upper); StateQueryRequest<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> request = inStore(stateStore.getStateStoreName()).withQuery(query); if (position.isPresent()) { request = request.withPositionBound(PositionBound.at(position.get())); } final KafkaStreams streams = stateStore.getKafkaStreams(); final StateQueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> result = streams.query(request); final QueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> queryResult = result.getPartitionResults().get(partition); if (queryResult.isFailure()) { throw failedQueryException(queryResult); } if (queryResult.getResult() == null) { return KsMaterializedQueryResult.rowIteratorWithPosition( Collections.emptyIterator(), queryResult.getPosition()); } try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it = queryResult.getResult()) { final Builder<WindowedRow> builder = ImmutableList.builder(); while (it.hasNext()) { final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next(); final Instant windowStart = Instant.ofEpochMilli(next.key); if (!windowStartBounds.contains(windowStart)) { continue; } final Instant windowEnd = windowStart.plus(windowSize); if (!windowEndBounds.contains(windowEnd)) { continue; } final TimeWindow window = new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli()); final WindowedRow row = WindowedRow.of( stateStore.schema(), new Windowed<>(key, window), next.value.value(), next.value.timestamp() ); builder.add(row); } return KsMaterializedQueryResult.rowIteratorWithPosition( builder.build().iterator(), queryResult.getPosition()); } } catch (final NotUpToBoundException | MaterializationException e) { throw e; } catch (final Exception e) { throw new MaterializationException("Failed to get value from materialized table", e); } }
@Test @SuppressWarnings("unchecked") public void shouldReturnValuesForOpenStartBounds() { // Given: final Range<Instant> start = Range.open( NOW, NOW.plusSeconds(10) ); final StateQueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> partitionResult = new StateQueryResult<>(); final QueryResult<WindowStoreIterator<ValueAndTimestamp<GenericRow>>> queryResult = QueryResult.forResult(fetchIterator); queryResult.setPosition(POSITION); partitionResult.addResult(PARTITION, queryResult); when(kafkaStreams.query(any(StateQueryRequest.class))).thenReturn(partitionResult); when(fetchIterator.hasNext()) .thenReturn(true) .thenReturn(true) .thenReturn(true) .thenReturn(false); when(fetchIterator.next()) .thenReturn(new KeyValue<>(start.lowerEndpoint().toEpochMilli(), VALUE_1)) .thenReturn(new KeyValue<>(start.lowerEndpoint().plusMillis(1).toEpochMilli(), VALUE_2)) .thenReturn(new KeyValue<>(start.upperEndpoint().toEpochMilli(), VALUE_3)) .thenThrow(new AssertionError()); // When: final KsMaterializedQueryResult<WindowedRow> result = table.get( A_KEY, PARTITION, start, Range.all()); // Then: final Iterator<WindowedRow> rowIterator = result.getRowIterator(); assertThat(rowIterator.hasNext(), is(true)); assertThat(rowIterator.next(), is( WindowedRow.of( SCHEMA, windowedKey(start.lowerEndpoint().plusMillis(1)), VALUE_2.value(), VALUE_2.timestamp() ) )); assertThat(result.getPosition(), not(Optional.empty())); assertThat(result.getPosition().get(), is(POSITION)); }
public static <F extends Future<Void>> Mono<Void> from(F future) { Objects.requireNonNull(future, "future"); if (future.isDone()) { if (!future.isSuccess()) { return Mono.error(FutureSubscription.wrapError(future.cause())); } return Mono.empty(); } return new ImmediateFutureMono<>(future); }
@SuppressWarnings("FutureReturnValueIgnored") @Test void testImmediateFutureMonoLater() { ImmediateEventExecutor eventExecutor = ImmediateEventExecutor.INSTANCE; Promise<Void> promise = eventExecutor.newPromise(); StepVerifier.create(FutureMono.from(promise)) .expectSubscription() .then(() -> promise.setFailure(new ClosedChannelException())) .expectError(AbortedException.class) .verify(Duration.ofSeconds(30)); }
@Override public Time getTime(final int columnIndex) throws SQLException { return (Time) ResultSetUtils.convertValue(mergeResultSet.getValue(columnIndex, Time.class), Time.class); }
@Test void assertGetTimeWithColumnIndex() throws SQLException { when(mergeResultSet.getValue(1, Time.class)).thenReturn(new Time(0L)); assertThat(shardingSphereResultSet.getTime(1), is(new Time(0L))); }
public boolean matchesBeacon(Beacon beacon) { // All identifiers must match, or the corresponding region identifier must be null. for (int i = mIdentifiers.size(); --i >= 0; ) { final Identifier identifier = mIdentifiers.get(i); Identifier beaconIdentifier = null; if (i < beacon.mIdentifiers.size()) { beaconIdentifier = beacon.getIdentifier(i); } if ((beaconIdentifier == null && identifier != null) || (beaconIdentifier != null && identifier != null && !identifier.equals(beaconIdentifier))) { return false; } } if (mBluetoothAddress != null && !mBluetoothAddress.equalsIgnoreCase(beacon.mBluetoothAddress)) { return false; } return true; }
@Test public void testBeaconMatchesRegionWithSingleNullIdentifierList() { Beacon beacon = new AltBeacon.Builder().setId1("1").setId2("2").setId3("3").setRssi(4) .setBeaconTypeCode(5).setTxPower(6).setBluetoothAddress("1:2:3:4:5:6").build(); ArrayList<Identifier> identifiers=new ArrayList<>(); identifiers.add(null); Region region=new Region("all-beacons-region",identifiers); assertTrue("Beacon should match region with first identifier null and shorter Identifier list", region.matchesBeacon(beacon)); }
@ProcessElement public ProcessContinuation processElement( @Element byte[] element, RestrictionTracker<OffsetRange, Long> tracker, WatermarkEstimator<Instant> watermarkEstimator, OutputReceiver<V> receiver) { if (tracker.currentRestriction() != null) { LOG.info( "Start processing element. Restriction = {}", tracker.currentRestriction().toString()); } SparkConsumer<V> sparkConsumer; Receiver<V> sparkReceiver; try { sparkReceiver = sparkReceiverBuilder.build(); } catch (Exception e) { LOG.error("Can not build Spark Receiver", e); throw new IllegalStateException("Spark Receiver was not built!"); } LOG.debug("Restriction {}", tracker.currentRestriction().toString()); sparkConsumer = new SparkConsumerWithOffset<>(tracker.currentRestriction().getFrom()); sparkConsumer.start(sparkReceiver); Long recordsProcessed = 0L; while (true) { LOG.debug("Start polling records"); try { TimeUnit.SECONDS.sleep(startPollTimeoutSec); } catch (InterruptedException e) { LOG.error("SparkReceiver was interrupted before polling started", e); throw new IllegalStateException("Spark Receiver was interrupted before polling started"); } if (!sparkConsumer.hasRecords()) { LOG.debug("No records left"); ((HasOffset) sparkReceiver).setCheckpoint(recordsProcessed); sparkConsumer.stop(); tracker.checkDone(); if (pullFrequencySec != 0L) { LOG.debug("Waiting to poll for new records..."); try { TimeUnit.SECONDS.sleep(pullFrequencySec); } catch (InterruptedException e) { LOG.error("SparkReceiver was interrupted while waiting to poll new records", e); throw new IllegalStateException( "Spark Receiver was interrupted while waiting to poll new records"); } } OffsetRange currentRestriction = tracker.currentRestriction(); if (currentRestriction != null && currentRestriction.getFrom() == currentRestriction.getTo()) { LOG.info("Stop for empty restriction: {}", currentRestriction); return ProcessContinuation.stop(); } else { LOG.info("Resume for restriction: {}", currentRestriction); return ProcessContinuation.resume(); } } while (sparkConsumer.hasRecords()) { V record = sparkConsumer.poll(); if (record != null) { Long offset = getOffsetFn.apply(record); if (!tracker.tryClaim(offset)) { ((HasOffset) sparkReceiver).setCheckpoint(recordsProcessed); sparkConsumer.stop(); LOG.info("Stop for restriction: {}", tracker.currentRestriction()); return ProcessContinuation.stop(); } Instant currentTimeStamp = getTimestampFn.apply(record); recordsProcessed++; ((ManualWatermarkEstimator<Instant>) watermarkEstimator).setWatermark(currentTimeStamp); receiver.outputWithTimestamp(record, currentTimeStamp); } } } }
@Test public void testProcessElement() { MockOutputReceiver receiver = new MockOutputReceiver(); DoFn.ProcessContinuation result = dofnInstance.processElement( TEST_ELEMENT, dofnInstance.restrictionTracker( TEST_ELEMENT, dofnInstance.initialRestriction(TEST_ELEMENT)), mockWatermarkEstimator, receiver); assertEquals(DoFn.ProcessContinuation.resume(), result); assertEquals( createExpectedRecords(CustomReceiverWithOffset.RECORDS_COUNT), receiver.getOutputs()); }
@Override public Map<ExecutionAttemptID, ExecutionDeploymentState> getExecutionsOn(ResourceID host) { return executionsByHost.getOrDefault(host, Collections.emptySet()).stream() .collect( Collectors.toMap( x -> x, x -> pendingDeployments.contains(x) ? ExecutionDeploymentState.PENDING : ExecutionDeploymentState.DEPLOYED)); }
@Test void testGetExecutionsReturnsEmptySetForUnknownHost() { final DefaultExecutionDeploymentTracker tracker = new DefaultExecutionDeploymentTracker(); assertThat(tracker.getExecutionsOn(ResourceID.generate()).entrySet()).isEmpty(); }
static CompositeParser of(Parser... parsers) { if (parsers == null || parsers.length == 0) throw new IllegalArgumentException("Unable to create CompositeParser"); return new CompositeParser(Arrays.asList(parsers)); }
@Test public void fails_when_no_successful_parsers() { assertScheduleNotPresent( CompositeParser.of(NON_MATCHING_PARSER, ANOTHER_NON_MATCHING_PARSER), ANY_SCHEDULE_STRING); }
public String migrate(String oldJSON, int targetVersion) { LOGGER.debug("Migrating to version {}: {}", targetVersion, oldJSON); Chainr transform = getTransformerFor(targetVersion); Object transformedObject = transform.transform(JsonUtils.jsonToMap(oldJSON), getContextMap(targetVersion)); String transformedJSON = JsonUtils.toJsonString(transformedObject); LOGGER.debug("After migration to version {}: {}", targetVersion, transformedJSON); return transformedJSON; }
@Test void migrateV3ToV4_shouldAddADefaultDisplayOrderWeightToPipelines() { ConfigRepoDocumentMother documentMother = new ConfigRepoDocumentMother(); String oldJSON = documentMother.v3Comprehensive(); String newJSON = documentMother.v4ComprehensiveWithDisplayOrderWeightOfMinusOneForBothPipelines(); String transformedJSON = migrator.migrate(oldJSON, 4); assertThatJson(newJSON).isEqualTo(transformedJSON); }
@Override public Config build() { return build(new Config()); }
@Override @Test public void testConfigurationURL() throws Exception { URL configURL = getClass().getClassLoader().getResource("hazelcast-default.yaml"); Config config = new YamlConfigBuilder(configURL).build(); assertEquals(configURL, config.getConfigurationUrl()); assertNull(config.getConfigurationFile()); }
@Override public Optional<FunctionDefinition> getFunctionDefinition(String name) { final String normalizedName = name.toUpperCase(Locale.ROOT); return Optional.ofNullable(normalizedFunctions.get(normalizedName)); }
@Test void testGetFunction() { assertThat(CoreModule.INSTANCE.getFunctionDefinition("CAST")) .hasValueSatisfying( def -> assertThat(def) .asInstanceOf(type(BuiltInFunctionDefinition.class)) .extracting(BuiltInFunctionDefinition::getQualifiedName) .isEqualTo("$CAST$1")); }
@Nullable public ResolvedAddressTypes resolvedAddressTypes() { return resolvedAddressTypes; }
@Test void resolvedAddressTypes() { assertThat(builder.build().resolvedAddressTypes()).isNull(); builder.resolvedAddressTypes(ResolvedAddressTypes.IPV4_ONLY); assertThat(builder.build().resolvedAddressTypes()).isEqualTo(ResolvedAddressTypes.IPV4_ONLY); }
public SignatureResponse getDigitalSignatureRestService(SignatureRequest request, String clientIp) { SignatureResponse response = new SignatureResponse(); EidSession session = initSession(request, clientIp, response); if (session == null) return response; // 1. create the digital signature to send back: // use the private key to sign the data built from the challenge, the iccpace // ephemeral public key, and the ephemeral key we created in the polymorphic info byte[] toSign = KeyUtils.calcDataToSign(request.getChallenge(), session.getEphemeralKey().getQ(), session.getIdpicc().data); byte[] signature = signatureService.sign(toSign, session.getAtReference(), false); response.setSignature(signature); // Result OK return response; }
@Test public void getDigitalSignatureRestServiceTest() { EidSession session = new EidSession(); session.setAtReference("SSSSSSSSSSSSSSSS"); session.setEphemeralKey(ephemeralKey); session.setIdpicc(ByteArray.fromBase64("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS")); SignatureRequest request = new SignatureRequest(); request.setHeader(createRequestHeader()); byte[] signature = "signature".getBytes(); doReturn(session).when(rdwService).initSession(any(AppRequest.class), eq(localhost), any(AppResponse.class)); when(signatureService.sign(any(byte[].class), eq("SSSSSSSSSSSSSSSS"), eq(false))).thenReturn(signature); SignatureResponse result = rdwService.getDigitalSignatureRestService(request, localhost); assertEquals("OK", result.getStatus()); assertEquals(signature, result.getSignature()); verify(signatureService, times(1)).sign(any(byte[].class), eq("SSSSSSSSSSSSSSSS"), eq(false)); }
public StringSetData combine(StringSetData other) { if (this.stringSet().isEmpty()) { return other; } else if (other.stringSet().isEmpty()) { return this; } else { ImmutableSet.Builder<String> combined = ImmutableSet.builder(); combined.addAll(this.stringSet()); combined.addAll(other.stringSet()); return StringSetData.create(combined.build()); } }
@Test public void testCombine() { StringSetData singleElement = StringSetData.create(ImmutableSet.of("ab")); StringSetData multipleElement = StringSetData.create(ImmutableSet.of("cd", "ef")); StringSetData result = singleElement.combine(multipleElement); assertEquals(result.stringSet(), ImmutableSet.of("cd", "ef", "ab")); // the original StringSetData sets should remain unchanged assertEquals(singleElement.stringSet(), ImmutableSet.of("ab")); assertEquals(multipleElement.stringSet(), ImmutableSet.of("cd", "ef")); }
public void union(Block block) { currentBlockIndex++; ensureBlocksCapacity(currentBlockIndex + 1); blocks[currentBlockIndex] = block; int positionCount = block.getPositionCount(); int[] positions = new int[positionCount]; // Add the elements to the hash table. Since union can only increase the set size, there is no need to create a separate hashtable. int positionsIndex = 0; for (int i = 0; i < positionCount; i++) { int hashPosition = getInsertPosition(blockPositionByHash, getMaskedHash(hashPosition(elementType, block, i)), block, i); if (hashPosition != INVALID_POSITION) { // There is no need to test if adding element is successful since it's on the same hash table addElement(blockPositionByHash, hashPosition, block, i); positions[positionsIndex++] = i; } } getPositionsForBlocks().add(positionsList(positions, 0, positionsIndex)); size += positionsIndex; }
@Test public void testIntersectWithDistinctValues() { OptimizedTypedSet typedSet = new OptimizedTypedSet(BIGINT, BIGINT_DISTINCT_METHOD_HANDLE, POSITIONS_PER_PAGE); Block block = createLongSequenceBlock(0, POSITIONS_PER_PAGE - 1).appendNull(); typedSet.union(block); testIntersect(typedSet, block, block); block = createLongSequenceBlock(0, POSITIONS_PER_PAGE / 2 - 1).appendNull(); testIntersect(typedSet, block, block); block = createLongSequenceBlock(0, 1).appendNull(); testIntersect(typedSet, block, block); }
public static int nextInt(final int startInclusive, final int endExclusive) { checkParameters(startInclusive, endExclusive); int diff = endExclusive - startInclusive; if (diff == 0) { return startInclusive; } return startInclusive + RANDOM.nextInt(diff); }
@Test void testNextInt() { final int result = RandomUtils.nextInt(1, 199); assertTrue(result >= 1 && result < 199); }
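A short usage sketch (JDK Random only, class name hypothetical) of the half-open contract the test asserts: nextInt(start, end) draws uniformly from [start, end), and a zero-width range degenerates to start.

import java.util.Random;

public class HalfOpenRange {
    private static final Random RANDOM = new Random();

    static int nextInt(int startInclusive, int endExclusive) {
        int diff = endExclusive - startInclusive;
        if (diff == 0) {
            return startInclusive;                    // degenerate range: return the start by convention
        }
        return startInclusive + RANDOM.nextInt(diff); // uniform over [start, end)
    }

    public static void main(String[] args) {
        int r = nextInt(1, 199);
        System.out.println(r >= 1 && r < 199);        // always true
    }
}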
@Override public Map<String, T> members() { List<ChildData<T>> children = getActiveChildren(); children.sort(sequenceComparator); Map<String, T> members = new LinkedHashMap<>(); for (ChildData<T> child : children) { members.put(child.getPath(), child.getNode()); } return members; }
@Test public void testMembersWithStaleNodes() throws Exception { putChildData(group, PATH + "/001", "container1"); // stale putChildData(group, PATH + "/002", "container1"); putChildData(group, PATH + "/003", "container2"); // stale putChildData(group, PATH + "/004", "container3"); // stale putChildData(group, PATH + "/005", "container2"); putChildData(group, PATH + "/006", "container3"); Map<String, NodeState> members = group.members(); assertThat(members.size(), equalTo(3)); assertThat(members.get(PATH + "/002").getContainer(), equalTo("container1")); assertThat(members.get(PATH + "/005").getContainer(), equalTo("container2")); assertThat(members.get(PATH + "/006").getContainer(), equalTo("container3")); }
public static ScalarType createUnifiedDecimalType() { // for mysql compatibility return createUnifiedDecimalType(10, 0); }
@Test public void testCreateUnifiedDecimalType() { Config.enable_decimal_v3 = false; Assert.assertEquals( ScalarType.createUnifiedDecimalType(27, 3), ScalarType.createDecimalV2Type(27, 3)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(28, 9), ScalarType.createDecimalV2Type(28, 9)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(18, 10), ScalarType.createUnifiedDecimalType(18, 10)); Config.enable_decimal_v3 = true; Assert.assertEquals( ScalarType.createUnifiedDecimalType(9, 3), ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 9, 3)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(18, 15), ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL64, 18, 15)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(19, 15), ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 19, 15)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(27, 15), ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 27, 15)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(28, 28), ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 28, 28)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(38, 0), ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, 0)); Assert.assertEquals( ScalarType.createUnifiedDecimalType(38, 38), ScalarType.createDecimalV3Type(PrimitiveType.DECIMAL128, 38, 38)); Assert.assertThrows(Throwable.class, () -> ScalarType.createUnifiedDecimalType(39, 38)); Assert.assertThrows(Throwable.class, () -> ScalarType.createUnifiedDecimalType(10, 11)); }
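A hypothetical sketch (all names assumed, not the StarRocks API) of the mapping the test pins down for the decimal-v3 path: precisions 1..18 fit DECIMAL64, 19..38 need DECIMAL128, and wider precisions or scale > precision are rejected.

public class UnifiedDecimalSketch {
    static String unifiedDecimalKind(int precision, int scale) {
        if (precision < 1 || precision > 38 || scale < 0 || scale > precision) {
            throw new IllegalArgumentException("unsupported decimal(" + precision + ", " + scale + ")");
        }
        return precision <= 18 ? "DECIMAL64" : "DECIMAL128";
    }

    public static void main(String[] args) {
        System.out.println(unifiedDecimalKind(18, 15)); // DECIMAL64
        System.out.println(unifiedDecimalKind(19, 15)); // DECIMAL128
        unifiedDecimalKind(39, 38);                     // throws, as the test expects
    }
}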
public static String decode(String s) { final int n = s.length(); StringBuilder result = new StringBuilder(n); for (int i = 0; i < n; i++) { char c = s.charAt(i); if (c == '%') { int numCharsConsumed = decodeConsecutiveOctets(result, s, i); i += numCharsConsumed - 1; } else { result.append(c); } } return result.toString(); }
@Test(dataProvider = "validEncodedText") public void testDecodeValidStrings(String encoded, String expected) { String actual = URIDecoderUtils.decode(encoded); Assert.assertEquals(actual, expected, "Encoded string was incorrectly decoded."); }
public static MySQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) { Preconditions.checkArgument(BINARY_PROTOCOL_VALUES.containsKey(binaryColumnType), "Cannot find MySQL type '%s' in column type when process binary protocol value", binaryColumnType); return BINARY_PROTOCOL_VALUES.get(binaryColumnType); }
@Test void assertGetBinaryProtocolValueWithMySQLTypeShort() { assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.SHORT), instanceOf(MySQLInt2BinaryProtocolValue.class)); }
public static String getTypeName(final int type) { switch (type) { case START_EVENT_V3: return "Start_v3"; case STOP_EVENT: return "Stop"; case QUERY_EVENT: return "Query"; case ROTATE_EVENT: return "Rotate"; case INTVAR_EVENT: return "Intvar"; case LOAD_EVENT: return "Load"; case NEW_LOAD_EVENT: return "New_load"; case SLAVE_EVENT: return "Slave"; case CREATE_FILE_EVENT: return "Create_file"; case APPEND_BLOCK_EVENT: return "Append_block"; case DELETE_FILE_EVENT: return "Delete_file"; case EXEC_LOAD_EVENT: return "Exec_load"; case RAND_EVENT: return "RAND"; case XID_EVENT: return "Xid"; case USER_VAR_EVENT: return "User var"; case FORMAT_DESCRIPTION_EVENT: return "Format_desc"; case TABLE_MAP_EVENT: return "Table_map"; case PRE_GA_WRITE_ROWS_EVENT: return "Write_rows_event_old"; case PRE_GA_UPDATE_ROWS_EVENT: return "Update_rows_event_old"; case PRE_GA_DELETE_ROWS_EVENT: return "Delete_rows_event_old"; case WRITE_ROWS_EVENT_V1: return "Write_rows_v1"; case UPDATE_ROWS_EVENT_V1: return "Update_rows_v1"; case DELETE_ROWS_EVENT_V1: return "Delete_rows_v1"; case BEGIN_LOAD_QUERY_EVENT: return "Begin_load_query"; case EXECUTE_LOAD_QUERY_EVENT: return "Execute_load_query"; case INCIDENT_EVENT: return "Incident"; case HEARTBEAT_LOG_EVENT: case HEARTBEAT_LOG_EVENT_V2: return "Heartbeat"; case IGNORABLE_LOG_EVENT: return "Ignorable"; case ROWS_QUERY_LOG_EVENT: return "Rows_query"; case WRITE_ROWS_EVENT: return "Write_rows"; case UPDATE_ROWS_EVENT: return "Update_rows"; case DELETE_ROWS_EVENT: return "Delete_rows"; case GTID_LOG_EVENT: return "Gtid"; case ANONYMOUS_GTID_LOG_EVENT: return "Anonymous_Gtid"; case PREVIOUS_GTIDS_LOG_EVENT: return "Previous_gtids"; case PARTIAL_UPDATE_ROWS_EVENT: return "Update_rows_partial"; case TRANSACTION_CONTEXT_EVENT : return "Transaction_context"; case VIEW_CHANGE_EVENT : return "view_change"; case XA_PREPARE_LOG_EVENT : return "Xa_prepare"; case TRANSACTION_PAYLOAD_EVENT : return "transaction_payload"; default: return "Unknown type:" + type; } }
@Test public void getTypeNameInputPositiveOutputNotNull13() { // Arrange final int type = 15; // Act final String actual = LogEvent.getTypeName(type); // Assert result Assert.assertEquals("Format_desc", actual); }
@Override public Collection<DatabasePacket> execute() { connectionSession.getServerPreparedStatementRegistry().<MySQLServerPreparedStatement>getPreparedStatement(packet.getStatementId()).getLongData().clear(); return Collections.singleton(new MySQLOKPacket(ServerStatusFlagCalculator.calculateFor(connectionSession))); }
@Test void assertExecute() { ConnectionSession connectionSession = mock(ConnectionSession.class); when(connectionSession.getServerPreparedStatementRegistry()).thenReturn(new ServerPreparedStatementRegistry()); when(connectionSession.getTransactionStatus()).thenReturn(new TransactionStatus()); MySQLServerPreparedStatement preparedStatement = new MySQLServerPreparedStatement("", mock(SQLStatementContext.class), new HintValueContext(), Collections.emptyList()); preparedStatement.getLongData().put(0, new byte[0]); connectionSession.getServerPreparedStatementRegistry().addPreparedStatement(1, preparedStatement); MySQLComStmtResetPacket packet = mock(MySQLComStmtResetPacket.class); when(packet.getStatementId()).thenReturn(1); MySQLComStmtResetExecutor executor = new MySQLComStmtResetExecutor(packet, connectionSession); Collection<DatabasePacket> actual = executor.execute(); assertThat(actual.size(), is(1)); assertThat(actual.iterator().next(), instanceOf(MySQLOKPacket.class)); assertTrue(preparedStatement.getLongData().isEmpty()); }
public ValidationResult validateMessagesAndAssignOffsets(PrimitiveRef.LongRef offsetCounter, MetricsRecorder metricsRecorder, BufferSupplier bufferSupplier) { if (sourceCompressionType == CompressionType.NONE && targetCompression.type() == CompressionType.NONE) { // check the magic value if (!records.hasMatchingMagic(toMagic)) return convertAndAssignOffsetsNonCompressed(offsetCounter, metricsRecorder); else // Do in-place validation, offset assignment and maybe set timestamp return assignOffsetsNonCompressed(offsetCounter, metricsRecorder); } else return validateMessagesAndAssignOffsetsCompressed(offsetCounter, metricsRecorder, bufferSupplier); }
@Test public void testDifferentLevelDoesNotCauseRecompression() { List<byte[]> records = Arrays.asList( String.join("", Collections.nCopies(256, "some")).getBytes(), String.join("", Collections.nCopies(256, "data")).getBytes() ); // Records from the producer were created with gzip max level Compression gzipMax = Compression.gzip().level(CompressionType.GZIP.maxLevel()).build(); MemoryRecords recordsGzipMax = createRecords(records, RecordBatch.MAGIC_VALUE_V2, RecordBatch.NO_TIMESTAMP, gzipMax); // The topic is configured with gzip min level Compression gzipMin = Compression.gzip().level(CompressionType.GZIP.minLevel()).build(); MemoryRecords recordsGzipMin = createRecords(records, RecordBatch.MAGIC_VALUE_V2, RecordBatch.NO_TIMESTAMP, gzipMin); // Ensure data compressed with gzip max and min is different assertNotEquals(recordsGzipMax, recordsGzipMin); LogValidator validator = new LogValidator(recordsGzipMax, topicPartition, time, gzipMax.type(), gzipMin, false, RecordBatch.MAGIC_VALUE_V2, TimestampType.CREATE_TIME, 5000L, 5000L, RecordBatch.NO_PARTITION_LEADER_EPOCH, AppendOrigin.CLIENT, MetadataVersion.latestTesting() ); LogValidator.ValidationResult result = validator.validateMessagesAndAssignOffsets( PrimitiveRef.ofLong(0L), metricsRecorder, RequestLocal.withThreadConfinedCaching().bufferSupplier() ); // Ensure validated records have not been changed so they are the same as the producer records assertEquals(recordsGzipMax, result.validatedRecords); assertNotEquals(recordsGzipMin, result.validatedRecords); }
public static Node build(final List<JoinInfo> joins) { Node root = null; for (final JoinInfo join : joins) { if (root == null) { root = new Leaf(join.getLeftSource()); } if (root.containsSource(join.getRightSource()) && root.containsSource(join.getLeftSource())) { throw new KsqlException("Cannot perform circular join - both " + join.getRightSource() + " and " + join.getLeftJoinExpression() + " are already included in the current join tree: " + root.debugString(0)); } else if (root.containsSource(join.getLeftSource())) { root = new Join(root, new Leaf(join.getRightSource()), join); } else if (root.containsSource(join.getRightSource())) { root = new Join(root, new Leaf(join.getLeftSource()), join.flip()); } else { throw new KsqlException( "Cannot build JOIN tree; neither source in the join is the FROM source or included " + "in a previous JOIN: " + join + ". The current join tree is " + root.debugString(0) ); } } return root; }
@Test public void outputsCorrectJoinTreeString() { // Given: when(j1.getLeftSource()).thenReturn(a); when(j1.getRightSource()).thenReturn(b); when(j2.getLeftSource()).thenReturn(a); when(j2.getRightSource()).thenReturn(c); final List<JoinInfo> joins = ImmutableList.of(j1, j2); // When: final Node root = JoinTree.build(joins); // Then: assertThat(root.debugString(0), is( "⋈\n" + "+--⋈\n" + " +--a\n" + " +--b\n" + "+--c" )); }
@Override public long currentSystemTimeMs() { throw new UnsupportedOperationException("StateStores can't access system time."); }
@Test public void shouldThrowOnCurrentSystemTime() { assertThrows(UnsupportedOperationException.class, () -> context.currentSystemTimeMs()); }
@Override public int hashCode() { return value.hashCode(); }
@Test public void testHashCode() { LazilyParsedNumber n1 = new LazilyParsedNumber("1"); LazilyParsedNumber n1Another = new LazilyParsedNumber("1"); assertThat(n1Another.hashCode()).isEqualTo(n1.hashCode()); }
static Optional<ExecutorService> lookupExecutorServiceRef( CamelContext camelContext, String name, Object source, String executorServiceRef) { ExecutorServiceManager manager = camelContext.getExecutorServiceManager(); ObjectHelper.notNull(manager, ESM_NAME); ObjectHelper.notNull(executorServiceRef, "executorServiceRef"); // lookup in registry first and use existing thread pool if exists, // or create a new thread pool, assuming that the executor service ref is a thread pool ID return lookupByNameAndType(camelContext, executorServiceRef, ExecutorService.class) .or(() -> Optional.ofNullable(manager.newThreadPool(source, name, executorServiceRef))); }
@Test void testLookupExecutorServiceRefWithNullManager() { String name = "ThreadPool"; Object source = new Object(); String executorServiceRef = "ThreadPoolRef"; when(camelContext.getExecutorServiceManager()).thenReturn(null); Exception ex = assertThrows(IllegalArgumentException.class, () -> DynamicRouterRecipientListHelper.lookupExecutorServiceRef(camelContext, name, source, executorServiceRef)); assertEquals("ExecutorServiceManager must be specified", ex.getMessage()); }
public static ScheduledTaskHandler of(UUID uuid, String schedulerName, String taskName) { return new ScheduledTaskHandlerImpl(uuid, -1, schedulerName, taskName); }
@Test public void of_equalityDifferentSchedulers() { String urnA = "urn:hzScheduledTaskHandler:39ffc539-a356-444c-bec7-6f644462c208-1SchedulerTask"; String urnB = "urn:hzScheduledTaskHandler:39ffc539-a356-444c-bec7-6f644462c208-1Scheduler2Task"; assertNotEquals(ScheduledTaskHandler.of(urnA), ScheduledTaskHandler.of(urnB)); }
@Override public void sessionDidActivate(HttpSessionEvent event) { if (!instanceEnabled) { return; } // for getSessionCount SESSION_COUNT.incrementAndGet(); // for invalidateAllSession addSession(event.getSession()); }
@Test public void testSessionDidActivate() { sessionListener.sessionDidActivate(createSessionEvent()); if (SessionListener.getSessionCount() != 1) { fail("sessionDidActivate"); } if (SessionListener.getAllSessionsInformations().isEmpty()) { fail("sessionDidActivate"); } }
@Override public void run() { try { cleanup(); } catch (Exception e) { log.warn("Caught exception during Intent cleanup", e); } }
@Test public void corruptPoll() { IntentStoreDelegate mockDelegate = new IntentStoreDelegate() { @Override public void process(IntentData intentData) { intentData.setState(CORRUPT); store.write(intentData); } @Override public void notify(IntentEvent event) {} }; store.setDelegate(mockDelegate); Intent intent = new MockIntent(1L); Timestamp version = new SystemClockTimestamp(1L); IntentData data = new IntentData(intent, INSTALL_REQ, version); store.addPending(data); cleanup.run(); //FIXME broken? assertEquals("Expect number of submits incorrect", 1, service.submitCounter()); }
@Override public String toString() { return "Permission{" + "resource='" + resource + '\'' + ", action='" + action + '\'' + '}'; }
@Test void testToString() { assertEquals("Permission{resource='Resource{namespaceId='', group='', name='', type='', properties=null}', action='w'}", permission.toString()); }
public boolean hasMethod(String name) { for (String mn : getMethodNames()) { if (mn.equals(name)) { return true; } } return false; }
@Test void testHasMethod() throws Exception { Wrapper w = Wrapper.getWrapper(I1.class); Assertions.assertTrue(w.hasMethod("setName")); Assertions.assertTrue(w.hasMethod("hello")); Assertions.assertTrue(w.hasMethod("showInt")); Assertions.assertTrue(w.hasMethod("getFloat")); Assertions.assertTrue(w.hasMethod("setFloat")); Assertions.assertFalse(w.hasMethod("setFloatXXX")); }
public static SelectorConditionVO buildSelectorConditionVO(final SelectorConditionDO selectorConditionDO) { ParamTypeEnum paramTypeEnum = ParamTypeEnum.getParamTypeEnumByName(selectorConditionDO.getParamType()); OperatorEnum operatorEnum = OperatorEnum.getOperatorEnumByAlias(selectorConditionDO.getOperator()); return new SelectorConditionVO(selectorConditionDO.getId(), selectorConditionDO.getSelectorId(), selectorConditionDO.getParamType(), Optional.ofNullable(paramTypeEnum).map(ParamTypeEnum::getName).orElse(selectorConditionDO.getParamType()), selectorConditionDO.getOperator(), Optional.ofNullable(operatorEnum).map(OperatorEnum::getAlias).orElse(selectorConditionDO.getOperator()), selectorConditionDO.getParamName(), selectorConditionDO.getParamValue(), DateUtils.localDateTimeToString(selectorConditionDO.getDateCreated().toLocalDateTime()), DateUtils.localDateTimeToString(selectorConditionDO.getDateUpdated().toLocalDateTime())); }
@Test public void testBuildSelectorConditionVO() { Timestamp currentTime = new Timestamp(System.currentTimeMillis()); assertNotNull(SelectorConditionVO.buildSelectorConditionVO(SelectorConditionDO.builder() .paramType(ParamTypeEnum.POST.getName()) .operator(OperatorEnum.MATCH.getAlias()) .dateCreated(currentTime) .dateUpdated(currentTime) .build())); }
public void isNull() { standardIsEqualTo(null); }
@Test public void isNullFail() { Object o = new Object(); expectFailure.whenTesting().that(o).isNull(); assertFailureKeys("expected", "but was"); assertFailureValue("expected", "null"); }
public int getNumberOfPendingCheckpoints() { synchronized (lock) { return this.pendingCheckpoints.size(); } }
@Test void testPeriodicSchedulingWithInactiveTasks() throws Exception { CheckpointCoordinator checkpointCoordinator = setupCheckpointCoordinatorWithInactiveTasks(new MemoryStateBackend()); // the coordinator should start checkpointing now manuallyTriggeredScheduledExecutor.triggerNonPeriodicScheduledTasks( CheckpointCoordinator.ScheduledTrigger.class); manuallyTriggeredScheduledExecutor.triggerAll(); assertThat(checkpointCoordinator.getNumberOfPendingCheckpoints()).isGreaterThan(0); }
@Override public Page download(Request request, Task task) { if (task == null || task.getSite() == null) { throw new NullPointerException("task or site can not be null"); } CloseableHttpResponse httpResponse = null; CloseableHttpClient httpClient = getHttpClient(task.getSite()); Proxy proxy = proxyProvider != null ? proxyProvider.getProxy(request, task) : null; HttpClientRequestContext requestContext = httpUriRequestConverter.convert(request, task.getSite(), proxy); Page page = Page.fail(request); try { httpResponse = httpClient.execute(requestContext.getHttpUriRequest(), requestContext.getHttpClientContext()); page = handleResponse(request, request.getCharset() != null ? request.getCharset() : task.getSite().getCharset(), httpResponse, task); onSuccess(page, task); return page; } catch (IOException e) { onError(page, task, e); return page; } finally { if (httpResponse != null) { //ensure the connection is released back to pool EntityUtils.consumeQuietly(httpResponse.getEntity()); } if (proxyProvider != null && proxy != null) { proxyProvider.returnProxy(proxy, page, task); } } }
@Test public void test_set_site_cookie() throws Exception { HttpServer server = httpServer(13423); server.get(eq(cookie("cookie"), "cookie-webmagic")).response("ok"); Runner.running(server, new Runnable() { @Override public void run() throws Exception { HttpClientDownloader httpClientDownloader = new HttpClientDownloader(); Request request = new Request(); request.setUrl("http://127.0.0.1:13423"); Site site = Site.me().addCookie("cookie", "cookie-webmagic").setDomain("127.0.0.1"); Page page = httpClientDownloader.download(request, site.toTask()); assertThat(page.getRawText()).isEqualTo("ok"); } }); }
@Override public V put(K key, V value, Duration ttl) { return get(putAsync(key, value, ttl)); }
@Test public void testGetAllTTL() throws InterruptedException { RMapCacheNative<Integer, Integer> map = redisson.getMapCacheNative("getAll"); map.put(1, 100); map.put(2, 200, Duration.ofSeconds(1)); map.put(3, 300, Duration.ofSeconds(1)); map.put(4, 400); Map<Integer, Integer> filtered = map.getAll(new HashSet<Integer>(Arrays.asList(2, 3, 5))); Map<Integer, Integer> expectedMap = new HashMap<Integer, Integer>(); expectedMap.put(2, 200); expectedMap.put(3, 300); Assertions.assertEquals(expectedMap, filtered); Thread.sleep(1000); Map<Integer, Integer> filteredAgain = map.getAll(new HashSet<Integer>(Arrays.asList(2, 3, 5))); Assertions.assertTrue(filteredAgain.isEmpty()); map.destroy(); }
@Override public void require(long byteCount) throws IOException { if (!request(byteCount)) throw new EOFException(); }
@Test public void requireInsufficientData() throws Exception { Buffer source = new Buffer(); source.writeUtf8("a"); BufferedSource bufferedSource = new RealBufferedSource(source); try { bufferedSource.require(2); fail(); } catch (EOFException expected) { } }
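A short usage sketch of the pair of methods exercised above, using Okio's Buffer (which implements BufferedSource): request(n) reports whether n bytes can be buffered, while require(n) turns the same shortfall into an EOFException.

import okio.Buffer;

public class RequireSketch {
    public static void main(String[] args) throws java.io.IOException {
        Buffer source = new Buffer().writeUtf8("a");
        System.out.println(source.request(1)); // true: one byte is available
        System.out.println(source.request(2)); // false: only one byte buffered
        source.require(2);                     // throws java.io.EOFException
    }
}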
@Override public Object decode(Response response, Type type) throws IOException { JsonAdapter<Object> jsonAdapter = moshi.adapter(type); if (response.status() == 404 || response.status() == 204) return Util.emptyValueOf(type); if (response.body() == null) return null; try (BufferedSource source = Okio.buffer(Okio.source(response.body().asInputStream()))) { if (source.exhausted()) { return null; // empty body } return jsonAdapter.fromJson(source); } catch (JsonDataException e) { if (e.getCause() != null && e.getCause() instanceof IOException) { throw (IOException) e.getCause(); } throw e; } }
@Test void customObjectDecoder() throws Exception { final JsonAdapter<VideoGame> videoGameJsonAdapter = new Moshi.Builder().build().adapter(VideoGame.class); MoshiDecoder decoder = new MoshiDecoder(Collections.singleton(videoGameJsonAdapter)); VideoGame videoGame = new VideoGame("Super Mario", "Luigi", "Bowser"); Response response = Response.builder() .status(200) .reason("OK") .headers(Collections.emptyMap()) .request( Request.create(Request.HttpMethod.GET, "/api", Collections.emptyMap(), null, Util.UTF_8)) .body(videoGamesJson, UTF_8) .build(); VideoGame actual = (VideoGame) decoder.decode(response, videoGameJsonAdapter.getClass()); assertThat(actual) .isEqualToComparingFieldByFieldRecursively(videoGame); }
public JobState getState() { return state; }
@Test public void testUpdateNumOfDataErrorRowMoreThanMax(@Mocked GlobalStateMgr globalStateMgr) { RoutineLoadJob routineLoadJob = new KafkaRoutineLoadJob(); Deencapsulation.setField(routineLoadJob, "maxErrorNum", 0); Deencapsulation.setField(routineLoadJob, "maxBatchRows", 0); Deencapsulation.invoke(routineLoadJob, "updateNumOfData", 1L, 1L, 0L, 1L, 1L, false); Assert.assertEquals(RoutineLoadJob.JobState.PAUSED, routineLoadJob.getState()); ErrorReason reason = routineLoadJob.pauseReason; Assert.assertEquals(InternalErrorCode.TOO_MANY_FAILURE_ROWS_ERR, reason.getCode()); Assert.assertEquals( "Current error rows: 1 is more than max error num: 0. Check the 'TrackingSQL' field for detailed information. " + "If you are sure that the data has many errors, you can set 'max_error_number' property " + "to a greater value through ALTER ROUTINE LOAD and RESUME the job", reason.getMsg()); }
@Override public void execute(final List<String> args, final PrintWriter terminal) { CliCmdUtil.ensureArgCountBounds(args, 1, 1, HELP); final String filePath = args.get(0); final String content = loadScript(filePath); requestExecutor.makeKsqlRequest(content); }
@Test public void shouldThrowIfDirectory() throws Exception { // Given: final File dir = TMP.newFolder(); // When: final Exception e = assertThrows( KsqlException.class, () -> cmd.execute(ImmutableList.of(dir.toString()), terminal) ); // Then: assertThat(e.getMessage(), containsString( "Failed to read file: " + dir.toString())); assertThat(e.getCause(), (hasMessage(anyOf(containsString(dir.toString()), containsString("Is a directory"))))); }
@GetInitialRestriction public OffsetRange initialRestriction(@Element byte[] element) { return new OffsetRange(startOffset, Long.MAX_VALUE); }
@Test public void testInitialRestriction() { long expectedStartOffset = 0L; OffsetRange result = dofnInstance.initialRestriction(TEST_ELEMENT); assertEquals(new OffsetRange(expectedStartOffset, Long.MAX_VALUE), result); }
@ApiOperation(value = "Delete User (deleteUser)", notes = "Deletes the User, it's credentials and all the relations (from and to the User). " + "Referencing non-existing User Id will cause an error. " + SYSTEM_OR_TENANT_AUTHORITY_PARAGRAPH) @PreAuthorize("hasAnyAuthority('SYS_ADMIN', 'TENANT_ADMIN')") @RequestMapping(value = "/user/{userId}", method = RequestMethod.DELETE) @ResponseStatus(value = HttpStatus.OK) public void deleteUser( @Parameter(description = USER_ID_PARAM_DESCRIPTION) @PathVariable(USER_ID) String strUserId) throws ThingsboardException { checkParameter(USER_ID, strUserId); UserId userId = new UserId(toUUID(strUserId)); User user = checkUserId(userId, Operation.DELETE); if (user.getAuthority() == Authority.SYS_ADMIN && getCurrentUser().getId().equals(userId)) { throw new ThingsboardException("Sysadmin is not allowed to delete himself", ThingsboardErrorCode.PERMISSION_DENIED); } tbUserService.delete(getTenantId(), getCurrentUser().getCustomerId(), user, getCurrentUser()); }
@Test public void testDeleteUser() throws Exception { loginSysAdmin(); User user = createTenantAdminUser(); User savedUser = doPost("/api/user", user, User.class); User foundUser = doGet("/api/user/" + savedUser.getId().getId().toString(), User.class); Assert.assertNotNull(foundUser); doDelete("/api/user/" + savedUser.getId().getId().toString()) .andExpect(status().isOk()); String userIdStr = savedUser.getId().getId().toString(); doGet("/api/user/" + userIdStr) .andExpect(status().isNotFound()) .andExpect(statusReason(containsString(msgErrorNoFound("User", userIdStr)))); }
@Override public LookupResult<BrokerKey> handleResponse(Set<BrokerKey> keys, AbstractResponse abstractResponse) { validateLookupKeys(keys); MetadataResponse response = (MetadataResponse) abstractResponse; MetadataResponseData.MetadataResponseBrokerCollection brokers = response.data().brokers(); if (brokers.isEmpty()) { log.debug("Metadata response contained no brokers. Will backoff and retry"); return LookupResult.empty(); } else { log.debug("Discovered all brokers {} to send requests to", brokers); } Map<BrokerKey, Integer> brokerKeys = brokers.stream().collect(Collectors.toMap( broker -> new BrokerKey(OptionalInt.of(broker.nodeId())), MetadataResponseData.MetadataResponseBroker::nodeId )); return new LookupResult<>( Collections.singletonList(ANY_BROKER), Collections.emptyMap(), brokerKeys ); }
@Test public void testHandleResponse() { AllBrokersStrategy strategy = new AllBrokersStrategy(logContext); MetadataResponseData response = new MetadataResponseData(); response.brokers().add(new MetadataResponseData.MetadataResponseBroker() .setNodeId(1) .setHost("host1") .setPort(9092) ); response.brokers().add(new MetadataResponseData.MetadataResponseBroker() .setNodeId(2) .setHost("host2") .setPort(9092) ); AdminApiLookupStrategy.LookupResult<AllBrokersStrategy.BrokerKey> lookupResult = strategy.handleResponse( AllBrokersStrategy.LOOKUP_KEYS, new MetadataResponse(response, ApiKeys.METADATA.latestVersion()) ); assertEquals(Collections.emptyMap(), lookupResult.failedKeys); Set<AllBrokersStrategy.BrokerKey> expectedMappedKeys = mkSet( new AllBrokersStrategy.BrokerKey(OptionalInt.of(1)), new AllBrokersStrategy.BrokerKey(OptionalInt.of(2)) ); assertEquals(expectedMappedKeys, lookupResult.mappedKeys.keySet()); lookupResult.mappedKeys.forEach((brokerKey, brokerId) -> assertEquals(OptionalInt.of(brokerId), brokerKey.brokerId) ); }
public boolean isValid() { if (mIntervals.isEmpty()) { return false; } if (mIntervals.size() == 1 && mIntervals.get(0).equals(Interval.NEVER)) { return false; } return true; }
@Test public void isValid() { Assert.assertTrue(IntervalSet.ALWAYS.isValid()); Assert.assertFalse(IntervalSet.NEVER.isValid()); Assert.assertTrue(new IntervalSet(Interval.between(1, 2)).isValid()); Assert.assertTrue(new IntervalSet(Interval.before(10)).isValid()); Assert.assertTrue(new IntervalSet(Interval.after(10)).isValid()); Assert.assertFalse(new IntervalSet(Interval.between(2, 1)).isValid()); Assert.assertFalse(new IntervalSet(Collections.emptyList()).isValid()); }
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal number) { if ( number == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "number", "cannot be null")); } return FEELFnResult.ofResult( number.abs() ); }
@Test void absFunctionDuration() { FunctionTestUtil.assertResult(absFunction.invoke(Duration.ofSeconds(100, 50 )), Duration.ofSeconds(100, 50)); FunctionTestUtil.assertResult(absFunction.invoke(Duration.ofSeconds(-100, 50 )), Duration.ofSeconds(100, -50)); FunctionTestUtil.assertResult(absFunction.invoke(Duration.ofSeconds(100, -50 )), Duration.ofSeconds(100, -50)); FunctionTestUtil.assertResult(absFunction.invoke(Duration.ofSeconds(-100, -50 )), Duration.ofSeconds(100, 50)); FunctionTestUtil.assertResultError(absFunction.invoke((Duration)null), InvalidParametersEvent.class); }
static Timestamp parseTimeStamp(final String value) { try { // JDK format in Timestamp.valueOf is compatible with TIMESTAMP_FORMAT return Timestamp.valueOf(value); } catch (IllegalArgumentException e) { return throwRuntimeParseException(value, new ParseException(e.getMessage(), 0), TIMESTAMP_FORMAT); } }
@Test public void testTimestampWithTrailingZeros() throws Exception { // Given Timestamp expectedTimestamp = new Timestamp(new SimpleDateFormat(TIMESTAMP_FORMAT) .parse("2010-10-20 10:20:30.040") .getTime()); // When Timestamp actualTimestamp = DateHelper.parseTimeStamp(expectedTimestamp.toString()); // Then assertTimestampsEqual(expectedTimestamp, actualTimestamp); }
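A minimal sketch (plain JDK, class name hypothetical) of the round trip the test depends on: Timestamp.toString() drops trailing zeros from the fractional seconds, and Timestamp.valueOf() parses that form back losslessly.

import java.sql.Timestamp;

public class TimestampRoundTrip {
    public static void main(String[] args) {
        Timestamp t = Timestamp.valueOf("2010-10-20 10:20:30.040");
        System.out.println(t);                                         // 2010-10-20 10:20:30.04
        System.out.println(Timestamp.valueOf(t.toString()).equals(t)); // true
    }
}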
public static double lgamma(double x) { double xcopy = x; double fg; double first = x + LANCZOS_SMALL_GAMMA + 0.5; double second = LANCZOS_COEFF[0]; if (x >= 0.0) { if (x >= 1.0 && x - (int) x == 0.0) { fg = lfactorial((int) x - 1); } else { first -= (x + 0.5) * log(first); for (int i = 1; i <= LANCZOS_N; i++) { second += LANCZOS_COEFF[i] / ++xcopy; } fg = log(sqrt(2.0 * PI) * second / x) - first; } } else { fg = PI / (gamma(1.0 - x) * sin(PI * x)); if (Double.isFinite(fg)) { if (fg < 0) { throw new IllegalArgumentException("The gamma function is negative: " + fg); } else { fg = log(fg); } } } return fg; }
@Test public void testLogGamma() { System.out.println("lgamma"); assertTrue(Double.isInfinite(Gamma.lgamma(0))); assertEquals(0.0, Gamma.lgamma(1), 1E-7); assertEquals(0, Gamma.lgamma(2), 1E-7); assertEquals(Math.log(2.0), Gamma.lgamma(3), 1E-7); assertEquals(Math.log(6.0), Gamma.lgamma(4), 1E-7); assertEquals(-0.1207822, Gamma.lgamma(1.5), 1E-7); assertEquals(0.2846829, Gamma.lgamma(2.5), 1E-7); assertEquals(1.200974, Gamma.lgamma(3.5), 1E-6); assertEquals(2.453737, Gamma.lgamma(4.5), 1E-6); }
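A quick numeric check (plain JDK) of the integer shortcut in the focal method: for integer n >= 1 it returns log((n-1)!), so the expected values asserted in the test are just logs of factorials.

public class LgammaIdentity {
    public static void main(String[] args) {
        double logFact0 = Math.log(1.0);             // lgamma(1) = log(0!) = 0
        double logFact3 = Math.log(1.0 * 2.0 * 3.0); // lgamma(4) = log(3!) = log 6
        System.out.println(logFact0);                // 0.0
        System.out.println(logFact3);                // ≈ 1.791759469228055
    }
}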
@Override public String doLayout(ILoggingEvent event) { StringWriter output = new StringWriter(); try (JsonWriter json = new JsonWriter(output)) { json.beginObject(); if (!"".equals(nodeName)) { json.name("nodename").value(nodeName); } json.name("process").value(processKey); for (Map.Entry<String, String> entry : event.getMDCPropertyMap().entrySet()) { if (entry.getValue() != null && !exclusions.contains(entry.getKey())) { json.name(entry.getKey()).value(entry.getValue()); } } json .name("timestamp").value(DATE_FORMATTER.format(Instant.ofEpochMilli(event.getTimeStamp()))) .name("severity").value(event.getLevel().toString()) .name("logger").value(event.getLoggerName()) .name("message").value(NEWLINE_REGEXP.matcher(event.getFormattedMessage()).replaceAll("\r")); IThrowableProxy tp = event.getThrowableProxy(); if (tp != null) { json.name("stacktrace").beginArray(); int nbOfTabs = 0; while (tp != null) { printFirstLine(json, tp, nbOfTabs); render(json, tp, nbOfTabs); tp = tp.getCause(); nbOfTabs++; } json.endArray(); } json.endObject(); } catch (Exception e) { e.printStackTrace(); throw new IllegalStateException("BUG - fail to create JSON", e); } output.write(System.lineSeparator()); return output.toString(); }
@Test public void test_log_with_throwable_and_cause() { Throwable rootCause = new IllegalArgumentException("Root cause"); Throwable exception = new IllegalStateException("BOOM", rootCause); LoggingEvent event = new LoggingEvent("org.foundation.Caller", (Logger) LoggerFactory.getLogger("the.logger"), Level.WARN, "the message", exception, new Object[0]); String log = underTest.doLayout(event); JsonLog json = new Gson().fromJson(log, JsonLog.class); assertThat(json.stacktrace).hasSizeGreaterThan(5); assertThat(json.stacktrace[0]).isEqualTo("java.lang.IllegalStateException: BOOM"); assertThat(json.stacktrace[1]).contains("at org.sonar.process.logging.LogbackJsonLayoutTest.test_log_with_throwable_and_cause"); assertThat(json.stacktrace) .contains("\tCaused by: ") .contains("\tjava.lang.IllegalArgumentException: Root cause"); }
@Override public void validate() throws TelegramApiValidationException { if (inlineQueryId.isEmpty()) { throw new TelegramApiValidationException("InlineQueryId can't be empty", this); } for (InlineQueryResult result : results) { result.validate(); } if (button != null) { button.validate(); } }
@Test void testSwitchPmParameterIsMandatoryIfSwitchPmTextIsPresent() { answerInlineQuery.setInlineQueryId("RANDOMEID"); answerInlineQuery.setResults(new ArrayList<>()); answerInlineQuery.setButton(InlineQueryResultsButton .builder() .text("Test Text") .build()); try { answerInlineQuery.validate(); } catch (TelegramApiValidationException e) { assertEquals("SwitchPmParameter can't be empty if switchPmText is present", e.getMessage()); } }
public boolean initAndAddIssue(Issue issue) { DefaultInputComponent inputComponent = (DefaultInputComponent) issue.primaryLocation().inputComponent(); if (noSonar(inputComponent, issue)) { return false; } ActiveRule activeRule = activeRules.find(issue.ruleKey()); if (activeRule == null) { // rule does not exist or is not enabled -> ignore the issue return false; } ScannerReport.Issue rawIssue = createReportIssue(issue, inputComponent.scannerId(), activeRule.severity()); if (filters.accept(inputComponent, rawIssue)) { write(inputComponent.scannerId(), rawIssue); return true; } return false; }
@Test
public void ignore_null_rule_of_active_rule() {
    initModuleIssues();
    DefaultIssue issue = new DefaultIssue(project)
        .at(new DefaultIssueLocation().on(file).at(file.selectLine(3)).message("Foo"))
        .forRule(JAVA_RULE_KEY);

    boolean added = moduleIssues.initAndAddIssue(issue);

    assertThat(added).isFalse();
    verifyNoInteractions(reportPublisher);
}
@Override
public boolean equals(final Object o) {
    if (o == this) return true;
    if (o == null || o.getClass() != getClass()) return false;
    final MonetaryFormat other = (MonetaryFormat) o;
    if (!Objects.equals(this.negativeSign, other.negativeSign)) return false;
    if (!Objects.equals(this.positiveSign, other.positiveSign)) return false;
    if (!Objects.equals(this.zeroDigit, other.zeroDigit)) return false;
    if (!Objects.equals(this.decimalMark, other.decimalMark)) return false;
    if (!Objects.equals(this.minDecimals, other.minDecimals)) return false;
    if (!Objects.equals(this.decimalGroups, other.decimalGroups)) return false;
    if (!Objects.equals(this.shift, other.shift)) return false;
    if (!Objects.equals(this.roundingMode, other.roundingMode)) return false;
    if (!Arrays.equals(this.codes, other.codes)) return false;
    if (!Objects.equals(this.codeSeparator, other.codeSeparator)) return false;
    if (!Objects.equals(this.codePrefixed, other.codePrefixed)) return false;
    return true;
}
@Test
public void testEquals() {
    MonetaryFormat mf1 = new MonetaryFormat(true);
    MonetaryFormat mf2 = new MonetaryFormat(true);
    assertEquals(mf1, mf2);
}
public static Comparator<StructLike> forType(Types.StructType struct) {
    return new StructLikeComparator(struct);
}
@Test
public void testString() {
    assertComparesCorrectly(Comparators.forType(Types.StringType.get()), "a", "b");
}
@SuppressWarnings("unchecked") public <T extends Expression> T rewrite(final T expression, final C context) { return (T) rewriter.process(expression, context); }
@Test
public void shouldRewriteIsNullPredicate() {
    // Given:
    final IsNullPredicate parsed = parseExpression("col0 IS NULL");
    when(processor.apply(parsed.getValue(), context)).thenReturn(expr1);

    // When:
    final Expression rewritten = expressionRewriter.rewrite(parsed, context);

    // Then:
    assertThat(rewritten, equalTo(new IsNullPredicate(parsed.getLocation(), expr1)));
}
public static List<UpdateRequirement> forUpdateTable(
        TableMetadata base, List<MetadataUpdate> metadataUpdates) {
    Preconditions.checkArgument(null != base, "Invalid table metadata: null");
    Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");
    Builder builder = new Builder(base, false);
    builder.require(new UpdateRequirement.AssertTableUUID(base.uuid()));
    metadataUpdates.forEach(builder::update);
    return builder.build();
}
@Test
public void setLocation() {
    List<UpdateRequirement> requirements = UpdateRequirements.forUpdateTable(
        metadata, ImmutableList.of(new MetadataUpdate.SetLocation("location")));
    requirements.forEach(req -> req.validate(metadata));

    assertThat(requirements)
        .hasSize(1)
        .hasOnlyElementsOfTypes(UpdateRequirement.AssertTableUUID.class);

    assertTableUUID(requirements);
}
public void setInputFile( String file ) {
    this.inputFile = file;
}
@Test
public void setInputFile() {
    JobScheduleRequest jobScheduleRequest = mock( JobScheduleRequest.class );
    doCallRealMethod().when( jobScheduleRequest ).setInputFile( any() );
    String inputFile = "hitachi";

    jobScheduleRequest.setInputFile( inputFile );

    Assert.assertEquals( inputFile, ReflectionTestUtils.getField( jobScheduleRequest, "inputFile" ) );
}
@Override
public List<String> getTenantIdList(int page, int pageSize) {
    ConfigInfoMapper configInfoMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(),
        TableConstant.CONFIG_INFO);
    int from = (page - 1) * pageSize;
    MapperResult mapperResult = configInfoMapper.getTenantIdList(new MapperContext(from, pageSize));
    return jt.queryForList(mapperResult.getSql(), mapperResult.getParamList().toArray(), String.class);
}
@Test
void testGetTenantIdList() {
    int page = 10;
    int pageSize = 100;
    //mock select config state
    List<String> tenantStrings = Arrays.asList("tenant1", "tenant2", "tenant3");
    when(jdbcTemplate.queryForList(anyString(), eq(new Object[] {}), eq(String.class))).thenReturn(tenantStrings);
    //execute return mock obj
    List<String> returnTenants = externalConfigInfoPersistService.getTenantIdList(page, pageSize);
    //expect check
    assertEquals(tenantStrings, returnTenants);
}
@Override
public String[] split(String text) {
    if (splitContraction) {
        text = WONT_CONTRACTION.matcher(text).replaceAll("$1ill not");
        text = SHANT_CONTRACTION.matcher(text).replaceAll("$1ll not");
        text = AINT_CONTRACTION.matcher(text).replaceAll("$1m not");
        for (Pattern regexp : NOT_CONTRACTIONS) {
            text = regexp.matcher(text).replaceAll("$1 not");
        }
        for (Pattern regexp : CONTRACTIONS2) {
            text = regexp.matcher(text).replaceAll("$1 $2");
        }
        for (Pattern regexp : CONTRACTIONS3) {
            text = regexp.matcher(text).replaceAll("$1 $2 $3");
        }
    }

    text = DELIMITERS[0].matcher(text).replaceAll(" $1 ");
    text = DELIMITERS[1].matcher(text).replaceAll(" $1");
    text = DELIMITERS[2].matcher(text).replaceAll(" $1");
    text = DELIMITERS[3].matcher(text).replaceAll(" . ");
    text = DELIMITERS[4].matcher(text).replaceAll(" $1 ");

    String[] words = WHITESPACE.split(text);
    if (words.length > 1 && words[words.length - 1].equals(".")) {
        if (EnglishAbbreviations.contains(words[words.length - 2])) {
            words[words.length - 2] = words[words.length - 2] + ".";
        }
    }

    ArrayList<String> result = new ArrayList<>();
    for (String token : words) {
        if (!token.isEmpty()) {
            result.add(token);
        }
    }

    return result.toArray(new String[0]);
}
@Test
public void testTokenizeVariousSpaces() {
    System.out.println("tokenize words separated by various kinds of space");
    // No-break space and em-space
    String text = "the\u00A0cat\u2003the_cat";
    String[] expResult = {"the", "cat", "the_cat"};
    SimpleTokenizer instance = new SimpleTokenizer();
    String[] result = instance.split(text);
    assertEquals(expResult.length, result.length);
    for (int i = 0; i < result.length; i++) {
        assertEquals(expResult[i], result[i]);
    }
}
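/*
 * A minimal sketch of why the no-break space (U+00A0) and em space (U+2003) above are
 * tricky: Java's default "\s" class matches only ASCII whitespace, so a splitter built on
 * it keeps "the\u00A0cat" as one token unless the Unicode flag is set. How Smile's
 * WHITESPACE pattern is compiled is not shown in this row, so this is a plain-JDK
 * illustration of the pitfall, not the library's actual pattern.
 */
import java.util.Arrays;
import java.util.regex.Pattern;

public class UnicodeWhitespaceDemo {
    public static void main(String[] args) {
        String text = "the\u00A0cat\u2003dog";
        // Default "\\s" misses U+00A0 and U+2003, so the whole text stays one token.
        System.out.println(Arrays.toString(text.split("\\s+")));
        // UNICODE_CHARACTER_CLASS makes "\\s" follow the Unicode White_Space property.
        Pattern unicodeWs = Pattern.compile("\\s+", Pattern.UNICODE_CHARACTER_CLASS);
        System.out.println(Arrays.toString(unicodeWs.split(text))); // [the, cat, dog]
    }
}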
@Override
public Object adapt(final HttpAction action, final WebContext context) {
    if (action != null) {
        var code = action.getCode();
        val response = ((JEEContext) context).getNativeResponse();

        if (code < 400) {
            response.setStatus(code);
        } else {
            try {
                response.sendError(code);
            } catch (final IOException e) {
                throw new TechnicalException(e);
            }
        }

        if (action instanceof WithLocationAction withLocationAction) {
            context.setResponseHeader(HttpConstants.LOCATION_HEADER, withLocationAction.getLocation());
        } else if (action instanceof WithContentAction withContentAction) {
            val content = withContentAction.getContent();
            if (content != null) {
                try {
                    response.getWriter().write(content);
                } catch (final IOException e) {
                    throw new TechnicalException(e);
                }
            }
        }

        return null;
    }

    throw new TechnicalException("No action provided");
}
@Test(expected = TechnicalException.class)
public void testNullAction() {
    JEEHttpActionAdapter.INSTANCE.adapt(null, context);
}
@Override
@CheckForNull
public EmailMessage format(Notification notif) {
    if (!(notif instanceof ChangesOnMyIssuesNotification)) {
        return null;
    }
    ChangesOnMyIssuesNotification notification = (ChangesOnMyIssuesNotification) notif;

    if (notification.getChange() instanceof AnalysisChange) {
        checkState(!notification.getChangedIssues().isEmpty(), "changedIssues can't be empty");
        return formatAnalysisNotification(notification.getChangedIssues().keySet().iterator().next(), notification);
    }
    return formatMultiProject(notification);
}
@Test
public void format_sets_subject_with_project_name_of_first_issue_in_set_when_change_from_Analysis() {
    Set<ChangedIssue> changedIssues = IntStream.range(0, 2 + new Random().nextInt(4))
        .mapToObj(i -> newChangedIssue(i + "", randomValidStatus(), newProject("prj_" + i), newRandomNotAHotspotRule("rule_" + i)))
        .collect(toSet());
    AnalysisChange analysisChange = IssuesChangesNotificationBuilderTesting.newAnalysisChange();

    EmailMessage emailMessage = underTest.format(new ChangesOnMyIssuesNotification(analysisChange, changedIssues));

    Project project = changedIssues.iterator().next().getProject();
    assertThat(emailMessage.getSubject())
        .isEqualTo("Analysis has changed some of your issues in " + project.getProjectName());
}
@Override
public Result invoke(Invocation invocation) throws RpcException {
    Result result;

    String value = getUrl().getMethodParameter(
            RpcUtils.getMethodName(invocation), MOCK_KEY, Boolean.FALSE.toString())
        .trim();
    if (ConfigUtils.isEmpty(value)) {
        // no mock
        result = this.invoker.invoke(invocation);
    } else if (value.startsWith(FORCE_KEY)) {
        if (logger.isWarnEnabled()) {
            logger.warn(
                CLUSTER_FAILED_MOCK_REQUEST,
                "force mock",
                "",
                "force-mock: " + RpcUtils.getMethodName(invocation) + " force-mock enabled , url : " + getUrl());
        }
        // force:direct mock
        result = doMockInvoke(invocation, null);
    } else {
        // fail-mock
        try {
            result = this.invoker.invoke(invocation);

            // fix:#4585
            if (result.getException() != null && result.getException() instanceof RpcException) {
                RpcException rpcException = (RpcException) result.getException();
                if (rpcException.isBiz()) {
                    throw rpcException;
                } else {
                    result = doMockInvoke(invocation, rpcException);
                }
            }
        } catch (RpcException e) {
            if (e.isBiz()) {
                throw e;
            }
            if (logger.isWarnEnabled()) {
                logger.warn(
                    CLUSTER_FAILED_MOCK_REQUEST,
                    "failed to mock invoke",
                    "",
                    "fail-mock: " + RpcUtils.getMethodName(invocation) + " fail-mock enabled , url : " + getUrl(),
                    e);
            }
            result = doMockInvoke(invocation, e);
        }
    }
    return result;
}
@Test
void testMockInvokerInvoke_failmock() {
    URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
        .addParameter(
            REFER_KEY,
            URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=fail:return null"))
        .addParameter("invoke_return_error", "true");
    URL mockUrl = URL.valueOf("mock://localhost/" + IHelloService.class.getName())
        .addParameter("mock", "fail:return null")
        .addParameter("getSomething.mock", "return aa")
        .addParameter(REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName()))
        .addParameter("invoke_return_error", "true");

    Protocol protocol = new MockProtocol();
    Invoker<IHelloService> mInvoker1 = protocol.refer(IHelloService.class, mockUrl);
    Invoker<IHelloService> cluster = getClusterInvokerMock(url, mInvoker1);

    // Configured with mock
    RpcInvocation invocation = new RpcInvocation();
    invocation.setMethodName("getSomething");
    Result ret = cluster.invoke(invocation);
    Assertions.assertEquals("aa", ret.getValue());

    // If no mock was configured, return null directly
    invocation = new RpcInvocation();
    invocation.setMethodName("getSomething2");
    ret = cluster.invoke(invocation);
    Assertions.assertNull(ret.getValue());

    // If no mock was configured, return null directly
    invocation = new RpcInvocation();
    invocation.setMethodName("sayHello");
    ret = cluster.invoke(invocation);
    Assertions.assertNull(ret.getValue());
}
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStart,
    final Range<Instant> windowEnd,
    final Optional<Position> position
) {
    try {
        final ReadOnlySessionStore<GenericKey, GenericRow> store = stateStore
            .store(QueryableStoreTypes.sessionStore(), partition);
        return KsMaterializedQueryResult.rowIterator(
            findSession(store, key, windowStart, windowEnd).iterator());
    } catch (final Exception e) {
        throw new MaterializationException("Failed to get value from materialized table", e);
    }
}
@Test
public void shouldIgnoreSessionsThatStartAtUpperBoundIfUpperBoundOpen() {
    // Given:
    final Range<Instant> startBounds = Range.closedOpen(
        LOWER_INSTANT,
        UPPER_INSTANT
    );

    givenSingleSession(UPPER_INSTANT, UPPER_INSTANT.plusMillis(1));

    // When:
    final Iterator<WindowedRow> rowIterator =
        table.get(A_KEY, PARTITION, startBounds, Range.all()).rowIterator;

    // Then:
    assertThat(rowIterator.hasNext(), is(false));
}
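/*
 * The window bounds above are Guava Ranges, and the closed/open distinction is exactly
 * what this test exercises: Range.closedOpen includes the lower endpoint and excludes the
 * upper one. A self-contained sketch of that behavior (plain Guava illustration, not
 * ksqlDB code):
 */
import com.google.common.collect.Range;
import java.time.Instant;

public class RangeBoundsDemo {
    public static void main(String[] args) {
        Instant lower = Instant.ofEpochMilli(1_000);
        Instant upper = Instant.ofEpochMilli(2_000);
        Range<Instant> closedOpen = Range.closedOpen(lower, upper);
        System.out.println(closedOpen.contains(lower)); // true: lower bound is included
        System.out.println(closedOpen.contains(upper)); // false: a session starting exactly
                                                        // at the upper instant falls outside
    }
}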
public static int decodeConsecutiveOctets(StringBuilder dest, String s, int start) {
    final int n = s.length();
    if (start >= n) {
        throw new IllegalArgumentException("Cannot decode from index " + start + " of a length-" + n + " string");
    }
    if (s.charAt(start) != '%') {
        throw new IllegalArgumentException("Must begin decoding from a percent-escaped octet, but found '" + s.charAt(start) + "'");
    }
    if (start + 3 < n && s.charAt(start + 3) == '%') {
        // If there are multiple consecutive encoded octets, decode all into bytes
        ByteBuffer bb = decodeConsecutiveOctets(s, start);
        int numCharsConsumed = bb.limit() * 3;
        // Decode the bytes into a string
        decodeBytes(dest, bb);
        return numCharsConsumed;
    } else if (start + 2 < n) {
        // Else, decode just one octet
        byte b = decodeOctet(s, start + 1);
        decodeByte(dest, b);
        return 3;
    }
    throw new IllegalArgumentException("Malformed percent-encoded octet at index " + start);
}
@Test(dataProvider = "invalidConsecutiveOctetData") public void testDecodeInvalidConsecutiveOctets(String encoded, int startIndex, String expectedErrorMessage) { IllegalArgumentException exception = null; try { URIDecoderUtils.decodeConsecutiveOctets(new StringBuilder(), encoded, startIndex); } catch (IllegalArgumentException e) { exception = e; } Assert.assertNotNull(exception, "Expected exception when decoding consecutive bytes for string \"" + encoded + "\"."); Assert.assertEquals(exception.getMessage(), expectedErrorMessage, "Unexpected error message during decoding."); }
public static ImmutableList<String> glob(final String glob) {
    Path path = getGlobPath(glob);
    int globIndex = getGlobIndex(path);
    if (globIndex < 0) {
        return of(glob);
    }

    return doGlob(path, searchPath(path, globIndex));
}
@Test
public void should_glob_absolute_files(@TempDir final File folder) {
    File file = new File(folder, "glob-absolute.json");
    String path = file.getAbsolutePath();
    ImmutableList<String> files = Globs.glob(path);
    assertThat(files.contains(path), is(true));
}
public static ParamType getSchemaFromType(final Type type) {
    return getSchemaFromType(type, JAVA_TO_ARG_TYPE);
}
@Test
public void shouldGetFunction() throws NoSuchMethodException {
    final Type type = getClass().getDeclaredMethod("functionType", Function.class)
        .getGenericParameterTypes()[0];
    final ParamType schema = UdfUtil.getSchemaFromType(type);
    assertThat(schema, instanceOf(LambdaType.class));
    assertThat(((LambdaType) schema).inputTypes(), equalTo(ImmutableList.of(ParamTypes.LONG)));
    assertThat(((LambdaType) schema).returnType(), equalTo(ParamTypes.INTEGER));
}
@Override
protected boolean hasLeadership(String componentId, UUID leaderSessionId) {
    synchronized (lock) {
        if (leaderElectionDriver != null) {
            if (leaderContenderRegistry.containsKey(componentId)) {
                return leaderElectionDriver.hasLeadership() && leaderSessionId.equals(issuedLeaderSessionID);
            } else {
                LOG.debug(
                    "hasLeadership is called for component '{}' while there is no contender registered under that ID in the service, returning false.",
                    componentId);
                return false;
            }
        } else {
            LOG.debug("hasLeadership is called after the service is closed, returning false.");
            return false;
        }
    }
}
@Test
void testHasLeadershipWithLeadershipLostButNoRevokeEventProcessed() throws Exception {
    new Context() {
        {
            runTestWithManuallyTriggeredEvents(
                executorService -> {
                    final UUID expectedSessionID = UUID.randomUUID();

                    grantLeadership(expectedSessionID);
                    executorService.trigger();
                    revokeLeadership();

                    applyToBothContenderContexts(
                        ctx -> {
                            assertThat(
                                    leaderElectionService.hasLeadership(
                                        ctx.componentId, expectedSessionID))
                                .as(
                                    "No operation should be handled anymore after the HA backend "
                                        + "indicated leadership loss even if the onRevokeLeadership wasn't "
                                        + "processed, yet, because some other process could have picked up "
                                        + "the leadership in the meantime already based on the HA "
                                        + "backend's decision.")
                                .isFalse();

                            assertThat(
                                    leaderElectionService.hasLeadership(
                                        ctx.componentId, UUID.randomUUID()))
                                .isFalse();
                        });
                });
        }
    };
}
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    if ( val == null ) {
        return FEELFnResult.ofResult( null );
    } else {
        return FEELFnResult.ofResult( TypeUtil.formatValue(val, false) );
    }
}
@Test
void invokeOffsetDateTime() {
    final OffsetDateTime offsetDateTime = OffsetDateTime.now();
    FunctionTestUtil.assertResult(stringFunction.invoke(offsetDateTime),
        DateAndTimeFunction.FEEL_DATE_TIME.format(offsetDateTime));
}
public CreateTableBuilder withPkConstraintName(String pkConstraintName) {
    this.pkConstraintName = validateConstraintName(pkConstraintName);
    return this;
}
@Test
public void withPkConstraintName_throws_IAE_if_name_is_more_than_30_char_long() {
    assertThatThrownBy(() -> underTest.withPkConstraintName("abcdefghijklmnopqrstuvwxyzabcdf"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageContaining("Constraint name length can't be more than 30");
}
@SuppressWarnings("unchecked") public static <E extends Enum<E>> EnumSet<E> parseEnumSet(final String key, final String valueString, final Class<E> enumClass, final boolean ignoreUnknown) throws IllegalArgumentException { // build a map of lower case string to enum values. final Map<String, E> mapping = mapEnumNamesToValues("", enumClass); // scan the input string and add all which match final EnumSet<E> enumSet = noneOf(enumClass); for (String element : getTrimmedStringCollection(valueString)) { final String item = element.toLowerCase(Locale.ROOT); if ("*".equals(item)) { enumSet.addAll(mapping.values()); continue; } final E e = mapping.get(item); if (e != null) { enumSet.add(e); } else { // no match // unless configured to ignore unknown values, raise an exception checkArgument(ignoreUnknown, "%s: Unknown option value: %s in list %s." + " Valid options for enum class %s are: %s", key, element, valueString, enumClass.getName(), mapping.keySet().stream().collect(Collectors.joining(","))); } } return enumSet; }
@Test
public void testCaseConflictingEnumNotSupported() throws Throwable {
    intercept(IllegalArgumentException.class,
        ERROR_MULTIPLE_ELEMENTS_MATCHING_TO_LOWER_CASE_VALUE,
        () -> parseEnumSet("key", "c, unrecognized", CaseConflictingEnum.class, false));
}
@Override
public BlueRun getLatestRun() {
    Run run = job.getLastBuild();
    if (run instanceof FreeStyleBuild) {
        BlueRun blueRun = new FreeStyleRunImpl((FreeStyleBuild) run, this, organization);
        return new FreeStyleRunSummary(blueRun, run, this, organization);
    }
    return super.getLatestRun();
}
@Test @Issue("JENKINS-51716") public void findNonNumericRun() throws Exception { FreeStyleProject freestyle = Mockito.spy(j.createProject(FreeStyleProject.class, "freestyle")); FreeStyleBuild build1 = Mockito.mock(FreeStyleBuild.class); FreeStyleBuild build2 = Mockito.mock(FreeStyleBuild.class); Mockito.when(build1.getId()).thenReturn("build1"); Mockito.when(build1.getParent()).thenReturn(freestyle); Mockito.when(build1.getNextBuild()).thenReturn(build2); Mockito.when(build2.getId()).thenReturn("build2"); Mockito.when(build2.getParent()).thenReturn(freestyle); Mockito.when(build2.getPreviousBuild()).thenReturn(build1); RunList<FreeStyleBuild> runs = RunList.fromRuns(Arrays.asList(build1, build2)); Mockito.doReturn(runs).when(freestyle).getBuilds(); Mockito.doReturn(build2).when(freestyle).getLastBuild(); FreeStylePipeline freeStylePipeline = (FreeStylePipeline) BluePipelineFactory.resolve(freestyle); assertNotNull(freeStylePipeline); BlueRun blueRun = freeStylePipeline.getLatestRun(); assertNotNull(blueRun); Links links = blueRun.getLinks(); assertNotNull(links); assertNotNull(links.get("self")); }
boolean isCollection( BeanInjectionInfo.Property property ) {
    if ( property == null ) {
        return false;
    }
    BeanLevelInfo beanLevelInfo = getFinalPath( property );
    // Returning null here would NPE when auto-unboxed into the primitive boolean return
    // type, so a missing path is treated as "not a collection".
    return ( beanLevelInfo != null ) && isCollection( beanLevelInfo );
}
@Test
public void isCollection_False() {
    BeanInjector bi = new BeanInjector( null );
    BeanInjectionInfo bii = new BeanInjectionInfo( MetaBeanLevel1.class );

    BeanInjectionInfo.Property seperatorProperty = bii.getProperties().values().stream()
        .filter( p -> p.getName().equals( "SEPARATOR" ) ).findFirst().orElse( null );

    assertFalse( bi.isCollection( seperatorProperty ) );
}
public void start() throws Exception {
    AuthenticationService authenticationService = new AuthenticationService(
        PulsarConfigurationLoader.convertFrom(config));

    if (config.getBrokerClientAuthenticationPlugin() != null) {
        proxyClientAuthentication = AuthenticationFactory.create(config.getBrokerClientAuthenticationPlugin(),
            config.getBrokerClientAuthenticationParameters());
        Objects.requireNonNull(proxyClientAuthentication, "No supported auth found for proxy");
        try {
            proxyClientAuthentication.start();
        } catch (Exception e) {
            try {
                proxyClientAuthentication.close();
            } catch (IOException ioe) {
                log.error("Failed to close the authentication service", ioe);
            }
            throw new PulsarClientException.InvalidConfigurationException(e.getMessage());
        }
    } else {
        proxyClientAuthentication = AuthenticationDisabled.INSTANCE;
    }

    // create proxy service
    proxyService = new ProxyService(config, authenticationService, proxyClientAuthentication);
    // create a web-service
    server = new WebServer(config, authenticationService);

    if (!embeddedMode) {
        Runtime.getRuntime().addShutdownHook(new Thread(this::close));
    }

    proxyService.start();

    if (!metricsInitialized) {
        // Setup metrics
        DefaultExports.initialize();
        CollectorRegistry registry = CollectorRegistry.defaultRegistry;

        // Report direct memory from Netty counters
        Collector jvmMemoryDirectBytesUsed = Gauge.build("jvm_memory_direct_bytes_used", "-")
            .create()
            .setChild(new Child() {
                @Override
                public double get() {
                    return getJvmDirectMemoryUsed();
                }
            });
        try {
            registry.register(jvmMemoryDirectBytesUsed);
        } catch (IllegalArgumentException e) {
            // workaround issue in tests where the metric is already registered
            log.debug("Failed to register jvm_memory_direct_bytes_used metric: {}", e.getMessage());
        }

        Collector jvmMemoryDirectBytesMax = Gauge.build("jvm_memory_direct_bytes_max", "-")
            .create()
            .setChild(new Child() {
                @Override
                public double get() {
                    return DirectMemoryUtils.jvmMaxDirectMemory();
                }
            });
        try {
            registry.register(jvmMemoryDirectBytesMax);
        } catch (IllegalArgumentException e) {
            // workaround issue in tests where the metric is already registered
            log.debug("Failed to register jvm_memory_direct_bytes_max metric: {}", e.getMessage());
        }

        metricsInitialized = true;
    }

    AtomicReference<WebSocketService> webSocketServiceRef = new AtomicReference<>();
    addWebServerHandlers(server, config, proxyService, proxyService.getDiscoveryProvider(),
        webSocketServiceRef, proxyClientAuthentication);
    webSocketService = webSocketServiceRef.get();

    // start web-service
    server.start();
}
@Test
public void testProduceAndConsumeMessageWithWebsocket() throws Exception {
    @Cleanup("stop")
    HttpClient producerClient = new HttpClient();
    @Cleanup("stop")
    WebSocketClient producerWebSocketClient = new WebSocketClient(producerClient);
    producerWebSocketClient.start();
    MyWebSocket producerSocket = new MyWebSocket();
    String produceUri = computeWsBasePath() + "/producer/persistent/sample/test/local/websocket-topic";
    Future<Session> producerSession = producerWebSocketClient.connect(producerSocket, URI.create(produceUri));

    ProducerMessage produceRequest = new ProducerMessage();
    produceRequest.setContext("context");
    produceRequest.setPayload(Base64.getEncoder().encodeToString("my payload".getBytes()));

    @Cleanup("stop")
    HttpClient consumerClient = new HttpClient();
    @Cleanup("stop")
    WebSocketClient consumerWebSocketClient = new WebSocketClient(consumerClient);
    consumerWebSocketClient.start();
    MyWebSocket consumerSocket = new MyWebSocket();
    String consumeUri = computeWsBasePath() + "/consumer/persistent/sample/test/local/websocket-topic/my-sub";
    Future<Session> consumerSession = consumerWebSocketClient.connect(consumerSocket, URI.create(consumeUri));
    consumerSession.get().getRemote().sendPing(ByteBuffer.wrap("ping".getBytes()));
    producerSession.get().getRemote().sendString(ObjectMapperFactory.getMapper().writer().writeValueAsString(produceRequest));
    assertTrue(consumerSocket.getResponse().contains("ping"));
    ProducerMessage message = ObjectMapperFactory.getMapper().reader().readValue(consumerSocket.getResponse(), ProducerMessage.class);
    assertEquals(new String(Base64.getDecoder().decode(message.getPayload())), "my payload");
}
public static TableIdentifier toIcebergTableIdentifier(SnowflakeIdentifier identifier) {
    Preconditions.checkArgument(
        identifier.type() == SnowflakeIdentifier.Type.TABLE,
        "SnowflakeIdentifier must be type TABLE, got '%s'",
        identifier);
    return TableIdentifier.of(
        identifier.databaseName(), identifier.schemaName(), identifier.tableName());
}
@Test
public void testToIcebergTableIdentifier() {
    assertThat(
            NamespaceHelpers.toIcebergTableIdentifier(
                SnowflakeIdentifier.ofTable("DB1", "SCHEMA1", "TABLE1")))
        .isEqualTo(TableIdentifier.of("DB1", "SCHEMA1", "TABLE1"));
}
public List<String> getRequestNames() {
    return requestNames;
}
@Test
public void testDatabase() {
    for (final Database database : Database.values()) {
        final List<String> requestNames = database.getRequestNames();
        assertTrue("getRequestNames", requestNames != null && !requestNames.isEmpty());
        for (final String requestName : requestNames) {
            assertNotNull("getRequestByName", database.getRequestByName(requestName));
        }
    }
}
public HealthCheckResponse checkHealth() {
    final Map<String, HealthCheckResponseDetail> results = DEFAULT_CHECKS.stream()
        .collect(Collectors.toMap(
            Check::getName,
            check -> check.check(this)
        ));
    final boolean allHealthy = results.values().stream()
        .allMatch(HealthCheckResponseDetail::getIsHealthy);
    final State serverState = commandRunner.checkServerState();
    return new HealthCheckResponse(allHealthy, results, Optional.of(serverState.toString()));
}
@Test
public void shouldReturnUnhealthyIfKafkaCheckFails() {
    // Given:
    givenDescribeTopicsThrows(KafkaResponseGetFailedException.class);

    // When:
    final HealthCheckResponse response = healthCheckAgent.checkHealth();

    // Then:
    assertThat(response.getDetails().get(KAFKA_CHECK_NAME).getIsHealthy(), is(false));
    assertThat(response.getIsHealthy(), is(false));
}
public void addPoint(Point p) {
    mPoints.add(p);
}
@Test
public void json() throws Exception {
    IOTaskResult result = new IOTaskResult();
    result.addPoint(new IOTaskResult.Point(IOTaskResult.IOMode.READ, 100L, 20));
    result.addPoint(new IOTaskResult.Point(IOTaskResult.IOMode.WRITE, 100L, 5));

    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(result);
    IOTaskResult other = mapper.readValue(json, IOTaskResult.class);
    checkEquality(result, other);
}
public Optional<InstantAndValue<T>> getAndSet(MetricKey metricKey, Instant now, T value) {
    InstantAndValue<T> instantAndValue = new InstantAndValue<>(now, value);
    InstantAndValue<T> valueOrNull = counters.put(metricKey, instantAndValue);

    // there wasn't already an entry, so return empty.
    if (valueOrNull == null) {
        return Optional.empty();
    }

    // Return the previous instance and the value.
    return Optional.of(valueOrNull);
}
@Test
public void testGetAndSetDoubleWithTrackedValue() {
    LastValueTracker<Double> lastValueTracker = new LastValueTracker<>();
    lastValueTracker.getAndSet(METRIC_NAME, instant1, 1d);

    Optional<InstantAndValue<Double>> result = lastValueTracker
        .getAndSet(METRIC_NAME, instant2, 1000d);
    assertTrue(result.isPresent());
    assertEquals(instant1, result.get().getIntervalStart());
    assertEquals(1d, result.get().getValue(), 1e-6);
}
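/*
 * The previous instant-and-value that getAndSet hands back is exactly what a caller needs
 * to turn a monotonically increasing counter into a rate. A hypothetical sketch built on
 * the types above; the rate helper and its math are illustrative, not part of the library:
 */
import java.time.Duration;
import java.time.Instant;
import java.util.Optional;

public class RateFromCounter {
    private final LastValueTracker<Double> tracker = new LastValueTracker<>();

    /**
     * Returns units/second since the previous observation, or empty on the first call.
     * Assumes {@code now} is strictly after the previous observation.
     */
    Optional<Double> rate(MetricKey key, Instant now, double counterValue) {
        return tracker.getAndSet(key, now, counterValue).map(prev -> {
            double elapsedSec = Duration.between(prev.getIntervalStart(), now).toMillis() / 1000.0;
            return (counterValue - prev.getValue()) / elapsedSec;
        });
    }
}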
public static Builder builder(String testId) {
    return new Builder(testId);
}
@Test
public void testCreateResourceManagerThrowsCustomPortErrorWhenUsingStaticContainer() {
    assertThat(
            assertThrows(
                SplunkResourceManagerException.class,
                () -> SplunkResourceManager.builder(TEST_ID)
                    .setHost(HOST)
                    .useStaticContainer()
                    .build())
            .getMessage())
        .containsMatch("the hecPort and splunkdPort were not properly set");
}
@Override
public void putTaskConfigs(final String connName, final List<Map<String, String>> configs,
                           final Callback<Void> callback, InternalRequestSignature requestSignature) {
    log.trace("Submitting put task configuration request {}", connName);
    if (requestNotSignedProperly(requestSignature, callback)) {
        return;
    }

    addRequest(
        () -> {
            if (!isLeader())
                callback.onCompletion(new NotLeaderException("Only the leader may write task configurations.", leaderUrl()), null);
            else if (!configState.contains(connName))
                callback.onCompletion(new NotFoundException("Connector " + connName + " not found"), null);
            else {
                writeTaskConfigs(connName, configs);
                callback.onCompletion(null, null);
            }
            return null;
        },
        forwardErrorAndTickThreadStages(callback)
    );
}
@Test
public void testPutTaskConfigsValidRequiredSignature() {
    when(member.currentProtocolVersion()).thenReturn(CONNECT_PROTOCOL_V2);
    InternalRequestSignature signature = mock(InternalRequestSignature.class);
    when(signature.keyAlgorithm()).thenReturn("HmacSHA256");
    when(signature.isValid(any())).thenReturn(true);

    SessionKey sessionKey = mock(SessionKey.class);
    SecretKey secretKey = mock(SecretKey.class);
    when(sessionKey.key()).thenReturn(secretKey);
    when(sessionKey.creationTimestamp()).thenReturn(time.milliseconds());

    // Read a new session key from the config topic
    configUpdateListener.onSessionKeyUpdate(sessionKey);

    Callback<Void> taskConfigCb = mock(Callback.class);
    List<String> stages = expectRecordStages(taskConfigCb);
    herder.putTaskConfigs(CONN1, TASK_CONFIGS, taskConfigCb, signature);

    // Expect a wakeup call after the request to write task configs is added to the herder's request queue
    verify(member).wakeup();
    verifyNoMoreInteractions(member, taskConfigCb);

    assertEquals(
        singletonList("awaiting startup"),
        stages
    );
}
public ReportEntry createEntry(
    final long initialBytesLost,
    final long timestampMs,
    final int sessionId,
    final int streamId,
    final String channel,
    final String source)
{
    ReportEntry reportEntry = null;
    final int requiredCapacity =
        CHANNEL_OFFSET + BitUtil.align(SIZE_OF_INT + channel.length(), SIZE_OF_INT) + SIZE_OF_INT + source.length();

    if (requiredCapacity <= (buffer.capacity() - nextRecordOffset))
    {
        final int offset = nextRecordOffset;

        buffer.putLong(offset + TOTAL_BYTES_LOST_OFFSET, initialBytesLost);
        buffer.putLong(offset + FIRST_OBSERVATION_OFFSET, timestampMs);
        buffer.putLong(offset + LAST_OBSERVATION_OFFSET, timestampMs);
        buffer.putInt(offset + SESSION_ID_OFFSET, sessionId);
        buffer.putInt(offset + STREAM_ID_OFFSET, streamId);

        final int encodedChannelLength = buffer.putStringAscii(offset + CHANNEL_OFFSET, channel);
        buffer.putStringAscii(
            offset + CHANNEL_OFFSET + BitUtil.align(encodedChannelLength, SIZE_OF_INT), source);

        buffer.putLongOrdered(offset + OBSERVATION_COUNT_OFFSET, 1);

        reportEntry = new ReportEntry(buffer, offset);
        nextRecordOffset += BitUtil.align(requiredCapacity, ENTRY_ALIGNMENT);
    }

    return reportEntry;
}
@Test
void shouldUpdateEntry()
{
    final long initialBytesLost = 32;
    final int timestampMs = 7;
    final int sessionId = 3;
    final int streamId = 1;
    final String channel = "aeron:udp://stuff";
    final String source = "127.0.0.1:8888";

    final ReportEntry entry = lossReport.createEntry(
        initialBytesLost, timestampMs, sessionId, streamId, channel, source);

    final long additionBytesLost = 64;
    final long latestTimestamp = 10;
    entry.recordObservation(additionBytesLost, latestTimestamp);

    assertEquals(latestTimestamp, unsafeBuffer.getLong(LAST_OBSERVATION_OFFSET));
    assertEquals(initialBytesLost + additionBytesLost, unsafeBuffer.getLong(TOTAL_BYTES_LOST_OFFSET));
    assertEquals(2L, unsafeBuffer.getLong(OBSERVATION_COUNT_OFFSET));
}
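/*
 * createEntry writes fields at fixed offsets into a shared buffer and returns a flyweight
 * view, so recording an observation is just a couple of absolute puts with no per-update
 * allocation. A stripped-down sketch of the same pattern with Agrona; the two offsets and
 * the Entry record here are invented for the demo, not Aeron's real loss-report layout:
 */
import java.nio.ByteBuffer;
import org.agrona.concurrent.UnsafeBuffer;

public class FlyweightDemo {
    static final int BYTES_LOST_OFFSET = 0;
    static final int OBSERVATION_COUNT_OFFSET = 8;

    record Entry(UnsafeBuffer buffer, int offset) {
        void recordObservation(long bytesLost) {
            // Read-modify-write at fixed offsets; the buffer is the single source of truth.
            buffer.putLong(offset + BYTES_LOST_OFFSET,
                buffer.getLong(offset + BYTES_LOST_OFFSET) + bytesLost);
            buffer.putLong(offset + OBSERVATION_COUNT_OFFSET,
                buffer.getLong(offset + OBSERVATION_COUNT_OFFSET) + 1);
        }
    }

    public static void main(String[] args) {
        UnsafeBuffer buffer = new UnsafeBuffer(ByteBuffer.allocateDirect(64));
        Entry entry = new Entry(buffer, 0);
        entry.recordObservation(32);
        entry.recordObservation(64);
        System.out.println(buffer.getLong(BYTES_LOST_OFFSET));        // 96
        System.out.println(buffer.getLong(OBSERVATION_COUNT_OFFSET)); // 2
    }
}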
@Override
public HttpClientResponse execute(URI uri, String httpMethod, RequestHttpEntity requestHttpEntity)
        throws Exception {
    while (interceptors.hasNext()) {
        HttpClientRequestInterceptor nextInterceptor = interceptors.next();
        if (nextInterceptor.isIntercept(uri, httpMethod, requestHttpEntity)) {
            return nextInterceptor.intercept();
        }
    }
    return httpClientRequest.execute(uri, httpMethod, requestHttpEntity);
}
@Test
void testExecuteNotIntercepted() throws Exception {
    HttpClientResponse response = clientRequest.execute(URI.create("http://example.com"), "GET",
        new RequestHttpEntity(Header.EMPTY, Query.EMPTY));
    assertEquals(httpClientResponse, response);
}
@Override
public boolean tableExists(String dbName, String tblName) {
    return deltaOps.tableExists(dbName, tblName);
}
@Test
public void testTableExists() {
    Assert.assertTrue(deltaLakeMetadata.tableExists("db1", "table1"));
}
public static String getDataSourceUnitNode(final String databaseName, final String dataSourceName) {
    return String.join("/", getDataSourceUnitsNode(databaseName), dataSourceName);
}
@Test
void assertGetMetaDataDataSourceNode() {
    assertThat(DataSourceMetaDataNode.getDataSourceUnitNode("foo_db", "foo_ds"),
        is("/metadata/foo_db/data_sources/units/foo_ds"));
}
public static Expression appendNewLambdaToOld(LambdaExpr l1, LambdaExpr l2) {
    ExpressionStmt l1ExprStmt = (ExpressionStmt) l1.getBody();
    ExpressionStmt l2ExprStmt = (ExpressionStmt) l2.getBody();

    DrlxParseUtil.RemoveRootNodeResult removeRootNodeResult = DrlxParseUtil.removeRootNode(l2ExprStmt.getExpression());
    NodeWithOptionalScope<?> newExpr = (NodeWithOptionalScope<?>) removeRootNodeResult.getFirstChild();
    newExpr.setScope(l1ExprStmt.getExpression());

    l1.setBody(new ExpressionStmt(removeRootNodeResult.getWithoutRootNode()));
    return l1;
}
@Test
public void appendTwoMethodsToLambda() {
    LambdaExpr l1 = parseExpression("(_this) -> _this.getDueDate()");
    LambdaExpr l2 = parseExpression("(_this) -> _this.getTime().getTime()");
    Expression expected = parseExpression("(_this) -> _this.getDueDate().getTime().getTime()");

    Expression actual = LambdaUtil.appendNewLambdaToOld(l1, l2);

    assertThat(actual.toString()).isEqualTo(expected.toString());
}
public boolean isBuiltIn() {
    return mIsBuiltIn;
}
@Test
public void isBuiltIn() {
    assertTrue(mTestProperty.isBuiltIn());
}
@Override
public boolean fastPut(K key, V value, long ttl, TimeUnit ttlUnit) {
    return get(fastPutAsync(key, value, ttl, ttlUnit));
}
@Test
public void testFastPutExpiration() throws Exception {
    RMapCache<String, Object> mapCache = redisson.getMapCache("testFastPutExpiration");
    mapCache.fastPut("k1", "v1", 1, TimeUnit.SECONDS);
    Thread.sleep(1000);

    mapCache.fastPut("k1", "v2");
    assertThat(mapCache.get("k1")).isEqualTo("v2");
}
public static void print(Object obj) {
    print(TEMPLATE_VAR, obj);
}
@Test
public void printTest() {
    String[] a = {"abc", "bcd", "def"};
    Console.print(a);
    Console.log("This is Console print for {}.", "test");
}
public long bytesAfter(SegmentPointer mark) {
    assert mark.samePage(this.end);
    return end.distance(mark);
}
@Test
public void testBytesAfter() throws IOException {
    final MappedByteBuffer pageBuffer = Utils.createPageFile();
    final SegmentPointer begin = new SegmentPointer(0, 0);
    final SegmentPointer end = new SegmentPointer(0, 1023);
    final Segment segment = new Segment(pageBuffer, begin, end);

    assertEquals(0, segment.bytesAfter(end));
    assertEquals(1023, segment.bytesAfter(begin));
}
@Override
public void getConfig(StorServerConfig.Builder builder) {
    super.getConfig(builder);
    provider.getConfig(builder);
}
@Test
void testSplitAndJoin() {
    StorDistributormanagerConfig.Builder builder = new StorDistributormanagerConfig.Builder();
    parse("<cluster id=\"storage\">\n" +
        " <redundancy>3</redundancy>" +
        " <documents/>" +
        " <tuning>\n" +
        " <bucket-splitting max-documents=\"2K\" max-size=\"25M\" minimum-bits=\"8\" />\n" +
        " </tuning>\n" +
        " <group>" +
        " <node distribution-key=\"0\" hostalias=\"mockhost\"/>" +
        " </group>" +
        "</cluster>").getConfig(builder);

    StorDistributormanagerConfig conf = new StorDistributormanagerConfig(builder);
    assertEquals(2048, conf.splitcount());
    assertEquals(1024, conf.joincount());
    assertEquals(26214400, conf.splitsize());
    assertEquals(13107200, conf.joinsize());
    assertEquals(8, conf.minsplitcount());
    assertFalse(conf.inlinebucketsplitting());
}
@Override
public byte[] serialize(final String topic, final List<?> values) {
    if (values == null) {
        return null;
    }

    final T single = extractOnlyColumn(values, topic);
    return inner.serialize(topic, single);
}
@Test
public void shouldSerializeNewStyle() {
    // Given:
    final List<?> values = ImmutableList.of(DATA);

    // When:
    final byte[] result = serializer.serialize(TOPIC, HEADERS, values);

    // Then:
    verify(inner).serialize(TOPIC, HEADERS, DATA);
    assertThat(result, is(SERIALIZED));
}
public void rebuildNode(String fullPath) throws Exception {
    Preconditions.checkArgument(
        ZKPaths.getPathAndNode(fullPath).getPath().equals(path), "Node is not part of this cache: " + fullPath);
    Preconditions.checkState(state.get() == State.STARTED, "cache has been closed");

    ensurePath();
    internalRebuildNode(fullPath);

    // this is necessary so that any updates that occurred while rebuilding are taken
    // have to rebuild entire tree in case this node got deleted in the interim
    offerOperation(new RefreshOperation(this, RefreshMode.FORCE_GET_DATA_AND_STAT));
}
@Test
public void testRebuildNode() throws Exception {
    Timing timing = new Timing();
    PathChildrenCache cache = null;
    CuratorFramework client = CuratorFrameworkFactory.newClient(
        server.getConnectString(), timing.session(), timing.connection(), new RetryOneTime(1));
    try {
        client.start();
        client.create().creatingParentsIfNeeded().forPath("/test/one", "one".getBytes());

        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicInteger counter = new AtomicInteger();
        final Semaphore semaphore = new Semaphore(1);
        cache = new PathChildrenCache(client, "/test", true) {
            @Override
            void getDataAndStat(String fullPath) throws Exception {
                semaphore.acquire();
                counter.incrementAndGet();
                super.getDataAndStat(fullPath);
                latch.countDown();
            }
        };
        cache.start(PathChildrenCache.StartMode.BUILD_INITIAL_CACHE);
        assertTrue(timing.awaitLatch(latch));

        int saveCounter = counter.get();
        client.setData().forPath("/test/one", "alt".getBytes());
        cache.rebuildNode("/test/one");
        assertArrayEquals(cache.getCurrentData("/test/one").getData(), "alt".getBytes());
        assertEquals(saveCounter, counter.get());

        semaphore.release(1000);
        timing.sleepABit();
    } finally {
        CloseableUtils.closeQuietly(cache);
        TestCleanState.closeAndTestClean(client);
    }
}
public static void setCurator(CuratorFramework curator) {
    CURATOR_TL.set(curator);
}
@Test
public void testCreatingParentContainersIfNeeded() throws Exception {
    String connectString = zkServer.getConnectString();
    RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
    Configuration conf = getSecretConf(connectString);
    CuratorFramework curatorFramework = CuratorFrameworkFactory.builder()
        .connectString(connectString)
        .retryPolicy(retryPolicy)
        .build();
    curatorFramework.start();
    ZKDelegationTokenSecretManager.setCurator(curatorFramework);
    DelegationTokenManager tm1 = new DelegationTokenManager(conf, new Text("foo"));

    // When the init method is called,
    // the ZKDelegationTokenSecretManager#startThread method will be called,
    // and the creatingParentContainersIfNeeded will be called to create the nameSpace.
    tm1.init();

    String workingPath = "/" + conf.get(ZKDelegationTokenSecretManager.ZK_DTSM_ZNODE_WORKING_PATH,
        ZKDelegationTokenSecretManager.ZK_DTSM_ZNODE_WORKING_PATH_DEAFULT) + "/ZKDTSMRoot";

    // Check if the created NameSpace exists.
    Stat stat = curatorFramework.checkExists().forPath(workingPath);
    Assert.assertNotNull(stat);

    tm1.destroy();
    curatorFramework.close();
}
public static String decodeUsername(String username) {
    if (username.contains("CN=")) {
        try {
            return new LdapName(username).getRdns().stream()
                .filter(rdn -> rdn.getType().equalsIgnoreCase("cn"))
                .map(rdn -> rdn.getValue().toString()).collect(Collectors.joining());
        } catch (InvalidNameException e) {
            throw new IllegalArgumentException(e);
        }
    } else {
        return username;
    }
}
@Test
public void testDecodeUsername() {
    assertThat(KafkaUserModel.decodeUsername("CN=my-user"), is("my-user"));
    assertThat(KafkaUserModel.decodeUsername("CN=my-user,OU=my-org"), is("my-user"));
    assertThat(KafkaUserModel.decodeUsername("OU=my-org,CN=my-user"), is("my-user"));
}
@Override
public Upstream doSelect(final List<Upstream> upstreamList, final String ip) {
    final ConcurrentSkipListMap<Long, Upstream> treeMap = new ConcurrentSkipListMap<>();
    upstreamList.forEach(upstream -> IntStream.range(0, VIRTUAL_NODE_NUM).forEach(i -> {
        long addressHash = hash("SHENYU-" + upstream.getUrl() + "-HASH-" + i);
        treeMap.put(addressHash, upstream);
    }));
    long hash = hash(ip);
    SortedMap<Long, Upstream> lastRing = treeMap.tailMap(hash);
    if (!lastRing.isEmpty()) {
        return lastRing.get(lastRing.firstKey());
    }
    return treeMap.firstEntry().getValue();
}
@Test
void doSelectWithSuccess() {
    final HashLoadBalancer hashLoadBalancer = new HashLoadBalancer();
    final List<Upstream> upstreamList = new ArrayList<>();
    upstreamList.add(Upstream.builder().url("http://1.1.1.1/api").build());
    upstreamList.add(Upstream.builder().url("http://2.2.2.2/api").build());
    upstreamList.add(Upstream.builder().url("http://3.3.3.3/api").build());

    final Upstream upstream = hashLoadBalancer.doSelect(upstreamList, "127.0.0.1");
    assertEquals(upstreamList.get(2).getUrl(), upstream.getUrl());
}
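/*
 * The VIRTUAL_NODE_NUM loop in doSelect is standard consistent hashing: each upstream is
 * hashed onto the ring many times so keys spread evenly, and a lookup takes the first ring
 * position at or after the key's hash, wrapping to the start when the tail is empty. A
 * stripped-down sketch of the same ring; String.hashCode stands in for the real hash
 * function, which is an assumption:
 */
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;

public class ConsistentHashDemo {
    static String select(List<String> nodes, String key, int virtualNodes) {
        TreeMap<Integer, String> ring = new TreeMap<>();
        for (String node : nodes) {
            for (int i = 0; i < virtualNodes; i++) {
                ring.put((node + "#" + i).hashCode(), node); // spread each node around the ring
            }
        }
        // First ring position at or after the key's hash, wrapping to the start if needed.
        SortedMap<Integer, String> tail = ring.tailMap(key.hashCode());
        return tail.isEmpty() ? ring.firstEntry().getValue() : tail.get(tail.firstKey());
    }

    public static void main(String[] args) {
        List<String> nodes = List.of("http://1.1.1.1/api", "http://2.2.2.2/api", "http://3.3.3.3/api");
        System.out.println(select(nodes, "127.0.0.1", 5)); // same key maps to the same node every time
    }
}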
public final ResponseReceiver<?> get() {
    return request(HttpMethod.GET);
}
@Test
void serverInfiniteClientClose() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);

    disposableServer = createServer()
        .handle((req, resp) -> {
            req.withConnection(cn -> cn.onDispose(latch::countDown));
            return Flux.interval(Duration.ofSeconds(1))
                .flatMap(d -> resp.sendObject(Unpooled.EMPTY_BUFFER));
        })
        .bindNow();

    createHttpClientForContextWithPort()
        .get()
        .uri("/")
        .response()
        .block(Duration.ofSeconds(5));

    latch.await();
}