focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Sets the environment name this application is built with.
 *
 * @param environment the environment name to use (e.g. "develop", "test", "product")
 * @return this builder, for call chaining
 */
public ApplicationBuilder environment(String environment) {
    this.environment = environment;
    return getThis();
}
// Verifies the builder's default environment is "product" and that each call to
// environment(...) is reflected in the next build() result.
@Test void environment() { ApplicationBuilder builder = new ApplicationBuilder(); Assertions.assertEquals("product", builder.build().getEnvironment()); builder.environment("develop"); Assertions.assertEquals("develop", builder.build().getEnvironment()); builder.environment("test"); Assertions.assertEquals("test", builder.build().getEnvironment()); builder.environment("product"); Assertions.assertEquals("product", builder.build().getEnvironment()); }
/**
 * Removes every content pack whose meta id equals {@code id}.
 *
 * @param id the model id matched against {@code Identified.FIELD_META_ID}
 * @return the number of documents removed, as reported by the write result
 */
public int deleteById(ModelId id) { final DBQuery.Query query = DBQuery.is(Identified.FIELD_META_ID, id); final WriteResult<ContentPack, ObjectId> writeResult = dbCollection.remove(query); return writeResult.getN(); }
// Verifies deleteById removes all 3 revisions sharing the given meta id from the
// fixture and leaves the remaining 2 unrelated content packs untouched.
@Test @MongoDBFixtures("ContentPackPersistenceServiceTest.json") public void deleteById() { final int deletedContentPacks = contentPackPersistenceService.deleteById(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000")); final Set<ContentPack> contentPacks = contentPackPersistenceService.loadAll(); assertThat(deletedContentPacks).isEqualTo(3); assertThat(contentPacks) .hasSize(2) .noneMatch(contentPack -> contentPack.id().equals(ModelId.of("dcd74ede-6832-4ef7-9f69-deadbeef0000"))); }
/**
 * Injects inferred schemas into CREATE source and CREATE ... AS SELECT statements;
 * all other statement types pass through unchanged. A KsqlStatementException is
 * rethrown as-is, while any other KsqlException is wrapped into a
 * KsqlStatementException carrying the masked statement text.
 */
@SuppressWarnings("unchecked") @Override public <T extends Statement> ConfiguredStatement<T> inject( final ConfiguredStatement<T> statement ) { if (!(statement.getStatement() instanceof CreateSource) && !(statement.getStatement() instanceof CreateAsSelect)) { return statement; } try { if (statement.getStatement() instanceof CreateSource) { final ConfiguredStatement<CreateSource> createStatement = (ConfiguredStatement<CreateSource>) statement; return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement); } else { final ConfiguredStatement<CreateAsSelect> createStatement = (ConfiguredStatement<CreateAsSelect>) statement; return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse( createStatement); } } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { throw new KsqlStatementException( ErrorMessageUtil.buildErrorMessage(e), statement.getMaskedStatementText(), e.getCause()); } }
// Verifies that value schema inference for a CREATE STREAM keeps the declared
// HEADER and KEY columns intact and only injects the inferred value columns.
@Test public void shouldInjectValuesAndMaintainKeysAndHeadersForCs() { // Given: givenKeyAndValueInferenceSupported(); when(cs.getElements()).thenReturn(HEADER_AND_VALUE); // When: final ConfiguredStatement<CreateStream> result = injector.inject(csStatement); // Then: assertThat(result.getStatement().getElements(), is(combineElements(HEADER_ELEMENTS, INFERRED_KSQL_KEY_SCHEMA_STREAM, SOME_VALUE_ELEMENTS))); assertThat(result.getMaskedStatementText(), is( "CREATE STREAM `cs` (" + "`head` BYTES HEADER('header'), " + "`key` STRING KEY, " + "`bob` STRING) " + "WITH (KAFKA_TOPIC='some-topic', KEY_FORMAT='protobuf', VALUE_FORMAT='avro');" )); }
/**
 * Adds a filtered block to the chain, and when the superclass accepts it,
 * records how many transactions the block's filter matched.
 *
 * @param block the filtered block to add
 * @return whatever the superclass add reported
 */
@Override
public boolean add(FilteredBlock block) throws VerificationException, PrunedException {
    final boolean accepted = super.add(block);
    if (!accepted) {
        return false;
    }
    trackFilteredTransactions(block.getTransactionCount());
    return true;
}
// Smoke test: a fake transaction to the wallet's current receive address, mined
// into a fake block and added to the chain, must raise the wallet balance above zero.
@Test public void receiveCoins() throws Exception { Context.propagate(new Context(100, Coin.ZERO, false, true)); int height = 1; // Quick check that we can actually receive coins. Transaction tx1 = createFakeTx(TESTNET.network(), COIN, testNetWallet.currentReceiveKey().toAddress(ScriptType.P2PKH, BitcoinNetwork.TESTNET)); Block b1 = createFakeBlock(testNetStore, height, tx1).block; testNetChain.add(b1); assertTrue(testNetWallet.getBalance().signum() > 0); }
/**
 * Returns the value mapped to {@code key} in {@code target}, computing one via
 * {@code mappingFunction.apply(param1, param2)} and storing it when no non-null
 * mapping exists yet.
 *
 * <p>All five arguments are required to be non-null; the thrown
 * NullPointerException message names the offending parameter.
 *
 * @param target          the map to read from and possibly write to
 * @param key             the lookup key
 * @param mappingFunction applied to {@code param1} and {@code param2} to compute a missing value
 * @param param1          first argument for the mapping function
 * @param param2          second argument for the mapping function
 * @return the existing value, or the freshly computed one
 */
public static <K, C, V, T> V computeIfAbsent(Map<K, V> target, K key, BiFunction<C, T, V> mappingFunction, C param1, T param2) {
    Objects.requireNonNull(target, "target");
    Objects.requireNonNull(key, "key");
    Objects.requireNonNull(mappingFunction, "mappingFunction");
    Objects.requireNonNull(param1, "param1");
    Objects.requireNonNull(param2, "param2");
    final V existing = target.get(key);
    if (existing != null) {
        return existing;
    }
    final V computed = mappingFunction.apply(param1, param2);
    target.put(key, computed);
    return computed;
}
// Verifies that a null param2 is rejected with a NullPointerException whose
// message names the parameter.
// BUGFIX: the original caught Exception broadly and only asserted inside the
// catch, so the test passed vacuously when nothing (or the wrong type) was
// thrown. Track whether the expected NPE actually occurred and assert it.
@Test
public void computeIfAbsentNotExistParam2Test() {
    Map<String, Object> map = new HashMap<>();
    map.put("abc", "123");
    BiFunction<String, String, Object> mappingFunction = (a, b) -> a + b;
    boolean npeThrown = false;
    try {
        MapUtil.computeIfAbsent(map, "abc", mappingFunction, "param1", null);
    } catch (NullPointerException e) {
        npeThrown = true;
        Assert.isTrue(Objects.equals("param2", e.getMessage()));
    }
    Assert.isTrue(npeThrown);
}
/**
 * Decodes a raw GELF payload into a Message.
 *
 * <p>Steps: decompress/read the JSON (up to decompressSizeLimit), parse it
 * (logging the first ~400 characters on failure), validate the mandatory GELF
 * fields, derive the timestamp (falling back to the raw message's receive time
 * when the "timestamp" field is absent or non-positive), map the well-known
 * fields (full_message, file, line, level, facility), then copy every remaining
 * JSON field onto the message — skipping "version", "short_message"/"host"
 * (already used as message/source), reserved non-settable fields and null
 * values, stripping a leading GELF underscore from keys, and stringifying
 * container nodes.
 *
 * @throws IllegalStateException    when the payload is not parseable JSON
 * @throws IllegalArgumentException when GELF validation fails
 */
@Nullable @Override public Message decode(@Nonnull final RawMessage rawMessage) { final GELFMessage gelfMessage = new GELFMessage(rawMessage.getPayload(), rawMessage.getRemoteAddress()); final String json = gelfMessage.getJSON(decompressSizeLimit, charset); final JsonNode node; try { node = objectMapper.readTree(json); if (node == null) { throw new IOException("null result"); } } catch (final Exception e) { log.error("Could not parse JSON, first 400 characters: " + StringUtils.abbreviate(json, 403), e); throw new IllegalStateException("JSON is null/could not be parsed (invalid JSON)", e); } try { validateGELFMessage(node, rawMessage.getId(), rawMessage.getRemoteAddress()); } catch (IllegalArgumentException e) { log.trace("Invalid GELF message <{}>", node); throw e; } // Timestamp. final double messageTimestamp = timestampValue(node); final DateTime timestamp; if (messageTimestamp <= 0) { timestamp = rawMessage.getTimestamp(); } else { // we treat this as a unix timestamp timestamp = Tools.dateTimeFromDouble(messageTimestamp); } final Message message = messageFactory.createMessage( stringValue(node, "short_message"), stringValue(node, "host"), timestamp ); message.addField(Message.FIELD_FULL_MESSAGE, stringValue(node, "full_message")); final String file = stringValue(node, "file"); if (file != null && !file.isEmpty()) { message.addField("file", file); } final long line = longValue(node, "line"); if (line > -1) { message.addField("line", line); } // Level is set by server if not specified by client. final int level = intValue(node, "level"); if (level > -1) { message.addField("level", level); } // Facility is set by server if not specified by client. final String facility = stringValue(node, "facility"); if (facility != null && !facility.isEmpty()) { message.addField("facility", facility); } // Add additional data if there is some. 
final Iterator<Map.Entry<String, JsonNode>> fields = node.fields(); while (fields.hasNext()) { final Map.Entry<String, JsonNode> entry = fields.next(); String key = entry.getKey(); // Do not index useless GELF "version" field. if ("version".equals(key)) { continue; } // Don't include GELF syntax underscore in message field key. if (key.startsWith("_") && key.length() > 1) { key = key.substring(1); } // We already set short_message and host as message and source. Do not add as fields again. if ("short_message".equals(key) || "host".equals(key)) { continue; } // Skip standard or already set fields. if (message.getField(key) != null || Message.RESERVED_FIELDS.contains(key) && !Message.RESERVED_SETTABLE_FIELDS.contains(key)) { continue; } // Convert JSON containers to Strings, and pick a suitable number representation. final JsonNode value = entry.getValue(); final Object fieldValue; if (value.isContainerNode()) { fieldValue = value.toString(); } else if (value.isFloatingPointNumber()) { fieldValue = value.asDouble(); } else if (value.isIntegralNumber()) { fieldValue = value.asLong(); } else if (value.isNull()) { log.debug("Field [{}] is NULL. Skipping.", key); continue; } else if (value.isTextual()) { fieldValue = value.asText(); } else { log.debug("Field [{}] has unknown value type. Skipping.", key); continue; } message.addField(key, fieldValue); } return message; }
// Verifies that a GELF message with an empty mandatory "short_message" field is
// rejected with an IllegalArgumentException carrying the expected message and no cause.
@Test public void decodeFailsWithEmptyShortMessage() throws Exception { final String json = "{" + "\"version\": \"1.1\"," + "\"host\": \"example.org\"," + "\"short_message\": \"\"" + "}"; final RawMessage rawMessage = new RawMessage(json.getBytes(StandardCharsets.UTF_8)); assertThatIllegalArgumentException().isThrownBy(() -> codec.decode(rawMessage)) .withNoCause() .withMessageMatching("GELF message <[0-9a-f-]+> has empty mandatory \"short_message\" field."); }
/**
 * Compares two objects that must both be CharSequences (e.g. String vs. Avro Utf8),
 * delegating to compareCharSequence; rejects any non-CharSequence argument with a
 * RuntimeException naming both runtime classes.
 */
@Override public int compare(T o1, T o2) { if (!(o1 instanceof CharSequence) || !(o2 instanceof CharSequence)) { throw new RuntimeException("Attempted use of AvroCharSequenceComparator on non-CharSequence objects: " + o1.getClass().getName() + " and " + o2.getClass().getName()); } return compareCharSequence((CharSequence) o1, (CharSequence) o2); }
// Verifies the comparator orders plain Strings lexicographically, including
// empty strings, equal values, and prefix relationships.
@Test void compareString() { assertEquals(0, mComparator.compare("", "")); assertThat(mComparator.compare("", "a"), lessThan(0)); assertThat(mComparator.compare("a", ""), greaterThan(0)); assertEquals(0, mComparator.compare("a", "a")); assertThat(mComparator.compare("a", "b"), lessThan(0)); assertThat(mComparator.compare("b", "a"), greaterThan(0)); assertEquals(0, mComparator.compare("ab", "ab")); assertThat(mComparator.compare("a", "aa"), lessThan(0)); assertThat(mComparator.compare("aa", "a"), greaterThan(0)); assertThat(mComparator.compare("abc", "abcdef"), lessThan(0)); assertThat(mComparator.compare("abcdef", "abc"), greaterThan(0)); }
/**
 * Issues a full memory fence by delegating to {@code Unsafe.fullFence()}.
 * The suppression covers the Unsafe API deprecated for removal since JDK 22.
 */
@SuppressWarnings("removal") // Since JDK 22 public static void fullFence() { UnsafeAccess.UNSAFE.fullFence(); }
// Smoke test: fullFence() must complete without throwing.
@Test void fullFence() { MemoryAccess.fullFence(); }
/**
 * Convenience overload: finds the best matching supported MIME type for the
 * given Accept-style header by delegating to the Stream-based bestMatch.
 *
 * @param supported the supported MIME types
 * @param header    the Accept header value to match against
 * @return the best matching supported type
 */
public static String bestMatch(Collection<String> supported, String header) { return bestMatch(supported.stream(), header); }
// Verifies that every invalid header from the data provider is rejected with
// an InvalidMimeTypeException.
@Test(dataProvider = "invalidHeaders", expectedExceptions = InvalidMimeTypeException.class) public void testBestMatchForInvalidHeaders(List<String> supportedTypes, String header) { MIMEParse.bestMatch(supportedTypes, header); }
/**
 * Loosely compares two values for logical equality, coercing across common
 * types (booleans, numbers vs. strings, dates, strings, collections, maps,
 * enums, arrays) by dispatching to the type-specific compare(...) overloads.
 * Falls back to copying both sides into maps via FastBeanCopier and comparing
 * those when no specific overload applies.
 *
 * @param source one value (may be null)
 * @param target the other value (may be null)
 * @return true when the two values are considered logically equal
 */
public static boolean compare(Object source, Object target) {
    if (source == target) {
        return true;
    }
    if (source == null || target == null) {
        return false;
    }
    if (source.equals(target)) {
        return true;
    }
    if (source instanceof Boolean) {
        return compare(((Boolean) source), target);
    }
    if (source instanceof Number) {
        return compare(((Number) source), target);
    }
    if (target instanceof Number) {
        return compare(((Number) target), source);
    }
    if (source instanceof Date) {
        return compare(((Date) source), target);
    }
    if (target instanceof Date) {
        return compare(((Date) target), source);
    }
    if (source instanceof String) {
        return compare(((String) source), target);
    }
    if (target instanceof String) {
        return compare(((String) target), source);
    }
    if (source instanceof Collection) {
        return compare(((Collection) source), target);
    }
    if (target instanceof Collection) {
        return compare(((Collection) target), source);
    }
    if (source instanceof Map) {
        return compare(((Map) source), target);
    }
    if (target instanceof Map) {
        return compare(((Map) target), source);
    }
    // isEnum() is false for anonymous enum-constant subclasses, hence the extra
    // instanceof check on the same operand.
    if (source.getClass().isEnum() || source instanceof Enum) {
        return compare(((Enum) source), target);
    }
    // BUGFIX: this guard previously tested "source instanceof Enum", which is
    // always false here (the branch above already returned for enum sources),
    // so anonymous enum-constant targets were missed. Test target on both sides,
    // mirroring every other target branch.
    if (target.getClass().isEnum() || target instanceof Enum) {
        return compare(((Enum) target), source);
    }
    if (source.getClass().isArray()) {
        return compare(((Object[]) source), target);
    }
    if (target.getClass().isArray()) {
        return compare(((Object[]) target), source);
    }
    return compare(FastBeanCopier.copy(source, HashMap.class), FastBeanCopier.copy(target, HashMap.class));
}
// Verifies loose comparison coerces strings against dates, numbers (both
// directions), comma-separated lists vs. collections, and enum names.
@Test public void stringTest() { Assert.assertTrue(CompareUtils.compare("20180101", DateFormatter.fromString("20180101"))); Assert.assertTrue(CompareUtils.compare(1, "1")); Assert.assertTrue(CompareUtils.compare("1", 1)); Assert.assertTrue(CompareUtils.compare("1.0", 1.0D)); Assert.assertTrue(CompareUtils.compare("1.01", 1.01D)); Assert.assertTrue(CompareUtils.compare("1,2,3", Arrays.asList(1, 2, 3))); Assert.assertTrue(CompareUtils.compare("blue", TestEnumDic.BLUE)); Assert.assertTrue(CompareUtils.compare("BLUE", TestEnum.BLUE)); }
/**
 * Adds a handler at the head of the pipeline and returns this handler to allow
 * fluent chaining (the superclass returns its own type instead).
 */
@Override public ClientPoolHandler addFirst(String name, ChannelHandler handler) { super.addFirst(name, handler); return this; }
// Verifies a freshly constructed handler is empty and becomes non-empty after
// addFirst (a null name is accepted).
@Test public void addFirst() { ClientPoolHandler handler = new ClientPoolHandler(); Assert.assertTrue(handler.isEmpty()); handler.addFirst(null, new TestHandler()); Assert.assertFalse(handler.isEmpty()); }
/**
 * Writes the buffer's remaining bytes to the file at the current position
 * (or at end-of-file in append mode), advancing the position by the number of
 * bytes written and refreshing the file's last-modified time.
 *
 * <p>Blocking-channel protocol: beginBlocking()/endBlocking(completed) bracket
 * the operation so an asynchronous close raises AsynchronousCloseException;
 * the file's write lock is taken interruptibly, and an interrupt restores the
 * thread's interrupt flag and returns 0.
 *
 * @return the number of bytes written (0 when closed or interrupted mid-wait)
 */
@Override public int write(ByteBuffer src) throws IOException { checkNotNull(src); checkOpen(); checkWritable(); int written = 0; // will definitely either be assigned or an exception will be thrown synchronized (this) { boolean completed = false; try { if (!beginBlocking()) { return 0; // AsynchronousCloseException will be thrown } file.writeLock().lockInterruptibly(); try { if (append) { position = file.size(); } written = file.write(position, src); position += written; file.setLastModifiedTime(fileSystemState.now()); completed = true; } finally { file.writeLock().unlock(); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } finally { endBlocking(completed); } } return written; }
// Verifies negative positions/offsets/lengths are rejected: write(buf, -1) with
// IllegalArgumentException, and the gathering write overloads with IndexOutOfBoundsException.
@Test public void testWriteNegative() throws IOException { FileChannel channel = channel(regularFile(0), READ, WRITE); try { channel.write(buffer("111"), -1); fail(); } catch (IllegalArgumentException expected) { } ByteBuffer[] bufs = {buffer("111"), buffer("111")}; try { channel.write(bufs, -1, 10); fail(); } catch (IndexOutOfBoundsException expected) { } try { channel.write(bufs, 0, -1); fail(); } catch (IndexOutOfBoundsException expected) { } }
/**
 * Returns the inner materialization's windowed table wrapped in a
 * KsqlMaterializedWindowedTable so KSQL transforms are applied on reads.
 */
@Override public MaterializedWindowedTable windowed() { return new KsqlMaterializedWindowedTable(inner.windowed()); }
// Verifies a windowed-table get pipes rows through the filter and then the
// project transform with the windowed key and processing context.
@Test public void shouldPipeTransformsWindowed() { // Given: final MaterializedWindowedTable table = materialization.windowed(); givenNoopProject(); when(filter.apply(any(), any(), any())).thenReturn(Optional.of(transformed)); // When: table.get(aKey, partition, windowStartBounds, windowEndBounds); // Then: verify(project).apply( new Windowed<>(aKey, streamWindow), transformed, new PullProcessingContext(aRowtime) ); }
/**
 * Periodic-emit hook: recomputes and (if changed) forwards the combined
 * watermark across all registered outputs.
 */
public void onPeriodicEmit() { updateCombinedWatermark(); }
// Verifies a periodic emit with zero registered outputs produces no watermark
// and does not mark the underlying output idle.
@Test void noCombinedDeferredUpdateWhenWeHaveZeroOutputs() { TestingWatermarkOutput underlyingWatermarkOutput = createTestingWatermarkOutput(); WatermarkOutputMultiplexer multiplexer = new WatermarkOutputMultiplexer(underlyingWatermarkOutput); multiplexer.onPeriodicEmit(); assertThat(underlyingWatermarkOutput.lastWatermark()).isNull(); assertThat(underlyingWatermarkOutput.isIdle()).isFalse(); }
/**
 * Returns the largest value stored in the map, or the missing-value sentinel
 * when the map is empty. Values live at the odd indices of the interleaved
 * key/value entries array; slots holding the sentinel are unused and skipped.
 */
public int maxValue() { final int missingValue = this.missingValue; int max = 0 == size ? missingValue : Integer.MIN_VALUE; final int[] entries = this.entries; @DoNotSub final int length = entries.length; for (@DoNotSub int valueIndex = 1; valueIndex < length; valueIndex += 2) { final int value = entries[valueIndex]; if (missingValue != value) { max = Math.max(max, value); } } return max; }
// Verifies maxValue() over the fixture values inserted by addValues(map).
@Test void shouldFindMaxValue() { addValues(map); assertEquals(10, map.maxValue()); }
/**
 * Copies a file/folder to the target location via the EUE REST API.
 *
 * <p>Flow: when the target already exists it is trashed first; the copy is then
 * issued through the alias-based children-copy endpoint when the parent is the
 * root or trash alias, otherwise through the regular children-copy endpoint,
 * both in "rename" conflict mode. A null response body means a single file was
 * copied with an empty 200; otherwise each entry is checked — 201 caches the new
 * resource id for the target, anything else is mapped to a failure. When source
 * and target names differ, a PATCH renames the copied resource afterwards,
 * with the same per-entry status handling.
 *
 * @return the target path
 * @throws BackgroundException when any API call fails
 */
@Override public Path copy(final Path file, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { try { final EueApiClient client = new EueApiClient(session); if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Trash file %s to be replaced with %s", target, file)); } new EueTrashFeature(session, fileid).delete(Collections.singletonMap(target, status), callback, new Delete.DisabledCallback()); } final String resourceId = fileid.getFileId(file); final String parentResourceId = fileid.getFileId(target.getParent()); String targetResourceId = null; final ResourceCopyResponseEntries resourceCopyResponseEntries; switch(parentResourceId) { case EueResourceIdProvider.ROOT: case EueResourceIdProvider.TRASH: resourceCopyResponseEntries = new CopyChildrenForAliasApiApi(client) .resourceAliasAliasChildrenCopyPost(parentResourceId, Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)), null, null, null, "rename", null); break; default: resourceCopyResponseEntries = new CopyChildrenApi(client).resourceResourceIdChildrenCopyPost(parentResourceId, Collections.singletonList(String.format("%s/resource/%s", session.getBasePath(), resourceId)), null, null, null, "rename", null); } if(null == resourceCopyResponseEntries) { // Copy of single file will return 200 status code with empty response body } else { for(ResourceCopyResponseEntry resourceCopyResponseEntry : resourceCopyResponseEntries.values()) { switch(resourceCopyResponseEntry.getStatusCode()) { case HttpStatus.SC_CREATED: fileid.cache(target, EueResourceIdProvider.getResourceIdFromResourceUri(resourceCopyResponseEntry.getHeaders().getLocation())); break; default: log.warn(String.format("Failure %s copying file %s", resourceCopyResponseEntries, file)); throw new EueExceptionMappingService().map(new ApiException(resourceCopyResponseEntry.getReason(), null, 
resourceCopyResponseEntry.getStatusCode(), client.getResponseHeaders())); } } } listener.sent(status.getLength()); if(!StringUtils.equals(file.getName(), target.getName())) { final ResourceUpdateModel resourceUpdateModel = new ResourceUpdateModel(); final ResourceUpdateModelUpdate resourceUpdateModelUpdate = new ResourceUpdateModelUpdate(); final Uifs uifs = new Uifs(); uifs.setName(target.getName()); resourceUpdateModelUpdate.setUifs(uifs); resourceUpdateModel.setUpdate(resourceUpdateModelUpdate); final ResourceMoveResponseEntries resourceMoveResponseEntries = new UpdateResourceApi(client).resourceResourceIdPatch(fileid.getFileId(target), resourceUpdateModel, null, null, null); if(null == resourceMoveResponseEntries) { // Move of single file will return 200 status code with empty response body } else { for(ResourceMoveResponseEntry resourceMoveResponseEntry : resourceMoveResponseEntries.values()) { switch(resourceMoveResponseEntry.getStatusCode()) { case HttpStatus.SC_CREATED: break; default: log.warn(String.format("Failure %s renaming file %s", resourceMoveResponseEntry, file)); throw new EueExceptionMappingService().map(new ApiException(resourceMoveResponseEntry.getReason(), null, resourceMoveResponseEntry.getStatusCode(), client.getResponseHeaders())); } } } } return target; } catch(ApiException e) { throw new EueExceptionMappingService().map("Cannot copy {0}", e, file); } }
// Integration test: copying a folder (with one file inside) to a new root-level
// folder must leave the source intact and create the target with the child file.
@Test public void testCopyRecursiveToRoot() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); final Path sourceFolder = new EueDirectoryFeature(session, fileid).mkdir( new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path sourceFile = new Path(sourceFolder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); createFile(fileid, sourceFile, RandomUtils.nextBytes(541)); final Path targetFolder = new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); assertNull(targetFolder.attributes().getFileId()); final EueCopyFeature feature = new EueCopyFeature(session, fileid); feature.copy(sourceFolder, targetFolder, new TransferStatus(), new DisabledLoginCallback(), new DisabledStreamListener()); assertNotEquals(sourceFolder.attributes().getFileId(), targetFolder.attributes().getFileId()); assertTrue(new EueFindFeature(session, fileid).find(targetFolder)); assertTrue(new EueFindFeature(session, fileid).find(new Path(targetFolder, sourceFile.getName(), sourceFile.getType()))); assertTrue(new DefaultFindFeature(session).find(new Path(targetFolder, sourceFile.getName(), sourceFile.getType()))); assertTrue(new EueFindFeature(session, fileid).find(sourceFolder)); assertTrue(new EueFindFeature(session, fileid).find(sourceFile)); assertTrue(new DefaultFindFeature(session).find(sourceFile)); new EueDeleteFeature(session, fileid).delete(Arrays.asList(sourceFolder, targetFolder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Compares two images with the configured engines and returns the result map,
 * throwing MismatchException for a missing baseline, a dimension mismatch, or
 * (via checkMismatch) a mismatch percentage over the failure threshold.
 *
 * <p>Each supported engine (resemble, ssim) is run in turn; the smallest
 * mismatch percentage seen so far is kept (starting from 100.0), and the loop
 * stops early once it drops below stopWhenMismatchIsLessThan. Unsupported
 * engine names are logged and skipped.
 */
public static Map<String, Object> compare(byte[] baselineImg, byte[] latestImg, Map<String, Object> options, Map<String, Object> defaultOptions) throws MismatchException { boolean allowScaling = toBool(defaultOptions.get("allowScaling")); ImageComparison imageComparison = new ImageComparison(baselineImg, latestImg, options, allowScaling); imageComparison.configure(defaultOptions); if (imageComparison.baselineMissing) { imageComparison.result.put("isBaselineMissing", true); throw new MismatchException("baseline image was empty or not found", imageComparison.result); } if (imageComparison.scaleMismatch) { imageComparison.result.put("isScaleMismatch", true); throw new MismatchException("latest image dimensions != baseline image dimensions", imageComparison.result); } double mismatchPercentage = 100.0; for (String engine : imageComparison.engines) { double currentMismatchPercentage; switch (engine) { case RESEMBLE: currentMismatchPercentage = imageComparison.execResemble(); break; case SSIM: currentMismatchPercentage = imageComparison.execSSIM(); break; default: logger.error("skipping unsupported image comparison engine: {}", engine); continue; } if (currentMismatchPercentage <= mismatchPercentage) { mismatchPercentage = currentMismatchPercentage; } if (mismatchPercentage < imageComparison.stopWhenMismatchIsLessThan) { break; } } return imageComparison.checkMismatch(mismatchPercentage); }
// Verifies a 1-of-9-pixels difference (~11.11%) triggers MismatchException and
// that the reported mismatch percentage is carried in the exception data.
@Test void testFailureThresholdTriggered() { ImageComparison.MismatchException exception = assertThrows(ImageComparison.MismatchException.class, () -> ImageComparison.compare(B_3x3_IMG, BG_3x3_IMG, opts(), opts())); double mismatchPercentage = (double)exception.data.get("mismatchPercentage"); // 3x3 = 9 pixels, 1 is different => 1/9 = 0.111111... => ~11.11% assertEquals(11.11, round(mismatchPercentage)); }
int calculatePartBufferSize(HazelcastProperties hazelcastProperties, long jarSize) { int partBufferSize = hazelcastProperties.getInteger(JOB_UPLOAD_PART_SIZE); // If jar size is smaller, then use it if (jarSize < partBufferSize) { partBufferSize = (int) jarSize; } return partBufferSize; }
// Verifies that a jar smaller than the (default) configured part size yields a
// part buffer equal to the jar size itself.
@Test public void calculatePartBufferSize_when_JarIsSmall() { SubmitJobPartCalculator submitJobPartCalculator = new SubmitJobPartCalculator(); Properties properties = new Properties(); HazelcastProperties hazelcastProperties = new HazelcastProperties(properties); long jarSize = 2_000; int partSize = submitJobPartCalculator.calculatePartBufferSize(hazelcastProperties, jarSize); assertEquals(2_000, partSize); }
/**
 * Installs the configuration, first augmenting any non-null configuration with
 * the security settings from addSecurityConfiguration.
 *
 * @param conf the configuration to install; may be null
 */
@Override
public void setConf(Configuration conf) {
    final Configuration effectiveConf = (conf == null) ? conf : addSecurityConfiguration(conf);
    super.setConf(effectiveConf);
}
// Verifies that an HA failover succeeds (exit code 0) when a fence method is
// configured and the target namenode reports standby-ready.
@Test public void testFailoverWithFencerConfigured() throws Exception { Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus(); HdfsConfiguration conf = getHAConf(); conf.set(DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY, getFencerTrueCommand()); tool.setConf(conf); assertEquals(0, runTool("-failover", "nn1", "nn2")); }
/**
 * Filter predicate: a repository field matches when no filter value is given
 * (null filter means "match anything") or the two values are equal.
 *
 * <p>BUGFIX: the original dereferenced {@code repoObj} directly and threw a
 * NullPointerException whenever the repository value was null but a filter
 * value was supplied; a null repository value now simply fails to match.
 *
 * @param repoObj   the value stored in the repository (may be null)
 * @param filterObj the filter value, or null to accept any repository value
 * @return true when the filter is absent or the values are equal
 */
static boolean fieldMatch(Object repoObj, Object filterObj) {
    return filterObj == null || filterObj.equals(repoObj);
}
// Verifies two non-equal boxed integers do not match when a filter value is present.
@Test public void testFieldMatchWithNonEqualNonStringObjectsShouldReturnFalse() { assertFalse(Utilities.fieldMatch(42, 43)); }
/**
 * Entry point for dfsadmin: validates the argument count for the recognized
 * command, initializes the tool, then dispatches to the per-command handler.
 *
 * <p>Error handling maps failures to exit code -1: IllegalArgumentException
 * prints the message plus the command usage; RemoteException prints only the
 * first line of the server-side message; any other exception prints its
 * localized message. The captured exception is logged at debug level.
 *
 * @param argv command name (with leading '-') followed by its arguments
 * @return 0 on success, -1 on usage error or failure
 */
@Override public int run(String[] argv) { if (argv.length < 1) { printUsage(""); return -1; } int exitCode = -1; int i = 0; String cmd = argv[i++]; // // verify that we have enough command line parameters // if ("-safemode".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-allowSnapshot".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-disallowSnapshot".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-provisionSnapshotTrash".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-report".equals(cmd)) { if (argv.length > DFS_REPORT_ARGS.length + 1) { printUsage(cmd); return exitCode; } } else if ("-saveNamespace".equals(cmd)) { if (argv.length != 1 && argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-rollEdits".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-restoreFailedStorage".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-refreshNodes".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-finalizeUpgrade".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if (RollingUpgradeCommand.matches(cmd)) { if (argv.length > 2) { printUsage(cmd); return exitCode; } } else if ("-upgrade".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-metasave".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-refreshServiceAcl".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-refresh".equals(cmd)) { if (argv.length < 3) { printUsage(cmd); return exitCode; } } else if ("-refreshUserToGroupsMappings".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-printTopology".equals(cmd)) { if(argv.length != 1) { 
printUsage(cmd); return exitCode; } } else if ("-refreshNamenodes".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-getVolumeReport".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-reconfig".equals(cmd)) { if (argv.length != 4) { printUsage(cmd); return exitCode; } } else if ("-deleteBlockPool".equals(cmd)) { if ((argv.length != 3) && (argv.length != 4)) { printUsage(cmd); return exitCode; } } else if ("-setBalancerBandwidth".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-getBalancerBandwidth".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-fetchImage".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-shutdownDatanode".equals(cmd)) { if ((argv.length != 2) && (argv.length != 3)) { printUsage(cmd); return exitCode; } } else if ("-getDatanodeInfo".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-triggerBlockReport".equals(cmd)) { if ((argv.length < 2) || (argv.length > 5)) { printUsage(cmd); return exitCode; } } else if ("-listOpenFiles".equals(cmd)) { if ((argv.length > 4)) { printUsage(cmd); return exitCode; } } // initialize DFSAdmin init(); Exception debugException = null; exitCode = 0; try { if ("-report".equals(cmd)) { report(argv, i); } else if ("-safemode".equals(cmd)) { setSafeMode(argv, i); } else if ("-allowSnapshot".equalsIgnoreCase(cmd)) { allowSnapshot(argv); } else if ("-disallowSnapshot".equalsIgnoreCase(cmd)) { disallowSnapshot(argv); } else if ("-provisionSnapshotTrash".equalsIgnoreCase(cmd)) { provisionSnapshotTrash(argv); } else if ("-saveNamespace".equals(cmd)) { exitCode = saveNamespace(argv); } else if ("-rollEdits".equals(cmd)) { exitCode = rollEdits(); } else if ("-restoreFailedStorage".equals(cmd)) { exitCode = restoreFailedStorage(argv[i]); } else if ("-refreshNodes".equals(cmd)) { exitCode = 
refreshNodes(); } else if ("-finalizeUpgrade".equals(cmd)) { exitCode = finalizeUpgrade(); } else if (RollingUpgradeCommand.matches(cmd)) { exitCode = RollingUpgradeCommand.run(getDFS(), argv, i); } else if ("-upgrade".equals(cmd)) { exitCode = upgrade(argv[i]); } else if ("-metasave".equals(cmd)) { exitCode = metaSave(argv, i); } else if (ClearQuotaCommand.matches(cmd)) { exitCode = new ClearQuotaCommand(argv, i, getConf()).runAll(); } else if (SetQuotaCommand.matches(cmd)) { exitCode = new SetQuotaCommand(argv, i, getConf()).runAll(); } else if (ClearSpaceQuotaCommand.matches(cmd)) { exitCode = new ClearSpaceQuotaCommand(argv, i, getConf()).runAll(); } else if (SetSpaceQuotaCommand.matches(cmd)) { exitCode = new SetSpaceQuotaCommand(argv, i, getConf()).runAll(); } else if ("-refreshServiceAcl".equals(cmd)) { exitCode = refreshServiceAcl(); } else if ("-refreshUserToGroupsMappings".equals(cmd)) { exitCode = refreshUserToGroupsMappings(); } else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) { exitCode = refreshSuperUserGroupsConfiguration(); } else if ("-refreshCallQueue".equals(cmd)) { exitCode = refreshCallQueue(); } else if ("-refresh".equals(cmd)) { exitCode = genericRefresh(argv, i); } else if ("-printTopology".equals(cmd)) { exitCode = printTopology(); } else if ("-refreshNamenodes".equals(cmd)) { exitCode = refreshNamenodes(argv, i); } else if ("-getVolumeReport".equals(cmd)) { exitCode = getVolumeReport(argv, i); } else if ("-deleteBlockPool".equals(cmd)) { exitCode = deleteBlockPool(argv, i); } else if ("-setBalancerBandwidth".equals(cmd)) { exitCode = setBalancerBandwidth(argv, i); } else if ("-getBalancerBandwidth".equals(cmd)) { exitCode = getBalancerBandwidth(argv, i); } else if ("-fetchImage".equals(cmd)) { exitCode = fetchImage(argv, i); } else if ("-shutdownDatanode".equals(cmd)) { exitCode = shutdownDatanode(argv, i); } else if ("-evictWriters".equals(cmd)) { exitCode = evictWriters(argv, i); } else if ("-getDatanodeInfo".equals(cmd)) { 
exitCode = getDatanodeInfo(argv, i); } else if ("-reconfig".equals(cmd)) { exitCode = reconfig(argv, i); } else if ("-triggerBlockReport".equals(cmd)) { exitCode = triggerBlockReport(argv); } else if ("-listOpenFiles".equals(cmd)) { exitCode = listOpenFiles(argv); } else if ("-help".equals(cmd)) { if (i < argv.length) { printHelp(argv[i]); } else { printHelp(""); } } else { exitCode = -1; System.err.println(cmd.substring(1) + ": Unknown command"); printUsage(""); } } catch (IllegalArgumentException arge) { debugException = arge; exitCode = -1; System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage()); printUsage(cmd); } catch (RemoteException e) { // // This is a error returned by hadoop server. Print // out the first line of the error message, ignore the stack trace. exitCode = -1; debugException = e; try { String[] content; content = e.getLocalizedMessage().split("\n"); System.err.println(cmd.substring(1) + ": " + content[0]); } catch (Exception ex) { System.err.println(cmd.substring(1) + ": " + ex.getLocalizedMessage()); debugException = ex; } } catch (Exception e) { exitCode = -1; debugException = e; System.err.println(cmd.substring(1) + ": " + e.getLocalizedMessage()); } if (LOG.isDebugEnabled() && debugException != null) { LOG.debug("Exception encountered:", debugException); } return exitCode; }
// Verifies dfsadmin datanode commands pointed at the Xfer port (not the IPC
// port) fail with exit code -1, print nothing to stdout, and report an
// exception on stderr.
@Test(timeout = 60000) public void testDFSAdminUnreachableDatanode() throws Exception { redirectStream(); final DFSAdmin dfsAdmin = new DFSAdmin(conf); for (String command : new String[]{"-getDatanodeInfo", "-evictWriters", "-getBalancerBandwidth"}) { // Connecting to Xfer port instead of IPC port will get // Datanode unreachable. java.io.EOFException final String dnDataAddr = datanode.getXferAddress().getHostString() + ":" + datanode.getXferPort(); resetStream(); final List<String> outs = Lists.newArrayList(); final int ret = ToolRunner.run(dfsAdmin, new String[]{command, dnDataAddr}); assertEquals(-1, ret); scanIntoList(out, outs); assertTrue("Unexpected " + command + " stdout: " + out, outs.isEmpty()); assertTrue("Unexpected " + command + " stderr: " + err, err.toString().contains("Exception")); } }
/**
 * Applies this visitor to a pair of schemas by wrapping each in a row field
 * type and visiting from the empty context.
 *
 * @param left  the left-hand schema
 * @param right the right-hand schema
 * @return the visitor's result
 */
public final T apply(Schema left, Schema right) {
    final FieldType leftRow = FieldType.row(left);
    final FieldType rightRow = FieldType.row(right);
    return visit(this, Context.EMPTY, leftRow, rightRow);
}
// Verifies the CountMissingFields visitor reports 4 missing fields between the
// LEFT and RIGHT fixture schemas.
@Test public void testCountMissingFields() { assertEquals(4, new CountMissingFields().apply(LEFT, RIGHT).intValue()); }
/**
 * Creates a W3C XML Schema factory hardened against XXE by restricting
 * external-schema and external-DTD access via the setProperty helper.
 *
 * @return the configured SchemaFactory
 * @throws SAXException when the factory rejects the security properties
 */
public static SchemaFactory getSchemaFactory() throws SAXException { SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); setProperty(schemaFactory, XMLConstants.ACCESS_EXTERNAL_SCHEMA); setProperty(schemaFactory, XMLConstants.ACCESS_EXTERNAL_DTD); return schemaFactory; }
@Test public void testGetSchemaFactory() throws Exception { SchemaFactory schemaFactory = XmlUtil.getSchemaFactory(); assertNotNull(schemaFactory); assertThrows(SAXException.class, () -> XmlUtil.setProperty(schemaFactory, "test://no-such-property")); ignoreXxeFailureProp.setOrClearProperty("false"); assertThrows(SAXException.class, () -> XmlUtil.setProperty(schemaFactory, "test://no-such-property")); ignoreXxeFailureProp.setOrClearProperty("true"); XmlUtil.setProperty(schemaFactory, "test://no-such-property"); }
List<Quote> getQuotes() { return this.quotes; }
@Test void staticQuotesAreLoaded() { assertThat(quotesProperties.getQuotes()).hasSize(2); }
@Override public Collection<LocalDataQueryResultRow> getRows(final ExportStorageNodesStatement sqlStatement, final ContextManager contextManager) { checkSQLStatement(contextManager.getMetaDataContexts().getMetaData(), sqlStatement); String exportedData = generateExportData(contextManager.getMetaDataContexts().getMetaData(), sqlStatement); if (sqlStatement.getFilePath().isPresent()) { String filePath = sqlStatement.getFilePath().get(); ExportUtils.exportToFile(filePath, exportedData); return Collections.singleton(new LocalDataQueryResultRow(contextManager.getComputeNodeInstanceContext().getInstance().getMetaData().getId(), LocalDateTime.now(), String.format("Successfully exported to:'%s'", filePath))); } return Collections.singleton( new LocalDataQueryResultRow(contextManager.getComputeNodeInstanceContext().getInstance().getMetaData().getId(), LocalDateTime.now(), exportedData)); }
@Test void assertExecute() { when(database.getName()).thenReturn("normal_db"); Map<String, StorageUnit> storageUnits = createStorageUnits(); when(database.getResourceMetaData().getStorageUnits()).thenReturn(storageUnits); when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.singleton(createShardingRuleConfiguration())); ContextManager contextManager = mockContextManager(); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); Collection<LocalDataQueryResultRow> actual = new ExportStorageNodesExecutor().getRows(new ExportStorageNodesStatement(null, null), contextManager); assertThat(actual.size(), is(1)); LocalDataQueryResultRow row = actual.iterator().next(); assertThat(row.getCell(3), is(loadExpectedRow())); }
@Override public InterpreterResult interpret(String st, InterpreterContext context) { return helper.interpret(session, st, context); }
@Test void should_execute_bound_statement() { // Given String queries = "@prepare[users_insert]=INSERT INTO zeppelin.users" + "(login,firstname,lastname,addresses,location)" + "VALUES(:login,:fn,:ln,:addresses,:loc)\n" + "@bind[users_insert]='jdoe','John','DOE'," + "{street_number: 3, street_name: 'Beverly Hills Bld', zip_code: 90209," + " country: 'USA', extra_info: ['Right on the hills','Next to the post box']," + " phone_numbers: {'home': 2016778524, 'office': 2015790847}}," + "('USA', 90209, 'Beverly Hills')\n" + "SELECT * FROM zeppelin.users WHERE login='jdoe';"; // When final InterpreterResult actual = interpreter.interpret(queries, intrContext); // Then assertEquals(Code.SUCCESS, actual.code()); assertEquals("login\taddresses\tage\tdeceased\tfirstname\tlast_update\tlastname\tlocation\n" + "jdoe\t" + "{street_number: 3, street_name: Beverly Hills Bld, zip_code: 90209, " + "country: USA, extra_info: [Right on the hills, Next to the post box], " + "phone_numbers: {home: 2016778524, office: 2015790847}}\tnull\t" + "null\t" + "John\t" + "null\t" + "DOE\t" + "(USA, 90209, Beverly Hills)\n", actual.message().get(0).getData()); }
@Override public ManageSnapshots createTag(String name, long snapshotId) { updateSnapshotReferencesOperation().createTag(name, snapshotId); return this; }
@TestTemplate public void testCreateTag() { table.newAppend().appendFile(FILE_A).commit(); long snapshotId = table.currentSnapshot().snapshotId(); // Test a basic case of creating a tag table.manageSnapshots().createTag("tag1", snapshotId).commit(); SnapshotRef expectedTag = table.ops().refresh().ref("tag1"); assertThat(expectedTag).isNotNull().isEqualTo(SnapshotRef.tagBuilder(snapshotId).build()); }
@Override public List<RoleDO> getRoleListByStatus(Collection<Integer> statuses) { return roleMapper.selectListByStatus(statuses); }
@Test public void testGetRoleListByStatus() { // mock 数据 RoleDO dbRole01 = randomPojo(RoleDO.class, o -> o.setStatus(CommonStatusEnum.ENABLE.getStatus())); roleMapper.insert(dbRole01); RoleDO dbRole02 = randomPojo(RoleDO.class, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())); roleMapper.insert(dbRole02); // 调用 List<RoleDO> list = roleService.getRoleListByStatus( singleton(CommonStatusEnum.ENABLE.getStatus())); // 断言 assertEquals(1, list.size()); assertPojoEquals(dbRole01, list.get(0)); }
void release() { Arrays.stream(subpartitionCacheDataManagers) .forEach(SubpartitionRemoteCacheManager::release); }
@Test void testRelease() { TieredStoragePartitionId partitionId = TieredStorageIdMappingUtils.convertId(new ResultPartitionID()); AtomicBoolean isReleased = new AtomicBoolean(false); TestingPartitionFileWriter partitionFileWriter = new TestingPartitionFileWriter.Builder() .setReleaseRunnable(() -> isReleased.set(true)) .build(); RemoteCacheManager cacheManager = new RemoteCacheManager( partitionId, 1, new TestingTieredStorageMemoryManager.Builder().build(), partitionFileWriter); cacheManager.release(); assertThat(isReleased).isTrue(); }
public static ParseResult parse(String text) { Map<String, String> localProperties = new HashMap<>(); String intpText = ""; String scriptText = null; Matcher matcher = REPL_PATTERN.matcher(text); if (matcher.find()) { String headingSpace = matcher.group(1); intpText = matcher.group(2); int startPos = headingSpace.length() + intpText.length() + 1; if (startPos < text.length() && text.charAt(startPos) == '(') { startPos = parseLocalProperties(text, startPos, localProperties); } scriptText = text.substring(startPos); } else { intpText = ""; scriptText = text; } return new ParseResult(intpText, removeLeadingWhiteSpaces(scriptText), localProperties); }
@Test void testParagraphTextQuotedPropertyKeyAndValue() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse( "%spark.pyspark(\"po ol\"=\"value with \\\" inside\")"); assertEquals("spark.pyspark", parseResult.getIntpText()); assertEquals(1, parseResult.getLocalProperties().size()); assertEquals("value with \" inside", parseResult.getLocalProperties().get("po ol")); assertEquals("", parseResult.getScriptText()); }
protected List<HeaderValue> parseHeaderValue(String headerValue) { return parseHeaderValue(headerValue, HEADER_VALUE_SEPARATOR, HEADER_QUALIFIER_SEPARATOR); }
@Test void headers_parseHeaderValue_validMultipleCookie() { AwsProxyHttpServletRequest request = new AwsProxyHttpServletRequest(validCookieRequest, mockContext, null, config); List<AwsHttpServletRequest.HeaderValue> values = request.parseHeaderValue(request.getHeader(HttpHeaders.COOKIE), ";", ","); assertEquals(2, values.size()); assertEquals("yummy_cookie", values.get(0).getKey()); assertEquals("choco", values.get(0).getValue()); assertEquals("tasty_cookie", values.get(1).getKey()); assertEquals("strawberry", values.get(1).getValue()); }
@Override public KsqlSecurityContext provide(final ApiSecurityContext apiSecurityContext) { final Optional<KsqlPrincipal> principal = apiSecurityContext.getPrincipal(); final Optional<String> authHeader = apiSecurityContext.getAuthHeader(); final List<Entry<String, String>> requestHeaders = apiSecurityContext.getRequestHeaders(); // A user context is not necessary if a user context provider is not present or the user // principal is missing. If a failed authentication attempt results in a missing principle, // then the authentication plugin will have already failed the connection before calling // this method. Therefore, if we've reached this method with a missing principle, then this // must be a valid connection that does not require authentication. // For these cases, we create a default service context that the missing user can use. final boolean requiresUserContext = securityExtension != null && securityExtension.getUserContextProvider().isPresent() && principal.isPresent(); if (!requiresUserContext) { return new KsqlSecurityContext( principal, defaultServiceContextFactory.create( ksqlConfig, authHeader, schemaRegistryClientFactory, connectClientFactory, sharedClient, requestHeaders, principal) ); } return securityExtension.getUserContextProvider() .map(provider -> new KsqlSecurityContext( principal, userServiceContextFactory.create( ksqlConfig, authHeader, provider.getKafkaClientSupplier(principal.get()), provider.getSchemaRegistryClientFactory(principal.get()), connectClientFactory, sharedClient, requestHeaders, principal))) .get(); }
@Test public void shouldCreateDefaultServiceContextIfUserPrincipalIsMissing() { // Given: when(securityExtension.getUserContextProvider()).thenReturn(Optional.of(userContextProvider)); when(apiSecurityContext.getPrincipal()).thenReturn(Optional.empty()); // When: final KsqlSecurityContext ksqlSecurityContext = ksqlSecurityContextProvider.provide(apiSecurityContext); // Then: assertThat(ksqlSecurityContext.getUserPrincipal(), is(Optional.empty())); assertThat(ksqlSecurityContext.getServiceContext(), is(defaultServiceContext)); }
@Override @SuccessResponse(statuses = { HttpStatus.S_204_NO_CONTENT }) @ServiceErrors(INVALID_PERMISSIONS) @ParamError(code = INVALID_ID, parameterNames = { "albumEntryId" }) public UpdateResponse update(CompoundKey key, AlbumEntry entity) { long photoId = (Long) key.getPart("photoId"); long albumId = (Long) key.getPart("albumId"); // make sure photo and album exist if (!_photoDb.getData().containsKey(photoId)) throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Nonexistent photo ID: " + photoId); if (!_albumDb.getData().containsKey(albumId)) throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Nonexistent album ID: " + albumId); // disallow changing entity ID if (entity.hasAlbumId() || entity.hasPhotoId()) throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Photo/album ID are not acceptable in request"); // make sure the ID in the entity is consistent with the key in the database entity.setPhotoId(photoId); entity.setAlbumId(albumId); _db.getData().put(key, entity); return new UpdateResponse(HttpStatus.S_204_NO_CONTENT); }
@Test(expectedExceptions = RestLiServiceException.class) public void testBadUpdateAlbumId() { // album 100 doesn't exist CompoundKey key = new CompoundKey().append("photoId", 1L).append("albumId", 100L); AlbumEntry entry = new AlbumEntry().setAddTime(4); _entryRes.update(key, entry); }
@Override public String toString() { return ioStatisticsToString(this); }
@Test public void testStringification2() throws Throwable { String ss = snapshot.toString(); LOG.info("original {}", ss); Assertions.assertThat(ss) .describedAs("snapshot toString()") .contains("c1=0") .contains("g1=1"); }
public static String getJobOffsetItemPath(final String jobId, final int shardingItem) { return String.join("/", getJobOffsetPath(jobId), Integer.toString(shardingItem)); }
@Test void assertGetJobOffsetItemPath() { assertThat(PipelineMetaDataNode.getJobOffsetItemPath(jobId, 0), is(jobRootPath + "/offset/0")); }
@Override public Future<?> submit(Runnable runnable) { submitted.mark(); return delegate.submit(new InstrumentedRunnable(runnable)); }
@Test @SuppressWarnings("unchecked") public void reportsTasksInformationForForkJoinPool() throws Exception { executor = Executors.newWorkStealingPool(4); instrumentedExecutorService = new InstrumentedExecutorService(executor, registry, "fjp"); submitted = registry.meter("fjp.submitted"); running = registry.counter("fjp.running"); completed = registry.meter("fjp.completed"); duration = registry.timer("fjp.duration"); idle = registry.timer("fjp.idle"); final Gauge<Long> tasksStolen = (Gauge<Long>) registry.getGauges().get("fjp.tasks.stolen"); final Gauge<Long> tasksQueued = (Gauge<Long>) registry.getGauges().get("fjp.tasks.queued"); final Gauge<Integer> threadsActive = (Gauge<Integer>) registry.getGauges().get("fjp.threads.active"); final Gauge<Integer> threadsRunning = (Gauge<Integer>) registry.getGauges().get("fjp.threads.running"); assertThat(submitted.getCount()).isEqualTo(0); assertThat(running.getCount()).isEqualTo(0); assertThat(completed.getCount()).isEqualTo(0); assertThat(duration.getCount()).isEqualTo(0); assertThat(idle.getCount()).isEqualTo(0); assertThat(tasksStolen.getValue()).isEqualTo(0L); assertThat(tasksQueued.getValue()).isEqualTo(0L); assertThat(threadsActive.getValue()).isEqualTo(0); assertThat(threadsRunning.getValue()).isEqualTo(0); Runnable runnable = () -> { assertThat(submitted.getCount()).isEqualTo(1); assertThat(running.getCount()).isEqualTo(1); assertThat(completed.getCount()).isEqualTo(0); assertThat(duration.getCount()).isEqualTo(0); assertThat(idle.getCount()).isEqualTo(1); assertThat(tasksQueued.getValue()).isEqualTo(0L); assertThat(threadsActive.getValue()).isEqualTo(1); assertThat(threadsRunning.getValue()).isEqualTo(1); }; Future<?> theFuture = instrumentedExecutorService.submit(runnable); assertThat(theFuture).succeedsWithin(Duration.ofSeconds(5L)); assertThat(submitted.getCount()).isEqualTo(1); assertThat(running.getCount()).isEqualTo(0); assertThat(completed.getCount()).isEqualTo(1); 
assertThat(duration.getCount()).isEqualTo(1); assertThat(duration.getSnapshot().size()).isEqualTo(1); assertThat(idle.getCount()).isEqualTo(1); assertThat(idle.getSnapshot().size()).isEqualTo(1); }
@Override public MetadataNode child(String name) { if (name.equals("name")) { return new MetadataLeafNode(image.name()); } else if (name.equals("id")) { return new MetadataLeafNode(image.id().toString()); } else { int partitionId; try { partitionId = Integer.parseInt(name); } catch (NumberFormatException e) { return null; } PartitionRegistration registration = image.partitions().get(partitionId); if (registration == null) return null; return new MetadataLeafNode(registration.toString()); } }
@Test public void testChildPartitionIdNull() { MetadataNode child1 = NODE.child("1"); MetadataNode child2 = NODE.child("a"); assertNull(child1); assertNull(child2); }
@GET @Produces(MediaType.APPLICATION_JSON) @Operation(summary = "Get prekey count", description = "Gets the number of one-time prekeys uploaded for this device and still available") @ApiResponse(responseCode = "200", description = "Body contains the number of available one-time prekeys for the device.", useReturnTypeSchema = true) @ApiResponse(responseCode = "401", description = "Account authentication check failed.") public CompletableFuture<PreKeyCount> getStatus(@ReadOnly @Auth final AuthenticatedDevice auth, @QueryParam("identity") @DefaultValue("aci") final IdentityType identityType) { final CompletableFuture<Integer> ecCountFuture = keysManager.getEcCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId()); final CompletableFuture<Integer> pqCountFuture = keysManager.getPqCount(auth.getAccount().getIdentifier(identityType), auth.getAuthenticatedDevice().getId()); return ecCountFuture.thenCombine(pqCountFuture, PreKeyCount::new); }
@Test void putKeysStructurallyInvalidPQOneTimeKey() { final ECKeyPair identityKeyPair = Curve.generateKeyPair(); final IdentityKey identityKey = new IdentityKey(identityKeyPair.getPublicKey()); final WeaklyTypedSignedPreKey wrongPreKey = WeaklyTypedSignedPreKey.fromSignedPreKey(KeysHelper.signedECPreKey(1, identityKeyPair)); final WeaklyTypedPreKeyState preKeyState = new WeaklyTypedPreKeyState(null, null, List.of(wrongPreKey), null, identityKey.serialize()); Response response = resources.getJerseyTest() .target("/v2/keys") .request() .header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD)) .put(Entity.entity(preKeyState, MediaType.APPLICATION_JSON_TYPE)); assertThat(response.getStatus()).isEqualTo(400); }
CompletableFuture<String> getOperationFuture() { return operationFuture; }
@Test void testJobFailedAndSavepointOperationSucceeds() throws Exception { try (MockStopWithSavepointContext ctx = new MockStopWithSavepointContext()) { StateTrackingMockExecutionGraph mockExecutionGraph = new StateTrackingMockExecutionGraph(); final CompletableFuture<String> savepointFuture = new CompletableFuture<>(); StopWithSavepoint sws = createStopWithSavepoint(ctx, mockExecutionGraph, savepointFuture); ctx.setStopWithSavepoint(sws); ctx.setHowToHandleFailure(FailureResult::canNotRestart); // fail job: mockExecutionGraph.completeTerminationFuture(JobStatus.FAILED); // this is a sanity check that we haven't scheduled a state transition ctx.triggerExecutors(); ctx.setExpectFailing( failingArguments -> { assertThat(failingArguments.getExecutionGraph().getState()) .isEqualTo(JobStatus.FAILED); assertThat(failingArguments.getFailureCause()) .satisfies( FlinkAssertions.anyCauseMatches( StopWithSavepointStoppingException.class)); }); savepointFuture.complete(SAVEPOINT_PATH); ctx.triggerExecutors(); assertThat(sws.getOperationFuture()).isCompletedExceptionally(); } }
public String send() throws MailException { try { return doSend(); } catch (MessagingException e) { if (e instanceof SendFailedException) { // 当地址无效时,显示更加详细的无效地址信息 final Address[] invalidAddresses = ((SendFailedException) e).getInvalidAddresses(); final String msg = StrUtil.format("Invalid Addresses: {}", ArrayUtil.toString(invalidAddresses)); throw new MailException(msg, e); } throw new MailException(e); } }
@Test @Disabled public void sendByAccountTest() { MailAccount account = new MailAccount(); account.setHost("smtp.yeah.net"); account.setPort(465); account.setSslEnable(true); account.setFrom("hutool@yeah.net"); account.setUser("hutool"); account.setPass("q1w2e3"); JakartaMailUtil.send(account, "hutool@foxmail.com", "测试", "<h1>邮件来自Hutool测试</h1>", true); }
@Override public KTable<K, V> reduce(final Reducer<V> reducer) { return reduce(reducer, Materialized.with(keySerde, valueSerde)); }
@Test public void shouldReduceAndMaterializeResults() { groupedStream.reduce( MockReducer.STRING_ADDER, Materialized.<String, String, KeyValueStore<Bytes, byte[]>>as("reduce") .withKeySerde(Serdes.String()) .withValueSerde(Serdes.String())); try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), props)) { processData(driver); { final KeyValueStore<String, String> reduced = driver.getKeyValueStore("reduce"); assertThat(reduced.get("1"), equalTo("A+C+D")); assertThat(reduced.get("2"), equalTo("B")); assertThat(reduced.get("3"), equalTo("E+F")); } { final KeyValueStore<String, ValueAndTimestamp<String>> reduced = driver.getTimestampedKeyValueStore("reduce"); assertThat(reduced.get("1"), equalTo(ValueAndTimestamp.make("A+C+D", 10L))); assertThat(reduced.get("2"), equalTo(ValueAndTimestamp.make("B", 1L))); assertThat(reduced.get("3"), equalTo(ValueAndTimestamp.make("E+F", 9L))); } } }
public FEELFnResult<List> invoke(@ParameterName("list") List list, @ParameterName("position") BigDecimal position, @ParameterName("newItem") Object newItem) { if (list == null) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", CANNOT_BE_NULL)); } if (position == null) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", CANNOT_BE_NULL)); } int intPosition = position.intValue(); if (intPosition == 0 || Math.abs(intPosition) > list.size()) { String paramProblem = String.format("%s outside valid boundaries (1-%s)", intPosition, list.size()); return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "position", paramProblem)); } Object e = NumberEvalHelper.coerceNumber(newItem); List toReturn = new ArrayList(list); int replacementPosition = intPosition > 0 ? intPosition -1 : list.size() - Math.abs(intPosition); toReturn.set(replacementPosition, e); return FEELFnResult.ofResult(toReturn); }
@Test void invokeMatchNull() { FunctionTestUtil.assertResultError(listReplaceFunction.invoke(new ArrayList(), (AbstractCustomFEELFunction) null, ""), InvalidParametersEvent.class); }
public static <T extends PipelineOptions> T validate(Class<T> klass, PipelineOptions options) { return validate(klass, options, false); }
@Test public void testWhenRequiredOptionIsNeverSet() { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage( "Missing required value for " + "[public abstract java.lang.String org.apache.beam." + "sdk.options.PipelineOptionsValidatorTest$Required.getObject(), \"Fake Description\"]."); Required required = PipelineOptionsFactory.as(Required.class); PipelineOptionsValidator.validate(Required.class, required); }
public RateLimitSet getUser() { return user; }
@Test public void testUser() { LimitConfig.RateLimitSet limitUser = limitConfig.getUser(); Assert.assertEquals(limitUser.getDirectMaps().size(), 3); }
@Override public Long sendSingleMailToAdmin(String mail, Long userId, String templateCode, Map<String, Object> templateParams) { // 如果 mail 为空,则加载用户编号对应的邮箱 if (StrUtil.isEmpty(mail)) { AdminUserDO user = adminUserService.getUser(userId); if (user != null) { mail = user.getEmail(); } } // 执行发送 return sendSingleMail(mail, userId, UserTypeEnum.ADMIN.getValue(), templateCode, templateParams); }
@Test public void testSendSingleMailToAdmin() { // 准备参数 Long userId = randomLongId(); String templateCode = RandomUtils.randomString(); Map<String, Object> templateParams = MapUtil.<String, Object>builder().put("code", "1234") .put("op", "login").build(); // mock adminUserService 的方法 AdminUserDO user = randomPojo(AdminUserDO.class, o -> o.setMobile("15601691300")); when(adminUserService.getUser(eq(userId))).thenReturn(user); // mock MailTemplateService 的方法 MailTemplateDO template = randomPojo(MailTemplateDO.class, o -> { o.setStatus(CommonStatusEnum.ENABLE.getStatus()); o.setContent("验证码为{code}, 操作为{op}"); o.setParams(Lists.newArrayList("code", "op")); }); when(mailTemplateService.getMailTemplateByCodeFromCache(eq(templateCode))).thenReturn(template); String title = RandomUtils.randomString(); when(mailTemplateService.formatMailTemplateContent(eq(template.getTitle()), eq(templateParams))) .thenReturn(title); String content = RandomUtils.randomString(); when(mailTemplateService.formatMailTemplateContent(eq(template.getContent()), eq(templateParams))) .thenReturn(content); // mock MailAccountService 的方法 MailAccountDO account = randomPojo(MailAccountDO.class); when(mailAccountService.getMailAccountFromCache(eq(template.getAccountId()))).thenReturn(account); // mock MailLogService 的方法 Long mailLogId = randomLongId(); when(mailLogService.createMailLog(eq(userId), eq(UserTypeEnum.ADMIN.getValue()), eq(user.getEmail()), eq(account), eq(template), eq(content), eq(templateParams), eq(true))).thenReturn(mailLogId); // 调用 Long resultMailLogId = mailSendService.sendSingleMailToAdmin(null, userId, templateCode, templateParams); // 断言 assertEquals(mailLogId, resultMailLogId); // 断言调用 verify(mailProducer).sendMailSendMessage(eq(mailLogId), eq(user.getEmail()), eq(account.getId()), eq(template.getNickname()), eq(title), eq(content)); }
@Override public boolean imbalanceDetected(LoadImbalance imbalance) { long min = imbalance.minimumLoad; long max = imbalance.maximumLoad; if (min == Long.MIN_VALUE || max == Long.MAX_VALUE) { return false; } long lowerBound = (long) (MIN_MAX_RATIO_MIGRATION_THRESHOLD * max); return min < lowerBound; }
@Test public void testImbalanceDetected_shouldReturnFalseWhenNoKnownMaximum() { imbalance.maximumLoad = Long.MAX_VALUE; boolean imbalanceDetected = strategy.imbalanceDetected(imbalance); assertFalse(imbalanceDetected); }
protected String readEncodingAndString(int max) throws IOException { byte encoding = readByte(); return readEncodedString(encoding, max - 1); }
@Test public void testReadUtf16NullPrefix() throws IOException { byte[] data = { ID3Reader.ENCODING_UTF16_WITH_BOM, (byte) 0xff, (byte) 0xfe, // BOM 0x00, 0x01, // Latin Capital Letter A with macron (Ā) 0, 0, // Null-terminated }; CountingInputStream inputStream = new CountingInputStream(new ByteArrayInputStream(data)); String string = new ID3Reader(inputStream).readEncodingAndString(1000); assertEquals("Ā", string); }
@Override public OutputFile newOutputFile(String path) { return new OSSOutputFile(client(), new OSSURI(path), aliyunProperties, metrics); }
@Test public void testOutputFile() throws IOException { String location = randomLocation(); int dataSize = 1024 * 10; byte[] data = randomData(dataSize); OutputFile out = fileIO().newOutputFile(location); writeOSSData(out, data); OSSURI uri = new OSSURI(location); assertThat(ossClient().get().doesObjectExist(uri.bucket(), uri.key())) .as("OSS file should exist") .isTrue(); assertThat(out.location()).as("Should have expected location").isEqualTo(location); assertThat(ossDataLength(uri)).as("Should have expected length").isEqualTo(dataSize); assertThat(ossDataContent(uri, dataSize)).as("Should have expected content").isEqualTo(data); }
SelType pop() { SelType ret = stack[top]; stack[top--] = null; return ret; }
@Test(expected = ArrayIndexOutOfBoundsException.class) public void testInvalidPop() { state.pop(); }
@VisibleForTesting Object evaluate(final GenericRow row) { return term.getValue(new TermEvaluationContext(row)); }
@Test public void shouldEvaluateIsNotNullPredicate() { // Given: final Expression expression1 = new IsNotNullPredicate( COL11 ); final Expression expression2 = new IsNotNullPredicate( new NullLiteral() ); // When: InterpretedExpression interpreter1 = interpreter(expression1); InterpretedExpression interpreter2 = interpreter(expression2); // Then: assertThat(interpreter1.evaluate(make(11, true)), is(true)); assertThat(interpreter1.evaluate(make(11, null)), is(false)); assertThat(interpreter2.evaluate(ROW), is(false)); }
@Override public void close() { diskCacheManager.close(); }
@Test void testRelease() { AtomicBoolean isReleased = new AtomicBoolean(false); TestingPartitionFileWriter partitionFileWriter = new TestingPartitionFileWriter.Builder() .setReleaseRunnable(() -> isReleased.set(true)) .build(); TieredStorageResourceRegistry resourceRegistry = new TieredStorageResourceRegistry(); DiskTierProducerAgent diskTierProducerAgent = createDiskTierProducerAgent( false, NUM_BYTES_PER_SEGMENT, 0, tempFolder.toString(), partitionFileWriter, resourceRegistry); diskTierProducerAgent.close(); resourceRegistry.clearResourceFor(PARTITION_ID); assertThat(isReleased).isTrue(); }
@Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { rejectCount.incrementAndGet(); if (ApplicationContextHolder.getInstance() != null) { try { ThreadPoolCheckAlarm alarmHandler = ApplicationContextHolder.getBean(ThreadPoolCheckAlarm.class); alarmHandler.asyncSendRejectedAlarm(threadPoolId); } catch (Throwable ex) { log.error("Failed to send rejection policy alert.", ex); } } try { return method.invoke(target, args); } catch (InvocationTargetException ex) { throw ex.getCause(); } }
@Test public void testInvoke() throws Throwable { Object[] mockArgs = new Object[]{"arg1", "arg2"}; MockedStatic<ApplicationContextHolder> mockedStatic = Mockito.mockStatic(ApplicationContextHolder.class); mockedStatic.when(ApplicationContextHolder::getInstance).thenReturn(applicationContext); mockedStatic.when(() -> ApplicationContextHolder.getBean(ThreadPoolCheckAlarm.class)).thenReturn(mockAlarmHandler); Mockito.doNothing().when(mockAlarmHandler).asyncSendRejectedAlarm("test-pool"); handler.invoke(null, mockMethod, mockArgs); Mockito.doThrow(new InvocationTargetException(new Throwable())).when(mockMethod).invoke(target, mockArgs); Assertions.assertThrows(Throwable.class, () -> handler.invoke(null, mockMethod, mockArgs)); Assertions.assertSame(rejectCount.get(), 2L); }
@Override public void stop() throws BundleException { throw newException(); }
@Test void require_that_stop_throws_exception() throws BundleException { assertThrows(RuntimeException.class, () -> { new DisableOsgiFramework().stop(); }); }
@Override public Set<EmailRecipient> findSubscribedEmailRecipients(String dispatcherKey, String projectKey, SubscriberPermissionsOnProject subscriberPermissionsOnProject) { verifyProjectKey(projectKey); try (DbSession dbSession = dbClient.openSession(false)) { Set<EmailSubscriberDto> emailSubscribers = dbClient.propertiesDao().findEmailSubscribersForNotification( dbSession, dispatcherKey, EmailNotificationChannel.class.getSimpleName(), projectKey); return keepAuthorizedEmailSubscribers(dbSession, projectKey, subscriberPermissionsOnProject, emailSubscribers); } }
@Test public void findSubscribedEmailRecipients_fails_with_NPE_if_projectKey_is_null() { String dispatcherKey = randomAlphabetic(12); assertThatThrownBy(() -> underTest.findSubscribedEmailRecipients(dispatcherKey, null, ALL_MUST_HAVE_ROLE_USER)) .isInstanceOf(NullPointerException.class) .hasMessage("projectKey is mandatory"); }
@Override public RexNode visit(CallExpression call) { boolean isBatchMode = unwrapContext(relBuilder).isBatchMode(); for (CallExpressionConvertRule rule : getFunctionConvertChain(isBatchMode)) { Optional<RexNode> converted = rule.convert(call, newFunctionContext()); if (converted.isPresent()) { return converted.get(); } } throw new RuntimeException("Unknown call expression: " + call); }
@Test void testTimestampLiteral() { RexNode rex = converter.visit( valueLiteral( LocalDateTime.parse("2012-12-12T12:12:12.12345"), DataTypes.TIMESTAMP(3).notNull())); assertThat(((RexLiteral) rex).getValueAs(TimestampString.class)) .isEqualTo(new TimestampString("2012-12-12 12:12:12.123")); assertThat(rex.getType().getSqlTypeName()).isEqualTo(SqlTypeName.TIMESTAMP); assertThat(rex.getType().getPrecision()).isEqualTo(3); }
@Override public synchronized void editSchedule() { updateConfigIfNeeded(); long startTs = clock.getTime(); CSQueue root = scheduler.getRootQueue(); Resource clusterResources = Resources.clone(scheduler.getClusterResource()); containerBasedPreemptOrKill(root, clusterResources); if (LOG.isDebugEnabled()) { LOG.debug("Total time used=" + (clock.getTime() - startTs) + " ms."); } }
@Test public void testHierarchicalLarge() { int[][] qData = new int[][] { // / A D G // B C E F H I { 400, 200, 60, 140, 100, 70, 30, 100, 10, 90 }, // abs { 400, 400, 400, 400, 400, 400, 400, 400, 400, 400 }, // maxCap { 400, 210, 70, 140, 100, 50, 50, 90, 90, 0 }, // used { 15, 0, 0, 0, 0, 0, 0, 0, 0, 15 }, // pending { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // reserved // appA appB appC appD appE appF { 6, 2, 1, 1, 2, 1, 1, 2, 1, 1 }, // apps { -1, -1, 1, 1, -1, 1, 1, -1, 1, 1 }, // req granularity { 3, 2, 0, 0, 2, 0, 0, 2, 0, 0 }, // subqueues }; ProportionalCapacityPreemptionPolicy policy = buildPolicy(qData); policy.editSchedule(); // verify capacity taken from A1, not H1 despite H1 being far over // its absolute guaranteed capacity // XXX note: compensating for rounding error in Resources.multiplyTo // which is likely triggered since we use small numbers for readability verify(mDisp, times(9)).handle(argThat(new IsPreemptionRequestFor(appA))); verify(mDisp, times(6)).handle(argThat(new IsPreemptionRequestFor(appE))); }
public void setProperty(String name, String value) { if (value == null) { return; } name = Introspector.decapitalize(name); PropertyDescriptor prop = getPropertyDescriptor(name); if (prop == null) { addWarn("No such property [" + name + "] in " + objClass.getName() + "."); } else { try { setProperty(prop, name, value); } catch (PropertySetterException ex) { addWarn("Failed to set property [" + name + "] to value \"" + value + "\". ", ex); } } }
@Test public void charset() { setter.setProperty("charset", "UTF-8"); assertEquals(Charset.forName("UTF-8"), house.getCharset()); house.setCharset(null); setter.setProperty("charset", "UTF"); assertNull(house.getCharset()); StatusChecker checker = new StatusChecker(context); checker.containsException(UnsupportedCharsetException.class); }
/**
 * Routes a precise (equality) sharding value to a target by modulo: the value
 * is reduced mod {@code shardingCount} and the result used as the target
 * name suffix.
 *
 * @throws NullShardingValueException if the sharding value is null
 */
@Override
public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Comparable<?>> shardingValue) {
    ShardingSpherePreconditions.checkNotNull(shardingValue.getValue(), NullShardingValueException::new);
    // BigInteger modulo avoids overflow for arbitrarily large sharding values.
    String shardingResultSuffix = getShardingResultSuffix(cutShardingValue(shardingValue.getValue()).mod(new BigInteger(String.valueOf(shardingCount))).toString());
    return ShardingAutoTableAlgorithmUtils.findMatchedTargetName(availableTargetNames, shardingResultSuffix, shardingValue.getDataNodeInfo()).orElse(null);
}
// MOD algorithm with sharding-count 16: value 17 -> 17 mod 16 = 1 -> "t_order_1".
@Test
void assertPreciseDoShardingWithIntShardingValue() {
    ModShardingAlgorithm algorithm = (ModShardingAlgorithm) TypedSPILoader.getService(ShardingAlgorithm.class, "MOD", PropertiesBuilder.build(new Property("sharding-count", "16")));
    assertThat(algorithm.doSharding(createAvailableTargetNames(), new PreciseShardingValue<>("t_order", "order_id", DATA_NODE_INFO, 17)), is("t_order_1"));
}
/**
 * Validates a reservation delete request by delegating to the common
 * reservation validation with the DELETE audit constant.
 *
 * @return the plan associated with the reservation id in the request
 * @throws YarnException if the reservation id is invalid or has no plan
 */
public Plan validateReservationDeleteRequest(
    ReservationSystem reservationSystem, ReservationDeleteRequest request)
    throws YarnException {
    return validateReservation(reservationSystem, request.getReservationId(),
        AuditConstants.DELETE_RESERVATION_REQUEST);
}
// Deleting a reservation whose id maps to no plan must fail with a
// YarnException whose message ends with the expected "not associated" text.
@Test
public void testDeleteReservationInvalidPlan() {
    ReservationDeleteRequest request = new ReservationDeleteRequestPBImpl();
    ReservationId reservationID = ReservationSystemTestUtil.getNewReservationId();
    request.setReservationId(reservationID);
    // Simulate a missing plan for the configured plan name.
    when(rSystem.getPlan(PLAN_NAME)).thenReturn(null);
    Plan plan = null;
    try {
        plan = rrValidator.validateReservationDeleteRequest(rSystem, request);
        Assert.fail();
    } catch (YarnException e) {
        Assert.assertNull(plan);
        String message = e.getMessage();
        Assert.assertTrue(message
            .endsWith(" is not associated with any valid plan. Please try again with a valid reservation."));
        LOG.info(message);
    }
}
/**
 * Updates an existing OpenStack port in the network store.
 *
 * @throws NullPointerException     if the port is null
 * @throws IllegalArgumentException if the port id or network id is missing
 */
@Override
public void updatePort(Port osPort) {
    checkNotNull(osPort, ERR_NULL_PORT);
    checkArgument(!Strings.isNullOrEmpty(osPort.getId()), ERR_NULL_PORT_ID);
    checkArgument(!Strings.isNullOrEmpty(osPort.getNetworkId()), ERR_NULL_PORT_NET_ID);
    osNetworkStore.updatePort(osPort);
    log.info(String.format(MSG_PORT, osPort.getId(), MSG_UPDATED));
}
// Updating a port without an id must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testUpdatePortWithNullId() {
    final Port testPort = NeutronPort.builder()
        .networkId(NETWORK_ID)
        .build();
    target.updatePort(testPort);
}
// Exposes the RPC service's internal scheduled executor.
@Override
public ScheduledExecutor getScheduledExecutor() {
    return internalScheduledExecutor;
}
// A runnable submitted to the RPC service's scheduled executor must actually run.
@Test
void testExecuteRunnable() throws Exception {
    final OneShotLatch latch = new OneShotLatch();
    pekkoRpcService.getScheduledExecutor().execute(latch::trigger);
    // Generous timeout so the test is not flaky under load.
    latch.await(30L, TimeUnit.SECONDS);
}
/**
 * Validates a GCP project id: rejects ids that are too short, too long, or
 * contain characters outside the allowed set.
 *
 * @param idToCheck the project id to validate
 * @throws IllegalArgumentException if the id fails any of the checks
 */
public static void checkValidProjectId(String idToCheck) {
    int length = idToCheck.length();
    if (length < MIN_PROJECT_ID_LENGTH) {
        throw new IllegalArgumentException("Project ID " + idToCheck + " cannot be empty.");
    }
    if (length > MAX_PROJECT_ID_LENGTH) {
        throw new IllegalArgumentException(
            "Project ID "
                + idToCheck
                + " cannot be longer than "
                + MAX_PROJECT_ID_LENGTH
                + " characters.");
    }
    if (ILLEGAL_PROJECT_CHARS.matcher(idToCheck).find()) {
        throw new IllegalArgumentException(
            "Project ID "
                + idToCheck
                + " is not a valid ID. Only letters, numbers, hyphens, single quotes, colon, dot and"
                + " exclamation points are allowed.");
    }
}
// An over-long project id must be rejected.
@Test
public void testCheckValidProjectIdWhenIdIsTooLong() {
    assertThrows(
        IllegalArgumentException.class,
        () -> checkValidProjectId("really-really-really-really-long-project-id"));
}
/**
 * Sets (or triggers) an ETH-port loopback on an ONU via NETCONF.
 *
 * The target string is {@code ponLink-onu-ethPort[:mode]}; an optional second
 * colon-separated part selects the loopback mode and must be one of the
 * supported LOOPBACKMODES values.
 *
 * @param target colon-separated ETH port identifier plus optional mode
 * @return the raw NETCONF RPC reply, or null on any validation/mastership failure
 */
@Override
public String loopbackEthOnu(String target) {
    DriverHandler handler = handler();
    NetconfController controller = handler.get(NetconfController.class);
    MastershipService mastershipService = handler.get(MastershipService.class);
    DeviceId ncDeviceId = handler.data().deviceId();
    checkNotNull(controller, "Netconf controller is null");
    String reply = null;
    String[] data = null;
    String[] ethId = null;
    // Only the master for the device may issue configuration commands.
    if (!mastershipService.isLocalMaster(ncDeviceId)) {
        log.warn("Not master for {} Use {} to execute command",
                 ncDeviceId,
                 mastershipService.getMasterFor(ncDeviceId));
        return null;
    }
    data = target.split(COLON);
    if (data.length > TWO) {
        log.error("Invalid number of parameters {}", target);
        return null;
    }
    // The first part must decompose into exactly three ids: ponLink, onu, ethPort.
    ethId = checkIdString(data[FIRST_PART], THREE);
    if (ethId == null) {
        log.error("Invalid ETH port identifier {}", data[FIRST_PART]);
        return null;
    }
    if (data.length > ONE) {
        if (!LOOPBACKMODES.contains(data[SECOND_PART])) {
            log.error("Unsupported parameter: {}", data[SECOND_PART]);
            return null;
        }
    }
    try {
        // Build the <onu-ethport-loopback> RPC payload element by element.
        StringBuilder request = new StringBuilder();
        request.append(ANGLE_LEFT + ONU_ETHPORT_LOOPBACK + SPACE);
        request.append(VOLT_NE_NAMESPACE + ANGLE_RIGHT + NEW_LINE);
        request.append(buildStartTag(PONLINK_ID, false))
            .append(ethId[FIRST_PART])
            .append(buildEndTag(PONLINK_ID))
            .append(buildStartTag(ONU_ID, false))
            .append(ethId[SECOND_PART])
            .append(buildEndTag(ONU_ID))
            .append(buildStartTag(ETHPORT_ID, false))
            .append(ethId[THIRD_PART])
            .append(buildEndTag(ETHPORT_ID));
        if (data.length > ONE) {
            // Optional loopback mode element.
            request.append(buildStartTag(LOOPBACK_MODE, false))
                .append(data[SECOND_PART])
                .append(buildEndTag(LOOPBACK_MODE));
        }
        request.append(buildEndTag(ONU_ETHPORT_LOOPBACK));
        reply = controller
            .getDevicesMap()
            .get(ncDeviceId)
            .getSession()
            .doWrappedRpc(request.toString());
    } catch (NetconfException e) {
        log.error("Cannot communicate to device {} exception {}", ncDeviceId, e);
    }
    return reply;
}
// Every well-formed ETH loopback target string must yield a non-null reply.
@Test
public void testValidLoopbackEthOnu() throws Exception {
    String target;
    String reply;
    for (int i = ZERO; i < VALID_ETHPORT_LOOPBACK_TCS.length; i++) {
        target = VALID_ETHPORT_LOOPBACK_TCS[i];
        // currentKey lets the mocked session validate which test case is running.
        currentKey = i;
        reply = voltConfig.loopbackEthOnu(target);
        assertNotNull("Incorrect response for VALID_ETHPORT_LOOPBACK_TCS", reply);
    }
}
// GET /{executionId}: returns the execution resolved within the current
// tenant, or null when not found (the framework maps null to an empty response).
@ExecuteOn(TaskExecutors.IO)
@Get(uri = "/{executionId}")
@Operation(tags = {"Executions"}, summary = "Get an execution")
public Execution get(
    @Parameter(description = "The execution id") @PathVariable String executionId
) {
    return executionRepository
        .findById(tenantService.resolveTenant(), executionId)
        .orElse(null);
}
// The namespaces endpoint must return at least the fixture's distinct
// namespaces (>= 5 with the seeded test data).
@SuppressWarnings("unchecked")
@Test
void getDistinctNamespaceExecutables() {
    List<String> result = client.toBlocking().retrieve(
        GET("/api/v1/executions/namespaces"),
        Argument.of(List.class, String.class)
    );
    assertThat(result.size(), greaterThanOrEqualTo(5));
}
// Dispatches inbound HTTP/2 stream frames: headers and data frames are
// forwarded to the transport listener; any other frame type falls through to
// the generic channelRead path.
@Override
protected void channelRead0(ChannelHandlerContext ctx, Http2StreamFrame msg) throws Exception {
    if (msg instanceof Http2HeadersFrame) {
        final Http2HeadersFrame headers = (Http2HeadersFrame) msg;
        transportListener.onHeader(headers.headers(), headers.isEndStream());
    } else if (msg instanceof Http2DataFrame) {
        final Http2DataFrame data = (Http2DataFrame) msg;
        transportListener.onData(data.content(), data.isEndStream());
    } else {
        super.channelRead(ctx, msg);
    }
}
// A headers frame with endStream must be forwarded to the listener exactly once.
@Test
void testChannelRead0() throws Exception {
    final Http2Headers headers = new DefaultHttp2Headers(true);
    DefaultHttp2HeadersFrame headersFrame = new DefaultHttp2HeadersFrame(headers, true);
    handler.channelRead0(ctx, headersFrame);
    Mockito.verify(transportListener, Mockito.times(1)).onHeader(headers, true);
}
/**
 * Converts a Parquet message schema into an Arrow schema, keeping the
 * per-field type mappings alongside both schemas.
 *
 * @param parquetSchema the Parquet schema to convert
 * @return a mapping holding the Arrow schema, the original Parquet schema,
 *         and the field-level type mappings
 */
public SchemaMapping fromParquet(MessageType parquetSchema) {
    List<Type> parquetFields = parquetSchema.getFields();
    List<TypeMapping> typeMappings = fromParquet(parquetFields);
    return new SchemaMapping(new Schema(fields(typeMappings)), parquetSchema, typeMappings);
}
// INT32 cannot carry a microsecond timestamp annotation; conversion must fail.
@Test(expected = IllegalStateException.class)
public void testParquetInt32TimestampMicrosToArrow() {
    converter.fromParquet(Types.buildMessage()
        .addField(Types.optional(INT32)
            .as(LogicalTypeAnnotation.timestampType(false, MICROS))
            .named("a"))
        .named("root"));
}
// Convenience overload: fences the target with no fencer-specific arguments.
public boolean fence(HAServiceTarget fromSvc) {
    return fence(fromSvc, null);
}
// The bare "sshfence" short name must parse, but fencing an unreachable mock
// target is expected to fail.
@Test
public void testShortNameSsh() throws BadFencingConfigurationException {
    NodeFencer fencer = setupFencer("sshfence");
    assertFalse(fencer.fence(MOCK_TARGET));
}
/**
 * Builds a cached, lazily-initialized supplier of all request managers used by
 * the async consumer's background thread. The group-related managers
 * (coordinator, commit, heartbeat, membership) are only created when a group
 * id is configured via the rebalance config.
 */
@SuppressWarnings({"checkstyle:ParameterNumber"})
public static Supplier<RequestManagers> supplier(final Time time,
                                                 final LogContext logContext,
                                                 final BackgroundEventHandler backgroundEventHandler,
                                                 final ConsumerMetadata metadata,
                                                 final SubscriptionState subscriptions,
                                                 final FetchBuffer fetchBuffer,
                                                 final ConsumerConfig config,
                                                 final GroupRebalanceConfig groupRebalanceConfig,
                                                 final ApiVersions apiVersions,
                                                 final FetchMetricsManager fetchMetricsManager,
                                                 final Supplier<NetworkClientDelegate> networkClientDelegateSupplier,
                                                 final Optional<ClientTelemetryReporter> clientTelemetryReporter,
                                                 final Metrics metrics,
                                                 final OffsetCommitCallbackInvoker offsetCommitCallbackInvoker,
                                                 final MemberStateListener applicationThreadMemberStateListener
) {
    return new CachedSupplier<RequestManagers>() {
        @Override
        protected RequestManagers create() {
            final NetworkClientDelegate networkClientDelegate = networkClientDelegateSupplier.get();
            final FetchConfig fetchConfig = new FetchConfig(config);
            long retryBackoffMs = config.getLong(ConsumerConfig.RETRY_BACKOFF_MS_CONFIG);
            long retryBackoffMaxMs = config.getLong(ConsumerConfig.RETRY_BACKOFF_MAX_MS_CONFIG);
            final int requestTimeoutMs = config.getInt(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG);
            // Managers that exist regardless of group membership.
            final OffsetsRequestManager listOffsets = new OffsetsRequestManager(subscriptions,
                metadata,
                fetchConfig.isolationLevel,
                time,
                retryBackoffMs,
                requestTimeoutMs,
                apiVersions,
                networkClientDelegate,
                backgroundEventHandler,
                logContext);
            final FetchRequestManager fetch = new FetchRequestManager(logContext,
                time,
                metadata,
                subscriptions,
                fetchConfig,
                fetchBuffer,
                fetchMetricsManager,
                networkClientDelegate,
                apiVersions);
            final TopicMetadataRequestManager topic = new TopicMetadataRequestManager(
                logContext,
                time,
                config);
            HeartbeatRequestManager heartbeatRequestManager = null;
            ConsumerMembershipManager membershipManager = null;
            CoordinatorRequestManager coordinator = null;
            CommitRequestManager commit = null;
            // Group-related managers are only wired up when a group id is set.
            if (groupRebalanceConfig != null && groupRebalanceConfig.groupId != null) {
                Optional<String> serverAssignor = Optional.ofNullable(config.getString(ConsumerConfig.GROUP_REMOTE_ASSIGNOR_CONFIG));
                coordinator = new CoordinatorRequestManager(
                    logContext,
                    retryBackoffMs,
                    retryBackoffMaxMs,
                    backgroundEventHandler,
                    groupRebalanceConfig.groupId);
                commit = new CommitRequestManager(
                    time,
                    logContext,
                    subscriptions,
                    config,
                    coordinator,
                    offsetCommitCallbackInvoker,
                    groupRebalanceConfig.groupId,
                    groupRebalanceConfig.groupInstanceId,
                    metrics);
                membershipManager = new ConsumerMembershipManager(
                    groupRebalanceConfig.groupId,
                    groupRebalanceConfig.groupInstanceId,
                    groupRebalanceConfig.rebalanceTimeoutMs,
                    serverAssignor,
                    subscriptions,
                    commit,
                    metadata,
                    logContext,
                    clientTelemetryReporter,
                    backgroundEventHandler,
                    time,
                    metrics);
                // Commit manager and the application-thread listener both track
                // membership state transitions.
                membershipManager.registerStateListener(commit);
                membershipManager.registerStateListener(applicationThreadMemberStateListener);
                heartbeatRequestManager = new HeartbeatRequestManager(
                    logContext,
                    time,
                    config,
                    coordinator,
                    subscriptions,
                    membershipManager,
                    backgroundEventHandler,
                    metrics);
            }
            return new RequestManagers(
                logContext,
                listOffsets,
                topic,
                fetch,
                Optional.ofNullable(coordinator),
                Optional.ofNullable(commit),
                Optional.ofNullable(heartbeatRequestManager),
                Optional.ofNullable(membershipManager)
            );
        }
    };
}
// When a group id is configured, the listener passed to supplier() must be
// registered on the membership manager.
@Test
public void testMemberStateListenerRegistered() {
    final MemberStateListener listener = (memberEpoch, memberId) -> {
    };
    final Properties properties = requiredConsumerConfig();
    properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "consumerGroup");
    final ConsumerConfig config = new ConsumerConfig(properties);
    final GroupRebalanceConfig groupRebalanceConfig = new GroupRebalanceConfig(
        config,
        GroupRebalanceConfig.ProtocolType.CONSUMER
    );
    final RequestManagers requestManagers = RequestManagers.supplier(
        new MockTime(),
        new LogContext(),
        mock(BackgroundEventHandler.class),
        mock(ConsumerMetadata.class),
        mock(SubscriptionState.class),
        mock(FetchBuffer.class),
        config,
        groupRebalanceConfig,
        mock(ApiVersions.class),
        mock(FetchMetricsManager.class),
        () -> mock(NetworkClientDelegate.class),
        Optional.empty(),
        new Metrics(),
        mock(OffsetCommitCallbackInvoker.class),
        listener
    ).get();
    requestManagers.consumerMembershipManager.ifPresent(
        membershipManager -> assertTrue(membershipManager.stateListeners().contains(listener))
    );
}
/**
 * Streams the hits matched by this predicate search; an empty stream when
 * there are no posting lists to evaluate.
 */
public Stream<Hit> stream() {
    return nPostingLists == 0
            ? Stream.empty()
            : StreamSupport.stream(new PredicateSpliterator(), false);
}
// Hits from two interleaved posting lists must come out in document order
// after internal advancing/re-sorting.
@Test
void requireThatPostingListsAreSortedAfterAdvancing() {
    PredicateSearch search = createPredicateSearch(
        new byte[]{2, 1, 1, 1},
        postingList(SubqueryBitmap.ALL_SUBQUERIES,
            entry(0, 0x000100ff),
            entry(3, 0x000100ff)),
        postingList(SubqueryBitmap.ALL_SUBQUERIES,
            entry(1, 0x000100ff),
            entry(2, 0x000100ff)));
    assertEquals(List.of(new Hit(1), new Hit(2), new Hit(3)).toString(),
        search.stream().toList().toString());
}
/**
 * Computes the average percentage of this resource set used by {@code used},
 * averaging over memory, CPU and every "other" resource whose total is
 * non-zero. Resources with a zero total are skipped so they do not distort
 * the average.
 *
 * @param used          the resources consumed
 * @param totalMemoryMb total memory in this set, in MB
 * @param usedMemoryMb  memory consumed, in MB
 * @return average usage percentage (0..100); 100 when every total is zero
 * @throws IllegalArgumentException if any used amount exceeds its total
 */
public double calculateAveragePercentageUsedBy(NormalizedResources used, double totalMemoryMb, double usedMemoryMb) {
    int skippedResourceTypes = 0;
    double total = 0.0;
    if (usedMemoryMb > totalMemoryMb) {
        throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
    }
    if (totalMemoryMb != 0.0) {
        total += usedMemoryMb / totalMemoryMb;
    } else {
        skippedResourceTypes++;
    }
    double totalCpu = getTotalCpu();
    if (used.getTotalCpu() > getTotalCpu()) {
        throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
    }
    if (totalCpu != 0.0) {
        total += used.getTotalCpu() / getTotalCpu();
    } else {
        skippedResourceTypes++;
    }
    // A used resource type that doesn't exist in the total set means used is
    // not a subset of this resource set.
    if (used.otherResources.length > otherResources.length) {
        throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
    }
    for (int i = 0; i < otherResources.length; i++) {
        double totalValue = otherResources[i];
        double usedValue;
        if (i >= used.otherResources.length) {
            //Resources missing from used are using none of that resource
            usedValue = 0.0;
        } else {
            usedValue = used.otherResources[i];
        }
        if (usedValue > totalValue) {
            throwBecauseUsedIsNotSubsetOfTotal(used, totalMemoryMb, usedMemoryMb);
        }
        if (totalValue == 0.0) {
            //Skip any resources where the total is 0, the percent used for this resource isn't meaningful.
            //We fall back to prioritizing by cpu, memory and any other resources by ignoring this value
            skippedResourceTypes++;
            continue;
        }
        total += usedValue / totalValue;
    }
    //Adjust the divisor for the average to account for any skipped resources (those where the total was 0)
    int divisor = 2 + otherResources.length - skippedResourceTypes;
    if (divisor == 0) {
        /*
         * This is an arbitrary choice to make the result consistent with calculateMin. Any value would be valid here, becase there are
         * no (non-zero) resources in the total set of resources, so we're trying to average 0 values.
         */
        return 100.0;
    } else {
        return (total * 100.0) / divisor;
    }
}
// A used resource type (GPU) absent from the total set must trigger
// IllegalArgumentException — used cannot be a subset of total.
@Test
public void testCalculateAvgWithResourceMissingFromTotal() {
    Map<String, Double> allResourcesMap = new HashMap<>();
    allResourcesMap.put(Constants.COMMON_CPU_RESOURCE_NAME, 2.0);
    NormalizedResources resources = new NormalizedResources(normalize(allResourcesMap));
    Map<String, Double> usedResourcesMap = new HashMap<>();
    usedResourcesMap.put(Constants.COMMON_CPU_RESOURCE_NAME, 1.0);
    usedResourcesMap.put(gpuResourceName, 1.0);
    NormalizedResources usedResources = new NormalizedResources(normalize(usedResourcesMap));
    assertThrows(IllegalArgumentException.class,
        () -> resources.calculateAveragePercentageUsedBy(usedResources, 4, 1));
}
// Convenience overload: parses a step parameter with a fresh (empty) set of
// visited parameters for cycle detection.
public void parseStepParameter(
    Map<String, Map<String, Object>> allStepOutputData,
    Map<String, Parameter> workflowParams,
    Map<String, Parameter> stepParams,
    Parameter param,
    String stepId) {
    parseStepParameter(
        allStepOutputData, workflowParams, stepParams, param, stepId, new HashSet<>());
}
// Reference syntax with three underscores (_step1___foo) must resolve to the
// "_foo" parameter of step "_step1".
@Test
public void testParseStepParameterWith3Underscore() {
    StringParameter bar =
        StringParameter.builder().name("bar").expression("_step1___foo + '-1';").build();
    paramEvaluator.parseStepParameter(
        Collections.singletonMap("_step1", Collections.emptyMap()),
        Collections.emptyMap(),
        Collections.singletonMap("_foo", StringParameter.builder().value("123").build()),
        bar,
        "_step1");
    assertEquals("123-1", bar.getEvaluatedResult());
}
/**
 * Matches a message against all stream rules and returns the streams it
 * belongs to. AND streams are dropped (and blacklisted) as soon as one rule
 * fails; OR streams are blacklisted as soon as one rule matches. Streams
 * flagged to remove matches from the default stream strip it from the message.
 */
public List<Stream> match(Message message) {
    final Set<Stream> result = Sets.newHashSet();
    // Stream ids that need no further evaluation (already matched for OR,
    // already failed for AND).
    final Set<String> blackList = Sets.newHashSet();
    for (final Rule rule : rulesList) {
        if (blackList.contains(rule.getStreamId())) {
            continue;
        }
        final StreamRule streamRule = rule.getStreamRule();
        final StreamRuleType streamRuleType = streamRule.getType();
        final Stream.MatchingType matchingType = rule.getMatchingType();
        // Rules that need the field to exist fail fast when it is absent.
        if (!ruleTypesNotNeedingFieldPresence.contains(streamRuleType)
                && !message.hasField(streamRule.getField())) {
            if (matchingType == Stream.MatchingType.AND) {
                result.remove(rule.getStream());
                // blacklist stream because it can't match anymore
                blackList.add(rule.getStreamId());
            }
            continue;
        }
        final Stream stream;
        if (streamRuleType != StreamRuleType.REGEX) {
            stream = rule.match(message);
        } else {
            // Regex evaluation is bounded to protect against catastrophic backtracking.
            stream = rule.matchWithTimeOut(message, streamProcessingTimeout, TimeUnit.MILLISECONDS);
        }
        if (stream == null) {
            if (matchingType == Stream.MatchingType.AND) {
                result.remove(rule.getStream());
                // blacklist stream because it can't match anymore
                blackList.add(rule.getStreamId());
            }
        } else {
            result.add(stream);
            if (matchingType == Stream.MatchingType.OR) {
                // blacklist stream because it is already matched
                blackList.add(rule.getStreamId());
            }
        }
    }
    final Stream defaultStream = defaultStreamProvider.get();
    boolean alreadyRemovedDefaultStream = false;
    for (Stream stream : result) {
        if (stream.getRemoveMatchesFromDefaultStream()) {
            if (alreadyRemovedDefaultStream || message.removeStream(defaultStream)) {
                alreadyRemovedDefaultStream = true;
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Successfully removed default stream <{}> from message <{}>", defaultStream.getId(), message.getId());
                }
            } else {
                // A previously executed message processor (or Illuminate) has likely already removed the
                // default stream from the message. Now, the message has matched a stream in the Graylog
                // MessageFilterChain, and the matching stream is also set to remove the default stream.
                // This is usually from user-defined stream rules, and is generally not a problem.
                cannotRemoveDefaultMeter.inc();
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Couldn't remove default stream <{}> from message <{}>", defaultStream.getId(), message.getId());
                }
            }
        }
    }
    return ImmutableList.copyOf(result);
}
// Regression test for GitHub issue #1396: an AND stream must not match when
// only a subset of its rule fields are present on the message.
@Test
public void issue1396() throws Exception {
    final StreamMock stream = getStreamMock("GitHub issue #1396");
    stream.setMatchingType(Stream.MatchingType.AND);
    final StreamRuleMock rule1 = new StreamRuleMock(ImmutableMap.<String, Object>builder()
        .put("_id", new ObjectId())
        .put("field", "custom1")
        .put("value", "value1")
        .put("type", StreamRuleType.EXACT.toInteger())
        .put("inverted", false)
        .put("stream_id", stream.getId())
        .build()
    );
    final StreamRuleMock rule2 = new StreamRuleMock(ImmutableMap.<String, Object>builder()
        .put("_id", new ObjectId())
        .put("field", "custom2")
        .put("value", "value2")
        .put("type", StreamRuleType.EXACT.toInteger())
        .put("inverted", false)
        .put("stream_id", stream.getId())
        .build()
    );
    stream.setStreamRules(Lists.newArrayList(rule1, rule2));
    final StreamRouterEngine engine = newEngine(Lists.newArrayList(stream));
    final Message message1 = getMessage();
    message1.addFields(ImmutableMap.of("custom1", "value1"));
    assertTrue("Message without \"custom2\" should not match conditions",
        engine.match(message1).isEmpty());
    final Message message2 = getMessage();
    message2.addFields(ImmutableMap.of(
            "custom1", "value1",
            "custom2", "value2"
        )
    );
    assertEquals("Message with \"custom1\" and \"custom2\" should match conditions",
        Lists.newArrayList(stream),
        engine.match(message2));
}
/**
 * Parses a comma-separated list of ZooKeeper auth entries of the form
 * {@code scheme:auth} into {@link ZKAuthInfo} objects. Whitespace around
 * entries is trimmed and empty entries are skipped.
 *
 * @param authString comma-separated {@code scheme:auth} pairs; may be null
 * @return the parsed auth entries; empty when {@code authString} is null or blank
 * @throws BadAuthFormatException if any entry is not of the form scheme:auth
 */
public static List<ZKAuthInfo> parseAuth(String authString) throws BadAuthFormatException {
    List<ZKAuthInfo> ret = Lists.newArrayList();
    if (authString == null) {
        return ret;
    }
    List<String> authComps = Lists.newArrayList(
        Splitter.on(',').omitEmptyStrings().trimResults()
            .split(authString));
    for (String comp : authComps) {
        // Split on the first ':' only, since the auth part may itself contain colons.
        // Idiom fix: Java-style array declaration instead of C-style "String parts[]".
        String[] parts = comp.split(":", 2);
        if (parts.length != 2) {
            throw new BadAuthFormatException(
                "Auth '" + comp + "' not of expected form scheme:auth");
        }
        ret.add(new ZKAuthInfo(parts[0], parts[1].getBytes(StandardCharsets.UTF_8)));
    }
    return ret;
}
// An empty auth string must parse to an empty list, not fail.
@Test
public void testEmptyAuth() {
    List<ZKAuthInfo> result = ZKUtil.parseAuth("");
    assertTrue(result.isEmpty());
}
/**
 * Checks whether the current user is allowed to scan the GitHub repository,
 * either through a direct collaborator permission or through membership in a
 * group that grants the scan permission.
 *
 * @return true when a direct or group-based scan permission exists
 */
@Override
public boolean isScanAllowedUsingPermissionsFromDevopsPlatform() {
    checkState(authAppInstallationToken != null, "An auth app token is required in case repository permissions checking is necessary.");
    String[] orgaAndRepoTokenified = devOpsProjectCreationContext.fullName().split("/");
    String organization = orgaAndRepoTokenified[0];
    String repository = orgaAndRepoTokenified[1];
    // Bug fix: the DB session was previously opened but never closed, leaking a
    // connection on every call. Scope it with try-with-resources.
    Set<DevOpsPermissionsMappingDto> permissionsMappingDtos;
    try (var dbSession = dbClient.openSession(false)) {
        permissionsMappingDtos = dbClient.githubPermissionsMappingDao()
            .findAll(dbSession, devOpsPlatformSettings.getDevOpsPlatform());
    }
    boolean userHasDirectAccessToRepo = doesUserHaveScanPermission(organization, repository, permissionsMappingDtos);
    if (userHasDirectAccessToRepo) {
        return true;
    }
    return doesUserBelongToAGroupWithScanPermission(organization, repository, permissionsMappingDtos);
}
// A collaborator whose role maps to the "scan" permission grants direct access.
@Test
void isScanAllowedUsingPermissionsFromDevopsPlatform_whenCollaboratorHasDirectAccess_returnsTrue() {
    GsonRepositoryCollaborator collaborator1 = mockCollaborator("collaborator1", 1, "role1", "read", "admin");
    GsonRepositoryCollaborator collaborator2 = mockCollaborator("collaborator2", 2, "role2", "read", "scan");
    mockGithubCollaboratorsFromApi(collaborator1, collaborator2);
    // The current session user is collaborator2, who holds the scan permission.
    bindSessionToCollaborator(collaborator2);
    assertThat(githubProjectCreator.isScanAllowedUsingPermissionsFromDevopsPlatform()).isTrue();
}
// Writes a 16-bit short at the current position using the configured byte
// order, growing the buffer if needed, then advances the position.
@Override
public void writeShort(final int v) throws IOException {
    ensureAvailable(SHORT_SIZE_IN_BYTES);
    Bits.writeShort(buffer, pos, (short) v, isBigEndian);
    pos += SHORT_SIZE_IN_BYTES;
}
// A written short must be readable back (big-endian) from the buffer start.
@Test
public void testWriteShortV() throws Exception {
    short expected = 100;
    out.writeShort(expected);
    short actual = Bits.readShortB(out.buffer, 0);
    assertEquals(expected, actual);
}
/**
 * Returns the JRE metadata entries matching the optional OS and architecture
 * filters. A blank filter matches everything; a non-blank value that does not
 * map to a supported OS/arch causes the corresponding parse to fail.
 */
@Override
public List<JreInfoRestResponse> getJresMetadata(@Nullable String os, @Nullable String arch) {
    return metadata.values().stream()
        .filter(jre -> isBlank(os) || OS.from(jre.os()) == OS.from(os))
        .filter(jre -> isBlank(arch) || Arch.from(jre.arch()) == Arch.from(arch))
        .toList();
}
// An unsupported OS filter value must fail with a descriptive
// IllegalArgumentException.
@Test
void getJresMetadata_shouldFail_whenFilteredWithUnsupportedOsValue() {
    String anyUnsupportedOS = "not-supported";
    assertThatThrownBy(() -> jresHandler.getJresMetadata(anyUnsupportedOS, null))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessageStartingWith("Unsupported OS: '" + anyUnsupportedOS + "'");
}
/**
 * Builds a __consumer_offsets group-metadata record for a classic group,
 * embedding every member's subscription and assignment.
 *
 * @param group           the classic group to serialize
 * @param assignment      per-member assignment bytes, keyed by member id
 * @param metadataVersion determines the record value version to write
 * @return the coordinator record (key + versioned value)
 * @throws IllegalStateException if a member has no subscription for the
 *                               selected protocol or no assignment entry
 */
public static CoordinatorRecord newGroupMetadataRecord(
    ClassicGroup group,
    Map<String, byte[]> assignment,
    MetadataVersion metadataVersion
) {
    List<GroupMetadataValue.MemberMetadata> members = new ArrayList<>(group.allMembers().size());
    group.allMembers().forEach(member -> {
        // The subscription bytes come from the member's metadata for the
        // group's selected protocol; absence means the group state is invalid.
        byte[] subscription = group.protocolName().map(member::metadata).orElse(null);
        if (subscription == null) {
            throw new IllegalStateException("Attempted to write non-empty group metadata with no defined protocol.");
        }
        byte[] memberAssignment = assignment.get(member.memberId());
        if (memberAssignment == null) {
            throw new IllegalStateException("Attempted to write member " + member.memberId()
                + " of group " + group.groupId() + " with no assignment.");
        }
        members.add(
            new GroupMetadataValue.MemberMetadata()
                .setMemberId(member.memberId())
                .setClientId(member.clientId())
                .setClientHost(member.clientHost())
                .setRebalanceTimeout(member.rebalanceTimeoutMs())
                .setSessionTimeout(member.sessionTimeoutMs())
                .setGroupInstanceId(member.groupInstanceId().orElse(null))
                .setSubscription(subscription)
                .setAssignment(memberAssignment)
        );
    });
    return new CoordinatorRecord(
        new ApiMessageAndVersion(
            new GroupMetadataKey()
                .setGroup(group.groupId()),
            (short) 2
        ),
        new ApiMessageAndVersion(
            new GroupMetadataValue()
                .setProtocol(group.protocolName().orElse(null))
                .setProtocolType(group.protocolType().orElse(""))
                .setGeneration(group.generationId())
                .setLeader(group.leaderOrNull())
                .setCurrentStateTimestamp(group.currentStateTimestampOrDefault())
                .setMembers(members),
            metadataVersion.groupMetadataValueVersion()
        )
    );
}
// Writing group metadata with a member that has no assignment entry must throw
// IllegalStateException.
@Test
public void testNewGroupMetadataRecordThrowsWhenEmptyAssignment() {
    Time time = new MockTime();
    List<GroupMetadataValue.MemberMetadata> expectedMembers = new ArrayList<>();
    expectedMembers.add(
        new GroupMetadataValue.MemberMetadata()
            .setMemberId("member-1")
            .setClientId("client-1")
            .setClientHost("host-1")
            .setRebalanceTimeout(1000)
            .setSessionTimeout(1500)
            .setGroupInstanceId("group-instance-1")
            .setSubscription(new byte[]{0, 1})
            .setAssignment(null)
    );
    ClassicGroup group = new ClassicGroup(
        new LogContext(),
        "group-id",
        ClassicGroupState.PREPARING_REBALANCE,
        time,
        mock(GroupCoordinatorMetricsShard.class)
    );
    expectedMembers.forEach(member -> {
        JoinGroupRequestProtocolCollection protocols = new JoinGroupRequestProtocolCollection();
        protocols.add(new JoinGroupRequestProtocol()
            .setName("range")
            .setMetadata(member.subscription()));
        group.add(new ClassicGroupMember(
            member.memberId(),
            Optional.of(member.groupInstanceId()),
            member.clientId(),
            member.clientHost(),
            member.rebalanceTimeout(),
            member.sessionTimeout(),
            "consumer",
            protocols,
            member.assignment()
        ));
    });
    // Empty assignment map: no member has an assignment -> must throw.
    assertThrows(IllegalStateException.class, () ->
        GroupCoordinatorRecordHelpers.newGroupMetadataRecord(
            group,
            Collections.emptyMap(),
            MetadataVersion.IBP_3_5_IV2
        ));
}
/**
 * Looks up a MetricRegistry in the Camel registry: first by name, then — if
 * exactly one registry of that type exists — by type. Returns null when no
 * unambiguous registry can be found.
 */
static MetricRegistry getMetricRegistryFromCamelRegistry(Registry camelRegistry, String registryName) {
    MetricRegistry byName = camelRegistry.lookupByNameAndType(registryName, MetricRegistry.class);
    if (byName != null) {
        return byName;
    }
    Set<MetricRegistry> candidates = camelRegistry.findByType(MetricRegistry.class);
    return candidates.size() == 1 ? candidates.iterator().next() : null;
}
// A name lookup hit must be returned directly, with no fallback type search.
@Test
public void testGetMetricRegistryFromCamelRegistry() {
    when(camelRegistry.lookupByNameAndType("name", MetricRegistry.class)).thenReturn(metricRegistry);
    MetricRegistry result = MetricsComponent.getMetricRegistryFromCamelRegistry(camelRegistry, "name");
    assertThat(result, is(metricRegistry));
    inOrder.verify(camelRegistry, times(1)).lookupByNameAndType("name", MetricRegistry.class);
    inOrder.verifyNoMoreInteractions();
}
/**
 * Runs the portable executable stage over a partition of inputs. Stages
 * without timers take a fast path; stages with timers use an in-memory timer
 * manager, then fire all pending timers by advancing the watermarks and
 * processing time to infinity.
 *
 * @return an iterator over the tagged union outputs collected from the SDK harness
 */
@Override
public Iterator<RawUnionValue> call(Iterator<WindowedValue<InputT>> inputs) throws Exception {
    SparkPipelineOptions options = pipelineOptions.get().as(SparkPipelineOptions.class);
    // Register standard file systems.
    FileSystems.setDefaultPipelineOptions(options);
    // Do not call processElements if there are no inputs
    // Otherwise, this may cause validation errors (e.g. ParDoTest)
    if (!inputs.hasNext()) {
        return Collections.emptyIterator();
    }
    try (ExecutableStageContext stageContext = contextFactory.get(jobInfo)) {
        ExecutableStage executableStage = ExecutableStage.fromPayload(stagePayload);
        try (StageBundleFactory stageBundleFactory = stageContext.getStageBundleFactory(executableStage)) {
            // Thread-safe because the SDK harness may deliver outputs concurrently.
            ConcurrentLinkedQueue<RawUnionValue> collector = new ConcurrentLinkedQueue<>();
            StateRequestHandler stateRequestHandler =
                getStateRequestHandler(
                    executableStage, stageBundleFactory.getProcessBundleDescriptor());
            if (executableStage.getTimers().size() == 0) {
                ReceiverFactory receiverFactory = new ReceiverFactory(collector, outputMap);
                processElements(stateRequestHandler, receiverFactory, null, stageBundleFactory, inputs);
                return collector.iterator();
            }
            // Used with Batch, we know that all the data is available for this key. We can't use the
            // timer manager from the context because it doesn't exist. So we create one and advance
            // time to the end after processing all elements.
            final InMemoryTimerInternals timerInternals = new InMemoryTimerInternals();
            timerInternals.advanceProcessingTime(Instant.now());
            timerInternals.advanceSynchronizedProcessingTime(Instant.now());
            ReceiverFactory receiverFactory = new ReceiverFactory(collector, outputMap);
            TimerReceiverFactory timerReceiverFactory =
                new TimerReceiverFactory(
                    stageBundleFactory,
                    (Timer<?> timer, TimerInternals.TimerData timerData) -> {
                        currentTimerKey = timer.getUserKey();
                        if (timer.getClearBit()) {
                            timerInternals.deleteTimer(timerData);
                        } else {
                            timerInternals.setTimer(timerData);
                        }
                    },
                    windowCoder);
            // Process inputs.
            processElements(
                stateRequestHandler,
                receiverFactory,
                timerReceiverFactory,
                stageBundleFactory,
                inputs);
            // Finish any pending windows by advancing the input watermark to infinity.
            timerInternals.advanceInputWatermark(BoundedWindow.TIMESTAMP_MAX_VALUE);
            // Finally, advance the processing time to infinity to fire any timers.
            timerInternals.advanceProcessingTime(BoundedWindow.TIMESTAMP_MAX_VALUE);
            timerInternals.advanceSynchronizedProcessingTime(BoundedWindow.TIMESTAMP_MAX_VALUE);
            // Now we fire the timers and process elements generated by timers (which may be timers
            // itself)
            while (timerInternals.hasPendingTimers()) {
                try (RemoteBundle bundle =
                         stageBundleFactory.getBundle(
                             receiverFactory,
                             timerReceiverFactory,
                             stateRequestHandler,
                             getBundleProgressHandler())) {
                    PipelineTranslatorUtils.fireEligibleTimers(
                        timerInternals, bundle.getTimerReceivers(), currentTimerKey);
                }
            }
            return collector.iterator();
        }
    }
}
// An exception raised while closing the remote bundle must propagate to the
// caller of call(), not be swallowed.
@Test(expected = Exception.class)
public void sdkErrorsSurfaceOnClose() throws Exception {
    SparkExecutableStageFunction<Integer, ?> function = getFunction(Collections.emptyMap());
    doThrow(new Exception()).when(remoteBundle).close();
    List<WindowedValue<Integer>> inputs = new ArrayList<>();
    inputs.add(WindowedValue.valueInGlobalWindow(0));
    function.call(inputs.iterator());
}
/**
 * Extracts the charset name from a connection's Content-Type header.
 *
 * @param conn the HTTP connection; may be null
 * @return the charset name, or null when the connection is null or the header
 *         carries no charset
 */
public static String getCharset(HttpURLConnection conn) {
    return null == conn ? null : getCharset(conn.getContentType());
}
// The charset regexes must extract the charset from both Content-Type headers
// and HTML <meta charset=...> declarations in all quoting/spacing variants.
@Test
public void getCharsetTest() {
    String charsetName = ReUtil.get(HttpUtil.CHARSET_PATTERN, "Charset=UTF-8;fq=0.9", 1);
    assertEquals("UTF-8", charsetName);
    charsetName = ReUtil.get(HttpUtil.META_CHARSET_PATTERN, "<meta charset=utf-8", 1);
    assertEquals("utf-8", charsetName);
    charsetName = ReUtil.get(HttpUtil.META_CHARSET_PATTERN, "<meta charset='utf-8'", 1);
    assertEquals("utf-8", charsetName);
    charsetName = ReUtil.get(HttpUtil.META_CHARSET_PATTERN, "<meta charset=\"utf-8\"", 1);
    assertEquals("utf-8", charsetName);
    charsetName = ReUtil.get(HttpUtil.META_CHARSET_PATTERN, "<meta charset = \"utf-8\"", 1);
    assertEquals("utf-8", charsetName);
}
private int getGroups(String[] usernames) throws IOException { // Get groups users belongs to ResourceManagerAdministrationProtocol adminProtocol = createAdminProtocol(); if (usernames.length == 0) { usernames = new String[] { UserGroupInformation.getCurrentUser().getUserName() }; } for (String username : usernames) { StringBuilder sb = new StringBuilder(); sb.append(username + " :"); for (String group : adminProtocol.getGroupsForUser(username)) { sb.append(" ") .append(group); } System.out.println(sb); } return 0; }
// -getGroups must query the admin protocol and print "admin : group1 group2".
@Test
public void testGetGroups() throws Exception {
    when(admin.getGroupsForUser(eq("admin"))).thenReturn(
        new String[] {"group1", "group2"});
    PrintStream origOut = System.out;
    PrintStream out = mock(PrintStream.class);
    // Capture stdout so the printed line can be verified.
    System.setOut(out);
    try {
        String[] args = { "-getGroups", "admin" };
        assertEquals(0, rmAdminCLI.run(args));
        verify(admin).getGroupsForUser(eq("admin"));
        verify(out).println(argThat(
            (ArgumentMatcher<StringBuilder>) arg -> ("" + arg).equals("admin : group1 group2")));
    } finally {
        // Always restore the original stdout.
        System.setOut(origOut);
    }
}
// Convenience overload: fences the target with no fencer-specific arguments.
public boolean fence(HAServiceTarget fromSvc) {
    return fence(fromSvc, null);
}
// Fencer config parsing must tolerate blank lines, leading whitespace and
// trailing '#' comments, and must try fencers in order until one succeeds.
@Test
public void testWhitespaceAndCommentsInConfig()
    throws BadFencingConfigurationException {
    NodeFencer fencer = setupFencer(
        "\n" +
        " # the next one will always fail\n" +
        " " + AlwaysFailFencer.class.getName() + "(foo) # <- fails\n" +
        AlwaysSucceedFencer.class.getName() + "(bar) \n");
    assertTrue(fencer.fence(MOCK_TARGET));
    // One call to each, since top fencer fails
    assertEquals(1, AlwaysFailFencer.fenceCalled);
    assertSame(MOCK_TARGET, AlwaysFailFencer.fencedSvc);
    assertEquals(1, AlwaysSucceedFencer.fenceCalled);
    assertSame(MOCK_TARGET, AlwaysSucceedFencer.fencedSvc);
    assertEquals("foo", AlwaysFailFencer.callArgs.get(0));
    assertEquals("bar", AlwaysSucceedFencer.callArgs.get(0));
}
/**
 * Stores an environment variable, rejecting {@code null} values up front
 * with a message naming the offending key.
 *
 * @param key   variable name
 * @param value variable value; must not be {@code null}
 * @return the previous value mapped to {@code key}, or {@code null}
 * @throws IllegalArgumentException if {@code value} is {@code null}
 */
@Override
public String put(String key, String value) {
    // Fail fast: a null value would otherwise surface much later as a
    // confusing NPE deep inside environment expansion.
    if (value == null) {
        throw new IllegalArgumentException("Null value not allowed as an environment variable: " + key);
    }
    return super.put(key, value);
}
@Test
public void overrideOrderCalculatorInOrder() {
    EnvVars env = new EnvVars();
    EnvVars overrides = new EnvVars();
    // A has no reference; B->A, C->B, E->C, D->E form a dependency chain
    // that must be resolved in topological order regardless of insert order.
    overrides.put("A", "NoReference");
    overrides.put("B", "${A}");
    overrides.put("C", "${B}");
    overrides.put("D", "${E}");
    overrides.put("E", "${C}");

    OverrideOrderCalculator calc = new OverrideOrderCalculator(env, overrides);
    List<String> order = calc.getOrderedVariableNames();
    assertEquals(Arrays.asList("A", "B", "C", "E", "D"), order);
}
/**
 * Completes a transactional message locally by dispatching an
 * END_TRANSACTION command straight to the in-process broker's processor,
 * bypassing the network.
 *
 * NOTE(review): brokerName and timeoutMillis are ignored on this local
 * path — confirm callers do not rely on them here.
 */
@Override
public CompletableFuture<Void> endTransactionOneway(ProxyContext ctx, String brokerName,
    EndTransactionRequestHeader requestHeader, long timeoutMillis) {
    CompletableFuture<Void> future = new CompletableFuture<>();
    // Build an in-process channel for this proxy context to carry the request.
    SimpleChannel channel = channelManager.createChannel(ctx);
    ChannelHandlerContext channelHandlerContext = channel.getChannelHandlerContext();
    RemotingCommand command = LocalRemotingCommand.createRequestCommand(RequestCode.END_TRANSACTION,
        requestHeader, ctx.getLanguage());
    try {
        brokerController.getEndTransactionProcessor()
            .processRequest(channelHandlerContext, command);
        future.complete(null);
    } catch (Exception e) {
        // Surface processor failures through the future rather than throwing.
        future.completeExceptionally(e);
    }
    return future;
}
@Test
public void testEndTransaction() throws Exception {
    EndTransactionRequestHeader requestHeader = new EndTransactionRequestHeader();
    // Fire the one-way end-transaction call through the local message service.
    localMessageService.endTransactionOneway(proxyContext, null, requestHeader, 1000L);
    // The broker-side processor must receive exactly one END_TRANSACTION
    // command whose custom header is an EndTransactionRequestHeader.
    Mockito.verify(endTransactionProcessorMock, Mockito.times(1)).processRequest(
        Mockito.any(SimpleChannelHandlerContext.class), Mockito.argThat(argument -> {
            boolean first = argument.getCode() == RequestCode.END_TRANSACTION;
            boolean second = argument.readCustomHeader() instanceof EndTransactionRequestHeader;
            return first && second;
        }));
}
/**
 * Returns the next record batch contained in the buffer, or {@code null}
 * if the remaining bytes do not hold a complete batch. Advances the
 * buffer position past the returned batch.
 */
public MutableRecordBatch nextBatch() {
    int remaining = buffer.remaining();

    // nextBatchSize() is null when not even a batch header is available.
    Integer batchSize = nextBatchSize();
    if (batchSize == null || remaining < batchSize)
        return null;

    // Peek at the magic byte without moving the position.
    byte magic = buffer.get(buffer.position() + MAGIC_OFFSET);

    // Slice exactly one batch out of the buffer, then skip past it.
    ByteBuffer batchSlice = buffer.slice();
    batchSlice.limit(batchSize);
    buffer.position(buffer.position() + batchSize);

    // Magic values above V1 use the current batch format; older ones the legacy layout.
    if (magic > RecordBatch.MAGIC_VALUE_V1)
        return new DefaultRecordBatch(batchSlice);
    else
        return new AbstractLegacyRecordBatch.ByteBufferLegacyRecordBatch(batchSlice);
}
@Test
public void iteratorRaisesOnInvalidMagic() {
    // Write two record batches back to back into one buffer.
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, Compression.NONE, TimestampType.CREATE_TIME, 0L);
    builder.append(15L, "a".getBytes(), "1".getBytes());
    builder.append(20L, "b".getBytes(), "2".getBytes());
    builder.close();

    // Remember where the second batch starts so its magic byte can be corrupted.
    int position = buffer.position();

    builder = MemoryRecords.builder(buffer, Compression.NONE, TimestampType.CREATE_TIME, 2L);
    builder.append(30L, "c".getBytes(), "3".getBytes());
    builder.append(40L, "d".getBytes(), "4".getBytes());
    builder.close();

    buffer.flip();
    // Corrupt the magic byte of the second batch with an unknown value.
    buffer.put(position + DefaultRecordBatch.MAGIC_OFFSET, (byte) 37);

    ByteBufferLogInputStream logInputStream = new ByteBufferLogInputStream(buffer, Integer.MAX_VALUE);
    // First batch is intact; the corrupted second batch must raise.
    assertNotNull(logInputStream.nextBatch());
    assertThrows(CorruptRecordException.class, logInputStream::nextBatch);
}
/**
 * Translates the current app session state into a polling response for the
 * authentication-status check.
 *
 * @param flow    current flow (unused here but part of the handler contract)
 * @param request the status request, carrying the app type for logging
 * @return a pending/ok/nok/status response matching the session state
 */
@Override
public AppResponse process(Flow flow, CheckAuthenticationStatusRequest request) {
    switch (appSession.getState()) {
        case "AUTHENTICATION_REQUIRED", "AWAITING_QR_SCAN":
            return new CheckAuthenticationStatusResponse("PENDING", false);
        case "RETRIEVED", "AWAITING_CONFIRMATION":
            return new CheckAuthenticationStatusResponse("PENDING", true);
        case "CONFIRMED":
            return new StatusResponse("PENDING_CONFIRMED");
        case "AUTHENTICATED":
            return new OkResponse();
        case "CANCELLED":
            return new StatusResponse("CANCELLED");
        case "ABORTED":
            // Constant-first equals avoids an NPE when no abort code was recorded.
            if ("verification_code_invalid".equals(appSession.getAbortCode())) {
                String logCode = "wid_checker".equals(request.getAppType()) ? "1320" : "1368";
                digidClient.remoteLog(logCode, Map.of(HIDDEN, true));
            }
            return new NokResponse();
        default:
            // Unknown states are treated as still pending.
            return new CheckAuthenticationStatusResponse("PENDING", false);
    }
}
@Test
void processRetrieved() {
    // A RETRIEVED session is still pending, but the app has been reached.
    appSession.setState("RETRIEVED");

    AppResponse response = checkAuthenticationStatus.process(flow, request);

    assertTrue(response instanceof CheckAuthenticationStatusResponse);
    assertEquals("PENDING", ((CheckAuthenticationStatusResponse) response).getStatus());
    assertEquals(true, ((CheckAuthenticationStatusResponse) response).isSessionReceived());
}
/**
 * Resolves parameter references in the given object's string leaves,
 * non-string leaves and nested nodes.
 *
 * @param resolvable the object whose fields are to be resolved
 */
public <T> void resolve(T resolvable) {
    ParamResolver resolver = this;
    // If the resolvable introduces its own parameter scope, layer it over
    // this resolver before descending into its fields. Plain instanceof is
    // the idiomatic (and null-safe) form of the original isAssignableFrom check.
    if (resolvable instanceof ParamScope) {
        ParamScope newScope = (ParamScope) resolvable;
        resolver = newScope.applyOver(resolver);
    }
    resolveStringLeaves(resolvable, resolver);
    resolveNonStringLeaves(resolvable, resolver);
    resolveNodes(resolvable, resolver);
}
@Test
public void shouldAddErrorTheMessageOnTheRightFieldOfTheRightElement() {
    // One element with an undefined param, one with a malformed '#' escape.
    ResourceConfig resourceConfig = new ResourceConfig();
    resourceConfig.setName("#{not-found}");
    PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("cruise", "dev", "ant");
    pipelineConfig.setLabelTemplate("#a");
    pipelineConfig.get(0).getJobs().addJobWithoutValidityAssertion(new JobConfig(new CaseInsensitiveString("another"), new ResourceConfigs(resourceConfig), new ArtifactTypeConfigs()));

    new ParamResolver(new ParamSubstitutionHandlerFactory(params(param("foo", "pavan"), param("bar", "jj"))), fieldCache).resolve(pipelineConfig);

    // Each error must land on the field of the element that caused it.
    assertThat(pipelineConfig.errors().on("labelTemplate"), is("Error when processing params for '#a' used in field 'labelTemplate', # must be followed by a parameter pattern or escaped by another #"));
    assertThat(resourceConfig.errors().on(JobConfig.RESOURCES), is("Parameter 'not-found' is not defined. All pipelines using this parameter directly or via a template must define it."));
}
/**
 * Assembles the full application {@code Config} from the external config
 * provider, applying defaults where the corresponding key is absent.
 * {@code base_uri}, the IDP discovery URI and {@code app_name} are mandatory.
 *
 * @throws IllegalArgumentException if a mandatory key is missing
 */
public Config read() {
    var federationEntityStatementJwksPath = loadJwks(CONFIG_FEDERATION_ENTITY_STATEMENT_JWKS_PATH);

    var baseUri =
        configProvider
            .get(CONFIG_BASE_URI)
            .map(URI::create)
            .orElseThrow(() -> new IllegalArgumentException("no 'base_uri' configured"));

    var idpDiscoveryUri =
        configProvider
            .get(CONFIG_IDP_DISCOVERY_URI)
            .map(URI::create)
            .orElseThrow(
                () ->
                    new IllegalArgumentException(
                        "no '%s' configured".formatted(CONFIG_IDP_DISCOVERY_URI)));

    // Network defaults: bind everywhere, app on 1234, management on 1235.
    var host = configProvider.get(CONFIG_HOST).orElse("0.0.0.0");
    var port = getPortConfig(CONFIG_PORT, 1234);
    var managementPort = getPortConfig(CONFIG_MANAGEMENT_PORT, 1235);

    // Defaults to the public test federation master.
    var fedmaster =
        configProvider
            .get(CONFIG_FEDERATION_MASTER)
            .map(URI::create)
            .orElse(URI.create("https://app-test.federationmaster.de"));

    var appName =
        configProvider
            .get(CONFIG_APP_NAME)
            .orElseThrow(() -> new IllegalArgumentException("missing 'app_name' configuration"));

    // Entity statement lifetime, ISO-8601 duration, default one hour.
    var entityStatementTtl =
        configProvider.get(CONFIG_ES_TTL).map(Duration::parse).orElse(Duration.ofHours(1));

    var federationConfig =
        FederationConfig.create()
            .sub(baseUri)
            .iss(baseUri)
            .appName(appName)
            .federationMaster(fedmaster)
            // safety, remove the private key as we don't need it here
            .entitySigningKeys(federationEntityStatementJwksPath.toPublicJWKSet())
            .entitySigningKey(federationEntityStatementJwksPath.getKeys().get(0).toECKey())
            .ttl(entityStatementTtl)
            .scopes(getScopes())
            .redirectUris(List.of(baseUri.resolve("/auth/callback").toString()))
            .build();

    var supportedResponseTypes = List.of("code");

    var relyingPartyConfig =
        new RelyingPartyConfig(supportedResponseTypes, loadAllowedRedirectUrls());

    return new Config(
        relyingPartyConfig,
        federationConfig,
        host,
        port,
        managementPort,
        baseUri,
        idpDiscoveryUri,
        sessionStoreConfig(),
        codeStoreConfig());
}
@Test
void read_defaults() {
    // Provide only the mandatory keys; everything else must fall back to defaults.
    var provider = mock(ConfigProvider.class);
    var sut = new ConfigReader(provider);

    var baseUri = "https://rp.example.com";
    var idpDiscoveryUri = "https://sso.example.com/.well-known/openid-configuration";
    var appName = "Awesome DiGA";
    when(provider.get(ConfigReader.CONFIG_FEDERATION_ENTITY_STATEMENT_JWKS_PATH))
        .thenReturn(Optional.of("./src/test/resources/fixtures/example_sig_jwks.json"));
    when(provider.get(ConfigReader.CONFIG_BASE_URI)).thenReturn(Optional.of(baseUri));
    when(provider.get(ConfigReader.CONFIG_APP_NAME)).thenReturn(Optional.of(appName));
    when(provider.get(ConfigReader.CONFIG_IDP_DISCOVERY_URI))
        .thenReturn(Optional.of(idpDiscoveryUri));

    // when
    var config = sut.read();

    // then
    assertEquals(baseUri, config.baseUri().toString());
    assertEquals(appName, config.federation().appName());
    // Default host/port/management-port.
    assertEquals("0.0.0.0", config.host());
    assertEquals(1234, config.port());
    assertEquals(List.of("code"), config.relyingParty().supportedResponseTypes());
    assertEquals(List.of(), config.relyingParty().validRedirectUris());
    assertEquals(baseUri, config.federation().iss().toString());
    assertEquals(baseUri, config.federation().sub().toString());
    assertEquals(
        List.of(
            "openid",
            "urn:telematik:email",
            "urn:telematik:versicherter",
            "urn:telematik:display_name"),
        config.federation().scopes());

    assertNotNull(config.federation().entitySigningKey());
    assertNotNull(config.federation().entitySigningKeys().getKeyByKeyId("test-sig"));

    // these will be generated
    assertNull(config.federation().relyingPartyKeys());
}
/**
 * Folds every Spark filter into a single Iceberg expression by AND-ing
 * the individual conversions together.
 *
 * @param filters Spark filters to convert; each must be convertible
 * @return the conjunction of all converted filters (alwaysTrue for an empty array)
 */
public static Expression convert(Filter[] filters) {
    Expression result = Expressions.alwaysTrue();
    for (Filter sparkFilter : filters) {
        Expression icebergExpr = convert(sparkFilter);
        // Reject filters the single-filter converter cannot handle.
        Preconditions.checkArgument(
            icebergExpr != null, "Cannot convert filter to Iceberg: %s", sparkFilter);
        result = Expressions.and(result, icebergExpr);
    }
    return result;
}
@Test
public void testNotIn() {
    // NOT IN must convert to notNull AND notIn, since SQL NOT IN excludes nulls.
    Not filter = Not.apply(In.apply("col", new Integer[] {1, 2}));

    Expression actual = SparkFilters.convert(filter);
    Expression expected =
        Expressions.and(Expressions.notNull("col"), Expressions.notIn("col", 1, 2));
    Assert.assertEquals("Expressions should match", expected.toString(), actual.toString());
}
/**
 * Serializes the given predicate to Slime binary format.
 *
 * @param predicate the predicate to encode; must not be {@code null}
 * @return the binary-encoded representation
 * @throws NullPointerException if {@code predicate} is {@code null}
 */
public static byte[] encode(Predicate predicate) {
    Objects.requireNonNull(predicate, "predicate");
    final Slime out = new Slime();
    // Encode the predicate tree into the Slime root object, then serialize.
    encode(predicate, out.setObject());
    return com.yahoo.slime.BinaryFormat.encode(out);
}
@Test
void requireThatEncodeNullThrows() {
    // encode(null) must fail fast with an NPE naming the offending parameter.
    try {
        BinaryFormat.encode(null);
        fail();
    } catch (NullPointerException e) {
        assertEquals("predicate", e.getMessage());
    }
}
/**
 * Dispatches to the strongly typed factory overload for each supported
 * runtime type; unsupported types yield {@code null} so callers can skip them.
 * Uses pattern matching throughout (the original already used it for
 * {@code EncryptedValue}) and avoids the raw {@code Enum} cast.
 *
 * @param value candidate value of any type, may be {@code null}
 * @return the wrapped reference, or {@code null} if the type is unsupported
 */
@Nullable
public static ValueReference of(Object value) {
    if (value instanceof Boolean booleanValue) {
        return of(booleanValue);
    } else if (value instanceof Double doubleValue) {
        return of(doubleValue);
    } else if (value instanceof Float floatValue) {
        return of(floatValue);
    } else if (value instanceof Integer integerValue) {
        return of(integerValue);
    } else if (value instanceof Long longValue) {
        return of(longValue);
    } else if (value instanceof String stringValue) {
        return of(stringValue);
    } else if (value instanceof Enum<?> enumValue) {
        return of(enumValue);
    } else if (value instanceof EncryptedValue encryptedValue) {
        return of(encryptedValue);
    } else {
        return null;
    }
}
@Test
public void deserializeBoolean() throws IOException {
    // The tagged-JSON form must round-trip to ValueReference for both boolean values.
    assertThat(objectMapper.readValue("{\"@type\":\"boolean\",\"@value\":true}", ValueReference.class)).isEqualTo(ValueReference.of(true));
    assertThat(objectMapper.readValue("{\"@type\":\"boolean\",\"@value\":false}", ValueReference.class)).isEqualTo(ValueReference.of(false));
}
/**
 * Creates a new meeting. Responds 201 with a Location header for the new
 * resource and a Set-Cookie carrying the host's auth token, scoped to the
 * meeting's path.
 */
@PostMapping("/api/v1/meetings")
public ResponseEntity<MomoApiResponse<MeetingCreateResponse>> create(
        @RequestBody @Valid MeetingCreateRequest request
) {
    MeetingCreateResponse response = meetingService.create(request);
    // Scope the auth cookie to this meeting only.
    String path = cookieManager.pathOf(response.uuid());
    String cookie = cookieManager.createNewCookie(response.token(), path);
    return ResponseEntity.created(URI.create("/meeting/" + response.uuid()))
            .header(HttpHeaders.SET_COOKIE, cookie)
            .body(new MomoApiResponse<>(response));
}
@DisplayName("존재하지 않는 약속을 잠금 해제 시도하면 400 Bad Request를 반환한다.")
@Test
void unlockWithInvalidUUID() {
    // A syntactically invalid / unknown meeting UUID must yield 400,
    // even with a valid host token for some other meeting.
    String invalidUUID = "INVALID_UUID";
    Meeting meeting = meetingRepository.save(MeetingFixture.DINNER.create());
    Attendee attendee = attendeeRepository.save(AttendeeFixture.HOST_JAZZ.create(meeting));
    String token = getToken(attendee, meeting);

    RestAssured.given().log().all()
            .cookie("ACCESS_TOKEN", token)
            .contentType(ContentType.JSON)
            .pathParam("uuid", invalidUUID)
            .when().patch("/api/v1/meetings/{uuid}/unlock")
            .then().log().all()
            .statusCode(HttpStatus.BAD_REQUEST.value());
}
/**
 * Materializes one scheduled Job per run of this recurring job whose run
 * time falls after {@code from} and strictly before {@code upTo}.
 *
 * @param from exclusive lower bound used to compute the first run
 * @param upTo exclusive upper bound on run times
 * @return the scheduled jobs, possibly empty
 */
public List<Job> toScheduledJobs(Instant from, Instant upTo) {
    List<Job> result = new ArrayList<>();
    // Walk the run times produced by the schedule until we pass the bound.
    for (Instant runAt = getNextRun(from); runAt.isBefore(upTo); runAt = getNextRun(runAt)) {
        result.add(toJob(new ScheduledState(runAt, this)));
    }
    return result;
}
@Test
void testToScheduledJobsGetsAllJobsBetweenStartAndEnd() {
    // A 5-second cron over a 5-second window yields exactly one run.
    final RecurringJob recurringJob = aDefaultRecurringJob()
        .withCronExpression("*/5 * * * * *")
        .build();

    final List<Job> jobs = recurringJob.toScheduledJobs(now(), now().plusSeconds(5));

    assertThat(jobs).hasSize(1);
    ScheduledState scheduledState = jobs.get(0).getJobState();
    // The run must be scheduled in the future relative to the window start.
    assertThat(scheduledState.getScheduledAt()).isAfter(now());
}
/**
 * Records the delivery-receipt result for an SMS log entry.
 *
 * @param id             id of the log entry to update
 * @param success        whether the receipt reported success; {@code null} counts as failure
 * @param receiveTime    time the receipt was received
 * @param apiReceiveCode provider receipt code
 * @param apiReceiveMsg  provider receipt message
 */
@Override
public void updateSmsReceiveResult(Long id, Boolean success, LocalDateTime receiveTime,
                                   String apiReceiveCode, String apiReceiveMsg) {
    // Map the boolean outcome to the receive-status enum (null-safe).
    final SmsReceiveStatusEnum receiveStatus = Boolean.TRUE.equals(success)
            ? SmsReceiveStatusEnum.SUCCESS
            : SmsReceiveStatusEnum.FAILURE;
    smsLogMapper.updateById(SmsLogDO.builder()
            .id(id)
            .receiveStatus(receiveStatus.getStatus())
            .receiveTime(receiveTime)
            .apiReceiveCode(apiReceiveCode)
            .apiReceiveMsg(apiReceiveMsg)
            .build());
}
@Test
public void testUpdateSmsReceiveResult() {
    // mock 数据 (seed a log entry still in INIT receive state)
    SmsLogDO dbSmsLog = randomSmsLogDO(
        o -> o.setReceiveStatus(SmsReceiveStatusEnum.INIT.getStatus()));
    smsLogMapper.insert(dbSmsLog);
    // 准备参数 (random receipt outcome and metadata)
    Long id = dbSmsLog.getId();
    Boolean success = randomBoolean();
    LocalDateTime receiveTime = randomLocalDateTime();
    String apiReceiveCode = randomString();
    String apiReceiveMsg = randomString();

    // 调用 (perform the update)
    smsLogService.updateSmsReceiveResult(id, success, receiveTime, apiReceiveCode, apiReceiveMsg);
    // 断言 (status must mirror the boolean outcome; metadata stored verbatim)
    dbSmsLog = smsLogMapper.selectById(id);
    assertEquals(success ? SmsReceiveStatusEnum.SUCCESS.getStatus()
        : SmsReceiveStatusEnum.FAILURE.getStatus(), dbSmsLog.getReceiveStatus());
    assertEquals(receiveTime, dbSmsLog.getReceiveTime());
    assertEquals(apiReceiveCode, dbSmsLog.getApiReceiveCode());
    assertEquals(apiReceiveMsg, dbSmsLog.getApiReceiveMsg());
}
/**
 * Prints the given entities in the console's configured output format.
 * JSON prints the whole list at once; TABULAR prints one table per entity,
 * prefixed by its statement text when more than one entity is shown.
 *
 * @throws RuntimeException if the output format is unrecognized
 */
public void printKsqlEntityList(final List<KsqlEntity> entityList) {
    switch (outputFormat) {
        case JSON:
            printAsJson(entityList);
            break;
        case TABULAR:
            // With multiple entities, prefix each table with its originating
            // statement so the output remains attributable.
            final boolean showStatements = entityList.size() > 1;
            for (final KsqlEntity ksqlEntity : entityList) {
                writer().println();
                if (showStatements) {
                    writer().println(ksqlEntity.getStatementText());
                }
                printAsTable(ksqlEntity);
            }
            break;
        default:
            throw new RuntimeException(String.format(
                "Unexpected output format: '%s'",
                outputFormat.name()
            ));
    }
}
@Test
public void shouldPrintDropConnector() {
    // Given:
    final KsqlEntity entity = new DropConnectorEntity("statementText", "connectorName");

    // When:
    console.printKsqlEntityList(ImmutableList.of(entity));

    // Then: compare against the approved golden output.
    final String output = terminal.getOutputString();
    Approvals.verify(output, approvalOptions);
}
/**
 * Tells whether the given string is a valid Java identifier: a valid
 * identifier-start character followed only by identifier-part characters.
 * Empty or {@code null} strings are not identifiers.
 *
 * @param s candidate string, may be {@code null}
 * @return {@code true} if {@code s} is a valid Java identifier
 */
public static boolean isJavaIdentifier(String s) {
    if (isEmpty(s)) {
        return false;
    }
    if (!Character.isJavaIdentifierStart(s.charAt(0))) {
        return false;
    }
    // Every remaining char must be a valid identifier part.
    return s.chars().skip(1).allMatch(Character::isJavaIdentifierPart);
}
@Test
void testIsJavaIdentifier() throws Exception {
    // Empty, digit-leading and punctuation-containing strings are rejected.
    assertThat(StringUtils.isJavaIdentifier(""), is(false));
    assertThat(StringUtils.isJavaIdentifier("1"), is(false));
    assertThat(StringUtils.isJavaIdentifier("abc123"), is(true));
    assertThat(StringUtils.isJavaIdentifier("abc(23)"), is(false));
}
/**
 * Sends an operator event to the subtask. The returned future completes
 * with the subtask's acknowledgement. If delivery fails while the task is
 * still running, the event is considered lost and the task is failed over.
 *
 * @throws FlinkRuntimeException if the gateway is not ready or the event
 *     cannot be serialized
 */
@Override
public CompletableFuture<Acknowledge> sendEvent(OperatorEvent evt) {
    if (!isReady()) {
        throw new FlinkRuntimeException("SubtaskGateway is not ready, task not yet running.");
    }

    final SerializedValue<OperatorEvent> serializedEvent;
    try {
        serializedEvent = new SerializedValue<>(evt);
    } catch (IOException e) {
        // we do not expect that this exception is handled by the caller, so we make it
        // unchecked so that it can bubble up
        throw new FlinkRuntimeException("Cannot serialize operator event", e);
    }

    final Callable<CompletableFuture<Acknowledge>> sendAction =
        subtaskAccess.createEventSendAction(serializedEvent);

    final CompletableFuture<Acknowledge> sendResult = new CompletableFuture<>();
    final CompletableFuture<Acknowledge> result =
        sendResult.whenCompleteAsync(
            (success, failure) -> {
                // A lost event on a still-running task is unrecoverable for
                // coordinator consistency: trigger a task failover.
                if (failure != null && subtaskAccess.isStillRunning()) {
                    String msg =
                        String.format(
                            EVENT_LOSS_ERROR_MESSAGE, evt, subtaskAccess.subtaskName());
                    Runnables.assertNoException(
                        () ->
                            subtaskAccess.triggerTaskFailover(
                                new FlinkException(msg, failure)));
                }
            },
            mainThreadExecutor);

    // Both the send and the future tracking must happen on the main thread.
    mainThreadExecutor.execute(
        () -> {
            sendEventInternal(sendAction, sendResult);
            incompleteFuturesTracker.trackFutureWhileIncomplete(result);
        });

    return result;
}
@Test
void eventsPassThroughOpenGateway() {
    final EventReceivingTasks receiver = EventReceivingTasks.createForRunningTasks();
    final SubtaskGatewayImpl gateway =
        new SubtaskGatewayImpl(
            getUniqueElement(receiver.getAccessesForSubtask(11)),
            ComponentMainThreadExecutorServiceAdapter.forMainThread(),
            new IncompleteFuturesTracker());

    final OperatorEvent event = new TestOperatorEvent();
    final CompletableFuture<Acknowledge> future = gateway.sendEvent(event);

    // With an open gateway the event reaches subtask 11 immediately
    // and the ack future is already complete.
    assertThat(receiver.events).containsExactly(new EventWithSubtask(event, 11));
    assertThat(future).isDone();
}
/**
 * Convenience overload: executes the DDL command with restore mode disabled.
 * See the five-argument {@code execute} for full semantics.
 */
public DdlCommandResult execute(
    final String sql,
    final DdlCommand ddlCommand,
    final boolean withQuery,
    final Set<SourceName> withQuerySources
) {
    return execute(sql, ddlCommand, withQuery, withQuerySources, false);
}
@Test
public void shouldThrowOnDropTableWhenConstraintExist() {
    // Given: t2 and t3 both read from t1, creating referential constraints on it.
    final CreateTableCommand table1 = buildCreateTable(SourceName.of("t1"), false, false);
    final CreateTableCommand table2 = buildCreateTable(SourceName.of("t2"), false, false);
    final CreateTableCommand table3 = buildCreateTable(SourceName.of("t3"), false, false);
    cmdExec.execute(SQL_TEXT, table1, true, Collections.emptySet());
    cmdExec.execute(SQL_TEXT, table2, true, Collections.singleton(SourceName.of("t1")));
    cmdExec.execute(SQL_TEXT, table3, true, Collections.singleton(SourceName.of("t1")));

    // When: dropping the referenced source must fail.
    final DropSourceCommand dropStream = buildDropSourceCommand(SourceName.of("t1"));
    final Exception e = assertThrows(
        KsqlReferentialIntegrityException.class,
        () -> cmdExec.execute(SQL_TEXT, dropStream, false, Collections.emptySet())
    );

    // Then: the message names the blocked source and its dependents.
    assertThat(e.getMessage(), containsString("Cannot drop t1."));
    assertThat(e.getMessage(), containsString(
        "The following streams and/or tables read from this source: [t2, t3]."));
    assertThat(e.getMessage(), containsString("You need to drop them before dropping t1."));
}
/**
 * Returns the intents belonging to this virtual network, scoped by
 * {@code networkId} in the backing intent store.
 */
@Override
public Iterable<Intent> getIntents() {
    return intentStore.getIntents(networkId);
}
@Test
public void testGetIntents() {
    VirtualNetwork virtualNetwork = setupVirtualNetworkTopology();

    // Build a VLAN-encapsulated point-to-point intent on the virtual network.
    Key intentKey = Key.of("test", APP_ID);
    List<Constraint> constraints = new ArrayList<>();
    constraints.add(new EncapsulationConstraint(EncapsulationType.VLAN));

    VirtualNetworkIntent virtualIntent = VirtualNetworkIntent.builder()
            .networkId(virtualNetwork.id())
            .key(intentKey)
            .appId(APP_ID)
            .ingressPoint(cp1)
            .egressPoint(cp5)
            .constraints(constraints)
            .build();
    // Test the submit() method.
    vnetIntentService.submit(virtualIntent);

    // Wait for the both intents to go into an INSTALLED state.
    try {
        if (!created.tryAcquire(MAX_PERMITS, MAX_WAIT_TIME, TimeUnit.SECONDS)) {
            fail("Failed to wait for intent to get installed.");
        }
    } catch (InterruptedException e) {
        fail("Semaphore exception during intent installation." + e.getMessage());
    }

    // Test the getIntents() method
    assertEquals("The intents size did not match as expected.", 1,
                 Iterators.size(vnetIntentService.getIntents().iterator()));

    // Test the getIntent() method
    assertNotNull("The intent should have been found.",
                  vnetIntentService.getIntent(virtualIntent.key()));

    // Test the getIntentData() method
    assertEquals("The intent data size did not match as expected.", 1,
                 Iterators.size(vnetIntentService.getIntentData().iterator()));

    // Test the getIntentCount() method
    assertEquals("The intent count did not match as expected.", 1,
                 vnetIntentService.getIntentCount());

    // Test the isLocal() method
    assertTrue("The intent should be local.",
               vnetIntentService.isLocal(virtualIntent.key()));
}
/**
 * Reads the attributes of a file via WebDAV PROPFIND, falling back to a
 * HEAD request when the server does not support PROPFIND. The root path
 * has no server-side metadata and yields empty attributes.
 *
 * @throws NotfoundException if the server-reported type contradicts the
 *     cached path type, or the file is absent
 * @throws BackgroundException for mapped protocol or I/O failures
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if (file.isRoot()) {
        return PathAttributes.EMPTY;
    }
    try {
        try {
            for (final DavResource resource : this.list(file)) {
                // Reject a mismatch between the cached path type and the
                // MIME type reported by the server.
                if (resource.isDirectory()) {
                    if (!file.getType().contains(Path.Type.directory)) {
                        throw new NotfoundException(String.format("File %s has set MIME type %s",
                            file.getAbsolute(), DavResource.HTTPD_UNIX_DIRECTORY_CONTENT_TYPE));
                    }
                }
                else {
                    if (!file.getType().contains(Path.Type.file)) {
                        throw new NotfoundException(String.format("File %s has set MIME type %s",
                            file.getAbsolute(), resource.getContentType()));
                    }
                }
                // Only the first PROPFIND result is consulted.
                return this.toAttributes(resource);
            }
            throw new NotfoundException(file.getAbsolute());
        }
        catch(SardineException e) {
            try {
                throw new DAVExceptionMappingService().map("Failure to read attributes of {0}", e, file);
            }
            catch(InteroperabilityException | ConflictException i) {
                // PROPFIND Method not allowed
                if(log.isWarnEnabled()) {
                    log.warn(String.format("Failure with PROPFIND request for %s. %s", file, i.getMessage()));
                }
                // Fall back to a HEAD request; rethrow if that yields nothing either.
                final PathAttributes attr = this.head(file);
                if(PathAttributes.EMPTY == attr) {
                    throw i;
                }
                return attr;
            }
        }
    }
    catch(SardineException e) {
        throw new DAVExceptionMappingService().map("Failure to read attributes of {0}", e, file);
    }
    catch(IOException e) {
        throw new HttpExceptionMappingService().map(e, file);
    }
}
@Test
public void testFindDefaultAttributesFinderCryptomator() throws Exception {
    // Create a Cryptomator vault and register it so paths are transparently encrypted.
    final Path home = new DefaultHomeFinderService(session).find();
    final Path vault = new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory));
    final CryptoVault cryptomator = new CryptoVault(vault);
    cryptomator.create(session, new VaultCredentials("test"), vaultVersion);
    session.withRegistry(new DefaultVaultRegistry(new DisabledPasswordStore(), new DisabledPasswordCallback(), cryptomator));
    // Touch an empty file inside the vault.
    final Path test = new CryptoTouchFeature<>(session, new DAVTouchFeature(session), new DAVWriteFeature(session), cryptomator).touch(
        new Path(vault, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    test.attributes().setSize(0L);
    // The vault-aware attributes finder must report the cleartext size (0).
    final PathAttributes attributes = cryptomator.getFeature(session, AttributesFinder.class, new DefaultAttributesFinderFeature(session)).find(test);
    assertNotNull(attributes);
    assertEquals(0L, attributes.getSize());
    // Clean up the file and the vault.
    cryptomator.getFeature(session, Delete.class, new DAVDeleteFeature(session)).delete(Arrays.asList(test, vault), new DisabledLoginCallback(), new Delete.DisabledCallback());
}