focal_method: string, lengths 13 to 60.9k
test_case: string, lengths 25 to 109k
@Override public String formatMailTemplateContent(String content, Map<String, Object> params) { return StrUtil.format(content, params); }
@Test public void testFormatMailTemplateContent() { // prepare parameters Map<String, Object> params = new HashMap<>(); params.put("name", "小红"); params.put("what", "饭"); // invoke and assert assertEquals("小红,你好,饭吃了吗?", mailTemplateService.formatMailTemplateContent("{name},你好,{what}吃了吗?", params)); }
public Binder(Pattern pattern, GroupExpression groupExpression) { this.pattern = pattern; this.groupExpression = groupExpression; this.groupExpressionIndex = Lists.newArrayList(0); // MULTI_JOIN is a special pattern which can contain children groups if the input group expression // is not a scan node. this.isPatternWithoutChildren = pattern.isPatternMultiJoin() ? Pattern.ALL_SCAN_TYPES.contains(groupExpression.getOp().getOpType()) : pattern.children().size() == 0; }
@Test public void testBinder() { OlapTable table1 = new OlapTable(); table1.setDefaultDistributionInfo(new HashDistributionInfo()); OlapTable table2 = new OlapTable(); table2.setDefaultDistributionInfo(new HashDistributionInfo()); OptExpression expr = OptExpression.create(new LogicalJoinOperator(), new OptExpression(new LogicalOlapScanOperator(table1)), new OptExpression(new LogicalOlapScanOperator(table2))); Pattern pattern = Pattern.create(OperatorType.LOGICAL_JOIN) .addChildren(Pattern.create(OperatorType.PATTERN_LEAF)) .addChildren(Pattern.create(OperatorType.PATTERN_LEAF)); Memo memo = new Memo(); OptExpression result = Binder.bind(pattern, memo.init(expr)); assertEquals(OperatorType.LOGICAL_JOIN, result.getOp().getOpType()); assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(0).getOp().getOpType()); assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(1).getOp().getOpType()); }
public void updateColdDataFlowCtrGroupConfig(final String addr, final Properties properties, final long timeoutMillis) throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException, InterruptedException, MQBrokerException, UnsupportedEncodingException { RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.UPDATE_COLD_DATA_FLOW_CTR_CONFIG, null); String str = MixAll.properties2String(properties); if (str != null && str.length() > 0) { request.setBody(str.getBytes(MixAll.DEFAULT_CHARSET)); RemotingCommand response = this.remotingClient.invokeSync( MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr), request, timeoutMillis); switch (response.getCode()) { case ResponseCode.SUCCESS: { return; } default: break; } throw new MQBrokerException(response.getCode(), response.getRemark()); } }
@Test public void testUpdateColdDataFlowCtrGroupConfig() throws RemotingException, InterruptedException, MQBrokerException, UnsupportedEncodingException { mockInvokeSync(); Properties props = new Properties(); mqClientAPI.updateColdDataFlowCtrGroupConfig(defaultBrokerAddr, props, defaultTimeout); }
@Override public synchronized void blameResult(InputFile file, List<BlameLine> lines) { checkNotNull(file); checkNotNull(lines); checkArgument(allFilesToBlame.contains(file), "It was not expected to blame file %s", file); if (lines.size() != file.lines()) { LOG.debug("Ignoring blame result since provider returned {} blame lines but file {} has {} lines", lines.size(), file, file.lines()); return; } Builder scmBuilder = ScannerReport.Changesets.newBuilder(); DefaultInputFile inputFile = (DefaultInputFile) file; scmBuilder.setComponentRef(inputFile.scannerId()); Map<String, Integer> changesetsIdByRevision = new HashMap<>(); int lineId = 1; for (BlameLine line : lines) { validateLine(line, lineId, file); Integer changesetId = changesetsIdByRevision.get(line.revision()); if (changesetId == null) { addChangeset(scmBuilder, line); changesetId = scmBuilder.getChangesetCount() - 1; changesetsIdByRevision.put(line.revision(), changesetId); } scmBuilder.addChangesetIndexByLine(changesetId); lineId++; } writer.writeComponentChangesets(scmBuilder.build()); allFilesToBlame.remove(file); count++; progressReport.message(count + "/" + total + " " + pluralize(count) + " have been analyzed"); }
@Test public void shouldFailIfNullRevision() { InputFile file = new TestInputFileBuilder("foo", "src/main/java/Foo.java").setLines(1).build(); var blameOutput = new DefaultBlameOutput(null, analysisWarnings, singletonList(file), mock(DocumentationLinkGenerator.class)); var lines = singletonList(new BlameLine().date(new Date()).author("guy")); assertThatThrownBy(() -> blameOutput.blameResult(file, lines)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Blame revision is blank for file " + file + " at line 1"); }
@Override public void publish(ScannerReportWriter writer) { AbstractProjectOrModule rootProject = moduleHierarchy.root(); ScannerReport.Metadata.Builder builder = ScannerReport.Metadata.newBuilder() .setAnalysisDate(projectInfo.getAnalysisDate().getTime()) // Here we want key without branch .setProjectKey(rootProject.key()) .setCrossProjectDuplicationActivated(cpdSettings.isCrossProjectDuplicationEnabled()) .setRootComponentRef(rootProject.scannerId()); projectInfo.getProjectVersion().ifPresent(builder::setProjectVersion); projectInfo.getBuildString().ifPresent(builder::setBuildString); if (branchConfiguration.branchName() != null) { addBranchInformation(builder); } String newCodeReferenceBranch = referenceBranchSupplier.getFromProperties(); if (newCodeReferenceBranch != null) { builder.setNewCodeReferenceBranch(newCodeReferenceBranch); } addScmInformation(builder); addNotAnalyzedFileCountsByLanguage(builder); for (QProfile qp : qProfiles.findAll()) { builder.putQprofilesPerLanguage(qp.getLanguage(), ScannerReport.Metadata.QProfile.newBuilder() .setKey(qp.getKey()) .setLanguage(qp.getLanguage()) .setName(qp.getName()) .setRulesUpdatedAt(qp.getRulesUpdatedAt().getTime()).build()); } for (Entry<String, ScannerPlugin> pluginEntry : pluginRepository.getPluginsByKey().entrySet()) { builder.putPluginsByKey(pluginEntry.getKey(), ScannerReport.Metadata.Plugin.newBuilder() .setKey(pluginEntry.getKey()) .setUpdatedAt(pluginEntry.getValue().getUpdatedAt()).build()); } addRelativePathFromScmRoot(builder); writer.writeMetadata(builder.build()); }
@Test @UseDataProvider("buildStrings") public void write_buildString(@Nullable String buildString, String expected) { when(projectInfo.getBuildString()).thenReturn(Optional.ofNullable(buildString)); underTest.publish(writer); ScannerReport.Metadata metadata = reader.readMetadata(); assertThat(metadata.getBuildString()).isEqualTo(expected); }
@Override public int addFirst(V... elements) { return get(addFirstAsync(elements)); }
@Test public void testRemoveFirst() { RDeque<Integer> queue1 = redisson.getDeque("deque1"); queue1.addFirst(1); queue1.addFirst(2); queue1.addFirst(3); Assertions.assertEquals(3, (int)queue1.removeFirst()); Assertions.assertEquals(2, (int)queue1.removeFirst()); Assertions.assertEquals(1, (int)queue1.removeFirst()); }
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test public void shouldChooseLaterVariadicWhenTwoVariadicsMatchDiffBranches() { // Given: givenFunctions( function(OTHER, 1, GenericType.of("A"), INT_VARARGS, STRING, DOUBLE), function(EXPECTED, 2, GenericType.of("B"), INT, STRING_VARARGS, DOUBLE) ); // When: final KsqlScalarFunction fun = udfIndex.getFunction(ImmutableList.of( SqlArgument.of(SqlTypes.BIGINT), SqlArgument.of(SqlTypes.INTEGER), SqlArgument.of(SqlTypes.STRING), SqlArgument.of(SqlTypes.DOUBLE)) ); // Then: assertThat(fun.name(), equalTo(EXPECTED)); }
public final void containsEntry(@Nullable Object key, @Nullable Object value) { // TODO(kak): Can we share any of this logic w/ MapSubject.containsEntry()? checkNotNull(actual); if (!actual.containsEntry(key, value)) { Map.Entry<@Nullable Object, @Nullable Object> entry = immutableEntry(key, value); ImmutableList<Map.Entry<@Nullable Object, @Nullable Object>> entryList = ImmutableList.of(entry); // TODO(cpovirk): If the key is present but not with the right value, we could fail using // something like valuesForKey(key).contains(value). Consider whether this is worthwhile. if (hasMatchingToStringPair(actual.entries(), entryList)) { failWithoutActual( fact("expected to contain entry", entry), fact("an instance of", objectToTypeName(entry)), simpleFact("but did not"), fact( "though it did contain", countDuplicatesAndAddTypeInfo( retainMatchingToString(actual.entries(), /* itemsToCheck = */ entryList))), fact("full contents", actualCustomStringRepresentationForPackageMembersToCall())); } else if (actual.containsKey(key)) { failWithoutActual( fact("expected to contain entry", entry), simpleFact("but did not"), fact("though it did contain values with that key", actual.asMap().get(key)), fact("full contents", actualCustomStringRepresentationForPackageMembersToCall())); } else if (actual.containsValue(value)) { Set<@Nullable Object> keys = new LinkedHashSet<>(); for (Map.Entry<?, ?> actualEntry : actual.entries()) { if (Objects.equal(actualEntry.getValue(), value)) { keys.add(actualEntry.getKey()); } } failWithoutActual( fact("expected to contain entry", entry), simpleFact("but did not"), fact("though it did contain keys with that value", keys), fact("full contents", actualCustomStringRepresentationForPackageMembersToCall())); } else { failWithActual("expected to contain entry", immutableEntry(key, value)); } } }
@Test public void containsEntryWithNullValueNullExpected() { ListMultimap<String, String> actual = ArrayListMultimap.create(); actual.put("a", null); assertThat(actual).containsEntry("a", null); }
protected static PrivateKey toPrivateKey(File keyFile, String keyPassword) throws NoSuchAlgorithmException, NoSuchPaddingException, InvalidKeySpecException, InvalidAlgorithmParameterException, KeyException, IOException { return toPrivateKey(keyFile, keyPassword, true); }
@Test public void testPkcs1Des3EncryptedRsaWrongPassword() throws Exception { assertThrows(IOException.class, new Executable() { @Override public void execute() throws Throwable { SslContext.toPrivateKey(new File(getClass().getResource("rsa_pkcs1_des3_encrypted.key") .getFile()), "wrong"); } }); }
public static L3ModificationInstruction copyTtlIn() { return new ModTtlInstruction(L3SubType.TTL_IN); }
@Test public void testCopyTtlInMethod() { final Instruction instruction = Instructions.copyTtlIn(); final L3ModificationInstruction.ModTtlInstruction modTtlInstruction = checkAndConvert(instruction, Instruction.Type.L3MODIFICATION, L3ModificationInstruction.ModTtlInstruction.class); assertThat(modTtlInstruction.subtype(), is(L3ModificationInstruction.L3SubType.TTL_IN)); }
public static <K, V> Reshuffle<K, V> of() { return new Reshuffle<>(); }
@Test @Category({ValidatesRunner.class}) public void testAssignShardFn() { List<KV<String, Integer>> inputKvs = Lists.newArrayList(); for (int i = 0; i < 10; i++) { inputKvs.addAll(ARBITRARY_KVS); } PCollection<KV<String, Integer>> input = pipeline.apply( Create.of(inputKvs).withCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()))); PCollection<Integer> output = input .apply(ParDo.of(new AssignShardFn<>(2))) .apply(GroupByKey.create()) .apply(MapElements.into(integers()).via(KV::getKey)); PAssert.that(output).containsInAnyOrder(ImmutableList.of(0, 1)); pipeline.run(); }
public static String name(String name, String... names) { final StringBuilder builder = new StringBuilder(); append(builder, name); if (names != null) { for (String s : names) { append(builder, s); } } return builder.toString(); }
@Test public void elidesNullValuesFromNamesWhenNullAndNotNullPassedIn() { assertThat(name("one", null, "three")) .isEqualTo("one.three"); }
void updateInactivityStateIfExpired(long ts, DeviceId deviceId, DeviceStateData stateData) { log.trace("Processing state {} for device {}", stateData, deviceId); if (stateData != null) { DeviceState state = stateData.getState(); if (!isActive(ts, state) && (state.getLastInactivityAlarmTime() == 0L || state.getLastInactivityAlarmTime() <= state.getLastActivityTime()) && stateData.getDeviceCreationTime() + state.getInactivityTimeout() <= ts) { if (partitionService.resolve(ServiceType.TB_CORE, stateData.getTenantId(), deviceId).isMyPartition()) { reportInactivity(ts, deviceId, stateData); } else { cleanupEntity(deviceId); } } } else { log.debug("[{}] Device that belongs to other server is detected and removed.", deviceId); cleanupEntity(deviceId); } }
@Test public void givenStateDataIsNull_whenUpdateInactivityTimeoutIfExpired_thenShouldCleanupDevice() { // GIVEN service.deviceStates.put(deviceId, deviceStateDataMock); // WHEN service.updateInactivityStateIfExpired(System.currentTimeMillis(), deviceId, null); // THEN assertThat(service.deviceStates.get(deviceId)).isNull(); assertThat(service.deviceStates.size()).isEqualTo(0); assertThat(service.deviceStates.isEmpty()).isTrue(); }
public FloatArrayAsIterable usingTolerance(double tolerance) { return new FloatArrayAsIterable(tolerance(tolerance), iterableSubject()); }
@Test public void usingTolerance_containsExactly_primitiveFloatArray_inOrder_success() { assertThat(array(1.1f, TOLERABLE_2POINT2, 3.3f)) .usingTolerance(DEFAULT_TOLERANCE) .containsExactly(array(1.1f, 2.2f, 3.3f)) .inOrder(); }
@Override public Integer doCall() throws Exception { JsonObject pluginConfig = loadConfig(); JsonObject plugins = pluginConfig.getMap("plugins"); Optional<PluginType> camelPlugin = PluginType.findByName(name); if (camelPlugin.isPresent()) { if (command == null) { command = camelPlugin.get().getCommand(); } if (description == null) { description = camelPlugin.get().getDescription(); } if (firstVersion == null) { firstVersion = camelPlugin.get().getFirstVersion(); } } if (command == null) { // use plugin name as command command = name; } if (firstVersion == null) { // fallback to version specified firstVersion = version; } JsonObject plugin = new JsonObject(); plugin.put("name", name); plugin.put("command", command); if (firstVersion != null) { plugin.put("firstVersion", firstVersion); } plugin.put("description", description != null ? description : "Plugin %s called with command %s".formatted(name, command)); if (gav == null && (groupId != null && artifactId != null)) { if (version == null) { CamelCatalog catalog = new DefaultCamelCatalog(); version = catalog.getCatalogVersion(); } gav = "%s:%s:%s".formatted(groupId, artifactId, version); } if (gav != null) { plugin.put("dependency", gav); } plugins.put(name, plugin); saveConfig(pluginConfig); return 0; }
@Test public void shouldAddDefaultPlugin() throws Exception { PluginAdd command = new PluginAdd(new CamelJBangMain().withPrinter(printer)); command.name = "camel-k"; command.doCall(); Assertions.assertEquals("", printer.getOutput()); Assertions.assertEquals( "{\"plugins\":{\"camel-k\":{\"name\":\"camel-k\",\"command\":\"k\",\"firstVersion\":\"4.4.0\",\"description\":\"%s\"}}}" .formatted(PluginType.CAMEL_K.getDescription()), PluginHelper.getOrCreatePluginConfig().toJson()); }
@Override public List<InterpreterCompletion> completion(String buf, int cursor, InterpreterContext interpreterContext) throws InterpreterException { return innerIntp.completion(buf, cursor, interpreterContext); }
@Test void testCompletion() throws InterpreterException { InterpreterContext context = getInterpreterContext(); InterpreterResult result = interpreter.interpret("val a=\"hello world\"", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code()); List<InterpreterCompletion> completions = interpreter.completion("a.", 2, getInterpreterContext()); assertTrue(completions.size() > 0); completions = interpreter.completion("benv.", 5, getInterpreterContext()); assertTrue(completions.size() > 0); }
public static <K, V> MapCoder<K, V> of(Coder<K> keyCoder, Coder<V> valueCoder) { return new MapCoder<>(keyCoder, valueCoder); }
@Test public void testStructuralValueDecodeEncodeEqual() throws Exception { MapCoder<byte[], Integer> coder = MapCoder.of(ByteArrayCoder.of(), VarIntCoder.of()); Map<byte[], Integer> value = Collections.singletonMap(new byte[] {1, 2, 3, 4}, 1); CoderProperties.structuralValueDecodeEncodeEqual(coder, value); }
static void verifyAddMissingValues(final List<KiePMMLMiningField> notTargetMiningFields, final PMMLRequestData requestData) { logger.debug("verifyMissingValues {} {}", notTargetMiningFields, requestData); Collection<ParameterInfo> requestParams = requestData.getRequestParams(); notTargetMiningFields .forEach(miningField -> { ParameterInfo parameterInfo = requestParams.stream() .filter(paramInfo -> miningField.getName().equals(paramInfo.getName())) .findFirst() .orElse(null); if (parameterInfo == null) { manageMissingValues(miningField, requestData); } }); }
@Test void verifyAddMissingValuesNotMissingNotReturnInvalidNotReplacement() { List<KiePMMLMiningField> miningFields = IntStream.range(0, 3).mapToObj(i -> { DATA_TYPE dataType = DATA_TYPE.values()[i]; return KiePMMLMiningField.builder("FIELD-" + i, null) .withDataType(dataType) .withMissingValueTreatmentMethod(MISSING_VALUE_TREATMENT_METHOD.AS_IS) .build(); }) .collect(Collectors.toList()); PMMLRequestData pmmlRequestData = new PMMLRequestData("123", "modelName"); PreProcess.verifyAddMissingValues(miningFields, pmmlRequestData); }
@Override public Long getValue() { return Arrays.stream(watermarkGauges).mapToLong(WatermarkGauge::getValue).min().orElse(0); }
@Test void testSetCurrentLowWatermark() { WatermarkGauge metric1 = new WatermarkGauge(); WatermarkGauge metric2 = new WatermarkGauge(); MinWatermarkGauge metric = new MinWatermarkGauge(metric1, metric2); assertThat(metric.getValue()).isEqualTo(Long.MIN_VALUE); metric1.setCurrentWatermark(1); assertThat(metric.getValue()).isEqualTo(Long.MIN_VALUE); metric2.setCurrentWatermark(2); assertThat(metric.getValue()).isOne(); metric1.setCurrentWatermark(3); assertThat(metric.getValue()).isEqualTo(2L); }
@Override public void commit() throws SQLException { for (TransactionHook each : transactionHooks) { each.beforeCommit(connection.getCachedConnections().values(), getTransactionContext(), ProxyContext.getInstance().getContextManager().getComputeNodeInstanceContext().getLockContext()); } if (connection.getConnectionSession().getTransactionStatus().isInTransaction()) { try { if (TransactionType.LOCAL == TransactionUtils.getTransactionType(getTransactionContext()) || null == distributionTransactionManager) { localTransactionManager.commit(); } else { distributionTransactionManager.commit(getTransactionContext().isExceptionOccur()); } } finally { for (TransactionHook each : transactionHooks) { each.afterCommit(connection.getCachedConnections().values(), getTransactionContext(), ProxyContext.getInstance().getContextManager().getComputeNodeInstanceContext().getLockContext()); } for (Connection each : connection.getCachedConnections().values()) { ConnectionSavepointManager.getInstance().transactionFinished(each); } connection.getConnectionSession().getTransactionStatus().setInTransaction(false); connection.getConnectionSession().getConnectionContext().close(); } } }
@Test void assertCommitWithoutTransaction() throws SQLException { ContextManager contextManager = mockContextManager(TransactionType.LOCAL); when(ProxyContext.getInstance().getContextManager()).thenReturn(contextManager); newBackendTransactionManager(TransactionType.LOCAL, false); backendTransactionManager.commit(); verify(transactionStatus, times(0)).setInTransaction(false); verify(localTransactionManager, times(0)).commit(); verify(distributionTransactionManager, times(0)).commit(false); }
@Override public WindowStoreIterator<V> backwardFetch(final K key, final Instant timeFrom, final Instant timeTo) throws IllegalArgumentException { Objects.requireNonNull(key, "key can't be null"); final List<ReadOnlyWindowStore<K, V>> stores = provider.stores(storeName, windowStoreType); for (final ReadOnlyWindowStore<K, V> windowStore : stores) { try { final WindowStoreIterator<V> result = windowStore.backwardFetch(key, timeFrom, timeTo); if (!result.hasNext()) { result.close(); } else { return result; } } catch (final InvalidStateStoreException e) { throw new InvalidStateStoreException( "State store is not available anymore and may have been migrated to another instance; " + "please re-discover its location from the state metadata."); } } return KeyValueIterators.emptyWindowStoreIterator(); }
@Test public void emptyBackwardIteratorPeekNextKeyShouldThrowNoSuchElementException() { final StateStoreProvider storeProvider = mock(StateStoreProvider.class); when(storeProvider.stores(anyString(), any())).thenReturn(emptyList()); final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>( storeProvider, QueryableStoreTypes.windowStore(), "foo" ); try (final WindowStoreIterator<Object> windowStoreIterator = store.backwardFetch("key", ofEpochMilli(1), ofEpochMilli(10))) { assertThrows(NoSuchElementException.class, windowStoreIterator::peekNextKey); } }
public void forceDrain() { appendLock.lock(); try { drainStatus = DrainStatus.STARTED; maybeCompleteDrain(); } finally { appendLock.unlock(); } }
@Test public void testForceDrain() { int leaderEpoch = 17; long baseOffset = 157; int lingerMs = 50; int maxBatchSize = 512; Mockito.when(memoryPool.tryAllocate(maxBatchSize)) .thenReturn(ByteBuffer.allocate(maxBatchSize)); BatchAccumulator<String> acc = buildAccumulator( leaderEpoch, baseOffset, lingerMs, maxBatchSize ); List<String> records = asList("a", "b", "c", "d", "e", "f", "g", "h", "i"); // Append records assertEquals(baseOffset, acc.append(leaderEpoch, records.subList(0, 1), false)); assertEquals(baseOffset + 2, acc.append(leaderEpoch, records.subList(1, 3), false)); assertEquals(baseOffset + 5, acc.append(leaderEpoch, records.subList(3, 6), false)); assertEquals(baseOffset + 7, acc.append(leaderEpoch, records.subList(6, 8), false)); assertEquals(baseOffset + 8, acc.append(leaderEpoch, records.subList(8, 9), false)); assertFalse(acc.needsDrain(time.milliseconds())); acc.forceDrain(); assertTrue(acc.needsDrain(time.milliseconds())); assertEquals(0, acc.timeUntilDrain(time.milliseconds())); // Drain completed batches List<BatchAccumulator.CompletedBatch<String>> batches = acc.drain(); assertEquals(1, batches.size()); assertFalse(acc.needsDrain(time.milliseconds())); assertEquals(Long.MAX_VALUE - time.milliseconds(), acc.timeUntilDrain(time.milliseconds())); BatchAccumulator.CompletedBatch<String> batch = batches.get(0); assertEquals(records, batch.records.get()); assertEquals(baseOffset, batch.baseOffset); assertEquals(time.milliseconds(), batch.appendTimestamp()); }
@Override public void acquirePermissionToRemove(OrchestratorContext context, ApplicationApi applicationApi) throws HostStateChangeDeniedException { ApplicationInstanceStatus applicationStatus = applicationApi.getApplicationStatus(); if (applicationStatus == ApplicationInstanceStatus.ALLOWED_TO_BE_DOWN) { throw new HostStateChangeDeniedException( applicationApi.getNodeGroup(), HostedVespaPolicy.APPLICATION_SUSPENDED_CONSTRAINT, "Unable to test availability constraints as the application " + applicationApi.applicationId() + " is allowed to be down"); } // Apply per-cluster policy for (ClusterApi cluster : applicationApi.getClusters()) { clusterPolicy.verifyGroupGoingDownPermanentlyIsFine(cluster); } // Get permission from the Cluster Controller to remove the content nodes. for (StorageNode storageNode : applicationApi.getStorageNodesInGroupInClusterOrder()) { // Consider changing the semantics of setting storage node state to DOWN in cluster controller, to avoid 2 calls. storageNode.setStorageNodeState(context.createSubcontextForSingleAppOp(true), ClusterControllerNodeState.DOWN); storageNode.forceDistributorState(context, ClusterControllerNodeState.DOWN); } // Ensure all nodes in the group are marked as permanently down for (HostName hostName : applicationApi.getNodesInGroupWith(status -> status != HostStatus.PERMANENTLY_DOWN)) { applicationApi.setHostState(context, hostName, HostStatus.PERMANENTLY_DOWN); } }
@Test public void testAcquirePermissionToRemoveConfigServer() throws OrchestrationException { final HostedVespaClusterPolicy clusterPolicy = mock(HostedVespaClusterPolicy.class); final HostedVespaPolicy policy = new HostedVespaPolicy(clusterPolicy, clientFactory, applicationApiFactory, flagSource); final ApplicationApi applicationApi = mock(ApplicationApi.class); when(applicationApi.applicationId()).thenReturn(ApplicationId.fromSerializedForm("tenant:app:default")); ClusterApi clusterApi1 = mock(ClusterApi.class); ClusterApi clusterApi2 = mock(ClusterApi.class); ClusterApi clusterApi3 = mock(ClusterApi.class); List<ClusterApi> clusterApis = List.of(clusterApi1, clusterApi2, clusterApi3); when(applicationApi.getClusters()).thenReturn(clusterApis); StorageNode storageNode1 = mock(StorageNode.class); HostName hostName1 = new HostName("storage-1"); when(storageNode1.hostName()).thenReturn(hostName1); HostName hostName2 = new HostName("host-2"); StorageNode storageNode3 = mock(StorageNode.class); HostName hostName3 = new HostName("storage-3"); when(storageNode3.hostName()).thenReturn(hostName3); List<StorageNode> upStorageNodes = List.of(storageNode1, storageNode3); when(applicationApi.getStorageNodesInGroupInClusterOrder()).thenReturn(upStorageNodes); List<HostName> noRemarksHostNames = List.of(hostName1, hostName2, hostName3); when(applicationApi.getNodesInGroupWith(any())).thenReturn(noRemarksHostNames); InOrder order = inOrder(applicationApi, clusterPolicy, storageNode1, storageNode3); OrchestratorContext context = mock(OrchestratorContext.class); OrchestratorContext probeContext = mock(OrchestratorContext.class); when(context.createSubcontextForSingleAppOp(true)).thenReturn(probeContext); policy.acquirePermissionToRemove(context, applicationApi); order.verify(applicationApi).getClusters(); order.verify(clusterPolicy).verifyGroupGoingDownPermanentlyIsFine(clusterApi1); order.verify(clusterPolicy).verifyGroupGoingDownPermanentlyIsFine(clusterApi2); order.verify(clusterPolicy).verifyGroupGoingDownPermanentlyIsFine(clusterApi3); order.verify(applicationApi).getStorageNodesInGroupInClusterOrder(); order.verify(storageNode1).setStorageNodeState(probeContext, ClusterControllerNodeState.DOWN); order.verify(storageNode3).setStorageNodeState(probeContext, ClusterControllerNodeState.DOWN); order.verify(applicationApi).getNodesInGroupWith(any()); order.verify(applicationApi).setHostState(context, hostName1, HostStatus.PERMANENTLY_DOWN); order.verify(applicationApi).setHostState(context, hostName2, HostStatus.PERMANENTLY_DOWN); order.verify(applicationApi).setHostState(context, hostName3, HostStatus.PERMANENTLY_DOWN); order.verifyNoMoreInteractions(); }
@Override public int intValue() { return (int) lvVal(); }
@Test public void testIntValue() { PaddedAtomicLong counter = new PaddedAtomicLong(10); assertEquals(10, counter.intValue()); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { try { final AttributedList<Path> objects = new AttributedList<>(); Marker marker = new Marker(null, null); final String containerId = fileid.getVersionId(containerService.getContainer(directory)); // Seen placeholders final Map<String, Long> revisions = new HashMap<>(); boolean hasDirectoryPlaceholder = containerService.isContainer(directory); do { if(log.isDebugEnabled()) { log.debug(String.format("List directory %s with marker %s", directory, marker)); } final B2ListFilesResponse response; if(versioning.isEnabled()) { // In alphabetical order by file name, and by reverse of date/time uploaded for // versions of files with the same name. response = session.getClient().listFileVersions(containerId, marker.nextFilename, marker.nextFileId, chunksize, this.createPrefix(directory), String.valueOf(Path.DELIMITER)); } else { response = session.getClient().listFileNames(containerId, marker.nextFilename, chunksize, this.createPrefix(directory), String.valueOf(Path.DELIMITER)); } marker = this.parse(directory, objects, response, revisions); if(null == marker.nextFileId) { if(!response.getFiles().isEmpty()) { hasDirectoryPlaceholder = true; } } listener.chunk(directory, objects); } while(marker.hasNext()); if(!hasDirectoryPlaceholder && objects.isEmpty()) { if(log.isWarnEnabled()) { log.warn(String.format("No placeholder found for directory %s", directory)); } throw new NotfoundException(directory.getAbsolute()); } return objects; } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map("Listing directory {0} failed", e, directory); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e); } }
@Test public void testListRevisions() throws Exception { final B2VersionIdProvider fileid = new B2VersionIdProvider(session); final Path bucket = new B2DirectoryFeature(session, fileid).mkdir(new Path( String.format("test-%s", new AsciiRandomStringService().random()), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final String name = new AsciiRandomStringService().random(); final Path file = new Path(bucket, name, EnumSet.of(Path.Type.file)); { final byte[] content = RandomUtils.nextBytes(1); final TransferStatus status = new TransferStatus(); status.setLength(content.length); status.setChecksum(new SHA1ChecksumCompute().compute(new ByteArrayInputStream(content), status)); final HttpResponseOutputStream<BaseB2Response> out = new B2WriteFeature(session, fileid).write(file, status, new DisabledConnectionCallback()); IOUtils.write(content, out); out.close(); final B2FileResponse response = (B2FileResponse) out.getStatus(); assertEquals(response.getFileId(), file.attributes().getVersionId()); file.attributes().setVersionId(response.getFileId()); final AttributedList<Path> list = new B2ObjectListService(session, fileid).list(bucket, new DisabledListProgressListener()); assertTrue(list.contains(file)); assertNull(list.find(new SimplePathPredicate(file)).attributes().getRevision()); assertNotNull(list.find(new SimplePathPredicate(file)).attributes().getVersionId()); assertEquals(content.length, list.find(new SimplePathPredicate(file)).attributes().getSize()); assertEquals(bucket, list.find(new SimplePathPredicate(file)).getParent()); } // Replace { final byte[] content = RandomUtils.nextBytes(1); final TransferStatus status = new TransferStatus(); status.setLength(content.length); status.setChecksum(new SHA1ChecksumCompute().compute(new ByteArrayInputStream(content), status)); final HttpResponseOutputStream<BaseB2Response> out = new B2WriteFeature(session, fileid).write(file, status, new DisabledConnectionCallback()); IOUtils.write(content, out); out.close(); final B2FileResponse response = (B2FileResponse) out.getStatus(); assertEquals(response.getFileId(), file.attributes().getVersionId()); final AttributedList<Path> list = new B2ObjectListService(session, fileid).list(bucket, new DisabledListProgressListener()); assertEquals(2, list.size()); assertTrue(list.contains(file)); assertEquals(bucket, list.get(file).getParent()); assertNull(list.get(file).attributes().getRevision()); assertNotNull(list.find(new SimplePathPredicate(file)).attributes().getVersionId()); assertEquals(Long.valueOf(1L), list.find(path -> path.attributes().isDuplicate()).attributes().getRevision()); } // Add hide marker new B2DeleteFeature(session, fileid).delete(Collections.singletonList(file.withAttributes(new PathAttributes(file.attributes()).withVersionId(null))), new DisabledLoginCallback(), new Delete.DisabledCallback()); { final AttributedList<Path> list = new B2ObjectListService(session, fileid).list(bucket, new DisabledListProgressListener()); assertEquals(3, list.size()); for(Path f : list) { assertTrue(f.attributes().isDuplicate()); } assertTrue(new B2ObjectListService(session, fileid, 1, VersioningConfiguration.empty()).list(bucket, new DisabledListProgressListener()).isEmpty()); } assertFalse(new B2FindFeature(session, fileid).find(file)); assertFalse(new DefaultFindFeature(session).find(file)); try { new B2AttributesFinderFeature(session, fileid).find(file); fail(); } catch(NotfoundException e) { // } final AttributedList<Path> list = new B2ObjectListService(session, fileid).list(bucket, new DisabledListProgressListener()); assertEquals(list, new B2VersioningFeature(session, fileid).list(file, new DisabledListProgressListener())); final Path other = new B2TouchFeature(session, fileid).touch(new Path(bucket, name + new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final AttributedList<Path> versions = new B2VersioningFeature(session, fileid).list(file, new DisabledListProgressListener()); assertEquals(list, versions); assertFalse(versions.contains(other)); for(Path f : list) { new B2DeleteFeature(session, fileid).delete(Collections.singletonList(f), new DisabledLoginCallback(), new Delete.DisabledCallback()); } new B2DeleteFeature(session, fileid).delete(Arrays.asList(other, bucket), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public static void cleanUpTokenReferral(Configuration conf) { conf.unset(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY); }
@Test public void testCleanUpTokenReferral() throws Exception { Configuration conf = new Configuration(); conf.set(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY, "foo"); TokenCache.cleanUpTokenReferral(conf); assertNull(conf.get(MRJobConfig.MAPREDUCE_JOB_CREDENTIALS_BINARY)); }
@Override public String getMethod() { return PATH; }
@Test public void testAnswerWebAppQueryWithInvalidResult() { AnswerWebAppQuery answerWebAppQuery = AnswerWebAppQuery .builder() .webAppQueryId("123456789") .queryResult(InlineQueryResultArticle .builder() .id("") .title("Text") .inputMessageContent(InputTextMessageContent .builder() .messageText("My own text") .build()) .build()) .build(); assertEquals("answerWebAppQuery", answerWebAppQuery.getMethod()); Throwable thrown = assertThrows(TelegramApiValidationException.class, answerWebAppQuery::validate); assertEquals("ID parameter can't be empty", thrown.getMessage()); }
@Override public Class<? extends StorageBuilder> builder() { return SumPerMinLabeledStorageBuilder.class; }
@Test public void testBuilder() throws IllegalAccessException, InstantiationException { function.accept(MeterEntity.newService("sum_sync_time", Layer.GENERAL), table1); function.calculate(); StorageBuilder<SumPerMinLabeledFunction> storageBuilder = function.builder().newInstance(); final HashMapConverter.ToStorage toStorage = new HashMapConverter.ToStorage(); storageBuilder.entity2Storage(function, toStorage); final Map<String, Object> map = toStorage.obtain(); map.put(SumPerMinLabeledFunction.VALUE, ((DataTable) map.get(SumPerMinLabeledFunction.VALUE)).toStorageData()); map.put(SumPerMinLabeledFunction.TOTAL, ((DataTable) map.get(SumPerMinLabeledFunction.TOTAL)).toStorageData()); SumPerMinLabeledFunction function2 = storageBuilder.storage2Entity(new HashMapConverter.ToEntity(map)); assertThat(function2.getValue()).isEqualTo(function.getValue()); }
@Override public Optional<OffsetExpirationCondition> offsetExpirationCondition() { return Optional.of(new OffsetExpirationConditionImpl(offsetAndMetadata -> offsetAndMetadata.commitTimestampMs)); }
@Test public void testOffsetExpirationCondition() { long currentTimestamp = 30000L; long commitTimestamp = 20000L; long offsetsRetentionMs = 10000L; OffsetAndMetadata offsetAndMetadata = new OffsetAndMetadata(15000L, OptionalInt.empty(), "", commitTimestamp, OptionalLong.empty()); ConsumerGroup group = new ConsumerGroup(new SnapshotRegistry(new LogContext()), "group-id", mock(GroupCoordinatorMetricsShard.class)); Optional<OffsetExpirationCondition> offsetExpirationCondition = group.offsetExpirationCondition(); assertTrue(offsetExpirationCondition.isPresent()); OffsetExpirationConditionImpl condition = (OffsetExpirationConditionImpl) offsetExpirationCondition.get(); assertEquals(commitTimestamp, condition.baseTimestamp().apply(offsetAndMetadata)); assertTrue(condition.isOffsetExpired(offsetAndMetadata, currentTimestamp, offsetsRetentionMs)); }
public Coin parse(String str) throws NumberFormatException { return Coin.valueOf(parseValue(str, Coin.SMALLEST_UNIT_EXPONENT)); }
@Test(expected = NumberFormatException.class) public void parseInvalidHugeNegativeNumber() { NO_CODE.parse("-99999999999999999999"); }
@Override public BeamSqlTable buildBeamSqlTable(Table table) { Schema schema = table.getSchema(); ObjectNode properties = table.getProperties(); Optional<ParsedLocation> parsedLocation = Optional.empty(); if (!Strings.isNullOrEmpty(table.getLocation())) { parsedLocation = Optional.of(parseLocation(checkArgumentNotNull(table.getLocation()))); } List<String> topics = mergeParam(parsedLocation.map(loc -> loc.topic), (ArrayNode) properties.get("topics")); List<String> allBootstrapServers = mergeParam( parsedLocation.map(loc -> loc.brokerLocation), (ArrayNode) properties.get("bootstrap_servers")); String bootstrapServers = String.join(",", allBootstrapServers); Optional<String> payloadFormat = properties.has("format") ? Optional.of(properties.get("format").asText()) : Optional.empty(); if (Schemas.isNestedSchema(schema)) { Optional<PayloadSerializer> serializer = payloadFormat.map( format -> PayloadSerializers.getSerializer( format, checkArgumentNotNull(schema.getField(PAYLOAD_FIELD).getType().getRowSchema()), TableUtils.convertNode2Map(properties))); return new NestedPayloadKafkaTable(schema, bootstrapServers, topics, serializer); } else { /* * CSV is handled separately because multiple rows can be produced from a single message, which * adds complexity to payload extraction. It remains here and as the default because it is the * historical default, but it will not be extended to support attaching extended attributes to * rows. */ if (payloadFormat.orElse("csv").equals("csv")) { return new BeamKafkaCSVTable(schema, bootstrapServers, topics); } PayloadSerializer serializer = PayloadSerializers.getSerializer( payloadFormat.get(), schema, TableUtils.convertNode2Map(properties)); return new PayloadSerializerKafkaTable(schema, bootstrapServers, topics, serializer); } }
@Test public void testBuildBeamSqlProtoTable() { Table table = mockProtoTable("hello", PayloadMessages.SimpleMessage.class); BeamSqlTable sqlTable = provider.buildBeamSqlTable(table); assertNotNull(sqlTable); assertTrue(sqlTable instanceof BeamKafkaTable); BeamKafkaTable kafkaTable = (BeamKafkaTable) sqlTable; assertEquals(LOCATION_BROKER, kafkaTable.getBootstrapServers()); assertEquals(ImmutableList.of(LOCATION_TOPIC), kafkaTable.getTopics()); }
@Override public JobStatus getState() { return jobStatus; }
@Test void initialState() { final JobStatusStore store = new JobStatusStore(0); assertThat(store.getState(), is(JobStatus.INITIALIZING)); }
public static Map<PCollection<?>, ReplacementOutput> singleton( Map<TupleTag<?>, PCollection<?>> original, POutput replacement) { Entry<TupleTag<?>, PCollection<?>> originalElement = Iterables.getOnlyElement(original.entrySet()); Entry<TupleTag<?>, PCollection<?>> replacementElement = Iterables.getOnlyElement(PValues.expandOutput(replacement).entrySet()); return Collections.singletonMap( replacementElement.getValue(), ReplacementOutput.of( TaggedPValue.of(originalElement.getKey(), originalElement.getValue()), TaggedPValue.of(replacementElement.getKey(), replacementElement.getValue()))); }
@Test public void singletonSucceeds() { Map<PCollection<?>, ReplacementOutput> replacements = ReplacementOutputs.singleton(PValues.expandValue(ints), replacementInts); assertThat(replacements, Matchers.hasKey(replacementInts)); ReplacementOutput replacement = replacements.get(replacementInts); Map.Entry<TupleTag<?>, PValue> taggedInts = Iterables.getOnlyElement(ints.expand().entrySet()); assertThat(replacement.getOriginal().getTag(), equalTo(taggedInts.getKey())); assertThat(replacement.getOriginal().getValue(), equalTo(taggedInts.getValue())); assertThat(replacement.getReplacement().getValue(), equalTo(replacementInts)); }
@Override public <VO, VR> KStream<K, VR> leftJoin(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return leftJoin(otherStream, toValueJoinerWithKey(joiner), windows); }
@Test public void shouldNotAllowNullValueJoinerWithKeyOnLeftJoinWithGlobalTable() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.leftJoin(testGlobalTable, MockMapper.selectValueMapper(), (ValueJoinerWithKey<? super String, ? super String, ? super String, ?>) null)); assertThat(exception.getMessage(), equalTo("joiner can't be null")); }
public static String getJobOffsetPath(final String jobId) { return String.join("/", getJobRootPath(jobId), "offset"); }
@Test void assertGetJobOffsetPath() { assertThat(PipelineMetaDataNode.getJobOffsetPath(jobId), is(jobRootPath + "/offset")); }
public UiTopoLayout scale(double scale) { checkArgument(scaleWithinBounds(scale), E_SCALE_OOB); this.scale = scale; return this; }
@Test public void setScale() { mkRootLayout(); layout.scale(3.0); assertEquals("wrong scale", 3.0, layout.scale(), DELTA); layout.scale(0.05); assertEquals("wrong scale", 0.05, layout.scale(), DELTA); }
@Override public OUT nextRecord(OUT record) throws IOException { OUT returnRecord = null; do { returnRecord = super.nextRecord(record); } while (returnRecord == null && !reachedEnd()); return returnRecord; }
@Disabled("Test disabled because we do not support double-quote escaped quotes right now.") @Test void testParserCorrectness() throws Exception { // RFC 4180 Compliance Test content // Taken from http://en.wikipedia.org/wiki/Comma-separated_values#Example final String fileContent = "Year,Make,Model,Description,Price\n" + "1997,Ford,E350,\"ac, abs, moon\",3000.00\n" + "1999,Chevy,\"Venture \"\"Extended Edition\"\"\",\"\",4900.00\n" + "1996,Jeep,Grand Cherokee,\"MUST SELL! air, moon roof, loaded\",4799.00\n" + "1999,Chevy,\"Venture \"\"Extended Edition, Very Large\"\"\",,5000.00\n" + ",,\"Venture \"\"Extended Edition\"\"\",\"\",4900.00"; final FileInputSplit split = createTempFile(fileContent); final TupleTypeInfo<Tuple5<Integer, String, String, String, Double>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo( Integer.class, String.class, String.class, String.class, Double.class); final CsvInputFormat<Tuple5<Integer, String, String, String, Double>> format = new TupleCsvInputFormat<>(PATH, typeInfo); format.setSkipFirstLineAsHeader(true); format.setFieldDelimiter(","); format.configure(new Configuration()); format.open(split); Tuple5<Integer, String, String, String, Double> result = new Tuple5<>(); @SuppressWarnings("unchecked") Tuple5<Integer, String, String, String, Double>[] expectedLines = new Tuple5[] { new Tuple5<>(1997, "Ford", "E350", "ac, abs, moon", 3000.0), new Tuple5<>(1999, "Chevy", "Venture \"Extended Edition\"", "", 4900.0), new Tuple5<>( 1996, "Jeep", "Grand Cherokee", "MUST SELL! air, moon roof, loaded", 4799.00), new Tuple5<>( 1999, "Chevy", "Venture \"Extended Edition, Very Large\"", "", 5000.00), new Tuple5<>(0, "", "Venture \"Extended Edition\"", "", 4900.0) }; try { for (Tuple5<Integer, String, String, String, Double> expected : expectedLines) { result = format.nextRecord(result); assertThat(result).isEqualTo(expected); } assertThat(format.nextRecord(result)).isNull(); assertThat(format.reachedEnd()).isTrue(); } catch (Exception ex) { fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage()); } }
@Override public Result reconcile(Request request) { client.fetch(Theme.class, request.name()) .ifPresent(theme -> { if (isDeleted(theme)) { cleanUpResourcesAndRemoveFinalizer(request.name()); return; } addFinalizerIfNecessary(theme); themeSettingDefaultConfig(theme); reconcileStatus(request.name()); }); return new Result(false, null); }
@Test void reconcileDelete() throws IOException { Path testWorkDir = tempDirectory.resolve("reconcile-delete"); Files.createDirectory(testWorkDir); when(themeRoot.get()).thenReturn(testWorkDir); Theme theme = new Theme(); Metadata metadata = new Metadata(); metadata.setName("theme-test"); metadata.setDeletionTimestamp(Instant.now()); theme.setMetadata(metadata); theme.setKind(Theme.KIND); theme.setApiVersion("theme.halo.run/v1alpha1"); Theme.ThemeSpec themeSpec = new Theme.ThemeSpec(); themeSpec.setSettingName("theme-test-setting"); theme.setSpec(themeSpec); Path defaultThemePath = testWorkDir.resolve("theme-test"); // copy to temp directory FileSystemUtils.copyRecursively(defaultTheme.toPath(), defaultThemePath); assertThat(testWorkDir).isNotEmptyDirectory(); assertThat(defaultThemePath).exists(); when(extensionClient.fetch(eq(Theme.class), eq(metadata.getName()))) .thenReturn(Optional.of(theme)); when(extensionClient.fetch(Setting.class, themeSpec.getSettingName())) .thenReturn(Optional.empty()); themeReconciler.reconcile(new Reconciler.Request(metadata.getName())); verify(extensionClient, times(2)).fetch(eq(Theme.class), eq(metadata.getName())); verify(extensionClient, times(2)).fetch(eq(Setting.class), eq(themeSpec.getSettingName())); verify(extensionClient, times(2)).list(eq(AnnotationSetting.class), any(), any()); assertThat(Files.exists(testWorkDir)).isTrue(); assertThat(Files.exists(defaultThemePath)).isFalse(); }
@Override public LocalAddress localAddress() { return (LocalAddress) super.localAddress(); }
@Test public void testLocalAddressReuse() throws Exception { for (int i = 0; i < 2; i ++) { Bootstrap cb = new Bootstrap(); ServerBootstrap sb = new ServerBootstrap(); cb.group(group1) .channel(LocalChannel.class) .handler(new TestHandler()); sb.group(group2) .channel(LocalServerChannel.class) .childHandler(new ChannelInitializer<LocalChannel>() { @Override public void initChannel(LocalChannel ch) throws Exception { ch.pipeline().addLast(new TestHandler()); } }); Channel sc = null; Channel cc = null; try { // Start server sc = sb.bind(TEST_ADDRESS).sync().channel(); final CountDownLatch latch = new CountDownLatch(1); // Connect to the server cc = cb.connect(sc.localAddress()).sync().channel(); final Channel ccCpy = cc; cc.eventLoop().execute(new Runnable() { @Override public void run() { // Send a message event up the pipeline. ccCpy.pipeline().fireChannelRead("Hello, World"); latch.countDown(); } }); assertTrue(latch.await(5, SECONDS)); // Close the channel closeChannel(cc); closeChannel(sc); sc.closeFuture().sync(); assertNull(LocalChannelRegistry.get(TEST_ADDRESS), String.format( "Expected null, got channel '%s' for local address '%s'", LocalChannelRegistry.get(TEST_ADDRESS), TEST_ADDRESS)); } finally { closeChannel(cc); closeChannel(sc); } } }
@Override public void enableAutoTrackFragments(List<Class<?>> fragmentsList) { }
@Test public void enableAutoTrackFragments() { ArrayList<Class<?>> fragments = new ArrayList<>(); fragments.add(Fragment.class); fragments.add(DialogFragment.class); mSensorsAPI.enableAutoTrackFragments(fragments); Assert.assertFalse(mSensorsAPI.isFragmentAutoTrackAppViewScreen(Fragment.class)); Assert.assertFalse(mSensorsAPI.isFragmentAutoTrackAppViewScreen(DialogFragment.class)); }
public MapStoreConfig setWriteCoalescing(boolean writeCoalescing) { this.writeCoalescing = writeCoalescing; return this; }
@Test public void setWriteCoalescing() { MapStoreConfig cfg = new MapStoreConfig(); cfg.setWriteCoalescing(false); assertFalse(cfg.isWriteCoalescing()); MapStoreConfig otherCfg = new MapStoreConfig(); otherCfg.setWriteCoalescing(false); assertEquals(otherCfg, cfg); }
@ApiOperation("删除功能按钮资源") @DeleteMapping("/{actionId}") public ApiResult del(@PathVariable Long actionId){ baseActionService.delAction(actionId); return ApiResult.success(); }
@Test void del() { }
@VisibleForTesting public HashMap<String, File> getPluginsToLoad(String pluginsDirectories, String pluginsInclude) throws IllegalArgumentException { String[] directories = pluginsDirectories.split(";"); LOGGER.info("Plugin directories env: {}, parsed directories to load: '{}'", pluginsDirectories, directories); HashMap<String, File> finalPluginsToLoad = new HashMap<>(); for (String pluginsDirectory : directories) { if (!new File(pluginsDirectory).exists()) { throw new IllegalArgumentException(String.format("Plugins dir [%s] doesn't exist.", pluginsDirectory)); } Collection<File> jarFiles = FileUtils.listFiles( new File(pluginsDirectory), new String[]{JAR_FILE_EXTENSION}, true); List<String> pluginsToLoad = null; if (!StringUtils.isEmpty(pluginsInclude)) { pluginsToLoad = Arrays.asList(pluginsInclude.split(";")); LOGGER.info("Potential plugins to load: [{}]", Arrays.toString(pluginsToLoad.toArray())); } else { LOGGER.info("Please use env variable '{}' to customize plugins to load. Loading all plugins: {}", PLUGINS_INCLUDE_PROPERTY_NAME, Arrays.toString(jarFiles.toArray())); } for (File jarFile : jarFiles) { File pluginDir = jarFile.getParentFile(); String pluginName = pluginDir.getName(); LOGGER.info("Found plugin, pluginDir: {}, pluginName: {}", pluginDir, pluginName); if (pluginsToLoad != null) { if (!pluginsToLoad.contains(pluginName)) { LOGGER.info("Skipping plugin: {} is not inside pluginsToLoad {}", pluginName, pluginsToLoad); continue; } } if (!finalPluginsToLoad.containsKey(pluginName)) { finalPluginsToLoad.put(pluginName, pluginDir); LOGGER.info("Added [{}] from dir [{}] to final list of plugins to load", pluginName, pluginDir); } } } return finalPluginsToLoad; }
@Test public void testGetPluginsToLoad() throws IOException { /* We have two plugin directories (../plugins/d1/ and ../plugins/d2/) * plugins to include = [ p1, p2, p3 ] * d1 has plugins: p1 * d2 has plugins: p1, p2, p3, p4 * We expect d1/p1, d2/p2, d2/p3 to be picked up * - ensuring second instance of p1 is ignored * - ensuring p4 is ignored as it's not on the plugins to include list */ String pluginsDirs = _tempDir + "/plugins/d1;" + _tempDir + "/plugins/d2;"; String pluginsToInclude = "p1;p2;p3"; // specifically excluding p4.jar File pluginsDir = new File(_tempDir + "/plugins"); pluginsDir.mkdir(); File subPluginsDir1 = new File(pluginsDir + "/d1"); subPluginsDir1.mkdir(); File subPluginsDir2 = new File(pluginsDir + "/d2"); subPluginsDir2.mkdir(); _p1 = new File(pluginsDir + "/d1/p1/p1.jar"); FileUtils.touch(_p1); _p1Copy = new File(pluginsDir + "/d2/p1/p1.jar"); FileUtils.touch(_p1Copy); _p2 = new File(pluginsDir + "/d2/p2/p2.jar"); FileUtils.touch(_p2); _p3 = new File(pluginsDir + "/d2/p3/p3.jar"); FileUtils.touch(_p3); _p4 = new File(pluginsDir + "/d2/p4/p4.jar"); FileUtils.touch(_p4); HashMap<String, File> actualPluginsMap = PluginManager.get().getPluginsToLoad(pluginsDirs, pluginsToInclude); Assert.assertEquals(actualPluginsMap.size(), 3); HashMap<String, String> actualPluginNamesAndPaths = new HashMap<>(); for (Map.Entry<String, File> entry : actualPluginsMap.entrySet()) { actualPluginNamesAndPaths.put(entry.getKey(), entry.getValue().getAbsolutePath()); } HashMap<String, String> expectedPluginNamesAndPaths = new HashMap<>(); expectedPluginNamesAndPaths.put("p1", _p1.getParentFile().getAbsolutePath()); expectedPluginNamesAndPaths.put("p2", _p2.getParentFile().getAbsolutePath()); expectedPluginNamesAndPaths.put("p3", _p3.getParentFile().getAbsolutePath()); Assert.assertEquals(actualPluginNamesAndPaths, expectedPluginNamesAndPaths); }
public static Schema create(Type type) { switch (type) { case STRING: return new StringSchema(); case BYTES: return new BytesSchema(); case INT: return new IntSchema(); case LONG: return new LongSchema(); case FLOAT: return new FloatSchema(); case DOUBLE: return new DoubleSchema(); case BOOLEAN: return new BooleanSchema(); case NULL: return new NullSchema(); default: throw new AvroRuntimeException("Can't create a: " + type); } }
@Test void longAsFloatDefaultValue() { Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.FLOAT), "doc", 1L); assertTrue(field.hasDefaultValue()); assertEquals(1.0f, field.defaultVal()); assertEquals(1.0f, GenericData.get().getDefaultValue(field)); }
@Description("Inverse of Laplace cdf given mean, scale parameters and probability") @ScalarFunction @SqlType(StandardTypes.DOUBLE) public static double inverseLaplaceCdf( @SqlType(StandardTypes.DOUBLE) double mean, @SqlType(StandardTypes.DOUBLE) double scale, @SqlType(StandardTypes.DOUBLE) double p) { checkCondition(scale > 0, INVALID_FUNCTION_ARGUMENT, "inverseLaplaceCdf Function: scale must be greater than 0"); checkCondition(p >= 0 && p <= 1, INVALID_FUNCTION_ARGUMENT, "inverseLaplaceCdf Function: p must be in the interval [0, 1]"); LaplaceDistribution distribution = new LaplaceDistribution(null, mean, scale); return distribution.inverseCumulativeProbability(p); }
@Test public void testInverseLaplaceCdf() { assertFunction("inverse_laplace_cdf(5, 1, 0.5)", DOUBLE, 5.0); assertFunction("inverse_laplace_cdf(5, 2, 0.5)", DOUBLE, 5.0); assertFunction("round(inverse_laplace_cdf(5, 2, 0.6), 4)", DOUBLE, 5.0 + 0.4463); assertFunction("round(inverse_laplace_cdf(-5, 2, 0.4), 4)", DOUBLE, -5.0 - 0.4463); assertInvalidFunction("inverse_laplace_cdf(5, 2, -0.1)", "inverseLaplaceCdf Function: p must be in the interval [0, 1]"); assertInvalidFunction("inverse_laplace_cdf(5, 2, 1.1)", "inverseLaplaceCdf Function: p must be in the interval [0, 1]"); assertInvalidFunction("inverse_laplace_cdf(5, 0, 0.5)", "inverseLaplaceCdf Function: scale must be greater than 0"); assertInvalidFunction("inverse_laplace_cdf(5, -1, 0.5)", "inverseLaplaceCdf Function: scale must be greater than 0"); }
@Override public URI getUri() { return myUri; }
@Test public void testURI() { URI uri = fSys.getUri(); Assert.assertEquals(chrootedTo.toUri(), uri); }
public static KiePMMLRegressionModel getKiePMMLRegressionModelClasses(final RegressionCompilationDTO compilationDTO) throws IOException, IllegalAccessException, InstantiationException { logger.trace("getKiePMMLRegressionModelClasses {} {}", compilationDTO.getFields(), compilationDTO.getModel()); Map<String, AbstractKiePMMLTable> regressionTablesMap = getRegressionTables(compilationDTO); try { AbstractKiePMMLTable nestedTable = regressionTablesMap.size() == 1 ? regressionTablesMap.values().iterator().next() : regressionTablesMap.values().stream().filter(KiePMMLClassificationTable.class::isInstance) .findFirst() .orElseThrow(() -> new KiePMMLException("Failed to find expected " + KiePMMLClassificationTable.class.getSimpleName())); return KiePMMLRegressionModel.builder(compilationDTO.getFileName(), compilationDTO.getModelName(), compilationDTO.getMINING_FUNCTION()) .withAbstractKiePMMLTable(nestedTable) .withTargetField(compilationDTO.getTargetFieldName()) .withMiningFields(compilationDTO.getKieMiningFields()) .withOutputFields(compilationDTO.getKieOutputFields()) .withKiePMMLMiningFields(compilationDTO.getKiePMMLMiningFields()) .withKiePMMLOutputFields(compilationDTO.getKiePMMLOutputFields()) .withKiePMMLTargets(compilationDTO.getKiePMMLTargetFields()) .withKiePMMLTransformationDictionary(compilationDTO.getKiePMMLTransformationDictionary()) .withKiePMMLLocalTransformations(compilationDTO.getKiePMMLLocalTransformations()) .build(); } catch (Exception e) { throw new KiePMMLException(e); } }
@Test void getKiePMMLRegressionModelClasses() throws IOException, IllegalAccessException, InstantiationException { final CompilationDTO<RegressionModel> compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, regressionModel, new PMMLCompilationContextMock(), "FILENAME"); KiePMMLRegressionModel retrieved = KiePMMLRegressionModelFactory.getKiePMMLRegressionModelClasses(RegressionCompilationDTO.fromCompilationDTO(compilationDTO)); assertThat(retrieved).isNotNull(); assertThat(retrieved.getName()).isEqualTo(regressionModel.getModelName()); assertThat(retrieved.getMiningFunction()).isEqualTo(MINING_FUNCTION.byName(regressionModel.getMiningFunction().value())); assertThat(retrieved.getTargetField()).isEqualTo(miningFields.get(0).getName()); final AbstractKiePMMLTable regressionTable = retrieved.getRegressionTable(); assertThat(regressionTable).isNotNull(); assertThat(regressionTable).isInstanceOf(KiePMMLClassificationTable.class); evaluateCategoricalRegressionTable((KiePMMLClassificationTable) regressionTable); }
public List<String> getWordList() { return wordList; }
@Test public void testGetWordList() { List<String> wordList = mc.getWordList(); assertEquals(2048, wordList.size()); assertEquals("abandon", wordList.get(0)); assertEquals("zoo", wordList.get(2047)); }
public static boolean isPositiveInteger( String strNum ) { boolean result = true; if ( strNum == null ) { result = false; } else { try { int value = Integer.parseInt( strNum.trim() ); if ( value <= 0 ) { result = false; } } catch ( NumberFormatException nfe ) { result = false; } } return result; }
@Test public void test_isPositiveNumber_ForPositiveIntegers() { for ( String value : posInt) { assertTrue( JobEntryPing.isPositiveInteger( value ) ); } }
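A quick illustration of the guard clauses above (a hedged sketch, not asserted by the test; the comments state the outcomes implied by the trim, the <= 0 check, and the swallowed NumberFormatException): JobEntryPing.isPositiveInteger(" 42 "); // true: the input is trimmed before parsing JobEntryPing.isPositiveInteger("0"); // false: zero is not positive JobEntryPing.isPositiveInteger(null); // false: explicit null guard JobEntryPing.isPositiveInteger("4.2"); // false: the NumberFormatException is swallowed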
public static String getSystemProperty(final String key, final String defaultValue) { return System.getProperty(key, defaultValue); }
@Test void assertGetDefaultValue() { assertThat(SystemPropertyUtils.getSystemProperty("key0", "value0"), is("value0")); }
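Since getSystemProperty is a thin wrapper over System.getProperty, a property that has been set wins over the default; a minimal sketch (the key names here are hypothetical): System.setProperty("key1", "actual"); assertThat(SystemPropertyUtils.getSystemProperty("key1", "fallback"), is("actual")); /* set value wins */ assertThat(SystemPropertyUtils.getSystemProperty("key2", "fallback"), is("fallback")); /* default used */ System.clearProperty("key1"); // avoid leaking state into other tests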
public void isInstanceOf(Class<?> clazz) { if (clazz == null) { throw new NullPointerException("clazz"); } if (actual == null) { failWithActual("expected instance of", clazz.getName()); return; } if (!isInstanceOfType(actual, clazz)) { if (Platform.classMetadataUnsupported()) { throw new UnsupportedOperationException( actualCustomStringRepresentation() + ", an instance of " + actual.getClass().getName() + ", may or may not be an instance of " + clazz.getName() + ". Under -XdisableClassMetadata, we do not have enough information to tell."); } failWithoutActual( fact("expected instance of", clazz.getName()), fact("but was instance of", actual.getClass().getName()), fact("with value", actualCustomStringRepresentation())); } }
@Test public void isInstanceOfClassForNull() { expectFailure.whenTesting().that((Object) null).isInstanceOf(Long.class); assertFailureKeys("expected instance of", "but was"); assertFailureValue("expected instance of", "java.lang.Long"); }
@Override public Iterable<Device> getDevices() { return Collections.unmodifiableCollection(devices.values()); }
@Test public final void testGetDevices() { assertEquals("initially empty", 0, Iterables.size(deviceStore.getDevices())); putDevice(DID1, SW1); putDevice(DID2, SW2); putDevice(DID1, SW1); assertEquals("expect 2 unique devices", 2, Iterables.size(deviceStore.getDevices())); Map<DeviceId, Device> devices = new HashMap<>(); for (Device device : deviceStore.getDevices()) { devices.put(device.id(), device); } assertDevice(DID1, SW1, devices.get(DID1)); assertDevice(DID2, SW2, devices.get(DID2)); // add case for new node? }
@Override public boolean skip(final ServerWebExchange exchange) { return skipExcept(exchange, RpcTypeEnum.WEB_SOCKET); }
@Test public void skip() { initMockInfo(); assertTrue(webSocketPlugin.skip(exchange)); }
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload, final ConnectionSession connectionSession) { switch (commandPacketType) { case COM_QUIT: return new MySQLComQuitPacket(); case COM_INIT_DB: return new MySQLComInitDbPacket(payload); case COM_FIELD_LIST: return new MySQLComFieldListPacket(payload); case COM_QUERY: return new MySQLComQueryPacket(payload); case COM_STMT_PREPARE: return new MySQLComStmtPreparePacket(payload); case COM_STMT_EXECUTE: MySQLServerPreparedStatement serverPreparedStatement = connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex())); return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount()); case COM_STMT_SEND_LONG_DATA: return new MySQLComStmtSendLongDataPacket(payload); case COM_STMT_RESET: return new MySQLComStmtResetPacket(payload); case COM_STMT_CLOSE: return new MySQLComStmtClosePacket(payload); case COM_SET_OPTION: return new MySQLComSetOptionPacket(payload); case COM_PING: return new MySQLComPingPacket(); case COM_RESET_CONNECTION: return new MySQLComResetConnectionPacket(); default: return new MySQLUnsupportedCommandPacket(commandPacketType); } }
@Test void assertNewInstanceWithComProcessKillPacket() { assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_PROCESS_KILL, payload, connectionSession), instanceOf(MySQLUnsupportedCommandPacket.class)); }
static DiscoveryResponse parseJsonResponse(JsonValue jsonValue, boolean tpcEnabled) throws IOException { List<JsonValue> response = jsonValue.asArray().values(); Map<Address, Address> privateToPublic = new HashMap<>(); List<Address> memberAddresses = new ArrayList<>(response.size()); for (JsonValue value : response) { JsonObject object = value.asObject(); String privateAddress = object.get(PRIVATE_ADDRESS_PROPERTY).asString(); String publicAddress = object.get(PUBLIC_ADDRESS_PROPERTY).asString(); Address publicAddr = createAddress(publicAddress, -1); // if it is not explicitly given, create the private address with public addresses port Address privateAddr = createAddress(privateAddress, publicAddr.getPort()); privateToPublic.put(privateAddr, publicAddr); memberAddresses.add(privateAddr); JsonValue tpcPorts = object.get(TPC_PORTS_PROPERTY); if (!tpcEnabled || tpcPorts == null) { continue; } try { for (JsonValue tpcPort : tpcPorts.asArray()) { parseTpcPortMapping(tpcPort.asObject(), publicAddr.getHost(), privateAddr.getHost(), privateToPublic); } } catch (Exception ignored) { EmptyStatement.ignore(ignored); } } return new DiscoveryResponse(privateToPublic, memberAddresses); }
@Test public void testJsonResponseParse_withTpc_whenTpcIsDisabled() throws IOException { JsonValue jsonResponse = Json.parse(""" [ { "private-address": "10.96.5.1:30000", "public-address": "100.113.44.139:31115", "tpc-ports": [ { "private-port": 40000, "public-port": 32115 } ] } ]"""); HazelcastCloudDiscovery.DiscoveryResponse response = HazelcastCloudDiscovery.parseJsonResponse(jsonResponse, false); Map<Address, Address> privateToPublic = response.getPrivateToPublicAddresses(); assertEquals(1, privateToPublic.size()); assertEquals(new Address("100.113.44.139", 31115), privateToPublic.get(new Address("10.96.5.1", 30000))); List<Address> members = response.getPrivateMemberAddresses(); assertEquals(1, members.size()); assertContains(members, new Address("10.96.5.1", 30000)); }
public List<String> toPrefix(String in) { List<String> tokens = buildTokens(alignINClause(in)); List<String> output = new ArrayList<>(); List<String> stack = new ArrayList<>(); for (String token : tokens) { if (isOperand(token)) { if (token.equals(")")) { while (openParanthesesFound(stack)) { output.add(stack.remove(stack.size() - 1)); } if (!stack.isEmpty()) { // temporary fix for issue #189 stack.remove(stack.size() - 1); } } else { while (openParanthesesFound(stack) && !hasHigherPrecedence(token, stack.get(stack.size() - 1))) { output.add(stack.remove(stack.size() - 1)); } stack.add(token); } } else { output.add(token); } } while (!stack.isEmpty()) { output.add(stack.remove(stack.size() - 1)); } return output; }
@Test public void testNotEqual2() { String query = "b <> 30"; List<String> list = parser.toPrefix(query); assertEquals(Arrays.asList("b", "30", "<>"), list); }
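Note that despite its name, toPrefix emits operands before their operator, i.e. postfix (reverse Polish) order, as the assertion above shows. A hedged sketch of the parenthesis handling, reusing the test's parser field; the expected list follows from the ")" branch, which pops until the matching "(" and then discards it: List<String> list = parser.toPrefix("(b <> 30)"); // expected: [b, 30, <>], the same as for the unparenthesized query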
@Override public boolean isResourceEvictable(String key, FileStatus file) { synchronized (initialAppsLock) { if (initialApps.size() > 0) { return false; } } long staleTime = System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(this.stalenessMinutes); long accessTime = getAccessTime(key); if (accessTime == -1) { // check modification time long modTime = file.getModificationTime(); // if modification time is older than the store startup time, we need to // just use the store startup time as the last point of certainty long lastUse = modTime < this.startTime ? this.startTime : modTime; return lastUse < staleTime; } else { // check access time return accessTime < staleTime; } }
@Test void testEvictableWithInitialApps() throws Exception { startStoreWithApps(); assertFalse(store.isResourceEvictable("key", mock(FileStatus.class))); }
@Override public void showPreviewForKey( Keyboard.Key key, Drawable icon, View parentView, PreviewPopupTheme previewPopupTheme) { KeyPreview popup = getPopupForKey(key, parentView, previewPopupTheme); Point previewPosition = mPositionCalculator.calculatePositionForPreview( key, previewPopupTheme, getLocationInWindow(parentView)); popup.showPreviewForKey(key, icon, previewPosition); }
@Test public void testSetupPopupLayoutForKeyLabel() { KeyPreviewsManager underTest = new KeyPreviewsManager(getApplicationContext(), mPositionCalculator, 3); underTest.showPreviewForKey(mTestKeys[0], mTestKeys[0].label, mKeyboardView, mTheme); final PopupWindow window = getLatestCreatedPopupWindow(); final TextView textView = window.getContentView().findViewById(R.id.key_preview_text); Assert.assertEquals(textView.getText().toString(), mTestKeys[0].label); Assert.assertEquals(View.VISIBLE, textView.getVisibility()); final ImageView imageView = window.getContentView().findViewById(R.id.key_preview_icon); Assert.assertEquals(View.GONE, imageView.getVisibility()); }
public synchronized NumaResourceAllocation allocateNumaNodes( Container container) throws ResourceHandlerException { NumaResourceAllocation allocation = allocate(container.getContainerId(), container.getResource()); if (allocation != null) { try { // Update state store. context.getNMStateStore().storeAssignedResources(container, NUMA_RESOURCE_TYPE, Arrays.asList(allocation)); } catch (IOException e) { releaseNumaResource(container.getContainerId()); throw new ResourceHandlerException(e); } } return allocation; }
@Test public void testAllocateNumaNodeWithMultipleNodesForMemory() throws Exception { NumaResourceAllocation nodeInfo = numaResourceAllocator .allocateNumaNodes(getContainer( ContainerId.fromString("container_1481156246874_0001_01_000001"), Resource.newInstance(102400, 2))); Assert.assertEquals("0,1", String.join(",", nodeInfo.getMemNodes())); Assert.assertEquals("0", String.join(",", nodeInfo.getCpuNodes())); }
@Override public String toString() { return getClass().getSimpleName(); }
@Test public void testRoadClassInfo() { GraphHopper gh = new GraphHopper() { @Override protected File _getOSMFile() { return new File(getClass().getResource(file2).getFile()); } }.setOSMFile("dummy"). setEncodedValuesString("car_access,car_average_speed"). setProfiles(TestProfiles.accessAndSpeed("profile", "car")). setMinNetworkSize(0). setGraphHopperLocation(dir). importOrLoad(); GHResponse response = gh.route(new GHRequest(51.2492152, 9.4317166, 52.133, 9.1) .setProfile("profile") .setPathDetails(Collections.singletonList(RoadClass.KEY))); assertFalse(response.hasErrors(), response.getErrors().toString()); List<PathDetail> list = response.getBest().getPathDetails().get(RoadClass.KEY); assertEquals(3, list.size()); assertEquals("motorway", list.get(0).getValue()); response = gh.route(new GHRequest(51.2492152, 9.4317166, 52.133, 9.1) .setProfile("profile") .setPathDetails(Arrays.asList(Toll.KEY, Country.KEY))); Throwable ex = response.getErrors().get(0); assertEquals("Cannot find the path details: [toll, country]", ex.getMessage()); }
@Override public boolean registerApplication(ApplicationId appId) { Application app = applicationAdminService.getApplication(appId); if (app == null) { log.warn("Unknown application."); return false; } localAppBundleDirectory.put(appId, getBundleLocations(app)); for (String location : localAppBundleDirectory.get(appId)) { if (!localBundleAppDirectory.containsKey(location)) { localBundleAppDirectory.put(location, new HashSet<>()); } if (!localBundleAppDirectory.get(location).contains(appId)) { localBundleAppDirectory.get(location).add(appId); } } states.put(appId, new SecurityInfo(Sets.newHashSet(), INSTALLED)); return true; }
@Test public void testRegisterApplication() { states.remove(appId); assertNull(states.get(appId)); for (String location : localAppBundleDirectory.get(appId)) { if (!localBundleAppDirectory.containsKey(location)) { localBundleAppDirectory.put(location, new HashSet<>()); } if (!localBundleAppDirectory.get(location).contains(appId)) { localBundleAppDirectory.get(location).add(appId); } } states.put(appId, new SecurityInfo(Sets.newHashSet(), INSTALLED)); assertNotNull(states.get(appId)); assertEquals(INSTALLED, states.get(appId).getState()); }
@Override public void resumeAutoTrackActivity(Class<?> activity) { }
@Test public void resumeAutoTrackActivity() { mSensorsAPI.ignoreAutoTrackActivity(EmptyActivity.class); mSensorsAPI.resumeAutoTrackActivity(EmptyActivity.class); Assert.assertTrue(mSensorsAPI.isActivityAutoTrackAppClickIgnored(EmptyActivity.class)); }
@Override public Collection<? extends Backend> get() { ClassLoader classLoader = classLoaderSupplier.get(); return get(ServiceLoader.load(BackendProviderService.class, classLoader)); }
@Test void should_throw_an_exception_when_no_backend_could_be_found() { BackendServiceLoader backendSupplier = new BackendServiceLoader(classLoaderSupplier, objectFactory); Executable testMethod = () -> backendSupplier.get(emptyList()).iterator().next(); CucumberException actualThrown = assertThrows(CucumberException.class, testMethod); assertThat("Unexpected exception message", actualThrown.getMessage(), is(equalTo( "No backends were found. Please make sure you have a backend module on your CLASSPATH."))); }
public void setProfile(final Set<String> indexSetsIds, final String profileId, final boolean rotateImmediately) { checkProfile(profileId); checkAllIndicesSupportProfileChange(indexSetsIds); for (String indexSetId : indexSetsIds) { try { indexSetService.get(indexSetId).ifPresent(indexSetConfig -> { var updatedIndexSetConfig = setProfileForIndexSet(profileId, indexSetConfig); if (rotateImmediately) { updatedIndexSetConfig.ifPresent(this::cycleIndexSet); } }); } catch (Exception ex) { LOG.error("Failed to update field type in index set: " + indexSetId, ex); throw ex; } } }
@Test void testOverridesPreviousProfile() { existingIndexSet = existingIndexSet.toBuilder() .fieldTypeProfile("000000000000000000000042") .build(); doReturn(Optional.of(existingIndexSet)).when(indexSetService).get("existing_index_set"); final String profileId = "000000000000000000000007"; IndexFieldTypeProfile profile = new IndexFieldTypeProfile( profileId, "Nice profile!", "Nice profile!", new CustomFieldMappings(List.of(new CustomFieldMapping("bubamara", "ip"))) ); doReturn(Optional.of(profile)).when(profileService).get(profileId); toTest.setProfile(Set.of(existingIndexSet.id()), profileId, false); verify(mongoIndexSetService).save( existingIndexSet.toBuilder() .fieldTypeProfile(profileId) .build()); verifyNoInteractions(existingMongoIndexSet); }
@Override public void showUpWebView(WebView webView, boolean isSupportJellyBean) { }
@Test public void showUpWebView() { WebView webView = new WebView(mApplication); mSensorsAPI.showUpWebView(webView, false); }
long[] index() { return index; }
@Test void defaultIndexCapacityIsTenEntries() { final long[] emptyIndex = new long[20]; assertArrayEquals(emptyIndex, catalogIndex.index()); }
@Override public boolean removeAll(Collection<?> c) { // will throw UnsupportedOperationException; delegate anyway for testability return underlying().removeAll(c); }
@Test public void testDelegationOfUnsupportedFunctionRemoveAll() { new PCollectionsHashSetWrapperDelegationChecker<>() .defineMockConfigurationForUnsupportedFunction(mock -> mock.removeAll(eq(Collections.emptyList()))) .defineWrapperUnsupportedFunctionInvocation(wrapper -> wrapper.removeAll(Collections.emptyList())) .doUnsupportedFunctionDelegationCheck(); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { final ThreadPool pool = ThreadPoolFactory.get("list", concurrency); try { final String prefix = this.createPrefix(directory); if(log.isDebugEnabled()) { log.debug(String.format("List with prefix %s", prefix)); } final Path bucket = containerService.getContainer(directory); final AttributedList<Path> objects = new AttributedList<>(); String priorLastKey = null; String priorLastVersionId = null; long revision = 0L; String lastKey = null; boolean hasDirectoryPlaceholder = bucket.isRoot() || containerService.isContainer(directory); do { final VersionOrDeleteMarkersChunk chunk = session.getClient().listVersionedObjectsChunked( bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), prefix, String.valueOf(Path.DELIMITER), new HostPreferences(session.getHost()).getInteger("s3.listing.chunksize"), priorLastKey, priorLastVersionId, false); // Amazon S3 returns object versions in the order in which they were stored, with the most recently stored returned first. for(BaseVersionOrDeleteMarker marker : chunk.getItems()) { final String key = URIEncoder.decode(marker.getKey()); if(new SimplePathPredicate(PathNormalizer.compose(bucket, key)).test(directory)) { if(log.isDebugEnabled()) { log.debug(String.format("Skip placeholder key %s", key)); } hasDirectoryPlaceholder = true; continue; } final PathAttributes attr = new PathAttributes(); attr.setVersionId(marker.getVersionId()); if(!StringUtils.equals(lastKey, key)) { // Reset revision for next file revision = 0L; } attr.setRevision(++revision); attr.setDuplicate(marker.isDeleteMarker() && marker.isLatest() || !marker.isLatest()); if(marker.isDeleteMarker()) { attr.setCustom(Collections.singletonMap(KEY_DELETE_MARKER, String.valueOf(true))); } attr.setModificationDate(marker.getLastModified().getTime()); attr.setRegion(bucket.attributes().getRegion()); if(marker instanceof S3Version) { final S3Version object = (S3Version) marker; attr.setSize(object.getSize()); if(StringUtils.isNotBlank(object.getEtag())) { attr.setETag(StringUtils.remove(object.getEtag(), "\"")); // The ETag will only be the MD5 of the object data when the object is stored as plaintext or encrypted // using SSE-S3. If the object is encrypted using another method (such as SSE-C or SSE-KMS) the ETag is // not the MD5 of the object data. attr.setChecksum(Checksum.parse(StringUtils.remove(object.getEtag(), "\""))); } if(StringUtils.isNotBlank(object.getStorageClass())) { attr.setStorageClass(object.getStorageClass()); } } final Path f = new Path(directory.isDirectory() ? directory : directory.getParent(), PathNormalizer.name(key), EnumSet.of(Path.Type.file), attr); if(metadata) { f.withAttributes(attributes.find(f)); } objects.add(f); lastKey = key; } final String[] prefixes = chunk.getCommonPrefixes(); final List<Future<Path>> folders = new ArrayList<>(); for(String common : prefixes) { if(new SimplePathPredicate(PathNormalizer.compose(bucket, URIEncoder.decode(common))).test(directory)) { continue; } folders.add(this.submit(pool, bucket, directory, URIEncoder.decode(common))); } for(Future<Path> f : folders) { try { objects.add(Uninterruptibles.getUninterruptibly(f)); } catch(ExecutionException e) { log.warn(String.format("Listing versioned objects failed with execution failure %s", e.getMessage())); for(Throwable cause : ExceptionUtils.getThrowableList(e)) { Throwables.throwIfInstanceOf(cause, BackgroundException.class); } throw new DefaultExceptionMappingService().map(Throwables.getRootCause(e)); } } priorLastKey = null != chunk.getNextKeyMarker() ? URIEncoder.decode(chunk.getNextKeyMarker()) : null; priorLastVersionId = chunk.getNextVersionIdMarker(); listener.chunk(directory, objects); } while(priorLastKey != null); if(!hasDirectoryPlaceholder && objects.isEmpty()) { // Only for AWS if(S3Session.isAwsHostname(session.getHost().getHostname())) { if(StringUtils.isEmpty(RequestEntityRestStorageService.findBucketInHostname(session.getHost()))) { if(log.isWarnEnabled()) { log.warn(String.format("No placeholder found for directory %s", directory)); } throw new NotfoundException(directory.getAbsolute()); } } else { // Handle missing prefix for directory placeholders in Minio final VersionOrDeleteMarkersChunk chunk = session.getClient().listVersionedObjectsChunked( bucket.isRoot() ? StringUtils.EMPTY : bucket.getName(), String.format("%s%s", this.createPrefix(directory.getParent()), directory.getName()), String.valueOf(Path.DELIMITER), 1, null, null, false); if(Arrays.stream(chunk.getCommonPrefixes()).map(URIEncoder::decode).noneMatch(common -> common.equals(prefix))) { throw new NotfoundException(directory.getAbsolute()); } } } return objects; } catch(ServiceException e) { throw new S3ExceptionMappingService().map("Listing directory {0} failed", e, directory); } finally { // Cancel future tasks pool.shutdown(false); } }
@Test public void testListPlaceholderPlusCharacter() throws Exception { final Path bucket = new Path("versioning-test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); bucket.attributes().setRegion("us-east-1"); final S3AccessControlListFeature acl = new S3AccessControlListFeature(session); final Path directory = new S3DirectoryFeature(session, new S3WriteFeature(session, acl), acl).mkdir(new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus().withLength(0L)); final Path placeholder = new S3DirectoryFeature(session, new S3WriteFeature(session, acl), acl).mkdir( new Path(directory, String.format("test+%s", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.directory)), new TransferStatus()); assertTrue(new S3VersionedObjectListService(session, acl).list(directory, new DisabledListProgressListener()).contains(placeholder)); assertTrue(new S3VersionedObjectListService(session, acl).list(placeholder, new DisabledListProgressListener()).isEmpty()); new S3DefaultDeleteFeature(session).delete(Collections.singletonList(placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public static DataSource createDataSource(final ModeConfiguration modeConfig) throws SQLException { return createDataSource(DefaultDatabase.LOGIC_NAME, modeConfig); }
@Test void assertCreateDataSourceWithDatabaseNameAndModeConfiguration() throws SQLException { assertDataSource(ShardingSphereDataSourceFactory.createDataSource("test_db", new ModeConfiguration("Standalone", null), Collections.emptyMap(), null, null), "test_db"); }
@SuppressWarnings({ "nullness" // TODO(https://github.com/apache/beam/issues/20497) }) public static TableReference parseTableSpec(String tableSpec) { Matcher match = BigQueryIO.TABLE_SPEC.matcher(tableSpec); if (!match.matches()) { throw new IllegalArgumentException( String.format( "Table specification [%s] is not in one of the expected formats (" + " [project_id]:[dataset_id].[table_id]," + " [project_id].[dataset_id].[table_id]," + " [dataset_id].[table_id])", tableSpec)); } TableReference ref = new TableReference(); ref.setProjectId(match.group("PROJECT")); return ref.setDatasetId(match.group("DATASET")).setTableId(match.group("TABLE")); }
@Test public void testTableParsing_validPatterns() { BigQueryHelpers.parseTableSpec("a123-456:foo_bar.d"); BigQueryHelpers.parseTableSpec("a12345:b.c"); BigQueryHelpers.parseTableSpec("a1:b.c"); BigQueryHelpers.parseTableSpec("b12345.c"); }
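A sketch of the contract implied by the regex and the error message above (the spec strings here are hypothetical): TableReference ref = BigQueryHelpers.parseTableSpec("my-project:my_dataset.my_table"); // ref.getProjectId() -> "my-project", ref.getDatasetId() -> "my_dataset", ref.getTableId() -> "my_table" BigQueryHelpers.parseTableSpec("not a table spec"); // throws IllegalArgumentException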
@Override public Expression getExpression(String tableName, Alias tableAlias) { // Data permission handling only applies when there is a logged-in user LoginUser loginUser = SecurityFrameworkUtils.getLoginUser(); if (loginUser == null) { return null; } // Data permission handling only applies to admin-type users if (ObjectUtil.notEqual(loginUser.getUserType(), UserTypeEnum.ADMIN.getValue())) { return null; } // Obtain the data permission DeptDataPermissionRespDTO deptDataPermission = loginUser.getContext(CONTEXT_KEY, DeptDataPermissionRespDTO.class); // If absent from the context, fetch it through the permission API if (deptDataPermission == null) { deptDataPermission = permissionApi.getDeptDataPermission(loginUser.getId()); if (deptDataPermission == null) { log.error("[getExpression][LoginUser({}) data permission is null]", JsonUtils.toJsonString(loginUser)); throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) returned no data permission", loginUser.getId(), tableName, tableAlias.getName())); } // Cache it in the context to avoid recomputation loginUser.setContext(CONTEXT_KEY, deptDataPermission); } // Case 1: ALL means everything is visible, so no condition is needed if (deptDataPermission.getAll()) { return null; } // Case 2: neither the department nor the user itself is visible, i.e. no permission at all if (CollUtil.isEmpty(deptDataPermission.getDeptIds()) && Boolean.FALSE.equals(deptDataPermission.getSelf())) { return new EqualsTo(null, null); // WHERE null = null guarantees an empty result } // Case 3: build the Dept and User conditions, then combine them Expression deptExpression = buildDeptExpression(tableName,tableAlias, deptDataPermission.getDeptIds()); Expression userExpression = buildUserExpression(tableName, tableAlias, deptDataPermission.getSelf(), loginUser.getId()); if (deptExpression == null && userExpression == null) { // TODO 芋艿: when no condition can be built, do not throw for now; return no data instead log.warn("[getExpression][LoginUser({}) Table({}/{}) DeptDataPermission({}) built an empty condition]", JsonUtils.toJsonString(loginUser), tableName, tableAlias, JsonUtils.toJsonString(deptDataPermission)); // throw new NullPointerException(String.format("LoginUser(%d) Table(%s/%s) built an empty condition", // loginUser.getId(), tableName, tableAlias.getName())); return EXPRESSION_NULL; } if (deptExpression == null) { return userExpression; } if (userExpression == null) { return deptExpression; } // Currently, with specified departments plus self-visibility, the conditions are combined with OR, i.e. WHERE (dept_id IN ? OR user_id = ?) return new Parenthesis(new OrExpression(deptExpression, userExpression)); }
@Test // build the Dept and User conditions (neither column matches) public void testGetExpression_noDeptColumn_noSelfColumn() { try (MockedStatic<SecurityFrameworkUtils> securityFrameworkUtilsMock = mockStatic(SecurityFrameworkUtils.class)) { // prepare parameters String tableName = "t_user"; Alias tableAlias = new Alias("u"); // mock method (LoginUser) LoginUser loginUser = randomPojo(LoginUser.class, o -> o.setId(1L) .setUserType(UserTypeEnum.ADMIN.getValue())); securityFrameworkUtilsMock.when(SecurityFrameworkUtils::getLoginUser).thenReturn(loginUser); // mock method (DeptDataPermissionRespDTO) DeptDataPermissionRespDTO deptDataPermission = new DeptDataPermissionRespDTO() .setDeptIds(SetUtils.asSet(10L, 20L)).setSelf(true); when(permissionApi.getDeptDataPermission(same(1L))).thenReturn(deptDataPermission); // invoke Expression expression = rule.getExpression(tableName, tableAlias); // assert assertSame(EXPRESSION_NULL, expression); assertSame(deptDataPermission, loginUser.getContext(DeptDataPermissionRule.CONTEXT_KEY, DeptDataPermissionRespDTO.class)); } }
public static Set<org.onosproject.security.Permission> convertToOnosPermissions(List<Permission> permissions) { Set<org.onosproject.security.Permission> result = Sets.newHashSet(); for (Permission perm : permissions) { org.onosproject.security.Permission onosPerm = getOnosPermission(perm); if (onosPerm != null) { result.add(onosPerm); } } return result; }
@Test public void testConvertToOnosPermissions() { Permission testJavaPerm = new AppPermission("testName"); List<org.onosproject.security.Permission> result = Lists.newArrayList(); org.onosproject.security.Permission onosPerm = new org.onosproject.security.Permission(AppPermission.class.getName(), testJavaPerm.getName(), ""); result.add(onosPerm); assertTrue(!result.isEmpty()); assertEquals("TESTNAME", result.get(0).getName()); }
public synchronized void removeListen(String groupKey, String connectionId) { //1. remove groupKeyContext Set<String> connectionIds = groupKeyContext.get(groupKey); if (connectionIds != null) { connectionIds.remove(connectionId); if (connectionIds.isEmpty()) { groupKeyContext.remove(groupKey); } } //2.remove connectionIdContext HashMap<String, String> groupKeys = connectionIdContext.get(connectionId); if (groupKeys != null) { groupKeys.remove(groupKey); } }
@Test void testRemoveListen() { configChangeListenContext.addListen("groupKey", "md5", "connectionId"); configChangeListenContext.removeListen("groupKey", "connectionId"); Set<String> groupKey = configChangeListenContext.getListeners("groupKey"); assertNull(groupKey); }
private static int parseInteger(final String text) { // This method expects that |text| is not null. final String textTrimmed = text.trim(); final int length = textTrimmed.length(); if ("null".equals(textTrimmed)) { throw new NullPointerException("\"" + text + "\" is considered to be null."); } if (length > 9) { final long longValue = Long.parseLong(textTrimmed); if (longValue < Integer.MIN_VALUE || longValue > Integer.MAX_VALUE) { throw new NumberFormatException("Overflow: \"" + text + "\" is out of range of int."); } return Integer.valueOf((int) longValue); } if (length == 0) { throw new NullPointerException("\"" + text + "\" is considered to be an empty string."); } return Integer.valueOf(parseIntegerInternal(textTrimmed)); }
@Test public void testParseInteger() { assertInteger("11", 11, 1234); assertInteger(" 13", 13, 1234); assertInteger(" 17 ", 17, 1234); assertInteger("111111111", 111111111, 1234); assertInteger("-11", -11, 1234); }
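A behavior sketch of the boundary cases (the method is private, so this assumes in-class access; the expected outcomes follow directly from the guards above): parseInteger("2147483647"); // 10 digits: routed through Long.parseLong, still in int range, so it parses parseInteger("2147483648"); // 10 digits but above Integer.MAX_VALUE: NumberFormatException ("Overflow") parseInteger("null"); // explicit guard: NullPointerException parseInteger("   "); // trims to an empty string: NullPointerException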
@Override public boolean next() throws SQLException { currentRow.clear(); if (getOrderByValuesQueue().isEmpty()) { return false; } if (isFirstNext()) { super.next(); } if (aggregateCurrentGroupByRowAndNext()) { currentGroupByValues = new GroupByValue(getCurrentQueryResult(), selectStatementContext.getGroupByContext().getItems()).getGroupValues(); } return true; }
@Test void assertNextForResultSetsAllEmpty() throws SQLException { ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(TypedSPILoader.getService(DatabaseType.class, "MySQL")); MergedResult actual = resultMerger.merge(Arrays.asList(mockQueryResult(), mockQueryResult(), mockQueryResult()), createSelectStatementContext(), createDatabase(), mock(ConnectionContext.class)); assertFalse(actual.next()); }
protected String parsePolicyWeights(Map<SubClusterIdInfo, Float> policyWeights) { if (MapUtils.isEmpty(policyWeights)) { return null; } List<String> policyWeightList = new ArrayList<>(); for (Map.Entry<SubClusterIdInfo, Float> entry : policyWeights.entrySet()) { SubClusterIdInfo key = entry.getKey(); Float value = entry.getValue(); policyWeightList.add(key.toId() + ":" + value); } return StringUtils.join(policyWeightList, ","); }
@Test public void testParsePolicyWeights() { Map<SubClusterIdInfo, Float> policyWeights = new LinkedHashMap<>(); SubClusterIdInfo sc1 = new SubClusterIdInfo("SC-1"); policyWeights.put(sc1, 0.7f); SubClusterIdInfo sc2 = new SubClusterIdInfo("SC-2"); policyWeights.put(sc2, 0.3f); String policyWeight = interceptor.parsePolicyWeights(policyWeights); assertEquals("SC-1:0.7,SC-2:0.3", policyWeight); }
public static NamenodeRole convert(NamenodeRoleProto role) { switch (role) { case NAMENODE: return NamenodeRole.NAMENODE; case BACKUP: return NamenodeRole.BACKUP; case CHECKPOINT: return NamenodeRole.CHECKPOINT; } return null; }
@Test public void testConvertNamenodeRegistration() { StorageInfo info = getStorageInfo(NodeType.NAME_NODE); NamenodeRegistration reg = new NamenodeRegistration("address:999", "http:1000", info, NamenodeRole.NAMENODE); NamenodeRegistrationProto regProto = PBHelper.convert(reg); NamenodeRegistration reg2 = PBHelper.convert(regProto); assertEquals(reg.getAddress(), reg2.getAddress()); assertEquals(reg.getClusterID(), reg2.getClusterID()); assertEquals(reg.getCTime(), reg2.getCTime()); assertEquals(reg.getHttpAddress(), reg2.getHttpAddress()); assertEquals(reg.getLayoutVersion(), reg2.getLayoutVersion()); assertEquals(reg.getNamespaceID(), reg2.getNamespaceID()); assertEquals(reg.getRegistrationID(), reg2.getRegistrationID()); assertEquals(reg.getRole(), reg2.getRole()); assertEquals(reg.getVersion(), reg2.getVersion()); }
@Override public int partition(int total, T data) { return (int) Thread.currentThread().getId() % total; }
@Test public void testPartition() { int partitionNum = (int) Thread.currentThread().getId() % 10; ProducerThreadPartitioner<SampleData> partitioner = new ProducerThreadPartitioner<SampleData>(); assertEquals(partitioner.partition(10, new SampleData()), partitionNum); assertEquals(partitioner.partition(10, new SampleData()), partitionNum); assertEquals(partitioner.partition(10, new SampleData()), partitionNum); }
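Since the partition choice depends only on the calling thread's id, repeated calls from one thread are stable while different threads may land elsewhere; a minimal sketch: int p = new ProducerThreadPartitioner<SampleData>().partition(10, new SampleData()); // p == (int) Thread.currentThread().getId() % 10, constant within this thread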
public CompletableFuture<Void> setAsync(UUID uuid, VersionedProfile versionedProfile) { return profiles.setAsync(uuid, versionedProfile) .thenCompose(ignored -> redisSetAsync(uuid, versionedProfile)); }
@Test public void testSetAsync() { final UUID uuid = UUID.randomUUID(); final byte[] name = TestRandomUtil.nextBytes(81); final VersionedProfile profile = new VersionedProfile("someversion", name, "someavatar", null, null, null, null, "somecommitment".getBytes()); when(asyncCommands.hset(eq("profiles::" + uuid), eq("someversion"), anyString())).thenReturn(MockRedisFuture.completedFuture(null)); when(profiles.setAsync(eq(uuid), eq(profile))).thenReturn(CompletableFuture.completedFuture(null)); profilesManager.setAsync(uuid, profile).join(); verify(asyncCommands, times(1)).hset(eq("profiles::" + uuid), eq("someversion"), any()); verifyNoMoreInteractions(asyncCommands); verify(profiles, times(1)).setAsync(eq(uuid), eq(profile)); verifyNoMoreInteractions(profiles); }
@Override public String getUserName() { return null; }
@Test void assertGetUserName() { assertNull(metaData.getUserName()); }
public static JWKSet load(Path path) { try (var fin = Files.newInputStream(path)) { return JWKSet.load(fin); } catch (IOException | ParseException e) { var fullPath = path.toAbsolutePath(); throw new RuntimeException( "failed to load JWKS from '%s' ('%s')".formatted(path, fullPath), e); } }
@Test void load() { var jwks = JwksUtils.load(Path.of("./src/test/resources/fixtures/jwks_utils_sample.json")); assertEquals(1, jwks.size()); assertNotNull(jwks.getKeyByKeyId("test")); }
@Override public Pod pod(String uid) { checkArgument(!Strings.isNullOrEmpty(uid), ERR_NULL_POD_UID); return kubevirtPodStore.pod(uid); }
@Test public void testGetPodByUid() { createBasicPods(); assertNotNull("Pod did not match", target.pod(POD_UID)); assertNull("Pod did not match", target.pod(UNKNOWN_UID)); }
@Override public void write(DataOutput out) throws IOException { String json = GsonUtils.GSON.toJson(this, OnlineOptimizeJobV2.class); Text.writeString(out, json); }
@Test public void testSerializeOfOptimizeJob() throws IOException { // prepare file File file = new File(TEST_FILE_NAME); file.createNewFile(); file.deleteOnExit(); DataOutputStream out = new DataOutputStream(new FileOutputStream(file)); OnlineOptimizeJobV2 optimizeJobV2 = new OnlineOptimizeJobV2(1, 1, 1, "test", 600000); Deencapsulation.setField(optimizeJobV2, "jobState", AlterJobV2.JobState.FINISHED); // write optimize job optimizeJobV2.write(out); out.flush(); out.close(); DataInputStream in = new DataInputStream(new FileInputStream(file)); OnlineOptimizeJobV2 result = (OnlineOptimizeJobV2) AlterJobV2.read(in); Assert.assertEquals(1, result.getJobId()); Assert.assertEquals(AlterJobV2.JobState.FINISHED, result.getJobState()); }
public int sleepAndExecute() { long timeout = timeout(); while (timeout > 0) { ZMQ.msleep(timeout); timeout = timeout(); } return execute(); }
@Test public void testInvokedAfterReset() { testNotInvokedAfterResetHalfTime(); // Wait until the end int rc = timers.sleepAndExecute(); assertThat(rc, is(1)); assertThat(invoked.get(), is(true)); }
public Path(URI uri) { this.path = HttpURL.Path.parse(uri.getRawPath()); }
@Test void testPath() { assertFalse(new Path(URI.create("")).matches("/a/{foo}/bar/{b}")); assertFalse(new Path(URI.create("///")).matches("/a/{foo}/bar/{b}")); assertFalse(new Path(URI.create("///foo")).matches("/a/{foo}/bar/{b}")); assertFalse(new Path(URI.create("///bar/")).matches("/a/{foo}/bar/{b}")); Path path = new Path(URI.create("/a/1/bar/fuz")); assertTrue(path.matches("/a/{foo}/bar/{b}")); assertEquals("1", path.get("foo")); assertEquals("fuz", path.get("b")); }
@Override public DescriptiveUrl toDownloadUrl(final Path file, final Sharee sharee, CreateDownloadShareRequest options, final PasswordCallback callback) throws BackgroundException { try { if(log.isDebugEnabled()) { log.debug(String.format("Create download share for %s", file)); } if(null == options) { options = new CreateDownloadShareRequest(); log.warn(String.format("Use default share options %s", options)); } final Long fileid = Long.parseLong(nodeid.getVersionId(file)); final Host bookmark = session.getHost(); if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(file)) { // get existing file key associated with the sharing user final FileKey key = new NodesApi(session.getClient()).requestUserFileKey(fileid, null, null); final EncryptedFileKey encFileKey = TripleCryptConverter.toCryptoEncryptedFileKey(key); final UserKeyPairContainer keyPairContainer = session.getKeyPairForFileKey(encFileKey.getVersion()); final UserKeyPair userKeyPair = TripleCryptConverter.toCryptoUserKeyPair(keyPairContainer); final Credentials passphrase = new TripleCryptKeyPair().unlock(callback, bookmark, userKeyPair); final PlainFileKey plainFileKey = Crypto.decryptFileKey(encFileKey, userKeyPair.getUserPrivateKey(), passphrase.getPassword().toCharArray()); // encrypt file key with a new key pair final UserKeyPair pair; if(null == options.getPassword()) { pair = Crypto.generateUserKeyPair(session.requiredKeyPairVersion(), callback.prompt( bookmark, LocaleFactory.localizedString("Passphrase", "Cryptomator"), LocaleFactory.localizedString("Provide additional login credentials", "Credentials"), new LoginOptions().icon(session.getHost().getProtocol().disk()) ).getPassword().toCharArray()); } else { pair = Crypto.generateUserKeyPair(session.requiredKeyPairVersion(), options.getPassword().toCharArray()); } final EncryptedFileKey encryptedFileKey = Crypto.encryptFileKey(plainFileKey, pair.getUserPublicKey()); options.setPassword(null); options.setKeyPair(TripleCryptConverter.toSwaggerUserKeyPairContainer(pair)); options.setFileKey(TripleCryptConverter.toSwaggerFileKey(encryptedFileKey)); } final DownloadShare share = new SharesApi(session.getClient()).createDownloadShare( options.nodeId(fileid), StringUtils.EMPTY, null); final String help; if(null == share.getExpireAt()) { help = MessageFormat.format(LocaleFactory.localizedString("{0} URL"), LocaleFactory.localizedString("Pre-Signed", "S3")); } else { final long expiry = share.getExpireAt().getMillis(); help = MessageFormat.format(LocaleFactory.localizedString("{0} URL"), LocaleFactory.localizedString("Pre-Signed", "S3")) + " (" + MessageFormat.format(LocaleFactory.localizedString("Expires {0}", "S3") + ")", UserDateFormatterFactory.get().getShortFormat(expiry * 1000) ); } final Matcher matcher = Pattern.compile(SDSSession.VERSION_REGEX).matcher(session.softwareVersion().getRestApiVersion()); if(matcher.matches()) { if(new Version(matcher.group(1)).compareTo(new Version("4.26")) < 0) { return new DescriptiveUrl(URI.create(String.format("%s://%s/#/public/shares-downloads/%s", bookmark.getProtocol().getScheme(), bookmark.getHostname(), share.getAccessKey())), DescriptiveUrl.Type.signed, help); } } return new DescriptiveUrl(URI.create(String.format("%s://%s/public/download-shares/%s", bookmark.getProtocol().getScheme(), bookmark.getHostname(), share.getAccessKey())), DescriptiveUrl.Type.signed, help); } catch(ApiException e) { throw new SDSExceptionMappingService(nodeid).map(e); } catch(CryptoException e) { throw new TripleCryptExceptionMappingService().map(e); } }
@Test(expected = InteroperabilityException.class) public void testToUrlInvalidEmail() throws Exception { final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session); final Path room = new SDSDirectoryFeature(session, nodeid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path test = new SDSTouchFeature(session, nodeid).touch(new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); try { final DescriptiveUrl url = new SDSShareFeature(session, nodeid).toDownloadUrl(test, Share.Sharee.world, new CreateDownloadShareRequest() .expiration(new ObjectExpiration().enableExpiration(false)) .notifyCreator(false) .sendMail(true) .mailRecipients("a@b") .sendSms(false) .mailSubject(null) .mailBody(null) .maxDownloads(null), new DisabledPasswordCallback()); } finally { new SDSDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); } }
public MethodBuilder onreturnMethod(String onreturnMethod) { this.onreturnMethod = onreturnMethod; return getThis(); }
@Test void onreturnMethod() { MethodBuilder builder = MethodBuilder.newBuilder(); builder.onreturnMethod("on-return-method"); Assertions.assertEquals("on-return-method", builder.build().getOnreturnMethod()); }
@Override public <T extends State> T state(StateNamespace namespace, StateTag<T> address) { return workItemState.get(namespace, address, StateContexts.nullContext()); }
@Test public void testMultimapRemoveThenPut() { final String tag = "multimap"; StateTag<MultimapState<byte[], Integer>> addr = StateTags.multimap(tag, ByteArrayCoder.of(), VarIntCoder.of()); MultimapState<byte[], Integer> multimapState = underTest.state(NAMESPACE, addr); final byte[] key = "key".getBytes(StandardCharsets.UTF_8); SettableFuture<Iterable<Integer>> future = SettableFuture.create(); when(mockReader.multimapFetchSingleEntryFuture( encodeWithCoder(key, ByteArrayCoder.of()), key(NAMESPACE, tag), STATE_FAMILY, VarIntCoder.of())) .thenReturn(future); ReadableState<Iterable<Integer>> result = multimapState.get(key).readLater(); waitAndSet(future, Arrays.asList(1, 2, 3), 30); multimapState.remove(dup(key)); multimapState.put(key, 4); multimapState.put(dup(key), 5); assertThat(result.read(), Matchers.containsInAnyOrder(4, 5)); }
@Override public String resolve(Method method, Object[] arguments, String spelExpression) { if (StringUtils.isEmpty(spelExpression)) { return spelExpression; } if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) { return stringValueResolver.resolveStringValue(spelExpression); } if (spelExpression.matches(METHOD_SPEL_REGEX)) { SpelRootObject rootObject = new SpelRootObject(method, arguments); MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer); Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext); return (String) evaluated; } if (spelExpression.matches(BEAN_SPEL_REGEX)) { SpelRootObject rootObject = new SpelRootObject(method, arguments); MethodBasedEvaluationContext evaluationContext = new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer); evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory)); Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext); return (String) evaluated; } return spelExpression; }
@Test public void testRootMethodName() throws Exception { String testExpression = "#root.methodName"; DefaultSpelResolverTest target = new DefaultSpelResolverTest(); Method testMethod = target.getClass().getMethod("testMethod", String.class); String result = sut.resolve(testMethod, new Object[]{}, testExpression); assertThat(result).isEqualTo("testMethod"); }
@Override public void configure(String encodedAuthParamString) { if (StringUtils.isBlank(encodedAuthParamString)) { throw new IllegalArgumentException("No authentication parameters were provided"); } Map<String, String> params; try { params = AuthenticationUtil.configureFromJsonString(encodedAuthParamString); } catch (IOException e) { throw new IllegalArgumentException("Malformed authentication parameters", e); } String type = params.getOrDefault(CONFIG_PARAM_TYPE, TYPE_CLIENT_CREDENTIALS); switch(type) { case TYPE_CLIENT_CREDENTIALS: this.flow = ClientCredentialsFlow.fromParameters(params); break; default: throw new IllegalArgumentException("Unsupported authentication type: " + type); } }
@Test public void testConfigure() throws Exception { Map<String, String> params = new HashMap<>(); params.put("type", "client_credentials"); params.put("privateKey", "data:base64,e30="); params.put("issuerUrl", "http://localhost"); params.put("audience", "http://localhost"); params.put("scope", "http://localhost"); ObjectMapper mapper = new ObjectMapper(); String authParams = mapper.writeValueAsString(params); this.auth.configure(authParams); assertNotNull(this.auth.flow); }
public static Object applyLogicalType(Schema.Field field, Object value) { if (field == null || field.schema() == null) { return value; } Schema fieldSchema = resolveUnionSchema(field.schema()); return applySchemaTypeLogic(fieldSchema, value); }
@Test public void testApplyLogicalTypeReturnsConvertedValueWhenConversionForLogicalTypeIsKnown() { String value = "d7738003-1472-4f63-b0f1-b5e69c8b93e9"; String schemaString = new StringBuilder().append("{").append(" \"type\": \"record\",").append(" \"name\": \"test\",") .append(" \"fields\": [{").append(" \"name\": \"column1\",").append(" \"type\": {") .append(" \"type\": \"string\",").append(" \"logicalType\": \"uuid\"").append(" }") .append(" }]").append("}").toString(); Schema schema = new Schema.Parser().parse(schemaString); Object result = AvroSchemaUtil.applyLogicalType(schema.getField("column1"), value); Assert.assertTrue(result instanceof UUID); Assert.assertEquals(UUID.fromString(value), result); }
public static State toState(@Nullable String stateName) { if (stateName == null) { return State.UNRECOGNIZED; } switch (stateName) { case "JOB_STATE_UNKNOWN": return State.UNKNOWN; case "JOB_STATE_STOPPED": return State.STOPPED; case "JOB_STATE_FAILED": return State.FAILED; case "JOB_STATE_CANCELLED": return State.CANCELLED; case "JOB_STATE_UPDATED": return State.UPDATED; case "JOB_STATE_RUNNING": case "JOB_STATE_PENDING": // Job has not yet started; closest mapping is RUNNING case "JOB_STATE_DRAINING": // Job is still active; the closest mapping is RUNNING case "JOB_STATE_CANCELLING": // Job is still active; the closest mapping is RUNNING case "JOB_STATE_RESOURCE_CLEANING_UP": // Job is still active; the closest mapping is RUNNING return State.RUNNING; case "JOB_STATE_DONE": case "JOB_STATE_DRAINED": // Job has successfully terminated; closest mapping is DONE return State.DONE; default: LOG.warn( "Unrecognized state from Dataflow service: {}." + " This is likely due to using an older version of Beam.", stateName); return State.UNRECOGNIZED; } }
@Test public void testToStateWithOtherValueReturnsUnknown() { assertEquals(State.UNRECOGNIZED, MonitoringUtil.toState("FOO_BAR_BAZ")); }
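A mapping sketch grounded in the switch above: the still-active service states collapse to RUNNING, and both terminal-success states collapse to DONE: assertEquals(State.RUNNING, MonitoringUtil.toState("JOB_STATE_DRAINING")); assertEquals(State.DONE, MonitoringUtil.toState("JOB_STATE_DRAINED")); assertEquals(State.UNRECOGNIZED, MonitoringUtil.toState(null)); // null short-circuits before the switch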
@Override public T deserialize(final String topic, final byte[] bytes) { try { if (bytes == null) { return null; } // don't use the JsonSchemaConverter to read this data because // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS, // which is not currently available in the standard converters final JsonNode value = isJsonSchema ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class) : MAPPER.readTree(bytes); final Object coerced = enforceFieldType( "$", new JsonValueContext(value, schema) ); if (LOG.isTraceEnabled()) { LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced); } return SerdeUtils.castToTargetType(coerced, targetType); } catch (final Exception e) { // Clear location in order to avoid logging data, for security reasons if (e instanceof JsonParseException) { ((JsonParseException) e).clearLocation(); } throw new SerializationException( "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e); } }
@Test public void shouldThrowIfCanNotCoerceMapValue() { // Given: final KsqlJsonDeserializer<Map> deserializer = givenDeserializerForSchema( SchemaBuilder .map(Schema.OPTIONAL_STRING_SCHEMA, Schema.INT32_SCHEMA) .build(), Map.class ); final byte[] bytes = serializeJson(ImmutableMap.of("a", 1, "b", true)); // When: final Exception e = assertThrows( SerializationException.class, () -> deserializer.deserialize(SOME_TOPIC, bytes) ); // Then: assertThat(e.getCause(), (hasMessage(startsWith( "Can't convert type. sourceType: BooleanNode, requiredType: INTEGER")))); }
public static Mode parse(String value) { if (StringUtils.isBlank(value)) { throw new IllegalArgumentException(ExceptionMessage.INVALID_MODE.getMessage(value)); } try { return parseNumeric(value); } catch (NumberFormatException e) { // Treat as symbolic return parseSymbolic(value); } }
@Test public void symbolicsCombined() { Mode parsed = ModeParser.parse("a=rwx"); assertEquals(Mode.Bits.ALL, parsed.getOwnerBits()); assertEquals(Mode.Bits.ALL, parsed.getGroupBits()); assertEquals(Mode.Bits.ALL, parsed.getOtherBits()); parsed = ModeParser.parse("ugo=rwx"); assertEquals(Mode.Bits.ALL, parsed.getOwnerBits()); assertEquals(Mode.Bits.ALL, parsed.getGroupBits()); assertEquals(Mode.Bits.ALL, parsed.getOtherBits()); parsed = ModeParser.parse("u=rwx,go=rx"); assertEquals(Mode.Bits.ALL, parsed.getOwnerBits()); assertEquals(Mode.Bits.READ_EXECUTE, parsed.getGroupBits()); assertEquals(Mode.Bits.READ_EXECUTE, parsed.getOtherBits()); parsed = ModeParser.parse("u=rw,go=r"); assertEquals(Mode.Bits.READ_WRITE, parsed.getOwnerBits()); assertEquals(Mode.Bits.READ, parsed.getGroupBits()); assertEquals(Mode.Bits.READ, parsed.getOtherBits()); }
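The numeric branch is tried before the symbolic one; assuming parseNumeric follows the usual octal convention, a hedged sketch: Mode parsed = ModeParser.parse("755"); // expected: owner Mode.Bits.ALL, group Mode.Bits.READ_EXECUTE, other Mode.Bits.READ_EXECUTE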
public static <T extends Throwable> void checkNotEmpty(final String value, final Supplier<T> exceptionSupplierIfUnexpected) throws T { if (Strings.isNullOrEmpty(value)) { throw exceptionSupplierIfUnexpected.get(); } }
@Test void assertCheckNotEmptyWithCollectionToNotThrowException() { assertDoesNotThrow(() -> ShardingSpherePreconditions.checkNotEmpty(Collections.singleton("foo"), SQLException::new)); }
@Override public Optional<Track<T>> clean(Track<T> track) { TreeSet<Point<T>> points = new TreeSet<>(track.points()); Optional<Point<T>> firstNonNull = firstPointWithAltitude(points); if (!firstNonNull.isPresent()) { return Optional.empty(); } SortedSet<Point<T>> pointsMissingAltitude = points.headSet(firstNonNull.get()); TreeSet<Point<T>> fixedPoints = extrapolateAltitudes(pointsMissingAltitude, firstNonNull.get()); pointsMissingAltitude.clear(); points.addAll(fixedPoints); Optional<Point<T>> gapStart; Optional<Point<T>> gapEnd = firstNonNull; while (gapEnd.isPresent()) { gapStart = firstPointWithoutAltitude(points.tailSet(gapEnd.get())); if (!gapStart.isPresent()) { break; } gapEnd = firstPointWithAltitude(points.tailSet(gapStart.get())); if (!gapEnd.isPresent()) { pointsMissingAltitude = points.tailSet(gapStart.get()); fixedPoints = extrapolateAltitudes(pointsMissingAltitude, points.lower(gapStart.get())); pointsMissingAltitude.clear(); points.addAll(fixedPoints); // extrapolateAltitudes(points.tailSet(gapStart.get()), points.lower(gapStart.get())); } else { pointsMissingAltitude = points.subSet(gapStart.get(), gapEnd.get()); fixedPoints = interpolateAltitudes(pointsMissingAltitude, points.lower(gapStart.get()), gapEnd.get()); pointsMissingAltitude.clear(); points.addAll(fixedPoints); // interpolateAltitudes(points.subSet(gapStart.get(), gapEnd.get()), points.lower(gapStart.get()), gapEnd.get()); } } return Optional.of(Track.of(points)); }
@Test public void removeTracksWithNoAltitudes() { Track<NoRawData> testTrack = trackWithNoAltitudes(); Optional<Track<NoRawData>> cleanedTrack = (new FillMissingAltitudes<NoRawData>()).clean(testTrack); assertTrue(!cleanedTrack.isPresent(), "A track with no altitude data should be removed"); }
@Override public Path copy(final Path file, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { try { if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Delete file %s to be replaced with %s", target, file)); } new DeepboxTrashFeature(session, fileid).delete(Collections.singletonList(target), callback, new Delete.DisabledCallback()); } final NodeCopy nodeCopy = new NodeCopy(); nodeCopy.setTargetParentNodeId(fileid.getFileId(target.getParent())); final String nodeId = fileid.getFileId(file); // manually patched deepbox-api.json, return code 200 missing in theirs final Node copied = new CoreRestControllerApi(session.getClient()).copyNode(nodeCopy, nodeId); final NodeUpdate nodeUpdate = new NodeUpdate(); nodeUpdate.setName(target.getName()); new CoreRestControllerApi(session.getClient()).updateNode(nodeUpdate, copied.getNodeId()); listener.sent(status.getLength()); return target.withAttributes(new DeepboxAttributesFinderFeature(session, fileid).toAttributes(copied)); } catch(ApiException e) { throw new DeepboxExceptionMappingService(fileid).map("Cannot copy {0}", e, file); } }
@Test public void testCopyFile() throws Exception { final DeepboxIdProvider fileid = new DeepboxIdProvider(session); final Path documents = new Path("/ORG 4 - DeepBox Desktop App/ORG3:Box1/Documents/", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new Path(documents, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new DeepboxTouchFeature(session, fileid).touch(test, new TransferStatus()); final Path copy = new Path(documents, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new DeepboxCopyFeature(session, fileid).copy(test, copy, new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener()); try { assertTrue(new DeepboxFindFeature(session, fileid).find(test.withAttributes(new PathAttributes()))); assertTrue(new DeepboxFindFeature(session, fileid).find(copy.withAttributes(new PathAttributes()))); } finally { new DeepboxDeleteFeature(session, fileid).delete(Collections.singletonList(test.withAttributes(new PathAttributes())), new DisabledLoginCallback(), new Delete.DisabledCallback()); new DeepboxDeleteFeature(session, fileid).delete(Collections.singletonList(copy.withAttributes(new PathAttributes())), new DisabledLoginCallback(), new Delete.DisabledCallback()); } }
public String getPath() { return urlAddress == null ? null : urlAddress.getPath(); }
@Test void test_Path() throws Exception { URL url = new ServiceConfigURL("dubbo", "localhost", 20880, "////path"); assertURLStrDecoder(url); assertEquals("path", url.getPath()); }
Flux<DataEntityList> export(KafkaCluster cluster) { String clusterOddrn = Oddrn.clusterOddrn(cluster); Statistics stats = statisticsCache.get(cluster); return Flux.fromIterable(stats.getTopicDescriptions().keySet()) .filter(topicFilter) .flatMap(topic -> createTopicDataEntity(cluster, topic, stats)) .onErrorContinue( (th, topic) -> log.warn("Error exporting data for topic {}, cluster {}", topic, cluster.getName(), th)) .buffer(100) .map(topicsEntities -> new DataEntityList() .dataSourceOddrn(clusterOddrn) .items(topicsEntities)); }
@Test void doesExportTopicData() { when(schemaRegistryClientMock.getSubjectVersion("testTopic-value", "latest", false)) .thenReturn(Mono.just( new SchemaSubject() .schema("\"string\"") .schemaType(SchemaType.AVRO) )); when(schemaRegistryClientMock.getSubjectVersion("testTopic-key", "latest", false)) .thenReturn(Mono.just( new SchemaSubject() .schema("\"int\"") .schemaType(SchemaType.AVRO) )); stats = Statistics.empty() .toBuilder() .topicDescriptions( Map.of( "testTopic", new TopicDescription( "testTopic", false, List.of( new TopicPartitionInfo( 0, null, List.of( new Node(1, "host1", 9092), new Node(2, "host2", 9092) ), List.of()) )) ) ) .topicConfigs( Map.of( "testTopic", List.of( new ConfigEntry( "custom.config", "100500", ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG, false, false, List.of(), ConfigEntry.ConfigType.INT, null ) ) ) ) .build(); StepVerifier.create(topicsExporter.export(cluster)) .assertNext(entityList -> { assertThat(entityList.getItems()) .hasSize(1); DataEntity topicEntity = entityList.getItems().get(0); assertThat(topicEntity.getName()).isNotEmpty(); assertThat(topicEntity.getOddrn()) .isEqualTo("//kafka/cluster/localhost:19092,localhost:9092/topics/testTopic"); assertThat(topicEntity.getType()).isEqualTo(DataEntityType.KAFKA_TOPIC); assertThat(topicEntity.getMetadata()) .hasSize(1) .singleElement() .satisfies(e -> assertThat(e.getMetadata()) .containsExactlyInAnyOrderEntriesOf( Map.of( "partitions", 1, "replication_factor", 2, "custom.config", "100500"))); assertThat(topicEntity.getDataset()).isNotNull(); assertThat(topicEntity.getDataset().getFieldList()) .hasSize(4); // 2 field for key, 2 for value }) .verifyComplete(); }
void parse() throws IOException, DefParserException { root = new InnerCNode(name); normalizedDefinition = new NormalizedDefinition(); String s; List<String> originalInput = new ArrayList<>(); while ((s = reader.readLine()) != null) { originalInput.add(s); } reader.close(); // Parse and build tree of the original input parseLines(root, originalInput, normalizedDefinition); root.setMd5(normalizedDefinition.generateMd5Sum()); }
@Test void duplicate_parameter_is_illegal() { Class<?> exceptionClass = DefParser.DefParserException.class; StringBuilder sb = createDefTemplate(); String duplicateLine = "b int\n"; sb.append(duplicateLine); sb.append(duplicateLine); try { createParser(sb.toString()).parse(); fail("Didn't find expected exception of type " + exceptionClass); } catch (Exception e) { assertExceptionAndMessage(e, exceptionClass, "Error when parsing line 3: " + duplicateLine + "b is already defined"); } }