focal_method: string, lengths 13 to 60.9k
test_case: string, lengths 25 to 109k
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException { StopWatch sw = new StopWatch().start(); FileStatus[] stats = listStatus(job); // Save the number of input files for metrics/loadgen job.setLong(NUM_INPUT_FILES, stats.length); long totalSize = 0; // compute total size boolean ignoreDirs = !job.getBoolean(INPUT_DIR_RECURSIVE, false) && job.getBoolean(INPUT_DIR_NONRECURSIVE_IGNORE_SUBDIRS, false); List<FileStatus> files = new ArrayList<>(stats.length); for (FileStatus file: stats) { // check we have valid files if (file.isDirectory()) { if (!ignoreDirs) { throw new IOException("Not a file: "+ file.getPath()); } } else { files.add(file); totalSize += file.getLen(); } } long goalSize = totalSize / (numSplits == 0 ? 1 : numSplits); long minSize = Math.max(job.getLong(org.apache.hadoop.mapreduce.lib.input. FileInputFormat.SPLIT_MINSIZE, 1), minSplitSize); // generate splits ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits); NetworkTopology clusterMap = new NetworkTopology(); for (FileStatus file: files) { Path path = file.getPath(); long length = file.getLen(); if (length != 0) { FileSystem fs = path.getFileSystem(job); BlockLocation[] blkLocations; if (file instanceof LocatedFileStatus) { blkLocations = ((LocatedFileStatus) file).getBlockLocations(); } else { blkLocations = fs.getFileBlockLocations(file, 0, length); } if (isSplitable(fs, path)) { long blockSize = file.getBlockSize(); long splitSize = computeSplitSize(goalSize, minSize, blockSize); long bytesRemaining = length; while (((double) bytesRemaining)/splitSize > SPLIT_SLOP) { String[][] splitHosts = getSplitHostsAndCachedHosts(blkLocations, length-bytesRemaining, splitSize, clusterMap); splits.add(makeSplit(path, length-bytesRemaining, splitSize, splitHosts[0], splitHosts[1])); bytesRemaining -= splitSize; } if (bytesRemaining != 0) { String[][] splitHosts = getSplitHostsAndCachedHosts(blkLocations, length - bytesRemaining, bytesRemaining, clusterMap); splits.add(makeSplit(path, length - bytesRemaining, bytesRemaining, splitHosts[0], splitHosts[1])); } } else { if (LOG.isDebugEnabled()) { // Log only if the file is big enough to be split if (length > Math.min(file.getBlockSize(), minSize)) { LOG.debug("File is not splittable so no parallelization " + "is possible: " + file.getPath()); } } String[][] splitHosts = getSplitHostsAndCachedHosts(blkLocations,0,length,clusterMap); splits.add(makeSplit(path, 0, length, splitHosts[0], splitHosts[1])); } } else { // Create empty hosts array for zero length files splits.add(makeSplit(path, 0, length, new String[0])); } } sw.stop(); if (LOG.isDebugEnabled()) { LOG.debug("Total # of splits generated by getSplits: " + splits.size() + ", TimeTaken: " + sw.now(TimeUnit.MILLISECONDS)); } return splits.toArray(new FileSplit[splits.size()]); }
@Test public void testSplitLocationInfo() throws Exception { Configuration conf = getConfiguration(); conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR, "test:///a1/a2"); JobConf job = new JobConf(conf); TextInputFormat fileInputFormat = new TextInputFormat(); fileInputFormat.configure(job); FileSplit[] splits = (FileSplit[]) fileInputFormat.getSplits(job, 1); String[] locations = splits[0].getLocations(); Assert.assertEquals(2, locations.length); SplitLocationInfo[] locationInfo = splits[0].getLocationInfo(); Assert.assertEquals(2, locationInfo.length); SplitLocationInfo localhostInfo = locations[0].equals("localhost") ? locationInfo[0] : locationInfo[1]; SplitLocationInfo otherhostInfo = locations[0].equals("otherhost") ? locationInfo[0] : locationInfo[1]; Assert.assertTrue(localhostInfo.isOnDisk()); Assert.assertTrue(localhostInfo.isInMemory()); Assert.assertTrue(otherhostInfo.isOnDisk()); Assert.assertFalse(otherhostInfo.isInMemory()); }
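The split sizing above delegates to computeSplitSize, which is not shown here. A minimal sketch, assuming the conventional Hadoop FileInputFormat semantics of clamping the per-split goal between the configured minimum and the file's block size:

    // Assumed helper: goalSize = totalSize / numSplits; the result is capped at the
    // block size and floored at the configured minimum split size.
    static long computeSplitSize(long goalSize, long minSize, long blockSize) {
        return Math.max(minSize, Math.min(goalSize, blockSize));
    }

Under that rule the while loop emits full-sized splits until the remainder falls within SPLIT_SLOP (conventionally 1.1) of a single split, and the following if block flushes that remainder as the final split.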
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
@Test public void setMyName() { retry.setEnabled(false); BaseResponse response = bot.execute(new SetMyName().name("name").languageCode("en")); if (!response.isOk()) { assertEquals(429, response.errorCode()); assertTrue(response.description().startsWith("Too Many Requests: retry after")); } retry.setEnabled(true); GetMyNameResponse nameResponse = bot.execute(new GetMyName().languageCode("en")); assertTrue(nameResponse.isOk()); assertEquals("name", nameResponse.botName().name()); }
@Override public int getGroupKeyLength() { return _groupByColumns.size(); }
@Test public void testGetGroupKeyLength() { // Run the test final int result = _groupByResultSetUnderTest.getGroupKeyLength(); // Verify the results assertEquals(1, result); }
public static Map<String, Object> compare(byte[] baselineImg, byte[] latestImg, Map<String, Object> options, Map<String, Object> defaultOptions) throws MismatchException { boolean allowScaling = toBool(defaultOptions.get("allowScaling")); ImageComparison imageComparison = new ImageComparison(baselineImg, latestImg, options, allowScaling); imageComparison.configure(defaultOptions); if (imageComparison.baselineMissing) { imageComparison.result.put("isBaselineMissing", true); throw new MismatchException("baseline image was empty or not found", imageComparison.result); } if (imageComparison.scaleMismatch) { imageComparison.result.put("isScaleMismatch", true); throw new MismatchException("latest image dimensions != baseline image dimensions", imageComparison.result); } double mismatchPercentage = 100.0; for (String engine : imageComparison.engines) { double currentMismatchPercentage; switch (engine) { case RESEMBLE: currentMismatchPercentage = imageComparison.execResemble(); break; case SSIM: currentMismatchPercentage = imageComparison.execSSIM(); break; default: logger.error("skipping unsupported image comparison engine: {}", engine); continue; } if (currentMismatchPercentage <= mismatchPercentage) { mismatchPercentage = currentMismatchPercentage; } if (mismatchPercentage < imageComparison.stopWhenMismatchIsLessThan) { break; } } return imageComparison.checkMismatch(mismatchPercentage); }
@Test void testMissingBaseline() { ImageComparison.MismatchException exception = assertThrows(ImageComparison.MismatchException.class, () -> ImageComparison.compare(null, R_1x1_IMG, opts(), opts())); assertTrue(exception.getMessage().contains("baseline image was empty or not found")); assertEquals(Boolean.TRUE, exception.data.get("isBaselineMissing")); }
public int toBaseUnitsRounded() { return (int) (toBaseUnits() + 0.5d); }
@Test public void calculateRoundedUpValueInBaseUnits() throws Exception { Quantity<Metrics> quantity = new Quantity<Metrics>(51, Metrics.cm); assertThat(quantity.toBaseUnitsRounded()).isEqualTo(1); }
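toBaseUnitsRounded rounds half-up by adding 0.5 before the int cast. A worked trace for the test above, assuming Metrics.cm converts to the base unit by dividing by 100:

    double base = 51 / 100.0;          // toBaseUnits() == 0.51
    int rounded = (int) (base + 0.5);  // (int) 1.01 == 1, the asserted value

Note the add-0.5-then-truncate idiom only behaves as half-up rounding for non-negative values; a negative quantity would be truncated toward zero instead.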
@Override public Processor createPostProcessor(Exchange exchange, DynamicAwareEntry entry) { Processor postProcessor = null; if (DynamicRouterControlConstants.SHOULD_OPTIMIZE.test(entry.getUri())) { postProcessor = ex -> { Message message = exchange.getMessage(); DynamicRouterControlConstants.URI_PARAMS_TO_HEADER_NAMES.values().forEach(message::removeHeader); }; } return postProcessor; }
@Test void createPostProcessor() throws Exception { Mockito.when(exchange.getMessage()).thenReturn(message); Mockito.when(message.removeHeader(any())).thenReturn("test"); String originalUri = "dynamic-router-control:subscribe?subscriptionId=testSub1"; String uri = "dynamic-router-control://subscribe?subscriptionId=testSub1"; try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) { SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri); Processor postProcessor = testSubject.createPostProcessor(exchange, entry); postProcessor.process(exchange); } Mockito.verify(message, Mockito.times(URI_PARAMS_TO_HEADER_NAMES.size())).removeHeader(any()); }
private Mono<ServerResponse> search(ServerRequest request) { return Mono.fromSupplier( () -> new SearchParam(request.queryParams())) .map(param -> { var option = new SearchOption(); option.setIncludeTypes(List.of(PostHaloDocumentsProvider.POST_DOCUMENT_TYPE)); option.setKeyword(param.getKeyword()); option.setLimit(param.getLimit()); option.setHighlightPreTag(param.getHighlightPreTag()); option.setHighlightPostTag(param.getHighlightPostTag()); return option; }) .flatMap(this::performSearch) .flatMap(result -> ServerResponse.ok().bodyValue(result)); }
@Test void shouldFailWhenSearchEngineIsUnavailable() { when(searchService.search(any(SearchOption.class))) .thenReturn(Mono.error(new SearchEngineUnavailableException())); client.post().uri("/indices/-/search") .bodyValue(new SearchOption()) .exchange() .expectStatus().is4xxClientError(); }
public static Expression convert(Filter[] filters) { Expression expression = Expressions.alwaysTrue(); for (Filter filter : filters) { Expression converted = convert(filter); Preconditions.checkArgument( converted != null, "Cannot convert filter to Iceberg: %s", filter); expression = Expressions.and(expression, converted); } return expression; }
@Test public void testNestedInInsideNot() { Not filter = Not.apply(And.apply(EqualTo.apply("col1", 1), In.apply("col2", new Integer[] {1, 2}))); Expression converted = SparkFilters.convert(filter); Assert.assertNull("Expression should not be converted", converted); }
public static Expression compile(String expression, String... variableNames) throws ExpressionException { return new Expression(expression, variableNames); }
@Test public void testErrors() { // test lexer errors { ExpressionException e = assertThrows(ExpressionException.class, () -> compile("#")); assertEquals(0, e.getPosition(), "Error position"); } // test parser errors { ExpressionException e = assertThrows(ExpressionException.class, () -> compile("x")); assertEquals(0, e.getPosition(), "Error position"); } { ExpressionException e = assertThrows(ExpressionException.class, () -> compile("x()")); assertEquals(0, e.getPosition(), "Error position"); } { // verify that you must return a value ExpressionException e = assertThrows(ExpressionException.class, () -> compile("return")); assertEquals(6, e.getPosition(), "Error position"); } assertThrows(ExpressionException.class, () -> compile("(")); assertThrows(ExpressionException.class, () -> compile("x(")); // test overloader errors { ExpressionException e = assertThrows(ExpressionException.class, () -> compile("atan2(1)")); assertEquals(0, e.getPosition(), "Error position"); } { ExpressionException e = assertThrows(ExpressionException.class, () -> compile("atan2(1, 2, 3)")); assertEquals(0, e.getPosition(), "Error position"); } { ExpressionException e = assertThrows(ExpressionException.class, () -> compile("rotate(1, 2, 3)")); assertEquals(7, e.getPosition(), "Error position"); } }
public static String encode(String raw) { return new BCryptPasswordEncoder().encode(raw); }
@Test void encode() { String str = PasswordEncoderUtil.encode("nacos"); String str2 = PasswordEncoderUtil.encode("nacos"); assertNotEquals(str2, str); }
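The two hashes differ because BCryptPasswordEncoder salts each encoding with a fresh random value embedded in the output string, so hash equality can never be used for verification. A minimal illustration of the intended check:

    // Different calls yield different hashes, yet both verify the same raw password.
    BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
    String h1 = encoder.encode("nacos");
    String h2 = encoder.encode("nacos");
    boolean distinct = !h1.equals(h2);                   // true: different salts
    boolean bothMatch = encoder.matches("nacos", h1)
            && encoder.matches("nacos", h2);             // true: both verify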
public JobConf() { checkAndWarnDeprecation(); }
@SuppressWarnings("deprecation") @Test (timeout=5000) public void testJobConf() { JobConf conf = new JobConf(); // test default value Pattern pattern = conf.getJarUnpackPattern(); assertEquals(Pattern.compile("(?:classes/|lib/).*").toString(), pattern.toString()); // default value assertFalse(conf.getKeepFailedTaskFiles()); conf.setKeepFailedTaskFiles(true); assertTrue(conf.getKeepFailedTaskFiles()); // default value assertNull(conf.getKeepTaskFilesPattern()); conf.setKeepTaskFilesPattern("123454"); assertEquals("123454", conf.getKeepTaskFilesPattern()); // default value assertNotNull(conf.getWorkingDirectory()); conf.setWorkingDirectory(new Path("test")); assertTrue(conf.getWorkingDirectory().toString().endsWith("test")); // default value assertEquals(1, conf.getNumTasksToExecutePerJvm()); // default value assertNull(conf.getKeyFieldComparatorOption()); conf.setKeyFieldComparatorOptions("keySpec"); assertEquals("keySpec", conf.getKeyFieldComparatorOption()); // default value assertFalse(conf.getUseNewReducer()); conf.setUseNewReducer(true); assertTrue(conf.getUseNewReducer()); // default assertTrue(conf.getMapSpeculativeExecution()); assertTrue(conf.getReduceSpeculativeExecution()); assertTrue(conf.getSpeculativeExecution()); conf.setReduceSpeculativeExecution(false); assertTrue(conf.getSpeculativeExecution()); conf.setMapSpeculativeExecution(false); assertFalse(conf.getSpeculativeExecution()); assertFalse(conf.getMapSpeculativeExecution()); assertFalse(conf.getReduceSpeculativeExecution()); conf.setSessionId("ses"); assertEquals("ses", conf.getSessionId()); assertEquals(3, conf.getMaxTaskFailuresPerTracker()); conf.setMaxTaskFailuresPerTracker(2); assertEquals(2, conf.getMaxTaskFailuresPerTracker()); assertEquals(0, conf.getMaxMapTaskFailuresPercent()); conf.setMaxMapTaskFailuresPercent(50); assertEquals(50, conf.getMaxMapTaskFailuresPercent()); assertEquals(0, conf.getMaxReduceTaskFailuresPercent()); conf.setMaxReduceTaskFailuresPercent(70); assertEquals(70, conf.getMaxReduceTaskFailuresPercent()); // by default assertThat(conf.getJobPriority()).isEqualTo(JobPriority.DEFAULT); conf.setJobPriority(JobPriority.HIGH); assertThat(conf.getJobPriority()).isEqualTo(JobPriority.HIGH); assertNull(conf.getJobSubmitHostName()); conf.setJobSubmitHostName("hostname"); assertEquals("hostname", conf.getJobSubmitHostName()); // default assertNull(conf.getJobSubmitHostAddress()); conf.setJobSubmitHostAddress("ww"); assertEquals("ww", conf.getJobSubmitHostAddress()); // default value assertFalse(conf.getProfileEnabled()); conf.setProfileEnabled(true); assertTrue(conf.getProfileEnabled()); // default value assertEquals(conf.getProfileTaskRange(true).toString(), "0-2"); assertEquals(conf.getProfileTaskRange(false).toString(), "0-2"); conf.setProfileTaskRange(true, "0-3"); assertEquals(conf.getProfileTaskRange(false).toString(), "0-2"); assertEquals(conf.getProfileTaskRange(true).toString(), "0-3"); // default value assertNull(conf.getMapDebugScript()); conf.setMapDebugScript("mDbgScript"); assertEquals("mDbgScript", conf.getMapDebugScript()); // default value assertNull(conf.getReduceDebugScript()); conf.setReduceDebugScript("rDbgScript"); assertEquals("rDbgScript", conf.getReduceDebugScript()); // default value assertNull(conf.getJobLocalDir()); assertEquals("default", conf.getQueueName()); conf.setQueueName("qname"); assertEquals("qname", conf.getQueueName()); conf.setMemoryForMapTask(100 * 1000); assertEquals(100 * 1000, conf.getMemoryForMapTask()); conf.setMemoryForReduceTask(1000 * 1000); 
assertEquals(1000 * 1000, conf.getMemoryForReduceTask()); assertEquals(-1, conf.getMaxPhysicalMemoryForTask()); assertEquals("The variable key is no longer used.", JobConf.deprecatedString("key")); // make sure mapreduce.map|reduce.java.opts are not set by default // so that they won't override mapred.child.java.opts assertNull("mapreduce.map.java.opts should not be set by default", conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS)); assertNull("mapreduce.reduce.java.opts should not be set by default", conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS)); }
public static NotFoundException appNotFound(String appId) { return new NotFoundException("app not found for appId:%s", appId); }
@Test public void testAppNotFoundException() { NotFoundException exception = NotFoundException.appNotFound(appId); assertEquals(exception.getMessage(), "app not found for appId:app-1001"); }
public synchronized ImmutableList<Struct> readTableRecords(String tableId, String... columnNames) throws IllegalStateException { return readTableRecords(tableId, ImmutableList.copyOf(columnNames)); }
@Test public void testReadRecordsShouldThrowExceptionWhenCalledBeforeExecuteDdlStatement() { ImmutableList<String> columnNames = ImmutableList.of("SingerId"); assertThrows( IllegalStateException.class, () -> testManager.readTableRecords("Singers", columnNames)); assertThrows( IllegalStateException.class, () -> testManager.readTableRecords("Singers", "SingerId")); }
@Override public DropStatement withoutDeleteClause() { return new DropTable(getLocation(), getName(), getIfExists(), false); }
@Test public void shouldCopyWithoutDeleteTopic() { // Given: final DropTable table = new DropTable(SOME_NAME, true, true); // When: final DropTable result = (DropTable) table.withoutDeleteClause(); // Then: assertThat(result, is(new DropTable(SOME_NAME, true, false))); }
public String extractVersion(String rawXml) { Matcher m = p.matcher(rawXml); if (m.find()) { return m.group(1); } throw new IllegalArgumentException("Impossible to extract version from the file"); }
@Test public void extractVersionWhenXmlPrologIsPresent() { String version = instance.extractVersion("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<ScenarioSimulationModel version=\"1.1\">"); assertThat(version).isEqualTo("1.1"); }
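The Pattern p is initialized outside this snippet. A hypothetical definition consistent with the test above, which requires the ScenarioSimulationModel element's version attribute to win over the version="1.0" in the XML prolog, could anchor on the element name:

    // Hypothetical pattern, for illustration only; the real field may differ.
    private final Pattern p =
            Pattern.compile("<ScenarioSimulationModel version=\"([^\"]+)\"");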
static String escapeAndJoin(List<String> parts) { return parts.stream() .map(ZetaSqlIdUtils::escapeSpecialChars) .map(ZetaSqlIdUtils::replaceWhitespaces) .map(ZetaSqlIdUtils::backtickIfNeeded) .collect(joining(".")); }
@Test public void testHandlesMixedIds() { List<String> id = Arrays.asList("aaa", "Bb---B", "zAzzz00"); assertEquals("aaa.`Bb---B`.zAzzz00", ZetaSqlIdUtils.escapeAndJoin(id)); }
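escapeSpecialChars, replaceWhitespaces, and backtickIfNeeded are defined elsewhere. A sketch of the quoting decision alone, under the assumption that word-shaped identifiers stay bare and anything else is backtick-quoted, which reproduces the expected output in the test above ("aaa" and "zAzzz00" bare, "Bb---B" quoted):

    // Assumed rule: quote unless the identifier matches a plain word pattern.
    static String backtickIfNeeded(String id) {
        return id.matches("[A-Za-z_][A-Za-z_0-9]*") ? id : "`" + id + "`";
    }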
@Override public Processor<K, Change<V>, KO, SubscriptionWrapper<K>> get() { return new UnbindChangeProcessor(); }
@Test public void innerJoinShouldPropagateNewPrimaryKey() { final MockInternalNewProcessorContext<String, SubscriptionWrapper<String>> context = new MockInternalNewProcessorContext<>(); innerJoinProcessor.init(context); context.setRecordMetadata("topic", 0, 0); final LeftValue leftRecordValue = new LeftValue(fk1); innerJoinProcessor.process(new Record<>(pk, new Change<>(leftRecordValue, null), 0)); assertThat(context.forwarded().size(), is(1)); assertThat( context.forwarded().get(0).record(), is(new Record<>(fk1, new SubscriptionWrapper<>(hash(leftRecordValue), PROPAGATE_ONLY_IF_FK_VAL_AVAILABLE, pk, 0), 0)) ); }
public static DispatcherRunner create( LeaderElection leaderElection, FatalErrorHandler fatalErrorHandler, DispatcherLeaderProcessFactory dispatcherLeaderProcessFactory) throws Exception { final DefaultDispatcherRunner dispatcherRunner = new DefaultDispatcherRunner( leaderElection, fatalErrorHandler, dispatcherLeaderProcessFactory); dispatcherRunner.start(); return dispatcherRunner; }
@Test public void grantLeadership_withExistingLeader_waitsForTerminationOfFirstLeader() throws Exception { final UUID firstLeaderSessionId = UUID.randomUUID(); final UUID secondLeaderSessionId = UUID.randomUUID(); final StartStopDispatcherLeaderProcess firstTestingDispatcherLeaderProcess = StartStopDispatcherLeaderProcess.create(firstLeaderSessionId); final StartStopDispatcherLeaderProcess secondTestingDispatcherLeaderProcess = StartStopDispatcherLeaderProcess.create(secondLeaderSessionId); testingDispatcherLeaderProcessFactory = TestingDispatcherLeaderProcessFactory.from( firstTestingDispatcherLeaderProcess.asTestingDispatcherLeaderProcess(), secondTestingDispatcherLeaderProcess.asTestingDispatcherLeaderProcess()); try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) { leaderElection.isLeader(firstLeaderSessionId); assertThat(firstTestingDispatcherLeaderProcess.isStarted(), is(true)); leaderElection.isLeader(secondLeaderSessionId); assertThat(secondTestingDispatcherLeaderProcess.isStarted(), is(false)); firstTestingDispatcherLeaderProcess.terminateProcess(); assertThat(secondTestingDispatcherLeaderProcess.isStarted(), is(true)); secondTestingDispatcherLeaderProcess .terminateProcess(); // make the dispatcherRunner terminate } }
@Override public void verify(X509Certificate certificate, Date date) { logger.debug("Verifying {} issued by {}", certificate.getSubjectX500Principal(), certificate.getIssuerX500Principal()); // Create trustAnchors final Set<TrustAnchor> trustAnchors = getTrusted().stream().map( c -> new TrustAnchor(c, null) ).collect(Collectors.toSet()); if (trustAnchors.isEmpty()) { throw new VerificationException("No trust anchors available"); } // Create the selector that specifies the starting certificate final X509CertSelector selector = new X509CertSelector(); selector.setCertificate(certificate); // Configure the PKIX certificate builder algorithm parameters try { final PKIXBuilderParameters pkixParams = new PKIXBuilderParameters(trustAnchors, selector); // Set assume date if (date != null) { pkixParams.setDate(date); } // Add cert store with certificate to check pkixParams.addCertStore(CertStore.getInstance( "Collection", new CollectionCertStoreParameters(ImmutableList.of(certificate)), "BC")); // Add cert store with intermediates pkixParams.addCertStore(CertStore.getInstance( "Collection", new CollectionCertStoreParameters(getIntermediates()), "BC")); // Add cert store with CRLs pkixParams.addCertStore(CertStore.getInstance( "Collection", new CollectionCertStoreParameters(getCRLs()), "BC")); // Toggle to check revocation list pkixParams.setRevocationEnabled(checkRevocation()); // Build and verify the certification chain final CertPathBuilder builder = CertPathBuilder.getInstance("PKIX", "BC"); builder.build(pkixParams); } catch (CertPathBuilderException e) { throw new VerificationException( String.format("Invalid certificate %s issued by %s", certificate.getSubjectX500Principal(), certificate.getIssuerX500Principal() ), e ); } catch (GeneralSecurityException e) { throw new CryptoException( String.format("Could not verify certificate %s issued by %s", certificate.getSubjectX500Principal(), certificate.getIssuerX500Principal() ), e ); } }
@Test public void shouldThrowExceptionIfIntermediateIsMissing() { thrown.expect(VerificationException.class); thrown.expectMessage("Invalid certificate"); createCertificateService( new String[] { "root.crt" }, new String[0], new String[0], false ).verify(readCert("normal.crt")); }
public static DisruptContext error(long latency) { if (latency < 0) { throw new IllegalArgumentException("Latency cannot be smaller than 0"); } return new ErrorDisruptContext(latency); }
@Test public void testError() { final long latency = 4200; DisruptContexts.ErrorDisruptContext context = (DisruptContexts.ErrorDisruptContext) DisruptContexts.error(latency); Assert.assertEquals(context.mode(), DisruptMode.ERROR); Assert.assertEquals(context.latency(), latency); }
@Override public int findConfigHistoryCountByTime(final Timestamp startTime) { HistoryConfigInfoMapper historyConfigInfoMapper = mapperManager.findMapper( dataSourceService.getDataSourceType(), TableConstant.HIS_CONFIG_INFO); MapperContext context = new MapperContext(); context.putWhereParameter(FieldConstant.START_TIME, startTime); MapperResult sqlFetchRows = historyConfigInfoMapper.findConfigHistoryCountByTime(context); Integer result = databaseOperate.queryOne(sqlFetchRows.getSql(), sqlFetchRows.getParamList().toArray(), Integer.class); if (result == null) { throw new IllegalArgumentException("findConfigHistoryCountByTime error"); } return result; }
@Test void testFindConfigHistoryCountByTime() { Timestamp timestamp = new Timestamp(System.currentTimeMillis()); //mock count Mockito.when(databaseOperate.queryOne(anyString(), eq(new Object[] {timestamp}), eq(Integer.class))).thenReturn(308); //execute & verify int count = embeddedHistoryConfigInfoPersistService.findConfigHistoryCountByTime(timestamp); assertEquals(308, count); }
private MetricDto getMetric(String key) { return requireNonNull(metricsByKeys.get(key), () -> String.format("Metric with key %s not found", key)); }
@Test public void getMetric() { Collection<MetricDto> metrics = asList(METRIC_1, METRIC_2); MeasureMatrix underTest = new MeasureMatrix(asList(PROJECT, FILE), metrics, new ArrayList<>()); assertThat(underTest.getMetricByUuid(METRIC_2.getUuid())).isSameAs(METRIC_2); }
public void expectLogMessage(int level, String tag, Matcher<String> messageMatcher) { expectLog(level, tag, messageMatcher, null); }
@Test public void testExpectedLogMessageFailureOutput() { Log.e("Mytag", "message1"); Log.e("Mytag", "message2"); // Not expected rule.expectLogMessage(Log.ERROR, "Mytag", "message1"); rule.expectLogMessage(Log.ERROR, "Mytag", "message3"); // Not logged expectedException.expect( new TypeSafeMatcher<AssertionError>() { @Override protected boolean matchesSafely(AssertionError error) { return error .getMessage() .matches( "[\\s\\S]*Expected, and observed:\\s+\\[LogItem\\{" + "\\s+timeString='.+'" + "\\s+type=6" + "\\s+tag='Mytag'" + "\\s+msg='message1'" + "\\s+throwable=null" + "\\s+}]" + "[\\s\\S]*") && error .getMessage() .matches( "[\\s\\S]*Observed, but not expected:\\s+\\[LogItem\\{" + "\\s+timeString='.+'" + "\\s+type=6" + "\\s+tag='Mytag'" + "\\s+msg='message2'" + "\\s+throwable=null" + "\\s+}][\\s\\S]*") && error .getMessage() .matches( "[\\s\\S]*Expected, but not observed: \\[ExpectedLogItem\\{timeString='.+'," + " type=6, tag='Mytag', msg='message3'}]" + "[\\s\\S]*"); } @Override public void describeTo(Description description) { description.appendText("Matches ExpectedLogMessagesRule"); } }); }
public static boolean isCaseSensitiveCustomerId(final String customerId) { return NEW_CUSTOMER_CASE_SENSISTIVE_PATTERN.matcher(customerId).matches(); }
@Test public void testCaseSensitiveNewCustomerIds() { for (String validValue : CustomerIdExamples.VALID_CASE_SENSISTIVE_NEW_CUSTOMER_IDS) { assertTrue(validValue + " is case-insensitive customer ID.", BaseSupportConfig.isCaseSensitiveCustomerId(validValue)); } }
@Override public V computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) { // will throw UnsupportedOperationException; delegate anyway for testability return underlying().computeIfPresent(key, remappingFunction); }
@Test public void testDelegationOfUnsupportedFunctionComputeIfPresent() { final BiFunction<Object, Object, Object> mockBiFunction = mock(BiFunction.class); new PCollectionsHashMapWrapperDelegationChecker<>() .defineMockConfigurationForUnsupportedFunction(mock -> mock.computeIfPresent(eq(this), eq(mockBiFunction))) .defineWrapperUnsupportedFunctionInvocation(wrapper -> wrapper.computeIfPresent(this, mockBiFunction)) .doUnsupportedFunctionDelegationCheck(); }
@Override public void fenceZombieSourceTasks(final String connName, final Callback<Void> callback, InternalRequestSignature requestSignature) { log.trace("Submitting zombie fencing request {}", connName); if (requestNotSignedProperly(requestSignature, callback)) { return; } fenceZombieSourceTasks(connName, callback); }
@Test public void testFenceZombiesInvalidSignature() { // Don't have to run the whole gamut of scenarios (invalid signature, missing signature, earlier protocol that doesn't require signatures) // since the task config tests cover that pretty well. One sanity check to ensure that this method is guarded should be sufficient. when(member.currentProtocolVersion()).thenReturn(CONNECT_PROTOCOL_V2); InternalRequestSignature signature = mock(InternalRequestSignature.class); when(signature.keyAlgorithm()).thenReturn("HmacSHA256"); when(signature.isValid(any())).thenReturn(false); SessionKey sessionKey = mock(SessionKey.class); SecretKey secretKey = mock(SecretKey.class); when(sessionKey.key()).thenReturn(secretKey); when(sessionKey.creationTimestamp()).thenReturn(time.milliseconds()); // Read a new session key from the config topic configUpdateListener.onSessionKeyUpdate(sessionKey); Callback<Void> taskConfigCb = mock(Callback.class); herder.fenceZombieSourceTasks(CONN1, taskConfigCb, signature); ArgumentCaptor<Throwable> errorCapture = ArgumentCaptor.forClass(Throwable.class); verify(taskConfigCb).onCompletion(errorCapture.capture(), isNull()); assertInstanceOf(ConnectRestException.class, errorCapture.getValue()); assertEquals(FORBIDDEN.getStatusCode(), ((ConnectRestException) errorCapture.getValue()).statusCode()); verifyNoMoreInteractions(member); }
public Command create( final ConfiguredStatement<? extends Statement> statement, final KsqlExecutionContext context) { return create(statement, context.getServiceContext(), context); }
@Test public void shouldCreateCommandForPauseQuery() { // Given: givenPause(); // When: final Command command = commandFactory.create(configuredStatement, executionContext); // Then: assertThat(command, is(Command.of(configuredStatement))); }
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
@Test public void banChatMember() { BaseResponse response = bot.execute(new BanChatMember(channelName, chatId).untilDate(123).revokeMessages(true)); assertFalse(response.isOk()); assertEquals(400, response.errorCode()); assertEquals("Bad Request: can't remove chat owner", response.description()); }
public static Matrix.SVD svd(IMatrix A, int k) { return svd(A, k, Math.min(3 * k, Math.min(A.nrow(), A.ncol())), 1E-6); }
@Test public void testSVD() { System.out.println("SVD sparse matrix"); double[][] A = { {1, 0, 0, 1, 0, 0, 0, 0, 0}, {1, 0, 1, 0, 0, 0, 0, 0, 0}, {1, 1, 0, 0, 0, 0, 0, 0, 0}, {0, 1, 1, 0, 1, 0, 0, 0, 0}, {0, 1, 1, 2, 0, 0, 0, 0, 0}, {0, 1, 0, 0, 1, 0, 0, 0, 0}, {0, 1, 0, 0, 1, 0, 0, 0, 0}, {0, 0, 1, 1, 0, 0, 0, 0, 0}, {0, 1, 0, 0, 0, 0, 0, 0, 1}, {0, 0, 0, 0, 0, 1, 1, 1, 0}, {0, 0, 0, 0, 0, 0, 1, 1, 1}, {0, 0, 0, 0, 0, 0, 0, 1, 1} }; double[] s = {3.34088, 2.5417, 2.35394, 1.64453, 1.50483, 1.30638, 0.845903, 0.560134, 0.363677}; double[][] Vt = { { 0.197393, 0.60599, 0.462918, 0.542114, 0.279469, 0.00381521, 0.0146315, 0.0241368, 0.0819574}, { 0.0559135, -0.165593, 0.127312, 0.231755, -0.106775, -0.192848, -0.437875, -0.615122, -0.529937}, {-0.11027, 0.497326, -0.207606, -0.569921, 0.50545, -0.0981842, -0.192956, -0.252904, -0.0792731}, {-0.949785, -0.0286489, 0.0416092, 0.267714, 0.150035, 0.0150815, 0.0155072, 0.010199, -0.0245549}, {-0.0456786, 0.206327, -0.378336, 0.205605, -0.327194, -0.394841, -0.349485, -0.149798, 0.601993}, {-0.0765936, -0.256475, 0.7244, -0.368861, 0.034813, -0.300161, -0.212201, 9.74342e-05, 0.362219}, {-0.177318, 0.432984, 0.23689, -0.2648, -0.672304, 0.34084, 0.152195, -0.249146, -0.0380342}, {-0.0143933, 0.0493053, 0.0088255, -0.0194669, -0.0583496, 0.454477, -0.761527, 0.449643, -0.0696375}, {-0.0636923, 0.242783, 0.0240769, -0.0842069, -0.262376, -0.619847, 0.0179752, 0.51989, -0.453507} }; double[][] Ut = { { 0.221351, 0.197645, 0.24047, 0.403599, 0.644481, 0.265037, 0.265037, 0.300828, 0.205918, 0.0127462, 0.0361358, 0.0317563}, { 0.11318, 0.0720878, -0.043152, -0.0570703, 0.167301, -0.10716, -0.10716, 0.14127, -0.273647, -0.490162, -0.622785, -0.450509}, {-0.288958, -0.13504, 0.164429, 0.337804, -0.361148, 0.425998, 0.425998, -0.330308, 0.177597, -0.23112, -0.223086, -0.141115}, {-0.414751, -0.55224, -0.594962, 0.0991137, 0.333462, 0.0738122, 0.0738122, 0.188092, -0.0323519, 0.024802, 0.000700072, -0.00872947}, { 0.106275, -0.281769, 0.106755, -0.331734, 0.158955, -0.0803194, -0.0803194, -0.114785, 0.53715, -0.59417, 0.0682529, 0.300495}, {-0.340983, 0.495878, -0.254955, 0.384832, -0.206523, -0.169676, -0.169676, 0.272155, 0.080944, -0.392125, 0.114909, 0.277343}, {-0.522658, 0.0704234, 0.30224, -0.00287218, 0.165829, -0.282916, -0.282916, -0.0329941, 0.466898, 0.288317, -0.159575, -0.339495}, {-0.0604501, -0.00994004, 0.062328, -0.000390504, 0.034272, -0.0161465, -0.0161465, -0.018998, -0.0362988, 0.254568, -0.681125, 0.6784180}, {-0.406678, -0.10893, 0.492444, 0.0123293, 0.270696, -0.0538747, -0.0538747, -0.165339, -0.579426, -0.225424, 0.231961, 0.182535} }; int m = A.length; int n = A[0].length; int k = 3; // m > n SparseMatrix a = new SparseMatrix(A, 1E-8); Matrix.SVD svd = ARPACK.svd(a, k); for (int i = 0; i < k; i++) { assertEquals(s[i], svd.s[i], 1E-5); } assertEquals(m, svd.U.nrow()); assertEquals(k, svd.U.ncol()); for (int j = 0; j < k; j++) { for (int i = 0; i < m; i++) { assertEquals(Math.abs(Ut[j][i]), Math.abs(svd.U.get(i, j)), 1E-6); } } assertEquals(n, svd.V.nrow()); assertEquals(k, svd.V.ncol()); for (int j = 0; j < k; j++) { for (int i = 0; i < n; i++) { assertEquals(Math.abs(Vt[j][i]), Math.abs(svd.V.get(i, j)), 1E-6); } } // m < n svd = ARPACK.svd(a.transpose(), k); for (int i = 0; i < k; i++) { assertEquals(s[i], svd.s[i], 1E-5); } assertEquals(n, svd.U.nrow()); assertEquals(k, svd.U.ncol()); System.out.println(svd.U); System.out.println(svd.V); for (int j = 0; j < k; j++) { for (int i = 0; i < n; i++) { 
assertEquals(Math.abs(Vt[j][i]), Math.abs(svd.U.get(i, j)), 1E-6); } } assertEquals(m, svd.V.nrow()); assertEquals(k, svd.V.ncol()); for (int j = 0; j < k; j++) { for (int i = 0; i < m; i++) { assertEquals(Math.abs(Ut[j][i]), Math.abs(svd.V.get(i, j)), 1E-6); } } }
public CoercedExpressionResult coerce() { final Class<?> leftClass = left.getRawClass(); final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass); final Class<?> rightClass = right.getRawClass(); final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass); boolean sameClass = leftClass == rightClass; boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression; if (sameClass || isUnificationExpression) { return new CoercedExpressionResult(left, right); } if (!canCoerce()) { throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass)); } if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) { CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression()); return new CoercedExpressionResult( new TypedExpression(castExpression, double.class, left.getType()), right, false); } final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass ); final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass); boolean rightAsStaticField = false; final Expression rightExpression = right.getExpression(); final TypedExpression coercedRight; if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) { final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass); coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType); coercedRight.setType( leftClass ); } else if (shouldCoerceBToString(left, right)) { coercedRight = coerceToString(right); } else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) { coercedRight = castToClass(leftClass); } else if (leftClass == long.class && rightClass == int.class) { coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression())); } else if (leftClass == Date.class && rightClass == String.class) { coercedRight = coerceToDate(right); rightAsStaticField = true; } else if (leftClass == LocalDate.class && rightClass == String.class) { coercedRight = coerceToLocalDate(right); rightAsStaticField = true; } else if (leftClass == LocalDateTime.class && rightClass == String.class) { coercedRight = coerceToLocalDateTime(right); rightAsStaticField = true; } else if (shouldCoerceBToMap()) { coercedRight = castToClass(toNonPrimitiveType(leftClass)); } else if (isBoolean(leftClass) && !isBoolean(rightClass)) { coercedRight = coerceBoolean(right); } else { coercedRight = right; } final TypedExpression coercedLeft; if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) { coercedLeft = coerceToString(left); } else { coercedLeft = left; } return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField); }
@Test public void testNameExprToString() { final TypedExpression left = expr(THIS_PLACEHOLDER + ".getName", String.class); final TypedExpression right = expr("$maxName", Comparable.class); final CoercedExpression.CoercedExpressionResult coerce = new CoercedExpression(left, right, true).coerce(); assertThat(coerce.getCoercedRight()).isEqualTo(expr("(java.lang.String) $maxName", String.class)); }
public static List<Element> getDirectChildren(Element parent, String namespace, String tag) { List<Element> directChildren = new ArrayList<>(); NodeList children = parent.getChildNodes(); for (int i = 0; i < children.getLength(); i++) { Node child = children.item(i); if (child.getNodeType() == Node.ELEMENT_NODE && namespace.equals(child.getNamespaceURI()) && tag.equals(child.getLocalName())) { directChildren.add((Element) child); } } return directChildren; }
@Test void getDirectChildren() { List<Element> children = XmlUtil.getDirectChildren(parent, "http://example.com", "child"); assertEquals(2, children.size()); assertEquals("child", children.get(0).getLocalName()); assertEquals("child", children.get(1).getLocalName()); }
public static ProjectPath projectPathFromId(String projectId) { return new ProjectPath(String.format("projects/%s", projectId)); }
@Test public void projectPathFromIdWellFormed() { ProjectPath path = PubsubClient.projectPathFromId("test"); assertEquals("projects/test", path.getPath()); }
@Override protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception { if (in.readableBytes() < 1) { return; } // read one byte to guess protocol final int magic = in.getByte(in.readerIndex()); ChannelPipeline p = ctx.pipeline(); p.addLast(new ForeignHostPermitHandler(qosConfiguration)); if (isHttp(magic)) { // no welcome output for http protocol if (welcomeFuture != null && welcomeFuture.isCancellable()) { welcomeFuture.cancel(false); } p.addLast(new HttpServerCodec()); p.addLast(new HttpObjectAggregator(1048576)); p.addLast(new HttpProcessHandler(frameworkModel, qosConfiguration)); p.remove(this); } else { p.addLast(new LineBasedFrameDecoder(2048)); p.addLast(new StringDecoder(CharsetUtil.UTF_8)); p.addLast(new StringEncoder(CharsetUtil.UTF_8)); p.addLast(new IdleStateHandler(0, 0, 5 * 60)); p.addLast(new TelnetIdleEventHandler()); p.addLast(new TelnetProcessHandler(frameworkModel, qosConfiguration)); p.remove(this); } }
@Test void testDecodeHttp() throws Exception { ByteBuf buf = Unpooled.wrappedBuffer(new byte[] {'G'}); ChannelHandlerContext context = Mockito.mock(ChannelHandlerContext.class); ChannelPipeline pipeline = Mockito.mock(ChannelPipeline.class); Mockito.when(context.pipeline()).thenReturn(pipeline); QosProcessHandler handler = new QosProcessHandler( FrameworkModel.defaultModel(), QosConfiguration.builder() .welcome("welcome") .acceptForeignIp(false) .acceptForeignIpWhitelist(StringUtils.EMPTY_STRING) .build()); handler.decode(context, buf, Collections.emptyList()); verify(pipeline).addLast(any(HttpServerCodec.class)); verify(pipeline).addLast(any(HttpObjectAggregator.class)); verify(pipeline).addLast(any(HttpProcessHandler.class)); verify(pipeline).remove(handler); }
@NonNull public static Permutor<FeedItem> getPermutor(@NonNull SortOrder sortOrder) { Comparator<FeedItem> comparator = null; Permutor<FeedItem> permutor = null; switch (sortOrder) { case EPISODE_TITLE_A_Z: comparator = (f1, f2) -> itemTitle(f1).compareTo(itemTitle(f2)); break; case EPISODE_TITLE_Z_A: comparator = (f1, f2) -> itemTitle(f2).compareTo(itemTitle(f1)); break; case DATE_OLD_NEW: comparator = (f1, f2) -> pubDate(f1).compareTo(pubDate(f2)); break; case DATE_NEW_OLD: comparator = (f1, f2) -> pubDate(f2).compareTo(pubDate(f1)); break; case DURATION_SHORT_LONG: comparator = (f1, f2) -> Integer.compare(duration(f1), duration(f2)); break; case DURATION_LONG_SHORT: comparator = (f1, f2) -> Integer.compare(duration(f2), duration(f1)); break; case EPISODE_FILENAME_A_Z: comparator = (f1, f2) -> itemLink(f1).compareTo(itemLink(f2)); break; case EPISODE_FILENAME_Z_A: comparator = (f1, f2) -> itemLink(f2).compareTo(itemLink(f1)); break; case FEED_TITLE_A_Z: comparator = (f1, f2) -> feedTitle(f1).compareTo(feedTitle(f2)); break; case FEED_TITLE_Z_A: comparator = (f1, f2) -> feedTitle(f2).compareTo(feedTitle(f1)); break; case RANDOM: permutor = Collections::shuffle; break; case SMART_SHUFFLE_OLD_NEW: permutor = (queue) -> smartShuffle(queue, true); break; case SMART_SHUFFLE_NEW_OLD: permutor = (queue) -> smartShuffle(queue, false); break; case SIZE_SMALL_LARGE: comparator = (f1, f2) -> Long.compare(size(f1), size(f2)); break; case SIZE_LARGE_SMALL: comparator = (f1, f2) -> Long.compare(size(f2), size(f1)); break; case COMPLETION_DATE_NEW_OLD: comparator = (f1, f2) -> f2.getMedia().getPlaybackCompletionDate() .compareTo(f1.getMedia().getPlaybackCompletionDate()); break; default: throw new IllegalArgumentException("Permutor not implemented"); } if (comparator != null) { final Comparator<FeedItem> comparator2 = comparator; permutor = (queue) -> Collections.sort(queue, comparator2); } return permutor; }
@Test public void testPermutorForRule_FEED_TITLE_DESC_NullTitle() { Permutor<FeedItem> permutor = FeedItemPermutors.getPermutor(SortOrder.FEED_TITLE_Z_A); List<FeedItem> itemList = getTestList(); itemList.get(1) // itemId 3 .getFeed().setTitle(null); assertTrue(checkIdOrder(itemList, 1, 3, 2)); // before sorting permutor.reorder(itemList); assertTrue(checkIdOrder(itemList, 2, 1, 3)); // after sorting }
@Override public void checkSubjectAccess( final KsqlSecurityContext securityContext, final String subjectName, final AclOperation operation ) { checkAccess(new CacheKey(securityContext, AuthObjectType.SUBJECT, subjectName, operation)); }
@Test public void shouldCheckBackendValidatorOnFirstSubjectAccessRequest() { // When cache.checkSubjectAccess(securityContext, SUBJECT_1, AclOperation.READ); // Then verify(backendValidator, times(1)) .checkSubjectAccess(securityContext, SUBJECT_1, AclOperation.READ); verifyNoMoreInteractions(backendValidator); }
@Override public List<ColumnarFeature> process(String value) { try { LocalDate date = LocalDate.parse(value, formatter); List<ColumnarFeature> features = new ArrayList<>(featureTypes.size()); for (DateFeatureType f : featureTypes) { int featureValue = f.extract(date); ColumnarFeature feature = new ColumnarFeature(fieldName,f.toString(),featureValue); features.add(feature); } return features; } catch (DateTimeParseException e) { logger.log(Level.WARNING, e.getParsedString()); logger.log(Level.WARNING, String.format("Unable to parse date %s with formatter %s", value, formatter.toString())); return Collections.emptyList(); } }
@Test public void testInvalidBehaviour() { String notADateFormatString = "not-a-date-format-string"; try { DateFieldProcessor proc = new DateFieldProcessor("test", EnumSet.of(DateFieldProcessor.DateFeatureType.DAY), notADateFormatString); fail("Should have thrown on failing to parse the date format string"); } catch (PropertyException e) { // pass } String isoFormat = "uuuu-MM-dd"; DateFieldProcessor proc = new DateFieldProcessor("test", EnumSet.of(DateFieldProcessor.DateFeatureType.DAY), isoFormat); List<ColumnarFeature> extractedFeatures = proc.process("definitely-not-a-date"); assertTrue(extractedFeatures.isEmpty()); }
@Private public void scheduleAllReduces() { for (ContainerRequest req : pendingReduces) { scheduledRequests.addReduce(req); } pendingReduces.clear(); }
@Test(timeout = 30000) public void testExcessReduceContainerAssign() throws Exception { final Configuration conf = new Configuration(); conf.setFloat(MRJobConfig.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, 0.0f); final MyResourceManager2 rm = new MyResourceManager2(conf); rm.start(); final RMApp app = MockRMAppSubmitter.submitWithMemory(2048, rm); rm.drainEvents(); final String host = "host1"; final MockNM nm = rm.registerNode(String.format("%s:1234", host), 4096); nm.nodeHeartbeat(true); rm.drainEvents(); final ApplicationAttemptId appAttemptId = app.getCurrentAppAttempt() .getAppAttemptId(); rm.sendAMLaunched(appAttemptId); rm.drainEvents(); final JobId jobId = MRBuilderUtils .newJobId(appAttemptId.getApplicationId(), 0); final Job mockJob = mock(Job.class); when(mockJob.getReport()).thenReturn( MRBuilderUtils.newJobReport(jobId, "job", "user", JobState.RUNNING, 0, 0, 0, 0, 0, 0, 0, "jobfile", null, false, "")); final MyContainerAllocator allocator = new MyContainerAllocator(rm, conf, appAttemptId, mockJob, SystemClock.getInstance()); // request to allocate two reduce priority containers final String[] locations = new String[] {host}; allocator.sendRequest(createRequest(jobId, 0, Resource.newInstance(1024, 1), locations, false, true)); allocator.scheduleAllReduces(); allocator.makeRemoteRequest(); nm.nodeHeartbeat(true); rm.drainEvents(); allocator.sendRequest(createRequest(jobId, 1, Resource.newInstance(1024, 1), locations, false, false)); int assignedContainer; for (assignedContainer = 0; assignedContainer < 1;) { assignedContainer += allocator.schedule().size(); nm.nodeHeartbeat(true); rm.drainEvents(); } // only 1 allocated container should be assigned assertThat(assignedContainer).isEqualTo(1); }
void setAssociationPolicy(LeafNodeAssociationPolicy associationPolicy) { this.associationPolicy = associationPolicy; }
@Test public void testSerialization() throws Exception { // Setup fixture. final DefaultNodeConfiguration config = new DefaultNodeConfiguration(false); config.setDeliverPayloads( !config.isDeliverPayloads() ); // invert all defaults to improve test coverage. config.setMaxPayloadSize( 98732 ); config.setPersistPublishedItems( !config.isPersistPublishedItems() ); config.setMaxPublishedItems( 13461 ); config.setNotifyConfigChanges( !config.isNotifyConfigChanges()); config.setNotifyDelete( !config.isNotifyDelete() ); config.setNotifyRetract( !config.isNotifyRetract() ); config.setPresenceBasedDelivery( !config.isPresenceBasedDelivery() ); config.setSendItemSubscribe( !config.isSendItemSubscribe() ); config.setPublisherModel(PublisherModel.subscribers ); config.setSubscriptionEnabled( !config.isSubscriptionEnabled() ); config.setAccessModel(AccessModel.whitelist ); config.setLanguage( "nl_NL" ); config.setReplyPolicy(Node.ItemReplyPolicy.publisher ); config.setAssociationPolicy(CollectionNode.LeafNodeAssociationPolicy.whitelist ); final CollectionNode input = new CollectionNode( new PubSubService.UniqueIdentifier( "test-service-id" ), null, "test-node-id", new JID( "unit-test@example.org"), config); // Execute system under test. final Object result = serializeAndDeserialize( input ); // Verify result. assertNotNull( result ); assertTrue( result instanceof CollectionNode ); assertEquals( input, result ); }
public static JsonToRowWithErrFn withExceptionReporting(Schema rowSchema) { return JsonToRowWithErrFn.forSchema(rowSchema); }
@Test @Category(NeedsRunner.class) public void testParsesRowsDeadLetterNoErrors() throws Exception { PCollection<String> jsonPersons = pipeline.apply("jsonPersons", Create.of(JSON_PERSON)); ParseResult results = jsonPersons.apply(JsonToRow.withExceptionReporting(PERSON_SCHEMA)); PCollection<Row> personRows = results.getResults(); PCollection<Row> errors = results.getFailedToParseLines(); PAssert.that(personRows).containsInAnyOrder(PERSON_ROWS); PAssert.that(errors).empty(); pipeline.run(); }
public URI baseUri() { return server.configuration().baseUri(); }
@Test void run_callback() { var baseUri = application.baseUri(); var sessionID = UUID.randomUUID().toString(); var response = given() .log() .all() .cookie("session_id", sessionID) .formParam("code", "code") .when() .get(baseUri.resolve(CALLBACK_PATH)) .then() .contentType(ContentType.HTML) .statusCode(400) .extract() .response(); var responseBody = response.getBody().asString(); assertTrue(responseBody.contains("de-DE")); assertTrue( responseBody.contains( "Oops, Sitzung unbekannt oder abgelaufen. Bitte starten Sie erneut.")); }
public static String encodeHexString(byte[] bytes) { int l = bytes.length; char[] out = new char[l << 1]; for (int i = 0, j = 0; i < l; i++) { out[j++] = DIGITS_LOWER[(0xF0 & bytes[i]) >>> 4]; out[j++] = DIGITS_LOWER[0x0F & bytes[i]]; } return new String(out); }
@Test void testEncodeHexString() { assertEquals("", MD5Utils.encodeHexString(new byte[0])); assertEquals("010203", MD5Utils.encodeHexString(new byte[] {1, 2, 3})); }
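Each byte maps to two lowercase hex digits: the high nibble (0xF0 & b) >>> 4 selects the first digit and the low nibble 0x0F & b the second. A worked example, assuming DIGITS_LOWER is the usual 0-f alphabet:

    char[] DIGITS_LOWER = "0123456789abcdef".toCharArray();
    byte b = 0x2A;                             // 42
    char hi = DIGITS_LOWER[(0xF0 & b) >>> 4];  // '2'
    char lo = DIGITS_LOWER[0x0F & b];          // 'a', so 0x2A renders as "2a"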
@Nonnull @Override public Optional<? extends INode> parse( @Nullable final String str, @Nonnull DetectionLocation detectionLocation) { if (str == null) { return Optional.empty(); } for (IMapper mapper : jcaSpecificAlgorithmMappers) { Optional<? extends INode> asset = mapper.parse(str, detectionLocation); if (asset.isPresent()) { return asset; } } return switch (str.toUpperCase().trim()) { case "PBE", "PBES2" -> Optional.of(new PasswordBasedEncryption(detectionLocation)); case "DH", "DIFFIEHELLMAN" -> Optional.of(new DH(detectionLocation)); case "RSA" -> Optional.of(new RSA(detectionLocation)); case "EC" -> Optional.of(new Algorithm(str, PublicKeyEncryption.class, detectionLocation)); default -> { final Algorithm algorithm = new Algorithm(str, Unknown.class, detectionLocation); algorithm.put(new Unknown(detectionLocation)); yield Optional.of(algorithm); } }; }
@Test void aeCipher() { DetectionLocation testDetectionLocation = new DetectionLocation("testfile", 1, 1, List.of("test"), () -> "SSL"); JcaAlgorithmMapper jcaAlgorithmMapper = new JcaAlgorithmMapper(); Optional<? extends INode> assetOptional = jcaAlgorithmMapper.parse("AES/GCM/NoPadding", testDetectionLocation); assertThat(assetOptional).isPresent(); assertThat(assetOptional.get().is(AuthenticatedEncryption.class)).isTrue(); }
@Override public Path touch(final Path file, final TransferStatus status) throws BackgroundException { if(file.isFile()) { try { Files.createFile(session.toPath(file)); } catch(FileAlreadyExistsException e) { // File already exists; treat touch as a no-op } catch(IOException e) { throw new LocalExceptionMappingService().map("Cannot create {0}", e, file); } } return file; }
@Test public void testTouch() throws Exception { final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname())); assertNotNull(session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback())); assertTrue(session.isConnected()); assertNotNull(session.getClient()); session.login(new DisabledLoginCallback(), new DisabledCancelCallback()); final Path workdir = new LocalHomeFinderFeature().find(); final Path test = new Path(workdir, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); new LocalTouchFeature(session).touch(test, new TransferStatus()); // Test override new LocalTouchFeature(session).touch(test, new TransferStatus()); assertTrue(new LocalFindFeature(session).find(test)); final AttributedList<Path> list = new LocalListService(session).list(workdir, new DisabledListProgressListener()); assertTrue(list.contains(test)); new LocalDeleteFeature(session).delete(Collections.<Path>singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); session.close(); }
public void test() { User user = new User(); user.setName("yupi"); User resultUser = userService.getUser(user); System.out.println(resultUser.getName()); }
@Test void test1() { exampleService.test(); }
public void openFile() { openFile( false ); }
@Test public void testLoadLastUsedTransLocalWithRepository() throws Exception { String repositoryName = "repositoryName"; String fileName = "fileName"; setLoadLastUsedJobLocalWithRepository( false, repositoryName, null, fileName, true ); verify( spoon ).openFile( fileName, null, true ); }
public void stopRunning( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { if ( this.isStopped() || sdi.isDisposed() ) { return; } final DatabaseJoinData data = (DatabaseJoinData) sdi; dbLock.lock(); try { if ( data.db != null && data.db.getConnection() != null && !data.isCanceled ) { data.db.cancelStatement( data.pstmt ); setStopped( true ); data.isCanceled = true; } } finally { dbLock.unlock(); } }
@Test public void testStopRunningWhenStepIsNotStoppedNorStepDataInterfaceIsDisposedAndDatabaseConnectionIsNotValid() throws KettleException { doReturn( false ).when( mockDatabaseJoin ).isStopped(); doReturn( false ).when( mockStepDataInterface ).isDisposed(); when( mockStepDataInterface.db.getConnection() ).thenReturn( null ); mockDatabaseJoin.stopRunning( mockStepMetaInterface, mockStepDataInterface ); verify( mockDatabaseJoin, times( 1 ) ).isStopped(); verify( mockStepDataInterface, times( 1 ) ).isDisposed(); verify( mockStepDataInterface.db, times( 1 ) ).getConnection(); verify( mockStepDataInterface.db, times( 0 ) ).cancelStatement( any( PreparedStatement.class ) ); assertFalse( mockStepDataInterface.isCanceled ); }
static int readHeapBuffer(InputStream f, ByteBuffer buf) throws IOException { int bytesRead = f.read(buf.array(), buf.arrayOffset() + buf.position(), buf.remaining()); if (bytesRead < 0) { // if this resulted in EOF, don't update position return bytesRead; } else { buf.position(buf.position() + bytesRead); return bytesRead; } }
@Test public void testHeapPositionAndLimit() throws Exception { ByteBuffer readBuffer = ByteBuffer.allocate(20); readBuffer.position(5); readBuffer.limit(13); readBuffer.mark(); MockInputStream stream = new MockInputStream(7); int len = DelegatingSeekableInputStream.readHeapBuffer(stream, readBuffer); Assert.assertEquals(7, len); Assert.assertEquals(12, readBuffer.position()); Assert.assertEquals(13, readBuffer.limit()); len = DelegatingSeekableInputStream.readHeapBuffer(stream, readBuffer); Assert.assertEquals(1, len); Assert.assertEquals(13, readBuffer.position()); Assert.assertEquals(13, readBuffer.limit()); len = DelegatingSeekableInputStream.readHeapBuffer(stream, readBuffer); Assert.assertEquals(0, len); readBuffer.reset(); Assert.assertEquals("Buffer contents should match", ByteBuffer.wrap(TEST_ARRAY, 0, 8), readBuffer); }
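A short trace of the test above, assuming MockInputStream(7) serves at most 7 bytes per call: the buffer starts at position 5 with limit 13, so remaining() is 8, and the first read copies min(7, 8) = 7 bytes into buf.array() at offset arrayOffset() + 5, moving position to 12. The second read copies the last byte, reaching the limit. The third call asks for remaining() == 0 bytes, and InputStream.read(b, off, 0) returns 0 rather than -1, which is why readHeapBuffer reports 0 instead of EOF.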
@Override public void deleteGroup(Long id) { // Validate that the group exists validateGroupExists(id); // Validate whether the group still has users validateGroupHasUser(id); // Delete memberGroupMapper.deleteById(id); }
@Test public void testDeleteGroup_success() { // Mock data MemberGroupDO dbGroup = randomPojo(MemberGroupDO.class); groupMapper.insert(dbGroup);// @Sql: insert an existing record first // Prepare parameters Long id = dbGroup.getId(); // Invoke groupService.deleteGroup(id); // Verify the data no longer exists assertNull(groupMapper.selectById(id)); }
@Override public void create(final String path, final byte[] data, final List<ACL> acl, final CreateMode createMode, final AsyncCallback.StringCallback cb, final Object ctx) { final RetryCallback callback = new RetryCallback() { @Override protected void retry() { if (!createMode.isSequential()) { // it's always safe to retry create for non-sequential names _log.info("Retry create operation: path = " + path + " data length " + getDataLength(data)); zkCreate(path, data, acl, createMode, this, ctx); } else { _log.error("Connection lost during create operation of sequential node. " + "Consider using createUniqueSequential() instead"); } } @Override protected void processStringResult(int cbRC, String cbPath, Object cbCtx, String cbName) { cb.processResult(cbRC, cbPath, cbCtx, cbName); } }; zkCreate(path, data, acl, createMode, callback, ctx); }
@Test public void testCreate() throws NoSuchMethodException { testCreateHelper(_dummyData); }
@Override public void updateMailSendResult(Long logId, String messageId, Exception exception) { // 1. Success if (exception == null) { mailLogMapper.updateById(new MailLogDO().setId(logId).setSendTime(LocalDateTime.now()) .setSendStatus(MailSendStatusEnum.SUCCESS.getStatus()).setSendMessageId(messageId)); return; } // 2. Failure mailLogMapper.updateById(new MailLogDO().setId(logId).setSendTime(LocalDateTime.now()) .setSendStatus(MailSendStatusEnum.FAILURE.getStatus()).setSendException(getRootCauseMessage(exception))); }
@Test public void testUpdateMailSendResult_exception() { // Mock data MailLogDO log = randomPojo(MailLogDO.class, o -> { o.setSendStatus(MailSendStatusEnum.INIT.getStatus()); o.setSendTime(null).setSendMessageId(null).setSendException(null) .setTemplateParams(randomTemplateParams()); }); mailLogMapper.insert(log); // Prepare parameters Long logId = log.getId(); Exception exception = new NullPointerException("test exception"); // Invoke mailLogService.updateMailSendResult(logId, null, exception); // Assert MailLogDO dbLog = mailLogMapper.selectById(logId); assertEquals(MailSendStatusEnum.FAILURE.getStatus(), dbLog.getSendStatus()); assertNotNull(dbLog.getSendTime()); assertNull(dbLog.getSendMessageId()); assertEquals("NullPointerException: test exception", dbLog.getSendException()); }
public static LocalDateTime formatLocalDateTimeFromTimestamp(final Long timestamp) { return LocalDateTime.ofEpochSecond(timestamp / 1000, 0, ZoneOffset.ofHours(8)); }
@Test public void testFormatLocalDateTimeFromTimestamp() { LocalDateTime localDateTime1 = LocalDateTime.now(ZoneOffset.ofHours(8)); LocalDateTime localDateTime2 = DateUtils.formatLocalDateTimeFromTimestamp(ZonedDateTime.of(localDateTime1, ZoneOffset.ofHours(8)).toInstant().toEpochMilli()); assertEquals(localDateTime1.getYear(), localDateTime2.getYear()); assertEquals(localDateTime1.getDayOfMonth(), localDateTime2.getDayOfMonth()); assertEquals(localDateTime1.getMonth(), localDateTime2.getMonth()); assertEquals(localDateTime1.getHour(), localDateTime2.getHour()); assertEquals(localDateTime1.getMinute(), localDateTime2.getMinute()); assertEquals(localDateTime1.getSecond(), localDateTime2.getSecond()); }
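formatLocalDateTimeFromTimestamp divides by 1000 and passes nanoOfSecond = 0, so millisecond precision is dropped, and the offset is hard-coded to UTC+8; the test compensates by building its reference time at ZoneOffset.ofHours(8) and comparing only down to whole seconds. A worked example under those semantics:

    long millis = 1_700_000_000_123L;              // arbitrary epoch milliseconds
    LocalDateTime t = LocalDateTime.ofEpochSecond(
        millis / 1000, 0, ZoneOffset.ofHours(8));  // the trailing 123 ms are truncated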
@Override public V getValueForExactAddress(IpPrefix prefix) { String prefixString = getPrefixString(prefix); if (prefix.isIp4()) { return ipv4Tree.getValueForExactKey(prefixString); } if (prefix.isIp6()) { return ipv6Tree.getValueForExactKey(prefixString); } return null; }
@Test public void testGetValueForExactAddress() { assertThat("IPv4 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv4PrefixKey1), is(1)); assertThat("IPv4 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv4PrefixKey2), is(2)); assertThat("IPv4 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv4PrefixKey3), is(3)); assertThat("IPv4 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv4PrefixKey4), is(4)); assertThat("IPv6 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv6PrefixKey1), is(11)); assertThat("IPv6 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv6PrefixKey2), is(12)); assertThat("IPv6 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv6PrefixKey3), is(13)); assertThat("IPv6 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv6PrefixKey4), is(14)); assertThat("IPv6 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv6PrefixKey5), is(15)); assertThat("IPv6 prefix has not been inserted correctly", radixTree.getValueForExactAddress(ipv6PrefixKey6), is(16)); }
static Object actualCoerceParameter(Type requiredType, Object valueToCoerce) { Object toReturn = valueToCoerce; if (valueToCoerce instanceof LocalDate localDate && requiredType == BuiltInType.DATE_TIME) { return DateTimeEvalHelper.coerceDateTime(localDate); } return toReturn; }
@Test void actualCoerceParameterNotConverted() { Object value = "TEST_OBJECT"; Object retrieved = CoerceUtil.actualCoerceParameter(BuiltInType.DATE_TIME, value); assertNotNull(retrieved); assertEquals(value, retrieved); value = LocalDate.now(); retrieved = CoerceUtil.actualCoerceParameter(BuiltInType.DATE, value); assertNotNull(retrieved); assertEquals(value, retrieved); }
public static String getCertFingerPrint(Certificate cert) { byte [] digest = null; try { byte[] encCertInfo = cert.getEncoded(); MessageDigest md = MessageDigest.getInstance("SHA-1"); digest = md.digest(encCertInfo); } catch (Exception e) { logger.error("Exception:", e); } if (digest != null) { return bytesToHex(digest).toLowerCase(); } return null; }
@Test public void testGetCertFingerPrintPrimary() throws Exception { X509Certificate cert = null; try (InputStream is = Config.getInstance().getInputStreamFromFile("primary.crt")){ CertificateFactory cf = CertificateFactory.getInstance("X.509"); cert = (X509Certificate) cf.generateCertificate(is); } catch (Exception e) { e.printStackTrace(); } String fp = FingerPrintUtil.getCertFingerPrint(cert); Assert.assertEquals("564aa231f84039ce2b2b886e58f88dcee26fa3e3", fp); }
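A minimal sketch of the fingerprint computation itself, assuming only the JDK; the stand-in bytes below replace cert.getEncoded(), and the inline hex loop mirrors what bytesToHex(...).toLowerCase() produces in the original class:

import java.security.MessageDigest;

public class FingerPrintDemo {
    public static void main(String[] args) throws Exception {
        byte[] encoded = {0x30, (byte) 0x82, 0x01, 0x0a}; // hypothetical stand-in for cert.getEncoded()
        byte[] digest = MessageDigest.getInstance("SHA-1").digest(encoded);
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b)); // lowercase hex, one byte -> two chars
        }
        System.out.println(hex); // 40 hex chars, like the asserted fingerprint
    }
}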
@ExceptionHandler(ConstraintViolationException.class) protected ShenyuAdminResult handleConstraintViolationException(final ConstraintViolationException e) { LOG.warn("constraint violation exception", e); Set<ConstraintViolation<?>> violations = e.getConstraintViolations(); return ShenyuAdminResult.error(violations.stream() .map(v -> v.getPropertyPath().toString().concat(": ").concat(v.getMessage())) .collect(Collectors.joining("| "))); }
@Test public void testHandleConstraintViolationException() { ConstraintViolationException exception = spy(new ConstraintViolationException(Collections.emptySet())); Set<ConstraintViolation<?>> violations = spy(Collections.emptySet()); when(exception.getConstraintViolations()).thenReturn(violations); ShenyuAdminResult result = exceptionHandlersUnderTest.handleConstraintViolationException(exception); Assertions.assertEquals(result.getCode().intValue(), CommonErrorCode.ERROR); }
public Optional<Object> evaluate(final Map<String, Object> columnPairsMap, final String outputColumn, final String regexField) { boolean matching = true; boolean isRegex = regexField != null && columnValues.containsKey(regexField) && (boolean) columnValues.get(regexField); for (Map.Entry<String, Object> columnPairEntry : columnPairsMap.entrySet()) { Object value = columnValues.get(columnPairEntry.getKey()); matching = isRegex ? isRegexMatching(value.toString(), (String) columnPairEntry.getValue()) : isMatching(value, columnPairEntry.getValue()); if (!matching) { break; } } return matching ? Optional.ofNullable(columnValues.get(outputColumn)) : Optional.empty(); }
@Test void evaluateKeyFoundNotMatchingRegex() { KiePMMLRow kiePMMLRow = new KiePMMLRow(COLUMN_VALUES); Optional<Object> retrieved = kiePMMLRow.evaluate(Collections.singletonMap("KEY-1", "[435345]"), "KEY-0", REGEX_FIELD); assertThat(retrieved).isNotPresent(); }
@Override public void readOne(TProtocol in, TProtocol out) throws TException { readOneStruct(in, out); }
@Test
public void testEnumMissingSchema() throws Exception {
    CountingErrorHandler countingHandler = new CountingErrorHandler();
    BufferedProtocolReadToWrite p = new BufferedProtocolReadToWrite(
            ThriftSchemaConverter.toStructType(StructWithEnum.class), countingHandler);
    final ByteArrayOutputStream in = new ByteArrayOutputStream();
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    StructWithMoreEnum enumDefinedInOldDefinition = new StructWithMoreEnum(NumberEnumWithMoreValue.THREE);
    StructWithMoreEnum extraEnumDefinedInNewDefinition = new StructWithMoreEnum(NumberEnumWithMoreValue.FOUR);
    enumDefinedInOldDefinition.write(protocol(in));
    extraEnumDefinedInNewDefinition.write(protocol(in));
    ByteArrayInputStream baos = new ByteArrayInputStream(in.toByteArray());
    // first should not throw
    p.readOne(protocol(baos), protocol(out));
    try {
        p.readOne(protocol(baos), protocol(out));
        fail("this should throw");
    } catch (SkippableException e) {
        Throwable cause = e.getCause();
        assertEquals(DecodingSchemaMismatchException.class, cause.getClass());
        assertTrue(cause.getMessage().contains("can not find index 4 in enum"));
    }
    assertEquals(0, countingHandler.recordCountOfMissingFields);
    assertEquals(0, countingHandler.fieldIgnoredCount);
}
@Override public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException { try { final IRODSFileSystemAO fs = session.getClient(); final IRODSFile f = fs.getIRODSFileFactory().instanceIRODSFile(file.getAbsolute()); if(!f.exists()) { throw new NotfoundException(file.getAbsolute()); } final ObjStat stats = fs.getObjStat(f.getAbsolutePath()); return this.toAttributes(stats); } catch(JargonException e) { throw new IRODSExceptionMappingService().map("Failure to read attributes of {0}", e, file); } }
@Test public void testFind() throws Exception { final ProtocolFactory factory = new ProtocolFactory(new HashSet<>(Collections.singleton(new IRODSProtocol()))); final Profile profile = new ProfilePlistReader(factory).read( this.getClass().getResourceAsStream("/iRODS (iPlant Collaborative).cyberduckprofile")); final Host host = new Host(profile, profile.getDefaultHostname(), new Credentials( PROPERTIES.get("irods.key"), PROPERTIES.get("irods.secret") )); final IRODSSession session = new IRODSSession(host); session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); session.login(new DisabledLoginCallback(), new DisabledCancelCallback()); final Path folder = new IRODSDirectoryFeature(session).mkdir(new Path( new IRODSHomeFinderService(session).find(), UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final IRODSAttributesFinderFeature f = new IRODSAttributesFinderFeature(session); final long folderTimestamp = f.find(folder).getModificationDate(); final Path test = new Path(folder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); new IRODSTouchFeature(session).touch(test, new TransferStatus()); assertEquals(folderTimestamp, f.find(folder).getModificationDate()); final PathAttributes attributes = f.find(test); assertEquals(0L, attributes.getSize()); assertEquals("iterate", attributes.getOwner()); assertEquals("iplant", attributes.getGroup()); new IRODSDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); assertFalse(new IRODSFindFeature(session).find(test)); session.close(); }
public static void disablePullConsumption(DefaultLitePullConsumerWrapper wrapper, Set<String> topics) { Set<String> subscribedTopic = wrapper.getSubscribedTopics(); if (subscribedTopic.stream().anyMatch(topics::contains)) { suspendPullConsumer(wrapper); return; } resumePullConsumer(wrapper); }
@Test public void testDisablePullConsumptionWithAssignNoSubTractTopics() { subscribedTopics = new HashSet<>(); subscribedTopics.add("test-topic-2"); Collection<MessageQueue> messageQueues = new ArrayList<>(); MessageQueue messageQueue = new MessageQueue("test-topic-2", "broker-1", 1); messageQueues.add(messageQueue); pullConsumerWrapper.setMessageQueues(messageQueues); pullConsumerWrapper.setSubscribedTopics(subscribedTopics); pullConsumerWrapper.setSubscriptionType(SubscriptionType.ASSIGN); pullConsumerWrapper.setAssignedMessageQueue(assignedMessageQueue); RocketMqPullConsumerController.disablePullConsumption(pullConsumerWrapper, prohibitionTopics); Mockito.verify(pullConsumer, Mockito.times(1)).assign( Mockito.any()); }
@Override public KeyValue<K, V> next() { final KeyValue<K, ValueAndTimestamp<V>> innerKeyValue = innerIterator.next(); return KeyValue.pair(innerKeyValue.key, getValueOrNull(innerKeyValue.value)); }
@Test public void shouldReturnPlainKeyValuePairOnGet() { when(mockedKeyValueIterator.next()).thenReturn( new KeyValue<>("key", ValueAndTimestamp.make("value", 42L))); assertThat(keyValueIteratorFacade.next(), is(KeyValue.pair("key", "value"))); }
static Double convertToDouble(Object toConvert) {
    if (!(toConvert instanceof Number)) {
        throw new IllegalArgumentException("Input data must be declared and sent as Number, received " + toConvert);
    }
    return (Double) DATA_TYPE.DOUBLE.getActualValue(toConvert);
}
@Test void convertToDouble_invalidValues() { List<Object> inputs = Arrays.asList("3", "3.0", true); inputs.forEach(number -> { assertThatThrownBy(() -> KiePMMLClusteringModel.convertToDouble(number)) .isInstanceOf(IllegalArgumentException.class); }); }
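A tiny standalone check of the instanceof Number guard above: Strings and Booleans fail it (and so reach the exception path), while boxed integers and doubles pass:

public class NumberGuardDemo {
    public static void main(String[] args) {
        for (Object o : new Object[] {"3", "3.0", true, 3, 3.0}) {
            System.out.println(o + " -> instanceof Number: " + (o instanceof Number));
        }
    }
}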
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return PathAttributes.EMPTY;
    }
    if(file.getType().contains(Path.Type.upload)) {
        // Pending large file upload
        final Write.Append append = new B2LargeUploadService(session, fileid,
                new B2WriteFeature(session, fileid)).append(file, new TransferStatus());
        if(append.append) {
            return new PathAttributes().withSize(append.offset);
        }
        return PathAttributes.EMPTY;
    }
    if(containerService.isContainer(file)) {
        try {
            final B2BucketResponse info = session.getClient().listBucket(file.getName());
            if(null == info) {
                throw new NotfoundException(file.getAbsolute());
            }
            return this.toAttributes(info);
        }
        catch(B2ApiException e) {
            throw new B2ExceptionMappingService(fileid).map("Failure to read attributes of {0}", e, file);
        }
        catch(IOException e) {
            throw new DefaultIOExceptionMappingService().map(e);
        }
    }
    else {
        final String id = fileid.getVersionId(file);
        if(null == id) {
            return PathAttributes.EMPTY;
        }
        B2FileResponse response;
        try {
            response = this.findFileInfo(file, id);
        }
        catch(NotfoundException e) {
            // Try with reset cache after failure finding node id
            response = this.findFileInfo(file, fileid.getVersionId(file));
        }
        final PathAttributes attr = this.toAttributes(response);
        if(attr.isDuplicate()) {
            // Throw failure if latest version has hide marker set and lookup was without explicit version
            if(StringUtils.isBlank(file.attributes().getVersionId())) {
                if(log.isDebugEnabled()) {
                    log.debug(String.format("Latest version of %s is duplicate", file));
                }
                throw new NotfoundException(file.getAbsolute());
            }
        }
        return attr;
    }
}
@Test
public void testHideMarker() throws Exception {
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path directory = new B2DirectoryFeature(session, fileid).mkdir(new Path(bucket,
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final long timestamp = System.currentTimeMillis();
    final TransferStatus status = new TransferStatus().withModified(timestamp);
    final Path test = new B2TouchFeature(session, fileid).touch(new Path(directory,
            new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), status);
    assertNotNull(status.getResponse().getVersionId());
    assertNotNull(test.attributes().getVersionId());
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(new Path(test).withAttributes(PathAttributes.EMPTY)),
            new DisabledLoginCallback(), new Delete.DisabledCallback());
    final B2AttributesFinderFeature f = new B2AttributesFinderFeature(session, fileid);
    assertEquals(test.attributes(), f.find(test));
    try {
        f.find(new Path(test).withAttributes(PathAttributes.EMPTY));
        fail();
    }
    catch(NotfoundException e) {
        // Expected
    }
    new B2DeleteFeature(session, fileid).delete(new B2ObjectListService(session, fileid).list(directory,
            new DisabledListProgressListener()).toList(), new DisabledLoginCallback(), new Delete.DisabledCallback());
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(directory),
            new DisabledLoginCallback(), new Delete.DisabledCallback());
}
@Override public int getNettyWriteBufferHighWaterMark() { return clientConfig.getPropertyAsInteger(WRITE_BUFFER_HIGH_WATER_MARK, DEFAULT_WRITE_BUFFER_HIGH_WATER_MARK); }
@Test void testGetNettyWriteBufferHighWaterMarkOverride() { clientConfig.set(ConnectionPoolConfigImpl.WRITE_BUFFER_HIGH_WATER_MARK, 40000); assertEquals(40000, connectionPoolConfig.getNettyWriteBufferHighWaterMark()); }
public static int findLevelWithThreadName(Level expectedLevel, String threadName) { int count = 0; List<Log> logList = DubboAppender.logList; for (int i = 0; i < logList.size(); i++) { Log log = logList.get(i); if (log.getLogLevel().equals(expectedLevel) && log.getLogThread().equals(threadName)) { count++; } } return count; }
@Test void testFindLevelWithThreadName() { Log log = mock(Log.class); DubboAppender.logList.add(log); when(log.getLogLevel()).thenReturn(Level.ERROR); when(log.getLogThread()).thenReturn("thread-1"); log = mock(Log.class); DubboAppender.logList.add(log); when(log.getLogLevel()).thenReturn(Level.ERROR); when(log.getLogThread()).thenReturn("thread-2"); assertThat(LogUtil.findLevelWithThreadName(Level.ERROR, "thread-2"), equalTo(1)); }
@Override public String[] split(String text) { boundary.setText(text); List<String> words = new ArrayList<>(); int start = boundary.first(); int end = boundary.next(); while (end != BreakIterator.DONE) { String word = text.substring(start, end).trim(); if (!word.isEmpty()) { words.add(word); } start = end; end = boundary.next(); } return words.toArray(new String[0]); }
@Test public void testSplitSingleQuote() { System.out.println("tokenize single quote"); String text = "String literals can be enclosed in matching single " + "quotes ('). But it's also appearing in contractions such as can't."; String[] expResult = {"String", "literals", "can", "be", "enclosed", "in", "matching", "single", "quotes", "(", "'", ")", ".", "But", "it's", "also", "appearing", "in", "contractions", "such", "as", "can't", "."}; BreakIteratorTokenizer instance = new BreakIteratorTokenizer(); String[] result = instance.split(text); assertEquals(expResult.length, result.length); for (int i = 0; i < result.length; i++) { assertEquals(expResult[i], result[i]); } }
@Override public void open(Configuration parameters) throws Exception { this.rateLimiterTriggeredCounter = getRuntimeContext() .getMetricGroup() .addGroup( TableMaintenanceMetrics.GROUP_KEY, TableMaintenanceMetrics.GROUP_VALUE_DEFAULT) .counter(TableMaintenanceMetrics.RATE_LIMITER_TRIGGERED); this.concurrentRunThrottledCounter = getRuntimeContext() .getMetricGroup() .addGroup( TableMaintenanceMetrics.GROUP_KEY, TableMaintenanceMetrics.GROUP_VALUE_DEFAULT) .counter(TableMaintenanceMetrics.CONCURRENT_RUN_THROTTLED); this.nothingToTriggerCounter = getRuntimeContext() .getMetricGroup() .addGroup( TableMaintenanceMetrics.GROUP_KEY, TableMaintenanceMetrics.GROUP_VALUE_DEFAULT) .counter(TableMaintenanceMetrics.NOTHING_TO_TRIGGER); this.triggerCounters = taskNames.stream() .map( name -> getRuntimeContext() .getMetricGroup() .addGroup(TableMaintenanceMetrics.GROUP_KEY, name) .counter(TableMaintenanceMetrics.TRIGGERED)) .collect(Collectors.toList()); this.nextEvaluationTimeState = getRuntimeContext() .getState(new ValueStateDescriptor<>("triggerManagerNextTriggerTime", Types.LONG)); this.accumulatedChangesState = getRuntimeContext() .getListState( new ListStateDescriptor<>( "triggerManagerAccumulatedChange", TypeInformation.of(TableChange.class))); this.lastTriggerTimesState = getRuntimeContext() .getListState(new ListStateDescriptor<>("triggerManagerLastTriggerTime", Types.LONG)); tableLoader.open(); }
@Test
void testCommitCount() throws Exception {
    TriggerManager manager = manager(sql.tableLoader(TABLE_NAME),
            new TriggerEvaluator.Builder().commitCount(3).build());
    try (KeyedOneInputStreamOperatorTestHarness<Boolean, TableChange, Trigger> testHarness =
            harness(manager)) {
        testHarness.open();
        addEventAndCheckResult(testHarness, TableChange.builder().commitCount(1).build(), 0);
        addEventAndCheckResult(testHarness, TableChange.builder().commitCount(2).build(), 1);
        addEventAndCheckResult(testHarness, TableChange.builder().commitCount(3).build(), 2);
        addEventAndCheckResult(testHarness, TableChange.builder().commitCount(10).build(), 3);
        // No trigger in this case
        addEventAndCheckResult(testHarness, TableChange.builder().commitCount(1).build(), 3);
        addEventAndCheckResult(testHarness, TableChange.builder().commitCount(1).build(), 3);
        addEventAndCheckResult(testHarness, TableChange.builder().commitCount(1).build(), 4);
    }
}
@Override
public CompletableFuture<Map<String, BrokerLookupData>> filterAsync(Map<String, BrokerLookupData> brokers,
        ServiceUnitId serviceUnit, LoadManagerContext context) {
    if (brokers.isEmpty()) {
        return CompletableFuture.completedFuture(brokers);
    }
    brokers.entrySet().removeIf(entry -> {
        BrokerLookupData v = entry.getValue();
        // The load manager class name can be null if the cluster has old version of broker.
        return !Objects.equals(v.getLoadManagerClassName(),
                context.brokerConfiguration().getLoadManagerClassName());
    });
    return CompletableFuture.completedFuture(brokers);
}
@Test public void test() throws BrokerFilterException, ExecutionException, InterruptedException { LoadManagerContext context = getContext(); context.brokerConfiguration().setLoadManagerClassName(ExtensibleLoadManagerImpl.class.getName()); BrokerLoadManagerClassFilter filter = new BrokerLoadManagerClassFilter(); Map<String, BrokerLookupData> originalBrokers = Map.of( "broker1", getLookupData("3.0.0", ExtensibleLoadManagerImpl.class.getName()), "broker2", getLookupData("3.0.0", ExtensibleLoadManagerImpl.class.getName()), "broker3", getLookupData("3.0.0", ModularLoadManagerImpl.class.getName()), "broker4", getLookupData("3.0.0", ModularLoadManagerImpl.class.getName()), "broker5", getLookupData("3.0.0", null) ); Map<String, BrokerLookupData> result = filter.filterAsync(new HashMap<>(originalBrokers), null, context).get(); assertEquals(result, Map.of( "broker1", getLookupData("3.0.0", ExtensibleLoadManagerImpl.class.getName()), "broker2", getLookupData("3.0.0", ExtensibleLoadManagerImpl.class.getName()) )); context.brokerConfiguration().setLoadManagerClassName(ModularLoadManagerImpl.class.getName()); result = filter.filterAsync(new HashMap<>(originalBrokers), null, context).get(); assertEquals(result, Map.of( "broker3", getLookupData("3.0.0", ModularLoadManagerImpl.class.getName()), "broker4", getLookupData("3.0.0", ModularLoadManagerImpl.class.getName()) )); }
@Override
public int run(String[] args) throws Exception {
    YarnConfiguration yarnConf = getConf() == null
            ? new YarnConfiguration() : new YarnConfiguration(getConf());
    boolean isHAEnabled = yarnConf.getBoolean(YarnConfiguration.RM_HA_ENABLED,
            YarnConfiguration.DEFAULT_RM_HA_ENABLED);
    if (args.length < 1) {
        printUsage("", isHAEnabled);
        return -1;
    }
    int exitCode = -1;
    int i = 0;
    String cmd = args[i++];
    exitCode = 0;
    if ("-help".equals(cmd)) {
        if (i < args.length) {
            printUsage(args[i], isHAEnabled);
        } else {
            printHelp("", isHAEnabled);
        }
        return exitCode;
    }
    if (USAGE.containsKey(cmd)) {
        if (isHAEnabled) {
            return super.run(args);
        }
        System.out.println("Cannot run " + cmd + " when ResourceManager HA is not enabled");
        return -1;
    }
    //
    // verify that we have enough command line parameters
    //
    String subClusterId = StringUtils.EMPTY;
    if ("-refreshAdminAcls".equals(cmd) || "-refreshQueues".equals(cmd)
            || "-refreshNodesResources".equals(cmd)
            || "-refreshServiceAcl".equals(cmd)
            || "-refreshUserToGroupsMappings".equals(cmd)
            || "-refreshSuperUserGroupsConfiguration".equals(cmd)
            || "-refreshClusterMaxPriority".equals(cmd)) {
        subClusterId = parseSubClusterId(args, isHAEnabled);
        // If we enable Federation mode, the number of args may be either one or three.
        // Example: -refreshQueues or -refreshQueues -subClusterId SC-1
        if (isYarnFederationEnabled(getConf()) && args.length != 1 && args.length != 3) {
            printUsage(cmd, isHAEnabled);
            return exitCode;
        } else if (!isYarnFederationEnabled(getConf()) && args.length != 1) {
            // If Federation mode is not enabled, then the number of args can only be one.
            // Example: -refreshQueues
            printUsage(cmd, isHAEnabled);
            return exitCode;
        }
    }
    // If it is federation mode, we will print federation mode information
    if (isYarnFederationEnabled(getConf())) {
        System.out.println("Using YARN Federation mode.");
    }
    try {
        if ("-refreshQueues".equals(cmd)) {
            exitCode = refreshQueues(subClusterId);
        } else if ("-refreshNodes".equals(cmd)) {
            exitCode = handleRefreshNodes(args, cmd, isHAEnabled);
        } else if ("-refreshNodesResources".equals(cmd)) {
            exitCode = refreshNodesResources(subClusterId);
        } else if ("-refreshUserToGroupsMappings".equals(cmd)) {
            exitCode = refreshUserToGroupsMappings(subClusterId);
        } else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) {
            exitCode = refreshSuperUserGroupsConfiguration(subClusterId);
        } else if ("-refreshAdminAcls".equals(cmd)) {
            exitCode = refreshAdminAcls(subClusterId);
        } else if ("-refreshServiceAcl".equals(cmd)) {
            exitCode = refreshServiceAcls(subClusterId);
        } else if ("-refreshClusterMaxPriority".equals(cmd)) {
            exitCode = refreshClusterMaxPriority(subClusterId);
        } else if ("-getGroups".equals(cmd)) {
            String[] usernames = Arrays.copyOfRange(args, i, args.length);
            exitCode = getGroups(usernames);
        } else if ("-updateNodeResource".equals(cmd)) {
            exitCode = handleUpdateNodeResource(args, cmd, isHAEnabled, subClusterId);
        } else if ("-addToClusterNodeLabels".equals(cmd)) {
            exitCode = handleAddToClusterNodeLabels(args, cmd, isHAEnabled);
        } else if ("-removeFromClusterNodeLabels".equals(cmd)) {
            exitCode = handleRemoveFromClusterNodeLabels(args, cmd, isHAEnabled);
        } else if ("-replaceLabelsOnNode".equals(cmd)) {
            exitCode = handleReplaceLabelsOnNodes(args, cmd, isHAEnabled);
        } else {
            exitCode = -1;
            System.err.println(cmd.substring(1) + ": Unknown command");
            printUsage("", isHAEnabled);
        }
    } catch (IllegalArgumentException arge) {
        exitCode = -1;
        System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage());
        printUsage(cmd, isHAEnabled);
    } catch (RemoteException e) {
        //
        // This is an error returned by the hadoop server. Print
        // out the first line of the error message, ignore the stack trace.
        exitCode = -1;
        try {
            String[] content;
            content = e.getLocalizedMessage().split("\n");
            System.err.println(cmd.substring(1) + ": " + content[0]);
        } catch (Exception ex) {
            System.err.println(cmd.substring(1) + ": " + ex.getLocalizedMessage());
        }
    } catch (Exception e) {
        exitCode = -1;
        System.err.println(cmd.substring(1) + ": " + e.getLocalizedMessage());
    }
    if (null != localNodeLabelsManager) {
        localNodeLabelsManager.stop();
    }
    return exitCode;
}
@Test
public void testAddToClusterNodeLabels() throws Exception {
    // successfully add labels
    String[] args = {"-addToClusterNodeLabels", "x", "-directlyAccessNodeLabelStore"};
    assertEquals(0, rmAdminCLI.run(args));
    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().containsAll(ImmutableSet.of("x")));
    // no labels, should fail
    args = new String[] {"-addToClusterNodeLabels"};
    assertTrue(0 != rmAdminCLI.run(args));
    // no labels, should fail
    args = new String[] {"-addToClusterNodeLabels", "-directlyAccessNodeLabelStore"};
    assertTrue(0 != rmAdminCLI.run(args));
    // no labels, should fail at client validation
    args = new String[] {"-addToClusterNodeLabels", " "};
    assertTrue(0 != rmAdminCLI.run(args));
    // no labels, should fail at client validation
    args = new String[] {"-addToClusterNodeLabels", " , "};
    assertTrue(0 != rmAdminCLI.run(args));
    // successfully add labels
    args = new String[] {"-addToClusterNodeLabels", ",x,,", "-directlyAccessNodeLabelStore"};
    assertEquals(0, rmAdminCLI.run(args));
    assertTrue(dummyNodeLabelsManager.getClusterNodeLabelNames().containsAll(ImmutableSet.of("x")));
}
private String getEnv(String envName, InterpreterLaunchContext context) { String env = context.getProperties().getProperty(envName); if (StringUtils.isBlank(env)) { env = System.getenv(envName); } if (StringUtils.isBlank(env)) { LOGGER.warn("environment variable: {} is empty", envName); } return env; }
@Test void testYarnClusterMode_2() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); properties.setProperty("property_1", "value_1"); properties.setProperty("spark.master", "yarn"); properties.setProperty("spark.submit.deployMode", "cluster"); properties.setProperty("spark.files", "file_1"); properties.setProperty("spark.jars", "jar_1"); InterpreterOption option = new InterpreterOption(); option.setUserImpersonate(true); InterpreterLaunchContext context = new InterpreterLaunchContext(properties, option, null, "user1", "intpGroupId", "groupId", "spark", "spark", 0, "host"); Path localRepoPath = Paths.get(zConf.getInterpreterLocalRepoPath(), context.getInterpreterSettingId()); FileUtils.deleteDirectory(localRepoPath.toFile()); Files.createDirectories(localRepoPath); Files.createFile(Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar")); InterpreterClient client = launcher.launch(context); assertTrue(client instanceof ExecRemoteInterpreterProcess); try (ExecRemoteInterpreterProcess interpreterProcess = (ExecRemoteInterpreterProcess) client) { assertEquals("spark", interpreterProcess.getInterpreterSettingName()); assertTrue(interpreterProcess.getInterpreterDir().endsWith("/interpreter/spark")); assertTrue(interpreterProcess.getLocalRepoDir().endsWith("/local-repo/groupId")); assertEquals(zConf.getInterpreterRemoteRunnerPath(), interpreterProcess.getInterpreterRunner()); assertTrue(interpreterProcess.getEnv().size() >= 3); assertEquals(sparkHome, interpreterProcess.getEnv().get("SPARK_HOME")); assertEquals("true", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER")); String sparkJars = "jar_1," + Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar").toString() + "," + zeppelinHome + "/interpreter/spark/scala-2.12/spark-scala-2.12-" + Util.getVersion() + ".jar," + zeppelinHome + "/interpreter/zeppelin-interpreter-shaded-" + Util.getVersion() + ".jar"; String sparkrZip = sparkHome + "/R/lib/sparkr.zip#sparkr"; String sparkFiles = "file_1," + zeppelinHome + "/conf/log4j_yarn_cluster.properties"; String expected = "--proxy-user|user1|--conf|spark.yarn.dist.archives=" + sparkrZip + "|--conf|spark.yarn.isPython=true|--conf|spark.app.name=intpGroupId" + "|--conf|spark.yarn.maxAppAttempts=1" + "|--conf|spark.master=yarn" + "|--conf|spark.files=" + sparkFiles + "|--conf|spark.jars=" + sparkJars + "|--conf|spark.submit.deployMode=cluster" + "|--conf|spark.yarn.submit.waitAppCompletion=false"; assertTrue(CollectionUtils.isEqualCollection(Arrays.asList(expected.split("\\|")), Arrays.asList(interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF").split("\\|")))); assertTrue(interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF").startsWith("--proxy-user|user1")); } Files.deleteIfExists(Paths.get(localRepoPath.toAbsolutePath().toString(), "test.jar")); FileUtils.deleteDirectory(localRepoPath.toFile()); }
public static List<Pair<InstrumentSelector, ViewBuilder>> getMetricsView() { ArrayList<Pair<InstrumentSelector, ViewBuilder>> res = new ArrayList<>(); InstrumentSelector providerRpcLatencySelector = InstrumentSelector.builder() .setType(InstrumentType.HISTOGRAM) .setName(HISTOGRAM_PROVIDER_RPC_LATENCY) .build(); InstrumentSelector rpcLatencySelector = InstrumentSelector.builder() .setType(InstrumentType.HISTOGRAM) .setName(HISTOGRAM_API_LATENCY) .build(); ViewBuilder rpcLatencyViewBuilder = View.builder() .setAggregation(Aggregation.explicitBucketHistogram(Arrays.asList(1d, 3d, 5d, 7d, 10d, 100d, 200d, 400d, 600d, 800d, 1d * 1000, 1d * 1500, 1d * 3000))) .setDescription("tiered_store_rpc_latency_view"); InstrumentSelector uploadBufferSizeSelector = InstrumentSelector.builder() .setType(InstrumentType.HISTOGRAM) .setName(HISTOGRAM_UPLOAD_BYTES) .build(); InstrumentSelector downloadBufferSizeSelector = InstrumentSelector.builder() .setType(InstrumentType.HISTOGRAM) .setName(HISTOGRAM_DOWNLOAD_BYTES) .build(); ViewBuilder bufferSizeViewBuilder = View.builder() .setAggregation(Aggregation.explicitBucketHistogram(Arrays.asList(1d * MessageStoreUtil.KB, 10d * MessageStoreUtil.KB, 100d * MessageStoreUtil.KB, 1d * MessageStoreUtil.MB, 10d * MessageStoreUtil.MB, 32d * MessageStoreUtil.MB, 50d * MessageStoreUtil.MB, 100d * MessageStoreUtil.MB))) .setDescription("tiered_store_buffer_size_view"); res.add(new Pair<>(rpcLatencySelector, rpcLatencyViewBuilder)); res.add(new Pair<>(providerRpcLatencySelector, rpcLatencyViewBuilder)); res.add(new Pair<>(uploadBufferSizeSelector, bufferSizeViewBuilder)); res.add(new Pair<>(downloadBufferSizeSelector, bufferSizeViewBuilder)); return res; }
@Test public void getMetricsView() { TieredStoreMetricsManager.getMetricsView(); }
static void verifyAddMissingValues(final List<KiePMMLMiningField> notTargetMiningFields, final PMMLRequestData requestData) { logger.debug("verifyMissingValues {} {}", notTargetMiningFields, requestData); Collection<ParameterInfo> requestParams = requestData.getRequestParams(); notTargetMiningFields .forEach(miningField -> { ParameterInfo parameterInfo = requestParams.stream() .filter(paramInfo -> miningField.getName().equals(paramInfo.getName())) .findFirst() .orElse(null); if (parameterInfo == null) { manageMissingValues(miningField, requestData); } }); }
@Test void verifyAddMissingValuesMissingReturnInvalid() { assertThatExceptionOfType(KiePMMLException.class).isThrownBy(() -> { List<KiePMMLMiningField> miningFields = IntStream.range(0, 3).mapToObj(i -> { DATA_TYPE dataType = DATA_TYPE.values()[i]; return KiePMMLMiningField.builder("FIELD-" + i, null) .withDataType(dataType) .withMissingValueTreatmentMethod(MISSING_VALUE_TREATMENT_METHOD.RETURN_INVALID) .build(); }) .collect(Collectors.toList()); PMMLRequestData pmmlRequestData = new PMMLRequestData("123", "modelName"); PreProcess.verifyAddMissingValues(miningFields, pmmlRequestData); }); }
public List<TypeDescriptor<?>> getArgumentTypes(Method method) { Invokable<?, ?> typedMethod = token.method(method); List<TypeDescriptor<?>> argTypes = Lists.newArrayList(); for (Parameter parameter : typedMethod.getParameters()) { argTypes.add(new SimpleTypeDescriptor<>(parameter.getType())); } return argTypes; }
@Test public void testGetArgumentTypes() throws Exception { Method identity = Id.class.getDeclaredMethod("identity", Object.class); TypeToken<Id<String>> token = new TypeToken<Id<String>>() {}; TypeDescriptor<Id<String>> descriptor = new TypeDescriptor<Id<String>>() {}; assertEquals( token.method(identity).getParameters().get(0).getType().getType(), descriptor.getArgumentTypes(identity).get(0).getType()); TypeToken<Id<List<String>>> genericToken = new TypeToken<Id<List<String>>>() {}; TypeDescriptor<Id<List<String>>> genericDescriptor = new TypeDescriptor<Id<List<String>>>() {}; assertEquals( genericToken.method(identity).getParameters().get(0).getType().getType(), genericDescriptor.getArgumentTypes(identity).get(0).getType()); }
public static <T> RBFNetwork<T> fit(T[] x, int[] y, RBF<T>[] rbf) { return fit(x, y, rbf, false); }
@Test
public void testIris() {
    System.out.println("Iris");
    MathEx.setSeed(19650218); // to get repeatable results.
    ClassificationMetrics metrics = LOOCV.classification(Iris.x, Iris.y,
            (x, y) -> RBFNetwork.fit(x, y, RBF.fit(x, 10)));
    System.out.println("RBF Network: " + metrics);
    assertEquals(0.9667, metrics.accuracy, 1E-4);
    metrics = LOOCV.classification(Iris.x, Iris.y,
            (x, y) -> RBFNetwork.fit(x, y, RBF.fit(x, 10), true));
    System.out.println("Normalized RBF Network: " + metrics);
    assertEquals(0.9733, metrics.accuracy, 1E-4);
}
@GET
@Path("{path:.*}")
@Produces({MediaType.APPLICATION_OCTET_STREAM + "; " + JettyUtils.UTF_8,
    MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8})
public Response get(@PathParam("path") String path, @Context UriInfo uriInfo,
    @QueryParam(OperationParam.NAME) OperationParam op,
    @Context Parameters params, @Context HttpServletRequest request)
    throws IOException, FileSystemAccessException {
  // Restrict access to only GETFILESTATUS and LISTSTATUS in write-only mode
  if ((op.value() != HttpFSFileSystem.Operation.GETFILESTATUS)
      && (op.value() != HttpFSFileSystem.Operation.LISTSTATUS)
      && accessMode == AccessMode.WRITEONLY) {
    return Response.status(Response.Status.FORBIDDEN).build();
  }
  UserGroupInformation user = HttpUserGroupInformation.get();
  Response response;
  path = makeAbsolute(path);
  MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
  MDC.put("hostname", request.getRemoteAddr());
  switch (op.value()) {
  case OPEN: {
    Boolean noRedirect = params.get(NoRedirectParam.NAME, NoRedirectParam.class);
    if (noRedirect) {
      URI redirectURL = createOpenRedirectionURL(uriInfo);
      final String js = JsonUtil.toJsonString("Location", redirectURL);
      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    } else {
      // Invoking the command directly using an unmanaged FileSystem that is
      // released by the FileSystemReleaseFilter
      final FSOperations.FSOpen command = new FSOperations.FSOpen(path);
      final FileSystem fs = createFileSystem(user);
      InputStream is = null;
      UserGroupInformation ugi = UserGroupInformation
          .createProxyUser(user.getShortUserName(), UserGroupInformation.getLoginUser());
      try {
        is = ugi.doAs(new PrivilegedExceptionAction<InputStream>() {
          @Override
          public InputStream run() throws Exception {
            return command.execute(fs);
          }
        });
      } catch (InterruptedException ie) {
        LOG.warn("Open interrupted.", ie);
        Thread.currentThread().interrupt();
      }
      Long offset = params.get(OffsetParam.NAME, OffsetParam.class);
      Long len = params.get(LenParam.NAME, LenParam.class);
      AUDIT_LOG.info("[{}] offset [{}] len [{}]", new Object[] {path, offset, len});
      InputStreamEntity entity = new InputStreamEntity(is, offset, len);
      response = Response.ok(entity).type(MediaType.APPLICATION_OCTET_STREAM).build();
    }
    break;
  }
  case GETFILESTATUS: {
    FSOperations.FSFileStatus command = new FSOperations.FSFileStatus(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTSTATUS: {
    String filter = params.get(FilterParam.NAME, FilterParam.class);
    FSOperations.FSListStatus command = new FSOperations.FSListStatus(path, filter);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}] filter [{}]", path, (filter != null) ? filter : "-");
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETHOMEDIRECTORY: {
    enforceRootPath(op.value(), path);
    FSOperations.FSHomeDir command = new FSOperations.FSHomeDir();
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("Home Directory for [{}]", user);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case INSTRUMENTATION: {
    enforceRootPath(op.value(), path);
    Groups groups = HttpFSServerWebApp.get().get(Groups.class);
    Set<String> userGroups = groups.getGroupsSet(user.getShortUserName());
    if (!userGroups.contains(HttpFSServerWebApp.get().getAdminGroup())) {
      throw new AccessControlException("User not in HttpFSServer admin group");
    }
    Instrumentation instrumentation = HttpFSServerWebApp.get().get(Instrumentation.class);
    Map snapshot = instrumentation.getSnapshot();
    response = Response.ok(snapshot).build();
    break;
  }
  case GETCONTENTSUMMARY: {
    FSOperations.FSContentSummary command = new FSOperations.FSContentSummary(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("Content summary for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETQUOTAUSAGE: {
    FSOperations.FSQuotaUsage command = new FSOperations.FSQuotaUsage(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("Quota Usage for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETFILECHECKSUM: {
    FSOperations.FSFileChecksum command = new FSOperations.FSFileChecksum(path);
    Boolean noRedirect = params.get(NoRedirectParam.NAME, NoRedirectParam.class);
    AUDIT_LOG.info("[{}]", path);
    if (noRedirect) {
      URI redirectURL = createOpenRedirectionURL(uriInfo);
      final String js = JsonUtil.toJsonString("Location", redirectURL);
      response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    } else {
      Map json = fsExecute(user, command);
      response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    }
    break;
  }
  case GETFILEBLOCKLOCATIONS: {
    long offset = 0;
    long len = Long.MAX_VALUE;
    Long offsetParam = params.get(OffsetParam.NAME, OffsetParam.class);
    Long lenParam = params.get(LenParam.NAME, LenParam.class);
    AUDIT_LOG.info("[{}] offset [{}] len [{}]", path, offsetParam, lenParam);
    if (offsetParam != null && offsetParam > 0) {
      offset = offsetParam;
    }
    if (lenParam != null && lenParam > 0) {
      len = lenParam;
    }
    FSOperations.FSFileBlockLocations command =
        new FSOperations.FSFileBlockLocations(path, offset, len);
    @SuppressWarnings("rawtypes") Map locations = fsExecute(user, command);
    final String json = JsonUtil.toJsonString("BlockLocations", locations);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETACLSTATUS: {
    FSOperations.FSAclStatus command = new FSOperations.FSAclStatus(path);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("ACL status for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETXATTRS: {
    List<String> xattrNames = params.getValues(XAttrNameParam.NAME, XAttrNameParam.class);
    XAttrCodec encoding = params.get(XAttrEncodingParam.NAME, XAttrEncodingParam.class);
    FSOperations.FSGetXAttrs command = new FSOperations.FSGetXAttrs(path, xattrNames, encoding);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("XAttrs for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTXATTRS: {
    FSOperations.FSListXAttrs command = new FSOperations.FSListXAttrs(path);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("XAttr names for [{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case LISTSTATUS_BATCH: {
    String startAfter = params.get(HttpFSParametersProvider.StartAfterParam.NAME,
        HttpFSParametersProvider.StartAfterParam.class);
    byte[] token = HttpFSUtils.EMPTY_BYTES;
    if (startAfter != null) {
      token = startAfter.getBytes(StandardCharsets.UTF_8);
    }
    FSOperations.FSListStatusBatch command = new FSOperations.FSListStatusBatch(path, token);
    @SuppressWarnings("rawtypes") Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}] token [{}]", path, token);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETTRASHROOT: {
    FSOperations.FSTrashRoot command = new FSOperations.FSTrashRoot(path);
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETALLSTORAGEPOLICY: {
    FSOperations.FSGetAllStoragePolicies command = new FSOperations.FSGetAllStoragePolicies();
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSTORAGEPOLICY: {
    FSOperations.FSGetStoragePolicy command = new FSOperations.FSGetStoragePolicy(path);
    JSONObject json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTDIFF: {
    String oldSnapshotName = params.get(OldSnapshotNameParam.NAME, OldSnapshotNameParam.class);
    String snapshotName = params.get(SnapshotNameParam.NAME, SnapshotNameParam.class);
    FSOperations.FSGetSnapshotDiff command =
        new FSOperations.FSGetSnapshotDiff(path, oldSnapshotName, snapshotName);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTDIFFLISTING: {
    String oldSnapshotName = params.get(OldSnapshotNameParam.NAME, OldSnapshotNameParam.class);
    String snapshotName = params.get(SnapshotNameParam.NAME, SnapshotNameParam.class);
    String snapshotDiffStartPath = params.get(
        HttpFSParametersProvider.SnapshotDiffStartPathParam.NAME,
        HttpFSParametersProvider.SnapshotDiffStartPathParam.class);
    Integer snapshotDiffIndex = params.get(HttpFSParametersProvider.SnapshotDiffIndexParam.NAME,
        HttpFSParametersProvider.SnapshotDiffIndexParam.class);
    FSOperations.FSGetSnapshotDiffListing command = new FSOperations.FSGetSnapshotDiffListing(
        path, oldSnapshotName, snapshotName, snapshotDiffStartPath, snapshotDiffIndex);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTTABLEDIRECTORYLIST: {
    FSOperations.FSGetSnapshottableDirListing command =
        new FSOperations.FSGetSnapshottableDirListing();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSNAPSHOTLIST: {
    FSOperations.FSGetSnapshotListing command = new FSOperations.FSGetSnapshotListing(path);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSERVERDEFAULTS: {
    FSOperations.FSGetServerDefaults command = new FSOperations.FSGetServerDefaults();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case CHECKACCESS: {
    String mode = params.get(FsActionParam.NAME, FsActionParam.class);
    FsActionParam fsparam = new FsActionParam(mode);
    FSOperations.FSAccess command =
        new FSOperations.FSAccess(path, FsAction.getFsAction(fsparam.value()));
    fsExecute(user, command);
    AUDIT_LOG.info("[{}]", "/");
    response = Response.ok().build();
    break;
  }
  case GETECPOLICY: {
    FSOperations.FSGetErasureCodingPolicy command =
        new FSOperations.FSGetErasureCodingPolicy(path);
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETECPOLICIES: {
    FSOperations.FSGetErasureCodingPolicies command =
        new FSOperations.FSGetErasureCodingPolicies();
    String js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETECCODECS: {
    FSOperations.FSGetErasureCodingCodecs command = new FSOperations.FSGetErasureCodingCodecs();
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GET_BLOCK_LOCATIONS: {
    long offset = 0;
    long len = Long.MAX_VALUE;
    Long offsetParam = params.get(OffsetParam.NAME, OffsetParam.class);
    Long lenParam = params.get(LenParam.NAME, LenParam.class);
    AUDIT_LOG.info("[{}] offset [{}] len [{}]", path, offsetParam, lenParam);
    if (offsetParam != null && offsetParam > 0) {
      offset = offsetParam;
    }
    if (lenParam != null && lenParam > 0) {
      len = lenParam;
    }
    FSOperations.FSFileBlockLocationsLegacy command =
        new FSOperations.FSFileBlockLocationsLegacy(path, offset, len);
    @SuppressWarnings("rawtypes") Map locations = fsExecute(user, command);
    final String json = JsonUtil.toJsonString("LocatedBlocks", locations);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETFILELINKSTATUS: {
    FSOperations.FSFileLinkStatus command = new FSOperations.FSFileLinkStatus(path);
    @SuppressWarnings("rawtypes") Map js = fsExecute(user, command);
    AUDIT_LOG.info("[{}]", path);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETSTATUS: {
    FSOperations.FSStatus command = new FSOperations.FSStatus(path);
    @SuppressWarnings("rawtypes") Map js = fsExecute(user, command);
    response = Response.ok(js).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  case GETTRASHROOTS: {
    Boolean allUsers = params.get(AllUsersParam.NAME, AllUsersParam.class);
    FSOperations.FSGetTrashRoots command = new FSOperations.FSGetTrashRoots(allUsers);
    Map json = fsExecute(user, command);
    AUDIT_LOG.info("allUsers [{}]", allUsers);
    response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
    break;
  }
  default: {
    throw new IOException(
        MessageFormat.format("Invalid HTTP GET operation [{0}]", op.value()));
  }
  }
  return response;
}
@Test
@TestDir
@TestJetty
@TestHdfs
public void testECPolicy() throws Exception {
    createHttpFSServer(false, false);
    final ErasureCodingPolicy ecPolicy = SystemErasureCodingPolicies
            .getByID(SystemErasureCodingPolicies.RS_3_2_POLICY_ID);
    final String ecPolicyName = ecPolicy.getName();
    // Create an EC dir and write a test file in it
    final Path ecDir = new Path("/ec");
    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem
            .get(ecDir.toUri(), TestHdfsHelper.getHdfsConf());
    Path ecFile = new Path(ecDir, "ec_file.txt");
    dfs.mkdirs(ecDir);
    dfs.enableErasureCodingPolicy(ecPolicyName);
    dfs.setErasureCodingPolicy(ecDir, ecPolicyName);
    // Create an EC file
    DFSTestUtil.createFile(dfs, ecFile, 1024, (short) 1, 0);
    // Verify that ecPolicy is set in getFileStatus response for ecFile
    String getFileStatusResponse = getStatus(ecFile.toString(), "GETFILESTATUS");
    JSONParser parser = new JSONParser();
    JSONObject jsonObject = (JSONObject) parser.parse(getFileStatusResponse);
    JSONObject details = (JSONObject) jsonObject.get("FileStatus");
    String ecpolicyForECfile = (String) details.get("ecPolicy");
    assertEquals("EC policy for ecFile should match the set EC policy",
            ecpolicyForECfile, ecPolicyName);
    // Verify httpFs getFileStatus with WEBHDFS REST API
    WebHdfsFileSystem httpfsWebHdfs = (WebHdfsFileSystem) FileSystem.get(
            new URI("webhdfs://" + TestJettyHelper.getJettyURL().toURI().getAuthority()),
            TestHdfsHelper.getHdfsConf());
    HdfsFileStatus httpfsFileStatus = (HdfsFileStatus) httpfsWebHdfs.getFileStatus(ecFile);
    assertNotNull(httpfsFileStatus.getErasureCodingPolicy());
}
public Plan validateReservationUpdateRequest( ReservationSystem reservationSystem, ReservationUpdateRequest request) throws YarnException { ReservationId reservationId = request.getReservationId(); Plan plan = validateReservation(reservationSystem, reservationId, AuditConstants.UPDATE_RESERVATION_REQUEST); validateReservationDefinition(reservationId, request.getReservationDefinition(), plan, AuditConstants.UPDATE_RESERVATION_REQUEST); return plan; }
@Test public void testUpdateReservationExceedsGangSize() { ReservationUpdateRequest request = createSimpleReservationUpdateRequest(1, 1, 1, 5, 4); Resource resource = Resource.newInstance(512, 1); when(plan.getTotalCapacity()).thenReturn(resource); Plan plan = null; try { plan = rrValidator.validateReservationUpdateRequest(rSystem, request); Assert.fail(); } catch (YarnException e) { Assert.assertNull(plan); String message = e.getMessage(); Assert.assertTrue(message.startsWith( "The size of the largest gang in the reservation definition")); Assert.assertTrue(message.contains( "exceed the capacity available ")); LOG.info(message); } }
@Override protected SchemaTransform from(Configuration configuration) { return new ExplodeTransform(configuration); }
@Test @Category(NeedsRunner.class) public void testZipProduct() { PCollection<Row> input = pipeline.apply(Create.of(INPUT_ROWS)).setRowSchema(INPUT_SCHEMA); PCollection<Row> exploded = PCollectionRowTuple.of(JavaExplodeTransformProvider.INPUT_ROWS_TAG, input) .apply( new JavaExplodeTransformProvider() .from( JavaExplodeTransformProvider.Configuration.builder() .setFields(ImmutableList.of("a", "c")) .setCrossProduct(false) .build())) .get(JavaExplodeTransformProvider.OUTPUT_ROWS_TAG); PAssert.that(exploded) .containsInAnyOrder( Row.withSchema(OUTPUT_SCHEMA).addValues(1, 1.5, "x").build(), Row.withSchema(OUTPUT_SCHEMA).addValues(2, 1.5, "y").build()); pipeline.run(); }
public ReadyCheckingSideInputReader createReaderForViews( Collection<PCollectionView<?>> newContainedViews) { if (!containedViews.containsAll(newContainedViews)) { Set<PCollectionView<?>> currentlyContained = ImmutableSet.copyOf(containedViews); Set<PCollectionView<?>> newRequested = ImmutableSet.copyOf(newContainedViews); throw new IllegalArgumentException( "Can't create a SideInputReader with unknown views " + Sets.difference(newRequested, currentlyContained)); } return new SideInputContainerSideInputReader(newContainedViews); }
@Test public void withViewsForViewNotInContainerFails() { PCollection<KV<String, String>> input = pipeline.apply(Create.empty(new TypeDescriptor<KV<String, String>>() {})); PCollectionView<Map<String, Iterable<String>>> newView = input.apply(View.asMultimap()); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("unknown views"); thrown.expectMessage(newView.toString()); container.createReaderForViews(ImmutableList.of(newView)); }
@PostConstruct
@SuppressWarnings("PMD.ThreadPoolCreationRule")
public void init() {
    // All servers have jobs that modify usage, idempotent.
    ConfigExecutor.scheduleCorrectUsageTask(() -> {
        LOGGER.info("[capacityManagement] start correct usage");
        StopWatch watch = new StopWatch();
        watch.start();
        correctUsage();
        watch.stop();
        LOGGER.info("[capacityManagement] end correct usage, cost: {}s", watch.getTotalTimeSeconds());
    }, PropertyUtil.getCorrectUsageDelay(), PropertyUtil.getCorrectUsageDelay(), TimeUnit.SECONDS);
}
@Test void testInit() { service.init(); }
@Override public ParameterType<?> parameterType() { return parameterType; }
@Test void can_define_parameter_type_converters_with_var_args() throws NoSuchMethodException { Method method = JavaParameterTypeDefinitionTest.class.getMethod("convert_varargs_capture_group_to_string", String[].class); JavaParameterTypeDefinition definition = new JavaParameterTypeDefinition("", "([^ ]*) ([^ ]*)", method, false, false, false, lookup); registry.defineParameterType(definition.parameterType()); Expression cucumberExpression = new ExpressionFactory(registry) .createExpression("{convert_varargs_capture_group_to_string}"); List<Argument<?>> test = cucumberExpression.match("test test"); assertThat(test.get(0).getValue(), equalTo("convert_varargs_capture_group_to_string")); }
@Override
public boolean equals(Object obj) {
    if (obj == null)
        return false;
    @SuppressWarnings("unchecked")
    final GHPoint3D other = (GHPoint3D) obj;
    if (Double.isNaN(ele))
        // very special case necessary in QueryGraph, asserted via test
        return NumHelper.equalsEps(lat, other.lat) && NumHelper.equalsEps(lon, other.lon);
    else
        return NumHelper.equalsEps(lat, other.lat) && NumHelper.equalsEps(lon, other.lon)
                && NumHelper.equalsEps(ele, other.ele, 0.01);
}
@Test public void testEquals() { GHPoint3D point1 = new GHPoint3D(1, 2, Double.NaN); GHPoint3D point2 = new GHPoint3D(1, 2, Double.NaN); assertEquals(point1, point2); point1 = new GHPoint3D(1, 2, 0); point2 = new GHPoint3D(1, 2, 1); assertNotEquals(point1, point2); point1 = new GHPoint3D(1, 2, 0); point2 = new GHPoint3D(1, 2.1, 0); assertNotEquals(point1, point2); point1 = new GHPoint3D(1, 2.1, 0); point2 = new GHPoint3D(1, 2.1, 0); assertEquals(point1, point2); }
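A JDK-only sketch of the NaN special case the equals override and test exercise: a plain numeric comparison would make any NaN elevation unequal to everything, including itself, so the method falls back to a lat/lon-only comparison:

public class NanEqualityDemo {
    public static void main(String[] args) {
        double ele = Double.NaN;
        System.out.println(ele == ele);        // false: NaN never compares equal
        System.out.println(Double.isNaN(ele)); // true: the guard used above
    }
}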
public static <T> Supplier<T> memoizeConcurrent(Supplier<T> onceSupplier) { return new ConcurrentMemoizingSupplier<>(onceSupplier); }
@Test(expected = NullPointerException.class) public void when_memoizeConcurrentWithNullSupplier_then_exception() { Supplier<Object> supplier = () -> null; memoizeConcurrent(supplier).get(); }
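A hedged sketch of what a concurrent memoizer of this shape typically looks like (not Hazelcast's actual ConcurrentMemoizingSupplier): double-checked caching of the first result, which, consistent with the test above, rejects a null from the delegate:

import java.util.Objects;
import java.util.function.Supplier;

final class MemoizeSketch {
    static <T> Supplier<T> memoizeConcurrent(Supplier<T> delegate) {
        return new Supplier<T>() {
            private volatile T value;

            @Override
            public T get() {
                T v = value;
                if (v == null) {
                    synchronized (this) {
                        v = value;
                        if (v == null) {
                            // throws NullPointerException on a null result, as the test expects
                            v = Objects.requireNonNull(delegate.get());
                            value = v;
                        }
                    }
                }
                return v;
            }
        };
    }
}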
public static Object get(Object object, int index) { if (index < 0) { throw new IndexOutOfBoundsException("Index cannot be negative: " + index); } if (object instanceof Map) { Map map = (Map) object; Iterator iterator = map.entrySet().iterator(); return get(iterator, index); } else if (object instanceof List) { return ((List) object).get(index); } else if (object instanceof Object[]) { return ((Object[]) object)[index]; } else if (object instanceof Iterator) { Iterator it = (Iterator) object; while (it.hasNext()) { index--; if (index == -1) { return it.next(); } else { it.next(); } } throw new IndexOutOfBoundsException("Entry does not exist: " + index); } else if (object instanceof Collection) { Iterator iterator = ((Collection) object).iterator(); return get(iterator, index); } else if (object instanceof Enumeration) { Enumeration it = (Enumeration) object; while (it.hasMoreElements()) { index--; if (index == -1) { return it.nextElement(); } else { it.nextElement(); } } throw new IndexOutOfBoundsException("Entry does not exist: " + index); } else if (object == null) { throw new IllegalArgumentException("Unsupported object type: null"); } else { try { return Array.get(object, index); } catch (IllegalArgumentException ex) { throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName()); } } }
@Test void testGetMap3() { Map<String, String> map1 = new LinkedHashMap(1); Map<String, String> map2 = new LinkedHashMap(2); map1.put("key", "value"); map2.put("key1", "value1"); map2.put("key2", "value2"); Iterator<Map.Entry<String, String>> iter = map1.entrySet().iterator(); assertEquals(iter.next(), CollectionUtils.get(map1, 0)); Iterator<Map.Entry<String, String>> iter2 = map2.entrySet().iterator(); iter2.next(); Map.Entry<String, String> second = iter2.next(); assertEquals(second, CollectionUtils.get(map2, 1)); }
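For Maps, the accessor above delegates to the Iterator branch over entrySet(); a self-contained walk-through of that countdown logic (JDK only):

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class IndexedGetDemo {
    public static void main(String[] args) {
        Map<String, String> map = new LinkedHashMap<>();
        map.put("key1", "value1");
        map.put("key2", "value2");
        // Count down to index 1 over entrySet(), exactly as the Iterator branch does.
        int index = 1;
        Map.Entry<String, String> entry = null;
        for (Iterator<Map.Entry<String, String>> it = map.entrySet().iterator(); it.hasNext(); ) {
            entry = it.next();
            if (index-- == 0) {
                break;
            }
        }
        System.out.println(entry); // key2=value2, matching testGetMap3
    }
}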
public static String calculateTypeName(CompilationUnit compilationUnit, FullyQualifiedJavaType fqjt) {
    if (fqjt.isArray()) {
        // if array, then calculate the name of the base (non-array) type
        // then add the array indicators back in
        String fqn = fqjt.getFullyQualifiedName();
        String typeName = calculateTypeName(compilationUnit,
                new FullyQualifiedJavaType(fqn.substring(0, fqn.indexOf('['))));
        return typeName + fqn.substring(fqn.indexOf('['));
    }
    if (!fqjt.getTypeArguments().isEmpty()) {
        return calculateParameterizedTypeName(compilationUnit, fqjt);
    }
    if (compilationUnit == null
            || typeDoesNotRequireImport(fqjt)
            || typeIsInSamePackage(compilationUnit, fqjt)
            || typeIsAlreadyImported(compilationUnit, fqjt)) {
        return fqjt.getShortName();
    } else {
        return fqjt.getFullyQualifiedName();
    }
}
@Test void testArray() { Interface interfaze = new Interface(new FullyQualifiedJavaType("com.foo.UserMapper")); interfaze.addImportedType(new FullyQualifiedJavaType("java.math.BigDecimal[]")); FullyQualifiedJavaType fqjt = new FullyQualifiedJavaType("java.math.BigDecimal[]"); assertEquals("BigDecimal[]", JavaDomUtils.calculateTypeName(interfaze, fqjt)); }
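A small standalone illustration of the array branch above: peel the "[]" suffix off, shorten the base name, then re-append the suffix (string handling only; no MyBatis generator types involved):

public class ArrayTypeNameDemo {
    public static void main(String[] args) {
        String fqn = "java.math.BigDecimal[]";
        String base = fqn.substring(0, fqn.indexOf('['));            // java.math.BigDecimal
        String suffix = fqn.substring(fqn.indexOf('['));             // []
        String shortName = base.substring(base.lastIndexOf('.') + 1);
        System.out.println(shortName + suffix);                      // BigDecimal[]
    }
}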
public static int getTag(byte[] raw) { try (final Asn1InputStream is = new Asn1InputStream(raw)) { return is.readTag(); } }
@Test public void getTagTripleByte() { assertEquals(0x7f8102, Asn1Utils.getTag(new byte[] { 0x7f, (byte) 0x81, 02, 0})); }
public void execute() { new PathAwareCrawler<>( FormulaExecutorComponentVisitor.newBuilder(metricRepository, measureRepository).buildFor(formulas)) .visit(treeRootHolder.getReportTreeRoot()); }
@Test
public void compute_and_aggregate_duplicated_lines_density_using_lines() {
    addDuplicatedBlock(FILE_1_REF, 2);
    addDuplicatedBlock(FILE_2_REF, 3);
    when(FILE_1_ATTRS.getLines()).thenReturn(10);
    when(FILE_2_ATTRS.getLines()).thenReturn(40);
    // this should have no effect as it's a test file
    when(FILE_5_ATTRS.getLines()).thenReturn(1_000_000);
    underTest.execute();
    assertRawMeasureValue(FILE_1_REF, DUPLICATED_LINES_DENSITY_KEY, 20d);
    assertRawMeasureValue(FILE_2_REF, DUPLICATED_LINES_DENSITY_KEY, 7.5d);
    assertNoRawMeasure(FILE_3_REF, DUPLICATED_LINES_DENSITY_KEY);
    assertNoRawMeasure(FILE_4_REF, DUPLICATED_LINES_DENSITY_KEY);
    assertRawMeasureValue(DIRECTORY_REF, DUPLICATED_LINES_DENSITY_KEY, 10d);
    assertRawMeasureValue(ROOT_REF, DUPLICATED_LINES_DENSITY_KEY, 10d);
}
public static <T> MutationDetector forValueWithCoder(T value, Coder<T> coder) throws CoderException {
    if (value == null) {
        return noopMutationDetector();
    } else {
        return new CodedValueMutationDetector<>(value, coder);
    }
}
@Test
public void testImmutableSet() throws Exception {
    Set<Integer> value = Sets.newHashSet(Arrays.asList(1, 2, 3, 4));
    MutationDetector detector =
        MutationDetectors.forValueWithCoder(value, IterableCoder.of(VarIntCoder.of()));
    detector.verifyUnmodified();
}
@Override
public DnsServerAddressStream nameServerAddressStream(String hostname) {
    for (;;) {
        int i = hostname.indexOf('.', 1);
        if (i < 0 || i == hostname.length() - 1) {
            return defaultNameServerAddresses.stream();
        }
        DnsServerAddresses addresses = domainToNameServerStreamMap.get(hostname);
        if (addresses != null) {
            return addresses.stream();
        }
        hostname = hostname.substring(i + 1);
    }
}
@Test
public void defaultLookupShouldReturnResultsIfOnlySingleFileSpecified(@TempDir Path tempDir) throws Exception {
    File f = buildFile(tempDir, "domain linecorp.local\n" +
            "nameserver 127.0.0.2\n" +
            "nameserver 127.0.0.3\n");
    UnixResolverDnsServerAddressStreamProvider p =
        new UnixResolverDnsServerAddressStreamProvider(f, null);
    DnsServerAddressStream stream = p.nameServerAddressStream("somehost");
    assertHostNameEquals("127.0.0.2", stream.next());
    assertHostNameEquals("127.0.0.3", stream.next());
}
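In the test, the single file's nameserver entries become the default stream, which is why "somehost", matching no configured domain, still resolves to 127.0.0.2/127.0.0.3. The method itself walks the hostname label by label until a configured suffix matches; a self-contained sketch of that suffix walk (illustrative types and map, not Netty's):

import java.util.HashMap;
import java.util.Map;

public class DomainSuffixLookupDemo {
    static final Map<String, String> DOMAIN_TO_SERVER = new HashMap<>();
    static {
        DOMAIN_TO_SERVER.put("linecorp.local", "127.0.0.2");
    }

    // Strip leading labels until a configured suffix matches, else fall back to the default.
    static String lookup(String hostname) {
        for (;;) {
            int i = hostname.indexOf('.', 1);
            if (i < 0 || i == hostname.length() - 1) {
                return "default";
            }
            String server = DOMAIN_TO_SERVER.get(hostname);
            if (server != null) {
                return server;
            }
            hostname = hostname.substring(i + 1);
        }
    }

    public static void main(String[] args) {
        System.out.println(lookup("host.linecorp.local")); // 127.0.0.2 (suffix match after one strip)
        System.out.println(lookup("somehost"));            // default (no dot, no configured suffix)
    }
}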
public static Slice truncateToLength(Slice slice, Type type) {
    requireNonNull(type, "type is null");
    if (!isVarcharType(type)) {
        throw new IllegalArgumentException("type must be an instance of VarcharType");
    }
    return truncateToLength(slice, VarcharType.class.cast(type));
}
@Test
public void testTruncateToLength() {
    // Single-byte code points
    assertEquals(truncateToLength(Slices.utf8Slice("abc"), 0), Slices.utf8Slice(""));
    assertEquals(truncateToLength(Slices.utf8Slice("abc"), 1), Slices.utf8Slice("a"));
    assertEquals(truncateToLength(Slices.utf8Slice("abc"), 4), Slices.utf8Slice("abc"));
    assertEquals(truncateToLength(Slices.utf8Slice("abcde"), 5), Slices.utf8Slice("abcde"));
    // 2-byte code points
    assertEquals(truncateToLength(Slices.utf8Slice("абв"), 0), Slices.utf8Slice(""));
    assertEquals(truncateToLength(Slices.utf8Slice("абв"), 1), Slices.utf8Slice("а"));
    assertEquals(truncateToLength(Slices.utf8Slice("абв"), 4), Slices.utf8Slice("абв"));
    assertEquals(truncateToLength(Slices.utf8Slice("абвгд"), 5), Slices.utf8Slice("абвгд"));
    // 4-byte code points
    assertEquals(truncateToLength(Slices.utf8Slice("\uD841\uDF0E\uD841\uDF31\uD841\uDF79\uD843\uDC53\uD843\uDC78"), 0),
        Slices.utf8Slice(""));
    assertEquals(truncateToLength(Slices.utf8Slice("\uD841\uDF0E\uD841\uDF31\uD841\uDF79\uD843\uDC53\uD843\uDC78"), 1),
        Slices.utf8Slice("\uD841\uDF0E"));
    assertEquals(truncateToLength(Slices.utf8Slice("\uD841\uDF0E\uD841\uDF31\uD841\uDF79"), 4),
        Slices.utf8Slice("\uD841\uDF0E\uD841\uDF31\uD841\uDF79"));
    assertEquals(truncateToLength(Slices.utf8Slice("\uD841\uDF0E\uD841\uDF31\uD841\uDF79\uD843\uDC53\uD843\uDC78"), 5),
        Slices.utf8Slice("\uD841\uDF0E\uD841\uDF31\uD841\uDF79\uD843\uDC53\uD843\uDC78"));
    assertEquals(truncateToLength(Slices.utf8Slice("abc"), createVarcharType(1)), Slices.utf8Slice("a"));
    assertEquals(truncateToLength(Slices.utf8Slice("abc"), (Type) createVarcharType(1)), Slices.utf8Slice("a"));
}
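The assertions hinge on lengths being counted in Unicode code points rather than Java chars or UTF-8 bytes, which is why a 4-byte code point (a surrogate pair in Java) survives a length-1 truncation whole. A plain-String sketch of the same rule, independent of the Slice API:

public class CodePointTruncateDemo {
    // Keep at most maxCodePoints code points, never splitting a surrogate pair.
    static String truncate(String s, int maxCodePoints) {
        if (s.codePointCount(0, s.length()) <= maxCodePoints) {
            return s;
        }
        return s.substring(0, s.offsetByCodePoints(0, maxCodePoints));
    }

    public static void main(String[] args) {
        System.out.println(truncate("abc", 1));                      // "a"
        System.out.println(truncate("абв", 1));                      // "а" (2 UTF-8 bytes, one code point)
        System.out.println(truncate("\uD841\uDF0E\uD841\uDF31", 1)); // one 4-byte code point, kept whole
    }
}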
public Optional<Measure> toMeasure(@Nullable LiveMeasureDto measureDto, Metric metric) {
    requireNonNull(metric);
    if (measureDto == null) {
        return Optional.empty();
    }
    Double value = measureDto.getValue();
    String data = measureDto.getDataAsString();
    switch (metric.getType().getValueType()) {
        case INT:
            return toIntegerMeasure(value, data);
        case LONG:
            return toLongMeasure(value, data);
        case DOUBLE:
            return toDoubleMeasure(value, data);
        case BOOLEAN:
            return toBooleanMeasure(value, data);
        case STRING:
            return toStringMeasure(data);
        case LEVEL:
            return toLevelMeasure(data);
        case NO_VALUE:
            return toNoValueMeasure();
        default:
            throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType());
    }
}
@Test
public void toMeasure_returns_no_QualityGateStatus_if_alertStatus_has_data_in_wrong_case_for_Level_Metric() {
    Optional<Measure> measure = underTest.toMeasure(new LiveMeasureDto().setData("waRn"), SOME_STRING_METRIC);
    assertThat(measure).isPresent();
    assertThat(measure.get().hasQualityGateStatus()).isFalse();
}
@Override
public List<String> assignSegment(String segmentName, Map<String, Map<String, String>> currentAssignment,
        Map<InstancePartitionsType, InstancePartitions> instancePartitionsMap) {
    Preconditions.checkState(instancePartitionsMap.size() == 1, "One instance partition type should be provided");
    InstancePartitions instancePartitions = instancePartitionsMap.get(InstancePartitionsType.CONSUMING);
    Preconditions.checkState(instancePartitions != null,
        "Failed to find CONSUMING instance partitions for table: %s", _tableNameWithType);
    _logger.info("Assigning segment: {} with instance partitions: {} for table: {}", segmentName, instancePartitions,
        _tableNameWithType);
    int partitionId = getPartitionId(segmentName);
    List<String> instancesAssigned = assignConsumingSegment(partitionId, instancePartitions);
    Set<String> existingAssignment = getExistingAssignment(partitionId, currentAssignment);
    // Check if the candidate assignment is consistent with existing assignment. Use existing assignment if not.
    if (existingAssignment == null) {
        _logger.info("No existing assignment from idealState, using the one decided by instancePartitions");
    } else if (!isSameAssignment(existingAssignment, instancesAssigned)) {
        _logger.warn("Assignment: {} is inconsistent with idealState: {}, using the one from idealState",
            instancesAssigned, existingAssignment);
        instancesAssigned = new ArrayList<>(existingAssignment);
        if (_controllerMetrics != null) {
            _controllerMetrics.addMeteredTableValue(_tableNameWithType,
                ControllerMeter.CONTROLLER_REALTIME_TABLE_SEGMENT_ASSIGNMENT_MISMATCH, 1L);
        }
    }
    _logger.info("Assigned segment: {} to instances: {} for table: {}", segmentName, instancesAssigned,
        _tableNameWithType);
    return instancesAssigned;
}
@Test
public void testAssignSegment() {
    assertTrue(_segmentAssignment instanceof StrictRealtimeSegmentAssignment);
    Map<InstancePartitionsType, InstancePartitions> onlyConsumingInstancePartitionMap =
        ImmutableMap.of(InstancePartitionsType.CONSUMING, _instancePartitionsMap.get(InstancePartitionsType.CONSUMING));
    int numInstancesPerReplicaGroup = NUM_CONSUMING_INSTANCES / NUM_REPLICAS;
    Map<String, Map<String, String>> currentAssignment = new TreeMap<>();
    // Add segments for partition 0/1/2, but add no segment for partition 3.
    List<String> instancesAssigned;
    for (int segmentId = 0; segmentId < 3; segmentId++) {
        String segmentName = _segments.get(segmentId);
        instancesAssigned =
            _segmentAssignment.assignSegment(segmentName, currentAssignment, onlyConsumingInstancePartitionMap);
        assertEquals(instancesAssigned.size(), NUM_REPLICAS);
        // Segment 0 (partition 0) should be assigned to instance 0, 3, 6
        // Segment 1 (partition 1) should be assigned to instance 1, 4, 7
        // Segment 2 (partition 2) should be assigned to instance 2, 5, 8
        // Following segments would be assigned to those instances if they kept using the same instancePartition:
        // Segment 3 (partition 3) should be assigned to instance 0, 3, 6
        // Segment 4 (partition 0) should be assigned to instance 0, 3, 6
        // Segment 5 (partition 1) should be assigned to instance 1, 4, 7
        // ...
        for (int replicaGroupId = 0; replicaGroupId < NUM_REPLICAS; replicaGroupId++) {
            int partitionId = segmentId % NUM_PARTITIONS;
            int expectedAssignedInstanceId =
                partitionId % numInstancesPerReplicaGroup + replicaGroupId * numInstancesPerReplicaGroup;
            assertEquals(instancesAssigned.get(replicaGroupId), CONSUMING_INSTANCES.get(expectedAssignedInstanceId));
        }
        addToAssignment(currentAssignment, segmentId, instancesAssigned);
    }
    // Use the new instancePartition to assign the new segments below.
    ImmutableMap<InstancePartitionsType, InstancePartitions> newConsumingInstancePartitionMap =
        ImmutableMap.of(InstancePartitionsType.CONSUMING, _newConsumingInstancePartitions);
    // No existing segments for partition 3, so use the assignment decided by the new instancePartition:
    // segment 3 (partition 3) should be assigned to instance new_0, new_3, new_6.
    int segmentId = 3;
    String segmentName = _segments.get(segmentId);
    instancesAssigned =
        _segmentAssignment.assignSegment(segmentName, currentAssignment, newConsumingInstancePartitionMap);
    assertEquals(instancesAssigned,
        Arrays.asList("new_consumingInstance_0", "new_consumingInstance_3", "new_consumingInstance_6"));
    addToAssignment(currentAssignment, segmentId, instancesAssigned);
    // Use the existing assignment for partition 0/1/2, instead of the one decided by the new instancePartition.
    for (segmentId = 4; segmentId < 7; segmentId++) {
        segmentName = _segments.get(segmentId);
        instancesAssigned =
            _segmentAssignment.assignSegment(segmentName, currentAssignment, newConsumingInstancePartitionMap);
        assertEquals(instancesAssigned.size(), NUM_REPLICAS);
        // These segments are assigned according to the assignment from idealState, instead of using new_xxx instances:
        // Segment 4 (partition 0) should be assigned to instance 0, 3, 6
        // Segment 5 (partition 1) should be assigned to instance 1, 4, 7
        // Segment 6 (partition 2) should be assigned to instance 2, 5, 8
        for (int replicaGroupId = 0; replicaGroupId < NUM_REPLICAS; replicaGroupId++) {
            int partitionId = segmentId % NUM_PARTITIONS;
            int expectedAssignedInstanceId =
                partitionId % numInstancesPerReplicaGroup + replicaGroupId * numInstancesPerReplicaGroup;
            assertEquals(instancesAssigned.get(replicaGroupId), CONSUMING_INSTANCES.get(expectedAssignedInstanceId));
        }
        addToAssignment(currentAssignment, segmentId, instancesAssigned);
    }
}
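The expected instance ids in both assertion loops come from one formula: each replica group holds numInstancesPerReplicaGroup consecutive instances, and the partition picks one within each group by modulo. A small sketch of that arithmetic (the 9-instance / 3-replica / 4-partition constants mirror this test's setup by assumption):

public class ReplicaGroupAssignmentDemo {
    static final int NUM_INSTANCES = 9;
    static final int NUM_REPLICAS = 3;
    static final int NUM_PARTITIONS = 4;

    public static void main(String[] args) {
        int instancesPerReplicaGroup = NUM_INSTANCES / NUM_REPLICAS; // 3
        for (int segmentId = 0; segmentId < 3; segmentId++) {
            int partitionId = segmentId % NUM_PARTITIONS;
            StringBuilder assigned = new StringBuilder();
            for (int replicaGroupId = 0; replicaGroupId < NUM_REPLICAS; replicaGroupId++) {
                int instanceId = partitionId % instancesPerReplicaGroup
                    + replicaGroupId * instancesPerReplicaGroup;
                assigned.append(instanceId).append(' ');
            }
            // segment 0 -> 0 3 6, segment 1 -> 1 4 7, segment 2 -> 2 5 8
            System.out.println("segment " + segmentId + " -> " + assigned.toString().trim());
        }
    }
}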
@Override
public HttpResponse send(HttpRequest httpRequest) throws IOException {
    return send(httpRequest, null);
}
@Test
public void send_whenGetRequest_returnsExpectedHttpResponse() throws IOException {
    String responseBody = "test response";
    mockWebServer.enqueue(
        new MockResponse()
            .setResponseCode(HttpStatus.OK.code())
            .setHeader(CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString())
            .setBody(responseBody));
    mockWebServer.start();
    String requestUrl = mockWebServer.url("/test/get").toString();
    HttpResponse response = httpClient.send(get(requestUrl).withEmptyHeaders().build());
    assertThat(response)
        .isEqualTo(
            HttpResponse.builder()
                .setStatus(HttpStatus.OK)
                .setHeaders(
                    HttpHeaders.builder()
                        .addHeader(CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString())
                        // MockWebServer always adds this response header.
                        .addHeader(CONTENT_LENGTH, String.valueOf(responseBody.length()))
                        .build())
                .setBodyBytes(ByteString.copyFrom(responseBody, UTF_8))
                .setResponseUrl(HttpUrl.parse(requestUrl))
                .build());
}
@Override
String getInterfaceName(Invoker invoker, String prefix) {
    return DubboUtils.getInterfaceName(invoker, prefix);
}
@Test
public void testDegradeSync() {
    try (MockedStatic<TimeUtil> mocked = super.mockTimeUtil()) {
        setCurrentMillis(mocked, 1740000000000L);
        Invocation invocation = DubboTestUtil.getDefaultMockInvocationOne();
        Invoker invoker = DubboTestUtil.getDefaultMockInvoker();
        initDegradeRule(DubboUtils.getInterfaceName(invoker));
        Result result = invokeDubboRpc(false, invoker, invocation);
        verifyInvocationStructureForCallFinish(invoker, invocation);
        assertEquals("normal", result.getValue());
        // increment the clusterNode's exception count to trigger the fallback
        for (int i = 0; i < 5; i++) {
            invokeDubboRpc(true, invoker, invocation);
            verifyInvocationStructureForCallFinish(invoker, invocation);
        }
        Result result2 = invokeDubboRpc(false, invoker, invocation);
        assertEquals("fallback", result2.getValue());
        // sleep 1000 ms so the exception statistics reset
        sleep(mocked, 1000);
        Result result3 = invokeDubboRpc(false, invoker, invocation);
        assertEquals("normal", result3.getValue());
        Context context = ContextUtil.getContext();
        assertNull(context);
    }
}
@Override
public synchronized void putConnectorConfig(String connName, final Map<String, String> config,
        boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) {
    putConnectorConfig(connName, config, null, allowReplace, callback);
}
@Test
public void testPutConnectorConfig() throws Exception {
    initialize(true);
    Map<String, String> connConfig = connectorConfig(SourceSink.SOURCE);
    Map<String, String> newConnConfig = new HashMap<>(connConfig);
    newConnConfig.put("foo", "bar");
    Callback<Map<String, String>> connectorConfigCb = mock(Callback.class);
    expectAdd(SourceSink.SOURCE, false);
    expectConfigValidation(SourceSink.SOURCE, connConfig, newConnConfig);
    // Should get first config
    doNothing().when(connectorConfigCb).onCompletion(null, connConfig);
    // Update config, which requires stopping and restarting
    doNothing().when(worker).stopAndAwaitConnector(CONNECTOR_NAME);
    final ArgumentCaptor<Map<String, String>> capturedConfig = ArgumentCaptor.forClass(Map.class);
    final ArgumentCaptor<Callback<TargetState>> onStart = ArgumentCaptor.forClass(Callback.class);
    doAnswer(invocation -> {
        onStart.getValue().onCompletion(null, TargetState.STARTED);
        return true;
    }).when(worker).startConnector(eq(CONNECTOR_NAME), capturedConfig.capture(), any(), eq(herder),
        eq(TargetState.STARTED), onStart.capture());
    ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0);
    // Generate same task config, but from different connector config, resulting
    // in task restarts
    when(worker.connectorTaskConfigs(CONNECTOR_NAME, new SourceConnectorConfig(plugins, newConnConfig, true)))
        .thenReturn(singletonList(taskConfig(SourceSink.SOURCE)));
    doNothing().when(worker).stopAndAwaitTasks(Collections.singletonList(taskId));
    doNothing().when(statusBackingStore).put(new TaskStatus(taskId, TaskStatus.State.DESTROYED, WORKER_ID, 0));
    when(worker.startSourceTask(eq(taskId), any(), eq(newConnConfig), eq(taskConfig(SourceSink.SOURCE)), eq(herder),
        eq(TargetState.STARTED))).thenReturn(true);
    herder.putConnectorConfig(CONNECTOR_NAME, connConfig, false, createCallback);
    Herder.Created<ConnectorInfo> connectorInfo = createCallback.get(WAIT_TIME_MS, TimeUnit.MILLISECONDS);
    assertEquals(createdInfo(SourceSink.SOURCE), connectorInfo.result());
    herder.connectorConfig(CONNECTOR_NAME, connectorConfigCb);
    FutureCallback<Herder.Created<ConnectorInfo>> reconfigureCallback = new FutureCallback<>();
    doNothing().when(connectorConfigCb).onCompletion(null, newConnConfig);
    herder.putConnectorConfig(CONNECTOR_NAME, newConnConfig, true, reconfigureCallback);
    Herder.Created<ConnectorInfo> newConnectorInfo = reconfigureCallback.get(1000L, TimeUnit.SECONDS);
    ConnectorInfo newConnInfo = new ConnectorInfo(CONNECTOR_NAME, newConnConfig,
        singletonList(new ConnectorTaskId(CONNECTOR_NAME, 0)), ConnectorType.SOURCE);
    assertEquals(newConnInfo, newConnectorInfo.result());
    assertEquals("bar", capturedConfig.getValue().get("foo"));
    herder.connectorConfig(CONNECTOR_NAME, connectorConfigCb);
    verifyNoMoreInteractions(connectorConfigCb);
}
public static KeyValueBytesStoreSupplier inMemoryKeyValueStore(final String name) {
    Objects.requireNonNull(name, "name cannot be null");
    return new InMemoryKeyValueBytesStoreSupplier(name);
}
@Test
public void shouldThrowIfInMemoryKeyValueStoreNameIsNull() {
    final Exception e = assertThrows(NullPointerException.class, () -> Stores.inMemoryKeyValueStore(null));
    assertEquals("name cannot be null", e.getMessage());
}
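A short usage sketch of where such a supplier fits in the standard Kafka Streams store-builder API (the store name and serdes here are illustrative, not from the test):

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;

public class InMemoryStoreDemo {
    public static void main(String[] args) {
        // The supplier only names and instantiates the underlying in-memory bytes store;
        // the builder layers typed serdes on top of it.
        StoreBuilder<KeyValueStore<String, Long>> builder =
            Stores.keyValueStoreBuilder(
                Stores.inMemoryKeyValueStore("counts-store"),
                Serdes.String(),
                Serdes.Long());
        KeyValueStore<String, Long> store = builder.build();
        System.out.println(store.name()); // "counts-store"
    }
}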
public void write(ImageWriter writer, ImageWriterOptions options) {
    if (options.metadataVersion().isScramSupported()) {
        for (Entry<ScramMechanism, Map<String, ScramCredentialData>> mechanismEntry : mechanisms.entrySet()) {
            for (Entry<String, ScramCredentialData> userEntry : mechanismEntry.getValue().entrySet()) {
                writer.write(0, userEntry.getValue().toRecord(userEntry.getKey(), mechanismEntry.getKey()));
            }
        }
    } else {
        boolean isEmpty = true;
        StringBuilder scramImageString = new StringBuilder("ScramImage({");
        for (Entry<ScramMechanism, Map<String, ScramCredentialData>> mechanismEntry : mechanisms.entrySet()) {
            if (!mechanismEntry.getValue().isEmpty()) {
                scramImageString.append(mechanismEntry.getKey()).append(":");
                List<String> users = new ArrayList<>(mechanismEntry.getValue().keySet());
                scramImageString.append(String.join(", ", users));
                scramImageString.append("},{");
                isEmpty = false;
            }
        }
        if (!isEmpty) {
            scramImageString.append("})");
            options.handleLoss(scramImageString.toString());
        }
    }
}
@Test
public void testImage1withInvalidIBP() {
    ImageWriterOptions imageWriterOptions =
        new ImageWriterOptions.Builder().setMetadataVersion(MetadataVersion.IBP_3_4_IV0).build();
    RecordListWriter writer = new RecordListWriter();
    try {
        IMAGE1.write(writer, imageWriterOptions);
        fail("expected exception writing IMAGE with SCRAM records for MetadataVersion.IBP_3_4_IV0");
    } catch (Exception expected) {
        // ignore, expected
    }
}
List<GSBlobIdentifier> getComponentBlobIds(GSFileSystemOptions options) {
    String temporaryBucketName = BlobUtils.getTemporaryBucketName(finalBlobIdentifier, options);
    List<GSBlobIdentifier> componentBlobIdentifiers =
        componentObjectIds.stream()
            .map(temporaryObjectId ->
                options.isFileSinkEntropyEnabled()
                    ? BlobUtils.getTemporaryObjectNameWithEntropy(finalBlobIdentifier, temporaryObjectId)
                    : BlobUtils.getTemporaryObjectName(finalBlobIdentifier, temporaryObjectId))
            .map(temporaryObjectName -> new GSBlobIdentifier(temporaryBucketName, temporaryObjectName))
            .collect(Collectors.toList());
    LOGGER.trace(
        "Resolved component blob identifiers for blob {}: {}",
        finalBlobIdentifier,
        componentBlobIdentifiers);
    return componentBlobIdentifiers;
}
@Test
public void shouldGetComponentBlobIdsWithEntropy() {
    // configure options; if this test configuration has a temporary bucket name, set it
    Configuration flinkConfig = new Configuration();
    if (temporaryBucketName != null) {
        flinkConfig.set(GSFileSystemOptions.WRITER_TEMPORARY_BUCKET_NAME, temporaryBucketName);
    }
    // enable filesink entropy
    flinkConfig.set(GSFileSystemOptions.ENABLE_FILESINK_ENTROPY, Boolean.TRUE);
    GSFileSystemOptions options = new GSFileSystemOptions(flinkConfig);
    GSCommitRecoverable commitRecoverable = new GSCommitRecoverable(blobIdentifier, componentObjectIds);
    List<GSBlobIdentifier> componentBlobIdentifiers = commitRecoverable.getComponentBlobIds(options);
    for (int i = 0; i < componentObjectIds.size(); i++) {
        UUID componentObjectId = componentObjectIds.get(i);
        GSBlobIdentifier componentBlobIdentifier = componentBlobIdentifiers.get(i);
        // if a temporary bucket is specified in options, the component blob identifier
        // should be in this bucket; otherwise, it should be in the bucket with the final blob
        assertEquals(
            temporaryBucketName == null ? blobIdentifier.bucketName : temporaryBucketName,
            componentBlobIdentifier.bucketName);
        // make sure the name is what is expected
        String expectedObjectName =
            String.format(
                "%s.inprogress/%s/%s/%s",
                componentObjectId,
                blobIdentifier.bucketName,
                blobIdentifier.objectName,
                componentObjectId);
        assertEquals(expectedObjectName, componentBlobIdentifier.objectName);
    }
}
public RetryableException(final String message, final Throwable cause, final Date retryAfter,
        final ShenyuRequest request) {
    super(message, cause);
    this.httpMethod = request.getHttpMethod();
    this.retryAfter = Optional.ofNullable(retryAfter).map(Date::getTime).orElse(null);
}
@Test
public void retryableExceptionTest() {
    Assert.assertNotNull(retryableException);
}
@Override
public RouteContext route(final ShardingRule shardingRule) {
    RouteContext result = new RouteContext();
    Collection<DataNode> dataNodes = getDataNodes(shardingRule, shardingRule.getShardingTable(logicTableName));
    result.getOriginalDataNodes().addAll(originalDataNodes);
    for (DataNode each : dataNodes) {
        result.getRouteUnits().add(new RouteUnit(
            new RouteMapper(each.getDataSourceName(), each.getDataSourceName()),
            Collections.singleton(new RouteMapper(logicTableName, each.getTableName()))));
    }
    return result;
}
@Test
void assertRouteByMixedWithHintTableOnly() {
    SQLStatementContext sqlStatementContext = mock(SQLStatementContext.class,
        withSettings().extraInterfaces(TableAvailable.class).defaultAnswer(RETURNS_DEEP_STUBS));
    when(((TableAvailable) sqlStatementContext).getTablesContext().getTableNames())
        .thenReturn(Collections.singleton("t_hint_table_test"));
    ShardingStandardRoutingEngine standardRoutingEngine = createShardingStandardRoutingEngine("t_hint_table_test",
        new ShardingConditions(Collections.emptyList(), sqlStatementContext, mock(ShardingRule.class)),
        sqlStatementContext, new HintValueContext());
    HintManager hintManager = HintManager.getInstance();
    hintManager.addTableShardingValue("t_hint_table_test", 1);
    RouteContext routeContext =
        standardRoutingEngine.route(ShardingRoutingEngineFixtureBuilder.createMixedShardingRule());
    List<RouteUnit> routeUnits = new ArrayList<>(routeContext.getRouteUnits());
    assertThat(routeContext.getRouteUnits().size(), is(2));
    assertThat(routeUnits.get(0).getDataSourceMapper().getActualName(), is("ds_0"));
    assertThat(routeUnits.get(0).getTableMappers().size(), is(1));
    assertThat(routeUnits.get(0).getTableMappers().iterator().next().getActualName(), is("t_hint_table_test_1"));
    assertThat(routeUnits.get(0).getTableMappers().iterator().next().getLogicName(), is("t_hint_table_test"));
    assertThat(routeUnits.get(1).getDataSourceMapper().getActualName(), is("ds_1"));
    assertThat(routeUnits.get(1).getTableMappers().size(), is(1));
    assertThat(routeUnits.get(1).getTableMappers().iterator().next().getActualName(), is("t_hint_table_test_1"));
    assertThat(routeUnits.get(1).getTableMappers().iterator().next().getLogicName(), is("t_hint_table_test"));
}
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
    int lastRuleWasSatisfiedAfterBars = 0;
    int startIndex = index;
    if (!initialRule.isSatisfied(index, tradingRecord)) {
        traceIsSatisfied(index, false);
        return false;
    }
    traceIsSatisfied(index, true);
    for (ChainLink link : rulesInChain) {
        boolean satisfiedWithinThreshold = false;
        startIndex = startIndex - lastRuleWasSatisfiedAfterBars;
        lastRuleWasSatisfiedAfterBars = 0;
        for (int i = 0; i <= link.getThreshold(); i++) {
            int resultingIndex = startIndex - i;
            if (resultingIndex < 0) {
                break;
            }
            satisfiedWithinThreshold = link.getRule().isSatisfied(resultingIndex, tradingRecord);
            if (satisfiedWithinThreshold) {
                break;
            }
            lastRuleWasSatisfiedAfterBars++;
        }
        if (!satisfiedWithinThreshold) {
            traceIsSatisfied(index, false);
            return false;
        }
    }
    traceIsSatisfied(index, true);
    return true;
}
@Test
public void isSatisfied() {
    assertFalse(chainRule.isSatisfied(0));
    assertTrue(chainRule.isSatisfied(4));
    assertTrue(chainRule.isSatisfied(6));
    assertFalse(chainRule.isSatisfied(7));
}
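A self-contained sketch of the chaining semantics (illustrative types, not ta4j's): after the initial rule fires at the queried index, each link must have fired within its threshold of bars before the point where the previous rule in the chain fired, and that hit becomes the reference point for the next link:

import java.util.Set;

public class ChainRuleDemo {
    interface Rule {
        boolean isSatisfied(int index);
    }

    // A link: a rule plus how many bars back from the previous hit it may have fired.
    record Link(Rule rule, int threshold) {}

    static boolean chainSatisfied(Rule initial, int index, Link... links) {
        if (!initial.isSatisfied(index)) {
            return false;
        }
        int start = index;
        for (Link link : links) {
            boolean hit = false;
            for (int back = 0; back <= link.threshold(); back++) {
                int at = start - back;
                if (at < 0) {
                    break;
                }
                if (link.rule().isSatisfied(at)) {
                    start = at; // the next link counts back from here
                    hit = true;
                    break;
                }
            }
            if (!hit) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Rule a = i -> Set.of(4, 6).contains(i); // initial rule
        Rule b = i -> Set.of(2, 5).contains(i); // must fire at most 2 bars earlier
        System.out.println(chainSatisfied(a, 4, new Link(b, 2))); // true  (b fired at 2)
        System.out.println(chainSatisfied(a, 6, new Link(b, 2))); // true  (b fired at 5)
        System.out.println(chainSatisfied(a, 7, new Link(b, 2))); // false (a not satisfied at 7)
    }
}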
@VisibleForTesting
PlanNodeStatsEstimate addJoinComplementStats(
        PlanNodeStatsEstimate sourceStats,
        PlanNodeStatsEstimate innerJoinStats,
        PlanNodeStatsEstimate joinComplementStats) {
    double innerJoinRowCount = innerJoinStats.getOutputRowCount();
    double joinComplementRowCount = joinComplementStats.getOutputRowCount();
    if (joinComplementRowCount == 0 || joinComplementStats.equals(PlanNodeStatsEstimate.unknown())) {
        return innerJoinStats;
    }
    double outputRowCount = innerJoinRowCount + joinComplementRowCount;
    PlanNodeStatsEstimate.Builder outputStats = PlanNodeStatsEstimate.buildFrom(innerJoinStats);
    outputStats.setOutputRowCount(outputRowCount);
    for (VariableReferenceExpression variable : joinComplementStats.getVariablesWithKnownStatistics()) {
        VariableStatsEstimate leftSymbolStats = sourceStats.getVariableStatistics(variable);
        VariableStatsEstimate innerJoinSymbolStats = innerJoinStats.getVariableStatistics(variable);
        VariableStatsEstimate joinComplementSymbolStats = joinComplementStats.getVariableStatistics(variable);
        // weighted average
        double newNullsFraction = (innerJoinSymbolStats.getNullsFraction() * innerJoinRowCount
            + joinComplementSymbolStats.getNullsFraction() * joinComplementRowCount) / outputRowCount;
        outputStats.addVariableStatistics(variable, VariableStatsEstimate.buildFrom(innerJoinSymbolStats)
            // in outer join low value, high value and NDVs of outer side columns are preserved
            .setLowValue(leftSymbolStats.getLowValue())
            .setHighValue(leftSymbolStats.getHighValue())
            .setDistinctValuesCount(leftSymbolStats.getDistinctValuesCount())
            .setNullsFraction(newNullsFraction)
            .build());
    }
    // add nulls to columns that don't exist in right stats
    for (VariableReferenceExpression variable : difference(innerJoinStats.getVariablesWithKnownStatistics(),
            joinComplementStats.getVariablesWithKnownStatistics())) {
        VariableStatsEstimate innerJoinSymbolStats = innerJoinStats.getVariableStatistics(variable);
        double newNullsFraction = (innerJoinSymbolStats.getNullsFraction() * innerJoinRowCount
            + joinComplementRowCount) / outputRowCount;
        outputStats.addVariableStatistics(variable,
            innerJoinSymbolStats.mapNullsFraction(nullsFraction -> newNullsFraction));
    }
    return outputStats.build();
}
@Test
public void testAddJoinComplementStats() {
    double statsToAddNdv = 5;
    PlanNodeStatsEstimate statsToAdd = planNodeStats(RIGHT_ROWS_COUNT,
        variableStatistics(LEFT_JOIN_COLUMN, 0.0, 5.0, 0.2, statsToAddNdv));
    PlanNodeStatsEstimate addedStats = planNodeStats(TOTAL_ROWS_COUNT,
        variableStatistics(LEFT_JOIN_COLUMN, 0.0, 20.0,
            (LEFT_ROWS_COUNT * LEFT_JOIN_COLUMN_NULLS + RIGHT_ROWS_COUNT * 0.2) / TOTAL_ROWS_COUNT,
            LEFT_JOIN_COLUMN_NDV),
        variableStatistics(LEFT_OTHER_COLUMN, 42, 42,
            (0.42 * LEFT_ROWS_COUNT + RIGHT_ROWS_COUNT) / TOTAL_ROWS_COUNT, 1));
    assertThat(JOIN_STATS_RULE.addJoinComplementStats(
            LEFT_STATS,
            LEFT_STATS,
            statsToAdd))
        .equalTo(addedStats);
}
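Both expected nulls fractions in addedStats are row-weighted averages of the two inputs, with a column missing from the complement side contributing all-null rows (fraction 1.0), as the second loop of the method computes. A small arithmetic sketch with illustrative row counts (the test's LEFT/RIGHT_ROWS_COUNT constants are defined elsewhere and assumed here):

public class JoinComplementNullsDemo {
    // Row-count-weighted average of two nulls fractions, matching the estimator above.
    static double weightedNulls(double innerNulls, double innerRows,
                                double complementNulls, double complementRows) {
        return (innerNulls * innerRows + complementNulls * complementRows)
            / (innerRows + complementRows);
    }

    public static void main(String[] args) {
        // Illustrative numbers only, not the test's constants.
        double innerRows = 500;
        double complementRows = 1000;
        // Join column known on both sides: ordinary weighted average.
        System.out.println(weightedNulls(0.1, innerRows, 0.2, complementRows));
        // Column absent from the complement side: its complement rows are all null.
        System.out.println(weightedNulls(0.42, innerRows, 1.0, complementRows));
    }
}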