focal_method: string, lengths 13 to 60.9k
test_case: string, lengths 25 to 109k
public static <T> RetryOperator<T> of(Retry retry) { return new RetryOperator<>(retry); }
@Test public void retryOnResultFailAfterMaxAttemptsWithExceptionUsingMono() { RetryConfig config = RetryConfig.<String>custom() .retryOnResult("retry"::equals) .waitDuration(Duration.ofMillis(10)) .maxAttempts(3) .failAfterMaxAttempts(true) .build(); Retry retry = Retry.of("testName", config); given(helloWorldService.returnHelloWorld()) .willReturn("retry"); StepVerifier.create(Mono.fromCallable(helloWorldService::returnHelloWorld) .transformDeferred(RetryOperator.of(retry))) .expectSubscription() .expectError(MaxRetriesExceededException.class) .verify(Duration.ofSeconds(1)); then(helloWorldService).should(times(3)).returnHelloWorld(); }
@Override public CaseInsensitiveString getName() { if (((name == null) || isEmpty(name.toString())) && packageDefinition != null) { return new CaseInsensitiveString(getPackageDefinition().getRepository().getName() + "_" + packageDefinition.getName()); } else { return name; } }
@Test void shouldReturnNameAsNullIfPackageDefinitionIsNotSet() { assertThat(new PackageMaterial().getName()).isNull(); }
public static CustomWeighting.Parameters createWeightingParameters(CustomModel customModel, EncodedValueLookup lookup) { String key = customModel.toString(); Class<?> clazz = customModel.isInternal() ? INTERNAL_CACHE.get(key) : null; if (CACHE_SIZE > 0 && clazz == null) clazz = CACHE.get(key); if (clazz == null) { clazz = createClazz(customModel, lookup); if (customModel.isInternal()) { INTERNAL_CACHE.put(key, clazz); if (INTERNAL_CACHE.size() > 100) { CACHE.putAll(INTERNAL_CACHE); INTERNAL_CACHE.clear(); LoggerFactory.getLogger(CustomModelParser.class).warn("Internal cache must stay small but was " + INTERNAL_CACHE.size() + ". Cleared it. Misuse of CustomModel::internal?"); } } else if (CACHE_SIZE > 0) { CACHE.put(key, clazz); } } try { // The class does not need to be thread-safe as we create an instance per request CustomWeightingHelper prio = (CustomWeightingHelper) clazz.getDeclaredConstructor().newInstance(); prio.init(customModel, lookup, CustomModel.getAreasAsMap(customModel.getAreas())); return new CustomWeighting.Parameters( prio::getSpeed, prio::calcMaxSpeed, prio::getPriority, prio::calcMaxPriority, customModel.getDistanceInfluence() == null ? 0 : customModel.getDistanceInfluence(), customModel.getHeadingPenalty() == null ? Parameters.Routing.DEFAULT_HEADING_PENALTY : customModel.getHeadingPenalty()); } catch (ReflectiveOperationException ex) { throw new IllegalArgumentException("Cannot compile expression " + ex.getMessage(), ex); } }
@Test public void testBrackets() { EdgeIteratorState primary = graph.edge(0, 1).setDistance(10).set(accessEnc, true, true). set(roadClassEnc, PRIMARY).set(avgSpeedEnc, 80); EdgeIteratorState secondary = graph.edge(0, 1).setDistance(10).set(accessEnc, true, true). set(roadClassEnc, SECONDARY).set(avgSpeedEnc, 40); CustomModel customModel = new CustomModel(); customModel.addToPriority(If("(road_class == PRIMARY || car_access == true) && car_average_speed > 50", MULTIPLY, "0.9")); customModel.addToSpeed(If("true", LIMIT, "100")); CustomWeighting.Parameters parameters = CustomModelParser.createWeightingParameters(customModel, encodingManager); assertEquals(0.9, parameters.getEdgeToPriorityMapping().get(primary, false), 0.01); assertEquals(1, parameters.getEdgeToPriorityMapping().get(secondary, false), 0.01); }
public Double getProcessCpuLoad() { return getMXBeanValueAsDouble("ProcessCpuLoad"); }
@Test void ifOperatingSystemMXBeanReturnsNaNForProcessCpuLoadOnFirstCall_NegativeIsReturned() throws JMException { when(mBeanServer.getAttribute(objectName, "ProcessCpuLoad")).thenReturn(Double.NaN); assertThat(jobServerStats.getProcessCpuLoad()).isEqualTo(-1); }
public void dropExternalAnalyzeStatus(String tableUUID) { List<AnalyzeStatus> expireList = analyzeStatusMap.values().stream(). filter(status -> status instanceof ExternalAnalyzeStatus). filter(status -> ((ExternalAnalyzeStatus) status).getTableUUID().equals(tableUUID)). collect(Collectors.toList()); expireList.forEach(status -> analyzeStatusMap.remove(status.getId())); for (AnalyzeStatus status : expireList) { GlobalStateMgr.getCurrentState().getEditLog().logRemoveAnalyzeStatus(status); } }
@Test public void testDropExternalAnalyzeStatus() { Table table = connectContext.getGlobalStateMgr().getMetadataMgr().getTable("hive0", "partitioned_db", "t1"); AnalyzeMgr analyzeMgr = new AnalyzeMgr(); AnalyzeStatus analyzeStatus = new ExternalAnalyzeStatus(100, "hive0", "partitioned_db", "t1", table.getUUID(), ImmutableList.of("c1", "c2"), StatsConstants.AnalyzeType.FULL, StatsConstants.ScheduleType.ONCE, Maps.newHashMap(), LocalDateTime.now()); analyzeMgr.addAnalyzeStatus(analyzeStatus); analyzeMgr.dropExternalAnalyzeStatus(table.getUUID()); Assert.assertEquals(0, analyzeMgr.getAnalyzeStatusMap().size()); }
public static JavaToSqlTypeConverter javaToSqlConverter() { return JAVA_TO_SQL_CONVERTER; }
@Test public void shouldConvertJavaStringToSqlString() { assertThat(javaToSqlConverter().toSqlType(String.class), is(SqlBaseType.STRING)); }
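A hedged companion check for the same converter, assuming it maps boxed Java numeric types the conventional way; a sketch, not the project's actual test:

@Test
public void shouldConvertJavaIntegerToSqlInteger() {
    // assumption: boxed Integer maps to SqlBaseType.INTEGER in this converter
    assertThat(javaToSqlConverter().toSqlType(Integer.class), is(SqlBaseType.INTEGER));
}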
public boolean isFiller() { if(filler == null) return false; return !this.getFiller().equals(Filler.NONE); }
@Test void isFiller_true() { assertTrue(product.isFiller()); }
@Override public AnalyticsPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) { Capabilities capabilities = capabilities(descriptor.id()); PluggableInstanceSettings pluginSettingsAndView = getPluginSettingsAndView(descriptor, extension); Image image = image(descriptor.id()); return new AnalyticsPluginInfo(descriptor, image, capabilities, pluginSettingsAndView); }
@Test public void shouldBuildPluginInfoWithPluginSettingsConfiguration() { GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build(); PluginSettingsConfiguration value = new PluginSettingsConfiguration(); value.add(new PluginSettingsProperty("username", null).with(Property.REQUIRED, true).with(Property.SECURE, false)); value.add(new PluginSettingsProperty("password", null).with(Property.REQUIRED, true).with(Property.SECURE, true)); when(extension.getPluginSettingsConfiguration("plugin1")).thenReturn(value); when(extension.getPluginSettingsView("plugin1")).thenReturn("some-html"); AnalyticsPluginInfo pluginInfo = new AnalyticsPluginInfoBuilder(extension).pluginInfoFor(descriptor); List<PluginConfiguration> pluginConfigurations = List.of( new PluginConfiguration("username", new Metadata(true, false)), new PluginConfiguration("password", new Metadata(true, true)) ); PluginView pluginView = new PluginView("some-html"); assertThat(pluginInfo.getDescriptor(), is(descriptor)); assertThat(pluginInfo.getExtensionName(), is("analytics")); assertThat(pluginInfo.getPluginSettings(), is(new PluggableInstanceSettings(pluginConfigurations, pluginView))); }
public byte[] verifyAuthenticate(byte[] seed, byte[] result) throws RdaException { final SecureMessaging sm = new TDEASecureMessaging(seed, 0, 16, null); final byte[] calculatedMac = sm.mac( m -> m.update(result, 0, 32)); if (!CryptoUtils.compare(calculatedMac, result, 32)) { throw new RdaException(RdaError.AUTHENTICATE, "Invalid MAC"); } return sm.decrypt(false, false, result, 0, 32); }
@Test public void shouldThrowErrorIfAuthenticateMacIsDifferent() throws Exception { final CardVerifier verifier = verifier(null, null); final byte[] seed = Hex.decode("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS"); final byte[] result = Hex.decode("" + "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS" ); Exception exception = assertThrows(RdaException.class, () -> { verifier.verifyAuthenticate(seed, result); }); assertEquals(RdaError.AUTHENTICATE, ((RdaException) exception).error); assertEquals("Invalid MAC", exception.getMessage()); }
public Properties getAllPropertiesByTags(final List<String> tagList) { Properties prop = new Properties(); for (String tag : tagList) { prop.putAll(this.getAllPropertiesByTag(tag)); } return prop; }
@Test public void testGetAllPropertiesByTags() throws Exception { try{ out = new BufferedWriter(new FileWriter(CONFIG_CORE)); startConfig(); appendProperty("hadoop.tags.system", "YARN,HDFS,NAMENODE"); appendProperty("hadoop.tags.custom", "MYCUSTOMTAG"); appendPropertyByTag("dfs.cblock.trace.io", "false", "YARN"); appendPropertyByTag("dfs.replication", "1", "HDFS"); appendPropertyByTag("dfs.namenode.logging.level", "INFO", "NAMENODE"); appendPropertyByTag("dfs.random.key", "XYZ", "MYCUSTOMTAG"); endConfig(); Path fileResource = new Path(CONFIG_CORE); conf.addResource(fileResource); conf.getProps(); } finally { out.close(); } System.out.println(Files.readAllLines(Paths.get(CONFIG_CORE))); List<String> tagList = new ArrayList<>(); tagList.add("YARN"); tagList.add("HDFS"); tagList.add("NAMENODE"); Properties properties = conf.getAllPropertiesByTags(tagList); String[] sources = conf.getPropertySources("dfs.replication"); assertTrue(sources.length == 1); assertTrue(Arrays.toString(sources).contains("core-site.xml")); assertEq(3, properties.size()); assertEq(true, properties.containsKey("dfs.namenode.logging.level")); assertEq(true, properties.containsKey("dfs.replication")); assertEq(true, properties.containsKey("dfs.cblock.trace.io")); assertEq(false, properties.containsKey("namenode.host")); properties = conf.getAllPropertiesByTag("DEBUG"); assertEq(0, properties.size()); assertEq(false, properties.containsKey("dfs.namenode.logging.level")); assertEq(true, conf.isPropertyTag("YARN")); assertEq(true, conf.isPropertyTag("HDFS")); assertEq(true, conf.isPropertyTag("NAMENODE")); assertEq(true, conf.isPropertyTag("MYCUSTOMTAG")); assertEq(false, conf.isPropertyTag("CMYCUSTOMTAG2")); }
@Operation(summary = "createToken", description = "CREATE_TOKEN_NOTES") @Parameters({ @Parameter(name = "userId", description = "USER_ID", schema = @Schema(implementation = int.class), required = true), @Parameter(name = "expireTime", description = "EXPIRE_TIME", schema = @Schema(implementation = String.class), required = true, example = "2021-12-31 00:00:00"), @Parameter(name = "token", description = "TOKEN", required = false, schema = @Schema(implementation = String.class), example = "xxxx") }) @PostMapping() @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_ACCESS_TOKEN_ERROR) public Result<AccessToken> createToken(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "userId") int userId, @RequestParam(value = "expireTime") String expireTime, @RequestParam(value = "token", required = false) String token) { AccessToken accessToken = accessTokenService.createToken(loginUser, userId, expireTime, token); return Result.success(accessToken); }
@Test public void testCreateToken() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("userId", "4"); paramsMap.add("expireTime", "2019-12-18 00:00:00"); paramsMap.add("token", "607f5aeaaa2093dbdff5d5522ce00510"); MvcResult mvcResult = mockMvc.perform(post("/access-tokens") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); }
public static <InputT, OutputT> MapElements<InputT, OutputT> via( final InferableFunction<InputT, OutputT> fn) { return new MapElements<>(fn, fn.getInputTypeDescriptor(), fn.getOutputTypeDescriptor()); }
@Test @Category(ValidatesRunner.class) public void testPrimitiveDisplayData() { SimpleFunction<Integer, ?> mapFn = new SimpleFunction<Integer, Integer>() { @Override public Integer apply(Integer input) { return input; } }; MapElements<Integer, ?> map = MapElements.via(mapFn); DisplayDataEvaluator evaluator = DisplayDataEvaluator.create(); Set<DisplayData> displayData = evaluator.displayDataForPrimitiveTransforms(map); assertThat( "MapElements should include the mapFn in its primitive display data", displayData, hasItem(hasDisplayItem("class", mapFn.getClass()))); }
@Subscribe public void handleDebugEvent(DebugEvent event) { LOG.debug("Received local debug event: {}", event); DebugEventHolder.setLocalDebugEvent(event); }
@Test public void testHandleDebugEvent() throws Exception { DebugEvent event = DebugEvent.create("Node ID", "Test"); assertThat(DebugEventHolder.getLocalDebugEvent()).isNull(); serverEventBus.post(event); assertThat(DebugEventHolder.getLocalDebugEvent()).isSameAs(event); }
@Override public void readFrame(ChannelHandlerContext ctx, ByteBuf input, Http2FrameListener listener) throws Http2Exception { if (readError) { input.skipBytes(input.readableBytes()); return; } try { do { if (readingHeaders && !preProcessFrame(input)) { return; } // The header is complete, fall into the next case to process the payload. // This is to ensure the proper handling of zero-length payloads. In this // case, we don't want to loop around because there may be no more data // available, causing us to exit the loop. Instead, we just want to perform // the first pass at payload processing now. // Wait until the entire payload has been read. if (input.readableBytes() < payloadLength) { return; } // Slice to work only on the frame being read ByteBuf framePayload = input.readSlice(payloadLength); // We have consumed the data for this frame, next time we read, // we will be expecting to read a new frame header. readingHeaders = true; verifyFrameState(); processPayloadState(ctx, framePayload, listener); } while (input.isReadable()); } catch (Http2Exception e) { readError = !Http2Exception.isStreamError(e); throw e; } catch (RuntimeException e) { readError = true; throw e; } catch (Throwable cause) { readError = true; PlatformDependent.throwException(cause); } }
@Test public void failedWhenStreamWindowUpdateFrameWithZeroDelta() throws Http2Exception { final ByteBuf input = Unpooled.buffer(); try { writeFrameHeader(input, 4, WINDOW_UPDATE, new Http2Flags(), 1); input.writeInt(0); Http2Exception ex = assertThrows(Http2Exception.class, new Executable() { @Override public void execute() throws Throwable { frameReader.readFrame(ctx, input, listener); } }); assertInstanceOf(Http2Exception.StreamException.class, ex); } finally { input.release(); } }
static long sizeOf(Mutation m) { if (m.getOperation() == Mutation.Op.DELETE) { return sizeOf(m.getKeySet()); } long result = 0; for (Value v : m.getValues()) { switch (v.getType().getCode()) { case ARRAY: result += estimateArrayValue(v); break; case STRUCT: throw new IllegalArgumentException("Structs are not supported in mutation."); default: result += estimatePrimitiveValue(v); } } return result; }
@Test public void pgJsonb() throws Exception { Mutation empty = Mutation.newInsertOrUpdateBuilder("test").set("one").to(Value.pgJsonb("{}")).build(); Mutation nullValue = Mutation.newInsertOrUpdateBuilder("test") .set("one") .to(Value.pgJsonb((String) null)) .build(); Mutation sample = Mutation.newInsertOrUpdateBuilder("test") .set("one") .to(Value.pgJsonb("{\"type_name\":\"number\",\"value\":12345.123}")) .build(); Mutation nullArray = Mutation.newInsertOrUpdateBuilder("test").set("one").toPgJsonbArray(null).build(); assertThat(MutationSizeEstimator.sizeOf(empty), is(2L)); assertThat(MutationSizeEstimator.sizeOf(nullValue), is(0L)); assertThat(MutationSizeEstimator.sizeOf(sample), is(40L)); assertThat(MutationSizeEstimator.sizeOf(nullArray), is(0L)); }
@Override public Space get() throws BackgroundException { try { final Path home = new DefaultHomeFinderService(session).find(); if(!home.isRoot()) { if(SDSQuotaFeature.unknown == home.attributes().getQuota()) { log.warn(String.format("No quota set for node %s", home)); } else { return home.attributes().getQuota(); } } final CustomerData info = new UserApi(session.getClient()).requestCustomerInfo(StringUtils.EMPTY); return new Space(info.getSpaceUsed(), info.getSpaceLimit() - info.getSpaceUsed()); } catch(ApiException e) { throw new SDSExceptionMappingService(nodeid).map("Failure to read attributes of {0}", e, new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory))); } }
@Test public void testRoom() throws Exception { final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session); final Path room = new SDSDirectoryFeature(session, nodeid).mkdir( new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Quota.Space quota = new SDSQuotaFeature(session, nodeid).get(); assertNotNull(quota.available); assertNotNull(quota.used); new SDSDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public void addNameChangedListener( NameChangedListener listener ) { if ( listener != null ) { nameChangedListeners.add( listener ); } }
@Test public void testAddNameChangedListener() { meta.fireNameChangedListeners( "a", "a" ); meta.fireNameChangedListeners( "a", "b" ); meta.addNameChangedListener( null ); meta.fireNameChangedListeners( "a", "b" ); NameChangedListener listener = mock( NameChangedListener.class ); meta.addNameChangedListener( listener ); meta.fireNameChangedListeners( "b", "a" ); verify( listener, times( 1 ) ).nameChanged( meta, "b", "a" ); meta.removeNameChangedListener( null ); meta.removeNameChangedListener( listener ); meta.fireNameChangedListeners( "b", "a" ); verifyNoMoreInteractions( listener ); }
public static Optional<SingleRouteEngine> newInstance(final Collection<QualifiedTable> singleTables, final SQLStatement sqlStatement) { if (!singleTables.isEmpty()) { return Optional.of(new SingleStandardRouteEngine(singleTables, sqlStatement)); } // TODO move this logic to common route logic if (isSchemaDDLStatement(sqlStatement)) { return Optional.of(new SingleDatabaseBroadcastRouteEngine()); } return Optional.empty(); }
@Test void assertNewInstanceWithEmptySingleTableNameAndOtherStatement() { assertFalse(SingleRouteEngineFactory.newInstance(Collections.emptyList(), mock(SQLStatement.class)).isPresent()); }
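A hedged companion sketch for the non-empty branch of the factory; the QualifiedTable constructor arguments are illustrative, not fixtures from the project:

@Test
void assertNewInstanceWithSingleTables() {
    // assumption: QualifiedTable(schemaName, tableName) is the available constructor
    Optional<SingleRouteEngine> actual = SingleRouteEngineFactory.newInstance(
            Collections.singletonList(new QualifiedTable("foo_db", "t_order")), mock(SQLStatement.class));
    assertTrue(actual.isPresent());
    assertTrue(actual.get() instanceof SingleStandardRouteEngine);
}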
@Override public String getName() { return ANALYZER_NAME; }
@Test public void testGetAnalyzerName() { assertEquals("MSBuild Project Analyzer", instance.getName()); }
public S loadFirstInstance() { load(); if (classList.size() == 0) { return null; } Class<? extends S> serviceClass = classList.get(0); S instance = createInstance(serviceClass); return instance; }
@Test public void testLoadFirstInstance() { ProcessorSlot slot = SpiLoader.of(ProcessorSlot.class).loadFirstInstance(); assertNotNull(slot); assertTrue(slot instanceof NodeSelectorSlot); SlotChainBuilder chainBuilder = SpiLoader.of(SlotChainBuilder.class).loadFirstInstance(); assertNotNull(chainBuilder); assertTrue(chainBuilder instanceof DefaultSlotChainBuilder); InitFunc initFunc = SpiLoader.of(InitFunc.class).loadFirstInstance(); assertNotNull(initFunc); assertTrue(initFunc instanceof MetricCallbackInit); }
public static Sessions withGapDuration(Duration gapDuration) { return new Sessions(gapDuration); }
@Test public void testConsecutive() throws Exception { Map<IntervalWindow, Set<String>> expected = new HashMap<>(); expected.put(new IntervalWindow(new Instant(1), new Instant(19)), set(1, 2, 5, 9)); expected.put(new IntervalWindow(new Instant(100), new Instant(111)), set(100, 101)); assertEquals( expected, runWindowFn( Sessions.withGapDuration(Duration.millis(10)), Arrays.asList(1L, 2L, 5L, 9L, 100L, 101L))); }
@Udf(description = "Returns a new string encoded using the outputEncoding ") public String encode( @UdfParameter( description = "The source string. If null, then function returns null.") final String str, @UdfParameter( description = "The input encoding." + " If null, then function returns null.") final String inputEncoding, @UdfParameter( description = "The output encoding." + " If null, then function returns null.") final String outputEncoding) { if (str == null || inputEncoding == null || outputEncoding == null) { return null; } final String encodedString = inputEncoding.toLowerCase() + outputEncoding.toLowerCase(); final Encode.Encoder encoder = ENCODER_MAP.get(encodedString); if (encoder == null) { throw new KsqlFunctionException("Supported input and output encodings are: " + "hex, utf8, ascii and base64"); } return encoder.apply(str); }
@Test public void shouldEncodeAsciiToHex() { assertThat(udf.encode("Example!", "ascii", "hex"), is("4578616d706c6521")); assertThat(udf.encode("Plant trees", "ascii", "hex"), is("506c616e74207472656573")); assertThat(udf.encode("1 + 1 = 1", "ascii", "hex"), is("31202b2031203d2031")); assertThat(udf.encode("Ελλάδα", "ascii", "hex"), is("3f3f3f3f3f3f")); assertThat(udf.encode("Übermensch", "ascii", "hex"), is("3f6265726d656e736368")); }
public boolean isAnonymous() { return anonymous; }
@Test public void isAnonymous() throws Exception { assertTrue( ActingPrincipal.ANONYMOUS.isAnonymous() ); assertFalse( new ActingPrincipal( "harold" ).isAnonymous() ); assertFalse( new ActingPrincipal( "" ).isAnonymous() ); }
public static WaitForOptions defaults() { return new WaitForOptions().setInterval(DEFAULT_INTERVAL).setTimeoutMs(NEVER); }
@Test public void defaults() { WaitForOptions options = WaitForOptions.defaults(); assertEquals(WaitForOptions.DEFAULT_INTERVAL, options.getInterval()); assertEquals(WaitForOptions.NEVER, options.getTimeoutMs()); }
@Override public void batchUnSubscribe(List<ConsumerConfig> configs) { for (ConsumerConfig config : configs) { unSubscribe(config); } }
@Test public void testBatchUnSubscribe() { ConsumerConfig<Object> config1 = new ConsumerConfig<>(); String direct1 = "2"; config1.setDirectUrl(direct1); ConsumerConfig<Object> config2 = new ConsumerConfig<>(); String direct2 = "1"; config2.setDirectUrl(direct2); domainRegistry.subscribe(config1); domainRegistry.subscribe(config2); assertTrue(domainRegistry.notifyListeners.containsKey(direct1)); assertEquals(1, domainRegistry.notifyListeners.get(direct1).size()); assertTrue(domainRegistry.notifyListeners.containsKey(direct2)); assertEquals(1, domainRegistry.notifyListeners.get(direct2).size()); List<ConsumerConfig> consumerConfigs = Arrays.asList(config1, config2); domainRegistry.batchUnSubscribe(consumerConfigs); assertTrue(domainRegistry.notifyListeners.containsKey(direct1)); assertEquals(0, domainRegistry.notifyListeners.get(direct1).size()); assertTrue(domainRegistry.notifyListeners.containsKey(direct2)); assertEquals(0, domainRegistry.notifyListeners.get(direct2).size()); }
protected Credentials configureCredentials(AuthConfiguration auth) { if (null != auth.getCredentialType() && auth.getCredentialType().equalsIgnoreCase(AuthConfiguration.NT_CREDS)) { return new NTCredentials(auth.getUsername(), auth.getPassword().toCharArray(), auth.getHostname(), auth.getDomain()); } else { return new UsernamePasswordCredentials(auth.getUsername(), auth.getPassword().toCharArray()); } }
@Test void configureCredentialReturnsNTCredentialsForNTLMConfig() { assertThat(builder.configureCredentials(new AuthConfiguration("username", "password", "NTLM", "realm", "hostname", "domain", "NT"))) .isInstanceOfSatisfying(NTCredentials.class, credentials -> assertThat(credentials) .satisfies(c -> assertThat(c.getPassword()).isEqualTo("password".toCharArray())) .satisfies(c -> assertThat(c.getUserPrincipal().getName()).isEqualTo("DOMAIN\\username"))); }
@Override public void close() { dataSource.close(); }
@Test void assertClose() { repository.close(); HikariDataSource hikariDataSource = mockedConstruction.constructed().get(0); verify(hikariDataSource).close(); }
public void validate(String clientId, String clientSecret, String workspace) { Token token = validateAccessToken(clientId, clientSecret); if (token.getScopes() == null || !token.getScopes().contains("pullrequest")) { LOG.info(MISSING_PULL_REQUEST_READ_PERMISSION + String.format(SCOPE, token.getScopes())); throw new IllegalArgumentException(ERROR_BBC_SERVERS + ": " + MISSING_PULL_REQUEST_READ_PERMISSION); } try { doGet(token.getAccessToken(), buildUrl("/repositories/" + workspace), r -> null); } catch (NotFoundException | IllegalStateException e) { throw new IllegalArgumentException(e.getMessage()); } }
@Test public void nullErrorBodyIsSupported() throws IOException { OkHttpClient clientMock = mock(OkHttpClient.class); Call callMock = mock(Call.class); String url = "http://any.test/"; String message = "Unknown issue"; when(callMock.execute()).thenReturn(new Response.Builder() .request(new Request.Builder().url(url).build()) .protocol(Protocol.HTTP_1_1) .code(500) .message(message) .build()); when(clientMock.newCall(any())).thenReturn(callMock); underTest = new BitbucketCloudRestClient(clientMock); assertThatIllegalArgumentException() .isThrownBy(() -> underTest.validate("clientId", "clientSecret", "workspace")) .withMessage(UNABLE_TO_CONTACT_BBC_SERVERS); assertThat(logTester.logs(Level.INFO)).containsExactly(String.format(BBC_FAIL_WITH_RESPONSE, url, "500", message)); }
@VisibleForTesting public static boolean isDateAfterOrSame( String date1, String date2 ) { return date2.compareTo( date1 ) >= 0; }
@Test public void isDateAfterOrSame_SameTest() { assertTrue( TransPreviewProgressDialog.isDateAfterOrSame( SAME_DATE_STR, SAME_DATE_STR ) ); }
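Since the focal method compares the raw strings lexicographically, a companion covering the after and before cases with consistently formatted date strings follows directly from it:

@Test
public void isDateAfterOrSame_AfterAndBeforeTest() {
    // lexicographic comparison: date2 >= date1 means "after or same"
    assertTrue( TransPreviewProgressDialog.isDateAfterOrSame( "2020-01-01", "2020-01-02" ) );
    assertFalse( TransPreviewProgressDialog.isDateAfterOrSame( "2020-01-02", "2020-01-01" ) );
}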
public static void addBlockCacheCapacityMetric(final StreamsMetricsImpl streamsMetrics, final RocksDBMetricContext metricContext, final Gauge<BigInteger> valueProvider) { addMutableMetric( streamsMetrics, metricContext, valueProvider, CAPACITY_OF_BLOCK_CACHE, CAPACITY_OF_BLOCK_CACHE_DESCRIPTION ); }
@Test public void shouldAddBlockCacheCapacityMetric() { final String name = "block-cache-capacity"; final String description = "Capacity of the block cache in bytes"; runAndVerifyMutableMetric( name, description, () -> RocksDBMetrics.addBlockCacheCapacityMetric(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER) ); }
@Override public List<String> splitAndEvaluate() { return Strings.isNullOrEmpty(inlineExpression) ? Collections.emptyList() : split(inlineExpression); }
@Test void assertEvaluateForSimpleString() { List<String> expected = TypedSPILoader.getService(InlineExpressionParser.class, "LITERAL", PropertiesBuilder.build( new PropertiesBuilder.Property(InlineExpressionParser.INLINE_EXPRESSION_KEY, " t_order_0, t_order_1 "))).splitAndEvaluate(); assertThat(expected.size(), is(2)); assertThat(expected, hasItems("t_order_0", "t_order_1")); }
@Override public void not() { get(notAsync()); }
@Test public void testNot() { RBitSet bs = redisson.getBitSet("testbitset"); bs.set(3); bs.set(5); bs.not(); assertThat(bs.toString()).isEqualTo("{0, 1, 2, 4, 6, 7}"); }
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
@Test public void shouldChooseCorrectLambdaFunction() { // Given: givenFunctions( function(FIRST_FUNC, -1, GENERIC_MAP, LAMBDA_KEY_FUNCTION) ); givenFunctions( function(SECOND_FUNC, -1, GENERIC_MAP, LAMBDA_VALUE_FUNCTION) ); // When: final KsqlScalarFunction first_fun = udfIndex.getFunction( ImmutableList.of( SqlArgument.of(MAP2_ARG), SqlArgument.of( SqlLambdaResolved.of( ImmutableList.of(SqlTypes.STRING), SqlTypes.STRING)))); final KsqlScalarFunction second_fun = udfIndex.getFunction( ImmutableList.of( SqlArgument.of(MAP2_ARG), SqlArgument.of( SqlLambdaResolved.of( ImmutableList.of(INTEGER), INTEGER)))); // Then: assertThat(first_fun.name(), equalTo(FIRST_FUNC)); assertThat(second_fun.name(), equalTo(SECOND_FUNC)); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { return this.list(directory, listener, String.valueOf(Path.DELIMITER)); }
@Test public void testListEncodedCharacterFolderNonVersioned() throws Exception { final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume)); container.attributes().setRegion("us-east-1"); final Path placeholder = new GoogleStorageDirectoryFeature(session).mkdir( new Path(container, String.format("%s +", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.directory)), new TransferStatus()); assertTrue(new GoogleStorageObjectListService(session).list(container, new DisabledListProgressListener(), String.valueOf(Path.DELIMITER), new HostPreferences(session.getHost()).getInteger("googlestorage.listing.chunksize"), VersioningConfiguration.empty()).contains(placeholder)); new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public static <E> E checkNotInstanceOf(Class<?> type, E object, String errorMessage) { isNotNull(type, "type"); if (type.isInstance(object)) { throw new IllegalArgumentException(errorMessage); } return object; }
@Test public void test_checkNotInstanceOf_withNullObject() { Object value = checkNotInstanceOf(Integer.class, null, "argumentName"); assertNull(value); }
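A hedged companion for the rejecting branch, assuming a JUnit version where assertThrows is available:

@Test
public void test_checkNotInstanceOf_withMatchingInstance() {
    // an instance of the given type must be rejected with the supplied message
    assertThrows(IllegalArgumentException.class,
            () -> checkNotInstanceOf(Integer.class, Integer.valueOf(42), "argumentName"));
}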
@Override public Checksum compute(final InputStream in, final TransferStatus status) throws BackgroundException { return new Checksum(HashAlgorithm.md5, this.digest("MD5", this.normalize(in, status), status)); }
@Test public void testNormalize() throws Exception { assertEquals("a43c1b0aa53a0c908810c06ab1ff3967", new MD5ChecksumCompute().compute(IOUtils.toInputStream("input", Charset.defaultCharset()), new TransferStatus()).hash); assertEquals("a43c1b0aa53a0c908810c06ab1ff3967", new MD5ChecksumCompute().compute(IOUtils.toInputStream("_input", Charset.defaultCharset()), new TransferStatus().withOffset(1)).hash); assertEquals("a43c1b0aa53a0c908810c06ab1ff3967", new MD5ChecksumCompute().compute(IOUtils.toInputStream("_input_", Charset.defaultCharset()), new TransferStatus().withOffset(1).withLength(5)).hash); }
public static <F extends Future<Void>> Mono<Void> deferFuture(Supplier<F> deferredFuture) { return new DeferredFutureMono<>(deferredFuture); }
@Test void testDeferredFutureMonoImmediate() { ImmediateEventExecutor eventExecutor = ImmediateEventExecutor.INSTANCE; Supplier<Future<Void>> promiseSupplier = () -> eventExecutor.newFailedFuture(new ClosedChannelException()); StepVerifier.create(FutureMono.deferFuture(promiseSupplier)) .expectError(AbortedException.class) .verify(Duration.ofSeconds(30)); }
public static List<Integer> asIntegerList(@Nonnull int[] array) { checkNotNull(array, "null array"); return new AbstractList<>() { @Override public Integer get(int index) { return array[index]; } @Override public int size() { return array.length; } }; }
@Test public void testToIntegerList_whenEmpty() { List<Integer> result = asIntegerList(new int[0]); assertEquals(0, result.size()); }
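A companion for the non-empty case; it follows directly from the focal method, which returns a live view over the array rather than a copy:

@Test
public void testToIntegerList_whenNonEmpty() {
    int[] array = {1, 2, 3};
    List<Integer> result = asIntegerList(array);
    assertEquals(3, result.size());
    assertEquals(Integer.valueOf(2), result.get(1));
    // the list is a view: writes to the backing array are visible through it
    array[1] = 42;
    assertEquals(Integer.valueOf(42), result.get(1));
}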
@Override public ContinuousEnumerationResult planSplits(IcebergEnumeratorPosition lastPosition) { table.refresh(); if (lastPosition != null) { return discoverIncrementalSplits(lastPosition); } else { return discoverInitialSplits(); } }
@Test public void testIncrementalFromEarliestSnapshotWithEmptyTable() throws Exception { ScanContext scanContext = ScanContext.builder() .startingStrategy(StreamingStartingStrategy.INCREMENTAL_FROM_EARLIEST_SNAPSHOT) .build(); ContinuousSplitPlannerImpl splitPlanner = new ContinuousSplitPlannerImpl(TABLE_RESOURCE.tableLoader().clone(), scanContext, null); ContinuousEnumerationResult emptyTableInitialDiscoveryResult = splitPlanner.planSplits(null); assertThat(emptyTableInitialDiscoveryResult.splits()).isEmpty(); assertThat(emptyTableInitialDiscoveryResult.fromPosition()).isNull(); assertThat(emptyTableInitialDiscoveryResult.toPosition().snapshotId()).isNull(); assertThat(emptyTableInitialDiscoveryResult.toPosition().snapshotTimestampMs()).isNull(); ContinuousEnumerationResult emptyTableSecondDiscoveryResult = splitPlanner.planSplits(emptyTableInitialDiscoveryResult.toPosition()); assertThat(emptyTableSecondDiscoveryResult.splits()).isEmpty(); assertThat(emptyTableSecondDiscoveryResult.fromPosition().snapshotId()).isNull(); assertThat(emptyTableSecondDiscoveryResult.fromPosition().snapshotTimestampMs()).isNull(); assertThat(emptyTableSecondDiscoveryResult.toPosition().snapshotId()).isNull(); assertThat(emptyTableSecondDiscoveryResult.toPosition().snapshotTimestampMs()).isNull(); // next 3 snapshots IcebergEnumeratorPosition lastPosition = emptyTableSecondDiscoveryResult.toPosition(); for (int i = 0; i < 3; ++i) { lastPosition = verifyOneCycle(splitPlanner, lastPosition).lastPosition; } }
@Override public Optional<ReadError> read(DbFileSources.Line.Builder lineBuilder) { ScannerReport.LineCoverage reportCoverage = getNextLineCoverageIfMatchLine(lineBuilder.getLine()); if (reportCoverage != null) { processCoverage(lineBuilder, reportCoverage); coverage = null; } return Optional.empty(); }
@Test public void does_not_set_deprecated_coverage_fields() { CoverageLineReader computeCoverageLine = new CoverageLineReader(newArrayList(ScannerReport.LineCoverage.newBuilder() .setLine(1) .setConditions(10) .setHits(true) .setCoveredConditions(2) .build()).iterator()); DbFileSources.Line.Builder lineBuilder = DbFileSources.Data.newBuilder().addLinesBuilder().setLine(1); assertThat(computeCoverageLine.read(lineBuilder)).isEmpty(); assertThat(lineBuilder.hasDeprecatedUtLineHits()).isFalse(); assertThat(lineBuilder.hasDeprecatedUtConditions()).isFalse(); assertThat(lineBuilder.hasDeprecatedUtCoveredConditions()).isFalse(); assertThat(lineBuilder.hasDeprecatedOverallLineHits()).isFalse(); assertThat(lineBuilder.hasDeprecatedOverallConditions()).isFalse(); assertThat(lineBuilder.hasDeprecatedOverallCoveredConditions()).isFalse(); assertThat(lineBuilder.hasDeprecatedItLineHits()).isFalse(); assertThat(lineBuilder.hasDeprecatedItConditions()).isFalse(); assertThat(lineBuilder.hasDeprecatedItCoveredConditions()).isFalse(); }
public static Date getDate(Object date) { return getDate(date, Calendar.getInstance().getTime()); }
@Test @SuppressWarnings("UndefinedEquals") public void testGetDateObjectDateWithValidStringAndNullDefault() { Calendar cal = new GregorianCalendar(); cal.set(Calendar.HOUR_OF_DAY, 0); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); Date time = cal.getTime(); for (int formatId : new int[]{DateFormat.SHORT, DateFormat.MEDIUM, DateFormat.LONG, DateFormat.FULL}) { DateFormat formatter = DateFormat.getDateInstance(formatId); assertEquals(time, Converter.getDate(formatter.format(time), null)); } }
@Override protected @UnknownKeyFor @NonNull @Initialized SchemaTransform from( JdbcReadSchemaTransformConfiguration configuration) { configuration.validate(); return new JdbcReadSchemaTransform(configuration); }
@Test public void testReadWithJdbcTypeSpecified() { JdbcReadSchemaTransformProvider provider = null; for (SchemaTransformProvider p : ServiceLoader.load(SchemaTransformProvider.class)) { if (p instanceof JdbcReadSchemaTransformProvider) { provider = (JdbcReadSchemaTransformProvider) p; break; } } assertNotNull(provider); PCollection<Row> output = PCollectionRowTuple.empty(pipeline) .apply( provider.from( JdbcReadSchemaTransformProvider.JdbcReadSchemaTransformConfiguration.builder() .setJdbcUrl(DATA_SOURCE_CONFIGURATION.getUrl().get()) .setJdbcType("derby") .setLocation(READ_TABLE_NAME) .build())) .get("output"); Long expected = Long.valueOf(EXPECTED_ROW_COUNT); PAssert.that(output.apply(Count.globally())).containsInAnyOrder(expected); pipeline.run(); }
static String toDatabaseName(Namespace namespace, boolean skipNameValidation) { if (!skipNameValidation) { validateNamespace(namespace); } return namespace.level(0); }
@Test public void testToDatabaseName() { assertThat(IcebergToGlueConverter.toDatabaseName(Namespace.of("db"), false)).isEqualTo("db"); }
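A hedged companion for the skip-validation branch, derived from the focal method; the namespace value is illustrative:

@Test
public void testToDatabaseNameSkipValidation() {
    // with skipNameValidation=true the first namespace level is returned unvalidated
    assertThat(IcebergToGlueConverter.toDatabaseName(Namespace.of("db-1"), true)).isEqualTo("db-1");
}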
@Override public byte[] serialize() { byte[] payloadData = null; if (this.payload != null) { this.payload.setParent(this); payloadData = this.payload.serialize(); } int payloadLength = 0; if (payloadData != null) { payloadLength = payloadData.length; } final byte[] data = new byte[HEADER_LENGTH + payloadLength]; final ByteBuffer bb = ByteBuffer.wrap(data); bb.put(this.nextHeader); bb.put((byte) 0); bb.putShort((short) ( (this.fragmentOffset & 0x1fff) << 3 | this.moreFragment & 0x1 )); bb.putInt(this.identification); if (payloadData != null) { bb.put(payloadData); } if (this.parent != null && this.parent instanceof IExtensionHeader) { ((IExtensionHeader) this.parent).setNextHeader(IPv6.PROTOCOL_FRAG); } return data; }
@Test public void testSerialize() { Fragment frag = new Fragment(); frag.setNextHeader((byte) 0x11); frag.setFragmentOffset((short) 0x1f); frag.setMoreFragment((byte) 1); frag.setIdentification(0x1357); frag.setPayload(udp); assertArrayEquals(frag.serialize(), bytePacket); }
@Override public Object construct(String componentName) { ClusteringConfiguration clusteringConfiguration = configuration.clustering(); boolean shouldSegment = clusteringConfiguration.cacheMode().needsStateTransfer(); int level = configuration.locking().concurrencyLevel(); MemoryConfiguration memoryConfiguration = configuration.memory(); boolean offHeap = memoryConfiguration.isOffHeap(); EvictionStrategy strategy = memoryConfiguration.whenFull(); //handle case when < 0 value signifies unbounded container or when we are not removal based if (strategy.isExceptionBased() || !strategy.isEnabled()) { if (offHeap) { if (shouldSegment) { int segments = clusteringConfiguration.hash().numSegments(); Supplier<PeekableTouchableMap<WrappedBytes, WrappedBytes>> mapSupplier = this::createAndStartOffHeapConcurrentMap; if (clusteringConfiguration.l1().enabled()) { return new L1SegmentedDataContainer<>(mapSupplier, segments); } return new DefaultSegmentedDataContainer<>(mapSupplier, segments); } else { return new OffHeapDataContainer(); } } else if (shouldSegment) { Supplier<PeekableTouchableMap<Object, Object>> mapSupplier = PeekableTouchableContainerMap::new; int segments = clusteringConfiguration.hash().numSegments(); if (clusteringConfiguration.l1().enabled()) { return new L1SegmentedDataContainer<>(mapSupplier, segments); } return new DefaultSegmentedDataContainer<>(mapSupplier, segments); } else { return DefaultDataContainer.unBoundedDataContainer(level); } } boolean sizeInBytes = memoryConfiguration.maxSize() != null; long thresholdSize = sizeInBytes ? memoryConfiguration.maxSizeBytes() : memoryConfiguration.maxCount(); DataContainer<?, ?> dataContainer; if (offHeap) { if (shouldSegment) { int segments = clusteringConfiguration.hash().numSegments(); dataContainer = new SegmentedBoundedOffHeapDataContainer(segments, thresholdSize, memoryConfiguration.evictionType()); } else { dataContainer = new BoundedOffHeapDataContainer(thresholdSize, memoryConfiguration.evictionType()); } } else if (shouldSegment) { int segments = clusteringConfiguration.hash().numSegments(); dataContainer = new BoundedSegmentedDataContainer<>(segments, thresholdSize, memoryConfiguration.evictionType()); } else { dataContainer = DefaultDataContainer.boundedDataContainer(level, thresholdSize, memoryConfiguration.evictionType()); } if (sizeInBytes) { memoryConfiguration.attributes().attribute(MemoryConfiguration.MAX_SIZE) .addListener((newSize, old) -> dataContainer.resize(memoryConfiguration.maxSizeBytes())); } else { memoryConfiguration.attributes().attribute(MemoryConfiguration.MAX_COUNT) .addListener((newSize, old) -> dataContainer.resize(newSize.get())); } return dataContainer; }
@Test public void testEvictionRemoveSegmented() { dataContainerFactory.configuration = new ConfigurationBuilder().clustering() .memory().evictionStrategy(EvictionStrategy.REMOVE).size(1000) .clustering().cacheMode(CacheMode.DIST_ASYNC).build(); Object component = dataContainerFactory.construct(COMPONENT_NAME); assertEquals(BoundedSegmentedDataContainer.class, component.getClass()); }
public final BarcodeParameters getParams() { return params; }
@Test final void testConstructorWithSize() throws IOException { try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(200, 250)) { this.checkParams(BarcodeParameters.IMAGE_TYPE, 200, 250, BarcodeParameters.FORMAT, barcodeDataFormat.getParams()); } }
public boolean hasAnyMethodHandlerAnnotation() { return !operationsWithHandlerAnnotation.isEmpty(); }
@Test public void testHandlerOnSyntheticProxy() { Object proxy = buildProxyObject(); BeanInfo info = new BeanInfo(context, proxy.getClass()); assertTrue(info.hasAnyMethodHandlerAnnotation()); }
public boolean is(T state, T... otherStates) { return EnumSet.of(state, otherStates).contains(currentState); }
@Test public void testIsInInitialState_whenCreated() { assertTrue(machine.is(State.A)); }
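A hedged sketch of the varargs path, assuming the State enum also defines constants B and C:

@Test
public void testIs_withMultipleCandidateStates() {
    // State.B and State.C are assumed enum constants, used here for illustration
    assertTrue(machine.is(State.A, State.B));
    assertFalse(machine.is(State.B, State.C));
}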
public List<ShenyuServiceInstance> getCopyInstances() { List<ShenyuServiceInstance> copy = new ArrayList<>(shenyuServiceInstances.size()); shenyuServiceInstances.forEach(instance -> { ShenyuServiceInstance cp = ShenyuServiceTransfer.INSTANCE.deepCopy(instance); copy.add(cp); }); return copy; }
@Test public void getCopyInstances() { List<ShenyuServiceInstance> list = shenyuServiceInstanceLists.getCopyInstances(); Assertions.assertEquals(1, list.size()); }
public void markAsUnchanged(DefaultInputFile file) { if (isFeatureActive()) { if (file.status() != InputFile.Status.SAME) { LOG.error("File '{}' was marked as unchanged but its status is {}", file.getProjectRelativePath(), file.status()); } else { LOG.debug("File '{}' marked as unchanged", file.getProjectRelativePath()); file.setMarkedAsUnchanged(true); } } }
@Test public void not_active_if_using_different_reference() { logTester.setLevel(Level.DEBUG); BranchConfiguration differentRefConfig = branchConfiguration("a", "b", false); UnchangedFilesHandler handler = new UnchangedFilesHandler(enabledConfig, differentRefConfig, executingSensorContext); assertThat(logTester.logs()).contains("Optimization for unchanged files not enabled because it's not an analysis of a branch with a previous analysis"); handler.markAsUnchanged(file); verifyNoInteractions(file); }
@Override public void start() { // we request a split only if we did not get splits during the checkpoint restore if (getNumberOfCurrentlyAssignedSplits() == 0) { context.sendSplitRequest(); } }
@Test void testRequestSplitWhenNoSplitRestored() throws Exception { final TestingReaderContext context = new TestingReaderContext(); final FileSourceReader<String, FileSourceSplit> reader = createReader(context); reader.start(); reader.close(); assertThat(context.getNumSplitRequests()).isEqualTo(1); }
public final <K, V> void addSink(final String name, final String topic, final Serializer<K> keySerializer, final Serializer<V> valSerializer, final StreamPartitioner<? super K, ? super V> partitioner, final String... predecessorNames) { Objects.requireNonNull(name, "name must not be null"); Objects.requireNonNull(topic, "topic must not be null"); Objects.requireNonNull(predecessorNames, "predecessor names must not be null"); if (predecessorNames.length == 0) { throw new TopologyException("Sink " + name + " must have at least one parent"); } addSink(name, new StaticTopicNameExtractor<>(topic), keySerializer, valSerializer, partitioner, predecessorNames); nodeToSinkTopic.put(name, topic); nodeGroups = null; }
@Test public void testAddSinkWithWrongParent() { assertThrows(TopologyException.class, () -> builder.addSink("sink", "topic-2", null, null, null, "source")); }
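A companion derived from the focal method's explicit empty-parent check, reusing the same builder fixture:

@Test
public void testAddSinkWithNoParents() {
    // zero predecessor names must be rejected before the sink is registered
    assertThrows(TopologyException.class,
        () -> builder.addSink("sink", "topic-2", null, null, null));
}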
public void assign(AssignType assignType, String name, String exp, boolean docString) { name = StringUtils.trimToEmpty(name); validateVariableName(name); // always validate when gherkin if (vars.containsKey(name)) { LOGGER.debug("over-writing existing variable '{}' with new value: {}", name, exp); } setVariable(name, evalAndCastTo(assignType, exp, docString)); }
@Test void testResponseShortCuts() { assign("response", "{ foo: 'bar' }"); matchEquals("response", "{ foo: 'bar' }"); matchEquals("$", "{ foo: 'bar' }"); matchEquals("response.foo", "'bar'"); matchEquals("$.foo", "'bar'"); assign("response", "<root><foo>bar</foo></root>"); matchEquals("response", "<root><foo>bar</foo></root>"); matchEquals("/", "<root><foo>bar</foo></root>"); matchEquals("response/", "<root><foo>bar</foo></root>"); matchEquals("response /", "<root><foo>bar</foo></root>"); }
public static boolean isValidScopeToken(String scopeToken) { return VALID_SCOPE_TOKEN.matcher(scopeToken).matches(); }
@Test public void testOAuthScopeTokenValidation() { // valid scope tokens are from ascii 0x21 to 0x7E, excluding 0x22 (") and 0x5C (\) // test characters that are outside of the ! to ~ range and the excluded characters, " and \ assertThat(OAuth2Util.isValidScopeToken("a\\b")) .as("Should reject scope token with \\") .isFalse(); assertThat(OAuth2Util.isValidScopeToken("a b")) .as("Should reject scope token with space") .isFalse(); assertThat(OAuth2Util.isValidScopeToken("a\"b")) .as("Should reject scope token with \"") .isFalse(); assertThat(OAuth2Util.isValidScopeToken("\u007F")) .as("Should reject scope token with DEL") .isFalse(); // test all characters that are inside of the ! to ~ range and are not excluded assertThat(OAuth2Util.isValidScopeToken("!#$%&'()*+,-./")) .as("Should accept scope token with !-/") .isTrue(); assertThat(OAuth2Util.isValidScopeToken("0123456789")) .as("Should accept scope token with 0-9") .isTrue(); assertThat(OAuth2Util.isValidScopeToken(":;<=>?@")) .as("Should accept scope token with :-@") .isTrue(); assertThat(OAuth2Util.isValidScopeToken("ABCDEFGHIJKLM")) .as("Should accept scope token with A-M") .isTrue(); assertThat(OAuth2Util.isValidScopeToken("NOPQRSTUVWXYZ")) .as("Should accept scope token with N-Z") .isTrue(); assertThat(OAuth2Util.isValidScopeToken("[]^_`")) .as("Should accept scope token with [-`, not \\") .isTrue(); assertThat(OAuth2Util.isValidScopeToken("abcdefghijklm")) .as("Should accept scope token with a-m") .isTrue(); assertThat(OAuth2Util.isValidScopeToken("nopqrstuvwxyz")) .as("Should accept scope token with n-z") .isTrue(); assertThat(OAuth2Util.isValidScopeToken("{|}~")) .as("Should accept scope token with {-~") .isTrue(); }
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta ); rep.saveStepAttribute( id_transformation, id_step, "schema", schemaName ); rep.saveStepAttribute( id_transformation, id_step, "table", tableName ); rep.saveStepAttribute( id_transformation, id_step, "commit", commitSize ); rep.saveStepAttribute( id_transformation, id_step, "truncate", truncateTable ); rep.saveStepAttribute( id_transformation, id_step, "ignore_errors", ignoreErrors ); rep.saveStepAttribute( id_transformation, id_step, "use_batch", useBatchUpdate ); rep.saveStepAttribute( id_transformation, id_step, "specify_fields", specifyFields ); rep.saveStepAttribute( id_transformation, id_step, "partitioning_enabled", partitioningEnabled ); rep.saveStepAttribute( id_transformation, id_step, "partitioning_field", partitioningField ); rep.saveStepAttribute( id_transformation, id_step, "partitioning_daily", partitioningDaily ); rep.saveStepAttribute( id_transformation, id_step, "partitioning_monthly", partitioningMonthly ); rep.saveStepAttribute( id_transformation, id_step, "tablename_in_field", tableNameInField ); rep.saveStepAttribute( id_transformation, id_step, "tablename_field", tableNameField ); rep.saveStepAttribute( id_transformation, id_step, "tablename_in_table", tableNameInTable ); rep.saveStepAttribute( id_transformation, id_step, "return_keys", returningGeneratedKeys ); rep.saveStepAttribute( id_transformation, id_step, "return_field", generatedKeyField ); int nrRows = ( fieldDatabase.length < fieldStream.length ? fieldStream.length : fieldDatabase.length ); for ( int idx = 0; idx < nrRows; idx++ ) { String columnName = ( idx < fieldDatabase.length ? fieldDatabase[ idx ] : "" ); String streamName = ( idx < fieldStream.length ? fieldStream[ idx ] : "" ); rep.saveStepAttribute( id_transformation, id_step, idx, "column_name", columnName ); rep.saveStepAttribute( id_transformation, id_step, idx, "stream_name", streamName ); } // Also, save the step-database relationship! if ( databaseMeta != null ) { rep.insertStepDatabase( id_transformation, id_step, databaseMeta.getObjectId() ); } } catch ( Exception e ) { throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e ); } }
@Test public void testSaveRep() throws Exception { TableOutputMeta tableOutputMeta = new TableOutputMeta(); tableOutputMeta.loadXML( getTestNode(), databases, metaStore ); StringObjectId id_step = new StringObjectId( "stepid" ); StringObjectId id_transformation = new StringObjectId( "transid" ); Repository rep = mock( Repository.class ); tableOutputMeta.saveRep( rep, metaStore, id_transformation, id_step ); verify( rep ).saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", null ); verify( rep ).saveStepAttribute( id_transformation, id_step, "schema", "public" ); verify( rep ).saveStepAttribute( id_transformation, id_step, "table", "sales_csv" ); verify( rep ).saveStepAttribute( id_transformation, id_step, "commit", "1000" ); verify( rep ).saveStepAttribute( id_transformation, id_step, "truncate", true ); verify( rep ).saveStepAttribute( id_transformation, id_step, "ignore_errors", false ); verify( rep ).saveStepAttribute( id_transformation, id_step, "use_batch", true ); verify( rep ).saveStepAttribute( id_transformation, id_step, "specify_fields", true ); verify( rep ).saveStepAttribute( id_transformation, id_step, "partitioning_enabled", false ); verify( rep ).saveStepAttribute( id_transformation, id_step, "partitioning_field", null ); verify( rep ).saveStepAttribute( id_transformation, id_step, "partitioning_daily", false ); verify( rep ).saveStepAttribute( id_transformation, id_step, "partitioning_monthly", true ); verify( rep ).saveStepAttribute( id_transformation, id_step, "tablename_in_field", false ); verify( rep ).saveStepAttribute( id_transformation, id_step, "tablename_field", null ); verify( rep ).saveStepAttribute( id_transformation, id_step, "tablename_in_table", true ); verify( rep ).saveStepAttribute( id_transformation, id_step, "return_keys", false ); verify( rep ).saveStepAttribute( id_transformation, id_step, "return_field", null ); verify( rep ).saveStepAttribute( id_transformation, id_step, 0, "column_name", "ORDERNUMBER" ); verify( rep ).saveStepAttribute( id_transformation, id_step, 0, "stream_name", "ORDERNUMBER" ); verify( rep ).saveStepAttribute( id_transformation, id_step, 1, "column_name", "QUANTITYORDERED" ); verify( rep ).saveStepAttribute( id_transformation, id_step, 1, "stream_name", "QUANTITYORDERED" ); verify( rep ).saveStepAttribute( id_transformation, id_step, 2, "column_name", "PRICEEACH" ); verify( rep ).saveStepAttribute( id_transformation, id_step, 2, "stream_name", "PRICEEACH" ); verifyNoMoreInteractions( rep ); }
public OptExpression next() { // For logic scan to physical scan, we only need to match once if (isPatternWithoutChildren && groupExpressionIndex.get(0) > 0) { return null; } OptExpression expression; do { this.groupTraceKey = 0; // Match with the next groupExpression of the last group node int lastNode = this.groupExpressionIndex.size() - 1; int lastNodeIndex = this.groupExpressionIndex.get(lastNode); this.groupExpressionIndex.set(lastNode, lastNodeIndex + 1); expression = match(pattern, groupExpression); } while (expression == null && this.groupExpressionIndex.size() != 1); nextIdx++; return expression; }
@Test public void testBinderDepth2Repeat1() { OptExpression expr1 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_JOIN, 0), OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 1)), OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 2))); OptExpression expr2 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 3)); OptExpression expr3 = OptExpression.create(new MockOperator(OperatorType.LOGICAL_OLAP_SCAN, 4)); Memo memo = new Memo(); GroupExpression ge = memo.init(expr1); memo.copyIn(ge.inputAt(0), expr2); memo.copyIn(ge.inputAt(1), expr3); Pattern pattern = Pattern.create(OperatorType.LOGICAL_JOIN) .addChildren(Pattern.create(OperatorType.PATTERN_LEAF)) .addChildren(Pattern.create(OperatorType.PATTERN_LEAF)); Binder binder = new Binder(pattern, ge); OptExpression result; result = binder.next(); assertEquals(OperatorType.LOGICAL_JOIN, result.getOp().getOpType()); assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(0).getOp().getOpType()); assertEquals(1, ((MockOperator) result.inputAt(0).getOp()).getValue()); assertEquals(OperatorType.LOGICAL_OLAP_SCAN, result.inputAt(1).getOp().getOpType()); assertEquals(2, ((MockOperator) result.inputAt(1).getOp()).getValue()); assertNull(binder.next()); }
public String multipleConditionsExample() { IamPolicy policy = IamPolicy.builder() .addStatement(b -> b .effect(IamEffect.ALLOW) .addAction("dynamodb:GetItem") .addAction("dynamodb:BatchGetItem") .addAction("dynamodb:Query") .addAction("dynamodb:PutItem") .addAction("dynamodb:UpdateItem") .addAction("dynamodb:DeleteItem") .addAction("dynamodb:BatchWriteItem") .addResource("arn:aws:dynamodb:*:*:table/table-name") .addConditions(IamConditionOperator.STRING_EQUALS .addPrefix("ForAllValues:"), "dynamodb:Attributes", List.of("column-name1", "column-name2", "column-name3")) .addCondition(b1 -> b1 .operator(IamConditionOperator.STRING_EQUALS .addSuffix("IfExists")) .key("dynamodb:Select") .value("SPECIFIC_ATTRIBUTES"))) .build(); return policy.toJson(IamPolicyWriter.builder() .prettyPrint(true).build()); }
@Test @Tag("IntegrationTest") void multipleConditionsExample() { String jsonPolicy = examples.multipleConditionsExample(); logger.info(jsonPolicy); analyze(jsonPolicy, PolicyType.IDENTITY_POLICY); }
public static FlinkPod loadPodFromTemplateFile( FlinkKubeClient kubeClient, File podTemplateFile, String mainContainerName) { final KubernetesPod pod = kubeClient.loadPodFromTemplateFile(podTemplateFile); final List<Container> otherContainers = new ArrayList<>(); Container mainContainer = null; if (null != pod.getInternalResource().getSpec()) { for (Container container : pod.getInternalResource().getSpec().getContainers()) { if (mainContainerName.equals(container.getName())) { mainContainer = container; } else { otherContainers.add(container); } } pod.getInternalResource().getSpec().setContainers(otherContainers); } else { // Set an empty spec for pod template pod.getInternalResource().setSpec(new PodSpecBuilder().build()); } if (mainContainer == null) { LOG.info( "Could not find main container {} in pod template, using empty one to initialize.", mainContainerName); mainContainer = new ContainerBuilder().build(); } return new FlinkPod(pod.getInternalResource(), mainContainer); }
@Test void testLoadPodFromTemplateAndCheckMainContainer() { final FlinkPod flinkPod = KubernetesUtils.loadPodFromTemplateFile( flinkKubeClient, KubernetesPodTemplateTestUtils.getPodTemplateFile(), KubernetesPodTemplateTestUtils.TESTING_MAIN_CONTAINER_NAME); assertThat(flinkPod.getMainContainer().getName()) .isEqualTo(KubernetesPodTemplateTestUtils.TESTING_MAIN_CONTAINER_NAME); assertThat(flinkPod.getMainContainer().getVolumeMounts()) .contains(KubernetesPodTemplateTestUtils.createVolumeMount()); }
abstract void execute(Admin admin, Namespace ns, PrintStream out) throws Exception;
@Test public void testNewBrokerAbortTransaction() throws Exception { TopicPartition topicPartition = new TopicPartition("foo", 5); long startOffset = 9173; long producerId = 12345L; short producerEpoch = 15; int coordinatorEpoch = 76; String[] args = new String[] { "--bootstrap-server", "localhost:9092", "abort", "--topic", topicPartition.topic(), "--partition", String.valueOf(topicPartition.partition()), "--start-offset", String.valueOf(startOffset) }; DescribeProducersResult describeResult = Mockito.mock(DescribeProducersResult.class); KafkaFuture<PartitionProducerState> describeFuture = completedFuture( new PartitionProducerState(singletonList( new ProducerState(producerId, producerEpoch, 1300, 1599509565L, OptionalInt.of(coordinatorEpoch), OptionalLong.of(startOffset)) ))); AbortTransactionResult abortTransactionResult = Mockito.mock(AbortTransactionResult.class); KafkaFuture<Void> abortFuture = completedFuture(null); AbortTransactionSpec expectedAbortSpec = new AbortTransactionSpec( topicPartition, producerId, producerEpoch, coordinatorEpoch); Mockito.when(describeResult.partitionResult(topicPartition)).thenReturn(describeFuture); Mockito.when(admin.describeProducers(singleton(topicPartition))).thenReturn(describeResult); Mockito.when(abortTransactionResult.all()).thenReturn(abortFuture); Mockito.when(admin.abortTransaction(expectedAbortSpec)).thenReturn(abortTransactionResult); execute(args); assertNormalExit(); }
public static String deepToString(Iterator<?> iter) { StringBuilder bld = new StringBuilder("["); String prefix = ""; while (iter.hasNext()) { Object object = iter.next(); bld.append(prefix); bld.append(object.toString()); prefix = ", "; } bld.append("]"); return bld.toString(); }
@Test public void testDeepToString() { assertEquals("[1, 2, 3]", MessageUtil.deepToString(Arrays.asList(1, 2, 3).iterator())); assertEquals("[foo]", MessageUtil.deepToString(Collections.singletonList("foo").iterator())); }
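// A self-contained copy of the joining logic in deepToString above, to show its
// output shape and one edge case; "join" is a local stand-in name, not the real API.
import java.util.Arrays;
import java.util.Iterator;

class DeepToStringDemo {
    static String join(Iterator<?> iter) {
        StringBuilder bld = new StringBuilder("[");
        String prefix = "";
        while (iter.hasNext()) {
            bld.append(prefix).append(iter.next().toString()); // a null element would NPE here
            prefix = ", ";
        }
        return bld.append("]").toString();
    }

    public static void main(String[] args) {
        System.out.println(join(Arrays.asList("a", "b", "c").iterator())); // [a, b, c]
        System.out.println(join(Arrays.asList().iterator()));              // []
    }
}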
public String getBaseUrl() { String url = config.get(SERVER_BASE_URL).orElse(""); if (isEmpty(url)) { url = computeBaseUrl(); } // Remove a single trailing slash, if any (StringUtils.removeEnd strips at most one occurrence) return StringUtils.removeEnd(url, "/"); }
@Test public void base_url_is_http_localhost_9000_specified_context_when_context_is_set() { settings.setProperty(CONTEXT_PROPERTY, "sdsd"); assertThat(underTest().getBaseUrl()).isEqualTo("http://localhost:9000sdsd"); }
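// What the trailing-slash cleanup in getBaseUrl above amounts to: Commons Lang's
// StringUtils.removeEnd(url, "/") strips at most one trailing "/". A hedged
// standalone equivalent:
static String stripTrailingSlash(String url) {
    return url.endsWith("/") ? url.substring(0, url.length() - 1) : url;
}
// e.g. "http://localhost:9000/" -> "http://localhost:9000",
// but "http://localhost:9000//" keeps one slash: -> "http://localhost:9000/".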
public boolean isAllAllowed() { return allAllowed; }
@Test public void testWildCardAccessControlList() throws Exception { AccessControlList acl; acl = new AccessControlList("*"); assertTrue(acl.isAllAllowed()); acl = new AccessControlList(" * "); assertTrue(acl.isAllAllowed()); acl = new AccessControlList(" *"); assertTrue(acl.isAllAllowed()); acl = new AccessControlList("* "); assertTrue(acl.isAllAllowed()); }
public static Field p(String fieldName) { return SELECT_ALL_FROM_SOURCES_ALL.where(fieldName); }
@Test void contains_phrase_near_onear_equiv() { { String q1 = Q.p("f1").containsPhrase("p1", "p2", "p3") .build(); String q2 = Q.p("f1").containsPhrase(List.of("p1", "p2", "p3")) .build(); assertEquals(q1, "yql=select * from sources * where f1 contains phrase(\"p1\", \"p2\", \"p3\")"); assertEquals(q2, "yql=select * from sources * where f1 contains phrase(\"p1\", \"p2\", \"p3\")"); } { String q1 = Q.p("f1").containsNear("p1", "p2", "p3") .build(); String q2 = Q.p("f1").containsNear(List.of("p1", "p2", "p3")) .build(); assertEquals(q1, "yql=select * from sources * where f1 contains near(\"p1\", \"p2\", \"p3\")"); assertEquals(q2, "yql=select * from sources * where f1 contains near(\"p1\", \"p2\", \"p3\")"); } { String q1 = Q.p("f1").containsOnear("p1", "p2", "p3") .build(); String q2 = Q.p("f1").containsOnear(List.of("p1", "p2", "p3")) .build(); assertEquals(q1, "yql=select * from sources * where f1 contains onear(\"p1\", \"p2\", \"p3\")"); assertEquals(q2, "yql=select * from sources * where f1 contains onear(\"p1\", \"p2\", \"p3\")"); } { String q1 = Q.p("f1").containsEquiv("p1", "p2", "p3") .build(); String q2 = Q.p("f1").containsEquiv(List.of("p1", "p2", "p3")) .build(); assertEquals(q1, "yql=select * from sources * where f1 contains equiv(\"p1\", \"p2\", \"p3\")"); assertEquals(q2, "yql=select * from sources * where f1 contains equiv(\"p1\", \"p2\", \"p3\")"); } }
public String allowCrossAccountAccessExample() { IamPolicy policy = IamPolicy.builder() .addStatement(b -> b .effect(IamEffect.ALLOW) .addPrincipal(IamPrincipalType.AWS, "111122223333") .addAction("s3:PutObject") .addResource("arn:aws:s3:::DOC-EXAMPLE-BUCKET/*") .addCondition(b1 -> b1 .operator(IamConditionOperator.STRING_EQUALS) .key("s3:x-amz-acl") .value("bucket-owner-full-control"))) .build(); return policy.toJson(IamPolicyWriter.builder() .prettyPrint(true).build()); }
@Test @Tag("IntegrationTest") void allowCrossAccountAccessExample() { String policyJson = examples.allowCrossAccountAccessExample(); logger.info(policyJson); analyze(policyJson, PolicyType.RESOURCE_POLICY); }
@Override public RecordSet getRecordSet(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorSplit split, List<? extends ColumnHandle> columns) { PrometheusSplit prometheusSplit = (PrometheusSplit) split; ImmutableList.Builder<PrometheusColumnHandle> handles = ImmutableList.builder(); for (ColumnHandle handle : columns) { handles.add((PrometheusColumnHandle) handle); } return new PrometheusRecordSet(prometheusClient, prometheusSplit, handles.build()); }
@Test public void testGetRecordSet() { PrometheusRecordSetProvider recordSetProvider = new PrometheusRecordSetProvider(client); RecordSet recordSet = recordSetProvider.getRecordSet( PrometheusTransactionHandle.INSTANCE, SESSION, new PrometheusSplit(dataUri), ImmutableList.of( new PrometheusColumnHandle("labels", varcharMapType, 0), new PrometheusColumnHandle("timestamp", TIMESTAMP_WITH_TIME_ZONE, 1), new PrometheusColumnHandle("value", DoubleType.DOUBLE, 2))); assertNotNull(recordSet, "recordSet is null"); RecordCursor cursor = recordSet.cursor(); assertNotNull(cursor, "cursor is null"); Map<Instant, Map<?, ?>> actual = new LinkedHashMap<>(); while (cursor.advanceNextPosition()) { actual.put((Instant) cursor.getObject(1), getMapFromBlock(varcharMapType, (Block) cursor.getObject(0))); } Map<Instant, Map<String, String>> expected = ImmutableMap.<Instant, Map<String, String>>builder() .put(ofEpochMilli(1565962969044L), ImmutableMap.of("instance", "localhost:9090", "__name__", "up", "job", "prometheus")) .put(ofEpochMilli(1565962984045L), ImmutableMap.of("instance", "localhost:9090", "__name__", "up", "job", "prometheus")) .put(ofEpochMilli(1565962999044L), ImmutableMap.of("instance", "localhost:9090", "__name__", "up", "job", "prometheus")) .put(ofEpochMilli(1565963014044L), ImmutableMap.of("instance", "localhost:9090", "__name__", "up", "job", "prometheus")) .build(); assertEquals(actual, expected); }
@Override public double logp(int k) { if (k < 0) { return Double.NEGATIVE_INFINITY; } else { return k * Math.log(1 - p) + Math.log(p); } }
@Test public void testLogP() { System.out.println("logP"); GeometricDistribution instance = new GeometricDistribution(0.3); instance.rand(); assertEquals(Math.log(0.3), instance.logp(0), 1E-6); assertEquals(Math.log(0.21), instance.logp(1), 1E-6); assertEquals(Math.log(0.147), instance.logp(2), 1E-6); assertEquals(Math.log(0.1029), instance.logp(3), 1E-6); assertEquals(Math.log(0.07203), instance.logp(4), 1E-6); assertEquals(Math.log(0.008474257), instance.logp(10), 1E-6); assertEquals(Math.log(0.0002393768), instance.logp(20), 1E-6); }
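// Numeric sanity check for logp above (assumption: this parameterization uses
// pmf P(K = k) = p * (1 - p)^k for k = 0, 1, 2, ..., which is exactly what
// k * log(1 - p) + log(p) encodes):
class GeometricLogpCheck {
    public static void main(String[] args) {
        double p = 0.3;
        for (int k : new int[] {0, 1, 2, 3, 4, 10, 20}) {
            double logp = k * Math.log(1 - p) + Math.log(p);
            double pmf = p * Math.pow(1 - p, k);
            // exp(logp) and the direct pmf agree to floating-point precision
            assert Math.abs(Math.exp(logp) - pmf) < 1e-12 : k;
        }
        System.out.println("ok"); // run with -ea to enable the asserts
    }
}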
public FEELFnResult<Boolean> invoke(@ParameterName( "range" ) Range range, @ParameterName( "point" ) Comparable point) { if ( point == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point", "cannot be null")); } if ( range == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range", "cannot be null")); } try { boolean result = ( range.getHighBoundary() == Range.RangeBoundary.CLOSED && point.compareTo( range.getHighEndPoint() ) == 0 ); return FEELFnResult.ofResult( result ); } catch( Exception e ) { // points are not comparable return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point", "cannot be compared to range")); } }
@Test void invokeParamRangeAndRange() { FunctionTestUtil.assertResult( finishedByFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); FunctionTestUtil.assertResult( finishedByFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "c", "f", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); FunctionTestUtil.assertResult( finishedByFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "e", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "c", "f", Range.RangeBoundary.CLOSED ) ), Boolean.FALSE ); FunctionTestUtil.assertResult( finishedByFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "c", "f", Range.RangeBoundary.OPEN ) ), Boolean.FALSE ); }
@Override public String getAcknowledgmentType() { return "AR"; }
@Test public void testGetAcknowledgmentType() { instance = new MllpApplicationRejectAcknowledgementException( HL7_MESSAGE_BYTES, HL7_ACKNOWLEDGEMENT_BYTES, LOG_PHI_TRUE); assertEquals("AR", instance.getAcknowledgmentType()); }
@Override public void selectInstances(Map<Integer, List<InstanceConfig>> poolToInstanceConfigsMap, InstancePartitions instancePartitions) { int numPools = poolToInstanceConfigsMap.size(); Preconditions.checkState(numPools != 0, "No pool qualified for selection"); int tableNameHash = Math.abs(_tableNameWithType.hashCode()); List<Integer> pools = new ArrayList<>(poolToInstanceConfigsMap.keySet()); pools.sort(null); LOGGER.info("Starting instance replica-group/partition selection for table: {} with hash: {} from pools: {}, " + "minimize data movement: {}", _tableNameWithType, tableNameHash, pools, _minimizeDataMovement); if (_replicaGroupPartitionConfig.isReplicaGroupBased()) { if (_minimizeDataMovement) { replicaGroupBasedMinimumMovement(poolToInstanceConfigsMap, instancePartitions, pools, tableNameHash); } else { replicaGroupBasedSimple(poolToInstanceConfigsMap, instancePartitions, pools, tableNameHash); } } else { nonReplicaGroupBased(poolToInstanceConfigsMap, instancePartitions, pools, tableNameHash); } }
@Test public void testSelectPoolsWhenExistingReplicaGroupMapsToMultiplePools() throws JsonProcessingException { // The "rg0-2" instance used to belong to Pool 1, but now it belongs to Pool 0. //@formatter:off String existingPartitionsJson = "{\n" + " \"instancePartitionsName\": \"0f97dac8-4123-47c6-9a4d-b8ce039c5ea5_OFFLINE\",\n" + " \"partitionToInstancesMap\": {\n" + " \"0_0\": [\n" + " \"Server_pinot-server-rg0-0.pinot-server-headless.pinot.svc.cluster.local_8098\",\n" + " \"Server_pinot-server-rg0-1.pinot-server-headless.pinot.svc.cluster.local_8098\"\n" + " ],\n" + " \"0_1\": [\n" + " \"Server_pinot-server-rg0-2.pinot-server-headless.pinot.svc.cluster.local_8098\",\n" + " \"Server_pinot-server-rg1-0.pinot-server-headless.pinot.svc.cluster.local_8098\"\n" + " ]\n" + " }\n" + "}"; //@formatter:on InstancePartitions existing = OBJECT_MAPPER.readValue(existingPartitionsJson, InstancePartitions.class); InstanceReplicaGroupPartitionConfig config = new InstanceReplicaGroupPartitionConfig(true, 0, 2, 2, 1, 2, true, null); InstanceReplicaGroupPartitionSelector selector = new InstanceReplicaGroupPartitionSelector(config, "tableNameBlah", existing, true); String[] serverNames = {"rg0-0", "rg0-1", "rg0-2", "rg1-0", "rg1-1", "rg1-2"}; String[] poolNumbers = {"0", "0", "0", "1", "1", "1"}; Map<Integer, List<InstanceConfig>> poolToInstanceConfigsMap = new HashMap<>(); for (int i = 0; i < serverNames.length; i++) { Map<String, String> valuesMap = new HashMap<>(); valuesMap.put("serverName", serverNames[i]); valuesMap.put("pool", poolNumbers[i]); StringSubstitutor substitutor = new StringSubstitutor(valuesMap); String resolvedString = substitutor.replace(INSTANCE_CONFIG_TEMPLATE); ZNRecord znRecord = OBJECT_MAPPER.readValue(resolvedString, ZNRecord.class); int poolNumber = Integer.parseInt(poolNumbers[i]); poolToInstanceConfigsMap.computeIfAbsent(poolNumber, k -> new ArrayList<>()).add(new InstanceConfig(znRecord)); } InstancePartitions assignedPartitions = new InstancePartitions("0f97dac8-4123-47c6-9a4d-b8ce039c5ea5_OFFLINE"); selector.selectInstances(poolToInstanceConfigsMap, assignedPartitions); // The "rg0-2" instance is replaced by "rg1-0" (which belongs to Pool 1), as "rg0-2" no longer belongs to Pool 1. // And "rg1-0" remains the same position as it's always under Pool 1. //@formatter:off String expectedInstancePartitions = "{\n" + " \"instancePartitionsName\": \"0f97dac8-4123-47c6-9a4d-b8ce039c5ea5_OFFLINE\",\n" + " \"partitionToInstancesMap\": {\n" + " \"0_0\": [\n" + " \"Server_pinot-server-rg0-0.pinot-server-headless.pinot.svc.cluster.local_8098\",\n" + " \"Server_pinot-server-rg0-1.pinot-server-headless.pinot.svc.cluster.local_8098\"\n" + " ],\n" + " \"0_1\": [\n" + " \"Server_pinot-server-rg1-1.pinot-server-headless.pinot.svc.cluster.local_8098\",\n" + " \"Server_pinot-server-rg1-0.pinot-server-headless.pinot.svc.cluster.local_8098\"\n" + " ]\n" + " }\n" + "}"; //@formatter:on InstancePartitions expectedPartitions = OBJECT_MAPPER.readValue(expectedInstancePartitions, InstancePartitions.class); assertEquals(assignedPartitions, expectedPartitions); }
public static void populateGetCreatedLocalTransformationsMethod(final ClassOrInterfaceDeclaration toPopulate, final LocalTransformations localTransformations) { if (localTransformations != null) { BlockStmt createLocalTransformationsBody = KiePMMLLocalTransformationsFactory.getKiePMMLLocalTransformationsVariableDeclaration(localTransformations); createLocalTransformationsBody.addStatement(getReturnStmt(LOCAL_TRANSFORMATIONS)); final MethodDeclaration methodDeclaration = toPopulate.getMethodsByName(GET_CREATED_LOCAL_TRANSFORMATIONS).get(0); methodDeclaration.setBody(createLocalTransformationsBody); } }
@Test void populateGetCreatedLocalTransformationsMethod() throws IOException { org.kie.pmml.compiler.commons.codegenfactories.KiePMMLModelFactoryUtils.populateGetCreatedLocalTransformationsMethod(classOrInterfaceDeclaration, model.getLocalTransformations()); final MethodDeclaration retrieved = classOrInterfaceDeclaration.getMethodsByName(GET_CREATED_LOCAL_TRANSFORMATIONS).get(0); String text = getFileContent(TEST_08_SOURCE); MethodDeclaration expected = JavaParserUtils.parseMethod(text); assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue(); }
public static void validateImageInDaemonConf(Map<String, Object> conf) { List<String> allowedImages = getAllowedImages(conf, true); if (allowedImages.isEmpty()) { LOG.debug("{} is not configured; skip image validation", DaemonConfig.STORM_OCI_ALLOWED_IMAGES); } else { String defaultImage = (String) conf.get(DaemonConfig.STORM_OCI_IMAGE); validateImage(allowedImages, defaultImage, DaemonConfig.STORM_OCI_IMAGE); } }
@Test public void validateImageInDaemonConfTest() { Map<String, Object> conf = new HashMap<>(); List<String> allowedImages = new ArrayList<>(); allowedImages.add("storm/rhel7:dev_test"); allowedImages.add("storm/rhel7:dev_current"); conf.put(DaemonConfig.STORM_OCI_ALLOWED_IMAGES, allowedImages); conf.put(DaemonConfig.STORM_OCI_IMAGE, "storm/rhel7:dev_test"); OciUtils.validateImageInDaemonConf(conf); allowedImages.add("*"); conf.put(DaemonConfig.STORM_OCI_IMAGE, "storm/rhel7:wow"); OciUtils.validateImageInDaemonConf(conf); }
@Override public Map<String, String> getLabels(Properties properties) { LOGGER.info("DefaultLabelsCollectorManager get labels..."); Map<String, String> labels = getLabels(labelsCollectorsList, properties); LOGGER.info("DefaultLabelsCollectorManager get labels finished, labels: {}", labels); return labels; }
@Test void tagV2LabelsCollectorTest() { Properties properties = new Properties(); properties.put(Constants.APP_CONN_LABELS_KEY, "k1=v1,gray=properties_pre"); properties.put(Constants.CONFIG_GRAY_LABEL, "properties_after"); DefaultLabelsCollectorManager defaultLabelsCollectorManager = new DefaultLabelsCollectorManager(); Map<String, String> labels = defaultLabelsCollectorManager.getLabels(properties); assertEquals("properties_after", labels.get(Constants.CONFIG_GRAY_LABEL)); assertEquals("v1", labels.get("k1")); }
public List<String> split(String in) { final StringBuilder result = new StringBuilder(); final char[] chars = in.toCharArray(); for (int i = 0; i < chars.length; i++) { final char c = chars[i]; if (CHAR_OPERATORS.contains(String.valueOf(c))) { if (i < chars.length - 2 && CHAR_OPERATORS.contains(String.valueOf(chars[i + 1])) && !("(".equals(String.valueOf(chars[i + 1])) || ")".equals(String.valueOf(chars[i + 1])))) { result.append(" ").append(c).append(chars[i + 1]).append(" "); i++; } else { result.append(" ").append(c).append(" "); } } else { result.append(c); } } final String[] tokens = result.toString().split(SPLIT_EXPRESSION); final List<String> list = new ArrayList<>(); for (int i = 0; i < tokens.length; i++) { tokens[i] = tokens[i].trim(); if (!tokens[i].equals("")) { list.add(tokens[i]); } } return list; }
@Test public void split4() { List<String> tokens = parser.split("a and b AND(((a>c AND b> d) OR (x = y )) ) OR t>u"); assertEquals(Arrays.asList("a", "and", "b", "AND", "(", "(", "(", "a", ">", "c", "AND", "b", ">", "d", ")", "OR", "(", "x", "=", "y", ")", ")", ")", "OR", "t", ">", "u"), tokens); }
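// Standalone re-creation of the operator-splitting loop above, to show the
// two-character lookahead. The operator set OPS and the whitespace split regex
// are assumptions here, not the original CHAR_OPERATORS/SPLIT_EXPRESSION constants.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class SplitSketch {
    static final Set<String> OPS = new HashSet<>(Arrays.asList("(", ")", "<", ">", "=", "!"));

    static List<String> split(String in) {
        StringBuilder out = new StringBuilder();
        char[] chars = in.toCharArray();
        for (int i = 0; i < chars.length; i++) {
            char c = chars[i];
            if (OPS.contains(String.valueOf(c))) {
                // Lookahead pairs ">=", "<=", "!=" into one token; note the guard
                // `i < chars.length - 2` means a two-char operator at the very end
                // of the input is not combined.
                if (i < chars.length - 2 && OPS.contains(String.valueOf(chars[i + 1]))
                        && !"(".equals(String.valueOf(chars[i + 1]))
                        && !")".equals(String.valueOf(chars[i + 1]))) {
                    out.append(" ").append(c).append(chars[i + 1]).append(" ");
                    i++;
                } else {
                    out.append(" ").append(c).append(" ");
                }
            } else {
                out.append(c);
            }
        }
        List<String> tokens = new ArrayList<>();
        for (String t : out.toString().split("\\s+")) {
            if (!t.isEmpty()) {
                tokens.add(t);
            }
        }
        return tokens;
    }

    public static void main(String[] args) {
        System.out.println(split("a>=b AND (x=y)")); // [a, >=, b, AND, (, x, =, y, )]
    }
}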
public T_IdKeyPair generateOmemoIdentityKeyPair() { return keyUtil().generateOmemoIdentityKeyPair(); }
@Test public void generateOmemoIdentityKeyPairDoesNotReturnNull() { assertNotNull(store.generateOmemoIdentityKeyPair()); }
@Override public boolean add(E e) { final int priorityLevel = e.getPriorityLevel(); // try offering to all queues. if (!offerQueues(priorityLevel, e, true)) { CallQueueOverflowException ex; if (serverFailOverEnabled) { // Signal clients to failover and try a separate server. ex = CallQueueOverflowException.FAILOVER; } else if (priorityLevel == queues.size() - 1){ // only disconnect the lowest priority users that overflow the queue. ex = CallQueueOverflowException.DISCONNECT; } else { ex = CallQueueOverflowException.KEEPALIVE; } throw ex; } return true; }
@Test public void testFairCallQueueMetrics() throws Exception { final String fcqMetrics = "ns.FairCallQueue"; Schedulable p0 = mockCall("a", 0); Schedulable p1 = mockCall("b", 1); assertGauge("FairCallQueueSize_p0", 0, getMetrics(fcqMetrics)); assertGauge("FairCallQueueSize_p1", 0, getMetrics(fcqMetrics)); assertCounter("FairCallQueueOverflowedCalls_p0", 0L, getMetrics(fcqMetrics)); assertCounter("FairCallQueueOverflowedCalls_p1", 0L, getMetrics(fcqMetrics)); for (int i = 0; i < 5; i++) { fcq.add(p0); fcq.add(p1); } try { fcq.add(p1); fail("didn't overflow"); } catch (IllegalStateException ise) { // Expected exception } assertGauge("FairCallQueueSize_p0", 5, getMetrics(fcqMetrics)); assertGauge("FairCallQueueSize_p1", 5, getMetrics(fcqMetrics)); assertCounter("FairCallQueueOverflowedCalls_p0", 0L, getMetrics(fcqMetrics)); assertCounter("FairCallQueueOverflowedCalls_p1", 1L, getMetrics(fcqMetrics)); }
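// The overflow-signal choice in add(...) above, restated standalone (a hedged
// sketch; the real code throws CallQueueOverflowException constants, which the
// test observes as an IllegalStateException):
static String overflowSignal(boolean serverFailOverEnabled, int priorityLevel, int numQueues) {
    if (serverFailOverEnabled) {
        return "FAILOVER";   // tell the client to retry against another server
    } else if (priorityLevel == numQueues - 1) {
        return "DISCONNECT"; // only the lowest-priority overflowing caller is dropped
    } else {
        return "KEEPALIVE";  // higher-priority callers keep their connection
    }
}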
public static boolean isServletRequestAuthenticatorInstanceOf(Class<? extends ServletRequestAuthenticator> clazz) { final AuthCheckFilter instance = getInstance(); if (instance == null) { // We've not yet been instantiated return false; } return servletRequestAuthenticator != null && clazz.isAssignableFrom(servletRequestAuthenticator.getClass()); }
@Test public void willReturnTrueIfTheCorrectServletRequestAuthenticatorIsConfigured() { new AuthCheckFilter(adminManager, loginLimitManager); AuthCheckFilter.SERVLET_REQUEST_AUTHENTICATOR.setValue(NormalUserServletAuthenticatorClass.class); assertThat(AuthCheckFilter.isServletRequestAuthenticatorInstanceOf(NormalUserServletAuthenticatorClass.class), is(true)); }
public void saveV2(ImageWriter imageWriter) throws IOException { try { // 1 json for myself,1 json for number of users, 2 json for each user(kv) // 1 json for number of roles, 2 json for each role(kv) final int cnt = 1 + 1 + userToPrivilegeCollection.size() * 2 + 1 + roleIdToPrivilegeCollection.size() * 2; SRMetaBlockWriter writer = imageWriter.getBlockWriter(SRMetaBlockID.AUTHORIZATION_MGR, cnt); // 1 json for myself writer.writeJson(this); // 1 json for num user writer.writeInt(userToPrivilegeCollection.size()); for (Map.Entry<UserIdentity, UserPrivilegeCollectionV2> entry : userToPrivilegeCollection.entrySet()) { writer.writeJson(entry.getKey()); writer.writeJson(entry.getValue()); } // 1 json for num roles writer.writeInt(roleIdToPrivilegeCollection.size()); for (Map.Entry<Long, RolePrivilegeCollectionV2> entry : roleIdToPrivilegeCollection.entrySet()) { RolePrivilegeCollectionV2 value = entry.getValue(); // Avoid newly added PEntryObject type corrupt forward compatibility, // since built-in roles are always initialized on startup, we don't need to persist them. // But to keep the correct relationship with roles inherited from them, we still need to persist // an empty role for them, just for the role id. if (PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_IDS.contains(entry.getKey())) { // clone to avoid race condition RolePrivilegeCollectionV2 clone = value.cloneSelf(); clone.typeToPrivilegeEntryList = new HashMap<>(); value = clone; } writer.writeLong(entry.getKey()); writer.writeJson(value); } writer.close(); } catch (SRMetaBlockException e) { throw new IOException("failed to save AuthenticationManager!", e); } }
@Test public void testWontSavePrivCollForBuiltInRole() throws Exception { GlobalStateMgr masterGlobalStateMgr = ctx.getGlobalStateMgr(); AuthorizationMgr authorizationMgr = masterGlobalStateMgr.getAuthorizationMgr(); UtFrameUtils.PseudoJournalReplayer.resetFollowerJournalQueue(); UtFrameUtils.PseudoImage emptyImage = new UtFrameUtils.PseudoImage(); authorizationMgr.saveV2(emptyImage.getImageWriter()); SRMetaBlockReader reader = new SRMetaBlockReaderV2(emptyImage.getJsonReader()); // read the whole first reader.readJson(AuthorizationMgr.class); // read the number of user int numUser = reader.readInt(); // there should be only 2 users: root and test_user Assert.assertEquals(2, numUser); // read users and ignore them for (int i = 0; i != numUser; ++i) { // 2 json for each user(kv) reader.readJson(UserIdentity.class); reader.readJson(UserPrivilegeCollectionV2.class); } // read the number of roles int numRole = reader.readInt(); for (int i = 0; i != numRole; ++i) { // 2 json for each role(kv) Long roleId = reader.readLong(); RolePrivilegeCollectionV2 collection = reader.readJson(RolePrivilegeCollectionV2.class); if (PrivilegeBuiltinConstants.IMMUTABLE_BUILT_IN_ROLE_IDS.contains(roleId)) { // built-in role's priv collection should not be saved Assert.assertTrue(collection.typeToPrivilegeEntryList.isEmpty()); } } }
public static boolean equals(double num1, double num2) { return Double.doubleToLongBits(num1) == Double.doubleToLongBits(num2); }
@Test public void equalsTest() { assertTrue(NumberUtil.equals(new BigDecimal("0.00"), BigDecimal.ZERO)); }
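// Why equals(double, double) above compares bits instead of using ==:
// doubleToLongBits collapses all NaNs to one canonical bit pattern and keeps
// +0.0 and -0.0 distinct, so the method's semantics match Double.equals.
class DoubleBitsDemo {
    public static void main(String[] args) {
        System.out.println(Double.NaN == Double.NaN);                       // false
        System.out.println(Double.doubleToLongBits(Double.NaN)
                == Double.doubleToLongBits(Double.NaN));                    // true
        System.out.println(0.0 == -0.0);                                    // true
        System.out.println(Double.doubleToLongBits(0.0)
                == Double.doubleToLongBits(-0.0));                          // false
    }
}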
@Override public List<SnowflakeIdentifier> listIcebergTables(SnowflakeIdentifier scope) { StringBuilder baseQuery = new StringBuilder("SHOW ICEBERG TABLES"); String[] queryParams = null; switch (scope.type()) { case ROOT: // account-level listing baseQuery.append(" IN ACCOUNT"); break; case DATABASE: // database-level listing baseQuery.append(" IN DATABASE IDENTIFIER(?)"); queryParams = new String[] {scope.toIdentifierString()}; break; case SCHEMA: // schema-level listing baseQuery.append(" IN SCHEMA IDENTIFIER(?)"); queryParams = new String[] {scope.toIdentifierString()}; break; default: throw new IllegalArgumentException( String.format("Unsupported scope type for listIcebergTables: %s", scope)); } final String finalQuery = baseQuery.toString(); final String[] finalQueryParams = queryParams; List<SnowflakeIdentifier> tables; try { tables = connectionPool.run( conn -> queryHarness.query(conn, finalQuery, TABLE_RESULT_SET_HANDLER, finalQueryParams)); } catch (SQLException e) { throw snowflakeExceptionToIcebergException( scope, e, String.format("Failed to list tables for scope '%s'", scope)); } catch (InterruptedException e) { throw new UncheckedInterruptedException( e, "Interrupted while listing tables for scope '%s'", scope); } tables.forEach( table -> Preconditions.checkState( table.type() == SnowflakeIdentifier.Type.TABLE, "Expected TABLE, got identifier '%s' for scope '%s'", table, scope)); return tables; }
@SuppressWarnings("unchecked") @Test public void testListIcebergTablesInDatabase() throws SQLException { when(mockResultSet.next()).thenReturn(true).thenReturn(true).thenReturn(true).thenReturn(false); when(mockResultSet.getString("database_name")) .thenReturn("DB_1") .thenReturn("DB_1") .thenReturn("DB_1"); when(mockResultSet.getString("schema_name")) .thenReturn("SCHEMA_1") .thenReturn("SCHEMA_1") .thenReturn("SCHEMA_2"); when(mockResultSet.getString("name")) .thenReturn("TABLE_1") .thenReturn("TABLE_2") .thenReturn("TABLE_3"); List<SnowflakeIdentifier> actualList = snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofDatabase("DB_1")); verify(mockQueryHarness) .query( eq(mockConnection), eq("SHOW ICEBERG TABLES IN DATABASE IDENTIFIER(?)"), any(JdbcSnowflakeClient.ResultSetParser.class), eq("DB_1")); assertThat(actualList) .containsExactly( SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_1"), SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_1", "TABLE_2"), SnowflakeIdentifier.ofTable("DB_1", "SCHEMA_2", "TABLE_3")); }
public void seek(long pos) throws IOException { if (input instanceof RandomAccessFile) { ((RandomAccessFile) input).seek(pos); } else if (input instanceof DataInputStream) { throw new UnsupportedOperationException("Can not seek on Hollow Blob Input of type DataInputStream"); } else { throw new UnsupportedOperationException("Unknown Hollow Blob Input type"); } }
@Test public void testSeek() throws IOException { try (HollowBlobInput inStream = HollowBlobInput.modeBasedSelector(MemoryMode.ON_HEAP, mockBlob)) { inStream.seek(3); fail(); } catch (UnsupportedOperationException e) { // pass } catch (Exception e) { fail(); } HollowBlobInput inBuffer = HollowBlobInput.modeBasedSelector(MemoryMode.SHARED_MEMORY_LAZY, mockBlob); inBuffer.seek(3); assertEquals(3, inBuffer.getFilePointer()); // first byte is 0 }
@InvokeOnHeader(Web3jConstants.ETH_GET_UNCLE_BY_BLOCK_HASH_AND_INDEX) void ethGetUncleByBlockHashAndIndex(Message message) throws IOException { String blockHash = message.getHeader(Web3jConstants.BLOCK_HASH, configuration::getBlockHash, String.class); BigInteger uncleIndex = message.getHeader(Web3jConstants.INDEX, configuration::getIndex, BigInteger.class); Request<?, EthBlock> request = web3j.ethGetUncleByBlockHashAndIndex(blockHash, uncleIndex); setRequestId(message, request); EthBlock response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getBlock()); } }
@Test public void ethGetUncleByBlockHashAndIndexTest() throws Exception { EthBlock response = Mockito.mock(EthBlock.class); Mockito.when(mockWeb3j.ethGetUncleByBlockHashAndIndex(any(), any())).thenReturn(request); Mockito.when(request.send()).thenReturn(response); Mockito.when(response.getBlock()).thenReturn(Mockito.mock(EthBlock.Block.class)); Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_GET_UNCLE_BY_BLOCK_HASH_AND_INDEX); template.send(exchange); EthBlock.Block body = exchange.getIn().getBody(EthBlock.Block.class); assertNotNull(body); }
public FEELFnResult<Boolean> invoke(@ParameterName( "point" ) Comparable point, @ParameterName( "range" ) Range range) { if ( point == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point", "cannot be null")); } if ( range == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range", "cannot be null")); } try { boolean result = (range.getLowEndPoint().compareTo(point) < 0 && range.getHighEndPoint().compareTo(point) > 0) || (range.getLowEndPoint().compareTo(point) == 0 && range.getLowBoundary() == RangeBoundary.CLOSED) || (range.getHighEndPoint().compareTo(point) == 0 && range.getHighBoundary() == RangeBoundary.CLOSED); return FEELFnResult.ofResult( result ); } catch( Exception e ) { // points are not comparable return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point", "cannot be compared to range")); } }
@Test void invokeParamRangeAndRange() { FunctionTestUtil.assertResult( duringFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); FunctionTestUtil.assertResult( duringFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "c", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "a", "k", Range.RangeBoundary.CLOSED ) ), Boolean.TRUE ); FunctionTestUtil.assertResult( duringFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, "c", "d", Range.RangeBoundary.CLOSED ) ), Boolean.FALSE ); FunctionTestUtil.assertResult( duringFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.OPEN, "a", "k", Range.RangeBoundary.CLOSED ) ), Boolean.FALSE ); }
@Override public ScheduledFuture<?> scheduleAtFixedRate(Runnable command, long initialDelay, long period, TimeUnit unit) { ObjectUtil.checkNotNull(command, "command"); ObjectUtil.checkNotNull(unit, "unit"); if (initialDelay < 0) { throw new IllegalArgumentException( String.format("initialDelay: %d (expected: >= 0)", initialDelay)); } if (period <= 0) { throw new IllegalArgumentException( String.format("period: %d (expected: > 0)", period)); } validateScheduled0(initialDelay, unit); validateScheduled0(period, unit); return schedule(new ScheduledFutureTask<Void>( this, command, deadlineNanos(getCurrentTimeNanos(), unit.toNanos(initialDelay)), unit.toNanos(period))); }
@Test public void testScheduleAtFixedRateRunnableZero() { final TestScheduledEventExecutor executor = new TestScheduledEventExecutor(); assertThrows(IllegalArgumentException.class, new Executable() { @Override public void execute() { executor.scheduleAtFixedRate(TEST_RUNNABLE, 0, 0, TimeUnit.DAYS); } }); }
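// The precondition that makes the test above pass: period must be strictly
// positive while initialDelay may be zero. Standalone restatement of the checks:
static void checkFixedRateArgs(long initialDelay, long period) {
    if (initialDelay < 0) {
        throw new IllegalArgumentException(
                String.format("initialDelay: %d (expected: >= 0)", initialDelay));
    }
    if (period <= 0) { // period == 0 is rejected, hence the expected exception
        throw new IllegalArgumentException(
                String.format("period: %d (expected: > 0)", period));
    }
}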
@Operation(summary = "countDefinitionByUser", description = "COUNT_PROCESS_DEFINITION_BY_USER_NOTES") @Parameters({ @Parameter(name = "projectCode", description = "PROJECT_CODE", schema = @Schema(implementation = long.class, example = "100")) }) @GetMapping(value = "/define-user-count") @ResponseStatus(HttpStatus.OK) @ApiException(COUNT_PROCESS_DEFINITION_USER_ERROR) public Result<WorkflowDefinitionCountVO> countDefinitionByUser(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "projectCode", required = false) Long projectCode) { if (projectCode == null) { return Result.success(dataAnalysisService.getAllWorkflowDefinitionCount(loginUser)); } return Result.success(dataAnalysisService.getWorkflowDefinitionCountByProject(loginUser, projectCode)); }
@Test public void testCountDefinitionByUser() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("projectId", "16"); MvcResult mvcResult = mockMvc.perform(get("/projects/analysis/define-user-count") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); assertThat(result.getCode().intValue()).isEqualTo(Status.SUCCESS.getCode()); logger.info(mvcResult.getResponse().getContentAsString()); }
public static KeyFormat sanitizeKeyFormat( final KeyFormat keyFormat, final List<SqlType> newKeyColumnSqlTypes, final boolean allowKeyFormatChangeToSupportNewKeySchema ) { return sanitizeKeyFormatWrapping( !allowKeyFormatChangeToSupportNewKeySchema ? keyFormat : sanitizeKeyFormatForTypeCompatibility( sanitizeKeyFormatForMultipleColumns( keyFormat, newKeyColumnSqlTypes.size()), newKeyColumnSqlTypes ), newKeyColumnSqlTypes.size() == 1 ); }
@Test public void shouldAddKeyWrappingWhenSanitizing() { // Given: final KeyFormat format = KeyFormat.nonWindowed( FormatInfo.of(JsonFormat.NAME), SerdeFeatures.of()); // When: final KeyFormat sanitized = SerdeFeaturesFactory.sanitizeKeyFormat(format, SINGLE_SQL_TYPE, true); // Then: assertThat(sanitized.getFormatInfo(), equalTo(FormatInfo.of(JsonFormat.NAME))); assertThat(sanitized.getFeatures(), equalTo(SerdeFeatures.of(SerdeFeature.UNWRAP_SINGLES))); }
@VisibleForTesting static Function<HiveColumnHandle, ColumnMetadata> columnMetadataGetter(Table table, TypeManager typeManager, ColumnConverter columnConverter, List<String> notNullColumns) { ImmutableList.Builder<String> columnNames = ImmutableList.builder(); table.getPartitionColumns().stream().map(Column::getName).forEach(columnNames::add); table.getDataColumns().stream().map(Column::getName).forEach(columnNames::add); List<String> allColumnNames = columnNames.build(); if (allColumnNames.size() > Sets.newHashSet(allColumnNames).size()) { throw new PrestoException(HIVE_INVALID_METADATA, format("Hive metadata for table %s is invalid: Table descriptor contains duplicate columns", table.getTableName())); } List<Column> tableColumns = table.getDataColumns(); ImmutableMap.Builder<String, Optional<String>> builder = ImmutableMap.builder(); ImmutableMap.Builder<String, Optional<String>> typeMetadataBuilder = ImmutableMap.builder(); for (Column field : concat(tableColumns, table.getPartitionColumns())) { if (field.getComment().isPresent() && !field.getComment().get().equals("from deserializer")) { builder.put(field.getName(), field.getComment()); } else { builder.put(field.getName(), Optional.empty()); } typeMetadataBuilder.put(field.getName(), field.getTypeMetadata()); } // add hidden columns builder.put(PATH_COLUMN_NAME, Optional.empty()); if (table.getStorage().getBucketProperty().isPresent()) { builder.put(BUCKET_COLUMN_NAME, Optional.empty()); } builder.put(FILE_SIZE_COLUMN_NAME, Optional.empty()); builder.put(FILE_MODIFIED_TIME_COLUMN_NAME, Optional.empty()); builder.put(ROW_ID_COLUMN_NAME, Optional.empty()); Map<String, Optional<String>> columnComment = builder.build(); Map<String, Optional<String>> typeMetadata = typeMetadataBuilder.build(); return handle -> new ColumnMetadata( handle.getName(), typeManager.getType(columnConverter.getTypeSignature(handle.getHiveType(), typeMetadata.getOrDefault(handle.getName(), Optional.empty()))), !notNullColumns.contains(handle.getName()), columnComment.get(handle.getName()).orElse(null), columnExtraInfo(handle.isPartitionKey()), handle.isHidden(), ImmutableMap.of()); }
@Test public void testColumnMetadataGetter() { TypeManager mockTypeManager = new TestingTypeManager(); Column column1 = new Column("c1", HIVE_INT, Optional.empty(), Optional.of("some-metadata")); HiveColumnHandle hiveColumnHandle1 = new HiveColumnHandle( column1.getName(), HiveType.HIVE_INT, TypeSignature.parseTypeSignature("int"), 0, HiveColumnHandle.ColumnType.REGULAR, Optional.empty(), Optional.empty()); HiveColumnHandle hidden = new HiveColumnHandle( HiveColumnHandle.PATH_COLUMN_NAME, HiveType.HIVE_INT, TypeSignature.parseTypeSignature("int"), 0, HiveColumnHandle.ColumnType.SYNTHESIZED, Optional.empty(), Optional.empty()); Column partitionColumn = new Column("ds", HIVE_STRING, Optional.empty(), Optional.empty()); Table mockTable = new Table( "schema", "table", "user", PrestoTableType.MANAGED_TABLE, new Storage(fromHiveStorageFormat(ORC), "location", Optional.of(new HiveBucketProperty( ImmutableList.of(column1.getName()), 100, ImmutableList.of(), HIVE_COMPATIBLE, Optional.empty())), false, ImmutableMap.of(), ImmutableMap.of()), ImmutableList.of(column1), ImmutableList.of(partitionColumn), ImmutableMap.of(), Optional.empty(), Optional.empty()); ColumnMetadata actual = HiveMetadata.columnMetadataGetter(mockTable, mockTypeManager, new HiveColumnConverter(), ImmutableList.of()).apply(hiveColumnHandle1); ColumnMetadata expected = new ColumnMetadata("c1", IntegerType.INTEGER); assertEquals(actual, expected); actual = HiveMetadata.columnMetadataGetter(mockTable, mockTypeManager, new TestColumnConverter(), ImmutableList.of()).apply(hidden); expected = ColumnMetadata.builder().setName(HiveColumnHandle.PATH_COLUMN_NAME).setType(IntegerType.INTEGER).setHidden(true).build(); assertEquals(actual, expected); }
@Override public int getForwardingSourceField(int input, int targetField) { if (input != 0) { throw new IndexOutOfBoundsException(); } for (Map.Entry<Integer, FieldSet> e : fieldMapping.entrySet()) { if (e.getValue().contains(targetField)) { return e.getKey(); } } return -1; }
@Test void testAllForwardedSingleInputSemPropsInvalidIndex1() { assertThatThrownBy( () -> { SingleInputSemanticProperties sp = new SingleInputSemanticProperties .AllFieldsForwardedProperties(); sp.getForwardingSourceField(1, 0); }) .isInstanceOf(IndexOutOfBoundsException.class); }
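// The reverse lookup in getForwardingSourceField above, standalone: the map
// goes source field -> forwarded target fields, so finding the source for a
// target is a linear scan over entries (names here are illustrative).
import java.util.Map;
import java.util.Set;

class ForwardingLookup {
    static int forwardingSourceField(Map<Integer, Set<Integer>> fieldMapping, int targetField) {
        for (Map.Entry<Integer, Set<Integer>> e : fieldMapping.entrySet()) {
            if (e.getValue().contains(targetField)) {
                return e.getKey(); // first source that forwards to the target
            }
        }
        return -1; // no source field maps to this target
    }

    public static void main(String[] args) {
        Map<Integer, Set<Integer>> mapping = Map.of(0, Set.of(2, 3), 1, Set.of(0));
        System.out.println(forwardingSourceField(mapping, 3)); // 0
        System.out.println(forwardingSourceField(mapping, 5)); // -1
    }
}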
@Override public void register(URL url) { if (url == null) { throw new IllegalArgumentException("register url == null"); } if (url.getPort() != 0) { if (logger.isInfoEnabled()) { logger.info("Register: " + url); } } registered.add(url); }
@Test void testRegisterIfURLNULL() { Assertions.assertThrows(IllegalArgumentException.class, () -> { abstractRegistry.register(null); Assertions.fail("register url == null"); }); }
@Description("Given a (longitude, latitude) point, returns the surrounding Bing tiles at the specified zoom level") @ScalarFunction("bing_tiles_around") @SqlType("array(" + BingTileType.NAME + ")") public static Block bingTilesAround( @SqlType(StandardTypes.DOUBLE) double latitude, @SqlType(StandardTypes.DOUBLE) double longitude, @SqlType(StandardTypes.INTEGER) long zoomLevel) { checkLatitude(latitude, LATITUDE_OUT_OF_RANGE); checkLongitude(longitude, LONGITUDE_OUT_OF_RANGE); checkZoomLevel(zoomLevel); long mapSize = mapSize(toIntExact(zoomLevel)); long maxTileIndex = (mapSize / TILE_PIXELS) - 1; int tileX = longitudeToTileX(longitude, mapSize); int tileY = latitudeToTileY(latitude, mapSize); BlockBuilder blockBuilder = BIGINT.createBlockBuilder(null, 9); for (int i = -1; i <= 1; i++) { for (int j = -1; j <= 1; j++) { int x = tileX + i; int y = tileY + j; if (x >= 0 && x <= maxTileIndex && y >= 0 && y <= maxTileIndex) { BIGINT.writeLong(blockBuilder, BingTile.fromCoordinates(x, y, toIntExact(zoomLevel)).encode()); } } } return blockBuilder.build(); }
@Test public void testBingTilesAround() { assertFunction( "transform(bing_tiles_around(30.12, 60, 1), x -> bing_tile_quadkey(x))", new ArrayType(VARCHAR), ImmutableList.of("0", "2", "1", "3")); assertFunction( "transform(bing_tiles_around(30.12, 60, 15), x -> bing_tile_quadkey(x))", new ArrayType(VARCHAR), ImmutableList.of( "123030123010102", "123030123010120", "123030123010122", "123030123010103", "123030123010121", "123030123010123", "123030123010112", "123030123010130", "123030123010132")); assertFunction( "transform(bing_tiles_around(30.12, 60, 23), x -> bing_tile_quadkey(x))", new ArrayType(VARCHAR), ImmutableList.of( "12303012301012121210122", "12303012301012121210300", "12303012301012121210302", "12303012301012121210123", "12303012301012121210301", "12303012301012121210303", "12303012301012121210132", "12303012301012121210310", "12303012301012121210312")); }
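// Why the zoom-1 case in the test above yields only 4 tiles: the 3x3
// neighborhood is clamped to [0, maxTileIndex] on both axes, and at zoom 1
// the map is just 2x2 tiles. Standalone restatement of the clamped walk:
import java.util.ArrayList;
import java.util.List;

class TileNeighborhood {
    static List<int[]> around(int tileX, int tileY, int maxTileIndex) {
        List<int[]> tiles = new ArrayList<>();
        for (int i = -1; i <= 1; i++) {
            for (int j = -1; j <= 1; j++) {
                int x = tileX + i;
                int y = tileY + j;
                if (x >= 0 && x <= maxTileIndex && y >= 0 && y <= maxTileIndex) {
                    tiles.add(new int[] {x, y}); // the center tile itself is included
                }
            }
        }
        return tiles;
    }

    public static void main(String[] args) {
        // zoom 1 => 2x2 map, maxTileIndex = 1; a corner tile keeps 4 of 9 candidates
        System.out.println(around(0, 0, 1).size()); // 4
    }
}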
@Override public ItemChangeSets resolve(long namespaceId, String configText, List<ItemDTO> baseItems) { Map<Integer, ItemDTO> oldLineNumMapItem = BeanUtils.mapByKey("lineNum", baseItems); Map<String, ItemDTO> oldKeyMapItem = BeanUtils.mapByKey("key", baseItems); //remove comment and blank item map. oldKeyMapItem.remove(""); String[] newItems = configText.split(ITEM_SEPARATOR); Set<String> repeatKeys = new HashSet<>(); if (isHasRepeatKey(newItems, repeatKeys)) { throw new BadRequestException("Config text has repeated keys: %s, please check your input.", repeatKeys); } ItemChangeSets changeSets = new ItemChangeSets(); Map<Integer, String> newLineNumMapItem = new HashMap<>();//use for delete blank and comment item int lineCounter = 1; for (String newItem : newItems) { newItem = newItem.trim(); newLineNumMapItem.put(lineCounter, newItem); ItemDTO oldItemByLine = oldLineNumMapItem.get(lineCounter); //comment item if (isCommentItem(newItem)) { handleCommentLine(namespaceId, oldItemByLine, newItem, lineCounter, changeSets); //blank item } else if (isBlankItem(newItem)) { handleBlankLine(namespaceId, oldItemByLine, lineCounter, changeSets); //normal item } else { handleNormalLine(namespaceId, oldKeyMapItem, newItem, lineCounter, changeSets); } lineCounter++; } deleteCommentAndBlankItem(oldLineNumMapItem, newLineNumMapItem, changeSets); deleteNormalKVItem(oldKeyMapItem, changeSets); return changeSets; }
@Test public void testAddItemBeforeHasItem() { ItemChangeSets changeSets = resolver.resolve(1, "x=y\na=b\nb=c\nc=d", mockBaseItemHas3Key()); Assert.assertEquals("x", changeSets.getCreateItems().get(0).getKey()); Assert.assertEquals(1, changeSets.getCreateItems().size()); Assert.assertEquals(3, changeSets.getUpdateItems().size()); }
public void putIfNotNull(String key, String value) { if (value != null) put(key, value); }
@Test public void putIfNotNull() { EnvVars env = new EnvVars(); env.putIfNotNull("foo", null); assertTrue(env.isEmpty()); env.putIfNotNull("foo", "bar"); assertFalse(env.isEmpty()); }
public MessageType convert(Schema avroSchema) { if (!avroSchema.getType().equals(Schema.Type.RECORD)) { throw new IllegalArgumentException("Avro schema must be a record."); } return new MessageType(avroSchema.getFullName(), convertFields(avroSchema.getFields(), "")); }
@Test public void testLocalTimestampMicrosType() throws Exception { Schema date = LogicalTypes.localTimestampMicros().addToSchema(Schema.create(LONG)); Schema expected = Schema.createRecord( "myrecord", null, null, false, Arrays.asList(new Schema.Field("timestamp", date, null, null))); testRoundTripConversion( expected, "message myrecord {\n" + " required int64 timestamp (TIMESTAMP(MICROS,false));\n" + "}\n"); for (PrimitiveTypeName primitive : new PrimitiveTypeName[] {INT32, INT96, FLOAT, DOUBLE, BOOLEAN, BINARY, FIXED_LEN_BYTE_ARRAY}) { final PrimitiveType type; if (primitive == FIXED_LEN_BYTE_ARRAY) { type = new PrimitiveType(REQUIRED, primitive, 12, "test", TIMESTAMP_MICROS); } else { type = new PrimitiveType(REQUIRED, primitive, "test", TIMESTAMP_MICROS); } assertThrows( "Should not allow TIMESTAMP_MICROS with " + primitive, IllegalArgumentException.class, () -> new AvroSchemaConverter().convert(message(type))); } }
public static <T> Iterator<T> prepend(T prepend, @Nonnull Iterator<? extends T> iterator) { checkNotNull(iterator, "iterator cannot be null."); return new PrependIterator<>(prepend, iterator); }
@Test public void prependEmptyIterator() { var actual = IterableUtil.prepend(1, Collections.emptyIterator()); assertIteratorsEquals(List.of(1), actual); }
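// A minimal equivalent of the PrependIterator returned above (hedged sketch;
// the class name is the only thing the source reveals about it): yield one
// extra element first, then delegate to the wrapped iterator.
import java.util.Iterator;

class PrependSketch {
    static <T> Iterator<T> prepend(T first, Iterator<? extends T> rest) {
        return new Iterator<T>() {
            private boolean firstEmitted = false;

            @Override
            public boolean hasNext() {
                return !firstEmitted || rest.hasNext();
            }

            @Override
            public T next() {
                if (!firstEmitted) {
                    firstEmitted = true;
                    return first;
                }
                return rest.next(); // throws NoSuchElementException when exhausted
            }
        };
    }
}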
public synchronized boolean subscribe(Set<String> topics, Optional<ConsumerRebalanceListener> listener) { registerRebalanceListener(listener); setSubscriptionType(SubscriptionType.AUTO_TOPICS); return changeSubscription(topics); }
@Test public void cantSubscribePatternAndTopic() { state.subscribe(Pattern.compile(".*"), Optional.of(rebalanceListener)); assertThrows(IllegalStateException.class, () -> state.subscribe(singleton(topic), Optional.of(rebalanceListener))); }
public OpenAPI filter(OpenAPI openAPI, OpenAPISpecFilter filter, Map<String, List<String>> params, Map<String, String> cookies, Map<String, List<String>> headers) { OpenAPI filteredOpenAPI = filterOpenAPI(filter, openAPI, params, cookies, headers); if (filteredOpenAPI == null) { return filteredOpenAPI; } OpenAPI clone = new OpenAPI(); clone.info(filteredOpenAPI.getInfo()); clone.openapi(filteredOpenAPI.getOpenapi()); clone.jsonSchemaDialect(filteredOpenAPI.getJsonSchemaDialect()); clone.setSpecVersion(filteredOpenAPI.getSpecVersion()); clone.setExtensions(filteredOpenAPI.getExtensions()); clone.setExternalDocs(filteredOpenAPI.getExternalDocs()); clone.setSecurity(filteredOpenAPI.getSecurity()); clone.setServers(filteredOpenAPI.getServers()); clone.tags(filteredOpenAPI.getTags() == null ? null : new ArrayList<>(openAPI.getTags())); final Set<String> allowedTags = new HashSet<>(); final Set<String> filteredTags = new HashSet<>(); Paths clonedPaths = new Paths(); if (filteredOpenAPI.getPaths() != null) { for (String resourcePath : filteredOpenAPI.getPaths().keySet()) { PathItem pathItem = filteredOpenAPI.getPaths().get(resourcePath); PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers); PathItem clonedPathItem = cloneFilteredPathItem(filter,filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags); if (clonedPathItem != null) { if (!clonedPathItem.readOperations().isEmpty()) { clonedPaths.addPathItem(resourcePath, clonedPathItem); } } } clone.paths(clonedPaths); } filteredTags.removeAll(allowedTags); final List<Tag> tags = clone.getTags(); if (tags != null && !filteredTags.isEmpty()) { tags.removeIf(tag -> filteredTags.contains(tag.getName())); if (clone.getTags().isEmpty()) { clone.setTags(null); } } if (filteredOpenAPI.getWebhooks() != null) { for (String resourcePath : filteredOpenAPI.getWebhooks().keySet()) { PathItem pathItem = filteredOpenAPI.getPaths().get(resourcePath); PathItem filteredPathItem = filterPathItem(filter, pathItem, resourcePath, params, cookies, headers); PathItem clonedPathItem = cloneFilteredPathItem(filter,filteredPathItem, resourcePath, params, cookies, headers, allowedTags, filteredTags); if (clonedPathItem != null) { if (!clonedPathItem.readOperations().isEmpty()) { clone.addWebhooks(resourcePath, clonedPathItem); } } } } if (filteredOpenAPI.getComponents() != null) { clone.components(new Components()); clone.getComponents().setSchemas(filterComponentsSchema(filter, filteredOpenAPI.getComponents().getSchemas(), params, cookies, headers)); clone.getComponents().setSecuritySchemes(filteredOpenAPI.getComponents().getSecuritySchemes()); clone.getComponents().setCallbacks(filteredOpenAPI.getComponents().getCallbacks()); clone.getComponents().setExamples(filteredOpenAPI.getComponents().getExamples()); clone.getComponents().setExtensions(filteredOpenAPI.getComponents().getExtensions()); clone.getComponents().setHeaders(filteredOpenAPI.getComponents().getHeaders()); clone.getComponents().setLinks(filteredOpenAPI.getComponents().getLinks()); clone.getComponents().setParameters(filteredOpenAPI.getComponents().getParameters()); clone.getComponents().setRequestBodies(filteredOpenAPI.getComponents().getRequestBodies()); clone.getComponents().setResponses(filteredOpenAPI.getComponents().getResponses()); clone.getComponents().setPathItems(filteredOpenAPI.getComponents().getPathItems()); } if (filter.isRemovingUnreferencedDefinitions()) { clone = removeBrokenReferenceDefinitions(clone); } return clone; }
@Test(description = "it should not contain user tags in the top level OpenAPI object") public void shouldNotContainTopLevelUserTags() throws IOException { final OpenAPI openAPI = getOpenAPI(RESOURCE_REFERRED_SCHEMAS); final NoPetOperationsFilter filter = new NoPetOperationsFilter(); final OpenAPI filtered = new SpecFilter().filter(openAPI, filter, null, null, null); assertEquals(getTagNames(filtered), Sets.newHashSet(USER_TAG, STORE_TAG)); }
@Override public void checkDone() throws IllegalStateException { // If the range is empty, it is done if (range.getFrom().compareTo(range.getTo()) == 0) { return; } // If nothing was attempted, throws an exception checkState( lastAttemptedPosition != null, "Key range is non-empty %s and no keys have been attempted.", range); // If the end of the range was not attempted, throws an exception final Timestamp nextPosition = next(lastAttemptedPosition); if (nextPosition.compareTo(range.getTo()) < 0) { throw new IllegalStateException( String.format( "Last attempted key was %s in range %s, claiming work in [%s, %s) was not attempted", lastAttemptedPosition, range, nextPosition, range.getTo())); } }
@Test public void testCheckDoneSucceedsWhenFromIsEqualToTheEndOfTheRange() { final Timestamp from = Timestamp.ofTimeMicroseconds(10L); final TimestampRange range = TimestampRange.of(from, from); final TimestampRangeTracker tracker = new TimestampRangeTracker(range); // Method is void, succeeds if exception is not thrown tracker.checkDone(); }
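// The done-check above restated over plain longs (hedged sketch assuming a
// half-open range [from, to) and next(x) = x + 1, by analogy with the
// Timestamp version): an empty range is done; otherwise the last attempted
// position must reach the end of the range.
static void checkDone(long from, long to, Long lastAttemptedPosition) {
    if (from == to) {
        return; // empty range: trivially done, as the test above verifies
    }
    if (lastAttemptedPosition == null) {
        throw new IllegalStateException("Range is non-empty and nothing was attempted");
    }
    long next = lastAttemptedPosition + 1;
    if (next < to) {
        throw new IllegalStateException(
                "Work in [" + next + ", " + to + ") was not attempted");
    }
}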
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore ) throws KettleException { String resourceName = null; try { // Handle naming for both repository and XML bases resources... // String baseName; String originalPath; String fullname; String extension = "kjb"; if ( Utils.isEmpty( getFilename() ) ) { // Assume repository... // originalPath = directory.getPath(); baseName = getName(); fullname = directory.getPath() + ( directory.getPath().endsWith( RepositoryDirectory.DIRECTORY_SEPARATOR ) ? "" : RepositoryDirectory.DIRECTORY_SEPARATOR ) + getName() + "." + extension; // } else { // Assume file // FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( getFilename() ), space ); originalPath = fileObject.getParent().getName().getPath(); baseName = fileObject.getName().getBaseName(); fullname = fileObject.getName().getPath(); } resourceName = namingInterface .nameResource( baseName, originalPath, extension, ResourceNamingInterface.FileNamingType.JOB ); ResourceDefinition definition = definitions.get( resourceName ); if ( definition == null ) { // If we do this once, it will be plenty :-) // JobMeta jobMeta = (JobMeta) this.realClone( false ); // All objects get re-located to the root folder, // but, when exporting, we need to see current directory // in order to make 'Internal.Entry.Current.Directory' variable work jobMeta.setRepositoryDirectory( directory ); // Add used resources, modify transMeta accordingly // Go through the list of steps, etc. // These critters change the steps in the cloned TransMeta // At the end we make a new XML version of it in "exported" // format... // loop over steps, databases will be exported to XML anyway. // for ( JobEntryCopy jobEntry : jobMeta.jobcopies ) { compatibleJobEntryExportResources( jobEntry.getEntry(), jobMeta, definitions, namingInterface, repository ); jobEntry.getEntry().exportResources( jobMeta, definitions, namingInterface, repository, metaStore ); } // Set a number of parameters for all the data files referenced so far... // Map<String, String> directoryMap = namingInterface.getDirectoryMap(); if ( directoryMap != null ) { for ( Map.Entry<String, String> entry : directoryMap.entrySet() ) { jobMeta.addParameterDefinition( entry.getValue(), entry.getKey(), "Data file path discovered during export" ); } } // At the end, add ourselves to the map... // String jobMetaContent = jobMeta.getXML(); definition = new ResourceDefinition( resourceName, jobMetaContent ); // Also remember the original filename (if any), including variables etc. // if ( Utils.isEmpty( this.getFilename() ) ) { // Repository definition.setOrigin( fullname ); } else { definition.setOrigin( this.getFilename() ); } definitions.put( fullname, definition ); } } catch ( FileSystemException e ) { throw new KettleException( BaseMessages.getString( PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename() ), e ); } catch ( KettleFileException e ) { throw new KettleException( BaseMessages.getString( PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename() ), e ); } return resourceName; }
@Test public void shouldUseExistingRepositoryDirectoryWhenExporting() throws KettleException { final JobMeta jobMetaSpy = spy( jobMeta ); JobMeta jobMeta = new JobMeta() { @Override public Object realClone( boolean doClear ) { return jobMetaSpy; } }; jobMeta.setRepositoryDirectory( directoryJob ); jobMeta.setName( JOB_META_NAME ); jobMeta.exportResources( null, new HashMap<String, ResourceDefinition>( 4 ), mock( ResourceNamingInterface.class ), null, null ); // assert verify( jobMetaSpy ).setRepositoryDirectory( directoryJob ); }
Record convert(Object data) { return convert(data, null); }
@Test public void testEvolveTypeDetectionStructNested() { org.apache.iceberg.Schema structColSchema = new org.apache.iceberg.Schema( NestedField.required(1, "ii", IntegerType.get()), NestedField.required(2, "ff", FloatType.get())); org.apache.iceberg.Schema tableSchema = new org.apache.iceberg.Schema( NestedField.required(3, "i", IntegerType.get()), NestedField.required(4, "st", structColSchema.asStruct())); Table table = mock(Table.class); when(table.schema()).thenReturn(tableSchema); RecordConverter converter = new RecordConverter(table, config); Schema structSchema = SchemaBuilder.struct().field("ii", Schema.INT64_SCHEMA).field("ff", Schema.FLOAT64_SCHEMA); Schema schema = SchemaBuilder.struct().field("i", Schema.INT32_SCHEMA).field("st", structSchema); Struct structValue = new Struct(structSchema).put("ii", 11L).put("ff", 22d); Struct data = new Struct(schema).put("i", 1).put("st", structValue); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(data, consumer); Collection<UpdateType> updates = consumer.updateTypes(); assertThat(updates).hasSize(2); Map<String, UpdateType> updateMap = Maps.newHashMap(); updates.forEach(update -> updateMap.put(update.name(), update)); assertThat(updateMap.get("st.ii").type()).isInstanceOf(LongType.class); assertThat(updateMap.get("st.ff").type()).isInstanceOf(DoubleType.class); }
@ScalarOperator(EQUAL) @SqlType(StandardTypes.BOOLEAN) @SqlNullable public static Boolean equal(@SqlType(StandardTypes.TINYINT) long left, @SqlType(StandardTypes.TINYINT) long right) { return left == right; }
@Test public void testEqual() { assertFunction("TINYINT'37' = TINYINT'37'", BOOLEAN, true); assertFunction("TINYINT'37' = TINYINT'17'", BOOLEAN, false); assertFunction("TINYINT'17' = TINYINT'37'", BOOLEAN, false); assertFunction("TINYINT'17' = TINYINT'17'", BOOLEAN, true); }
public double sub(int i, int j, double b) { return A[index(i, j)] -= b; }
@Test public void testSub() { System.out.println("sub"); double[][] A = { { 0.7220180, 0.07121225, 0.6881997f}, {-0.2648886, -0.89044952, 0.3700456f}, {-0.6391588, 0.44947578, 0.6240573f} }; double[][] B = { {0.6881997, -0.07121225, 0.7220180f}, {0.3700456, 0.89044952, -0.2648886f}, {0.6240573, -0.44947578, -0.6391588f} }; double[][] C = { { 0.0338183, 0.1424245, -0.0338183f}, {-0.6349342, -1.7808990, 0.6349342f}, {-1.2632161, 0.8989516, 1.2632161f} }; Matrix a = Matrix.of(A); Matrix b = Matrix.of(B); a.sub(b); assertTrue(MathEx.equals(C, a.toArray(), 1E-7)); }
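// The per-element primitive sub(i, j, b) above generalizes to the whole-matrix
// A -= B that the test exercises; a plain double[][] restatement for illustration:
static void subInPlace(double[][] a, double[][] b) {
    for (int i = 0; i < a.length; i++) {
        for (int j = 0; j < a[i].length; j++) {
            a[i][j] -= b[i][j]; // mutates a, mirroring Matrix.sub's in-place update
        }
    }
}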