Columns: focal_method (string, length 13 to 60.9k), test_case (string, length 25 to 109k)
public Result runExtractor(String value) { final Matcher matcher = pattern.matcher(value); final boolean found = matcher.find(); if (!found) { return null; } final int start = matcher.groupCount() > 0 ? matcher.start(1) : -1; final int end = matcher.groupCount() > 0 ? matcher.end(1) : -1; final String s; try { s = replaceAll ? matcher.replaceAll(replacement) : matcher.replaceFirst(replacement); } catch (Exception e) { throw new RuntimeException("Error while trying to replace string", e); } return new Result(s, start, end); }
@Test public void testReplacementWithCustomReplacement() throws Exception { final Message message = messageFactory.createMessage("Foobar 123", "source", Tools.nowUTC()); final RegexReplaceExtractor extractor = new RegexReplaceExtractor( metricRegistry, "id", "title", 0L, Extractor.CursorStrategy.COPY, "message", "message", ImmutableMap.<String, Object>of("regex", "(Foobar) (\\d+)", "replacement", "$2/$1"), "user", Collections.<Converter>emptyList(), Extractor.ConditionType.NONE, null); extractor.runExtractor(message); assertThat(message.getMessage()).isEqualTo("123/Foobar"); }
public FloatArrayAsIterable usingExactEquality() { return new FloatArrayAsIterable(EXACT_EQUALITY_CORRESPONDENCE, iterableSubject()); }
@Test public void usingExactEquality_containsExactly_primitiveFloatArray_inOrder_failure() { expectFailureWhenTestingThat(array(1.1f, 2.2f, 3.3f)) .usingExactEquality() .containsExactly(array(2.2f, 1.1f, 3.3f)) .inOrder(); assertFailureKeys( "value of", "contents match, but order was wrong", "expected", "testing whether", "but was"); assertFailureValue("expected", lenientFormat("[%s, %s, %s]", 2.2f, 1.1f, 3.3f)); }
public static TypeBuilder<Schema> builder() { return new TypeBuilder<>(new SchemaCompletion(), new NameContext()); }
@Test void namesFailAbsent() { assertThrows(SchemaParseException.class, () -> { SchemaBuilder.builder().type("notdefined"); }); }
@Override public NodeInfo getNode(String nodeId) { final Collection<SubClusterInfo> subClustersActive = federationFacade.getActiveSubClusters(); if (subClustersActive.isEmpty()) { throw new NotFoundException(FederationPolicyUtils.NO_ACTIVE_SUBCLUSTER_AVAILABLE); } final Map<SubClusterInfo, NodeInfo> results = getNode(subClustersActive, nodeId); // Collect the responses NodeInfo nodeInfo = null; for (NodeInfo nodeResponse : results.values()) { try { // Check if the node was already found in a different SubCluster and // it has an old health report if (nodeInfo == null || nodeInfo.getLastHealthUpdate() < nodeResponse.getLastHealthUpdate()) { nodeInfo = nodeResponse; } } catch (Throwable e) { LOG.warn("Failed to get node report ", e); } } if (nodeInfo == null) { throw new NotFoundException("nodeId, " + nodeId + ", is not found"); } return nodeInfo; }
@Test public void testGetNode() { NodeInfo responseGet = interceptor.getNode("testGetNode"); Assert.assertNotNull(responseGet); Assert.assertEquals(NUM_SUBCLUSTER - 1, responseGet.getLastHealthUpdate()); }
public boolean isVersionedExpirationTimerSupported() { return allDevicesHaveCapability(DeviceCapabilities::versionedExpirationTimer); }
@Test void isVersionedExpirationTimerSupported() { assertTrue(AccountsHelper.generateTestAccount("+18005551234", UUID.randomUUID(), UUID.randomUUID(), List.of(versionedExpirationTimerCapableDevice), "1234".getBytes(StandardCharsets.UTF_8)).isVersionedExpirationTimerSupported()); assertFalse(AccountsHelper.generateTestAccount("+18005551234", UUID.randomUUID(), UUID.randomUUID(), List.of(versionedExpirationTimerIncapableDevice, versionedExpirationTimerCapableDevice), "1234".getBytes(StandardCharsets.UTF_8)).isVersionedExpirationTimerSupported()); }
@Override public boolean doOffer(final Runnable runnable) { return super.offer(runnable); }
@Test public void testOffer() { MemoryLimitedTaskQueue memoryLimitedTaskQueue = new MemoryLimitedTaskQueue<>(instrumentation); assertTrue(memoryLimitedTaskQueue.doOffer(() -> { })); }
@Override public V put(K key, V value, Duration ttl) { return get(putAsync(key, value, ttl)); }
@Test public void testKeySet() throws InterruptedException { RMapCacheNative<SimpleKey, SimpleValue> map = redisson.getMapCacheNative("simple03"); map.put(new SimpleKey("33"), new SimpleValue("44"), Duration.ofSeconds(1)); map.put(new SimpleKey("1"), new SimpleValue("2")); Assertions.assertTrue(map.keySet().contains(new SimpleKey("33"))); Assertions.assertFalse(map.keySet().contains(new SimpleKey("44"))); Assertions.assertTrue(map.keySet().contains(new SimpleKey("1"))); Thread.sleep(1000); Assertions.assertFalse(map.keySet().contains(new SimpleKey("33"))); Assertions.assertFalse(map.keySet().contains(new SimpleKey("44"))); Assertions.assertTrue(map.keySet().contains(new SimpleKey("1"))); map.destroy(); }
public static String[][] normalizeArrays( int normalizeToLength, String[]... arraysToNormalize ) { if ( arraysToNormalize == null ) { return null; } int arraysToProcess = arraysToNormalize.length; String[][] rtn = new String[ arraysToProcess ][]; for ( int i = 0; i < arraysToNormalize.length; i++ ) { String[] nextArray = arraysToNormalize[ i ]; if ( nextArray != null ) { if ( nextArray.length < normalizeToLength ) { String[] newArray = new String[ normalizeToLength ]; System.arraycopy( nextArray, 0, newArray, 0, nextArray.length ); rtn[ i ] = newArray; } else { rtn[ i ] = nextArray; } } else { rtn[ i ] = new String[ normalizeToLength ]; } } return rtn; }
@Test public void testNormalizeArraysMethods() { String[] s1 = new String[] { "one" }; String[] s2 = new String[] { "one", "two" }; String[] s3 = new String[] { "one", "two", "three" }; long[] l1 = new long[] { 1 }; long[] l2 = new long[] { 1, 2 }; long[] l3 = new long[] { 1, 2, 3 }; short[] sh1 = new short[] { 1 }; short[] sh2 = new short[] { 1, 2 }; short[] sh3 = new short[] { 1, 2, 3 }; boolean[] b1 = new boolean[] { true }; boolean[] b2 = new boolean[] { true, false }; boolean[] b3 = new boolean[] { true, false, true }; int[] i1 = new int[] { 1 }; int[] i2 = new int[] { 1, 2 }; int[] i3 = new int[] { 1, 3 }; String[][] newS = Utils.normalizeArrays( 3, s1, s2 ); assertEquals( 2, newS.length ); assertEquals( 3, newS[ 0 ].length ); assertEquals( 3, newS[ 1 ].length ); newS = Utils.normalizeArrays( 3, s1, null ); assertEquals( 2, newS.length ); assertEquals( 3, newS[ 0 ].length ); assertEquals( 3, newS[ 1 ].length ); newS = Utils.normalizeArrays( 2, s2 ); assertEquals( 1, newS.length ); assertEquals( 2, newS[ 0 ].length ); assertArrayEquals( newS[ 0 ], s2 ); assertTrue( newS[ 0 ] == s2 ); // If arrays are equal sized, it should return original object long[][] newL = Utils.normalizeArrays( 3, l1, l2 ); assertEquals( 2, newL.length ); assertEquals( 3, newL[ 0 ].length ); assertEquals( 3, newL[ 1 ].length ); newL = Utils.normalizeArrays( 3, l1, null ); assertEquals( 2, newL.length ); assertEquals( 3, newL[ 0 ].length ); assertEquals( 3, newL[ 1 ].length ); newL = Utils.normalizeArrays( 2, l2 ); assertEquals( 1, newL.length ); assertEquals( 2, newL[ 0 ].length ); assertArrayEquals( newL[ 0 ], l2 ); assertTrue( newL[ 0 ] == l2 ); // If arrays are equal sized, it should return original object short[][] newSh = Utils.normalizeArrays( 3, sh1, sh2 ); assertEquals( 2, newSh.length ); assertEquals( 3, newSh[ 0 ].length ); assertEquals( 3, newSh[ 1 ].length ); newSh = Utils.normalizeArrays( 3, sh1, null ); assertEquals( 2, newSh.length ); assertEquals( 3, newSh[ 0 ].length ); assertEquals( 3, newSh[ 1 ].length ); newSh = Utils.normalizeArrays( 2, sh2 ); assertEquals( 1, newSh.length ); assertEquals( 2, newSh[ 0 ].length ); assertArrayEquals( newSh[ 0 ], sh2 ); assertTrue( newSh[ 0 ] == sh2 ); // If arrays are equal sized, it should return original object boolean[][] newB = Utils.normalizeArrays( 3, b1, b2 ); assertEquals( 2, newB.length ); assertEquals( 3, newB[ 0 ].length ); assertEquals( 3, newB[ 1 ].length ); newB = Utils.normalizeArrays( 3, b1, null ); assertEquals( 2, newB.length ); assertEquals( 3, newB[ 0 ].length ); assertEquals( 3, newB[ 1 ].length ); newB = Utils.normalizeArrays( 2, b2 ); assertEquals( 1, newB.length ); assertEquals( 2, newB[ 0 ].length ); assertTrue( newB[ 0 ] == b2 ); // If arrays are equal sized, it should return original object int[][] newI = Utils.normalizeArrays( 3, i1, i2 ); assertEquals( 2, newI.length ); assertEquals( 3, newI[ 0 ].length ); assertEquals( 3, newI[ 1 ].length ); newI = Utils.normalizeArrays( 3, i1, null ); assertEquals( 2, newI.length ); assertEquals( 3, newI[ 0 ].length ); assertEquals( 3, newI[ 1 ].length ); newI = Utils.normalizeArrays( 2, i2 ); assertEquals( 1, newI.length ); assertEquals( 2, newI[ 0 ].length ); assertArrayEquals( newI[ 0 ], i2 ); assertTrue( newI[ 0 ] == i2 ); // If arrays are equal sized, it should return original object }
public long getBlock_len() { return block_len; }
@Test public void testGetBlock_len() { assertEquals(TestParameters.VP_BLOCK_LENGTH, chmItspHeader.getBlock_len()); }
private static String parseArchitecture(P4Info p4info) { if (p4info.hasPkgInfo()) { return p4info.getPkgInfo().getArch(); } return null; }
@Test public void testParseArchitecture() throws Exception { // Generate two PiPipelineModels from the same p4Info file PiPipelineModel model = P4InfoParser.parse(p4InfoUrl); PiPipelineModel sameAsModel = P4InfoParser.parse(p4InfoUrl); PiPipelineModel model3 = P4InfoParser.parse(p4InfoUrl2); String architecture1 = model.architecture().orElse(null); String architecture2 = sameAsModel.architecture().orElse(null); assertThat("null value is returned if `arch` not present in P4Info", architecture1, is(nullValue())); assertThat("null value is returned if `arch` not present in P4Info", architecture2, is(nullValue())); String architecture3 = model3.architecture().orElse(null); assertThat("test that `arch` field is correctly parsed", architecture3, is("v1model")); }
@Override public JsonObject toJson() { JsonObject root = new JsonObject(); root.add("clusterState", clusterState.name()); root.add("nodeState", nodeState.name()); root.add("clusterVersion", clusterVersion.toString()); root.add("memberVersion", memberVersion.toString()); JsonObject weaknesses = new JsonObject(); for (Map.Entry<String, List<String>> entry : weakSecretsConfigs.entrySet()) { JsonArray values = new JsonArray(); for (String value : entry.getValue()) { values.add(value); } weaknesses.add(entry.getKey(), values); } root.add("weakConfigs", weaknesses); return root; }
@Test public void toJson() throws Exception { ClusterState clusterState = ClusterState.ACTIVE; com.hazelcast.instance.impl.NodeState nodeState = com.hazelcast.instance.impl.NodeState.PASSIVE; Version clusterVersion = Version.of("3.8"); MemberVersion memberVersion = MemberVersion.of("3.9.0"); NodeState state = new NodeStateImpl(clusterState, nodeState, clusterVersion, memberVersion); NodeState deserialized = new NodeStateImpl(); deserialized.fromJson(state.toJson()); assertEquals(clusterState, deserialized.getClusterState()); assertEquals(nodeState, deserialized.getNodeState()); assertEquals(clusterVersion, deserialized.getClusterVersion()); assertEquals(memberVersion, deserialized.getMemberVersion()); }
@Override public void onSelectorChanged(final List<SelectorData> selectorDataList, final DataEventTypeEnum eventType) { WebsocketData<SelectorData> websocketData = new WebsocketData<>(ConfigGroupEnum.SELECTOR.name(), eventType.name(), selectorDataList); WebsocketCollector.send(GsonUtils.getInstance().toJson(websocketData), eventType); }
@Test public void testOnSelectorChanged() { String message = "{\"groupType\":\"SELECTOR\",\"eventType\":\"UPDATE\",\"data\":" + "[{\"id\":\"1336329408516136960\",\"pluginId\":\"5\",\"pluginName\":\"divide\",\"name\":" + "\"/http\",\"matchMode\":0,\"type\":1,\"sort\":1,\"enabled\":true,\"logged\":true," + "\"continued\":true,\"handle\":\"[{\\\\\\\"upstreamHost\\\\\\\":\\\\\\\"localhost\\\\\\\"," + "\\\\\\\"protocol\\\\\\\":\\\\\\\"http://\\\\\\\",\\\\\\\"upstreamUrl\\\\\\\":" + "\\\\\\\"127.0.0.1:8187\\\\\\\",\\\\\\\"weight\\\\\\\":\\\\\\\"51\\\\\\\"}," + "{\\\\\\\"upstreamHost\\\\\\\":\\\\\\\"localhost\\\\\\\",\\\\\\\"protocol\\\\\\\":" + "\\\\\\\"http://\\\\\\\",\\\\\\\"upstreamUrl\\\\\\\":\\\\\\\"127.0.0.1:8188\\\\\\\"," + "\\\\\\\"weight\\\\\\\":\\\\\\\"49\\\\\\\"}]\",\"conditionList\":[{\"paramType\":\"uri\"," + "\"operator\":\"match\",\"paramName\":\"/\",\"paramValue\":\"/http/**\"}]}]}"; MockedStatic.Verification verification = () -> WebsocketCollector.send(message, DataEventTypeEnum.UPDATE); try (MockedStatic<WebsocketCollector> mockedStatic = mockStatic(WebsocketCollector.class)) { mockedStatic.when(verification).thenAnswer((Answer<Void>) invocation -> null); websocketDataChangedListener.onSelectorChanged(selectorDataList, DataEventTypeEnum.UPDATE); mockedStatic.verify(verification); } }
@Override @InterfaceAudience.Private public void readFields(DataInput in) throws IOException { this.length = in.readLong(); this.fileCount = in.readLong(); this.directoryCount = in.readLong(); setQuota(in.readLong()); setSpaceConsumed(in.readLong()); setSpaceQuota(in.readLong()); }
@Test public void testReadFields() throws IOException { long length = 11111; long fileCount = 22222; long directoryCount = 33333; long quota = 44444; long spaceConsumed = 55555; long spaceQuota = 66666; ContentSummary contentSummary = new ContentSummary.Builder().build(); DataInput in = mock(DataInput.class); when(in.readLong()).thenReturn(length).thenReturn(fileCount) .thenReturn(directoryCount).thenReturn(quota).thenReturn(spaceConsumed) .thenReturn(spaceQuota); contentSummary.readFields(in); assertEquals("getLength", length, contentSummary.getLength()); assertEquals("getFileCount", fileCount, contentSummary.getFileCount()); assertEquals("getDirectoryCount", directoryCount, contentSummary.getDirectoryCount()); assertEquals("getQuota", quota, contentSummary.getQuota()); assertEquals("getSpaceConsumed", spaceConsumed, contentSummary.getSpaceConsumed()); assertEquals("getSpaceQuota", spaceQuota, contentSummary.getSpaceQuota()); }
public static ResourceModel processResource(final Class<?> resourceClass) { return processResource(resourceClass, null); }
@Test(expectedExceptions = ResourceConfigException.class) public void failsOnInvalidActionReturnTypeRef() { @RestLiCollection(name = "invalidReturnTypeRef") class LocalClass extends CollectionResourceTemplate<Long, EmptyRecord> { @Action(name = "invalidReturnTypeRef", returnTyperef = StringRef.class) public Long invalidReturnTypeRef(@ActionParam(value = "someId") String someId) { return null; } } RestLiAnnotationReader.processResource(LocalClass.class); Assert.fail("#getActionTyperefDataSchema should fail throwing a ResourceConfigException"); }
@VisibleForTesting public static void validateAndResolveService(Service service, SliderFileSystem fs, org.apache.hadoop.conf.Configuration conf) throws IOException { boolean dnsEnabled = conf.getBoolean(RegistryConstants.KEY_DNS_ENABLED, RegistryConstants.DEFAULT_DNS_ENABLED); if (dnsEnabled) { if (RegistryUtils.currentUser().length() > RegistryConstants.MAX_FQDN_LABEL_LENGTH) { throw new IllegalArgumentException( RestApiErrorMessages.ERROR_USER_NAME_INVALID); } userNamePattern.validate(RegistryUtils.currentUser()); } if (StringUtils.isEmpty(service.getName())) { throw new IllegalArgumentException( RestApiErrorMessages.ERROR_APPLICATION_NAME_INVALID); } if (StringUtils.isEmpty(service.getVersion())) { throw new IllegalArgumentException(String.format( RestApiErrorMessages.ERROR_APPLICATION_VERSION_INVALID, service.getName())); } validateNameFormat(service.getName(), conf); // If the service has no components, throw error if (!hasComponent(service)) { throw new IllegalArgumentException( "No component specified for " + service.getName()); } if (UserGroupInformation.isSecurityEnabled()) { validateKerberosPrincipal(service.getKerberosPrincipal()); } // Validate the Docker client config. try { validateDockerClientConfiguration(service, conf); } catch (IOException e) { throw new IllegalArgumentException(e); } // Validate there are no component name collisions (collisions are not // currently supported) and add any components from external services Configuration globalConf = service.getConfiguration(); Set<String> componentNames = new HashSet<>(); List<Component> componentsToRemove = new ArrayList<>(); List<Component> componentsToAdd = new ArrayList<>(); for (Component comp : service.getComponents()) { int maxCompLength = RegistryConstants.MAX_FQDN_LABEL_LENGTH; maxCompLength = maxCompLength - Long.toString(Long.MAX_VALUE).length(); if (dnsEnabled && comp.getName().length() > maxCompLength) { throw new IllegalArgumentException(String.format(RestApiErrorMessages .ERROR_COMPONENT_NAME_INVALID, maxCompLength, comp.getName())); } if (service.getName().equals(comp.getName())) { throw new IllegalArgumentException(String.format(RestApiErrorMessages .ERROR_COMPONENT_NAME_CONFLICTS_WITH_SERVICE_NAME, comp.getName(), service.getName())); } if (componentNames.contains(comp.getName())) { throw new IllegalArgumentException("Component name collision: " + comp.getName()); } // If artifact is of type SERVICE (which cannot be filled from global), // read external service and add its components to this service if (comp.getArtifact() != null && comp.getArtifact().getType() == Artifact.TypeEnum.SERVICE) { if (StringUtils.isEmpty(comp.getArtifact().getId())) { throw new IllegalArgumentException( RestApiErrorMessages.ERROR_ARTIFACT_ID_INVALID); } LOG.info("Marking {} for removal", comp.getName()); componentsToRemove.add(comp); List<Component> externalComponents = getComponents(fs, comp.getArtifact().getId()); for (Component c : externalComponents) { Component override = service.getComponent(c.getName()); if (override != null && override.getArtifact() == null) { // allow properties from external components to be overridden / // augmented by properties in this component, except for artifact // which must be read from external component override.mergeFrom(c); LOG.info("Merging external component {} from external {}", c .getName(), comp.getName()); } else { if (componentNames.contains(c.getName())) { throw new IllegalArgumentException("Component name collision: " + c.getName()); } componentNames.add(c.getName()); componentsToAdd.add(c); LOG.info("Adding component {} from external {}", c.getName(), comp.getName()); } } } else { // otherwise handle as a normal component componentNames.add(comp.getName()); // configuration comp.getConfiguration().mergeFrom(globalConf); } } service.getComponents().removeAll(componentsToRemove); service.getComponents().addAll(componentsToAdd); // Validate components and let global values take effect if component level // values are not provided Artifact globalArtifact = service.getArtifact(); Resource globalResource = service.getResource(); for (Component comp : service.getComponents()) { // fill in global artifact unless it is type SERVICE if (comp.getArtifact() == null && service.getArtifact() != null && service.getArtifact().getType() != Artifact.TypeEnum .SERVICE) { comp.setArtifact(globalArtifact); } // fill in global resource if (comp.getResource() == null) { comp.setResource(globalResource); } // validate dependency existence if (comp.getDependencies() != null) { for (String dependency : comp.getDependencies()) { if (!componentNames.contains(dependency)) { throw new IllegalArgumentException(String.format( RestApiErrorMessages.ERROR_DEPENDENCY_INVALID, dependency, comp.getName())); } } } validateComponent(comp, fs.getFileSystem(), conf); } validatePlacementPolicy(service.getComponents(), componentNames); // validate dependency tree sortByDependencies(service.getComponents()); // Service lifetime if not specified, is set to unlimited lifetime if (service.getLifetime() == null) { service.setLifetime(RestApiConstants.DEFAULT_UNLIMITED_LIFETIME); } }
@Test public void testArtifacts() throws IOException { SliderFileSystem sfs = ServiceTestUtils.initMockFs(); Service app = new Service(); app.setName("service1"); app.setVersion("v1"); Resource res = new Resource(); app.setResource(res); res.setMemory("512M"); // no artifact id fails with default type Artifact artifact = new Artifact(); app.setArtifact(artifact); String compName = "comp1"; Component comp = ServiceTestUtils.createComponent(compName); app.setComponents(Collections.singletonList(comp)); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no artifact id"); } catch (IllegalArgumentException e) { assertEquals(String.format(ERROR_ARTIFACT_ID_FOR_COMP_INVALID, compName), e.getMessage()); } // no artifact id fails with SERVICE type artifact.setType(Artifact.TypeEnum.SERVICE); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no artifact id"); } catch (IllegalArgumentException e) { assertEquals(ERROR_ARTIFACT_ID_INVALID, e.getMessage()); } // no artifact id fails with TARBALL type artifact.setType(Artifact.TypeEnum.TARBALL); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no artifact id"); } catch (IllegalArgumentException e) { assertEquals(String.format(ERROR_ARTIFACT_ID_FOR_COMP_INVALID, compName), e.getMessage()); } // everything valid here artifact.setType(Artifact.TypeEnum.DOCKER); artifact.setId("docker.io/centos:centos7"); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { LOG.error("service attributes specified should be valid here", e); Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } assertThat(app.getLifetime()).isEqualTo(DEFAULT_UNLIMITED_LIFETIME); }
public static boolean checkpw(String plaintext, String hashed) { byte hashed_bytes[]; byte try_bytes[]; try { String try_pw = hashpw(plaintext, hashed); hashed_bytes = hashed.getBytes("UTF-8"); try_bytes = try_pw.getBytes("UTF-8"); } catch (UnsupportedEncodingException uee) { return false; } if (hashed_bytes.length != try_bytes.length) return false; byte ret = 0; for (int i = 0; i < try_bytes.length; i++) ret |= hashed_bytes[i] ^ try_bytes[i]; return ret == 0; }
@Test public void testCheckpw_success() { System.out.print("BCrypt.checkpw w/ good passwords: "); for (int i = 0; i < test_vectors.length; i++) { String plain = test_vectors[i][0]; String expected = test_vectors[i][2]; Assert.assertTrue(BCrypt.checkpw(plain, expected)); System.out.print("."); } System.out.println(""); }
@Override public Map<String, Metric> getMetrics() { final Map<String, Metric> gauges = new HashMap<>(); gauges.put("loaded", (Gauge<Long>) mxBean::getTotalLoadedClassCount); gauges.put("unloaded", (Gauge<Long>) mxBean::getUnloadedClassCount); return gauges; }
@Test public void loadedGauge() { final Gauge gauge = (Gauge) gauges.getMetrics().get("loaded"); assertThat(gauge.getValue()).isEqualTo(2L); }
@Override public Thread newThread(Runnable r) { String name = prefix + "_" + counter.incrementAndGet(); if (totalSize > 1) { name += "_" + totalSize; } Thread thread = new FastThreadLocalThread(group, r, name); thread.setDaemon(makeDaemons); if (thread.getPriority() != Thread.NORM_PRIORITY) { thread.setPriority(Thread.NORM_PRIORITY); } return thread; }
@Test public void testNamedThreadFactoryWithSecurityManager() { NamedThreadFactory factory = new NamedThreadFactory("testThreadGroup", true); Thread thread = factory.newThread(() -> {}); assertThat(thread.getThreadGroup()).isNotNull(); }
public Component buildProject(ScannerReport.Component project, String scmBasePath) { this.rootComponent = project; this.scmBasePath = trimToNull(scmBasePath); Node root = createProjectHierarchy(project); return buildComponent(root, "", ""); }
@Test void project_name_is_loaded_from_db_if_not_on_main_branch() { String reportName = randomAlphabetic(5); ScannerReport.Component reportProject = newBuilder() .setType(PROJECT) .setName(reportName) .build(); Component root = newUnderTest(SOME_PROJECT_ATTRIBUTES, false) .buildProject(reportProject, NO_SCM_BASE_PATH); assertThat(root.getName()).isEqualTo(projectInDb.getName()); }
@Override public String execute(CommandContext commandContext, String[] args) { if (ArrayUtils.isEmpty(args)) { return "Please input method name, eg: \r\ninvoke xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\n" + "invoke XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})\r\n" + "invoke com.xxx.XxxService.xxxMethod(1234, \"abcd\", {\"prop\" : \"value\"})"; } Channel channel = commandContext.getRemote(); String service = channel.attr(ChangeTelnet.SERVICE_KEY) != null ? channel.attr(ChangeTelnet.SERVICE_KEY).get() : null; String message = args[0]; int i = message.indexOf("("); if (i < 0 || !message.endsWith(")")) { return "Invalid parameters, format: service.method(args)"; } String method = message.substring(0, i).trim(); String param = message.substring(i + 1, message.length() - 1).trim(); i = method.lastIndexOf("."); if (i >= 0) { service = method.substring(0, i).trim(); method = method.substring(i + 1).trim(); } if (StringUtils.isEmpty(service)) { return "If you want to invoke like [invoke sayHello(\"xxxx\")], please execute cd command first," + " or you can execute it like [invoke IHelloService.sayHello(\"xxxx\")]"; } List<Object> list; try { list = JsonUtils.toJavaList("[" + param + "]", Object.class); } catch (Throwable t) { return "Invalid json argument, cause: " + t.getMessage(); } StringBuilder buf = new StringBuilder(); Method invokeMethod = null; ProviderModel selectedProvider = null; if (isInvokedSelectCommand(channel)) { selectedProvider = channel.attr(INVOKE_METHOD_PROVIDER_KEY).get(); invokeMethod = channel.attr(SelectTelnet.SELECT_METHOD_KEY).get(); } else { for (ProviderModel provider : frameworkModel.getServiceRepository().allProviderModels()) { if (!isServiceMatch(service, provider)) { continue; } selectedProvider = provider; List<Method> methodList = findSameSignatureMethod(provider.getAllMethods(), method, list); if (CollectionUtils.isEmpty(methodList)) { break; } if (methodList.size() == 1) { invokeMethod = methodList.get(0); } else { List<Method> matchMethods = findMatchMethods(methodList, list); if (CollectionUtils.isEmpty(matchMethods)) { break; } if (matchMethods.size() == 1) { invokeMethod = matchMethods.get(0); } else { // exist overridden method channel.attr(INVOKE_METHOD_PROVIDER_KEY).set(provider); channel.attr(INVOKE_METHOD_LIST_KEY).set(matchMethods); channel.attr(INVOKE_MESSAGE_KEY).set(message); printSelectMessage(buf, matchMethods); return buf.toString(); } } break; } } if (!StringUtils.isEmpty(service)) { buf.append("Use default service ").append(service).append('.'); } if (selectedProvider == null) { buf.append("\r\nNo such service ").append(service); return buf.toString(); } if (invokeMethod == null) { buf.append("\r\nNo such method ") .append(method) .append(" in service ") .append(service); return buf.toString(); } try { Object[] array = realize(list.toArray(), invokeMethod.getParameterTypes(), invokeMethod.getGenericParameterTypes()); long start = System.currentTimeMillis(); AppResponse result = new AppResponse(); try { Object o = invokeMethod.invoke(selectedProvider.getServiceInstance(), array); boolean setValueDone = false; if (RpcContext.getServerAttachment().isAsyncStarted()) { AsyncContext asyncContext = RpcContext.getServerAttachment().getAsyncContext(); if (asyncContext instanceof AsyncContextImpl) { CompletableFuture<Object> internalFuture = ((AsyncContextImpl) asyncContext).getInternalFuture(); result.setValue(internalFuture.get()); setValueDone = true; } } if (!setValueDone) { result.setValue(o); } } catch (Throwable t) { result.setException(t); if (t instanceof InterruptedException) { Thread.currentThread().interrupt(); } } finally { RpcContext.removeContext(); } long end = System.currentTimeMillis(); buf.append("\r\nresult: "); buf.append(JsonUtils.toJson(result.recreate())); buf.append("\r\nelapsed: "); buf.append(end - start); buf.append(" ms."); } catch (Throwable t) { return "Failed to invoke method " + invokeMethod.getName() + ", cause: " + StringUtils.toString(t); } return buf.toString(); }
@Test void testOverriddenMethodWithSpecifyParamType() throws RemotingException { defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).set(DemoService.class.getName()); defaultAttributeMap.attr(SelectTelnet.SELECT_KEY).set(null); given(mockChannel.attr(ChangeTelnet.SERVICE_KEY)) .willReturn(defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY)); given(mockChannel.attr(SelectTelnet.SELECT_KEY)).willReturn(defaultAttributeMap.attr(SelectTelnet.SELECT_KEY)); registerProvider(DemoService.class.getName(), new DemoServiceImpl(), DemoService.class); String result = invoke.execute(mockCommandContext, new String[] { "getPerson({\"name\":\"zhangsan\",\"age\":12,\"class\":\"org.apache.dubbo.qos.legacy.service.Person\"})" }); assertTrue(result.contains("result: 12")); defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).remove(); defaultAttributeMap.attr(SelectTelnet.SELECT_KEY).remove(); }
public static UParens create(UExpression expression) { return new AutoValue_UParens(expression); }
@Test public void serialization() { SerializableTester.reserializeAndAssert(UParens.create(ULiteral.longLit(5L))); }
@VisibleForTesting static int calculateTextWidth(FontMetrics metrics, String line) { char[] chars = line.toCharArray(); int textWidth = 0; int begin = 0; boolean inTag = false; for (int j = 0; j < chars.length; j++) { if (chars[j] == '<') { textWidth += metrics.stringWidth(line.substring(begin, j)); begin = j; inTag = true; } else if (chars[j] == '>' && inTag) { String subLine = line.substring(begin + 1, j); if (subLine.startsWith("img=")) { textWidth += MOD_ICON_WIDTH; } else if (!subLine.startsWith("col=") && !subLine.startsWith("/col")) { textWidth += metrics.stringWidth(line.substring(begin, j + 1)); } begin = j + 1; inTag = false; } } // Include trailing text (after last tag) textWidth += metrics.stringWidth(line.substring(begin)); return textWidth; }
@Test public void testCalculateTextWidth() { FontMetrics fontMetrics = mock(FontMetrics.class); when(fontMetrics.stringWidth(anyString())).thenAnswer((invocation) -> ((String) invocation.getArguments()[0]).length()); assertEquals(11, calculateTextWidth(fontMetrics, "line1<col=ff0000>>line2")); }
public static String escapeHtml(String input) { // Avoid building a new string in the majority of cases (nothing to escape) StringBuilder sb = null; loop: for (int i = 0; i < input.length(); i++) { char c = input.charAt(i); String replacement; switch (c) { case '&': replacement = "&amp;"; break; case '<': replacement = "&lt;"; break; case '>': replacement = "&gt;"; break; case '\"': replacement = "&quot;"; break; default: if (sb != null) { sb.append(c); } continue loop; } if (sb == null) { sb = new StringBuilder(); sb.append(input, 0, i); } sb.append(replacement); } return sb != null ? sb.toString() : input; }
@Test public void testEscapeHtml() { assertEquals("nothing to escape", Escaping.escapeHtml("nothing to escape")); assertEquals("&amp;", Escaping.escapeHtml("&")); assertEquals("&lt;", Escaping.escapeHtml("<")); assertEquals("&gt;", Escaping.escapeHtml(">")); assertEquals("&quot;", Escaping.escapeHtml("\"")); assertEquals("&lt; start", Escaping.escapeHtml("< start")); assertEquals("end &gt;", Escaping.escapeHtml("end >")); assertEquals("&lt; both &gt;", Escaping.escapeHtml("< both >")); assertEquals("&lt; middle &amp; too &gt;", Escaping.escapeHtml("< middle & too >")); }
@Override @PublicAPI(usage = ACCESS) public String getName() { return WILDCARD_TYPE_NAME + boundsToString(); }
@Test public void wildcard_name_upper_bounded_by_array() { @SuppressWarnings("unused") class UpperBounded<T extends List<? extends String[][]>> { } JavaWildcardType wildcardType = importWildcardTypeOf(UpperBounded.class); assertThat(wildcardType.getName()).isEqualTo("? extends java.lang.String[][]"); }
public static Configuration configurePythonDependencies(ReadableConfig config) { final PythonDependencyManager pythonDependencyManager = new PythonDependencyManager(config); final Configuration pythonDependencyConfig = new Configuration(); pythonDependencyManager.applyToConfiguration(pythonDependencyConfig); return pythonDependencyConfig; }
@Test void testPythonArchives() { Configuration config = new Configuration(); config.set( PythonOptions.PYTHON_ARCHIVES, "hdfs:///tmp_dir/file1.zip," + "hdfs:///tmp_dir/file1.zip," + "tmp_dir/py37.zip," + "tmp_dir/py37.zip#venv," + "tmp_dir/py37.zip#venv2,tmp_dir/py37.zip#venv"); Configuration actual = configurePythonDependencies(config); Map<String, String> expectedCachedFiles = new HashMap<>(); expectedCachedFiles.put( "python_archive_4cc74e4003de886434723f351771df2a84f72531c52085acc0915e19d70df2ba", "hdfs:///tmp_dir/file1.zip"); expectedCachedFiles.put( "python_archive_f8a1c874251230f21094880d9dd878ffb5714454b69184d8ad268a6563269f0c", "tmp_dir/py37.zip"); expectedCachedFiles.put( "python_archive_5f3fca2a4165c7d9c94b00bfab956c15f14c41e9e03f6037c83eb61157fce09c", "tmp_dir/py37.zip"); expectedCachedFiles.put( "python_archive_c7d970ce1c5794367974ce8ef536c2343bed8fcfe7c2422c51548e58007eee6a", "tmp_dir/py37.zip"); verifyCachedFiles(expectedCachedFiles, config); Configuration expectedConfiguration = new Configuration(); expectedConfiguration.set(PYTHON_ARCHIVES_DISTRIBUTED_CACHE_INFO, new HashMap<>()); expectedConfiguration .get(PYTHON_ARCHIVES_DISTRIBUTED_CACHE_INFO) .put( "python_archive_4cc74e4003de886434723f351771df2a84f72531c52085acc0915e19d70df2ba", "file1.zip"); expectedConfiguration .get(PYTHON_ARCHIVES_DISTRIBUTED_CACHE_INFO) .put( "python_archive_5f3fca2a4165c7d9c94b00bfab956c15f14c41e9e03f6037c83eb61157fce09c", "py37.zip"); expectedConfiguration .get(PYTHON_ARCHIVES_DISTRIBUTED_CACHE_INFO) .put( "python_archive_f8a1c874251230f21094880d9dd878ffb5714454b69184d8ad268a6563269f0c", "py37.zip#venv2"); expectedConfiguration .get(PYTHON_ARCHIVES_DISTRIBUTED_CACHE_INFO) .put( "python_archive_c7d970ce1c5794367974ce8ef536c2343bed8fcfe7c2422c51548e58007eee6a", "py37.zip#venv"); verifyConfiguration(expectedConfiguration, actual); }
static PodSecurityProvider findProviderOrThrow(String providerClass) { ServiceLoader<PodSecurityProvider> loader = ServiceLoader.load(PodSecurityProvider.class); for (PodSecurityProvider provider : loader) { if (providerClass.equals(provider.getClass().getCanonicalName())) { LOGGER.info("Found PodSecurityProvider {}", providerClass); return provider; } } // The provider was not found LOGGER.warn("PodSecurityProvider {} was not found. Available providers are {}", providerClass, loader.stream().map(p -> p.getClass().getCanonicalName()).collect(Collectors.toSet())); throw new InvalidConfigurationException("PodSecurityProvider " + providerClass + " was not found."); }
@Test public void testExistingClass() { assertThat(PodSecurityProviderFactory.findProviderOrThrow("io.strimzi.plugin.security.profiles.impl.RestrictedPodSecurityProvider"), is(instanceOf(RestrictedPodSecurityProvider.class))); }
@ExceptionHandler(NullPointerException.class) protected ShenyuAdminResult handleNullPointException(final NullPointerException exception) { LOG.error("null pointer exception ", exception); return ShenyuAdminResult.error(CommonErrorCode.NOT_FOUND_EXCEPTION, ShenyuResultMessage.NOT_FOUND_EXCEPTION); }
@Test public void testNullPointExceptionHandler() { NullPointerException nullPointerException = new NullPointerException("TEST NULL POINT EXCEPTION"); ShenyuAdminResult result = exceptionHandlersUnderTest.handleNullPointException(nullPointerException); Assertions.assertEquals(result.getCode().intValue(), CommonErrorCode.NOT_FOUND_EXCEPTION); Assertions.assertEquals(result.getMessage(), ShenyuResultMessage.NOT_FOUND_EXCEPTION); }
@Override public int getNumOfPartitions() { return topicMetadata.numPartitions(); }
@Test public void testGetNumOfPartitions() throws Exception { String topicName = "test-get-num-of-partitions"; ClientConfigurationData conf = new ClientConfigurationData(); conf.setServiceUrl("pulsar://localhost:6650"); conf.setStatsIntervalSeconds(100); ThreadFactory threadFactory = new DefaultThreadFactory("client-test-stats", Thread.currentThread().isDaemon()); @Cleanup("shutdownGracefully") EventLoopGroup eventLoopGroup = EventLoopUtil.newEventLoopGroup(conf.getNumIoThreads(), false, threadFactory); @Cleanup PulsarClientImpl clientImpl = new PulsarClientImpl(conf, eventLoopGroup); ProducerConfigurationData producerConfData = new ProducerConfigurationData(); producerConfData.setMessageRoutingMode(MessageRoutingMode.CustomPartition); producerConfData.setCustomMessageRouter(new CustomMessageRouter()); PartitionedProducerImpl partitionedProducerImpl = new PartitionedProducerImpl( clientImpl, topicName, producerConfData, 1, null, null, null); assertEquals(partitionedProducerImpl.getNumOfPartitions(), 1); String nonPartitionedTopicName = "test-get-num-of-partitions-for-non-partitioned-topic"; ProducerConfigurationData producerConfDataNonPartitioned = new ProducerConfigurationData(); ProducerImpl producerImpl = new ProducerImpl(clientImpl, nonPartitionedTopicName, producerConfDataNonPartitioned, null, 0, null, null, Optional.empty()); assertEquals(producerImpl.getNumOfPartitions(), 0); }
public long getSize() { return size; }
@Test public void testGetSize() { assertEquals(TestParameters.VP_CONTROL_DATA_SIZE, chmLzxcControlData.getSize()); }
@VisibleForTesting static String extractTableName(MultivaluedMap<String, String> pathParameters, MultivaluedMap<String, String> queryParameters) { String tableName = extractTableName(pathParameters); if (tableName != null) { return tableName; } return extractTableName(queryParameters); }
@Test public void testExtractTableNameWithTableNameWithTypeInQueryParams() { MultivaluedMap<String, String> pathParams = new MultivaluedHashMap<>(); MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<>(); queryParams.putSingle("tableNameWithType", "E"); queryParams.putSingle("schemaName", "F"); assertEquals(AuthenticationFilter.extractTableName(pathParams, queryParams), "E"); }
@Override public void preflight(final Path source, final Path target) throws BackgroundException { if(!CteraTouchFeature.validate(target.getName())) { throw new InvalidFilenameException(MessageFormat.format(LocaleFactory.localizedString("Cannot rename {0}", "Error"), source.getName())).withFile(source); } assumeRole(source, DELETEPERMISSION); // defaults to Acl.EMPTY (disabling role checking) if target does not exist assumeRole(target, WRITEPERMISSION); // no createfilespermission required for now if(source.isDirectory()) { assumeRole(target.getParent(), target.getName(), CREATEDIRECTORIESPERMISSION); } }
@Test public void testPreflightFileAccessDeniedTargetExistsNotWritableCustomProps() throws Exception { final Path source = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); source.setAttributes(source.attributes().withAcl(new Acl(new Acl.CanonicalUser(), CteraAttributesFinderFeature.DELETEPERMISSION))); final Path target = new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); target.setAttributes(target.attributes().withAcl(new Acl(new Acl.CanonicalUser()))); // no createfilespermission required for now target.getParent().setAttributes(target.getParent().attributes().withAcl(new Acl(new Acl.CanonicalUser()))); final CteraAttributesFinderFeature mock = mock(CteraAttributesFinderFeature.class); // target exists and not writable when(mock.find(eq(target))).thenReturn(new PathAttributes().withAcl(new Acl(new Acl.CanonicalUser()))); final AccessDeniedException accessDeniedException = assertThrows(AccessDeniedException.class, () -> new CteraMoveFeature(session, mock).preflight(source, target)); assertTrue(accessDeniedException.getDetail().contains(MessageFormat.format(LocaleFactory.localizedString("Upload {0} failed", "Error"), target.getName()))); }
public static String buildPopRetryTopicV1(String topic, String cid) { return MixAll.RETRY_GROUP_TOPIC_PREFIX + cid + POP_RETRY_SEPARATOR_V1 + topic; }
@Test public void testBuildPopRetryTopicV1() { assertThat(KeyBuilder.buildPopRetryTopicV1(topic, group)).isEqualTo(MixAll.RETRY_GROUP_TOPIC_PREFIX + group + "_" + topic); }
@Override public ExecuteContext before(ExecuteContext context) { String name = context.getMethod().getName(); if (context.getArguments() == null || context.getArguments().length == 0) { return context; } Object argument = context.getArguments()[0]; if ("setName".equals(name)) { if (argument == null || argument instanceof String) { setAppNameAndPutParameters(context.getObject(), (String) argument); } } else { if (argument == null || argument instanceof Map<?, ?>) { context.getArguments()[0] = ParametersUtils.putParameters((Map<String, String>) argument, routerConfig); } } return context; }
@Test public void testPutParametersWithNull() throws NoSuchMethodException { Object[] args = new Object[1]; args[0] = null; ExecuteContext context = ExecuteContext.forMemberMethod(new Object(), ApplicationConfig.class.getMethod("setParameters", Map.class), args, null, null); // map is null interceptor.before(context); Map<String, String> parameters = (Map<String, String>) context.getArguments()[0]; Assert.assertEquals(config.getParameters().size() + 2, parameters.size()); Assert.assertEquals(config.getRouterVersion(), parameters.get(RouterConstant.META_VERSION_KEY)); Assert.assertEquals(config.getZone(), parameters.get(RouterConstant.META_ZONE_KEY)); Map<String, String> configParameters = config.getParameters(); configParameters.forEach( (key, value) -> Assert.assertEquals(value, parameters.get(RouterConstant.PARAMETERS_KEY_PREFIX + key))); }
@Override public AuthenticationDataSource getAuthDataSource() { return authenticationDataSource; }
@Test public void verifyGetAuthRoleBeforeAuthenticateFails() { CountingAuthenticationProvider provider = new CountingAuthenticationProvider(); AuthData authData = AuthData.of("role".getBytes()); OneStageAuthenticationState authState = new OneStageAuthenticationState(authData, null, null, provider); assertThrows(AuthenticationException.class, authState::getAuthRole); assertNull(authState.getAuthDataSource()); }
@VisibleForTesting Class<?> cookClass( UserDefinedJavaClassDef def, ClassLoader clsloader ) throws CompileException, IOException, RuntimeException, KettleStepException { String checksum = def.getChecksum(); Class<?> rtn = UserDefinedJavaClassMeta.classCache.getIfPresent( checksum ); if ( rtn != null ) { return rtn; } if ( Thread.currentThread().getContextClassLoader() == null ) { Thread.currentThread().setContextClassLoader( this.getClass().getClassLoader() ); } ClassBodyEvaluator cbe = new ClassBodyEvaluator(); if ( clsloader == null ) { cbe.setParentClassLoader( Thread.currentThread().getContextClassLoader() ); } else { cbe.setParentClassLoader( clsloader ); } cbe.setClassName( def.getClassName() ); StringReader sr; if ( def.isTransformClass() ) { cbe.setExtendedType( TransformClassBase.class ); sr = new StringReader( def.getTransformedSource() ); } else { sr = new StringReader( def.getSource() ); } cbe.setDefaultImports( new String[] { "org.pentaho.di.trans.steps.userdefinedjavaclass.*", "org.pentaho.di.trans.step.*", "org.pentaho.di.core.row.*", "org.pentaho.di.core.*", "org.pentaho.di.core.exception.*" } ); cbe.cook( new Scanner( null, sr ) ); rtn = cbe.getClazz(); UserDefinedJavaClassMeta.classCache.put( checksum, rtn ); return rtn; }
@Test public void cookClassesCachingTest() throws Exception { String codeBlock1 = "public boolean processRow() {\n" + " return true;\n" + "}\n\n"; String codeBlock2 = "public boolean processRow() {\n" + " // Random comment\n" + " return true;\n" + "}\n\n"; UserDefinedJavaClassMeta userDefinedJavaClassMeta1 = new UserDefinedJavaClassMeta(); UserDefinedJavaClassDef userDefinedJavaClassDef1 = new UserDefinedJavaClassDef( UserDefinedJavaClassDef.ClassType.NORMAL_CLASS, "MainClass", codeBlock1 ); StepMeta stepMeta = Mockito.mock( StepMeta.class ); Mockito.when( stepMeta.getName() ).thenReturn( "User Defined Java Class" ); userDefinedJavaClassMeta1.setParentStepMeta( stepMeta ); UserDefinedJavaClassMeta userDefinedJavaClassMetaSpy = Mockito.spy( userDefinedJavaClassMeta1 ); // Added classloader for https://jira.pentaho.com/browse/PDI-44134 Class<?> clazz1 = userDefinedJavaClassMetaSpy.cookClass( userDefinedJavaClassDef1, null ); Class<?> clazz2 = userDefinedJavaClassMetaSpy.cookClass( userDefinedJavaClassDef1, clazz1.getClassLoader() ); Assert.assertTrue( clazz1 == clazz2 ); // Caching should work here and return exact same class UserDefinedJavaClassMeta userDefinedJavaClassMeta2 = new UserDefinedJavaClassMeta(); UserDefinedJavaClassDef userDefinedJavaClassDef2 = new UserDefinedJavaClassDef( UserDefinedJavaClassDef.ClassType.NORMAL_CLASS, "AnotherClass", codeBlock2 ); StepMeta stepMeta2 = Mockito.mock( StepMeta.class ); Mockito.when( stepMeta2.getName() ).thenReturn( "Another UDJC" ); userDefinedJavaClassMeta2.setParentStepMeta( stepMeta2 ); UserDefinedJavaClassMeta userDefinedJavaClassMeta2Spy = Mockito.spy( userDefinedJavaClassMeta2 ); Class<?> clazz3 = userDefinedJavaClassMeta2Spy.cookClass( userDefinedJavaClassDef2, clazz2.getClassLoader() ); Assert.assertTrue( clazz3 != clazz1 ); // They should not be the exact same class }
public static UStaticIdent create(UClassIdent classIdent, CharSequence member, UType memberType) { return new AutoValue_UStaticIdent(classIdent, StringName.of(member), memberType); }
@Test public void serialization() { SerializableTester.reserializeAndAssert( UStaticIdent.create( "java.lang.Integer", "valueOf", UMethodType.create( UClassType.create("java.lang.Integer"), UClassType.create("java.lang.String")))); }
@Udf public <T> String toJsonString(@UdfParameter final T input) { return toJson(input); }
@Test public void shouldSerializeBoolean() { // When: final String result = udf.toJsonString(true); // Then: assertEquals("true", result); }
@Override public synchronized void start() { LOG.info("Starting {}", this.getClass().getSimpleName()); startRejectingServer(); }
@Test public void doubleStartRejectingServer() { RpcServerService service = RpcServerService.Factory.create(mRpcAddress, mMasterProcess, mRegistry); service.start(); Assert.assertThrows("rejecting server must not be running", IllegalStateException.class, service::start); }
public static void main(String[] args) { var customer = Customer.newCustomer(BORROWER, INVESTOR); LOGGER.info("New customer created : {}", customer); var hasBorrowerRole = customer.hasRole(BORROWER); LOGGER.info("Customer has a borrower role - {}", hasBorrowerRole); var hasInvestorRole = customer.hasRole(INVESTOR); LOGGER.info("Customer has an investor role - {}", hasInvestorRole); customer.getRole(INVESTOR, InvestorRole.class) .ifPresent(inv -> { inv.setAmountToInvest(1000); inv.setName("Billy"); }); customer.getRole(BORROWER, BorrowerRole.class) .ifPresent(inv -> inv.setName("Johny")); customer.getRole(INVESTOR, InvestorRole.class) .map(InvestorRole::invest) .ifPresent(LOGGER::info); customer.getRole(BORROWER, BorrowerRole.class) .map(BorrowerRole::borrow) .ifPresent(LOGGER::info); }
@Test void shouldExecuteApplicationWithoutException() { assertDoesNotThrow(() -> ApplicationRoleObject.main(new String[]{})); }
@Operation(summary = "create", description = "CREATE_TASK_GROUP_NOTE") @Parameters({ @Parameter(name = "name", description = "NAME", schema = @Schema(implementation = String.class)), @Parameter(name = "projectCode", description = "PROJECT_CODE", schema = @Schema(implementation = long.class)), @Parameter(name = "description", description = "TASK_GROUP_DESCRIPTION", schema = @Schema(implementation = String.class)), @Parameter(name = "groupSize", description = "GROUP_SIZE", schema = @Schema(implementation = int.class)), }) @PostMapping(value = "/create") @ResponseStatus(HttpStatus.CREATED) @ApiException(CREATE_TASK_GROUP_ERROR) public Result createTaskGroup(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("name") String name, @RequestParam(value = "projectCode", required = false, defaultValue = "0") Long projectCode, @RequestParam("description") String description, @RequestParam("groupSize") Integer groupSize) { Map<String, Object> result = taskGroupService.createTaskGroup(loginUser, projectCode, name, description, groupSize); return returnDataList(result); }
@Test public void testCreateTaskGroup() throws Exception { // success MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("name", "TGQ1"); paramsMap.add("description", "this is a task group queue!"); paramsMap.add("groupSize", "10"); MvcResult mvcResult = mockMvc.perform(post("/task-group/create") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertTrue(result != null && result.isSuccess()); logger.info("create queue return result:{}", mvcResult.getResponse().getContentAsString()); // failed // name exists paramsMap.clear(); paramsMap.add("name", "TGQ1"); paramsMap.add("description", "this is a task group queue!"); paramsMap.add("groupSize", "10"); MvcResult mvcResult1 = mockMvc.perform(post("/task-group/create") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isCreated()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result1 = JSONUtils.parseObject(mvcResult1.getResponse().getContentAsString(), Result.class); Assertions.assertTrue(result1 != null && result1.isFailed()); logger.info("create queue return result:{}", mvcResult1.getResponse().getContentAsString()); }
public List<RoutingProto.Route> getServiceRouterRule(String namespace, String sourceService, String dstService) { LOG.debug("Get service router rules with namespace:{} and sourceService:{} and dstService:{}.", namespace, sourceService, dstService); List<RoutingProto.Route> rules = new ArrayList<>(); //get source service outbound rules. ServiceRule sourceServiceRule = getServiceRule(namespace, sourceService, ServiceEventKey.EventType.ROUTING); if (sourceServiceRule != null) { Object rule = sourceServiceRule.getRule(); if (rule instanceof RoutingProto.Routing) { rules.addAll(((RoutingProto.Routing) rule).getOutboundsList()); } } //get peer service inbound rules. ServiceRule dstServiceRule = getServiceRule(namespace, dstService, ServiceEventKey.EventType.ROUTING); if (dstServiceRule != null) { Object rule = dstServiceRule.getRule(); if (rule instanceof RoutingProto.Routing) { rules.addAll(((RoutingProto.Routing) rule).getInboundsList()); } } return rules; }
@Test public void testGetServiceRouterRule() { final String testNamespace = "testNamespace"; final String testSourceService = "testSourceService"; final String testDstService = "testDstService"; RoutingProto.Routing routing = RoutingProto.Routing.newBuilder() .addOutbounds(RoutingProto.Route.newBuilder().build()) .build(); ServiceRule serviceRule = new ServiceRuleByProto(routing, "111", false, ServiceEventKey.EventType.ROUTING); ServiceRuleResponse serviceRuleResponse = new ServiceRuleResponse(serviceRule); // source when(consumerAPI.getServiceRule( argThat(request -> request != null && testNamespace.equals(request.getNamespace()) && testSourceService.equals(request.getService()) && ServiceEventKey.EventType.ROUTING.equals(request.getRuleType())) )).thenReturn(serviceRuleResponse); ServiceRuleResponse emptyRuleResponse = new ServiceRuleResponse(null); // destination when(consumerAPI.getServiceRule( argThat(request -> request != null && testNamespace.equals(request.getNamespace()) && testDstService.equals(request.getService()) && ServiceEventKey.EventType.ROUTING.equals(request.getRuleType())) )).thenReturn(emptyRuleResponse); ServiceRuleManager serviceRuleManager = new ServiceRuleManager(sdkContext, consumerAPI); List<RoutingProto.Route> serviceRouterRule = serviceRuleManager.getServiceRouterRule(testNamespace, testSourceService, testDstService); assertThat(serviceRouterRule).hasSize(1); }
@Override public E putIfAbsent(String key, E value) { return computeIfAbsent(key, k -> value); }
@Test public void putIfAbsent_cacheMiss_updatesCache() { Function<String, Integer> mappingFunction = k -> 17; doReturn(null).when(mutableEntryMock).getValue(); entryProcessorMock = new CacheRegistryStore.AtomicComputeProcessor<>(); entryProcessorArgMock = mappingFunction; Integer cacheResult = classUnderTest.putIfAbsent(CACHE_KEY, 17); verify(mutableEntryMock, times(1)).setValue(17); assertEquals(Integer.valueOf(17), cacheResult); }
public List<String> getDeletedIds() { return deletedIds; }
@Test void getDeletedIds() { List<String> expectDeleteIds = ListUtil.map(selectorDOList, BaseDO::getId); List<String> actualDeleteIds = batchSelectorDeletedEvent.getDeletedIds(); assertArrayEquals(expectDeleteIds.toArray(new String[0]), actualDeleteIds.toArray(new String[0])); }
public FindBrokerResult findBrokerAddressInSubscribe( final String brokerName, final long brokerId, final boolean onlyThisBroker ) { if (brokerName == null) { return null; } String brokerAddr = null; boolean slave = false; boolean found = false; HashMap<Long/* brokerId */, String/* address */> map = this.brokerAddrTable.get(brokerName); if (map != null && !map.isEmpty()) { brokerAddr = map.get(brokerId); slave = brokerId != MixAll.MASTER_ID; found = brokerAddr != null; if (!found && slave) { brokerAddr = map.get(brokerId + 1); found = brokerAddr != null; } if (!found && !onlyThisBroker) { Entry<Long, String> entry = map.entrySet().iterator().next(); brokerAddr = entry.getValue(); slave = entry.getKey() != MixAll.MASTER_ID; found = brokerAddr != null; } } if (found) { return new FindBrokerResult(brokerAddr, slave, findBrokerVersion(brokerName, brokerAddr)); } return null; }
@Test public void testFindBrokerAddressInSubscribeWithOneBroker() throws IllegalAccessException { brokerAddrTable.put(defaultBroker, createBrokerAddrMap()); consumerTable.put(group, createMQConsumerInner()); ConcurrentMap<String, HashMap<String, Integer>> brokerVersionTable = new ConcurrentHashMap<>(); HashMap<String, Integer> addressMap = new HashMap<>(); addressMap.put(defaultBrokerAddr, 0); brokerVersionTable.put(defaultBroker, addressMap); FieldUtils.writeDeclaredField(mqClientInstance, "brokerVersionTable", brokerVersionTable, true); FindBrokerResult actual = mqClientInstance.findBrokerAddressInSubscribe(defaultBroker, 1L, false); assertNotNull(actual); assertEquals(defaultBrokerAddr, actual.getBrokerAddr()); }
@SneakyThrows(ReflectiveOperationException.class) public static <T extends YamlConfiguration> T unmarshal(final File yamlFile, final Class<T> classType) throws IOException { try (BufferedReader inputStreamReader = Files.newBufferedReader(Paths.get(yamlFile.toURI()))) { T result = new Yaml(new ShardingSphereYamlConstructor(classType)).loadAs(inputStreamReader, classType); return null == result ? classType.getConstructor().newInstance() : result; } }
@Test void assertUnmarshalWithEmptyYamlBytes() throws IOException { URL url = getClass().getClassLoader().getResource("yaml/empty-config.yaml"); assertNotNull(url); String yamlContent = readContent(url); YamlShortcutsConfigurationFixture actual = YamlEngine.unmarshal(yamlContent.getBytes(), YamlShortcutsConfigurationFixture.class); assertNotNull(actual); assertTrue(actual.isEmpty()); }
static Optional<SearchPath> fromString(String path) { if (path == null || path.isEmpty()) { return Optional.empty(); } if (path.indexOf(';') >= 0) { return Optional.empty(); // multi-level not supported at this time } try { SearchPath sp = parseElement(path); if (sp.isEmpty()) { return Optional.empty(); } else { return Optional.of(sp); } } catch (NumberFormatException | InvalidSearchPathException e) { throw new InvalidSearchPathException("Invalid search path '" + path + "'", e); } }
@Test void invalidPartMustThrowException() { try { SearchPath.fromString("p/0"); fail("Expected exception"); } catch (InvalidSearchPathException e) { // success } }
public void addLast(PDOutlineItem newChild) { requireSingleNode(newChild); append(newChild); updateParentOpenCountForAddedChild(newChild); }
@Test void cannotAddLastAList() { PDOutlineItem child = new PDOutlineItem(); child.insertSiblingAfter(new PDOutlineItem()); child.insertSiblingAfter(new PDOutlineItem()); assertThrows(IllegalArgumentException.class, () -> root.addLast(child)); }
public JspEmail(String sFileID, String sLocale, Object oCaller) { if (sLocale != null) { sFileID = sLocale + "-" + sFileID; } sHtmlName_ = "/" + sFileID + "-html.jsp"; sPlainName_ = "/" + sFileID + "-plain.jsp"; request_ = new StringHttpServletRequest(); response_ = new StringHttpServletResponse(); request_.getSession().setAttribute(PARAM_CALLER, oCaller); // if using GameServer and jsp servlet engine not created, create it synchronized (JspEmail.class) { if (LOCALJSP && jsp_ == null) { jsp_ = new JspServlet(); try { File logDir = new File(new DefaultRuntimeDirectory().getServerHome(), "log"); File scratch = new File(logDir, "jsp-" + ConfigManager.getAppName()); ConfigUtils.verifyNewDirectory(scratch); logger.info("JSP Email scratch in " + scratch.getAbsolutePath()); ServletConfig config = new EmbeddedServletConfig("email", scratch.getAbsolutePath()); config.getServletContext().setAttribute(InstanceManager.class.getName(), new SimpleInstanceManager()); jsp_.init(config); } catch (ServletException se) { throw new ApplicationError(se); } } } }
@Test public void jspEmail() { new ConfigManager("testapp", ApplicationType.COMMAND_LINE); JspEmail jsp = new JspEmail("jsp_email", null, this); jsp.getSession().setAttribute("name", "JSPEmail Unit Test"); jsp.executeJSP(); String plain = jsp.getPlain(); String html = jsp.getHtml(); String subject = jsp.getSubject(); assertEquals("HTML JSPEmail Unit Test", html); assertEquals("PLAIN JSPEmail Unit Test", plain); assertEquals("Subject Test", subject); }
public WeightedItem<T> addOrVote(T item) {
    // Linear scan: if the item is already present, just increase its weight
    for (int i = 0; i < list.size(); i++) {
        WeightedItem<T> weightedItem = list.get(i);
        if (weightedItem.item.equals(item)) {
            voteFor(weightedItem);
            return weightedItem;
        }
    }
    // New item: let the list re-organize (and evict, if at capacity) before appending it
    return organizeAndAdd(item);
}
@Test public void testListReorganizesAfterMaxSize() { WeightedEvictableList<String> list = new WeightedEvictableList<>(3, 100); list.addOrVote("c"); list.addOrVote("b"); list.addOrVote("b"); list.addOrVote("a"); list.addOrVote("a"); list.addOrVote("a"); list.addOrVote("d"); assertItemsInOrder(list, "a", "d"); // weights are reset after max-size is reached and list is re-organized // new item is retained, and it gets a vote assertWeightsInOrder(list, 0, 1); }
@Config("failure-resolver.enabled") public FailureResolverConfig setEnabled(boolean enabled) { this.enabled = enabled; return this; }
@Test public void testExplicitPropertyMappings() { Map<String, String> properties = new ImmutableMap.Builder<String, String>() .put("failure-resolver.enabled", "false") .build(); FailureResolverConfig expected = new FailureResolverConfig() .setEnabled(false); assertFullMapping(properties, expected); }
protected RequestInterceptor createRequestInterceptorChain() { Configuration conf = getConfig(); List<String> interceptorClassNames = getInterceptorClassNames(conf); RequestInterceptor pipeline = null; RequestInterceptor current = null; for (String interceptorClassName : interceptorClassNames) { try { Class<?> interceptorClass = conf.getClassByName(interceptorClassName); if (RequestInterceptor.class.isAssignableFrom(interceptorClass)) { RequestInterceptor interceptorInstance = (RequestInterceptor) ReflectionUtils.newInstance( interceptorClass, conf); if (pipeline == null) { pipeline = interceptorInstance; current = interceptorInstance; continue; } else { current.setNextInterceptor(interceptorInstance); current = interceptorInstance; } } else { throw new YarnRuntimeException("Class: " + interceptorClassName + " not instance of " + RequestInterceptor.class.getCanonicalName()); } } catch (ClassNotFoundException e) { throw new YarnRuntimeException( "Could not instantiate ApplicationMasterRequestInterceptor: " + interceptorClassName, e); } } if (pipeline == null) { throw new YarnRuntimeException( "RequestInterceptor pipeline is not configured in the system"); } return pipeline; }
@Test public void testRequestInterceptorChainCreation() throws Exception { RequestInterceptor root = super.getAMRMProxyService().createRequestInterceptorChain(); int index = 0; while (root != null) { switch (index) { case 0: case 1: case 2: Assert.assertEquals(PassThroughRequestInterceptor.class.getName(), root.getClass().getName()); break; case 3: Assert.assertEquals(MockRequestInterceptor.class.getName(), root.getClass().getName()); break; } root = root.getNextInterceptor(); index++; } Assert.assertEquals("The number of interceptors in chain does not match", Integer.toString(4), Integer.toString(index)); }
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; // This handles a tombstone message if (value == null) { return SchemaAndValue.NULL; } try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (config.schemasEnabled() && (!jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME) || !jsonValue.has(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); // The deserialized data should either be an envelope object containing the schema and the payload or the schema // was stripped during serialization and we need to fill in an all-encompassing schema. if (!config.schemasEnabled()) { ObjectNode envelope = JSON_NODE_FACTORY.objectNode(); envelope.set(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME, null); envelope.set(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME, jsonValue); jsonValue = envelope; } Schema schema = asConnectSchema(jsonValue.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); return new SchemaAndValue( schema, convertToConnect(schema, jsonValue.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME), config) ); }
@Test public void highPrecisionNumericDecimalToConnect() { // this number is too big to be kept in a float64! BigDecimal reference = new BigDecimal("1.23456789123456789"); Schema schema = Decimal.schema(17); String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"17\" } }, \"payload\": 1.23456789123456789 }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); assertEquals(schema, schemaAndValue.schema()); assertEquals(reference, schemaAndValue.value()); }
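The test above relies on the fact that 18 significant digits exceed what a 64-bit double can represent, which is why the converter must surface the value as a logical Decimal backed by BigDecimal. A minimal standalone sketch of that precision loss (class name hypothetical, plain JDK only):

import java.math.BigDecimal;

// Illustrates why 1.23456789123456789 cannot survive a round trip through double.
public class DecimalPrecisionSketch {
    public static void main(String[] args) {
        BigDecimal exact = new BigDecimal("1.23456789123456789");
        double lossy = Double.parseDouble("1.23456789123456789");
        System.out.println(exact);                                // 1.23456789123456789
        System.out.println(new BigDecimal(lossy));                // the nearest representable double, not the same value
        System.out.println(exact.equals(new BigDecimal(lossy)));  // false: precision was lost
    }
}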
public Type getGELFType() { if (payload.length < Type.HEADER_SIZE) { throw new IllegalStateException("GELF message is too short. Not even the type header would fit."); } return Type.determineType(payload[0], payload[1]); }
@Test public void testGetGELFTypeDetectsGZIPCompressedMessage() throws Exception { byte[] fakeData = new byte[20]; fakeData[0] = (byte) 0x1f; fakeData[1] = (byte) 0x8b; GELFMessage msg = new GELFMessage(fakeData); assertEquals(GELFMessage.Type.GZIP, msg.getGELFType()); }
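The two magic bytes the test fakes, 0x1f and 0x8b, are the standard GZIP header defined by RFC 1952, so any real GZIP-compressed payload will be detected the same way. A small standalone sketch (hypothetical class name) confirming that genuine GZIP output begins with them:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;

// Verifies that real GZIP output starts with the 0x1f 0x8b magic bytes
// that the type detection above keys on.
public class GzipMagicSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(buffer)) {
            gzip.write("hello".getBytes());
        }
        byte[] compressed = buffer.toByteArray();
        System.out.println(compressed[0] == (byte) 0x1f && compressed[1] == (byte) 0x8b); // true
    }
}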
@Override public <R> HoodieData<HoodieRecord<R>> tagLocation( HoodieData<HoodieRecord<R>> records, HoodieEngineContext context, HoodieTable hoodieTable) { return HoodieJavaRDD.of(HoodieJavaRDD.getJavaRDD(records) .mapPartitionsWithIndex(locationTagFunction(hoodieTable.getMetaClient()), true)); }
@Test public void testEnsureTagLocationUsesCommitTimeline() throws Exception { // Load to memory HoodieWriteConfig config = getConfigBuilder(100, false, false) .withRollbackUsingMarkers(false).build(); SparkHoodieHBaseIndex index = new SparkHoodieHBaseIndex(config); try (SparkRDDWriteClient writeClient = getHoodieWriteClient(config)) { String commitTime1 = writeClient.startCommit(); JavaRDD<HoodieRecord> writeRecords1 = generateAndCommitRecords(writeClient, 20, commitTime1); // rollback the commit - leaves a clean file in timeline. writeClient.rollback(commitTime1); // create a second commit with 20 records metaClient = HoodieTableMetaClient.reload(metaClient); generateAndCommitRecords(writeClient, 20); // Now tagLocation for the first set of rolledback records, hbaseIndex should tag them metaClient = HoodieTableMetaClient.reload(metaClient); HoodieTable hoodieTable = HoodieSparkTable.create(config, context, metaClient); JavaRDD<HoodieRecord> javaRDD1 = tagLocation(index, writeRecords1, hoodieTable); assert (javaRDD1.filter(HoodieRecord::isCurrentLocationKnown).collect().size() == 20); } }
public static int computeNetworkBuffersForAnnouncing( final int numBuffersPerChannel, final int numFloatingBuffersPerGate, final Optional<Integer> maxRequiredBuffersPerGate, final int sortShuffleMinParallelism, final int sortShuffleMinBuffers, final Map<IntermediateDataSetID, Integer> inputChannelNums, final Map<IntermediateDataSetID, Integer> partitionReuseCount, final Map<IntermediateDataSetID, Integer> subpartitionNums, final Map<IntermediateDataSetID, ResultPartitionType> inputPartitionTypes, final Map<IntermediateDataSetID, ResultPartitionType> partitionTypes) { int requirementForInputs = 0; for (IntermediateDataSetID dataSetId : inputChannelNums.keySet()) { int numChannels = inputChannelNums.get(dataSetId); ResultPartitionType inputPartitionType = inputPartitionTypes.get(dataSetId); checkNotNull(inputPartitionType); int numSingleGateBuffers = getNumBuffersToAnnounceForInputGate( inputPartitionType, numBuffersPerChannel, numFloatingBuffersPerGate, maxRequiredBuffersPerGate, numChannels); checkState(partitionReuseCount.containsKey(dataSetId)); requirementForInputs += numSingleGateBuffers * partitionReuseCount.get(dataSetId); } int requirementForOutputs = 0; for (IntermediateDataSetID dataSetId : subpartitionNums.keySet()) { int numSubs = subpartitionNums.get(dataSetId); ResultPartitionType partitionType = partitionTypes.get(dataSetId); checkNotNull(partitionType); requirementForOutputs += getNumBuffersToAnnounceForResultPartition( partitionType, numBuffersPerChannel, numFloatingBuffersPerGate, sortShuffleMinParallelism, sortShuffleMinBuffers, numSubs); } return requirementForInputs + requirementForOutputs; }
@Test void testComputeRequiredNetworkBuffers() throws Exception { int numBuffersPerChannel = 5; int numBuffersPerGate = 8; Optional<Integer> maxRequiredBuffersPerGate = Optional.of(Integer.MAX_VALUE); int sortShuffleMinParallelism = 8; int numSortShuffleMinBuffers = 12; IntermediateDataSetID ids1 = new IntermediateDataSetID(); IntermediateDataSetID ids2 = new IntermediateDataSetID(); int numChannels1 = 3; int numChannels2 = 4; IntermediateDataSetID ds1 = new IntermediateDataSetID(); IntermediateDataSetID ds2 = new IntermediateDataSetID(); IntermediateDataSetID ds3 = new IntermediateDataSetID(); int numSubs1 = 5; // pipelined shuffle int numSubs2 = 6; // hash blocking shuffle int numSubs3 = 10; // sort blocking shuffle Map<IntermediateDataSetID, Integer> subpartitionNums = ImmutableMap.of(ds1, numSubs1, ds2, numSubs2, ds3, numSubs3); Map<IntermediateDataSetID, ResultPartitionType> partitionTypes = ImmutableMap.of(ds1, PIPELINED_BOUNDED, ds2, BLOCKING, ds3, BLOCKING); Map<IntermediateDataSetID, Integer> numInputChannels = ImmutableMap.of(ids1, numChannels1, ids2, numChannels2); Map<IntermediateDataSetID, Integer> partitionReuseCount = ImmutableMap.of(ids1, 1, ids2, 1); Map<IntermediateDataSetID, ResultPartitionType> inputPartitionTypes = ImmutableMap.of(ids1, PIPELINED_BOUNDED, ids2, BLOCKING); int numTotalBuffers = NettyShuffleUtils.computeNetworkBuffersForAnnouncing( numBuffersPerChannel, numBuffersPerGate, maxRequiredBuffersPerGate, sortShuffleMinParallelism, numSortShuffleMinBuffers, numInputChannels, partitionReuseCount, subpartitionNums, inputPartitionTypes, partitionTypes); NettyShuffleEnvironment sEnv = new NettyShuffleEnvironmentBuilder() .setNumNetworkBuffers(numTotalBuffers) .setNetworkBuffersPerChannel(numBuffersPerChannel) .setSortShuffleMinBuffers(numSortShuffleMinBuffers) .setSortShuffleMinParallelism(sortShuffleMinParallelism) .build(); SingleInputGate inputGate1 = createInputGate(sEnv, PIPELINED_BOUNDED, numChannels1); inputGate1.setup(); SingleInputGate inputGate2 = createInputGate(sEnv, BLOCKING, numChannels2); inputGate2.setup(); ResultPartition resultPartition1 = createResultPartition(sEnv, PIPELINED_BOUNDED, numSubs1); resultPartition1.setup(); ResultPartition resultPartition2 = createResultPartition(sEnv, BLOCKING, numSubs2); resultPartition2.setup(); ResultPartition resultPartition3 = createResultPartition(sEnv, BLOCKING, numSubs3); resultPartition3.setup(); int expected = calculateBuffersConsumption(inputGate1) + calculateBuffersConsumption(inputGate2) + calculateBuffersConsumption(resultPartition1) + calculateBuffersConsumption(resultPartition2) + calculateBuffersConsumption(resultPartition3); assertThat(numTotalBuffers).isEqualTo(expected); inputGate1.close(); inputGate2.close(); resultPartition1.close(); resultPartition2.close(); resultPartition3.close(); }
@Override
public Flux<ReactiveRedisConnection.BooleanResponse<RenameCommand>> renameNX(Publisher<RenameCommand> commands) {
    return execute(commands, command -> {
        Assert.notNull(command.getKey(), "Key must not be null!");
        Assert.notNull(command.getNewKey(), "New name must not be null!");
        byte[] keyBuf = toByteArray(command.getKey());
        byte[] newKeyBuf = toByteArray(command.getNewKey());
        // Same cluster slot: the native RENAMENX command can be used directly
        if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) {
            return super.renameNX(commands);
        }
        // Different slots: emulate RENAMENX with DUMP + RESTORE (preserving TTL), then DEL the source key
        return exists(command.getNewKey())
                .zipWith(read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf))
                // proceed only if the target key does not exist and the source key has a value to move
                .filter(newKeyExistsAndDump -> !newKeyExistsAndDump.getT1() && Objects.nonNull(newKeyExistsAndDump.getT2()))
                .map(Tuple2::getT2)
                .zipWhen(value -> pTtl(command.getKey())
                        .filter(Objects::nonNull)
                        .map(ttl -> Math.max(0, ttl))
                        .switchIfEmpty(Mono.just(0L))
                )
                .flatMap(valueAndTtl -> write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1())
                        .then(Mono.just(true)))
                .switchIfEmpty(Mono.just(false))
                .doOnSuccess(didRename -> {
                    if (didRename) {
                        del(command.getKey());
                    }
                })
                .map(didRename -> new BooleanResponse<>(command, didRename));
    });
}
@Test public void testRenameNX() { testInClusterReactive(connection -> { connection.stringCommands().set(originalKey, value).block(); if (hasTtl) { connection.keyCommands().expire(originalKey, Duration.ofSeconds(1000)).block(); } Integer originalSlot = getSlotForKey(originalKey, (RedissonReactiveRedisClusterConnection) connection); newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot), connection); Boolean result = connection.keyCommands().renameNX(originalKey, newKey).block(); assertThat(result).isTrue(); assertThat(connection.stringCommands().get(newKey).block()).isEqualTo(value); if (hasTtl) { assertThat(connection.keyCommands().ttl(newKey).block()).isGreaterThan(0); } else { assertThat(connection.keyCommands().ttl(newKey).block()).isEqualTo(-1); } connection.stringCommands().set(originalKey, value).block(); result = connection.keyCommands().renameNX(originalKey, newKey).block(); assertThat(result).isFalse(); }); }
@JsonProperty("type") public FSTType getFstType() { return _fstType; }
@Test public void withDisabledFalse() throws JsonProcessingException { String confStr = "{\"disabled\": false}"; FstIndexConfig config = JsonUtils.stringToObject(confStr, FstIndexConfig.class); assertFalse(config.isDisabled(), "Unexpected disabled"); assertNull(config.getFstType(), "Unexpected type"); }
public static List<UpdateRequirement> forReplaceView( ViewMetadata base, List<MetadataUpdate> metadataUpdates) { Preconditions.checkArgument(null != base, "Invalid view metadata: null"); Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null"); Builder builder = new Builder(null, false); builder.require(new UpdateRequirement.AssertViewUUID(base.uuid())); metadataUpdates.forEach(builder::update); return builder.build(); }
@Test public void setLocationForView() { List<UpdateRequirement> requirements = UpdateRequirements.forReplaceView( viewMetadata, ImmutableList.of(new MetadataUpdate.SetLocation("location"))); requirements.forEach(req -> req.validate(viewMetadata)); assertThat(requirements) .hasSize(1) .hasOnlyElementsOfTypes(UpdateRequirement.AssertViewUUID.class); assertViewUUID(requirements); }
public static Predicate parse(String expression) { final Stack<Predicate> predicateStack = new Stack<>(); final Stack<Character> operatorStack = new Stack<>(); final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll(""); final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true); boolean isTokenMode = true; while (true) { final Character operator; final String token; if (isTokenMode) { if (tokenizer.hasMoreTokens()) { token = tokenizer.nextToken(); } else { break; } if (OPERATORS.contains(token)) { operator = token.charAt(0); } else { operator = null; } } else { operator = operatorStack.pop(); token = null; } isTokenMode = true; if (operator == null) { try { predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance()); } catch (ClassCastException e) { throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e); } catch (Exception e) { throw new RuntimeException(e); } } else { if (operatorStack.empty() || operator == '(') { operatorStack.push(operator); } else if (operator == ')') { while (operatorStack.peek() != '(') { evaluate(predicateStack, operatorStack); } operatorStack.pop(); } else { if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) { evaluate(predicateStack, operatorStack); isTokenMode = false; } operatorStack.push(operator); } } } while (!operatorStack.empty()) { evaluate(predicateStack, operatorStack); } if (predicateStack.size() > 1) { throw new RuntimeException("Invalid logical expression"); } return predicateStack.pop(); }
@Test(expectedExceptions = EmptyStackException.class) public void testNotMissingOperandAnd() { PredicateExpressionParser.parse("! & com.linkedin.data.it.AlwaysFalsePredicate"); }
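The parser above is a two-stack, precedence-driven (shunting-yard style) evaluator, and the test passes because evaluating the leading "!" finds no operand on the predicate stack, so the pop throws EmptyStackException. A self-contained sketch of the same two-stack technique over boolean literals rather than Predicate class names (all names hypothetical; this is not the library's code):

import java.util.Stack;
import java.util.StringTokenizer;

// Minimal two-stack boolean expression evaluator: operands on one stack,
// operators on the other, collapsing by precedence as in the parser above.
public class BoolExprSketch {
    static int precedence(char op) {
        switch (op) {
            case '!': return 3;
            case '&': return 2;
            case '|': return 1;
            default:  return 0; // '('
        }
    }

    static void evaluate(Stack<Boolean> operands, Stack<Character> operators) {
        char op = operators.pop();
        if (op == '!') {
            operands.push(!operands.pop());
        } else {
            boolean right = operands.pop();
            boolean left = operands.pop();
            operands.push(op == '&' ? left && right : left || right);
        }
    }

    public static boolean parse(String expression) {
        Stack<Boolean> operands = new Stack<>();
        Stack<Character> operators = new Stack<>();
        StringTokenizer tokenizer = new StringTokenizer(expression, "!&|()", true);
        while (tokenizer.hasMoreTokens()) {
            String token = tokenizer.nextToken().trim();
            if (token.isEmpty()) {
                continue;
            }
            if ("!&|()".contains(token)) {
                char op = token.charAt(0);
                if (op == '(') {
                    operators.push(op);
                } else if (op == ')') {
                    while (operators.peek() != '(') {
                        evaluate(operands, operators);
                    }
                    operators.pop();
                } else {
                    // collapse higher-precedence operators before pushing this one
                    while (!operators.empty() && operators.peek() != '('
                            && precedence(op) < precedence(operators.peek())) {
                        evaluate(operands, operators);
                    }
                    operators.push(op);
                }
            } else {
                operands.push(Boolean.parseBoolean(token));
            }
        }
        while (!operators.empty()) {
            evaluate(operands, operators);
        }
        return operands.pop();
    }

    public static void main(String[] args) {
        System.out.println(parse("true & (false | !false)")); // true
    }
}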
public void rehash() { resize(keys.length); }
@Test public void testRehash() { removeOdd(); map.trimToSize(); testGet(map); }
@SuppressWarnings({"unchecked", "rawtypes"}) @Override public @Nullable <InputT> TransformEvaluator<InputT> forApplication( AppliedPTransform<?, ?, ?> application, CommittedBundle<?> inputBundle) { return createEvaluator((AppliedPTransform) application); }
@Test public void unboundedSourceWithDuplicatesMultipleCalls() throws Exception { Long[] outputs = new Long[20]; for (long i = 0L; i < 20L; i++) { outputs[(int) i] = i % 5L; } TestUnboundedSource<Long> source = new TestUnboundedSource<>(BigEndianLongCoder.of(), outputs); source.dedupes = true; PCollection<Long> pcollection = p.apply(Read.from(source)); SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReads(p); AppliedPTransform<?, ?, ?> sourceTransform = getProducer(pcollection); when(context.createRootBundle()).thenReturn(bundleFactory.createRootBundle()); Collection<CommittedBundle<?>> initialInputs = new UnboundedReadEvaluatorFactory.InputProvider(context, p.getOptions()) .getInitialInputs(sourceTransform, 1); UncommittedBundle<Long> output = bundleFactory.createBundle(pcollection); when(context.createBundle(pcollection)).thenReturn(output); CommittedBundle<?> inputBundle = Iterables.getOnlyElement(initialInputs); TransformEvaluator<UnboundedSourceShard<Long, TestCheckpointMark>> evaluator = factory.forApplication(sourceTransform, inputBundle); for (WindowedValue<?> value : inputBundle.getElements()) { evaluator.processElement( (WindowedValue<UnboundedSourceShard<Long, TestCheckpointMark>>) value); } TransformResult<UnboundedSourceShard<Long, TestCheckpointMark>> result = evaluator.finishBundle(); assertThat( output.commit(Instant.now()).getElements(), containsInAnyOrder(tgw(1L), tgw(2L), tgw(4L), tgw(3L), tgw(0L))); UncommittedBundle<Long> secondOutput = bundleFactory.createBundle(longs); when(context.createBundle(longs)).thenReturn(secondOutput); TransformEvaluator<UnboundedSourceShard<Long, TestCheckpointMark>> secondEvaluator = factory.forApplication(sourceTransform, inputBundle); WindowedValue<UnboundedSourceShard<Long, TestCheckpointMark>> residual = (WindowedValue<UnboundedSourceShard<Long, TestCheckpointMark>>) Iterables.getOnlyElement(result.getUnprocessedElements()); secondEvaluator.processElement(residual); secondEvaluator.finishBundle(); assertThat(secondOutput.commit(Instant.now()).getElements(), Matchers.emptyIterable()); }
public FloatArrayAsIterable usingTolerance(double tolerance) { return new FloatArrayAsIterable(tolerance(tolerance), iterableSubject()); }
@Test public void usingTolerance_containsAnyOf_primitiveFloatArray_success() { assertThat(array(1.0f, TOLERABLE_2POINT2, 3.0f)) .usingTolerance(DEFAULT_TOLERANCE) .containsAnyOf(array(99.99f, 2.2f)); }
void wakeup() { wokenUp.set(true); lock.lock(); try { notEmptyCondition.signalAll(); } finally { lock.unlock(); } }
@Test public void testWakeup() throws Exception { try (ShareFetchBuffer fetchBuffer = new ShareFetchBuffer(logContext)) { final Thread waitingThread = new Thread(() -> { final Timer timer = time.timer(Duration.ofMinutes(1)); fetchBuffer.awaitNotEmpty(timer); }); waitingThread.start(); fetchBuffer.wakeup(); waitingThread.join(Duration.ofSeconds(30).toMillis()); assertFalse(waitingThread.isAlive()); } }
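The wakeup/await pair above is the classic flag-plus-condition pattern: the flag is set before signalling, and the waiter re-checks it in a loop, so a wakeup that arrives before or during the wait is never lost. A self-contained sketch of the pattern (hypothetical class, plain java.util.concurrent):

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

// Flag-plus-condition wakeup: set the flag first, then signal under the lock;
// the waiter loops on the flag so it cannot miss the wakeup.
public class WakeupSketch {
    private final ReentrantLock lock = new ReentrantLock();
    private final Condition notEmpty = lock.newCondition();
    private final AtomicBoolean wokenUp = new AtomicBoolean(false);

    void await() throws InterruptedException {
        lock.lock();
        try {
            while (!wokenUp.get()) {
                notEmpty.await();
            }
        } finally {
            lock.unlock();
        }
    }

    void wakeup() {
        wokenUp.set(true);
        lock.lock();
        try {
            notEmpty.signalAll();
        } finally {
            lock.unlock();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        WakeupSketch sketch = new WakeupSketch();
        Thread waiter = new Thread(() -> {
            try {
                sketch.await();
            } catch (InterruptedException ignored) {
            }
        });
        waiter.start();
        sketch.wakeup();
        waiter.join(30_000);
        System.out.println(!waiter.isAlive()); // true: the waiter was released
    }
}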
public Optional<Integer> leaderOpt() { return leader == LeaderAndIsr.NO_LEADER ? Optional.empty() : Optional.of(leader); }
@Test public void testLeaderOpt() { LeaderAndIsr leaderAndIsr = new LeaderAndIsr(2, Arrays.asList(1, 2, 3)); assertEquals(2, leaderAndIsr.leaderOpt().orElse(0)); }
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException { try { if(status.isExists()) { delete.delete(Collections.singletonMap(renamed, status), connectionCallback, callback); } if(!session.getClient().rename(file.getAbsolute(), renamed.getAbsolute())) { throw new FTPException(session.getClient().getReplyCode(), session.getClient().getReplyString()); } // Copy original file attributes return renamed.withAttributes(file.attributes()); } catch(IOException e) { throw new FTPExceptionMappingService().map("Cannot rename {0}", e, file); } }
@Test public void testMoveNotFound() throws Exception { final Home workdir = new FTPWorkdirService(session); final Path test = new Path(workdir.find(), UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); assertThrows(NotfoundException.class, () -> new FTPMoveFeature(session).move(test, new Path(workdir.find(), UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback())); }
public static MemberSelector and(MemberSelector... selectors) { return new AndMemberSelector(selectors); }
@Test public void testAndMemberSelector3() { when(member.localMember()).thenReturn(true); when(member.isLiteMember()).thenReturn(true); MemberSelector selector = MemberSelectors.and(LOCAL_MEMBER_SELECTOR, LITE_MEMBER_SELECTOR); assertTrue(selector.select(member)); verify(member).localMember(); verify(member).isLiteMember(); }
@Override
public boolean isSatisfied(int index, TradingRecord tradingRecord) {
    if (tradingRecord != null && !tradingRecord.isClosed()) {
        Num entryPrice = tradingRecord.getCurrentPosition().getEntry().getNetPrice();
        Num currentPrice = this.referencePrice.getValue(index);
        Num threshold = this.stopLossThreshold.getValue(index);
        int barsSinceEntry = index - tradingRecord.getCurrentPosition().getEntry().getIndex() + 1;
        if (tradingRecord.getCurrentPosition().getEntry().isBuy()) {
            // Long position: trail the stop below the highest price seen since entry
            HighestValueIndicator highestPrice = new HighestValueIndicator(this.referencePrice, barsSinceEntry);
            Num thresholdPrice = entryPrice.max(highestPrice.getValue(index)).minus(threshold);
            return currentPrice.isLessThan(thresholdPrice);
        } else {
            // Short position: trail the stop above the lowest price seen since entry
            LowestValueIndicator lowestPrice = new LowestValueIndicator(this.referencePrice, barsSinceEntry);
            Num thresholdPrice = entryPrice.min(lowestPrice.getValue(index)).plus(threshold);
            return currentPrice.isGreaterThan(thresholdPrice);
        }
    }
    return false;
}
@Test public void testStopLossTriggeredOnLongPosition() { TradingRecord tradingRecord = new BaseTradingRecord(); tradingRecord.enter(0, series.getBar(0).getClosePrice(), series.numOf(1)); AverageTrueRangeTrailingStopLossRule rule = new AverageTrueRangeTrailingStopLossRule(series, 3, 1.0); assertFalse(rule.isSatisfied(1, tradingRecord)); // Price is still above stop loss assertFalse(rule.isSatisfied(2, tradingRecord)); // Price is still above stop loss // Simulate a price drop to trigger stop loss series.addBar(series.getLastBar().getEndTime().plusDays(1), 11, 12, 9, 10, 1000); assertTrue(rule.isSatisfied(5, tradingRecord)); // Stop loss should trigger now }
@Override public void onChannelClose(String remoteAddr, Channel channel) { this.namesrvController.getRouteInfoManager().onChannelDestroy(channel); }
@Test public void testOnChannelClose() { brokerHousekeepingService.onChannelClose("127.0.0.1:9876", null); }
@Override public V replace(K key, V newValue) { return map.replace(key, newValue); }
@Test public void testReplace() { map.put(42, "oldValue"); String oldValue = adapter.replace(42, "newValue"); assertEquals("oldValue", oldValue); assertEquals("newValue", map.get(42)); }
public static Builder from(K8sNode node) {
    return new Builder()
            .hostname(node.hostname())
            .clusterName(node.clusterName())
            .type(node.type())
            .segmentId(node.segmentId())
            .intgBridge(node.intgBridge())
            .extBridge(node.extBridge())
            .localBridge(node.localBridge())
            .tunBridge(node.tunBridge())
            .extIntf(node.extIntf())
            .managementIp(node.managementIp())
            .dataIp(node.dataIp())
            .nodeInfo(node.nodeInfo())
            .state(node.state())
            .extBridgeIp(node.extBridgeIp())
            .extGatewayIp(node.extGatewayIp())
            .extGatewayMac(node.extGatewayMac())
            .podCidr(node.podCidr());
}
@Test public void testFrom() { K8sNode updatedNode = DefaultK8sNode.from(refNode).build(); assertEquals(updatedNode, refNode); }
@Override public boolean overlap(final Window other) { if (getClass() != other.getClass()) { throw new IllegalArgumentException("Cannot compare windows of different type. Other window has type " + other.getClass() + "."); } return true; }
@Test public void cannotCompareUnlimitedWindowWithDifferentWindowType() { assertThrows(IllegalArgumentException.class, () -> window.overlap(sessionWindow)); }
public byte[] getNextTag() {
    byte[] tagBytes = null;
    // Reuse a previously released tag if pooling is enabled
    if (tagPool != null) {
        tagBytes = tagPool.pollFirst();
    }
    if (tagBytes == null) {
        long tag = nextTagId++;
        int size = encodingSize(tag);
        tagBytes = new byte[size];
        // Encode the tag id big-endian, using only as many bytes as the value needs
        for (int i = 0; i < size; ++i) {
            tagBytes[size - 1 - i] = (byte) (tag >>> (i * 8));
        }
    }
    return tagBytes;
}
@Test public void testTagGenerationWorksWithIdRollover() throws Exception { AmqpTransferTagGenerator tagGen = new AmqpTransferTagGenerator(false); Field urisField = tagGen.getClass().getDeclaredField("nextTagId"); urisField.setAccessible(true); urisField.set(tagGen, Long.MAX_VALUE + 1); { byte[] tag = tagGen.getNextTag(); ByteArrayInputStream bais = new ByteArrayInputStream(tag); DataInputStream dis = new DataInputStream(bais); assertEquals(8, tag.length); assertEquals(Long.MAX_VALUE + 1, dis.readLong()); } { byte[] tag = tagGen.getNextTag(); ByteArrayInputStream bais = new ByteArrayInputStream(tag); DataInputStream dis = new DataInputStream(bais); assertEquals(8, tag.length); assertEquals(Long.MAX_VALUE + 2, dis.readLong()); } }
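Long.MAX_VALUE + 1 silently overflows to Long.MIN_VALUE in Java, and the byte-at-a-time loop in getNextTag writes that wrapped value most-significant byte first, which is exactly the layout DataInputStream.readLong() expects. A standalone sketch (hypothetical class name) of the round trip the test relies on:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

// Big-endian byte layout produced by the encoding loop above,
// showing that the rolled-over id round-trips through 8 bytes.
public class TagEncodingSketch {
    static byte[] encode(long tag, int size) {
        byte[] bytes = new byte[size];
        for (int i = 0; i < size; i++) {
            bytes[size - 1 - i] = (byte) (tag >>> (i * 8)); // most significant byte first
        }
        return bytes;
    }

    public static void main(String[] args) throws IOException {
        long rolled = Long.MAX_VALUE + 1; // overflows to Long.MIN_VALUE
        byte[] tag = encode(rolled, 8);
        long decoded = new DataInputStream(new ByteArrayInputStream(tag)).readLong();
        System.out.println(decoded == rolled); // true: the wrapped value survives the round trip
    }
}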
@Override public long removeConsumer(String groupName, String consumerName) { return get(removeConsumerAsync(groupName, consumerName)); }
@Test public void testRemoveConsumer() { RStream<String, String> stream = redisson.getStream("test"); stream.add(StreamAddArgs.entry("0", "0")); stream.createGroup(StreamCreateGroupArgs.name("testGroup").makeStream()); StreamMessageId id1 = stream.add(StreamAddArgs.entry("1", "1")); StreamMessageId id2 = stream.add(StreamAddArgs.entry("2", "2")); Map<StreamMessageId, Map<String, String>> s = stream.readGroup("testGroup", "consumer1", StreamReadGroupArgs.neverDelivered()); assertThat(s.size()).isEqualTo(2); assertThat(stream.removeConsumer("testGroup", "consumer1")).isEqualTo(2); assertThat(stream.removeConsumer("testGroup", "consumer2")).isZero(); }
@Override public void publishLong(MetricDescriptor descriptor, long value) { publishNumber(descriptor, value, LONG); }
@Test public void when_singleMetric() throws Exception { MetricDescriptor descriptor = newDescriptor() .withMetric("c") .withTag("tag1", "a") .withTag("tag2", "b"); jmxPublisher.publishLong(descriptor, 1L); helper.assertMBeans(singletonList( metric(domainPrefix + ":type=Metrics,instance=inst1,tag0=\"tag1=a\",tag1=\"tag2=b\"", singletonList(longValue("c", 1L))))); }
@Override public void onNewResourcesAvailable() { checkDesiredOrSufficientResourcesAvailable(); }
@Test void testNotifyNewResourcesAvailable() { ctx.setHasDesiredResources(() -> false); // initially, not enough resources WaitingForResources wfr = new WaitingForResources(ctx, LOG, Duration.ZERO, STABILIZATION_TIMEOUT); ctx.setHasDesiredResources(() -> true); // make resources available ctx.setExpectCreatingExecutionGraph(); wfr.onNewResourcesAvailable(); // .. and notify }
boolean shouldRetry(GetQueryExecutionResponse getQueryExecutionResponse) { String stateChangeReason = getQueryExecutionResponse.queryExecution().status().stateChangeReason(); if (this.retry.contains("never")) { LOG.trace("AWS Athena start query execution detected error ({}), marked as not retryable", stateChangeReason); return false; } if (this.retry.contains("always")) { LOG.trace("AWS Athena start query execution detected error ({}), marked as retryable", stateChangeReason); return true; } // Generic errors happen sometimes in Athena. It's possible that a retry will fix the problem. if (stateChangeReason != null && stateChangeReason.contains("GENERIC_INTERNAL_ERROR") && (this.retry.contains("generic") || this.retry.contains("retryable"))) { LOG.trace("AWS Athena start query execution detected generic error ({}), marked as retryable", stateChangeReason); return true; } // Resource exhaustion happens sometimes in Athena. It's possible that a retry will fix the problem. if (stateChangeReason != null && stateChangeReason.contains("exhausted resources at this scale factor") && (this.retry.contains("exhausted") || this.retry.contains("retryable"))) { LOG.trace("AWS Athena start query execution detected resource exhaustion error ({}), marked as retryable", stateChangeReason); return true; } return false; }
@Test public void shouldRetryReturnsTrueForExhaustedResourcedError() { Athena2QueryHelper helper = athena2QueryHelperWithRetry("retryable"); assertTrue(helper.shouldRetry( newGetQueryExecutionResponse(QueryExecutionState.FAILED, "exhausted resources at this scale factor"))); }
@Override public List<RemoteFileInfo> getRemoteFiles(Table table, GetRemoteFilesParams params) { TableVersionRange version = params.getTableVersionRange(); long snapshotId = version.end().isPresent() ? version.end().get() : -1; return getRemoteFiles((IcebergTable) table, snapshotId, params.getPredicate(), params.getLimit()); }
@Test public void testGetRemoteFile() throws IOException { IcebergHiveCatalog icebergHiveCatalog = new IcebergHiveCatalog(CATALOG_NAME, new Configuration(), DEFAULT_CONFIG); List<Column> columns = Lists.newArrayList(new Column("k1", INT), new Column("k2", INT)); IcebergMetadata metadata = new IcebergMetadata(CATALOG_NAME, HDFS_ENVIRONMENT, icebergHiveCatalog, Executors.newSingleThreadExecutor(), Executors.newSingleThreadExecutor(), null); IcebergTable icebergTable = new IcebergTable(1, "srTableName", CATALOG_NAME, "resource_name", "iceberg_db", "iceberg_table", "", columns, mockedNativeTableB, Maps.newHashMap()); mockedNativeTableB.newAppend().appendFile(FILE_B_1).appendFile(FILE_B_2).commit(); mockedNativeTableB.refresh(); long snapshotId = mockedNativeTableB.currentSnapshot().snapshotId(); ScalarOperator predicate = new BinaryPredicateOperator(BinaryType.GE, new ColumnRefOperator(1, INT, "k2", true), ConstantOperator.createInt(1)); List<RemoteFileInfo> res = metadata.getRemoteFiles(icebergTable, GetRemoteFilesParams.newBuilder().setTableVersionRange(TableVersionRange.withEnd(Optional.of(snapshotId))) .setPredicate(predicate).setFieldNames(Lists.newArrayList()).setLimit(10).build()); IcebergRemoteFileDesc fileDesc = (IcebergRemoteFileDesc) res.get(0).getFiles().get(0); Assert.assertEquals(7, fileDesc.getIcebergScanTasks().stream() .map(x -> x.file().recordCount()).reduce(0L, Long::sum), 0.001); StarRocksAssert starRocksAssert = new StarRocksAssert(); starRocksAssert.getCtx().getSessionVariable().setEnablePruneIcebergManifest(true); mockedNativeTableB.refresh(); snapshotId = mockedNativeTableB.currentSnapshot().snapshotId(); predicate = new BinaryPredicateOperator(BinaryType.EQ, new ColumnRefOperator(1, INT, "k2", true), ConstantOperator.createInt(2)); res = metadata.getRemoteFiles(icebergTable, GetRemoteFilesParams.newBuilder().setTableVersionRange(TableVersionRange.withEnd(Optional.of(snapshotId))) .setPredicate(predicate).setFieldNames(Lists.newArrayList()).setLimit(10).build()); fileDesc = (IcebergRemoteFileDesc) res.get(0).getFiles().get(0); Assert.assertEquals(1, fileDesc.getIcebergScanTasks().size()); Assert.assertEquals(3, fileDesc.getIcebergScanTasks().get(0).file().recordCount()); PredicateSearchKey filter = PredicateSearchKey.of("db", "table", 1, null); Assert.assertEquals("Filter{databaseName='db', tableName='table', snapshotId=1, predicate=true}", filter.toString()); }
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { byte[] header = new byte[4]; IOUtils.read(stream, header, 0, 4); // Extract magic byte if (header[0] == (byte) 'i' && header[1] == (byte) 'c' && header[2] == (byte) 'n' && header[3] == (byte) 's') { // Good, signature found } else { throw new TikaException("ICNS magic signature invalid"); } IOUtils.read(stream, header, 0, 4); //Extract image size/length of bytes in file int image_length = java.nio.ByteBuffer.wrap(header).getInt(); image_length -= 8;//for the bytes read so far if (image_length > MAX_IMAGE_LENGTH_BYTES) { throw new TikaMemoryLimitException(image_length, MAX_IMAGE_LENGTH_BYTES); } else if (image_length < 0) { throw new TikaException("image length must be >= 0"); } byte[] full_file = new byte[image_length]; IOUtils.readFully(stream, full_file); ArrayList<ICNSType> icons = new ArrayList<>(); ArrayList<ICNSType> icon_masks = new ArrayList<>(); byte[] tempByteArray = new byte[4]; for (int offset = 0; offset < image_length - 8; ) { //Read the ResType/OSTYpe identifier for sub-icon tempByteArray[0] = full_file[offset]; tempByteArray[1] = full_file[offset + 1]; tempByteArray[2] = full_file[offset + 2]; tempByteArray[3] = full_file[offset + 3]; ICNSType icnstype = findIconType(tempByteArray); if (icnstype == null) { //exit out of loop //No more icons left break; } else if (icnstype.hasMask() == true) { icon_masks.add(findIconType(tempByteArray)); } else { icons.add(findIconType(tempByteArray)); } //Read the sub-icon length tempByteArray[0] = full_file[offset + 4]; tempByteArray[1] = full_file[offset + 5]; tempByteArray[2] = full_file[offset + 6]; tempByteArray[3] = full_file[offset + 7]; int icon_length = java.nio.ByteBuffer.wrap(tempByteArray).getInt(); if (icon_length <= 0) { break; } offset = offset + icon_length; } StringBuilder icon_details = new StringBuilder(); StringBuilder iconmask_details = new StringBuilder(); String bitsPerPixel; String dimensions; for (ICNSType icon : icons) { bitsPerPixel = (icon.getBitsPerPixel() != 0) ? icon.getBitsPerPixel() + " bpp" : "JPEG 2000 or PNG format"; dimensions = (!icon.hasRetinaDisplay()) ? (icon.getHeight() + "x" + icon.getWidth()) : (icon.getHeight() + "x" + icon.getWidth() + "@2X"); icon_details.append(", ").append(dimensions).append(" (").append(bitsPerPixel).append(")"); } for (ICNSType icon : icon_masks) { iconmask_details .append(", ") .append(icon.getHeight()) .append("x") .append(icon.getWidth()) .append(" (") .append(icon.getBitsPerPixel()) .append(" bpp") .append(")"); } metadata.set(Metadata.CONTENT_TYPE, ICNS_MIME_TYPE); if (!icon_details.toString().equals("")) { metadata.set("Icon count", String.valueOf(icons.size())); icon_details = new StringBuilder(icon_details.substring(2)); metadata.set("Icon details", icon_details.toString()); } if (!iconmask_details.toString().equals("")) { metadata.set("Masked icon count", String.valueOf(icon_masks.size())); iconmask_details = new StringBuilder(iconmask_details.substring(2)); metadata.set("Masked icon details", iconmask_details.toString()); } XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); xhtml.startDocument(); xhtml.endDocument(); }
@Test public void testICNS_basic() throws Exception { Metadata metadata = new Metadata(); metadata.set(Metadata.CONTENT_TYPE, "image/icns"); metadata.set("Icons count", "1"); metadata.set("Icons details", "512x512 (JPEG 2000 or PNG format)"); try (InputStream stream = getClass() .getResourceAsStream("/test-documents/testICNS_basic.icns")) { parser.parse(stream, new DefaultHandler(), metadata, new ParseContext()); } }
public List<KuduPredicate> convert(ScalarOperator operator) { if (operator == null) { return null; } return operator.accept(this, null); }
@Test public void testNull() { List<KuduPredicate> result = CONVERTER.convert(null); Assert.assertNull(result); }
public Quantity<U> zoomBy(double zoom) { return new Quantity<U>(value * zoom, unit); }
@Test public void zoomQuantity() throws Exception { Quantity<Metrics> q = new Quantity<Metrics>(100, Metrics.cm); assertThat(q.zoomBy(0.5)).isEqualTo(new Quantity<Metrics>(50, Metrics.cm)); }
@Override public void showPreviewForKey( Keyboard.Key key, Drawable icon, View parentView, PreviewPopupTheme previewPopupTheme) { KeyPreview popup = getPopupForKey(key, parentView, previewPopupTheme); Point previewPosition = mPositionCalculator.calculatePositionForPreview( key, previewPopupTheme, getLocationInWindow(parentView)); popup.showPreviewForKey(key, icon, previewPosition); }
@Test public void testPopupForRegularKey() { KeyPreviewsManager underTest = new KeyPreviewsManager(getApplicationContext(), mPositionCalculator, 3); Assert.assertNull(getLatestCreatedPopupWindow()); underTest.showPreviewForKey(mTestKeys[0], "y", mKeyboardView, mTheme); Assert.assertNotNull(getLatestCreatedPopupWindow()); }
@Override public JdbcRecordIterator getRecordIterator(Configuration conf, String partitionColumn, String lowerBound, String upperBound, int limit, int offset) throws HiveJdbcDatabaseAccessException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { initializeDatabaseConnection(conf); String tableName = getQualifiedTableName(conf); // Always use JDBC_QUERY if available both for correctness and performance. JDBC_QUERY can be set by the user // or the CBO including pushdown optimizations. SELECT all query should be used only when JDBC_QUERY is null. String sql = firstNonNull(conf.get(Constants.JDBC_QUERY), selectAllFromTable(tableName)); String partitionQuery; if (partitionColumn != null) { partitionQuery = addBoundaryToQuery(tableName, sql, partitionColumn, lowerBound, upperBound); } else { partitionQuery = addLimitAndOffsetToQuery(sql, limit, offset); } LOGGER.info("Query to execute is [{}]", partitionQuery); conn = dbcpDataSource.getConnection(); ps = conn.prepareStatement(partitionQuery, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); ps.setFetchSize(getFetchSize(conf)); rs = ps.executeQuery(); return new JdbcRecordIterator(this, conn, ps, rs, conf); } catch (Exception e) { LOGGER.error("Caught exception while trying to execute query", e); cleanupResources(conn, ps, rs); throw new HiveJdbcDatabaseAccessException("Caught exception while trying to execute query: " + e.getMessage(), e); } }
@Test(expected = HiveJdbcDatabaseAccessException.class) public void testGetRecordIterator_invalidQuery() throws HiveJdbcDatabaseAccessException { Configuration conf = buildConfiguration(); conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from strategyx"); DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf); @SuppressWarnings("unused") JdbcRecordIterator iterator = accessor.getRecordIterator(conf, null, null, null, 0, 2); }
public List<SpoonPluginInterface> getPlugins() { return Collections.unmodifiableList( Arrays.asList( plugins.values().toArray( new SpoonPluginInterface[] {} ) ) ); }
@Test public void testGetPlugins() throws Exception { spoonPluginManager.pluginAdded( plugin1 ); spoonPluginManager.pluginAdded( plugin2 ); List<SpoonPluginInterface> pluginInterfaces = spoonPluginManager.getPlugins(); assertEquals( 2, pluginInterfaces.size() ); assertTrue( pluginInterfaces .containsAll( Arrays.asList( spoonPluginInterface1, spoonPluginInterface2 ) ) ); }
public ProviderBuilder threadPool(String threadPool) { this.threadpool = threadPool; return getThis(); }
@Test void threadPool() { ProviderBuilder builder = ProviderBuilder.newBuilder(); builder.threadPool("mockthreadpool"); Assertions.assertEquals("mockthreadpool", builder.build().getThreadpool()); }
public Object execute(ProceedingJoinPoint proceedingJoinPoint, Method method, String fallbackMethodValue, CheckedSupplier<Object> primaryFunction) throws Throwable { String fallbackMethodName = spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue); FallbackMethod fallbackMethod = null; if (StringUtils.hasLength(fallbackMethodName)) { try { fallbackMethod = FallbackMethod .create(fallbackMethodName, method, proceedingJoinPoint.getArgs(), proceedingJoinPoint.getTarget(), proceedingJoinPoint.getThis()); } catch (NoSuchMethodException ex) { logger.warn("No fallback method match found", ex); } } if (fallbackMethod == null) { return primaryFunction.get(); } else { return fallbackDecorators.decorate(fallbackMethod, primaryFunction).get(); } }
@Test public void testPrimaryMethodExecutionWithFallbackNotFound() throws Throwable { Method method = this.getClass().getMethod("getName", String.class); final CheckedSupplier<Object> primaryFunction = () -> getName("Name"); final String fallbackMethodValue = "incorrectFallbackMethodName"; when(proceedingJoinPoint.getArgs()).thenReturn(new Object[]{}); when(proceedingJoinPoint.getTarget()).thenReturn(this); when(spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue)).thenReturn(fallbackMethodValue); when(fallbackDecorators.decorate(any(),eq(primaryFunction))).thenReturn(primaryFunction); final Object result = fallbackExecutor.execute(proceedingJoinPoint, method, fallbackMethodValue, primaryFunction); assertThat(result).isEqualTo("Name"); verify(spelResolver, times(1)).resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue); verify(fallbackDecorators, never()).decorate(any(),any()); }
public static RuntimeException peel(final Throwable t) { return (RuntimeException) peel(t, null, null, HAZELCAST_EXCEPTION_WRAPPER); }
@Test public void testPeel_whenThrowableIsExecutionExceptionWithNullCause_thenReturnHazelcastException() { ExecutionException exception = new ExecutionException(null); RuntimeException result = ExceptionUtil.peel(exception); assertTrue(result instanceof HazelcastException); assertEquals(exception, result.getCause()); }
@Override public Iterator<IndexEntry> readyIndexesIterator() { readLock.lock(); try { var readyIndexes = new ArrayList<IndexEntry>(); for (IndexEntry entry : indexEntries) { if (entry.getIndexDescriptor().isReady()) { readyIndexes.add(entry); } } return readyIndexes.iterator(); } finally { readLock.unlock(); } }
@Test void readyIndexesIterator() { var indexContainer = new IndexEntryContainer(); var descriptor = new IndexDescriptor(getNameIndexSpec()); descriptor.setReady(true); var nameIndexEntry = new IndexEntryImpl(descriptor); indexContainer.add(nameIndexEntry); var indexer = new DefaultIndexer(List.of(descriptor), indexContainer); var iterator = indexer.readyIndexesIterator(); assertThat(iterator.hasNext()).isTrue(); descriptor.setReady(false); iterator = indexer.readyIndexesIterator(); assertThat(iterator.hasNext()).isFalse(); }
public static IssueChangeContextBuilder issueChangeContextByScanBuilder(Date date) { return newBuilder().withScan().setUserUuid(null).setDate(date); }
@Test public void test_issueChangeContextByScanBuilder() { context = issueChangeContextByScanBuilder(NOW).build(); verifyContext(true, false, null, null, null); }
@Override protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) { Map<String, List<GeneralContextHandle>> generalContextHandleMap = GeneralContextPluginDataHandler.CACHED_HANDLE.get().obtainHandle(CacheKeyUtils.INST.getKey(rule)); if (generalContextHandleMap.isEmpty()) { return chain.execute(exchange); } Map<String, Map<String, String>> generalContextMap = new HashMap<>(); HttpHeaders headers = exchange.getRequest().getHeaders(); generalContextHandleMap.forEach((rpcType, v) -> { if (CollectionUtils.isEmpty(v)) { return; } Map<String, String> generalContextMapWithRpcType = new HashMap<>(); v.forEach(each -> { if (StringUtils.isBlank(each.getGeneralContextType()) || StringUtils.isBlank(each.getGeneralContextKey())) { return; } switch (each.getGeneralContextType()) { case Constants.ADD_GENERAL_CONTEXT_TYPE: generalContextMapWithRpcType.put(each.getGeneralContextKey(), each.getGeneralContextValue()); break; case Constants.TRANSMIT_HEADER_TO_GENERAL_CONTEXT_TYPE: final List<String> header = headers.get(each.getGeneralContextKey()); generalContextMapWithRpcType.put(StringUtils.isBlank(each.getGeneralContextValue()) ? each.getGeneralContextKey() : each.getGeneralContextValue(), CollectionUtils.isEmpty(header) ? null : String.join(", ", header)); break; default: break; } }); generalContextMap.put(rpcType, generalContextMapWithRpcType); }); exchange.getAttributes().put(Constants.GENERAL_CONTEXT, generalContextMap); return chain.execute(exchange); }
@Test public void testDoExecute() { SelectorData selectorData = mock(SelectorData.class); when(this.chain.execute(any())).thenReturn(Mono.empty()); StepVerifier.create(generalContextPlugin.doExecute(this.exchange, this.chain, selectorData, this.ruleData)).expectSubscription().verifyComplete(); ArgumentCaptor<ServerWebExchange> newExchange = ArgumentCaptor.forClass(ServerWebExchange.class); Mockito.verify(this.chain, times(1)).execute(newExchange.capture()); Map<String, String> shenyuGeneralContext = ((Map<String, Map<String, String>>) newExchange.getValue().getAttributes().get(Constants.GENERAL_CONTEXT)).get(PluginEnum.DUBBO.getName()); assertTrue(shenyuGeneralContext.containsKey("addGeneralContextKey")); assertTrue(shenyuGeneralContext.containsKey("shenyuTestHeaderNewKey")); assertEquals(shenyuGeneralContext.get("addGeneralContextKey"), "addGeneralContextValue"); assertEquals(shenyuGeneralContext.get("shenyuTestHeaderNewKey"), "shenyuTestHeaderValue"); shenyuGeneralContext = ((Map<String, Map<String, String>>) newExchange.getValue().getAttributes().get(Constants.GENERAL_CONTEXT)).get(PluginEnum.SOFA.getName()); assertNull(shenyuGeneralContext); }
@Override public void draw(int x, int y) { pixels[getIndex(x, y)] = Pixel.BLACK; }
@Test void testDraw() { var frameBuffer = new FrameBuffer(); frameBuffer.draw(0, 0); assertEquals(Pixel.BLACK, frameBuffer.getPixels()[0]); }
@Override public SelArray assignOps(SelOp op, SelType rhs) { if (op == SelOp.ASSIGN) { SelTypeUtil.checkTypeMatch(this.type(), rhs.type()); this.val = ((SelArray) rhs).val; // direct assignment return this; } throw new UnsupportedOperationException( this.type() + " DO NOT support assignment operation " + op); }
@Test public void testAssignOps() { one.assignOps(SelOp.ASSIGN, new SelArray(1, SelTypes.STRING_ARRAY)); assertEquals("STRING_ARRAY: [null]", one.type() + ": " + one); }
public static boolean isInstantiationStrategy(Object extension, String strategy) { InstantiationStrategy annotation = AnnotationUtils.getAnnotation(extension, InstantiationStrategy.class); if (annotation != null) { return strategy.equals(annotation.value()); } return InstantiationStrategy.PER_PROJECT.equals(strategy); }
@Test public void shouldBeProjectInstantiationStrategy() { assertThat(ExtensionUtils.isInstantiationStrategy(ProjectService.class, InstantiationStrategy.PER_PROJECT)).isTrue(); assertThat(ExtensionUtils.isInstantiationStrategy(new ProjectService(), InstantiationStrategy.PER_PROJECT)).isTrue(); assertThat(ExtensionUtils.isInstantiationStrategy(DefaultService.class, InstantiationStrategy.PER_PROJECT)).isTrue(); assertThat(ExtensionUtils.isInstantiationStrategy(new DefaultService(), InstantiationStrategy.PER_PROJECT)).isTrue(); assertThat(ExtensionUtils.isInstantiationStrategy(DefaultScannerService.class, InstantiationStrategy.PER_PROJECT)).isTrue(); assertThat(ExtensionUtils.isInstantiationStrategy(new DefaultScannerService(), InstantiationStrategy.PER_PROJECT)).isTrue(); }
public void extractTablesFromSelect(final SelectStatement selectStatement) { if (selectStatement.getCombine().isPresent()) { CombineSegment combineSegment = selectStatement.getCombine().get(); extractTablesFromSelect(combineSegment.getLeft().getSelect()); extractTablesFromSelect(combineSegment.getRight().getSelect()); } if (selectStatement.getFrom().isPresent() && !selectStatement.getCombine().isPresent()) { extractTablesFromTableSegment(selectStatement.getFrom().get()); } selectStatement.getWhere().ifPresent(optional -> extractTablesFromExpression(optional.getExpr())); if (null != selectStatement.getProjections() && !selectStatement.getCombine().isPresent()) { extractTablesFromProjections(selectStatement.getProjections()); } selectStatement.getGroupBy().ifPresent(optional -> extractTablesFromOrderByItems(optional.getGroupByItems())); selectStatement.getOrderBy().ifPresent(optional -> extractTablesFromOrderByItems(optional.getOrderByItems())); selectStatement.getHaving().ifPresent(optional -> extractTablesFromExpression(optional.getExpr())); selectStatement.getWithSegment().ifPresent(optional -> extractTablesFromCTEs(optional.getCommonTableExpressions())); selectStatement.getLock().ifPresent(this::extractTablesFromLock); }
@Test void assertExtractTablesFromSelectProjects() { AggregationProjectionSegment aggregationProjection = new AggregationProjectionSegment(10, 20, AggregationType.SUM, "SUM(t_order.id)"); ColumnSegment columnSegment = new ColumnSegment(133, 136, new IdentifierValue("id")); columnSegment.setOwner(new OwnerSegment(130, 132, new IdentifierValue("t_order"))); aggregationProjection.getParameters().add(columnSegment); ProjectionsSegment projectionsSegment = new ProjectionsSegment(10, 20); projectionsSegment.getProjections().add(aggregationProjection); SelectStatement selectStatement = mock(SelectStatement.class); when(selectStatement.getProjections()).thenReturn(projectionsSegment); tableExtractor.extractTablesFromSelect(selectStatement); assertThat(tableExtractor.getRewriteTables().size(), is(1)); Iterator<SimpleTableSegment> tableSegmentIterator = tableExtractor.getRewriteTables().iterator(); assertTableSegment(tableSegmentIterator.next(), 130, 132, "t_order"); }
@Override public boolean alterOffsets(Map<String, String> connectorConfig, Map<Map<String, ?>, Map<String, ?>> offsets) { for (Map.Entry<Map<String, ?>, Map<String, ?>> offsetEntry : offsets.entrySet()) { Map<String, ?> sourceOffset = offsetEntry.getValue(); if (sourceOffset == null) { // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't // want to prevent users from being able to clean it up using the REST API continue; } Map<String, ?> sourcePartition = offsetEntry.getKey(); if (sourcePartition == null) { throw new ConnectException("Source partitions may not be null"); } MirrorUtils.validateSourcePartitionString(sourcePartition, CONSUMER_GROUP_ID_KEY); MirrorUtils.validateSourcePartitionString(sourcePartition, TOPIC_KEY); MirrorUtils.validateSourcePartitionPartition(sourcePartition); MirrorUtils.validateSourceOffset(sourcePartition, sourceOffset, true); } // We don't actually use these offsets in the task class, so no additional effort is required beyond just validating // the format of the user-supplied offsets return true; }
@Test public void testAlterOffsetsMissingPartitionKey() { MirrorCheckpointConnector connector = new MirrorCheckpointConnector(); Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets(null, Collections.singletonMap( partition, SOURCE_OFFSET )); Map<String, ?> validPartition = sourcePartition("consumer-app-1", "t", 3); // Sanity check to make sure our valid partition is actually valid assertTrue(alterOffsets.apply(validPartition)); for (String key : Arrays.asList(CONSUMER_GROUP_ID_KEY, TOPIC_KEY, PARTITION_KEY)) { Map<String, ?> invalidPartition = new HashMap<>(validPartition); invalidPartition.remove(key); assertThrows(ConnectException.class, () -> alterOffsets.apply(invalidPartition)); } }
public static Frequency ofMHz(long value) { return new Frequency(value * MHZ); }
@Test
public void testofMHz() {
    Frequency frequency = Frequency.ofMHz(1);
    assertThat(frequency.asKHz(), is(1000.0));
}
@Activate protected void activate() { this.loadConfigs(); log.info("Started"); }
@Test public void badConfig() throws IOException { stageTestResource("badConfig.json"); loader.activate(); assertNull("incorrect configuration", service.component); }
@SuppressWarnings("unchecked") public static <R> R getField(final Object object, final String fieldName) { try { return traverseClassHierarchy( object.getClass(), NoSuchFieldException.class, traversalClass -> { Field field = traversalClass.getDeclaredField(fieldName); field.setAccessible(true); return (R) field.get(object); }); } catch (Exception e) { throw new RuntimeException(e); } }
@Test public void getFieldReflectively_getsInheritedFields() { ExampleDescendant example = new ExampleDescendant(); example.setNotOverridden(6); assertThat((int) ReflectionHelpers.getField(example, "notOverridden")).isEqualTo(6); }
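The traverseClassHierarchy helper (not shown) evidently climbs from the object's class toward Object until getDeclaredField succeeds, which is why a private field declared on an ancestor is still found. A self-contained sketch of that hierarchy walk (hypothetical names, plain reflection):

import java.lang.reflect.Field;

// Walks the class hierarchy so that getDeclaredField can find a private
// field declared on a superclass, mirroring the technique in getField above.
public class FieldWalkSketch {
    static class Base { private int notOverridden = 6; }
    static class Descendant extends Base { }

    static Object readField(Object target, String name) throws Exception {
        for (Class<?> c = target.getClass(); c != null; c = c.getSuperclass()) {
            try {
                Field field = c.getDeclaredField(name);
                field.setAccessible(true);
                return field.get(target);
            } catch (NoSuchFieldException e) {
                // keep climbing towards Object
            }
        }
        throw new NoSuchFieldException(name);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(readField(new Descendant(), "notOverridden")); // 6
    }
}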
public void consumeStringMessage(String messageString) throws IOException { logger.info("Consuming message '{}'", messageString); UserCreatedMessage message = objectMapper.readValue(messageString, UserCreatedMessage.class); Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); Set<ConstraintViolation<UserCreatedMessage>> violations = validator.validate(message); if(!violations.isEmpty()){ throw new ConstraintViolationException(violations); } // pass message into business use case }
@Test @PactVerification("userCreatedMessagePact") public void verifyCreatePersonPact() throws IOException { messageConsumer.consumeStringMessage(new String(this.currentMessage)); }
static <T extends Comparable<? super T>> int compareListWithFillValue( List<T> left, List<T> right, T fillValue) { int longest = Math.max(left.size(), right.size()); for (int i = 0; i < longest; i++) { T leftElement = fillValue; T rightElement = fillValue; if (i < left.size()) { leftElement = left.get(i); } if (i < right.size()) { rightElement = right.get(i); } int compareResult = leftElement.compareTo(rightElement); if (compareResult != 0) { return compareResult; } } return 0; }
@Test public void compareWithFillValue_oneEmptyListAndSmallFillValue_returnsNegative() { assertThat( ComparisonUtility.compareListWithFillValue( Lists.newArrayList(), Lists.newArrayList(1, 2, 3), 0)) .isLessThan(0); }
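Padding the shorter list with the fill value means the empty list compares like [0, 0, 0] here, which is less than [1, 2, 3] at the first position. A standalone sketch mirroring the method above (hypothetical class name):

import java.util.Arrays;
import java.util.List;

// Fill-value comparison: the shorter list is padded with the fill value,
// so [1, 2] compares equal to [1, 2, 0] under fill 0.
public class FillCompareSketch {
    static <T extends Comparable<? super T>> int compare(List<T> left, List<T> right, T fill) {
        int longest = Math.max(left.size(), right.size());
        for (int i = 0; i < longest; i++) {
            T l = i < left.size() ? left.get(i) : fill;
            T r = i < right.size() ? right.get(i) : fill;
            int c = l.compareTo(r);
            if (c != 0) {
                return c;
            }
        }
        return 0;
    }

    public static void main(String[] args) {
        System.out.println(compare(Arrays.<Integer>asList(1, 2), Arrays.asList(1, 2, 0), 0)); // 0
        System.out.println(compare(Arrays.<Integer>asList(), Arrays.asList(1, 2, 3), 0) < 0); // true
    }
}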
@Override public List<PMMLModel> getPMMLModels(PMMLRuntimeContext context) { logger.debug("getPMMLModels {}", context); return PMMLRuntimeHelper.getPMMLModels(context); }
@Test void getPMMLModels() { List<PMMLModel> retrieved = pmmlRuntimeInternal.getPMMLModels(pmmlRuntimeContext); assertThat(retrieved).isNotNull().hasSize(1); PMMLModel pmmlModel = retrieved.get(0); assertThat(pmmlModel.getFileName()).isEqualTo(fileName); assertThat(pmmlModel.getName()).isEqualTo(modelName); }