focal_method: string, length 13 to 60.9k
test_case: string, length 25 to 109k
@Override public byte[] encode(ILoggingEvent event) { final int initialCapacity = event.getThrowableProxy() == null ? DEFAULT_SIZE : DEFAULT_SIZE_WITH_THROWABLE; StringBuilder sb = new StringBuilder(initialCapacity); sb.append(OPEN_OBJ); if (withSequenceNumber) { appenderMemberWithLongValue(sb, SEQUENCE_NUMBER_ATTR_NAME, event.getSequenceNumber()); sb.append(VALUE_SEPARATOR); } if (withTimestamp) { appenderMemberWithLongValue(sb, TIMESTAMP_ATTR_NAME, event.getTimeStamp()); sb.append(VALUE_SEPARATOR); } if (withNanoseconds) { appenderMemberWithLongValue(sb, NANOSECONDS_ATTR_NAME, event.getNanoseconds()); sb.append(VALUE_SEPARATOR); } if (withLevel) { String levelStr = event.getLevel() != null ? event.getLevel().levelStr : NULL_STR; appenderMember(sb, LEVEL_ATTR_NAME, levelStr); sb.append(VALUE_SEPARATOR); } if (withThreadName) { appenderMember(sb, THREAD_NAME_ATTR_NAME, jsonEscape(event.getThreadName())); sb.append(VALUE_SEPARATOR); } if (withLoggerName) { appenderMember(sb, LOGGER_ATTR_NAME, event.getLoggerName()); sb.append(VALUE_SEPARATOR); } if (withContext) { appendLoggerContext(sb, event.getLoggerContextVO()); sb.append(VALUE_SEPARATOR); } if (withMarkers) appendMarkers(sb, event); if (withMDC) appendMDC(sb, event); if (withKVPList) appendKeyValuePairs(sb, event); if (withMessage) { appenderMember(sb, MESSAGE_ATTR_NAME, jsonEscape(event.getMessage())); sb.append(VALUE_SEPARATOR); } if (withFormattedMessage) { appenderMember(sb, FORMATTED_MESSAGE_ATTR_NAME, jsonEscape(event.getFormattedMessage())); sb.append(VALUE_SEPARATOR); } if (withArguments) appendArgumentArray(sb, event); if (withThrowable) appendThrowableProxy(sb, THROWABLE_ATTR_NAME, event.getThrowableProxy()); sb.append(CLOSE_OBJ); sb.append(CoreConstants.JSON_LINE_SEPARATOR); return sb.toString().getBytes(UTF_8_CHARSET); }
@Test void smoke() throws JsonProcessingException { LoggingEvent event = new LoggingEvent("x", logger, Level.WARN, "hello", null, null); byte[] resultBytes = jsonEncoder.encode(event); String resultString = new String(resultBytes, StandardCharsets.UTF_8); //System.out.println(resultString); JsonLoggingEvent resultEvent = stringToLoggingEventMapper.mapStringToLoggingEvent(resultString); compareEvents(event, resultEvent); }
public static <T> GoConfigClassLoader<T> classParser(Element e, Class<T> aClass, ConfigCache configCache, GoCipher goCipher, final ConfigElementImplementationRegistry registry, ConfigReferenceElements configReferenceElements) { return new GoConfigClassLoader<>(e, aClass, configCache, goCipher, registry, configReferenceElements); }
@Test public void shouldErrorOutIfElementDoesNotHaveConfigTagAnnotation() { final Element element = new Element("cruise"); final GoConfigClassLoader<ConfigWithoutAnnotation> loader = GoConfigClassLoader.classParser(element, ConfigWithoutAnnotation.class, configCache, goCipher, registry, referenceElements); assertThatThrownBy(loader::parse) .isInstanceOf(RuntimeException.class) .hasMessageContaining("Unable to parse element <cruise> for class ConfigWithoutAnnotation"); }
public static RecordBuilder<Schema> record(String name) { return builder().record(name); }
@Test void validateDefaultsDisabled() { final String fieldName = "IntegerField"; final String defaultValue = "foo"; Schema schema = SchemaBuilder.record("ValidationRecord").fields().name(fieldName).notValidatingDefaults() .type("int").withDefault(defaultValue) // Would throw an exception on endRecord() if validations enabled .endRecord(); assertNull(schema.getField(fieldName).defaultVal(), "Differing types, so this returns null"); assertEquals(defaultValue, schema.getField(fieldName).defaultValue().asText(), "Schema is able to be successfully created as is without validation"); }
public boolean isHealthy() { Optional<Boolean> operatorsAreReady = areOperatorsStarted(operators); if (operatorsAreReady.isEmpty() || !operatorsAreReady.get()) { return false; } Optional<Boolean> runtimeInfosAreHealthy = operators.stream() .map(operator -> checkInformersHealth(operator.getRuntimeInfo())) .reduce((a, b) -> a && b); if (runtimeInfosAreHealthy.isEmpty() || !runtimeInfosAreHealthy.get()) { return false; } for (SentinelManager<?> sentinelManager : sentinelManagers) { if (!sentinelManager.allSentinelsAreHealthy()) { log.error("One sentinel manager {} reported an unhealthy condition.", sentinelManager); return false; } } return true; }
@Test void testHealthProbeWithInformerHealthWithMultiOperators() { HealthProbe healthyProbe = new HealthProbe(operators, Collections.emptyList()); isRunning.set(true); assertFalse( healthyProbe.isHealthy(), "Healthy Probe should fail when the spark conf monitor operator is not running"); isRunning2.set(true); assertTrue( healthyProbe.isHealthy(), "Healthy Probe should pass when both operators are running"); unhealthyEventSources2.put( "c1", Map.of("e1", informerHealthIndicator(Map.of("i1", Status.UNHEALTHY)))); assertFalse( healthyProbe.isHealthy(), "Healthy Probe should fail when monitor's informer health is not healthy"); unhealthyEventSources2.clear(); assertTrue(healthyProbe.isHealthy(), "Healthy Probe should pass"); }
public static Class<?> maxType(Class<?>... numericTypes) { Preconditions.checkArgument(numericTypes.length >= 2); int maxIndex = 0; for (Class<?> numericType : numericTypes) { int index; if (isPrimitive(numericType)) { index = sortedPrimitiveClasses.indexOf(numericType); } else { index = sortedBoxedClasses.indexOf(numericType); } if (index == -1) { throw new IllegalArgumentException( String.format("Wrong numericTypes %s", Arrays.toString(numericTypes))); } maxIndex = Math.max(maxIndex, index); } return sortedPrimitiveClasses.get(maxIndex); }
@Test public void testMaxType() { assertEquals(TypeUtils.maxType(int.class, long.class), long.class); assertEquals(TypeUtils.maxType(long.class, int.class), long.class); assertEquals(TypeUtils.maxType(float.class, long.class), long.class); assertEquals(TypeUtils.maxType(long.class, float.class), long.class); List<Class<?>> classes = Arrays.asList( void.class, boolean.class, byte.class, char.class, short.class, int.class, float.class, long.class, double.class); for (int i = 0; i < classes.size() - 1; i++) { assertEquals(TypeUtils.maxType(classes.get(i), classes.get(i + 1)), classes.get(i + 1)); } }
@Override public void initialize(URI uri, Configuration conf) throws IOException { requireNonNull(uri, "uri is null"); requireNonNull(conf, "conf is null"); super.initialize(uri, conf); setConf(conf); this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority()); this.workingDirectory = new Path(PATH_SEPARATOR).makeQualified(this.uri, new Path(PATH_SEPARATOR)); HiveS3Config defaults = new HiveS3Config(); this.stagingDirectory = new File(conf.get(S3_STAGING_DIRECTORY, defaults.getS3StagingDirectory().toString())); this.maxAttempts = conf.getInt(S3_MAX_CLIENT_RETRIES, defaults.getS3MaxClientRetries()) + 1; this.maxBackoffTime = Duration.valueOf(conf.get(S3_MAX_BACKOFF_TIME, defaults.getS3MaxBackoffTime().toString())); this.maxRetryTime = Duration.valueOf(conf.get(S3_MAX_RETRY_TIME, defaults.getS3MaxRetryTime().toString())); int maxErrorRetries = conf.getInt(S3_MAX_ERROR_RETRIES, defaults.getS3MaxErrorRetries()); boolean sslEnabled = conf.getBoolean(S3_SSL_ENABLED, defaults.isS3SslEnabled()); Duration connectTimeout = Duration.valueOf(conf.get(S3_CONNECT_TIMEOUT, defaults.getS3ConnectTimeout().toString())); Duration socketTimeout = Duration.valueOf(conf.get(S3_SOCKET_TIMEOUT, defaults.getS3SocketTimeout().toString())); int maxConnections = conf.getInt(S3_MAX_CONNECTIONS, defaults.getS3MaxConnections()); this.multiPartUploadMinFileSize = conf.getLong(S3_MULTIPART_MIN_FILE_SIZE, defaults.getS3MultipartMinFileSize().toBytes()); this.multiPartUploadMinPartSize = conf.getLong(S3_MULTIPART_MIN_PART_SIZE, defaults.getS3MultipartMinPartSize().toBytes()); this.isPathStyleAccess = conf.getBoolean(S3_PATH_STYLE_ACCESS, defaults.isS3PathStyleAccess()); this.useInstanceCredentials = conf.getBoolean(S3_USE_INSTANCE_CREDENTIALS, defaults.isS3UseInstanceCredentials()); this.pinS3ClientToCurrentRegion = conf.getBoolean(S3_PIN_CLIENT_TO_CURRENT_REGION, defaults.isPinS3ClientToCurrentRegion()); this.s3IamRole = conf.get(S3_IAM_ROLE, defaults.getS3IamRole()); this.s3IamRoleSessionName = conf.get(S3_IAM_ROLE_SESSION_NAME, defaults.getS3IamRoleSessionName()); verify(!(useInstanceCredentials && conf.get(S3_IAM_ROLE) != null), "Invalid configuration: either use instance credentials or specify an iam role"); verify((pinS3ClientToCurrentRegion && conf.get(S3_ENDPOINT) == null) || !pinS3ClientToCurrentRegion, "Invalid configuration: either endpoint can be set or S3 client can be pinned to the current region"); this.sseEnabled = conf.getBoolean(S3_SSE_ENABLED, defaults.isS3SseEnabled()); this.sseType = PrestoS3SseType.valueOf(conf.get(S3_SSE_TYPE, defaults.getS3SseType().name())); this.sseKmsKeyId = conf.get(S3_SSE_KMS_KEY_ID, defaults.getS3SseKmsKeyId()); this.s3AclType = PrestoS3AclType.valueOf(conf.get(S3_ACL_TYPE, defaults.getS3AclType().name())); String userAgentPrefix = conf.get(S3_USER_AGENT_PREFIX, defaults.getS3UserAgentPrefix()); this.skipGlacierObjects = conf.getBoolean(S3_SKIP_GLACIER_OBJECTS, defaults.isSkipGlacierObjects()); this.s3StorageClass = conf.getEnum(S3_STORAGE_CLASS, defaults.getS3StorageClass()); ClientConfiguration configuration = new ClientConfiguration() .withMaxErrorRetry(maxErrorRetries) .withProtocol(sslEnabled ? Protocol.HTTPS : Protocol.HTTP) .withConnectionTimeout(toIntExact(connectTimeout.toMillis())) .withSocketTimeout(toIntExact(socketTimeout.toMillis())) .withMaxConnections(maxConnections) .withUserAgentPrefix(userAgentPrefix) .withUserAgentSuffix(S3_USER_AGENT_SUFFIX); this.credentialsProvider = createAwsCredentialsProvider(uri, conf); this.s3 = createAmazonS3Client(conf, configuration); }
@Test public void testCustomCredentialsProvider() throws Exception { Configuration config = new Configuration(); config.set(S3_USE_INSTANCE_CREDENTIALS, "false"); config.set(S3_CREDENTIALS_PROVIDER, TestCredentialsProvider.class.getName()); try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) { fs.initialize(new URI("s3n://test-bucket/"), config); assertInstanceOf(getAwsCredentialsProvider(fs), TestCredentialsProvider.class); } }
@Override public int compareTo(ByteBuf that) { return ByteBufUtil.compare(this, that); }
@Test public void testCompareTo() { try { buffer.compareTo(null); fail(); } catch (NullPointerException e) { // Expected } // Fill the random stuff byte[] value = new byte[32]; random.nextBytes(value); // Prevent overflow / underflow if (value[0] == 0) { value[0] ++; } else if (value[0] == -1) { value[0] --; } buffer.setIndex(0, value.length); buffer.setBytes(0, value); assertEquals(0, buffer.compareTo(wrappedBuffer(value))); assertEquals(0, buffer.compareTo(wrappedBuffer(value).order(LITTLE_ENDIAN))); value[0] ++; assertTrue(buffer.compareTo(wrappedBuffer(value)) < 0); assertTrue(buffer.compareTo(wrappedBuffer(value).order(LITTLE_ENDIAN)) < 0); value[0] -= 2; assertTrue(buffer.compareTo(wrappedBuffer(value)) > 0); assertTrue(buffer.compareTo(wrappedBuffer(value).order(LITTLE_ENDIAN)) > 0); value[0] ++; assertTrue(buffer.compareTo(wrappedBuffer(value, 0, 31)) > 0); assertTrue(buffer.compareTo(wrappedBuffer(value, 0, 31).order(LITTLE_ENDIAN)) > 0); assertTrue(buffer.slice(0, 31).compareTo(wrappedBuffer(value)) < 0); assertTrue(buffer.slice(0, 31).compareTo(wrappedBuffer(value).order(LITTLE_ENDIAN)) < 0); ByteBuf retainedSlice = buffer.retainedSlice(0, 31); assertTrue(retainedSlice.compareTo(wrappedBuffer(value)) < 0); retainedSlice.release(); retainedSlice = buffer.retainedSlice(0, 31); assertTrue(retainedSlice.compareTo(wrappedBuffer(value).order(LITTLE_ENDIAN)) < 0); retainedSlice.release(); }
public UriProperties(String clusterName, Map<URI, Map<Integer, PartitionData>> partitionDescriptions) { this(clusterName, partitionDescriptions, Collections.<URI, Map<String, Object>>emptyMap()); }
@Test public void testUriProperties() { Map<URI, Map<Integer, PartitionData>> uriData = new HashMap<>(); uriData.put(URI_1, MAP_1); uriData.put(URI_2, MAP_2); uriData.put(URI_3, MAP_3); String clusterName = "TestCluster"; UriProperties properties = new UriProperties(clusterName, uriData); // test construction Assert.assertEquals(clusterName, properties.getClusterName()); Assert.assertEquals(properties.getPartitionDesc(), uriData); Assert.assertEquals(properties.Uris(), uriData.keySet()); Assert.assertEquals(properties.getPartitionDataMap(URI_1), MAP_1); Assert.assertEquals(properties.getPartitionDataMap(URI_2), MAP_2); Assert.assertEquals(properties.getPartitionDataMap(URI_3), MAP_3); // test getUriBySchemeAndPartition Set<URI> set = new HashSet<>(1); set.add(URI_1); Assert.assertEquals(properties.getUriBySchemeAndPartition("http", 0), set); set.add(URI_2); Assert.assertEquals(properties.getUriBySchemeAndPartition("http", 1), set); set.clear(); set.add(URI_3); Assert.assertEquals(properties.getUriBySchemeAndPartition("https", 1), set); Assert.assertNull(properties.getUriBySchemeAndPartition("rtp", 0)); Assert.assertNull(properties.getUriBySchemeAndPartition("http", 2)); // test unmodifiability Map<URI, Map<Integer, PartitionData>> partitionDesc = properties.getPartitionDesc(); Map<Integer, PartitionData> partitionDataMap = properties.getPartitionDataMap(URI_1); URI testUri = URI.create("test"); try { partitionDesc.put(testUri, null); Assert.fail("Should not be modifiable"); } catch (UnsupportedOperationException ignored) { } try { partitionDataMap.put(1, new PartitionData(1)); Assert.fail("Should not be modifiable"); } catch (UnsupportedOperationException ignored) { } }
@Override public void putConnectorConfig(String connector, Map<String, String> properties, TargetState targetState) { log.debug("Writing connector configuration for connector '{}'", connector); Struct connectConfig = new Struct(CONNECTOR_CONFIGURATION_V0); connectConfig.put("properties", properties); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_CONFIGURATION_V0, connectConfig); try { Timer timer = time.timer(READ_WRITE_TOTAL_TIMEOUT_MS); List<ProducerKeyValue> keyValues = new ArrayList<>(); if (targetState != null) { log.debug("Writing target state {} for connector {}", targetState, connector); keyValues.add(new ProducerKeyValue(TARGET_STATE_KEY(connector), serializeTargetState(targetState))); } keyValues.add(new ProducerKeyValue(CONNECTOR_KEY(connector), serializedConfig)); sendPrivileged(keyValues, timer); configLog.readToEnd().get(timer.remainingMs(), TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write connector configuration to Kafka: ", e); throw new ConnectException("Error writing connector configuration to Kafka", e); } }
@Test public void testPutConnectorConfig() throws Exception { when(configLog.partitionCount()).thenReturn(1); configStorage.setupAndCreateKafkaBasedLog(TOPIC, config); verifyConfigure(); configStorage.start(); // Null before writing ClusterConfigState configState = configStorage.snapshot(); assertEquals(-1, configState.offset()); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(0))); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(1))); String configKey = CONNECTOR_CONFIG_KEYS.get(1); String targetStateKey = TARGET_STATE_KEYS.get(1); doAnswer(expectReadToEnd(Collections.singletonMap(CONNECTOR_CONFIG_KEYS.get(0), CONFIGS_SERIALIZED.get(0)))) .doAnswer(expectReadToEnd(Collections.singletonMap(CONNECTOR_CONFIG_KEYS.get(1), CONFIGS_SERIALIZED.get(1)))) // Config deletion .doAnswer(expectReadToEnd(new LinkedHashMap<String, byte[]>() {{ put(configKey, null); put(targetStateKey, null); }}) ).when(configLog).readToEnd(); // Writing should block until it is written and read back from Kafka expectConvertWriteRead( CONNECTOR_CONFIG_KEYS.get(0), KafkaConfigBackingStore.CONNECTOR_CONFIGURATION_V0, CONFIGS_SERIALIZED.get(0), "properties", SAMPLE_CONFIGS.get(0)); configStorage.putConnectorConfig(CONNECTOR_IDS.get(0), SAMPLE_CONFIGS.get(0), null); configState = configStorage.snapshot(); assertEquals(1, configState.offset()); assertEquals(SAMPLE_CONFIGS.get(0), configState.connectorConfig(CONNECTOR_IDS.get(0))); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(1))); verify(configUpdateListener).onConnectorConfigUpdate(CONNECTOR_IDS.get(0)); // Second should also block and all configs should still be available expectConvertWriteRead( CONNECTOR_CONFIG_KEYS.get(1), KafkaConfigBackingStore.CONNECTOR_CONFIGURATION_V0, CONFIGS_SERIALIZED.get(1), "properties", SAMPLE_CONFIGS.get(1)); configStorage.putConnectorConfig(CONNECTOR_IDS.get(1), SAMPLE_CONFIGS.get(1), null); configState = configStorage.snapshot(); assertEquals(2, configState.offset()); assertEquals(SAMPLE_CONFIGS.get(0), configState.connectorConfig(CONNECTOR_IDS.get(0))); assertEquals(SAMPLE_CONFIGS.get(1), configState.connectorConfig(CONNECTOR_IDS.get(1))); verify(configUpdateListener).onConnectorConfigUpdate(CONNECTOR_IDS.get(1)); // Config deletion when(producerFuture.get(anyLong(), any(TimeUnit.class))).thenReturn(null); when(converter.toConnectData(TOPIC, null)).thenReturn(new SchemaAndValue(null, null)); when(configLog.sendWithReceipt(AdditionalMatchers.or(Mockito.eq(configKey), Mockito.eq(targetStateKey)), Mockito.isNull())).thenReturn(producerFuture); // Deletion should remove the second one we added configStorage.removeConnectorConfig(CONNECTOR_IDS.get(1)); configState = configStorage.snapshot(); assertEquals(4, configState.offset()); assertEquals(SAMPLE_CONFIGS.get(0), configState.connectorConfig(CONNECTOR_IDS.get(0))); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(1))); assertNull(configState.targetState(CONNECTOR_IDS.get(1))); verify(configUpdateListener).onConnectorConfigRemove(CONNECTOR_IDS.get(1)); configStorage.stop(); verify(configLog).stop(); }
protected boolean isSimpleTypeNode(JsonNode jsonNode) { if (!jsonNode.isObject()) { return false; } ObjectNode objectNode = (ObjectNode) jsonNode; int numberOfFields = objectNode.size(); return numberOfFields == 1 && objectNode.has(VALUE); }
@Test public void isSimpleTypeNode_nodeWithValueFieldAndOtherField() { ObjectNode jsonNode = new ObjectNode(factory); jsonNode.set(VALUE, new TextNode("test")); jsonNode.set("otherField", new TextNode("testValue")); assertThat(expressionEvaluator.isSimpleTypeNode(jsonNode)).isFalse(); }
@Override public int run(String[] args) throws Exception { YarnConfiguration yarnConf = getConf() == null ? new YarnConfiguration() : new YarnConfiguration( getConf()); boolean isHAEnabled = yarnConf.getBoolean(YarnConfiguration.RM_HA_ENABLED, YarnConfiguration.DEFAULT_RM_HA_ENABLED); if (args.length < 1) { printUsage("", isHAEnabled); return -1; } int exitCode = -1; int i = 0; String cmd = args[i++]; exitCode = 0; if ("-help".equals(cmd)) { if (i < args.length) { printUsage(args[i], isHAEnabled); } else { printHelp("", isHAEnabled); } return exitCode; } if (USAGE.containsKey(cmd)) { if (isHAEnabled) { return super.run(args); } System.out.println("Cannot run " + cmd + " when ResourceManager HA is not enabled"); return -1; } // // verify that we have enough command line parameters // String subClusterId = StringUtils.EMPTY; if ("-refreshAdminAcls".equals(cmd) || "-refreshQueues".equals(cmd) || "-refreshNodesResources".equals(cmd) || "-refreshServiceAcl".equals(cmd) || "-refreshUserToGroupsMappings".equals(cmd) || "-refreshSuperUserGroupsConfiguration".equals(cmd) || "-refreshClusterMaxPriority".equals(cmd)) { subClusterId = parseSubClusterId(args, isHAEnabled); // If we enable Federation mode, the number of args may be either one or three. // Example: -refreshQueues or -refreshQueues -subClusterId SC-1 if (isYarnFederationEnabled(getConf()) && args.length != 1 && args.length != 3) { printUsage(cmd, isHAEnabled); return exitCode; } else if (!isYarnFederationEnabled(getConf()) && args.length != 1) { // If Federation mode is not enabled, then the number of args can only be one. // Example: -refreshQueues printUsage(cmd, isHAEnabled); return exitCode; } } // If it is federation mode, we will print federation mode information if (isYarnFederationEnabled(getConf())) { System.out.println("Using YARN Federation mode."); } try { if ("-refreshQueues".equals(cmd)) { exitCode = refreshQueues(subClusterId); } else if ("-refreshNodes".equals(cmd)) { exitCode = handleRefreshNodes(args, cmd, isHAEnabled); } else if ("-refreshNodesResources".equals(cmd)) { exitCode = refreshNodesResources(subClusterId); } else if ("-refreshUserToGroupsMappings".equals(cmd)) { exitCode = refreshUserToGroupsMappings(subClusterId); } else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) { exitCode = refreshSuperUserGroupsConfiguration(subClusterId); } else if ("-refreshAdminAcls".equals(cmd)) { exitCode = refreshAdminAcls(subClusterId); } else if ("-refreshServiceAcl".equals(cmd)) { exitCode = refreshServiceAcls(subClusterId); } else if ("-refreshClusterMaxPriority".equals(cmd)) { exitCode = refreshClusterMaxPriority(subClusterId); } else if ("-getGroups".equals(cmd)) { String[] usernames = Arrays.copyOfRange(args, i, args.length); exitCode = getGroups(usernames); } else if ("-updateNodeResource".equals(cmd)) { exitCode = handleUpdateNodeResource(args, cmd, isHAEnabled, subClusterId); } else if ("-addToClusterNodeLabels".equals(cmd)) { exitCode = handleAddToClusterNodeLabels(args, cmd, isHAEnabled); } else if ("-removeFromClusterNodeLabels".equals(cmd)) { exitCode = handleRemoveFromClusterNodeLabels(args, cmd, isHAEnabled); } else if ("-replaceLabelsOnNode".equals(cmd)) { exitCode = handleReplaceLabelsOnNodes(args, cmd, isHAEnabled); } else { exitCode = -1; System.err.println(cmd.substring(1) + ": Unknown command"); printUsage("", isHAEnabled); } } catch (IllegalArgumentException arge) { exitCode = -1; System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage()); printUsage(cmd, isHAEnabled); } catch (RemoteException e) { // // This is an error returned by the hadoop server. Print // out the first line of the error message, ignore the stack trace. exitCode = -1; try { String[] content; content = e.getLocalizedMessage().split("\n"); System.err.println(cmd.substring(1) + ": " + content[0]); } catch (Exception ex) { System.err.println(cmd.substring(1) + ": " + ex.getLocalizedMessage()); } } catch (Exception e) { exitCode = -1; System.err.println(cmd.substring(1) + ": " + e.getLocalizedMessage()); } if (null != localNodeLabelsManager) { localNodeLabelsManager.stop(); } return exitCode; }
@Test public void testNoUnsupportedHACommandsInHelp() throws Exception { ByteArrayOutputStream dataErr = new ByteArrayOutputStream(); System.setErr(new PrintStream(dataErr)); String[] args = {}; assertEquals(-1, rmAdminCLIWithHAEnabled.run(args)); String errOut = dataErr.toString(); assertFalse(errOut.contains("-transitionToObserver")); dataErr.reset(); String[] args1 = {"-transitionToObserver"}; assertEquals(-1, rmAdminCLIWithHAEnabled.run(args1)); errOut = dataErr.toString(); assertTrue(errOut.contains("transitionToObserver: Unknown command")); dataErr.reset(); args1[0] = "-failover"; assertEquals(-1, rmAdminCLIWithHAEnabled.run(args1)); errOut = dataErr.toString(); assertTrue(errOut.contains("failover: Unknown command")); dataErr.reset(); String[] args2 = {"-help", "-transitionToObserver"}; assertEquals(0, rmAdminCLIWithHAEnabled.run(args2)); errOut = dataErr.toString(); assertFalse(errOut.contains("-transitionToObserver")); dataErr.reset(); args2[1] = "-failover"; assertEquals(0, rmAdminCLIWithHAEnabled.run(args2)); errOut = dataErr.toString(); assertFalse(errOut.contains("-failover")); dataErr.reset(); }
public Matcher parse(String xpath) { if (xpath.equals("/text()")) { return TextMatcher.INSTANCE; } else if (xpath.equals("/node()")) { return NodeMatcher.INSTANCE; } else if (xpath.equals("/descendant::node()") || xpath.equals("/descendant:node()")) { // for compatibility return new CompositeMatcher(TextMatcher.INSTANCE, new ChildMatcher(new SubtreeMatcher(NodeMatcher.INSTANCE))); } else if (xpath.equals("/@*")) { return AttributeMatcher.INSTANCE; } else if (xpath.length() == 0) { return ElementMatcher.INSTANCE; } else if (xpath.startsWith("/@")) { String name = xpath.substring(2); String prefix = null; int colon = name.indexOf(':'); if (colon != -1) { prefix = name.substring(0, colon); name = name.substring(colon + 1); } if (prefixes.containsKey(prefix)) { return new NamedAttributeMatcher(prefixes.get(prefix), name); } else { return Matcher.FAIL; } } else if (xpath.startsWith("/*")) { return new ChildMatcher(parse(xpath.substring(2))); } else if (xpath.startsWith("///")) { return Matcher.FAIL; } else if (xpath.startsWith("//")) { return new SubtreeMatcher(parse(xpath.substring(1))); } else if (xpath.startsWith("/")) { int slash = xpath.indexOf('/', 1); if (slash == -1) { slash = xpath.length(); } String name = xpath.substring(1, slash); String prefix = null; int colon = name.indexOf(':'); if (colon != -1) { prefix = name.substring(0, colon); name = name.substring(colon + 1); } if (prefixes.containsKey(prefix)) { return new NamedElementMatcher(prefixes.get(prefix), name, parse(xpath.substring(slash))); } else { return Matcher.FAIL; } } else { return Matcher.FAIL; } }
@Test public void testNamedElement() { Matcher matcher = parser.parse("/name"); assertFalse(matcher.matchesText()); assertFalse(matcher.matchesElement()); assertFalse(matcher.matchesAttribute(null, "name")); assertFalse(matcher.matchesAttribute(NS, "name")); assertFalse(matcher.matchesAttribute(NS, "eman")); assertEquals(Matcher.FAIL, matcher.descend(NS, "name")); assertEquals(Matcher.FAIL, matcher.descend(null, "enam")); matcher = matcher.descend(null, "name"); assertFalse(matcher.matchesText()); assertTrue(matcher.matchesElement()); assertFalse(matcher.matchesAttribute(null, "name")); assertFalse(matcher.matchesAttribute(NS, "name")); assertFalse(matcher.matchesAttribute(NS, "eman")); }
@Override public ExecuteContext before(ExecuteContext context) { Object object = context.getObject(); String serviceId = getServiceId(object).orElse(null); if (StringUtils.isBlank(serviceId)) { return context; } Object obj = context.getMemberFieldValue("serviceInstances"); if (obj instanceof Flux<?>) { List<Object> instances = getInstances((Flux<Object>) obj, object); if (CollectionUtils.isEmpty(instances)) { return context; } RequestData requestData = ThreadLocalUtils.getRequestData(); List<Object> targetInstances = loadBalancerService.getTargetInstances(serviceId, instances, requestData); context.skip(Flux.just(targetInstances)); } return context; }
@Test public void testBeforeWithInvalidObject() { ThreadLocalUtils.setRequestData(new RequestData(Collections.emptyMap(), "", "")); ExecuteContext context = ExecuteContext.forMemberMethod(new Object(), null, null, null, null); interceptor.before(context); Assert.assertNotNull(context.getObject()); }
public boolean isKeyColumn(final ColumnName columnName) { return findColumnMatching(withNamespace(Namespace.KEY).and(withName(columnName))) .isPresent(); }
@Test public void shouldMatchMetaColumnName() { assertThat(SystemColumns.isPseudoColumn(ROWTIME_NAME), is(true)); assertThat(SOME_SCHEMA.isKeyColumn(ROWTIME_NAME), is(false)); }
@Override public void render(Node node, Appendable output) { RendererContext context = new RendererContext(new MarkdownWriter(output)); context.render(node); }
@Test public void testEmphasis() { assertRoundTrip("*foo*\n"); assertRoundTrip("foo*bar*\n"); // When nesting, a different delimiter needs to be used assertRoundTrip("*_foo_*\n"); assertRoundTrip("*_*foo*_*\n"); assertRoundTrip("_*foo*_\n"); // Not emphasis (needs * inside words) assertRoundTrip("foo\\_bar\\_\n"); // Even when rendering a manually constructed tree, the emphasis delimiter needs to be chosen correctly. Document doc = new Document(); Paragraph p = new Paragraph(); doc.appendChild(p); Emphasis e1 = new Emphasis(); p.appendChild(e1); Emphasis e2 = new Emphasis(); e1.appendChild(e2); e2.appendChild(new Text("hi")); assertEquals("*_hi_*\n", render(doc)); }
static Object[] adjustByCoercion(Class<?>[] parameterTypes, Object[] actualParams) { logger.trace("adjustByCoercion {} {}", parameterTypes, actualParams); Object[] toReturn = actualParams; int counter = Math.min(parameterTypes.length, actualParams.length); for (int i = 0; i < counter; i++) { Class<?> expectedParameterType = parameterTypes[i]; Optional<Object[]> coercedParams; Object actualParam = actualParams[i]; if (actualParam != null) { Class<?> currentIdxActualParameterType = actualParam.getClass(); if (expectedParameterType.isAssignableFrom(currentIdxActualParameterType)) { // not null object assignable to expected type: no need to coerce coercedParams = Optional.of(toReturn); } else { // attempt to coerce coercedParams = coerceParams(currentIdxActualParameterType, expectedParameterType, toReturn, i); } } else { // null object - no need to coerce coercedParams = Optional.of(toReturn); } if (coercedParams.isPresent()) { toReturn = coercedParams.get(); continue; } return null; } return toReturn; }
@Test void adjustByCoercion() { // no coercion needed Object actualParam = List.of(true, false); Class<?>[] parameterTypes = new Class[]{List.class}; Object[] actualParams = {actualParam}; Object[] retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertEquals(actualParams, retrieved); actualParam = "StringA"; parameterTypes = new Class[]{String.class}; actualParams = new Object[]{actualParam}; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertEquals(actualParams, retrieved); // more objects whose types are already assignable: no coercion needed parameterTypes = new Class[]{String.class, Integer.class}; actualParams = new Object[]{"String", 34 }; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertEquals(actualParams, retrieved); // not coercing null value to not-list type actualParam = null; actualParams = new Object[]{actualParam}; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertEquals(actualParams, retrieved); // not coercing null value to singleton list parameterTypes = new Class[]{List.class}; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertEquals(actualParams, retrieved); // coercing not-null value to singleton list actualParam = "StringA"; actualParams = new Object[]{actualParam}; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertNotNull(retrieved); assertNotEquals(actualParams, retrieved); assertEquals(1, retrieved.length); assertNotNull(retrieved[0]); assertThat(retrieved[0]).isInstanceOf(List.class); List retrievedList = (List) retrieved[0]; assertEquals(1, retrievedList.size()); assertEquals(actualParam, retrievedList.get(0)); // coercing not-null value to array: fails parameterTypes = new Class[]{Object.class.arrayType()}; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertNull(retrieved); // coercing one object to different type: fails actualParam = 45; parameterTypes = new Class[]{String.class}; actualParams = new Object[]{actualParam}; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertNull(retrieved); // coercing more objects to different types: fails parameterTypes = new Class[]{String.class, Integer.class}; actualParams = new Object[]{"String", "34" }; retrieved = BaseFEELFunctionHelper.adjustByCoercion(parameterTypes, actualParams); assertNull(retrieved); }
@Override public String symmetricEncryptType() { return "AES"; }
@Test public void symmetricEncryptType() { SARSAEncrypt rsaEncrypt = new SARSAEncrypt(); Assert.assertEquals("AES", rsaEncrypt.symmetricEncryptType()); }
public String format(AlarmEntity alarmEntity) { StringBuilder message = new StringBuilder(); for (int i = 0; i < formatSegments.size(); i++) { message.append(formatSegments.get(i)); if (i != formatSegments.size() - 1) { switch (valueFroms.get(i)) { case ID: message.append(alarmEntity.getId0()); break; case NAME: message.append(alarmEntity.getName()); } } } return message.toString(); }
@Test public void testStringFormatWithNoArg() { AlarmMessageFormatter formatter = new AlarmMessageFormatter("abc words {sdf"); String message = formatter.format(new AlarmEntity("SERVICE", -1, null, "", "")); Assertions.assertEquals("abc words {sdf", message); }
@Override public R apply(R record) { final Matcher matcher = regex.matcher(record.topic()); if (matcher.matches()) { final String topic = matcher.replaceFirst(replacement); log.trace("Rerouting from topic '{}' to new topic '{}'", record.topic(), topic); return record.newRecord(topic, record.kafkaPartition(), record.keySchema(), record.key(), record.valueSchema(), record.value(), record.timestamp()); } else { log.trace("Not rerouting topic '{}' as it does not match the configured regex", record.topic()); } return record; }
@Test public void doesntMatch() { assertEquals("orig", apply("foo", "bar", "orig")); }
@Override public long getPeriod() { return config.getLong(PERIOD_IN_MILISECONDS_PROPERTY).orElse(30_000L); }
@Test public void getPeriod_returnNumberFromConfig() { config.put("sonar.server.monitoring.ce.period", "100000"); long delay = underTest.getPeriod(); assertThat(delay).isEqualTo(100_000L); }
public static Protocol parse(File file) throws IOException { try (JsonParser jsonParser = Schema.FACTORY.createParser(file)) { return parse(jsonParser); } }
@Test public void parse() throws IOException { File fic = new File("target/test-classes/share/test/schemas/namespace.avpr"); Protocol protocol = Protocol.parse(fic); assertNotNull(protocol); assertEquals("TestNamespace", protocol.getName()); }
@Override public Flux<ReactiveRedisConnection.BooleanResponse<RenameCommand>> renameNX(Publisher<RenameCommand> commands) { return execute(commands, command -> { Assert.notNull(command.getKey(), "Key must not be null!"); Assert.notNull(command.getNewName(), "New name must not be null!"); byte[] keyBuf = toByteArray(command.getKey()); byte[] newKeyBuf = toByteArray(command.getNewName()); if (executorService.getConnectionManager().calcSlot(keyBuf) == executorService.getConnectionManager().calcSlot(newKeyBuf)) { return super.renameNX(commands); } return exists(command.getNewName()) .zipWith(read(keyBuf, ByteArrayCodec.INSTANCE, RedisCommands.DUMP, keyBuf)) .filter(newKeyExistsAndDump -> !newKeyExistsAndDump.getT1() && Objects.nonNull(newKeyExistsAndDump.getT2())) .map(Tuple2::getT2) .zipWhen(value -> pTtl(command.getKey()) .filter(Objects::nonNull) .map(ttl -> Math.max(0, ttl)) .switchIfEmpty(Mono.just(0L)) ) .flatMap(valueAndTtl -> write(newKeyBuf, StringCodec.INSTANCE, RedisCommands.RESTORE, newKeyBuf, valueAndTtl.getT2(), valueAndTtl.getT1()) .then(Mono.just(true))) .switchIfEmpty(Mono.just(false)) .doOnSuccess(didRename -> { if (didRename) { del(command.getKey()); } }) .map(didRename -> new BooleanResponse<>(command, didRename)); }); }
@Test public void testRenameNX() { connection.stringCommands().set(originalKey, value).block(); if (hasTtl) { connection.keyCommands().expire(originalKey, Duration.ofSeconds(1000)).block(); } Integer originalSlot = getSlotForKey(originalKey); newKey = getNewKeyForSlot(new String(originalKey.array()), getTargetSlot(originalSlot)); Boolean result = connection.keyCommands().renameNX(originalKey, newKey).block(); assertThat(result).isTrue(); assertThat(connection.stringCommands().get(newKey).block()).isEqualTo(value); if (hasTtl) { assertThat(connection.keyCommands().ttl(newKey).block()).isGreaterThan(0); } else { assertThat(connection.keyCommands().ttl(newKey).block()).isEqualTo(-1); } connection.stringCommands().set(originalKey, value).block(); result = connection.keyCommands().renameNX(originalKey, newKey).block(); assertThat(result).isFalse(); }
public static void checkURIIfPresent(@Nullable String uri, @Nonnull Predicate<URI> predicate) throws IllegalArgumentException { checkURIIfPresent(uri, predicate, null); }
@Test public void testCheckURIIfPresent() { checkURIIfPresent(null, uri -> false); checkURIIfPresent("", uri -> false); checkURIIfPresent("http://pulsar.apache.org", uri -> true); try { checkURIIfPresent("http/pulsar.apache.org", uri -> uri.getScheme() != null, "Error"); Assert.fail("Unexpected behaviour"); } catch (IllegalArgumentException ex) { // Ok } }
public static List<Type> decode(String rawInput, List<TypeReference<Type>> outputParameters) { return decoder.decodeFunctionResult(rawInput, outputParameters); }
@Test public void testDecodeMultipleStaticStructNested() { String rawInput = "0x0000000000000000000000000000000000000000000000000000000000000001" + "000000000000000000000000000000000000000000000000000000000000000a" + "0000000000000000000000000000000000000000000000000000000000000001" + "0000000000000000000000000000000000000000000000000000000000000001" + "000000000000000000000000000000000000000000000000000000000000000a" + "0000000000000000000000000000000000000000000000000000000000000001"; assertEquals( FunctionReturnDecoder.decode( rawInput, AbiV2TestFixture.getFuzzFuzzFunction.getOutputParameters()), Arrays.asList( new AbiV2TestFixture.Fuzz( new AbiV2TestFixture.Bar(BigInteger.ONE, BigInteger.TEN), BigInteger.ONE), new AbiV2TestFixture.Fuzz( new AbiV2TestFixture.Bar(BigInteger.ONE, BigInteger.TEN), BigInteger.ONE))); }
@Udf public String chr(@UdfParameter( description = "Decimal codepoint") final Integer decimalCode) { if (decimalCode == null) { return null; } if (!Character.isValidCodePoint(decimalCode)) { return null; } final char[] resultChars = Character.toChars(decimalCode); return String.valueOf(resultChars); }
@Test public void shouldReturnTwoCharsForNonBMPString() { final String result = udf.chr("\\ud800\\udc01"); assertThat(result.codePointAt(0), is(65537)); assertThat(result.toCharArray().length, is(2)); }
private AlarmEntityId(final URI uri) { super(uri); }
@Test public void validSchemaPermitted() { alarmEntityId("none:foo"); alarmEntityId("port:foo"); alarmEntityId("och:foo"); alarmEntityId("other:foo"); }
@Override public int forEachByteDesc(ByteProcessor processor) { ensureAccessible(); try { return forEachByteDesc0(writerIndex - 1, readerIndex, processor); } catch (Exception e) { PlatformDependent.throwException(e); return -1; } }
@Test public void testForEachByteDesc() { buffer.clear(); for (int i = 0; i < CAPACITY; i ++) { buffer.writeByte(i + 1); } final AtomicInteger lastIndex = new AtomicInteger(); assertThat(buffer.forEachByteDesc(CAPACITY / 4, CAPACITY * 2 / 4, new ByteProcessor() { int i = CAPACITY * 3 / 4 - 1; @Override public boolean process(byte value) throws Exception { assertThat(value, is((byte) (i + 1))); lastIndex.set(i); i --; return true; } }), is(-1)); assertThat(lastIndex.get(), is(CAPACITY / 4)); }
public Sketch<?> merge(Sketch<?> left, Sketch<?> right) { if (left instanceof NormalSketch && right instanceof NormalSketch) { return mergeNormalWithNormal(asNormal(left), asNormal(right)); } else if (left instanceof NormalSketch && right instanceof SparseSketch) { return mergeNormalWithSparse(asNormal(left), asSparse(right)); } else if (left instanceof SparseSketch && right instanceof NormalSketch) { return mergeNormalWithSparse(asNormal(right), asSparse(left)); } else if (left instanceof SparseSketch && right instanceof SparseSketch) { return mergeSparseWithSparse(asSparse(left), asSparse(right)); } else { throw new IllegalArgumentException( String.format("Invalid sketch types: left=%s, right=%s", left.getClass(), right.getClass())); } }
@Test public void requireThatMergingTwoSmallSparseSketchesReturnsSparseSketch() { SparseSketch s1 = SketchUtils.createSparseSketch(1); SparseSketch s2 = SketchUtils.createSparseSketch(2); Sketch<?> result = merger.merge(s1, s2); assertEquals(result.getClass(), SparseSketch.class); assertTrue("Should return the instance given by first argument.", result == s1); SketchUtils.assertSketchContains(result, 1, 2); }
@Override public int hashCode() { return Objects.hash(contextPath, ip, port, startupTime, healthy, enabled); }
@Test public void testHashCode() { assertEquals(236671917, upstreamInstance.hashCode()); }
public static void verifyChunkedSumsByteArray(int bytesPerSum, int checksumType, byte[] sums, int sumsOffset, byte[] data, int dataOffset, int dataLength, String fileName, long basePos) throws ChecksumException { nativeComputeChunkedSumsByteArray(bytesPerSum, checksumType, sums, sumsOffset, data, dataOffset, dataLength, fileName, basePos, true); }
@Test public void testVerifyChunkedSumsByteArrayFail() { allocateArrayByteBuffers(); fillDataAndInvalidChecksums(); assertThrows(ChecksumException.class, () -> NativeCrc32.verifyChunkedSumsByteArray(bytesPerChecksum, checksumType.id, checksums.array(), checksums.position(), data.array(), data.position(), data.remaining(), fileName, BASE_POSITION)); }
public void refreshActiveTime(String connectionId) { Connection connection = connections.get(connectionId); if (connection != null) { connection.freshActiveTime(); } }
@Test void testRefreshActiveTime() { try { connectionManager.refreshActiveTime(connectId); } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } }
protected TransformerInput buildTransformerInput(List<Long> tokens, int maxTokens, boolean isQuery) { if (!isQuery) { tokens = tokens.stream().filter(token -> !skipTokens.contains(token)).toList(); } List<Long> inputIds = new ArrayList<>(maxTokens); List<Long> attentionMask = new ArrayList<>(maxTokens); if (tokens.size() > maxTokens - 3) tokens = tokens.subList(0, maxTokens - 3); inputIds.add(startSequenceToken); inputIds.add(isQuery? querySequenceToken: documentSequenceToken); inputIds.addAll(tokens); inputIds.add(endSequenceToken); int inputLength = inputIds.size(); long padTokenId = isQuery? maskSequenceToken: padSequenceToken; int padding = isQuery? maxTokens - inputLength: 0; for (int i = 0; i < padding; i++) inputIds.add(padTokenId); for (int i = 0; i < inputLength; i++) attentionMask.add((long) 1); for (int i = 0; i < padding; i++) attentionMask.add((long) 0); // Do not attend to mask paddings return new TransformerInput(inputIds, attentionMask); }
@Test public void testInputTensorsWordPiece() { // wordPiece tokenizer("this is a query !") -> [2023, 2003, 1037, 23032, 999] List<Long> tokens = List.of(2023L, 2003L, 1037L, 23032L, 999L); ColBertEmbedder.TransformerInput input = embedder.buildTransformerInput(tokens,10,true); assertEquals(10,input.inputIds().size()); assertEquals(10,input.attentionMask().size()); assertEquals(List.of(101L, 1L, 2023L, 2003L, 1037L, 23032L, 999L, 102L, 103L, 103L),input.inputIds()); assertEquals(List.of(1L, 1L, 1L, 1L, 1L, 1L, 1L, 1L, 0L, 0L),input.attentionMask()); input = embedder.buildTransformerInput(tokens,10,false); assertEquals(7,input.inputIds().size()); assertEquals(7,input.attentionMask().size()); assertEquals(List.of(101L, 2L, 2023L, 2003L, 1037L, 23032L, 102L),input.inputIds()); assertEquals(List.of(1L, 1L, 1L, 1L, 1L, 1L, 1L),input.attentionMask()); }
void afterWrite(Runnable task) { for (int i = 0; i < WRITE_BUFFER_RETRIES; i++) { if (writeBuffer.offer(task)) { scheduleAfterWrite(); return; } scheduleDrainBuffers(); Thread.onSpinWait(); } // In scenarios where the writing threads cannot make progress then they attempt to provide // assistance by performing the eviction work directly. This can resolve cases where the // maintenance task is scheduled but not running. That might occur due to all of the executor's // threads being busy (perhaps writing into this cache), the write rate greatly exceeds the // consuming rate, priority inversion, or if the executor silently discarded the maintenance // task. Unfortunately this cannot resolve when the eviction is blocked waiting on a long- // running computation due to an eviction listener, the victim is being computed on by a writer, // or the victim residing in the same hash bin as a computing entry. In those cases a warning is // logged to encourage the application to decouple these computations from the map operations. lock(); try { maintenance(task); } catch (RuntimeException e) { logger.log(Level.ERROR, "Exception thrown when performing the maintenance task", e); } finally { evictionLock.unlock(); } rescheduleCleanUpIfIncomplete(); }
@Test @CheckMaxLogLevel(ERROR) public void afterWrite_exception() { var expected = new RuntimeException(); var cache = new BoundedLocalCache<Object, Object>( Caffeine.newBuilder(), /* loader */ null, /* async */ false) { @Override void maintenance(Runnable task) { throw expected; } }; Runnable pendingTask = () -> {}; for (int i = 0; i < WRITE_BUFFER_MAX; i++) { cache.afterWrite(pendingTask); } assertThat(cache.drainStatus).isEqualTo(PROCESSING_TO_REQUIRED); cache.afterWrite(pendingTask); assertThat(logEvents() .withMessage("Exception thrown when performing the maintenance task") .withThrowable(expected) .withLevel(ERROR) .exclusively()) .hasSize(1); }
public GenericRow createRow(final KeyValue<List<?>, GenericRow> row) { if (row.value() != null) { throw new IllegalArgumentException("Not a tombstone: " + row); } final List<?> key = row.key(); if (key.size() < keyIndexes.size()) { throw new IllegalArgumentException("Not enough key columns. " + "expected at least " + keyIndexes.size() + ", got: " + key); } final GenericRow values = new GenericRow(numColumns); for (int columnIdx = 0; columnIdx < numColumns; columnIdx++) { final Integer keyIdx = keyIndexes.get(columnIdx); if (keyIdx == null) { values.append(null); } else { values.append(key.get(keyIdx)); } } return values; }
@Test public void shouldThrowIfDataRowNotATombstone() { // Given: final KeyValue<List<?>, GenericRow> kv = KeyValue.keyValue( ImmutableList.of(4, 2), genericRow(1, 2, 3, 4, 5) ); // When/Then: assertThrows( IllegalArgumentException.class, () -> factory.createRow(kv) ); }
@Override public void refreshTable(String srDbName, Table table, List<String> partitionNames, boolean onlyCachedPartitions) { OdpsTableName odpsTableName = OdpsTableName.of(srDbName, table.getName()); tableCache.invalidate(odpsTableName); get(tableCache, odpsTableName); if (!table.isUnPartitioned()) { partitionCache.invalidate(odpsTableName); get(partitionCache, odpsTableName); } }
@Test public void testRefreshTable() { Table odpsTable = odpsMetadata.getTable("project", "tableName"); // mock schema change when(table.getSchema()).thenReturn(new TableSchema()); Table cacheTable = odpsMetadata.getTable("project", "tableName"); Assert.assertTrue(cacheTable.getColumns().size() > 0); odpsMetadata.refreshTable("project", odpsTable, null, false); Table refreshTable = odpsMetadata.getTable("project", "tableName"); Assert.assertTrue(refreshTable.getColumns().size() == 0); }
public static String sanitize(final String s) { return s.replace(':', '-') .replace('_', '-') .replace('.', '-') .replace('/', '-') .replace('\\', '-'); }
@Test public void testNotFileFriendlySimpleSanitized() { String out = StringHelper.sanitize("c:\\helloworld"); assertEquals(-1, out.indexOf(':'), "Should not contain : "); assertEquals(-1, out.indexOf('.'), "Should not contain . "); }
public RelDataType createRelDataTypeFromSchema(Schema schema) { Builder builder = new Builder(this); boolean enableNullHandling = schema.isEnableColumnBasedNullHandling(); for (Map.Entry<String, FieldSpec> entry : schema.getFieldSpecMap().entrySet()) { builder.add(entry.getKey(), toRelDataType(entry.getValue(), enableNullHandling)); } return builder.build(); }
@Test(dataProvider = "relDataTypeConversion") public void testNotNullableArrayTypes(FieldSpec.DataType dataType, RelDataType arrayType, boolean columnNullMode) { TypeFactory typeFactory = new TypeFactory(); Schema testSchema = new Schema.SchemaBuilder() .addDimensionField("col", dataType, field -> { field.setNullable(false); field.setSingleValueField(false); }) .setEnableColumnBasedNullHandling(columnNullMode) .build(); RelDataType relDataTypeFromSchema = typeFactory.createRelDataTypeFromSchema(testSchema); List<RelDataTypeField> fieldList = relDataTypeFromSchema.getFieldList(); RelDataTypeField field = fieldList.get(0); RelDataType expectedType = typeFactory.createArrayType(arrayType, -1); Assert.assertEquals(field.getType(), expectedType); }
@Override public void handleRequest(RestRequest request, RequestContext requestContext, Callback<RestResponse> callback) { //This code path cannot accept content types or accept types that contain //multipart/related. This is because these types of requests will usually have very large payloads and therefore //would degrade server performance since RestRequest reads everything into memory. if (!isMultipart(request, requestContext, callback)) { _restRestLiServer.handleRequest(request, requestContext, callback); } }
@Test public void testRestRequestAttemptVerifyParseFailed() throws Exception { //This test verifies that a RestRequest sent to the RestLiServer throws an exception if the content type or accept types //fail to parse properly. This occurs when we try to verify that the request's content type or accept types do //not include multipart/related. RestRequest invalidContentTypeRequest = new RestRequestBuilder(new URI("/statuses/abcd")) .setHeader(RestConstants.HEADER_CONTENT_TYPE, "©").build(); Callback<RestResponse> callback = new Callback<RestResponse>() { @Override public void onSuccess(RestResponse restResponse) { fail(); } @Override public void onError(Throwable e) { assertTrue(e instanceof RestException); RestException restException = (RestException)e; RestResponse restResponse = restException.getResponse(); assertEquals(restResponse.getStatus(), 400); assertTrue(restResponse.getEntity().length() > 0); assertEquals(restResponse.getEntity().asString(Charset.defaultCharset()), "Unable to parse content or accept types."); } }; _server.handleRequest(invalidContentTypeRequest, new RequestContext(), callback); }
public static int wrappedReadForCompressedData(InputStream is, byte[] buf, int off, int len) throws IOException { try { return is.read(buf, off, len); } catch (IOException ie) { throw ie; } catch (Throwable t) { throw new IOException("Error while reading compressed data", t); } }
@Test public void testWrappedReadForCompressedData() throws IOException { byte[] buf = new byte[2]; InputStream mockStream = Mockito.mock(InputStream.class); Mockito.when(mockStream.read(buf, 0, 1)).thenReturn(1); Mockito.when(mockStream.read(buf, 0, 2)).thenThrow( new java.lang.InternalError()); try { assertEquals("Check expected value", 1, IOUtils.wrappedReadForCompressedData(mockStream, buf, 0, 1)); } catch (IOException ioe) { fail("Unexpected error while reading"); } try { IOUtils.wrappedReadForCompressedData(mockStream, buf, 0, 2); } catch (IOException ioe) { GenericTestUtils.assertExceptionContains( "Error while reading compressed data", ioe); } }
public static File saveKarateJson(String targetDir, FeatureResult result, String fileName) { if (fileName == null) { fileName = result.getFeature().getKarateJsonFileName(); } File file = new File(targetDir + File.separator + fileName); FileUtils.writeToFile(file, JsonUtils.toJson(result.toKarateJson())); return file; }
@Test void testReport() { final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); final PrintStream originalOut = System.out; Feature feature = Feature.read("classpath:com/intuit/karate/report/test.feature"); FeatureRuntime fr = FeatureRuntime.of(feature); fr.run(); Report report = SuiteReports.DEFAULT.featureReport(fr.suite, fr.result); File file = report.render("target/report-test"); String html = FileUtils.toString(file); assertTrue(html.contains("<title>com.intuit.karate.report.test</title>")); assertTrue(html.contains("<img src=\"karate-labs-logo-ring.svg\" alt=\"Karate Labs\"/>")); assertTrue(html.contains("<div>Scenarios</div>")); assertTrue(html.contains("<a href=\"karate-summary.html\">Summary</a><span class=\"feature-label\">|</span>")); System.setOut(new PrintStream(outContent)); // Capture console output fr.suite.buildResults(); assertFalse(outContent.toString().contains(" | env: ")); System.setOut(originalOut); // restore console output // render summary report Runner.Builder builder = new Runner.Builder(); builder.reportDir("target/report-test"); Suite suite = new Suite(builder); File jsonFile = ReportUtils.saveKarateJson("target/report-test", fr.result, null); suite.featureResultFiles.add(jsonFile); Results results = Results.of(suite); // this will render summary via constructor TODO improve }
public static Timestamp toTimestamp(String timestampString) { try { return Timestamp.valueOf(timestampString); } catch (Exception e) { // Try the next format } try { return new Timestamp(Long.parseLong(timestampString)); } catch (Exception e) { // Try the next format } try { return Timestamp.from(ZonedDateTime.parse(timestampString, UNIVERSAL_DATE_TIME_FORMATTER).toInstant()); } catch (Exception e) { // Try the next format } try { LocalDateTime dateTime = LocalDateTime.parse(timestampString, UNIVERSAL_DATE_TIME_FORMATTER); return Timestamp.valueOf(dateTime); } catch (Exception e) { throw new IllegalArgumentException(String.format("Invalid timestamp: '%s'", timestampString)); } }
@Test public void testValidTimestampFormats() { // Test ISO8601 variations with and without milliseconds and timezones assertEquals( TimestampUtils.toTimestamp("2024-07-12T15:32:36Z"), Timestamp.from(LocalDateTime.of(2024, 7, 12, 15, 32, 36).atZone(ZoneOffset.UTC).toInstant())); assertEquals( TimestampUtils.toTimestamp("2024-07-12 15:32:36.111Z"), Timestamp.from(LocalDateTime.of(2024, 7, 12, 15, 32, 36, 111000000).atZone(ZoneOffset.UTC).toInstant())); for (int i = 1; i < 7; i++) { int fraction = Integer.parseInt("1".repeat(i) + "0".repeat(9 - i)); assertEquals( TimestampUtils.toTimestamp("2024-07-12T15:32:36." + fraction), Timestamp.valueOf("2024-07-12 15:32:36." + fraction)); assertEquals( TimestampUtils.toTimestamp("2024-07-12T15:32:36." + fraction + "Z"), Timestamp.from(LocalDateTime.of(2024, 7, 12, 15, 32, 36, fraction).atZone(ZoneOffset.UTC).toInstant())); } // Test date and time variations without 'T' assertEquals(TimestampUtils.toTimestamp("2024-07-12 15:32:36.111"), Timestamp.valueOf("2024-07-12 15:32:36.111")); assertEquals(TimestampUtils.toTimestamp("2024-07-12 15:32:36"), Timestamp.valueOf("2024-07-12 15:32:36")); assertEquals(TimestampUtils.toTimestamp("2024-07-12 15:32"), Timestamp.valueOf("2024-07-12 15:32:00")); assertEquals(TimestampUtils.toTimestamp("2024-07-12"), Timestamp.valueOf("2024-07-12 00:00:00")); assertEquals(TimestampUtils.toTimestamp("1720798356111"), new Timestamp(1720798356111L)); }
public Collection<StreamsMetadata> getAllMetadata() { return Collections.unmodifiableList(allMetadata); }
@Test public void shouldNotReturnMutableReferenceToInternalAllMetadataCollection() { final Collection<StreamsMetadata> allMetadata = metadataState.getAllMetadata(); assertFalse(allMetadata.isEmpty(), "invalid test"); try { // Either this should not affect internal state of 'metadataState' allMetadata.clear(); } catch (final UnsupportedOperationException e) { // Or should fail. } assertFalse(metadataState.getAllMetadata().isEmpty(), "encapsulation broken"); }
private static Map<String, Set<Dependency>> checkOptionalFlags( Map<String, Set<Dependency>> bundledDependenciesByModule, Map<String, DependencyTree> dependenciesByModule) { final Map<String, Set<Dependency>> allViolations = new HashMap<>(); for (String module : bundledDependenciesByModule.keySet()) { LOG.debug("Checking module '{}'.", module); if (!dependenciesByModule.containsKey(module)) { throw new IllegalStateException( String.format( "Module %s listed by shade-plugin, but not dependency-plugin.", module)); } final Collection<Dependency> bundledDependencies = bundledDependenciesByModule.get(module); final DependencyTree dependencyTree = dependenciesByModule.get(module); final Set<Dependency> violations = checkOptionalFlags(module, bundledDependencies, dependencyTree); if (violations.isEmpty()) { LOG.info("OK: {}", module); } else { allViolations.put(module, violations); } } return allViolations; }
@Test void testNonBundledDependencyIsIgnoredEvenIfOthersAreBundled() { final Dependency dependencyA = createMandatoryDependency("a"); final Dependency dependencyB = createMandatoryDependency("B"); final Set<Dependency> bundled = Collections.singleton(dependencyB); final DependencyTree dependencyTree = new DependencyTree() .addDirectDependency(dependencyA) .addDirectDependency(dependencyB); final Set<Dependency> violations = ShadeOptionalChecker.checkOptionalFlags(MODULE, bundled, dependencyTree); assertThat(violations).containsExactly(dependencyB); }
@Override public String buildContext() { final String metaData = ((Collection<?>) getSource()) .stream() .map(s -> ((MetaDataDO) s).getAppName()) .collect(Collectors.joining(",")); return String.format("the meta data [%s] is %s", metaData, StringUtils.lowerCase(getType().getType().toString())); }
@Test public void batchMetaDataDeletedContextTest() { BatchMetaDataDeletedEvent batchMetaDataChangedEvent = new BatchMetaDataDeletedEvent(Arrays.asList(one, two), "test-operator"); String context = String.format("the meta data [%s] is %s", "testAppNameOne,testAppNameTwo", StringUtils.lowerCase(EventTypeEnum.META_DATA_DELETE.getType().toString())); assertEquals(context, batchMetaDataChangedEvent.buildContext()); }
@Udf(description = "Returns the hyperbolic tangent of an INT value") public Double tanh( @UdfParameter( value = "value", description = "The value in radians to get the hyperbolic tangent of." ) final Integer value ) { return tanh(value == null ? null : value.doubleValue()); }
@Test public void shouldHandleZero() { assertThat(udf.tanh(0.0), closeTo(0.0, 0.000000000000001)); assertThat(udf.tanh(0), closeTo(0.0, 0.000000000000001)); assertThat(udf.tanh(0L), closeTo(0.0, 0.000000000000001)); }
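A short sketch, assuming the Double overload delegates to Math.tanh (all names hypothetical), of the null-safe boxing pattern the Integer overload above uses: a null Integer propagates as a null result instead of throwing.

public class NullSafeDelegationDemo {
    static Double tanh(Double value) {
        return value == null ? null : Math.tanh(value); // assumed delegate
    }
    static Double tanh(Integer value) {
        return tanh(value == null ? null : value.doubleValue());
    }
    public static void main(String[] args) {
        System.out.println(tanh(0));              // 0.0
        System.out.println(tanh((Integer) null)); // null
    }
}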
public IssueQuery create(SearchRequest request) { try (DbSession dbSession = dbClient.openSession(false)) { final ZoneId timeZone = parseTimeZone(request.getTimeZone()).orElse(clock.getZone()); Collection<RuleDto> ruleDtos = ruleKeysToRuleId(dbSession, request.getRules()); Collection<String> ruleUuids = ruleDtos.stream().map(RuleDto::getUuid).collect(Collectors.toSet()); Collection<String> issueKeys = collectIssueKeys(dbSession, request); if (request.getRules() != null && request.getRules().stream().collect(Collectors.toSet()).size() != ruleDtos.size()) { ruleUuids.add("non-existing-uuid"); } IssueQuery.Builder builder = IssueQuery.builder() .issueKeys(issueKeys) .severities(request.getSeverities()) .cleanCodeAttributesCategories(request.getCleanCodeAttributesCategories()) .impactSoftwareQualities(request.getImpactSoftwareQualities()) .impactSeverities(request.getImpactSeverities()) .statuses(request.getStatuses()) .resolutions(request.getResolutions()) .issueStatuses(request.getIssueStatuses()) .resolved(request.getResolved()) .prioritizedRule(request.getPrioritizedRule()) .rules(ruleDtos) .ruleUuids(ruleUuids) .assigneeUuids(request.getAssigneeUuids()) .authors(request.getAuthors()) .scopes(request.getScopes()) .languages(request.getLanguages()) .tags(request.getTags()) .types(request.getTypes()) .pciDss32(request.getPciDss32()) .pciDss40(request.getPciDss40()) .owaspAsvs40(request.getOwaspAsvs40()) .owaspAsvsLevel(request.getOwaspAsvsLevel()) .owaspTop10(request.getOwaspTop10()) .owaspTop10For2021(request.getOwaspTop10For2021()) .stigAsdR5V3(request.getStigAsdV5R3()) .casa(request.getCasa()) .sansTop25(request.getSansTop25()) .cwe(request.getCwe()) .sonarsourceSecurity(request.getSonarsourceSecurity()) .assigned(request.getAssigned()) .createdAt(parseStartingDateOrDateTime(request.getCreatedAt(), timeZone)) .createdBefore(parseEndingDateOrDateTime(request.getCreatedBefore(), timeZone)) .facetMode(request.getFacetMode()) .timeZone(timeZone) .codeVariants(request.getCodeVariants()); List<ComponentDto> allComponents = new ArrayList<>(); boolean effectiveOnComponentOnly = mergeDeprecatedComponentParameters(dbSession, request, allComponents); addComponentParameters(builder, dbSession, effectiveOnComponentOnly, allComponents, request); setCreatedAfterFromRequest(dbSession, builder, request, allComponents, timeZone); String sort = request.getSort(); if (!isNullOrEmpty(sort)) { builder.sort(sort); builder.asc(request.getAsc()); } return builder.build(); } }
@Test public void param_componentUuids_enables_search_by_file() { ComponentDto project = db.components().insertPrivateProject().getMainBranchComponent(); ComponentDto file = db.components().insertComponent(newFileDto(project)); SearchRequest request = new SearchRequest() .setComponentUuids(asList(file.uuid())); IssueQuery query = underTest.create(request); assertThat(query.componentUuids()).containsExactly(file.uuid()); }
public static HttpAsyncClientBuilder custom(MetricRegistry metricRegistry) { return custom(metricRegistry, METHOD_ONLY); }
@Test public void registersExpectedMetricsGivenNameStrategy() throws Exception { client = InstrumentedHttpAsyncClients.custom(metricRegistry, metricNameStrategy).disableAutomaticRetries().build(); client.start(); final SimpleHttpRequest request = SimpleRequestBuilder .get("http://localhost:" + httpServer.getAddress().getPort() + "/") .build(); final String metricName = "some.made.up.metric.name"; httpServer.createContext("/", exchange -> { exchange.sendResponseHeaders(200, 0L); exchange.setStreams(null, null); exchange.getResponseBody().write("TEST".getBytes(StandardCharsets.US_ASCII)); exchange.close(); }); httpServer.start(); when(metricNameStrategy.getNameFor(any(), any(HttpRequest.class))).thenReturn(metricName); final Future<SimpleHttpResponse> responseFuture = client.execute(request, new FutureCallback<SimpleHttpResponse>() { @Override public void completed(SimpleHttpResponse result) { assertThat(result.getBodyText()).isEqualTo("TEST"); } @Override public void failed(Exception ex) { fail(); } @Override public void cancelled() { fail(); } }); responseFuture.get(1L, TimeUnit.SECONDS); verify(registryListener).onTimerAdded(eq(metricName), any(Timer.class)); }
@Override public <T> List<T> toList(DataTable dataTable, Type itemType) { requireNonNull(dataTable, "dataTable may not be null"); requireNonNull(itemType, "itemType may not be null"); if (dataTable.isEmpty()) { return emptyList(); } ListOrProblems<T> result = toListOrProblems(dataTable, itemType); if (result.hasList()) { return unmodifiableList(result.getList()); } throw listNoConverterDefined( itemType, result.getProblems()); }
@Test void to_list_of_unknown_type__throws_exception() { DataTable table = parse("", " | firstName | lastName | birthDate |", " | Annie M. G. | Schmidt | 1911-03-20 |", " | Roald | Dahl | 1916-09-13 |", " | Astrid | Lindgren | 1907-11-14 |"); CucumberDataTableException exception = assertThrows( CucumberDataTableException.class, () -> converter.toList(table, Author.class)); assertThat(exception.getMessage(), is("" + "Can't convert DataTable to List<io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Author>.\n" + "Please review these problems:\n" + "\n" + " - There was no table entry or table row transformer registered for io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Author.\n" + " Please consider registering a table entry or row transformer.\n" + "\n" + " - There was no default table entry transformer registered to transform io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Author.\n" + " Please consider registering a default table entry transformer.\n" + "\n" + "Note: Usually solving one is enough")); }
static ProjectMeasuresQuery newProjectMeasuresQuery(List<Criterion> criteria, @Nullable Set<String> projectUuids) { ProjectMeasuresQuery query = new ProjectMeasuresQuery(); Optional.ofNullable(projectUuids).ifPresent(query::setProjectUuids); criteria.forEach(criterion -> processCriterion(criterion, query)); return query; }
@Test public void fail_to_create_query_on_language_using_eq_operator_and_values() { assertThatThrownBy(() -> { newProjectMeasuresQuery(singletonList(Criterion.builder().setKey("languages").setOperator(EQ).setValues(asList("java")).build()), emptySet()); }) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Languages should be set either by using 'languages = java' or 'languages IN (java, js)'"); }
@VisibleForTesting WxMpService getWxMpService(Integer userType) { // Step 1: query the config entry from the DB to obtain the corresponding WxMpService object SocialClientDO client = socialClientMapper.selectBySocialTypeAndUserType( SocialTypeEnum.WECHAT_MP.getType(), userType); if (client != null && Objects.equals(client.getStatus(), CommonStatusEnum.ENABLE.getStatus())) { return wxMpServiceCache.getUnchecked(client.getClientId() + ":" + client.getClientSecret()); } // Step 2: if no DB config entry exists, fall back to the WxMpService object configured in application-*.yaml return wxMpService; }
@Test public void testGetWxMpService_clientNull() { // prepare parameters Integer userType = randomPojo(UserTypeEnum.class).getValue(); // mock methods // invoke WxMpService result = socialClientService.getWxMpService(userType); // assert assertSame(wxMpService, result); }
public static FullyQualifiedKotlinType convert(FullyQualifiedJavaType javaType) { FullyQualifiedKotlinType kotlinType = convertBaseType(javaType); for (FullyQualifiedJavaType argument : javaType.getTypeArguments()) { kotlinType.addTypeArgument(convert(argument)); } return kotlinType; }
@Test void testUnmappedType() { FullyQualifiedJavaType jt = new FullyQualifiedJavaType("java.math.BigDecimal"); FullyQualifiedKotlinType kt = JavaToKotlinTypeConverter.convert(jt); assertThat(kt.getShortNameWithTypeArguments()).isEqualTo("BigDecimal"); assertThat(kt.getImportList()).hasSize(1); assertThat(kt.getImportList()).contains("java.math.BigDecimal"); }
public static String parse(String version) { if (startsWith(version, "1.")) { Matcher m = JAVA_VERSION_1_X.matcher(version); m.find(); return m.group(); } else { Matcher m = JAVA_VERSION_X.matcher(version); m.find(); return m.group(); } }
@Test public void testParse() { assertThat(JavaVersion.parse("1.8.0_362"), is("1.8")); assertThat(JavaVersion.parse("11.0.18"), is("11")); assertThat(JavaVersion.parse("9.0.4"), is("9")); assertThat(JavaVersion.parse("10.0.1"), is("10")); }
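A hedged sketch of the two-branch parse above; JAVA_VERSION_1_X and JAVA_VERSION_X are not shown here, so both patterns below are assumptions chosen only to reproduce the tested outputs.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class JavaVersionParseDemo {
    // hypothetical stand-ins for the real patterns
    private static final Pattern JAVA_VERSION_1_X = Pattern.compile("^1\\.\\d+");
    private static final Pattern JAVA_VERSION_X = Pattern.compile("^\\d+");

    static String parse(String version) {
        Matcher m = version.startsWith("1.")
                ? JAVA_VERSION_1_X.matcher(version)
                : JAVA_VERSION_X.matcher(version);
        m.find();
        return m.group();
    }

    public static void main(String[] args) {
        System.out.println(parse("1.8.0_362")); // 1.8
        System.out.println(parse("11.0.18"));   // 11
    }
}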
public static String extractCharset(String line, String defaultValue) { if (line == null) { return defaultValue; } final String[] parts = line.split(" "); String charsetInfo = ""; for (var part : parts) { if (part.startsWith("charset")) { charsetInfo = part; break; } } final String charset = charsetInfo.replace("charset=", "").replace(";", ""); if (charset.isBlank()) { return defaultValue; } return charset; }
@DisplayName("with more items than expected") @Test void testExtractCharsetWithMoreItems() { assertEquals("UTF-8", TelegramAsyncHandler.extractCharset("Content-Type: text/plain; charset=UTF-8; name=\"some-name\"", StandardCharsets.US_ASCII.name())); }
public static String findAddress(List<NodeAddress> addresses, NodeAddressType preferredAddressType) { if (addresses == null) { return null; } Map<String, String> addressMap = addresses.stream() .collect(Collectors.toMap(NodeAddress::getType, NodeAddress::getAddress, (address1, address2) -> { LOGGER.warnOp("Found multiple addresses with the same type. Only the first address '{}' will be used.", address1); return address1; })); // If user set preferred address type, we should check it first if (preferredAddressType != null && addressMap.containsKey(preferredAddressType.toValue())) { return addressMap.get(preferredAddressType.toValue()); } if (addressMap.containsKey("ExternalDNS")) { return addressMap.get("ExternalDNS"); } else if (addressMap.containsKey("ExternalIP")) { return addressMap.get("ExternalIP"); } else if (addressMap.containsKey("InternalDNS")) { return addressMap.get("InternalDNS"); } else if (addressMap.containsKey("InternalIP")) { return addressMap.get("InternalIP"); } else if (addressMap.containsKey("Hostname")) { return addressMap.get("Hostname"); } return null; }
@Test public void testFindAddressWithAddressType() { String address = NodeUtils.findAddress(ADDRESSES, NodeAddressType.INTERNAL_DNS); assertThat(address, is("my.internal.address")); }
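A plain-Map sketch (all names hypothetical) of the fallback order above: the preferred type is checked first, then the hard-coded ExternalDNS, ExternalIP, InternalDNS, InternalIP, Hostname precedence.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class AddressPrecedenceDemo {
    static final List<String> PRECEDENCE =
            List.of("ExternalDNS", "ExternalIP", "InternalDNS", "InternalIP", "Hostname");

    static String pick(Map<String, String> byType, String preferred) {
        if (preferred != null && byType.containsKey(preferred)) {
            return byType.get(preferred);
        }
        for (String type : PRECEDENCE) {
            if (byType.containsKey(type)) {
                return byType.get(type);
            }
        }
        return null;
    }

    public static void main(String[] args) {
        Map<String, String> addresses = new LinkedHashMap<>();
        addresses.put("InternalDNS", "my.internal.address");
        addresses.put("Hostname", "node-1");
        System.out.println(pick(addresses, null));       // my.internal.address
        System.out.println(pick(addresses, "Hostname")); // node-1
    }
}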
public String map(AmountRequest request) { if (request instanceof OffsetBasedPageRequest && ((OffsetBasedPageRequest) request).getOffset() > 0L) { return sqlOffsetBasedPageRequestMapper.mapToSqlQuery((OffsetBasedPageRequest) request, jobTable); } else { return sqlAmountRequestMapper.mapToSqlQuery(request, jobTable); } }
@Test void sqlJobPageRequestMapperMapsAmountBasedPageRequest() { AmountRequest amountRequest = Paging.AmountBasedList.ascOnUpdatedAt(10); String filter = jobPageRequestMapper.map(amountRequest); assertThat(filter).isEqualTo(" ORDER BY updatedAt ASC LIMIT :limit"); }
@Override public Collection<ExecuteAwarePlugin> getExecuteAwarePluginList() { return mainLock.applyWithReadLock(executeAwarePluginList::getPlugins); }
@Test public void testGetExecuteAwarePluginList() { manager.register(new TestExecuteAwarePlugin()); Assert.assertEquals(1, manager.getExecuteAwarePluginList().size()); }
public List<Map<String, Object>> toListMap(final String json) { return GSON.fromJson(json, new TypeToken<List<Map<String, Object>>>() { }.getType()); }
@Test public void testToListMap() { Map<String, Object> map = ImmutableMap.of("id", "123", "name", "test", "data", "测试"); List<Map<String, Object>> list = ImmutableList.of(ImmutableMap.copyOf(map), ImmutableMap.copyOf(map), ImmutableMap.copyOf(map)); String json = "[{\"name\":\"test\",\"id\":\"123\",\"data\":\"测试\"}," + "{\"name\":\"test\",\"id\":\"123\",\"data\":\"测试\"}," + "{\"name\":\"test\",\"id\":\"123\",\"data\":\"测试\"}]"; assertEquals(list, GsonUtils.getInstance().toListMap(json)); }
@Override public FSDataOutputStream create(Path path, boolean overwrite, int bufferSize, short replication, long blockSize, Progressable progress) throws IOException { String confUmask = mAlluxioConf.getString(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_UMASK); Mode mode = ModeUtils.applyFileUMask(Mode.defaults(), confUmask); return this.create(path, new FsPermission(mode.toShort()), overwrite, bufferSize, replication, blockSize, progress); }
@Test public void initializeWithFullPrincipalUgi() throws Exception { mockUserGroupInformation("testuser@ALLUXIO.COM"); final org.apache.hadoop.conf.Configuration conf = getConf(); URI uri = URI.create(Constants.HEADER + "host:1"); org.apache.hadoop.fs.FileSystem.get(uri, conf); // FileSystem.create would have thrown an exception if the initialization failed. }
@Override public boolean ownDeletesAreVisible(final int type) { return false; }
@Test void assertOwnDeletesAreVisible() { assertFalse(metaData.ownDeletesAreVisible(0)); }
@Override public DefaultSchedulingPipelinedRegion getPipelinedRegionOfVertex( final ExecutionVertexID vertexId) { checkNotNull(pipelinedRegionsByVertex); final DefaultSchedulingPipelinedRegion pipelinedRegion = pipelinedRegionsByVertex.get(vertexId); if (pipelinedRegion == null) { throw new IllegalArgumentException("Unknown execution vertex " + vertexId); } return pipelinedRegion; }
@Test void testGetPipelinedRegionOfVertex() { for (DefaultExecutionVertex vertex : adapter.getVertices()) { final DefaultSchedulingPipelinedRegion pipelinedRegion = adapter.getPipelinedRegionOfVertex(vertex.getId()); assertRegionContainsAllVertices(pipelinedRegion); } }
Collection<AzureAddress> getAddresses() { LOGGER.finest("Fetching OAuth Access Token"); final String accessToken = fetchAccessToken(); LOGGER.finest("Fetching instances for subscription '%s' and resourceGroup '%s'", subscriptionId, resourceGroup); Collection<AzureAddress> addresses = azureComputeApi.instances(subscriptionId, resourceGroup, scaleSet, tag, accessToken); LOGGER.finest("Found the following instances for subscription '%s' and resourceGroup '%s': %s", subscriptionId, resourceGroup, addresses); return addresses; }
@Test public void getAddressesWithConfiguredSettings() { // given String tenantId = "tenant-id"; String clientId = "client-id"; String clientSecret = "client-secret"; given(azureAuthenticator.refreshAccessToken(tenantId, clientId, clientSecret)).willReturn(ACCESS_TOKEN); String subscriptionId = "subscription-2"; String resourceGroup = "resource-group-2"; String scaleSet = "scale-set-2"; given(azureComputeApi.instances(subscriptionId, resourceGroup, scaleSet, TAG, ACCESS_TOKEN)).willReturn(ADDRESSES); AzureConfig azureConfig = AzureConfig.builder() .setClientId(clientId) .setTenantId(tenantId) .setClientSecret(clientSecret) .setSubscriptionId(subscriptionId) .setResourceGroup(resourceGroup) .setScaleSet(scaleSet) .setInstanceMetadataAvailable(false) .setTag(TAG) .build(); AzureClient azureClient = new AzureClient(azureMetadataApi, azureComputeApi, azureAuthenticator, azureConfig); // when Collection<AzureAddress> result = azureClient.getAddresses(); // then assertEquals(ADDRESSES, result); }
public HtmlCreator addRowToTable(List<String> rowElements) { html.append("<tr>"); rowElements.forEach((re) -> html.append("<td>").append(re).append("</td>")); html.append("</tr>"); return this; }
@Test public void testAddRowToTable() { htmlCreator.addRowToTable(Arrays.asList("name", "age")); Assert.assertEquals(true, htmlCreator.html().contains("<td>name</td><td>age</td>")); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { if (schema != null && schema.type() != Schema.Type.BYTES) throw new DataException("Invalid schema type for ByteArrayConverter: " + schema.type().toString()); if (value != null && !(value instanceof byte[]) && !(value instanceof ByteBuffer)) throw new DataException("ByteArrayConverter is not compatible with objects of type " + value.getClass()); return value instanceof ByteBuffer ? getBytesFromByteBuffer((ByteBuffer) value) : (byte[]) value; }
@Test public void testFromConnectSchemaless() { assertArrayEquals( SAMPLE_BYTES, converter.fromConnectData(TOPIC, null, SAMPLE_BYTES) ); }
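The getBytesFromByteBuffer helper is not shown above; this is one plausible sketch (hypothetical) of the ByteBuffer branch, duplicating the buffer so the caller's read position is left untouched.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ByteBufferToBytesDemo {
    static byte[] toBytes(ByteBuffer buffer) {
        ByteBuffer duplicate = buffer.duplicate(); // independent position/limit
        byte[] out = new byte[duplicate.remaining()];
        duplicate.get(out);
        return out;
    }

    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.wrap("TEST".getBytes(StandardCharsets.UTF_8));
        System.out.println(toBytes(buffer).length); // 4
        System.out.println(buffer.remaining());     // still 4
    }
}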
@Override public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { // Automatically detect the character encoding try (AutoDetectReader reader = new AutoDetectReader(CloseShieldInputStream.wrap(stream), metadata, getEncodingDetector(context))) { //try to get detected content type; could be a subclass of text/plain //such as vcal, etc. String incomingMime = metadata.get(Metadata.CONTENT_TYPE); MediaType mediaType = MediaType.TEXT_PLAIN; if (incomingMime != null) { MediaType tmpMediaType = MediaType.parse(incomingMime); if (tmpMediaType != null) { mediaType = tmpMediaType; } } Charset charset = reader.getCharset(); MediaType type = new MediaType(mediaType, charset); metadata.set(Metadata.CONTENT_TYPE, type.toString()); // deprecated, see TIKA-431 metadata.set(Metadata.CONTENT_ENCODING, charset.name()); XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); xhtml.startDocument(); xhtml.startElement("p"); char[] buffer = new char[4096]; int n = reader.read(buffer); while (n != -1) { xhtml.characters(buffer, 0, n); n = reader.read(buffer); } xhtml.endElement("p"); xhtml.endDocument(); } }
@Test public void testEmptyText() throws Exception { ContentHandler handler = new BodyContentHandler(); Metadata metadata = new Metadata(); parser.parse(new ByteArrayInputStream(new byte[0]), handler, metadata, new ParseContext()); assertEquals("text/plain; charset=UTF-8", metadata.get(Metadata.CONTENT_TYPE)); assertEquals("\n", handler.toString()); }
@Override public void updateSecurityGroup(KubevirtSecurityGroup sg) { checkNotNull(sg, ERR_NULL_SG); checkArgument(!Strings.isNullOrEmpty(sg.id()), ERR_NULL_SG_ID); sgStore.updateSecurityGroup(sg); }
@Test(expected = IllegalArgumentException.class) public void testUpdateUnregisteredSecurityGroup() { target.updateSecurityGroup(sg1); }
@VisibleForTesting Map<String, List<Operation>> computeOperations(SegmentDirectory.Reader segmentReader) throws Exception { Map<String, List<Operation>> columnOperationsMap = new HashMap<>(); // Does not work for segment versions < V3. if (_segmentDirectory.getSegmentMetadata().getVersion().compareTo(SegmentVersion.v3) < 0) { return columnOperationsMap; } Set<String> existingAllColumns = _segmentDirectory.getSegmentMetadata().getAllColumns(); Set<String> existingDictColumns = _segmentDirectory.getColumnsWithIndex(StandardIndexes.dictionary()); Set<String> existingForwardIndexColumns = _segmentDirectory.getColumnsWithIndex(StandardIndexes.forward()); for (String column : existingAllColumns) { if (_schema != null && !_schema.hasColumn(column)) { // _schema will be null only in tests LOGGER.info("Column {} is not in schema, skipping updating forward index", column); continue; } boolean existingHasDict = existingDictColumns.contains(column); boolean existingHasFwd = existingForwardIndexColumns.contains(column); FieldIndexConfigs newConf = _fieldIndexConfigs.get(column); boolean newIsFwd = newConf.getConfig(StandardIndexes.forward()).isEnabled(); boolean newIsDict = newConf.getConfig(StandardIndexes.dictionary()).isEnabled(); boolean newIsRange = newConf.getConfig(StandardIndexes.range()).isEnabled(); if (existingHasFwd && !newIsFwd) { // Existing column has a forward index. New column config disables the forward index ColumnMetadata columnMetadata = _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column); if (columnMetadata.isSorted()) { // Check if the column is sorted. If sorted, disabling forward index should be a no-op. Do not return an // operation for this column related to disabling forward index. LOGGER.warn("Trying to disable the forward index for a sorted column {}, ignoring", column); continue; } if (existingHasDict) { if (!newIsDict) { // Dictionary was also disabled. Just disable the dictionary and remove it along with the forward index // If range index exists, don't try to regenerate it on toggling the dictionary, throw an error instead Preconditions.checkState(!newIsRange, String.format( "Must disable range (enabled) index to disable the dictionary and forward index for column: %s or " + "refresh / back-fill the forward index", column)); columnOperationsMap.put(column, Arrays.asList(Operation.DISABLE_FORWARD_INDEX, Operation.DISABLE_DICTIONARY)); } else { // Dictionary is still enabled, keep it but remove the forward index columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_FORWARD_INDEX)); } } else { if (!newIsDict) { // Dictionary remains disabled and we should not reconstruct temporary forward index as dictionary based columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_FORWARD_INDEX)); } else { // Dictionary is enabled, creation of dictionary and conversion to dictionary based forward index is needed columnOperationsMap.put(column, Arrays.asList(Operation.DISABLE_FORWARD_INDEX, Operation.ENABLE_DICTIONARY)); } } } else if (!existingHasFwd && newIsFwd) { // Existing column does not have a forward index. New column config enables the forward index ColumnMetadata columnMetadata = _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column); if (columnMetadata != null && columnMetadata.isSorted()) { // Check if the column is sorted. If sorted, disabling forward index should be a no-op and forward index // should already exist. Do not return an operation for this column related to enabling forward index. 
LOGGER.warn("Trying to enable the forward index for a sorted column {}, ignoring", column); continue; } // Get list of columns with inverted index Set<String> existingInvertedIndexColumns = segmentReader.toSegmentDirectory().getColumnsWithIndex(StandardIndexes.inverted()); if (!existingHasDict || !existingInvertedIndexColumns.contains(column)) { // If either dictionary or inverted index is missing on the column there is no way to re-generate the forward // index. Treat this as a no-op and log a warning. LOGGER.warn("Trying to enable the forward index for a column {} missing either the dictionary ({}) and / or " + "the inverted index ({}) is not possible. Either a refresh or back-fill is required to get the " + "forward index, ignoring", column, existingHasDict ? "enabled" : "disabled", existingInvertedIndexColumns.contains(column) ? "enabled" : "disabled"); continue; } columnOperationsMap.put(column, Collections.singletonList(Operation.ENABLE_FORWARD_INDEX)); } else if (!existingHasFwd) { // Forward index is disabled for the existing column and should remain disabled based on the latest config // Need some checks to see whether the dictionary is being enabled or disabled here and take appropriate actions // If the dictionary is not enabled on the existing column it must be on the new noDictionary column list. // Cannot enable the dictionary for a column with forward index disabled. Preconditions.checkState(existingHasDict || !newIsDict, String.format("Cannot regenerate the dictionary for column %s with forward index disabled. Please " + "refresh or back-fill the data to add back the forward index", column)); if (existingHasDict && !newIsDict) { // Dictionary is currently enabled on this column but is supposed to be disabled. Remove the dictionary // and update the segment metadata If the range index exists then throw an error since we are not // regenerating the range index on toggling the dictionary Preconditions.checkState(!newIsRange, String.format( "Must disable range (enabled) index to disable the dictionary for a forwardIndexDisabled column: %s or " + "refresh / back-fill the forward index", column)); columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_DICTIONARY)); } } else if (!existingHasDict && newIsDict) { // Existing column is RAW. New column is dictionary enabled. if (_schema == null || _tableConfig == null) { // This can only happen in tests. LOGGER.warn("Cannot enable dictionary for column={} as schema or tableConfig is null.", column); continue; } ColumnMetadata existingColumnMetadata = _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column); if (DictionaryIndexType.ignoreDictionaryOverride(_tableConfig.getIndexingConfig().isOptimizeDictionary(), _tableConfig.getIndexingConfig().isOptimizeDictionaryForMetrics(), _tableConfig.getIndexingConfig().getNoDictionarySizeRatioThreshold(), existingColumnMetadata.getFieldSpec(), _fieldIndexConfigs.get(column), existingColumnMetadata.getCardinality(), existingColumnMetadata.getTotalNumberOfEntries())) { columnOperationsMap.put(column, Collections.singletonList(Operation.ENABLE_DICTIONARY)); } } else if (existingHasDict && !newIsDict) { // Existing column has dictionary. New config for the column is RAW. if (shouldDisableDictionary(column, _segmentDirectory.getSegmentMetadata().getColumnMetadataFor(column))) { columnOperationsMap.put(column, Collections.singletonList(Operation.DISABLE_DICTIONARY)); } } else if (!existingHasDict) { // Both existing and new column is RAW forward index encoded. 
// Check if compression needs to be changed. // TODO: Also check if raw index version needs to be changed if (shouldChangeRawCompressionType(column, segmentReader)) { columnOperationsMap.put(column, Collections.singletonList(Operation.CHANGE_INDEX_COMPRESSION_TYPE)); } } else { // Both existing and new column is dictionary encoded. Check if compression needs to be changed. if (shouldChangeDictIdCompressionType(column, segmentReader)) { columnOperationsMap.put(column, Collections.singletonList(Operation.CHANGE_INDEX_COMPRESSION_TYPE)); } } } return columnOperationsMap; }
@Test public void testComputeOperationChangeCompression() throws Exception { // Setup SegmentMetadataImpl existingSegmentMetadata = new SegmentMetadataImpl(_segmentDirectory); SegmentDirectory segmentLocalFSDirectory = new SegmentLocalFSDirectory(_segmentDirectory, existingSegmentMetadata, ReadMode.mmap); SegmentDirectory.Writer writer = segmentLocalFSDirectory.createWriter(); // TEST1: Change compression Random rand = new Random(); // Create new tableConfig with the modified fieldConfigs. List<FieldConfig> fieldConfigs = new ArrayList<>(_tableConfig.getFieldConfigList()); int randIdx; String name; do { // Only try to change compression type for forward index enabled columns randIdx = rand.nextInt(fieldConfigs.size()); name = fieldConfigs.get(randIdx).getName(); } while (SV_FORWARD_INDEX_DISABLED_COLUMNS.contains(name) || MV_FORWARD_INDEX_DISABLED_COLUMNS.contains(name) || MV_FORWARD_INDEX_DISABLED_DUPLICATES_COLUMNS.contains(name) || FORWARD_INDEX_DISABLED_RAW_COLUMNS.contains( name) || DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITHOUT_INV_IDX.equals(name) || DIM_SV_FORWARD_INDEX_DISABLED_INTEGER_WITH_RANGE_INDEX.equals(name)); FieldConfig config = fieldConfigs.remove(randIdx); CompressionCodec newCompressionType = null; for (CompressionCodec type : CompressionCodec.values()) { if (config.getCompressionCodec() != type) { newCompressionType = type; break; } } FieldConfig newConfig = new FieldConfig(config.getName(), FieldConfig.EncodingType.RAW, Collections.emptyList(), newCompressionType, null); fieldConfigs.add(newConfig); TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).setNoDictionaryColumns(_noDictionaryColumns) .setFieldConfigList(fieldConfigs).build(); tableConfig.setFieldConfigList(fieldConfigs); IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig(null, tableConfig); ForwardIndexHandler fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null); Map<String, List<ForwardIndexHandler.Operation>> operationMap = fwdIndexHandler.computeOperations(writer); assertEquals(operationMap.size(), 1); assertEquals(operationMap.get(config.getName()), Collections.singletonList(ForwardIndexHandler.Operation.CHANGE_INDEX_COMPRESSION_TYPE)); // TEST2: Change compression and add index. Change compressionType for more than 1 column. 
fieldConfigs = new ArrayList<>(_tableConfig.getFieldConfigList()); FieldConfig config1 = fieldConfigs.remove(0); FieldConfig config2 = fieldConfigs.remove(1); FieldConfig newConfig1 = new FieldConfig(config1.getName(), FieldConfig.EncodingType.RAW, Collections.emptyList(), CompressionCodec.ZSTANDARD, null); fieldConfigs.add(newConfig1); FieldConfig newConfig2 = new FieldConfig(config2.getName(), FieldConfig.EncodingType.RAW, Collections.emptyList(), CompressionCodec.ZSTANDARD, null); fieldConfigs.add(newConfig2); tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName(TABLE_NAME).setNoDictionaryColumns(_noDictionaryColumns) .setFieldConfigList(fieldConfigs).build(); tableConfig.setFieldConfigList(fieldConfigs); indexLoadingConfig = new IndexLoadingConfig(null, tableConfig); indexLoadingConfig.addTextIndexColumns(config1.getName()); indexLoadingConfig.addInvertedIndexColumns(config1.getName()); fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, null); operationMap = fwdIndexHandler.computeOperations(writer); assertEquals(operationMap.size(), 2); assertEquals(operationMap.get(config1.getName()), Collections.singletonList(ForwardIndexHandler.Operation.CHANGE_INDEX_COMPRESSION_TYPE)); assertEquals(operationMap.get(config2.getName()), Collections.singletonList(ForwardIndexHandler.Operation.CHANGE_INDEX_COMPRESSION_TYPE)); // Tear down segmentLocalFSDirectory.close(); }
@Override @Transactional(rollbackFor = Exception.class) @CacheEvict(value = RedisKeyConstants.PERMISSION_MENU_ID_LIST, allEntries = true) // allEntries evicts the whole cache, because at this point we don't know which permission the id maps to; clearing everything is simple and effective public void deleteMenu(Long id) { // check whether child menus still exist if (menuMapper.selectCountByParentId(id) > 0) { throw exception(MENU_EXISTS_CHILDREN); } // check that the menu to be deleted exists if (menuMapper.selectById(id) == null) { throw exception(MENU_NOT_EXISTS); } // mark as deleted menuMapper.deleteById(id); // remove the permissions granted to roles permissionService.processMenuDeleted(id); }
@Test public void testDeleteMenu_existChildren() { // mock data (build a parent menu with a child) MenuDO sonMenu = createParentAndSonMenu(); // prepare parameters Long parentId = sonMenu.getParentId(); // invoke and assert the exception assertServiceException(() -> menuService.deleteMenu(parentId), MENU_EXISTS_CHILDREN); }
public static ScheduledFuture<?> scheduleLongPolling(Runnable runnable, long period, TimeUnit unit) { return LONG_POLLING_EXECUTOR.schedule(runnable, period, unit); }
@Test public void scheduleLongPollingTest() { ConfigExecutor.scheduleLongPolling(() -> log.info(Thread.currentThread().getName()), 5, TimeUnit.SECONDS); }
@Deprecated public String resolveDefaultAddress() { return this.resolveDefaultAddress(true); }
@Test public void testResolveDefaultAddress() { AmazonInfo info = (AmazonInfo) instanceInfo.getDataCenterInfo(); config = createConfig(info); assertThat(config.resolveDefaultAddress(false), is(info.get(publicHostname))); info.getMetadata().remove(publicHostname.getName()); config = createConfig(info); assertThat(config.resolveDefaultAddress(false), is(info.get(localIpv4))); info.getMetadata().remove(localIpv4.getName()); config = createConfig(info); assertThat(config.resolveDefaultAddress(false), is(info.get(ipv6))); info.getMetadata().remove(ipv6.getName()); config = createConfig(info); assertThat(config.resolveDefaultAddress(false), is(dummyDefault)); }
@Override public Map<String, String> getRemoteRegionUrlsWithName() { String propName = namespace + "remoteRegionUrlsWithName"; String remoteRegionUrlWithNameString = configInstance.getStringProperty(propName, null).get(); if (null == remoteRegionUrlWithNameString) { return Collections.emptyMap(); } String[] remoteRegionUrlWithNamePairs = remoteRegionUrlWithNameString.split(","); Map<String, String> toReturn = new HashMap<String, String>(remoteRegionUrlWithNamePairs.length); final String pairSplitChar = ";"; for (String remoteRegionUrlWithNamePair : remoteRegionUrlWithNamePairs) { String[] pairSplit = remoteRegionUrlWithNamePair.split(pairSplitChar); if (pairSplit.length < 2) { logger.error("Error reading eureka remote region urls from property {}. " + "Invalid entry {} for remote region url. The entry must contain region name and url " + "separated by a {}. Ignoring this entry.", propName, remoteRegionUrlWithNamePair, pairSplitChar); } else { String regionName = pairSplit[0]; String regionUrl = pairSplit[1]; if (pairSplit.length > 2) { StringBuilder regionUrlAssembler = new StringBuilder(); for (int i = 1; i < pairSplit.length; i++) { if (regionUrlAssembler.length() != 0) { regionUrlAssembler.append(pairSplitChar); } regionUrlAssembler.append(pairSplit[i]); } regionUrl = regionUrlAssembler.toString(); } toReturn.put(regionName, regionUrl); } } return toReturn; }
@Test public void testRemoteRegionUrlsWithName1Region() throws Exception { String region1 = "myregion1"; String region1url = "http://local:888/eee"; ConfigurationManager.getConfigInstance().setProperty("eureka.remoteRegionUrlsWithName", region1 + ';' + region1url); DefaultEurekaServerConfig config = new DefaultEurekaServerConfig(); Map<String, String> remoteRegionUrlsWithName = config.getRemoteRegionUrlsWithName(); Assert.assertEquals("Unexpected remote region url count.", 1, remoteRegionUrlsWithName.size()); Assert.assertTrue("Remote region 1 not found.", remoteRegionUrlsWithName.containsKey(region1)); Assert.assertEquals("Unexpected remote region 1 url.", region1url, remoteRegionUrlsWithName.get(region1)); }
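A minimal sketch (values hypothetical) of the "name;url" reassembly above: a url that itself contains the ';' separator is split apart and stitched back together so only the first token is treated as the region name.

public class RegionUrlParsingDemo {
    public static void main(String[] args) {
        String pair = "myregion1;http://local:888/eee;jsessionid=1";
        String[] split = pair.split(";");
        String regionName = split[0];
        StringBuilder url = new StringBuilder();
        for (int i = 1; i < split.length; i++) {
            if (url.length() != 0) {
                url.append(';'); // restore separators swallowed by split
            }
            url.append(split[i]);
        }
        System.out.println(regionName + " -> " + url); // myregion1 -> http://local:888/eee;jsessionid=1
    }
}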
@VisibleForTesting static boolean validateArenaBlockSize(long arenaBlockSize, long mutableLimit) { return arenaBlockSize <= mutableLimit; }
@Test public void testValidateArenaBlockSize() { long arenaBlockSize = 8 * 1024 * 1024; assertFalse( RocksDBMemoryControllerUtils.validateArenaBlockSize( arenaBlockSize, (long) (arenaBlockSize * 0.5))); assertTrue( RocksDBMemoryControllerUtils.validateArenaBlockSize( arenaBlockSize, (long) (arenaBlockSize * 1.5))); }
@Udf(description = "Returns the inverse (arc) tangent of y / x") public Double atan2( @UdfParameter( value = "y", description = "The ordinate (y) coordinate." ) final Integer y, @UdfParameter( value = "x", description = "The abscissa (x) coordinate." ) final Integer x ) { return atan2(y == null ? null : y.doubleValue(), x == null ? null : x.doubleValue()); }
@Test public void shouldHandleZeroYZeroX() { assertThat(udf.atan2(0.0, 0.0), closeTo(0.0, 0.000000000000001)); assertThat(udf.atan2(0, 0), closeTo(0.0, 0.000000000000001)); assertThat(udf.atan2(0L, 0L), closeTo(0.0, 0.000000000000001)); }
private ResultUtil() { }
@Test public void testResultUtil() { Assertions.assertNotNull(ResultUtil.ok()); Assertions.assertNotNull(ResultUtil.ok(new Object())); Assertions.assertNotNull(ResultUtil.ok(new Object(), "success")); }
@Override public void writeObject(Object object) throws IOException { serializationService.writeObject(this, object); }
@Test public void testWriteObject() throws Exception { dataOutputStream.writeObject("INPUT"); verify(mockSerializationService).writeObject(dataOutputStream, "INPUT"); }
public void logResponse(Config config, HttpRequest request, Response response) { long startTime = request.getStartTime(); long elapsedTime = request.getEndTime() - startTime; response.setResponseTime(elapsedTime); StringBuilder sb = new StringBuilder(); String uri = request.getUrl(); HttpLogModifier responseModifier = logModifier(config, uri); sb.append("response time in milliseconds: ").append(elapsedTime).append('\n'); sb.append(requestCount).append(" < ").append(response.getStatus()); logHeaders(requestCount, " < ", sb, responseModifier, response.getHeaders()); ResourceType rt = response.getResourceType(); if (rt == null || rt.isBinary()) { // don't log body } else { logBody(config, responseModifier, sb, uri, response.getBody(), false, rt); } sb.append('\n'); logger.debug("{}", sb); }
@Test void testResponseLoggingXmlPretty() { config.configure("logPrettyResponse", new Variable(true)); setup("xml", "<hello>world</hello>", "application/xml"); httpRequestBuilder.path("/xml"); Response response = handle(); match(response.getBodyAsString(), "<hello>world</hello>"); match(response.getContentType(), "application/xml"); httpLogger.logResponse(config, request, response); String logs = logAppender.collect(); assertTrue(logs.contains("<hello>world</hello>")); assertTrue(logs.contains("Content-Type: application/xml")); }
@Override public Cancellable schedule(final Duration interval, final PunctuationType type, final Punctuator callback) { throw new UnsupportedOperationException("StateStores can't access schedule."); }
@Test public void shouldThrowOnScheduleWithDuration() { assertThrows(UnsupportedOperationException.class, () -> context.schedule(Duration.ZERO, PunctuationType.WALL_CLOCK_TIME, punctuator)); }
@Override public String getName() { return ANALYZER_NAME; }
@Test public void testGetName() { assertEquals("Analyzer name wrong.", "Autoconf Analyzer", analyzer.getName()); }
@Override public String toString() { String text = this.text; if (text == null) { // E.g.: "1000 Bye", "1009 Message too big" this.text = text = code() + " " + reasonText(); } return text; }
@Test public void testToString() { assertEquals("1000 Bye", NORMAL_CLOSURE.toString()); }
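A self-contained sketch (class hypothetical) of the lazy memoization above: toString builds the "code reasonText" string once and caches it in a field for later calls.

final class LazyToStringDemo {
    private final int code;
    private final String reasonText;
    private String text; // memoized on first toString() call

    LazyToStringDemo(int code, String reasonText) {
        this.code = code;
        this.reasonText = reasonText;
    }

    @Override
    public String toString() {
        String t = this.text;
        if (t == null) {
            this.text = t = code + " " + reasonText;
        }
        return t;
    }

    public static void main(String[] args) {
        System.out.println(new LazyToStringDemo(1000, "Bye")); // 1000 Bye
    }
}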
public static Read read() { return new Read(null, "", new Scan()); }
@Test public void testReadingFailsTableDoesNotExist() { final String table = tmpTable.getName(); // Exception will be thrown by read.expand() when read is applied. thrown.expect(IllegalArgumentException.class); thrown.expectMessage(String.format("Table %s does not exist", table)); runReadTest( HBaseIO.read().withConfiguration(conf).withTableId(table), false, new ArrayList<>()); runReadTest(HBaseIO.read().withConfiguration(conf).withTableId(table), true, new ArrayList<>()); }
@Override public void setConfigAttributes(Object attributes) { setConfigAttributes(attributes, null); }
@Test public void shouldAssignApprovalTypeOnFirstStageAsManual() { Map<String, Object> approvalAttributes = Map.of(Approval.TYPE, Approval.MANUAL); Map<String, Map<String, Object>> map = Map.of(StageConfig.APPROVAL, approvalAttributes); PipelineConfig pipelineConfig = PipelineConfigMother.createPipelineConfig("p1", "s1", "j1"); pipelineConfig.get(0).updateApproval(Approval.manualApproval()); pipelineConfig.setConfigAttributes(map); assertThat(pipelineConfig.get(0).getApproval().getType(), is(Approval.MANUAL)); }
public static <K, V> V removeKey(Map<K, V> map, K key, Predicate<V> removeJudge) { return map.computeIfPresent(key, (k, v) -> removeJudge.test(v) ? null : v); }
@Test void testRemoveKey() { Map<String, Integer> map = new HashMap<>(); map.put("A", 1); map.put("B", 2); map.put("C", 3); MapUtil.removeKey(map, "B", integer -> integer == 1); assertEquals(3, map.size()); MapUtil.removeKey(map, "B", integer -> integer == 2); assertEquals(2, map.size()); }
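A pure-JDK sketch of the trick above: Map.computeIfPresent removes the mapping when the remapping function returns null, which is what makes the predicate-guarded removal work.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

public class ConditionalRemoveDemo {
    static <K, V> V removeKey(Map<K, V> map, K key, Predicate<V> removeJudge) {
        return map.computeIfPresent(key, (k, v) -> removeJudge.test(v) ? null : v);
    }

    public static void main(String[] args) {
        Map<String, Integer> map = new HashMap<>(Map.of("A", 1, "B", 2));
        removeKey(map, "B", v -> v == 1); // predicate false -> entry kept
        System.out.println(map.size());   // 2
        removeKey(map, "B", v -> v == 2); // predicate true -> entry removed
        System.out.println(map.size());   // 1
    }
}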
public static void addBlockCacheUsageMetric(final StreamsMetricsImpl streamsMetrics, final RocksDBMetricContext metricContext, final Gauge<BigInteger> valueProvider) { addMutableMetric( streamsMetrics, metricContext, valueProvider, USAGE_OF_BLOCK_CACHE, USAGE_OF_BLOCK_CACHE_DESCRIPTION ); }
@Test public void shouldAddBlockCacheUsageMetric() { final String name = "block-cache-usage"; final String description = "Memory size of the entries residing in block cache in bytes"; runAndVerifyMutableMetric( name, description, () -> RocksDBMetrics.addBlockCacheUsageMetric(streamsMetrics, ROCKSDB_METRIC_CONTEXT, VALUE_PROVIDER) ); }
public String normalize(final String name) { if(StringUtils.equals(name, ".")) { return finder.find().getAbsolute(); } if(StringUtils.equals(name, "..")) { return finder.find().getParent().getAbsolute(); } if(!this.isAbsolute(name)) { return String.format("%s%s%s", finder.find().getAbsolute(), PreferencesFactory.get().getProperty("local.delimiter"), name); } return name; }
@Test public void testCurrentDir() { assertEquals(System.getProperty("user.dir"), new WorkdirPrefixer().normalize(".")); }
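A rough java.nio analogue (hypothetical; the finder and preferences collaborators above are replaced by user.dir and the platform resolver) of the three normalization rules: "." maps to the working directory, ".." to its parent, and relative names are joined onto the working directory.

import java.nio.file.Path;
import java.nio.file.Paths;

public class WorkdirNormalizeDemo {
    static String normalize(String name) {
        Path workdir = Paths.get(System.getProperty("user.dir"));
        if (".".equals(name)) {
            return workdir.toString();
        }
        if ("..".equals(name)) {
            // note: getParent() is null at the filesystem root
            return String.valueOf(workdir.getParent());
        }
        Path p = Paths.get(name);
        return p.isAbsolute() ? name : workdir.resolve(name).toString();
    }

    public static void main(String[] args) {
        System.out.println(normalize("."));        // the working directory
        System.out.println(normalize("file.txt")); // <workdir>/file.txt
    }
}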
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload, final ConnectionSession connectionSession) { switch (commandPacketType) { case COM_QUIT: return new MySQLComQuitPacket(); case COM_INIT_DB: return new MySQLComInitDbPacket(payload); case COM_FIELD_LIST: return new MySQLComFieldListPacket(payload); case COM_QUERY: return new MySQLComQueryPacket(payload); case COM_STMT_PREPARE: return new MySQLComStmtPreparePacket(payload); case COM_STMT_EXECUTE: MySQLServerPreparedStatement serverPreparedStatement = connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex())); return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount()); case COM_STMT_SEND_LONG_DATA: return new MySQLComStmtSendLongDataPacket(payload); case COM_STMT_RESET: return new MySQLComStmtResetPacket(payload); case COM_STMT_CLOSE: return new MySQLComStmtClosePacket(payload); case COM_SET_OPTION: return new MySQLComSetOptionPacket(payload); case COM_PING: return new MySQLComPingPacket(); case COM_RESET_CONNECTION: return new MySQLComResetConnectionPacket(); default: return new MySQLUnsupportedCommandPacket(commandPacketType); } }
@Test void assertNewInstanceWithComRegisterSlavePacket() { assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_REGISTER_SLAVE, payload, connectionSession), instanceOf(MySQLUnsupportedCommandPacket.class)); }
static Collection<String> findIssueKeys(String input, Pattern pattern) { Matcher m = pattern.matcher(input); Set<String> issues = new HashSet<>(); while (m.find()) { if (m.groupCount() >= 1) { String id = m.group(1); issues.add(id); } } return issues; }
@Test public void findIssueKeys() throws MalformedURLException { Pattern issuePattern = JiraSite.DEFAULT_ISSUE_PATTERN; Assert.assertEquals(Collections.singleton("JENKINS-43400"), BlueJiraIssue.findIssueKeys( "[JENKINS-43400] Print the error to the build log rather than", issuePattern)); Assert.assertEquals(Collections.singleton("JENKINS-43400"), BlueJiraIssue.findIssueKeys("JENKINS-43400 Print the error to the build log rather than", issuePattern)); Assert.assertEquals(Collections.singleton("JENKINS-43400"), BlueJiraIssue.findIssueKeys("foo/JENKINS-43400 Print the error to the build log rather than", issuePattern)); Assert.assertEquals(new HashSet(Arrays.asList("TEST-123", "EXAMPLE-123", "JENKINS-43400")), BlueJiraIssue.findIssueKeys("foo/JENKINS-43400 TEST-123 [EXAMPLE-123] Print the error to the build log rather than", issuePattern)); }
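A runnable sketch of the scan above; the pattern is an assumption standing in for JiraSite.DEFAULT_ISSUE_PATTERN, picked only to make the capture-group extraction visible.

import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class IssueKeyScanDemo {
    public static void main(String[] args) {
        Pattern pattern = Pattern.compile("([A-Z][A-Z0-9]+-[0-9]+)"); // hypothetical stand-in
        Matcher m = pattern.matcher("foo/JENKINS-43400 TEST-123 [EXAMPLE-123] fix the log");
        Set<String> issues = new HashSet<>();
        while (m.find()) {
            issues.add(m.group(1));
        }
        System.out.println(issues); // JENKINS-43400, TEST-123, EXAMPLE-123 in some order
    }
}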
public static <K, V> Reshuffle<K, V> of() { return new Reshuffle<>(); }
@Test public void testNoOldTransformInRecentVersion() { pipeline.enableAbandonedNodeEnforcement(false); pipeline.getOptions().as(StreamingOptions.class).setUpdateCompatibilityVersion("2.54.0"); pipeline.apply(Create.of(KV.of("arbitrary", "kv"))).apply(Reshuffle.of()); OldTransformSeeker seeker = new OldTransformSeeker(); pipeline.traverseTopologically(seeker); assertFalse(seeker.isOldTransformFound); }
public static HttpRespStatus valueOf(int code) { HttpRespStatus status = valueOf0(code); return status != null ? status : new HttpRespStatus(code); }
@Test public void testValueOf() { Assert.assertEquals(CONTINUE, HttpRespStatus.valueOf(100)); Assert.assertEquals(OK, HttpRespStatus.valueOf(200)); Assert.assertEquals(MULTIPLE_CHOICES, HttpRespStatus.valueOf(300)); Assert.assertEquals(MOVED_PERMANENTLY, HttpRespStatus.valueOf(301)); Assert.assertEquals(BAD_REQUEST, HttpRespStatus.valueOf(400)); Assert.assertEquals(INTERNAL_SERVER_ERROR, HttpRespStatus.valueOf(500)); Assert.assertEquals(100, HttpRespStatus.valueOf(100).code()); Assert.assertEquals(200, HttpRespStatus.valueOf(200).code()); }
@SuppressWarnings({ "nullness" // TODO(https://github.com/apache/beam/issues/21068) }) /* * Returns an iterables containing all distinct keys in this multimap. */ public PrefetchableIterable<K> keys() { checkState( !isClosed, "Multimap user state is no longer usable because it is closed for %s", keysStateRequest.getStateKey()); if (isCleared) { List<K> keys = new ArrayList<>(pendingAdds.size()); for (Map.Entry<?, KV<K, List<V>>> entry : pendingAdds.entrySet()) { keys.add(entry.getValue().getKey()); } return PrefetchableIterables.concat(keys); } Set<Object> pendingRemovesNow = new HashSet<>(pendingRemoves.keySet()); Map<Object, K> pendingAddsNow = new HashMap<>(); for (Map.Entry<Object, KV<K, List<V>>> entry : pendingAdds.entrySet()) { pendingAddsNow.put(entry.getKey(), entry.getValue().getKey()); } return new PrefetchableIterables.Default<K>() { @Override public PrefetchableIterator<K> createIterator() { return new PrefetchableIterator<K>() { PrefetchableIterator<K> persistedKeysIterator = persistedKeys.iterator(); Iterator<K> pendingAddsNowIterator; boolean hasNext; K nextKey; @Override public boolean isReady() { return persistedKeysIterator.isReady(); } @Override public void prefetch() { if (!isReady()) { persistedKeysIterator.prefetch(); } } @Override public boolean hasNext() { if (hasNext) { return true; } while (persistedKeysIterator.hasNext()) { nextKey = persistedKeysIterator.next(); Object nextKeyStructuralValue = mapKeyCoder.structuralValue(nextKey); if (!pendingRemovesNow.contains(nextKeyStructuralValue)) { // Remove all keys that we will visit when passing over the persistedKeysIterator // so we do not revisit them when passing over the pendingAddsNowIterator if (pendingAddsNow.containsKey(nextKeyStructuralValue)) { pendingAddsNow.remove(nextKeyStructuralValue); } hasNext = true; return true; } } if (pendingAddsNowIterator == null) { pendingAddsNowIterator = pendingAddsNow.values().iterator(); } while (pendingAddsNowIterator.hasNext()) { nextKey = pendingAddsNowIterator.next(); hasNext = true; return true; } return false; } @Override public K next() { if (!hasNext()) { throw new NoSuchElementException(); } hasNext = false; return nextKey; } }; } }; }
@Test public void testGetKeysPrefetch() throws Exception { FakeBeamFnStateClient fakeClient = new FakeBeamFnStateClient( ImmutableMap.of( createMultimapKeyStateKey(), KV.of(ByteArrayCoder.of(), singletonList(A1)), createMultimapValueStateKey(A1), KV.of(StringUtf8Coder.of(), asList("V1", "V2")))); MultimapUserState<byte[], String> userState = new MultimapUserState<>( Caches.eternal(), fakeClient, "instructionId", createMultimapKeyStateKey(), ByteArrayCoder.of(), StringUtf8Coder.of()); PrefetchableIterable<byte[]> keys = userState.keys(); assertEquals(0, fakeClient.getCallCount()); keys.prefetch(); assertEquals(1, fakeClient.getCallCount()); assertArrayEquals(new byte[][] {A1}, Iterables.toArray(keys, byte[].class)); assertEquals(1, fakeClient.getCallCount()); }
@Override protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception { if (StringUtils.isBlank(msg)) { ctx.writeAndFlush(QosProcessHandler.PROMPT); } else { CommandContext commandContext = TelnetCommandDecoder.decode(msg); commandContext.setQosConfiguration(qosConfiguration); commandContext.setRemote(ctx.channel()); try { String result = commandExecutor.execute(commandContext); if (StringUtils.isEquals(QosConstants.CLOSE, result)) { ctx.writeAndFlush(getByeLabel()).addListener(ChannelFutureListener.CLOSE); } else { ctx.writeAndFlush(result + QosConstants.BR_STR + QosProcessHandler.PROMPT); } } catch (NoSuchCommandException ex) { ctx.writeAndFlush(msg + " :no such command"); ctx.writeAndFlush(QosConstants.BR_STR + QosProcessHandler.PROMPT); log.error(QOS_COMMAND_NOT_FOUND, "", "", "can not found command " + commandContext, ex); } catch (PermissionDenyException ex) { ctx.writeAndFlush(msg + " :permission deny"); ctx.writeAndFlush(QosConstants.BR_STR + QosProcessHandler.PROMPT); log.error( QOS_PERMISSION_DENY_EXCEPTION, "", "", "permission deny to access command " + commandContext, ex); } catch (Exception ex) { ctx.writeAndFlush(msg + " :fail to execute commandContext by " + ex.getMessage()); ctx.writeAndFlush(QosConstants.BR_STR + QosProcessHandler.PROMPT); log.error( QOS_UNEXPECTED_EXCEPTION, "", "", "execute commandContext got exception " + commandContext, ex); } } }
@Test void testUnknownCommand() throws Exception { ChannelHandlerContext context = mock(ChannelHandlerContext.class); TelnetProcessHandler handler = new TelnetProcessHandler( FrameworkModel.defaultModel(), QosConfiguration.builder().build()); handler.channelRead0(context, "unknown"); ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class); verify(context, Mockito.atLeastOnce()).writeAndFlush(captor.capture()); assertThat(captor.getAllValues(), contains("unknown :no such command", "\r\ndubbo>")); }
@Override protected void render(Block html) { String jid = $(JOB_ID); if (jid.isEmpty()) { html. p().__("Sorry, can't do anything without a JobID.").__(); return; } JobId jobID = MRApps.toJobID(jid); Job j = appContext.getJob(jobID); if (j == null) { html.p().__("Sorry, ", jid, " not found.").__(); return; } if(j instanceof UnparsedJob) { final int taskCount = j.getTotalMaps() + j.getTotalReduces(); UnparsedJob oversizedJob = (UnparsedJob) j; html.p().__("The job has a total of " + taskCount + " tasks. ") .__("Any job larger than " + oversizedJob.getMaxTasksAllowed() + " will not be loaded.").__(); html.p().__("You can either use the CLI tool: 'mapred job -history'" + " to view large jobs or adjust the property " + JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX + ".").__(); return; } List<AMInfo> amInfos = j.getAMInfos(); JobInfo job = new JobInfo(j); ResponseInfo infoBlock = info("Job Overview"). __("Job Name:", job.getName()). __("User Name:", job.getUserName()). __("Queue:", job.getQueueName()). __("State:", job.getState()). __("Uberized:", job.isUber()). __("Submitted:", new Date(job.getSubmitTime())). __("Started:", job.getStartTimeStr()). __("Finished:", new Date(job.getFinishTime())). __("Elapsed:", StringUtils.formatTime( Times.elapsed(job.getStartTime(), job.getFinishTime(), false))); String amString = amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters"; // todo - switch to use JobInfo List<String> diagnostics = j.getDiagnostics(); if(diagnostics != null && !diagnostics.isEmpty()) { StringBuilder b = new StringBuilder(); for(String diag: diagnostics) { b.append(addTaskLinks(diag)); } infoBlock._r("Diagnostics:", b.toString()); } if(job.getNumMaps() > 0) { infoBlock.__("Average Map Time", StringUtils.formatTime(job.getAvgMapTime())); } if(job.getNumReduces() > 0) { infoBlock.__("Average Shuffle Time", StringUtils.formatTime(job.getAvgShuffleTime())); infoBlock.__("Average Merge Time", StringUtils.formatTime(job.getAvgMergeTime())); infoBlock.__("Average Reduce Time", StringUtils.formatTime(job.getAvgReduceTime())); } for (ConfEntryInfo entry : job.getAcls()) { infoBlock.__("ACL "+entry.getName()+":", entry.getValue()); } DIV<Hamlet> div = html. __(InfoBlock.class). div(_INFO_WRAP); // MRAppMasters Table TABLE<DIV<Hamlet>> table = div.table("#job"); table. tr(). th(amString). __(). tr(). th(_TH, "Attempt Number"). th(_TH, "Start Time"). th(_TH, "Node"). th(_TH, "Logs"). __(); boolean odd = false; for (AMInfo amInfo : amInfos) { AMAttemptInfo attempt = new AMAttemptInfo(amInfo, job.getId(), job.getUserName(), "", ""); table.tr((odd = !odd) ? _ODD : _EVEN). td(String.valueOf(attempt.getAttemptId())). td(new Date(attempt.getStartTime()).toString()). td().a(".nodelink", url(MRWebAppUtil.getYARNWebappScheme(), attempt.getNodeHttpAddress()), attempt.getNodeHttpAddress()).__(). td().a(".logslink", url(attempt.getLogsLink()), "logs").__(). __(); } table.__(); div.__(); html.div(_INFO_WRAP). // Tasks table table("#job"). tr(). th(_TH, "Task Type"). th(_TH, "Total"). th(_TH, "Complete").__(). tr(_ODD). th(). a(url("tasks", jid, "m"), "Map").__(). td(String.valueOf(String.valueOf(job.getMapsTotal()))). td(String.valueOf(String.valueOf(job.getMapsCompleted()))).__(). tr(_EVEN). th(). a(url("tasks", jid, "r"), "Reduce").__(). td(String.valueOf(String.valueOf(job.getReducesTotal()))). td(String.valueOf(String.valueOf(job.getReducesCompleted()))).__() .__(). // Attempts table table("#job"). tr(). th(_TH, "Attempt Type"). th(_TH, "Failed"). th(_TH, "Killed"). th(_TH, "Successful").__(). 
tr(_ODD). th("Maps"). td().a(url("attempts", jid, "m", TaskAttemptStateUI.FAILED.toString()), String.valueOf(job.getFailedMapAttempts())).__(). td().a(url("attempts", jid, "m", TaskAttemptStateUI.KILLED.toString()), String.valueOf(job.getKilledMapAttempts())).__(). td().a(url("attempts", jid, "m", TaskAttemptStateUI.SUCCESSFUL.toString()), String.valueOf(job.getSuccessfulMapAttempts())).__(). __(). tr(_EVEN). th("Reduces"). td().a(url("attempts", jid, "r", TaskAttemptStateUI.FAILED.toString()), String.valueOf(job.getFailedReduceAttempts())).__(). td().a(url("attempts", jid, "r", TaskAttemptStateUI.KILLED.toString()), String.valueOf(job.getKilledReduceAttempts())).__(). td().a(url("attempts", jid, "r", TaskAttemptStateUI.SUCCESSFUL.toString()), String.valueOf(job.getSuccessfulReduceAttempts())).__(). __(). __(). __(); }
@Test public void testHsJobBlockForNormalSizeJobShouldNotDisplayWarningMessage() { Configuration config = new Configuration(); config.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, -1); JobHistory jobHistory = new JobHitoryStubWithAllNormalSizeJobs(); jobHistory.init(config); HsJobBlock jobBlock = new HsJobBlock(jobHistory) { // override this so that the job block can fetch a job id. @Override public Map<String, String> moreParams() { Map<String, String> map = new HashMap<>(); map.put(AMParams.JOB_ID, "job_0000_0001"); return map; } // override this to avoid view context lookup in render() @Override public ResponseInfo info(String about) { return new ResponseInfo().about(about); } // override this to avoid view context lookup in render() @Override public String url(String... parts) { return StringHelper.ujoin("", parts); } }; // set up the test block to render HsJobBLock to OutputStream outputStream = new ByteArrayOutputStream(); HtmlBlock.Block block = createBlockToCreateTo(outputStream); jobBlock.render(block); block.getWriter().flush(); String out = outputStream.toString(); Assert.assertTrue("Should display job overview for the job.", out.contains("ApplicationMaster")); }
public static <K, V> MutableMultimap<K, V> groupBy( Iterable<V> iterable, Function<? super V, ? extends K> function) { return FJIterate.groupBy(iterable, function, FJIterate.DEFAULT_MIN_FORK_SIZE, FJIterate.FORK_JOIN_POOL); }
@Test public void groupByWithInterval() { LazyIterable<Integer> iterable = Interval.oneTo(1000).concatenate(Interval.oneTo(1000)).concatenate(Interval.oneTo(1000)); Multimap<String, Integer> expected = iterable.toBag().groupBy(Functions.getToString()); Multimap<String, Integer> expectedAsSet = iterable.toSet().groupBy(Functions.getToString()); Multimap<String, Integer> result1 = FJIterate.groupBy(iterable.toList(), Functions.getToString(), 100); Multimap<String, Integer> result2 = FJIterate.groupBy(iterable.toList(), Functions.getToString()); Multimap<String, Integer> result3 = FJIterate.groupBy(iterable.toSet(), Functions.getToString(), SynchronizedPutUnifiedSetMultimap.newMultimap(), 100); Multimap<String, Integer> result4 = FJIterate.groupBy(iterable.toSet(), Functions.getToString(), SynchronizedPutUnifiedSetMultimap.newMultimap()); Multimap<String, Integer> result5 = FJIterate.groupBy(iterable.toSortedSet(), Functions.getToString(), SynchronizedPutUnifiedSetMultimap.newMultimap(), 100); Multimap<String, Integer> result6 = FJIterate.groupBy(iterable.toSortedSet(), Functions.getToString(), SynchronizedPutUnifiedSetMultimap.newMultimap()); Multimap<String, Integer> result7 = FJIterate.groupBy(iterable.toBag(), Functions.getToString(), SynchronizedPutHashBagMultimap.newMultimap(), 100); Multimap<String, Integer> result8 = FJIterate.groupBy(iterable.toBag(), Functions.getToString(), SynchronizedPutHashBagMultimap.newMultimap()); Multimap<String, Integer> result9 = FJIterate.groupBy(iterable.toList().toImmutable(), Functions.getToString()); assertEquals(expected, HashBagMultimap.newMultimap(result1)); assertEquals(expected, HashBagMultimap.newMultimap(result2)); assertEquals(expected, HashBagMultimap.newMultimap(result9)); assertEquals(expectedAsSet, result3); assertEquals(expectedAsSet, result4); assertEquals(expectedAsSet, result5); assertEquals(expectedAsSet, result6); assertEquals(expected, result7); assertEquals(expected, result8); }
@ProtoFactory
public static MediaType fromString(String tree) {
  if (tree == null || tree.isEmpty()) throw CONTAINER.missingMediaType();
  Matcher matcher = TREE_PATTERN.matcher(tree);
  return parseSingleMediaType(tree, matcher, false);
}
@Test(expected = EncodingException.class)
public void testParsingWhitespaceInParamName() {
  MediaType.fromString("application/json; charset =utf-8");
}
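For contrast with the rejected input above, a sketch of a well-formed call, assuming this is Infinispan's org.infinispan.commons.dataconversion.MediaType; the exact toString rendering is an assumption.

import org.infinispan.commons.dataconversion.MediaType;

public class MediaTypeParseSketch {
  public static void main(String[] args) {
    // Without the stray space before '=', the same string parses cleanly.
    MediaType json = MediaType.fromString("application/json; charset=utf-8");
    System.out.println(json); // roughly: application/json; charset=utf-8
  }
}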
public FEELFnResult<BigDecimal> invoke(@ParameterName("n") BigDecimal n) {
  return invoke(n, BigDecimal.ZERO);
}
@Test
void invokeOutRangeScale() {
  FunctionTestUtil.assertResultError(
      floorFunction.invoke(BigDecimal.valueOf(1.5), BigDecimal.valueOf(6177)),
      InvalidParametersEvent.class);
  FunctionTestUtil.assertResultError(
      floorFunction.invoke(BigDecimal.valueOf(1.5), BigDecimal.valueOf(-6122)),
      InvalidParametersEvent.class);
}
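The out-of-range values in the test suggest the scale must stay inside the FEEL-defined window (the DMN spec uses [-6111, 6176] for the scaled numeric builtins). A sketch of an in-range call; the INSTANCE field and the cata() fold follow the usual KIE FEEL function layout and should be treated as assumptions.

import java.math.BigDecimal;

import org.kie.dmn.feel.runtime.functions.FEELFnResult;
import org.kie.dmn.feel.runtime.functions.FloorFunction;

public class FloorScaleSketch {
  public static void main(String[] args) {
    FloorFunction floor = FloorFunction.INSTANCE;
    // The one-argument overload above defaults the scale to ZERO.
    FEELFnResult<BigDecimal> ok = floor.invoke(BigDecimal.valueOf(1.5));
    // cata() folds the result into either the error or the success branch.
    String rendered = ok.cata(
        event -> "error: " + event.getMessage(),
        BigDecimal::toPlainString);
    System.out.println(rendered); // 1
  }
}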
@Override
public void execute(final List<String> args, final PrintWriter terminal) {
  CliCmdUtil.ensureArgCountBounds(args, 1, 1, HELP);

  final String filePath = args.get(0);
  final String content = loadScript(filePath);
  requestExecutor.makeKsqlRequest(content);
}
@Test
public void shouldThrowIfFileDoesNotExist() {
  // When:
  final Exception e = assertThrows(
      KsqlException.class,
      () -> cmd.execute(ImmutableList.of("you-will-not-find-me"), terminal)
  );

  // Then:
  assertThat(e.getMessage(), containsString(
      "Failed to read file: you-will-not-find-me"));
  assertThat(e.getCause(), instanceOf(NoSuchFileException.class));
}
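A sketch of the happy path for the command above: write a script to a temporary file, then hand its path to execute(). RunScriptCommand is a stand-in name for the command class, since only the method body is shown here.

import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class RunScriptSketch {
  // Hypothetical driver; RunScriptCommand stands in for the real command class.
  public static void run(RunScriptCommand cmd, PrintWriter terminal) throws Exception {
    Path script = Files.createTempFile("statements", ".sql");
    Files.writeString(script, "SHOW STREAMS;");
    // execute() loads the whole file and forwards its content as one ksql request.
    cmd.execute(List.of(script.toString()), terminal);
  }
}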
public static String encode(byte[] data) {
  return Base58Codec.INSTANCE.encode(data);
}
@Test
public void encodeTest() {
  String a = "hello world";
  String encode = Base58.encode(a.getBytes(StandardCharsets.UTF_8));
  assertEquals("StV1DL6CwTryKyV", encode);
}
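A round-trip sketch for the encoder above. Base58.decode is assumed to exist as the inverse, which holds for Hutool's codec package, where this method appears to come from.

import java.nio.charset.StandardCharsets;

import cn.hutool.core.codec.Base58;

public class Base58RoundTrip {
  public static void main(String[] args) {
    byte[] data = "hello world".getBytes(StandardCharsets.UTF_8);
    String encoded = Base58.encode(data);    // StV1DL6CwTryKyV
    byte[] decoded = Base58.decode(encoded); // back to the original bytes
    System.out.println(new String(decoded, StandardCharsets.UTF_8)); // hello world
  }
}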
public LogicalSchema resolve(final ExecutionStep<?> step, final LogicalSchema schema) {
  return Optional.ofNullable(HANDLERS.get(step.getClass()))
      .map(h -> h.handle(this, schema, step))
      .orElseThrow(() -> new IllegalStateException(
          "Unhandled step class: " + step.getClass()));
}
@Test
public void shouldResolveSchemaForTableSelectKey() {
  // Given:
  final UnqualifiedColumnReferenceExp keyExpression1 =
      new UnqualifiedColumnReferenceExp(ColumnName.of("ORANGE"));
  final UnqualifiedColumnReferenceExp keyExpression2 =
      new UnqualifiedColumnReferenceExp(ColumnName.of("APPLE"));
  final TableSelectKey<GenericKey> step = new TableSelectKey<>(
      PROPERTIES,
      tableSource,
      formats,
      ImmutableList.of(keyExpression1, keyExpression2)
  );

  // When:
  final LogicalSchema result = resolver.resolve(step, SCHEMA);

  // Then:
  assertThat(result, is(LogicalSchema.builder()
      .keyColumn(keyExpression1.getColumnName(), SqlTypes.INTEGER)
      .keyColumn(keyExpression2.getColumnName(), SqlTypes.BIGINT)
      .valueColumns(SCHEMA.value())
      .build()
  ));
}
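The resolver above is a classic class-keyed dispatch table: look the step's runtime class up in a handler map, apply the handler, and fail loudly on an unknown class. A stripped-down, self-contained sketch of the pattern using only the JDK; all names here are hypothetical.

import java.util.Map;
import java.util.Optional;
import java.util.function.UnaryOperator;

public class DispatchSketch {
  // Map each step class to the function that rewrites the schema for it.
  private static final Map<Class<?>, UnaryOperator<String>> HANDLERS = Map.of(
      Integer.class, s -> s + "+int",
      Long.class, s -> s + "+long");

  public static String resolve(Object step, String schema) {
    return Optional.ofNullable(HANDLERS.get(step.getClass()))
        .map(h -> h.apply(schema))
        .orElseThrow(() -> new IllegalStateException(
            "Unhandled step class: " + step.getClass()));
  }

  public static void main(String[] args) {
    System.out.println(resolve(1, "base"));  // base+int
    System.out.println(resolve(1L, "base")); // base+long
  }
}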
public static <T> PCollections<T> pCollections() {
  return new PCollections<>();
}
@Test
@Category(ValidatesRunner.class)
public void testFlattenPCollectionsEmpty() {
  PCollection<String> output =
      PCollectionList.<String>empty(p)
          .apply(Flatten.pCollections())
          .setCoder(StringUtf8Coder.of());

  PAssert.that(output).empty();
  p.run();
}
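A non-empty counterpart to the empty-list test above, using the standard Beam API; it assumes the direct runner is on the classpath so Pipeline.create() can execute locally.

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionList;

public class FlattenNonEmptySketch {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create();
    PCollection<String> first = p.apply("First", Create.of("one", "two"));
    PCollection<String> second = p.apply("Second", Create.of("three"));
    // Flatten merges every collection in the list into a single PCollection.
    PCollection<String> merged =
        PCollectionList.of(first).and(second).apply(Flatten.pCollections());
    p.run().waitUntilFinish();
  }
}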
@Udf(description = "Returns the inverse (arc) sine of an INT value")
public Double asin(
    @UdfParameter(
        value = "value",
        description = "The value to get the inverse sine of."
    ) final Integer value
) {
  return asin(value == null ? null : value.doubleValue());
}
@Test
public void shouldHandleZero() {
  assertThat(udf.asin(0.0), closeTo(0.0, 0.000000000000001));
  assertThat(udf.asin(0), closeTo(0.0, 0.000000000000001));
  assertThat(udf.asin(0L), closeTo(0.0, 0.000000000000001));
}
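A sketch of the edge cases the integer overload above implies: null propagates (assuming the Double overload null-checks, as the integer overload's forwarding suggests), and out-of-domain inputs come back NaN if the Double overload delegates to plain Math.asin. The Asin class name is an assumption.

public class AsinEdgeCasesSketch {
  public static void main(String[] args) {
    Asin udf = new Asin(); // assumed UDF class name
    System.out.println(udf.asin(1));              // ~1.5707963 (pi/2)
    System.out.println(udf.asin((Integer) null)); // null propagates through
    System.out.println(udf.asin(2.0));            // NaN: outside [-1, 1]
  }
}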
public static Table loadTable(Configuration conf) {
  return loadTable(
      conf,
      conf.get(InputFormatConfig.TABLE_IDENTIFIER),
      conf.get(InputFormatConfig.TABLE_LOCATION),
      conf.get(InputFormatConfig.CATALOG_NAME));
}
@Test
public void testLoadTableFromLocation() throws IOException {
  conf.set(CatalogUtil.ICEBERG_CATALOG_TYPE, Catalogs.LOCATION);

  assertThatThrownBy(() -> Catalogs.loadTable(conf))
      .isInstanceOf(IllegalArgumentException.class)
      .hasMessage("Table location not set");

  HadoopTables tables = new HadoopTables();
  Table hadoopTable = tables.create(SCHEMA, temp.resolve("hadoop_tables").toString());

  conf.set(InputFormatConfig.TABLE_LOCATION, hadoopTable.location());

  assertThat(Catalogs.loadTable(conf).location()).isEqualTo(hadoopTable.location());
}
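A configuration sketch mirroring the happy path of the test above: select the location-based (HadoopTables) catalog, point the Hadoop Configuration at an existing table directory, then load. The package names for Catalogs and InputFormatConfig are assumptions based on Iceberg's MR module.

import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.Table;
import org.apache.iceberg.mr.Catalogs;
import org.apache.iceberg.mr.InputFormatConfig;

public class LoadTableByLocationSketch {
  public static Table load(Configuration conf, String tableLocation) {
    // Select the location-based catalog, then point it at the table path.
    conf.set(CatalogUtil.ICEBERG_CATALOG_TYPE, Catalogs.LOCATION);
    conf.set(InputFormatConfig.TABLE_LOCATION, tableLocation);
    return Catalogs.loadTable(conf);
  }
}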
@SuppressWarnings("unchecked") public String fileLink(String path) throws IOException, InvalidTokenException { String url; try { url = getUriBuilder() .setPath(API_PATH_PREFIX + "/mounts/primary/files/download") .setParameter("path", path) .build() .toString(); } catch (URISyntaxException e) { throw new IllegalStateException("Could not produce url.", e); } Request.Builder requestBuilder = getRequestBuilder(url); try (Response response = getResponse(requestBuilder)) { int code = response.code(); ResponseBody body = response.body(); if (code < 200 || code > 299) { throw new KoofrClientIOException(response); } Map<String, Object> responseData = objectMapper.readValue(body.bytes(), Map.class); return (String) responseData.get("link"); } }
@Test
public void testFileLink() throws Exception {
  server.enqueue(
      new MockResponse()
          .setResponseCode(200)
          .setHeader("Content-Type", "application/json")
          .setBody(
              "{\"link\":\"https://app-1.koofr.net/content/files/get/Video+1.mp4?base=TESTBASE\"}"));

  String link = client.fileLink("/Data transfer/Videos/Video 1.mp4");

  assertEquals(
      "https://app-1.koofr.net/content/files/get/Video+1.mp4?base=TESTBASE", link);
}
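An error-path sketch complementing the test above: any status outside 200-299 should surface as KoofrClientIOException, per the status check in fileLink. The KoofrClient type and the MockWebServer wiring mirror the test's fields and are assumptions here.

import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;

public class FileLinkErrorSketch {
  // Assumes the server is already wired to the client, as in the test above.
  public static void expectErrorStatus(MockWebServer server, KoofrClient client)
      throws Exception {
    server.enqueue(new MockResponse().setResponseCode(404));
    try {
      client.fileLink("/Data transfer/Videos/Missing.mp4");
    } catch (KoofrClientIOException expected) {
      // Any status outside 200-299 trips this branch in fileLink().
    }
  }
}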