focal_method (string, length 13–60.9k) | test_case (string, length 25–109k)
---|---
public static String getInterfaceName(Invoker invoker) {
return getInterfaceName(invoker, false);
}
|
@Test
public void testGetInterfaceName() {
URL url = URL.valueOf("dubbo://127.0.0.1:2181")
.addParameter(CommonConstants.VERSION_KEY, "1.0.0")
.addParameter(CommonConstants.GROUP_KEY, "grp1")
.addParameter(CommonConstants.INTERFACE_KEY, DemoService.class.getName());
Invoker invoker = mock(Invoker.class);
when(invoker.getUrl()).thenReturn(url);
when(invoker.getInterface()).thenReturn(DemoService.class);
SentinelConfig.setConfig(DubboAdapterGlobalConfig.DUBBO_INTERFACE_GROUP_VERSION_ENABLED, "false");
assertEquals("com.alibaba.csp.sentinel.adapter.dubbo3.provider.DemoService", DubboUtils.getInterfaceName(invoker));
}
|
public static String getAppId(final String originalFilename) {
checkThreePart(originalFilename);
return getThreePart(originalFilename)[0];
}
|
@Test
public void getAppId() {
final String application = ConfigFileUtils.getAppId("application+default+application.properties");
assertEquals("application", application);
final String abc = ConfigFileUtils.getAppId("abc+default+application.yml");
assertEquals("abc", abc);
}
|
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
FunctionConfig mergedConfig = existingConfig.toBuilder().build();
if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
throw new IllegalArgumentException("Tenants differ");
}
if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
throw new IllegalArgumentException("Namespaces differ");
}
if (!existingConfig.getName().equals(newConfig.getName())) {
throw new IllegalArgumentException("Function Names differ");
}
if (!StringUtils.isEmpty(newConfig.getClassName())) {
mergedConfig.setClassName(newConfig.getClassName());
}
if (!StringUtils.isEmpty(newConfig.getJar())) {
mergedConfig.setJar(newConfig.getJar());
}
if (newConfig.getInputSpecs() == null) {
newConfig.setInputSpecs(new HashMap<>());
}
if (mergedConfig.getInputSpecs() == null) {
mergedConfig.setInputSpecs(new HashMap<>());
}
if (newConfig.getInputs() != null) {
newConfig.getInputs().forEach((topicName -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder().isRegexPattern(false).build());
}));
}
if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
ConsumerConfig.builder()
.isRegexPattern(true)
.build());
}
if (newConfig.getCustomSerdeInputs() != null) {
newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.serdeClassName(serdeClassName)
.isRegexPattern(false)
.build());
});
}
if (newConfig.getCustomSchemaInputs() != null) {
newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
newConfig.getInputSpecs().put(topicName,
ConsumerConfig.builder()
.schemaType(schemaClassname)
.isRegexPattern(false)
.build());
});
}
if (!newConfig.getInputSpecs().isEmpty()) {
newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
if (!existingConfig.getInputSpecs().containsKey(topicName)) {
throw new IllegalArgumentException("Input Topics cannot be altered");
}
if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
throw new IllegalArgumentException(
"isRegexPattern for input topic " + topicName + " cannot be altered");
}
mergedConfig.getInputSpecs().put(topicName, consumerConfig);
});
}
if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
.equals(existingConfig.getOutputSerdeClassName())) {
throw new IllegalArgumentException("Output Serde mismatch");
}
if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
.equals(existingConfig.getOutputSchemaType())) {
throw new IllegalArgumentException("Output Schema mismatch");
}
if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
mergedConfig.setLogTopic(newConfig.getLogTopic());
}
if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
.equals(existingConfig.getProcessingGuarantees())) {
throw new IllegalArgumentException("Processing Guarantees cannot be altered");
}
if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
.equals(existingConfig.getRetainOrdering())) {
throw new IllegalArgumentException("Retain Ordering cannot be altered");
}
if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
.equals(existingConfig.getRetainKeyOrdering())) {
throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
}
if (!StringUtils.isEmpty(newConfig.getOutput())) {
mergedConfig.setOutput(newConfig.getOutput());
}
if (newConfig.getUserConfig() != null) {
mergedConfig.setUserConfig(newConfig.getUserConfig());
}
if (newConfig.getSecrets() != null) {
mergedConfig.setSecrets(newConfig.getSecrets());
}
if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
throw new IllegalArgumentException("Runtime cannot be altered");
}
if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
throw new IllegalArgumentException("AutoAck cannot be altered");
}
if (newConfig.getMaxMessageRetries() != null) {
mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
}
if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
}
if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
.equals(existingConfig.getSubName())) {
throw new IllegalArgumentException("Subscription Name cannot be altered");
}
if (newConfig.getParallelism() != null) {
mergedConfig.setParallelism(newConfig.getParallelism());
}
if (newConfig.getResources() != null) {
mergedConfig
.setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
}
if (newConfig.getWindowConfig() != null) {
mergedConfig.setWindowConfig(newConfig.getWindowConfig());
}
if (newConfig.getTimeoutMs() != null) {
mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
}
if (newConfig.getCleanupSubscription() != null) {
mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
}
if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
}
if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
}
if (newConfig.getProducerConfig() != null) {
mergedConfig.setProducerConfig(newConfig.getProducerConfig());
}
return mergedConfig;
}
|
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "Retain Ordering cannot be altered")
public void testMergeDifferentRetainOrdering() {
FunctionConfig functionConfig = createFunctionConfig();
FunctionConfig newFunctionConfig = createUpdatedFunctionConfig("retainOrdering", true);
FunctionConfigUtils.validateUpdate(functionConfig, newFunctionConfig);
}
|
@Override
public List<String> getRegisterData(final String key) {
try {
KV kvClient = etcdClient.getKVClient();
GetOption option = GetOption.newBuilder().isPrefix(true).build();
GetResponse response = kvClient.get(bytesOf(key), option).get();
return response.getKvs().stream()
.filter(o -> !o.getKey().equals(ByteSequence.from(key, StandardCharsets.UTF_8)))
.map(kv -> kv.getValue().toString(StandardCharsets.UTF_8))
.collect(Collectors.toList());
} catch (Exception e) {
LOGGER.error("etcd client get registered data with key: {} error", key, e);
throw new ShenyuException(e);
}
}
|
@Test
void testGetRegisterData() throws InterruptedException, ExecutionException {
final String key = "key";
final KV kv = mock(KV.class);
when(etcdClient.getKVClient()).thenReturn(kv);
final GetResponse getResponse = mock(GetResponse.class);
when(getResponse.getKvs()).thenReturn(Collections.emptyList());
final CompletableFuture<GetResponse> completableFuture = mock(CompletableFuture.class);
when(completableFuture.get()).thenReturn(getResponse);
when(kv.get(any(ByteSequence.class), any(GetOption.class))).thenReturn(completableFuture);
assertDoesNotThrow(() -> etcdDiscoveryServiceUnderTest.getRegisterData(key));
doThrow(new InterruptedException("test")).when(completableFuture).get();
assertThrows(ShenyuException.class, () -> etcdDiscoveryServiceUnderTest.getRegisterData(key));
}
|
@Override
public void onApplicationEvent(WebServerInitializedEvent event) {
String serverNamespace = event.getApplicationContext().getServerNamespace();
if (SPRING_MANAGEMENT_CONTEXT_NAMESPACE.equals(serverNamespace)) {
// ignore
// fix#issue https://github.com/alibaba/nacos/issues/7230
return;
}
getSelf().setState(NodeState.UP);
if (!EnvUtil.getStandaloneMode()) {
GlobalExecutor.scheduleByCommon(this.infoReportTask, DEFAULT_TASK_DELAY_TIME);
GlobalExecutor.scheduleByCommon(this.unhealthyMemberInfoReportTask, DEFAULT_TASK_DELAY_TIME);
}
EnvUtil.setPort(event.getWebServer().getPort());
EnvUtil.setLocalAddress(this.localAddress);
Loggers.CLUSTER.info("This node is ready to provide external services");
}
|
@Test
void testEnvSetPort() {
ServletWebServerApplicationContext context = new ServletWebServerApplicationContext();
context.setServerNamespace("management");
Mockito.when(mockEvent.getApplicationContext()).thenReturn(context);
serverMemberManager.onApplicationEvent(mockEvent);
int port = EnvUtil.getPort();
assertEquals(8848, port);
}
|
public List<ContainerLogMeta> collect(
LogAggregationFileController fileController) throws IOException {
List<ContainerLogMeta> containersLogMeta = new ArrayList<>();
RemoteIterator<FileStatus> appDirs = fileController.
getApplicationDirectoriesOfUser(logsRequest.getUser());
while (appDirs.hasNext()) {
FileStatus currentAppDir = appDirs.next();
if (logsRequest.getAppId() == null ||
logsRequest.getAppId().equals(currentAppDir.getPath().getName())) {
ApplicationId appId = ApplicationId.fromString(
currentAppDir.getPath().getName());
RemoteIterator<FileStatus> nodeFiles = fileController
.getNodeFilesOfApplicationDirectory(currentAppDir);
while (nodeFiles.hasNext()) {
FileStatus currentNodeFile = nodeFiles.next();
if (!logsRequest.getNodeId().match(currentNodeFile.getPath()
.getName())) {
continue;
}
if (currentNodeFile.getPath().getName().equals(
logsRequest.getAppId() + ".har")) {
Path p = new Path("har:///"
+ currentNodeFile.getPath().toUri().getRawPath());
nodeFiles = HarFs.get(p.toUri(), conf).listStatusIterator(p);
continue;
}
try {
Map<String, List<ContainerLogFileInfo>> metaFiles = fileController
.getLogMetaFilesOfNode(logsRequest, currentNodeFile, appId);
if (metaFiles == null) {
continue;
}
metaFiles.entrySet().removeIf(entry ->
!(logsRequest.getContainerId() == null ||
logsRequest.getContainerId().equals(entry.getKey())));
containersLogMeta.addAll(createContainerLogMetas(
currentNodeFile.getPath().getName(), metaFiles));
} catch (IOException ioe) {
LOG.warn("Can not get log meta from the log file:"
+ currentNodeFile.getPath() + "\n" + ioe.getMessage());
}
}
}
}
return containersLogMeta;
}
|
@Test
void testSingleNodeRequest() throws IOException {
ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder request =
new ExtendedLogMetaRequest.ExtendedLogMetaRequestBuilder();
request.setAppId(null);
request.setContainerId(null);
request.setFileName(null);
request.setFileSize(null);
request.setModificationTime(null);
request.setNodeId(TEST_NODE);
request.setUser(null);
LogAggregationMetaCollector collector = new LogAggregationMetaCollector(
request.build(), new YarnConfiguration());
List<ContainerLogMeta> res = collector.collect(fileController);
List<ContainerLogFileInfo> allFile = res.stream()
.flatMap(m -> m.getContainerLogMeta().stream())
.collect(Collectors.toList());
assertEquals(4, allFile.stream().
filter(f -> f.getFileName().contains(TEST_NODE)).count());
}
|
@VisibleForTesting
void recover() {
try (DbSession dbSession = dbClient.openSession(false)) {
Profiler profiler = Profiler.create(LOGGER).start();
long beforeDate = system2.now() - minAgeInMs;
IndexingResult result = new IndexingResult();
Collection<EsQueueDto> items = dbClient.esQueueDao().selectForRecovery(dbSession, beforeDate, loopLimit);
while (!items.isEmpty()) {
IndexingResult loopResult = new IndexingResult();
groupItemsByDocType(items).asMap().forEach((type, typeItems) -> loopResult.add(doIndex(dbSession, type, typeItems)));
result.add(loopResult);
if (loopResult.getSuccessRatio() <= CIRCUIT_BREAKER_IN_PERCENT) {
LOGGER.error(LOG_PREFIX + "too many failures [{}/{} documents], waiting for next run", loopResult.getFailures(), loopResult.getTotal());
break;
}
if (loopResult.getTotal() == 0L) {
break;
}
items = dbClient.esQueueDao().selectForRecovery(dbSession, beforeDate, loopLimit);
}
if (result.getTotal() > 0L) {
profiler.stopInfo(LOG_PREFIX + format("%d documents processed [%d failures]", result.getTotal(), result.getFailures()));
}
} catch (Throwable t) {
LOGGER.error(LOG_PREFIX + "fail to recover documents", t);
}
}
|
@Test
public void recent_records_are_not_recovered() {
EsQueueDto item = insertItem(FOO_TYPE, "f1");
SuccessfulFakeIndexer indexer = new SuccessfulFakeIndexer(FOO_TYPE);
// do not advance in time
underTest = newRecoveryIndexer(indexer);
underTest.recover();
assertThatQueueHasSize(1);
assertThat(indexer.called).isEmpty();
assertThatLogsDoNotContain(TRACE, "Elasticsearch recovery - processing 2 [foos/foo]");
assertThatLogsDoNotContain(INFO, "documents processed");
}
|
public int getNumServersQueried() {
// Lazily load the field from the JsonNode to avoid reading the stats when not needed.
return _brokerResponse.has(NUM_SERVERS_QUERIED) ? _brokerResponse.get(NUM_SERVERS_QUERIED).asInt() : -1;
}
|
@Test
public void testGetNumServersQueried() {
// Run the test
final int result = _executionStatsUnderTest.getNumServersQueried();
// Verify the results
assertEquals(10, result);
}
|
public boolean hasRequiredCoordinators()
{
return currentCoordinatorCount >= coordinatorMinCountActive;
}
|
@Test
public void testHasRequiredCoordinators()
throws InterruptedException
{
assertFalse(monitor.hasRequiredCoordinators());
for (int i = numResourceManagers.get(); i < DESIRED_COORDINATOR_COUNT_ACTIVE; i++) {
addCoordinator(nodeManager);
}
assertTrue(monitor.hasRequiredCoordinators());
}
|
@Override
public ProcNodeInterface lookup(String jobIdStr) throws AnalysisException {
throw new AnalysisException("Not support");
}
|
@Test(expected = AnalysisException.class)
public void testLookup() throws AnalysisException {
optimizeProcDir.lookup("");
}
|
public static GenericData get() {
return INSTANCE;
}
|
@Test
void arrayRemove() {
Schema schema = Schema.createArray(Schema.create(Schema.Type.INT));
GenericArray<Integer> array = new GenericData.Array<>(10, schema);
array.clear();
for (int i = 0; i < 10; ++i)
array.add(i);
assertEquals(10, array.size());
assertEquals(Integer.valueOf(0), array.get(0));
assertEquals(Integer.valueOf(9), array.get(9));
array.remove(0);
assertEquals(9, array.size());
assertEquals(Integer.valueOf(1), array.get(0));
assertEquals(Integer.valueOf(2), array.get(1));
assertEquals(Integer.valueOf(9), array.get(8));
// Test boundary errors.
try {
array.get(9);
fail("Expected IndexOutOfBoundsException after removing an element");
} catch (IndexOutOfBoundsException e) {
}
try {
array.set(9, 99);
fail("Expected IndexOutOfBoundsException after removing an element");
} catch (IndexOutOfBoundsException e) {
}
try {
array.remove(9);
fail("Expected IndexOutOfBoundsException after removing an element");
} catch (IndexOutOfBoundsException e) {
}
// Test that we can still remove for properly sized arrays, and the rval
assertEquals(Integer.valueOf(9), array.remove(8));
assertEquals(8, array.size());
// Test insertion after remove
array.add(88);
assertEquals(Integer.valueOf(88), array.get(8));
}
|
public JwtBuilder jwtBuilder() {
return new JwtBuilder();
}
|
@Test
void testParseWith48Key() {
NacosJwtParser parser = new NacosJwtParser(encode("SecretKey012345678901234567890120124568aa9012345"));
String token = parser.jwtBuilder().setUserName("nacos").setExpiredTime(100L).compact();
assertTrue(token.startsWith(NacosSignatureAlgorithm.HS384.getHeader()));
}
|
public TreeCache start() throws Exception {
Preconditions.checkState(treeState.compareAndSet(TreeState.LATENT, TreeState.STARTED), "already started");
if (createParentNodes) {
client.createContainers(root.path);
}
client.getConnectionStateListenable().addListener(connectionStateListener);
if (client.getZookeeperClient().isConnected()) {
root.wasCreated();
}
return this;
}
|
@Test
public void testStartEmpty() throws Exception {
cache = newTreeCacheWithListeners(client, "/test");
cache.start();
assertEvent(TreeCacheEvent.Type.INITIALIZED);
client.create().forPath("/test");
assertEvent(TreeCacheEvent.Type.NODE_ADDED, "/test");
assertNoMoreEvents();
}
|
@Override
public Future<?> schedule(Executor executor, Runnable command, long delay, TimeUnit unit) {
requireNonNull(executor);
requireNonNull(command);
requireNonNull(unit);
if (scheduledExecutorService.isShutdown()) {
return DisabledFuture.INSTANCE;
}
return scheduledExecutorService.schedule(() -> {
try {
executor.execute(command);
} catch (Throwable t) {
logger.log(Level.WARNING, "Exception thrown when submitting scheduled task", t);
throw t;
}
}, delay, unit);
}
|
@Test(dataProvider = "runnableSchedulers")
public void scheduler_exception(Scheduler scheduler) {
var thread = new AtomicReference<Thread>();
Executor executor = task -> {
thread.set(Thread.currentThread());
throw new IllegalStateException();
};
var future = scheduler.schedule(executor, () -> {}, 1L, TimeUnit.NANOSECONDS);
assertThat(future).isNotNull();
await().untilAtomic(thread, is(not(nullValue())));
if (thread.get() == Thread.currentThread()) {
assertThat(logEvents()
.withMessage("Exception thrown when submitting scheduled task")
.withThrowable(IllegalStateException.class)
.withLevel(WARN)
.exclusively())
.hasSize(1);
}
}
|
T call() throws IOException, RegistryException {
String apiRouteBase = "https://" + registryEndpointRequestProperties.getServerUrl() + "/v2/";
URL initialRequestUrl = registryEndpointProvider.getApiRoute(apiRouteBase);
return call(initialRequestUrl);
}
|
@Test
public void testCall_logErrorOnBrokenPipe() throws IOException, RegistryException {
IOException ioException = new IOException("this is due to broken pipe");
setUpRegistryResponse(ioException);
try {
endpointCaller.call();
Assert.fail();
} catch (IOException ex) {
Assert.assertSame(ioException, ex);
Mockito.verify(mockEventHandlers)
.dispatch(
LogEvent.error("\u001B[31;1mI/O error for image [serverUrl/imageName]:\u001B[0m"));
Mockito.verify(mockEventHandlers)
.dispatch(LogEvent.error("\u001B[31;1m java.io.IOException\u001B[0m"));
Mockito.verify(mockEventHandlers)
.dispatch(LogEvent.error("\u001B[31;1m this is due to broken pipe\u001B[0m"));
Mockito.verify(mockEventHandlers)
.dispatch(
LogEvent.error(
"\u001B[31;1mbroken pipe: the server shut down the connection. Check the server "
+ "log if possible. This could also be a proxy issue. For example, a proxy "
+ "may prevent sending packets that are too large.\u001B[0m"));
Mockito.verifyNoMoreInteractions(mockEventHandlers);
}
}
|
@Override
public String getDataSource() {
return DataSourceConstant.MYSQL;
}
|
@Test
void testGetDataSource() {
String dataSource = configTagsRelationMapperByMySql.getDataSource();
assertEquals(DataSourceConstant.MYSQL, dataSource);
}
|
public static <FnT extends DoFn<?, ?>> DoFnSignature getSignature(Class<FnT> fn) {
return signatureCache.computeIfAbsent(fn, DoFnSignatures::parseSignature);
}
|
@Test
public void testOnTimerDeclaredInSuperclass() throws Exception {
class DoFnDeclaringTimerAndProcessElement extends DoFn<KV<String, Integer>, Long> {
public static final String TIMER_ID = "my-timer-id";
@TimerId(TIMER_ID)
private final TimerSpec bizzle = TimerSpecs.timer(TimeDomain.EVENT_TIME);
@ProcessElement
public void foo(ProcessContext context) {}
}
DoFnDeclaringTimerAndProcessElement fn =
new DoFnDeclaringTimerAndProcessElement() {
@OnTimer(DoFnDeclaringTimerAndProcessElement.TIMER_ID)
public void onTimerFoo() {}
};
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Callback");
thrown.expectMessage("declared in a different class");
thrown.expectMessage(DoFnDeclaringTimerAndProcessElement.TIMER_ID);
thrown.expectMessage(fn.getClass().getSimpleName());
thrown.expectMessage(not(mentionsState()));
thrown.expectMessage(mentionsTimers());
DoFnSignatures.getSignature(fn.getClass());
}
|
public static void sort(short[] array, ShortComparator comparator) {
sort(array, 0, array.length, comparator);
}
|
@Test
void test_sorting_custom_comparator() {
short[] array = {4, 2, 5};
PrimitiveArraySorter.sort(array, (a, b) -> Short.compare(b, a)); // Sort using inverse ordering.
short[] expected = {5, 4, 2};
assertArrayEquals(expected, array);
}
|
@PUT
@Path("/{connector}/config")
@Operation(summary = "Create or reconfigure the specified connector")
public Response putConnectorConfig(final @PathParam("connector") String connector,
final @Context HttpHeaders headers,
final @Parameter(hidden = true) @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig) throws Throwable {
FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>();
checkAndPutConnectorConfigName(connector, connectorConfig);
herder.putConnectorConfig(connector, connectorConfig, true, cb);
Herder.Created<ConnectorInfo> createdInfo = requestHandler.completeOrForwardRequest(cb, "/connectors/" + connector + "/config",
"PUT", headers, connectorConfig, new TypeReference<ConnectorInfo>() { }, new CreatedConnectorInfoTranslator(), forward);
Response.ResponseBuilder response;
if (createdInfo.created()) {
URI location = UriBuilder.fromUri("/connectors").path(connector).build();
response = Response.created(location);
} else {
response = Response.ok();
}
return response.entity(createdInfo.result()).build();
}
|
@Test
public void testPutConnectorConfigWithSpecialCharsInName() throws Throwable {
final ArgumentCaptor<Callback<Herder.Created<ConnectorInfo>>> cb = ArgumentCaptor.forClass(Callback.class);
expectAndCallbackResult(cb, new Herder.Created<>(true, new ConnectorInfo(CONNECTOR_NAME_SPECIAL_CHARS, CONNECTOR_CONFIG_SPECIAL_CHARS, CONNECTOR_TASK_NAMES,
ConnectorType.SINK))
).when(herder).putConnectorConfig(eq(CONNECTOR_NAME_SPECIAL_CHARS), eq(CONNECTOR_CONFIG_SPECIAL_CHARS), eq(true), cb.capture());
String rspLocation = connectorsResource.putConnectorConfig(CONNECTOR_NAME_SPECIAL_CHARS, NULL_HEADERS, FORWARD, CONNECTOR_CONFIG_SPECIAL_CHARS).getLocation().toString();
String decoded = new URI(rspLocation).getPath();
assertEquals("/connectors/" + CONNECTOR_NAME_SPECIAL_CHARS, decoded);
}
|
public T getRecordingProxy()
{
return _templateProxy;
}
|
@Test
public void testMethodsInheritedFromObjectOnProxy()
{
PatchTreeRecorder<PatchTreeTestModel> pc = makeOne();
PatchTreeTestModel testModel = pc.getRecordingProxy();
Assert.assertEquals(testModel.hashCode(), testModel.hashCode());
Assert.assertNotNull(testModel.toString());
Assert.assertTrue(testModel.equals(testModel));
Assert.assertFalse(testModel.equals(new PatchTreeTestModel()));
}
|
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
JsonPrimitive other = (JsonPrimitive) obj;
if (value == null) {
return other.value == null;
}
if (isIntegral(this) && isIntegral(other)) {
return (this.value instanceof BigInteger || other.value instanceof BigInteger)
? this.getAsBigInteger().equals(other.getAsBigInteger())
: this.getAsNumber().longValue() == other.getAsNumber().longValue();
}
if (value instanceof Number && other.value instanceof Number) {
if (value instanceof BigDecimal && other.value instanceof BigDecimal) {
// Uses compareTo to ignore scale of values, e.g. `0` and `0.00` should be considered equal
return this.getAsBigDecimal().compareTo(other.getAsBigDecimal()) == 0;
}
double thisAsDouble = this.getAsDouble();
double otherAsDouble = other.getAsDouble();
// Don't use Double.compare(double, double) because that considers -0.0 and +0.0 not equal
return (thisAsDouble == otherAsDouble)
|| (Double.isNaN(thisAsDouble) && Double.isNaN(otherAsDouble));
}
return value.equals(other.value);
}
|
@Test
public void testBigDecimalEqualsTransitive() {
JsonPrimitive x = new JsonPrimitive(new BigDecimal("0"));
JsonPrimitive y = new JsonPrimitive(0.0d);
JsonPrimitive z = new JsonPrimitive(new BigDecimal("0.00"));
assertThat(x.equals(y)).isTrue();
assertThat(y.equals(z)).isTrue();
// ... implies
assertThat(x.equals(z)).isTrue();
}
|
public GenericRecord convert(String json, Schema schema) {
try {
Map<String, Object> jsonObjectMap = mapper.readValue(json, Map.class);
return convertJsonToAvro(jsonObjectMap, schema, shouldSanitize, invalidCharMask);
} catch (IOException e) {
throw new HoodieIOException(e.getMessage(), e);
}
}
|
@Test
public void basicConversion() throws IOException {
Schema simpleSchema = SchemaTestUtil.getSimpleSchema();
String name = "John Smith";
int number = 1337;
String color = "Blue. No yellow!";
Map<String, Object> data = new HashMap<>();
data.put("name", name);
data.put("favorite_number", number);
data.put("favorite_color", color);
String json = MAPPER.writeValueAsString(data);
GenericRecord rec = new GenericData.Record(simpleSchema);
rec.put("name", name);
rec.put("favorite_number", number);
rec.put("favorite_color", color);
Assertions.assertEquals(rec, CONVERTER.convert(json, simpleSchema));
}
|
public Coin getBlockInflation(int height) {
return Coin.FIFTY_COINS.shiftRight(height / getSubsidyDecreaseBlockCount());
}
|
@Test
public void getBlockInflation() {
assertEquals(Coin.FIFTY_COINS, BITCOIN_PARAMS.getBlockInflation(209998));
assertEquals(Coin.FIFTY_COINS, BITCOIN_PARAMS.getBlockInflation(209999));
assertEquals(Coin.FIFTY_COINS.div(2), BITCOIN_PARAMS.getBlockInflation(210000));
assertEquals(Coin.FIFTY_COINS.div(2), BITCOIN_PARAMS.getBlockInflation(210001));
}
|
@Override
public Optional<DatabaseAdminExecutor> create(final SQLStatementContext sqlStatementContext) {
SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
if (sqlStatement instanceof ShowFunctionStatusStatement) {
return Optional.of(new ShowFunctionStatusExecutor((ShowFunctionStatusStatement) sqlStatement));
}
if (sqlStatement instanceof ShowProcedureStatusStatement) {
return Optional.of(new ShowProcedureStatusExecutor((ShowProcedureStatusStatement) sqlStatement));
}
if (sqlStatement instanceof ShowTablesStatement) {
return Optional.of(new ShowTablesExecutor((ShowTablesStatement) sqlStatement, sqlStatementContext.getDatabaseType()));
}
return Optional.empty();
}
|
@Test
void assertCreateWithSelectStatementFromInformationSchemaOfSchemaTable() {
initProxyContext(Collections.emptyMap());
SimpleTableSegment tableSegment = new SimpleTableSegment(new TableNameSegment(10, 13, new IdentifierValue("SCHEMATA")));
tableSegment.setOwner(new OwnerSegment(7, 8, new IdentifierValue("information_schema")));
MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class);
when(selectStatement.getFrom()).thenReturn(Optional.of(tableSegment));
when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement);
Optional<DatabaseAdminExecutor> actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select SCHEMA_NAME from SCHEMATA", "information_schema", Collections.emptyList());
assertTrue(actual.isPresent());
assertThat(actual.get(), instanceOf(SelectInformationSchemataExecutor.class));
when(ProxyContext.getInstance().getContextManager().getDatabase("information_schema").isComplete()).thenReturn(true);
actual = new MySQLAdminExecutorCreator().create(sqlStatementContext, "select SCHEMA_NAME from SCHEMATA", "information_schema", Collections.emptyList());
assertFalse(actual.isPresent());
}
|
@Override
public Collection<String> getXADriverClassNames() {
return Collections.singletonList("com.microsoft.sqlserver.jdbc.SQLServerXADataSource");
}
|
@Test
void assertGetXADriverClassName() {
assertThat(new SQLServerXADataSourceDefinition().getXADriverClassNames(), is(Collections.singletonList("com.microsoft.sqlserver.jdbc.SQLServerXADataSource")));
}
|
public static String sanitizeUri(String uri) {
// use xxxxx as replacement as that works well with JMX also
String sanitized = uri;
if (uri != null) {
sanitized = ALL_SECRETS.matcher(sanitized).replaceAll("$1=xxxxxx");
sanitized = USERINFO_PASSWORD.matcher(sanitized).replaceFirst("$1xxxxxx$3");
}
return sanitized;
}
|
@Test
public void testSanitizeAccessToken() {
String out1 = URISupport
.sanitizeUri("google-sheets-stream://spreadsheets?accessToken=MY_TOKEN&clientId=foo&clientSecret=MY_SECRET");
assertEquals("google-sheets-stream://spreadsheets?accessToken=xxxxxx&clientId=xxxxxx&clientSecret=xxxxxx", out1);
}
|
public static MemberLookup createLookUp(ServerMemberManager memberManager) throws NacosException {
if (!EnvUtil.getStandaloneMode()) {
String lookupType = EnvUtil.getProperty(LOOKUP_MODE_TYPE);
LookupType type = chooseLookup(lookupType);
LOOK_UP = find(type);
currentLookupType = type;
} else {
LOOK_UP = new StandaloneMemberLookup();
}
LOOK_UP.injectMemberManager(memberManager);
Loggers.CLUSTER.info("Current addressing mode selection : {}", LOOK_UP.getClass().getSimpleName());
return LOOK_UP;
}
|
@Test
void createLookUpAddressServerMemberLookup() throws Exception {
EnvUtil.setIsStandalone(false);
mockEnvironment.setProperty(LOOKUP_MODE_TYPE, "address-server");
memberLookup = LookupFactory.createLookUp(memberManager);
assertEquals(AddressServerMemberLookup.class, memberLookup.getClass());
}
|
@Override
public boolean isQualified(final SQLStatementContext sqlStatementContext, final ReadwriteSplittingDataSourceGroupRule rule, final HintValueContext hintValueContext) {
return connectionContext.getTransactionContext().isInTransaction();
}
|
@Test
void assertWriteRouteTransaction() {
ConnectionContext connectionContext = mock(ConnectionContext.class);
TransactionConnectionContext transactionConnectionContext = mock(TransactionConnectionContext.class);
when(connectionContext.getTransactionContext()).thenReturn(transactionConnectionContext);
when(connectionContext.getTransactionContext().isInTransaction()).thenReturn(Boolean.TRUE);
assertTrue(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(connectionContext).isQualified(null, null, hintValueContext));
when(connectionContext.getTransactionContext().isInTransaction()).thenReturn(Boolean.FALSE);
assertFalse(new QualifiedReadwriteSplittingTransactionalDataSourceRouter(connectionContext).isQualified(null, null, hintValueContext));
}
|
public boolean delete(String eventDefinitionId) {
return doDelete(eventDefinitionId,
() -> eventDefinitionService.deleteUnregister(eventDefinitionId) > 0);
}
|
@Test
@MongoDBFixtures("event-processors.json")
public void delete() {
assertThat(eventDefinitionService.get("54e3deadbeefdeadbeef0000")).isPresent();
assertThat(jobDefinitionService.get("54e3deadbeefdeadbeef0001")).isPresent();
assertThat(jobTriggerService.get("54e3deadbeefdeadbeef0002")).isPresent();
assertThat(handler.delete("54e3deadbeefdeadbeef0000")).isTrue();
assertThat(eventDefinitionService.get("54e3deadbeefdeadbeef0000")).isNotPresent();
assertThat(jobDefinitionService.get("54e3deadbeefdeadbeef0001")).isNotPresent();
assertThat(jobTriggerService.get("54e3deadbeefdeadbeef0002")).isNotPresent();
}
|
public InstanceType instanceType() {
return instanceType;
}
|
@Test
public void checkConstruction() {
assertThat(event1.type(), is(ClusterEvent.Type.INSTANCE_ADDED));
assertThat(event1.subject(), is(cNode1));
assertThat(event1.instanceType(), is(ClusterEvent.InstanceType.UNKNOWN));
assertThat(event7.time(), is(time));
assertThat(event7.type(), is(ClusterEvent.Type.INSTANCE_READY));
assertThat(event7.subject(), is(cNode2));
assertThat(event7.instanceType(), is(ClusterEvent.InstanceType.UNKNOWN));
assertThat(event8.type(), is(ClusterEvent.Type.INSTANCE_ADDED));
assertThat(event8.subject(), is(cNode2));
assertThat(event8.instanceType(), is(ClusterEvent.InstanceType.ONOS));
assertThat(event9.type(), is(ClusterEvent.Type.INSTANCE_ADDED));
assertThat(event9.subject(), is(cNode2));
assertThat(event9.instanceType(), is(ClusterEvent.InstanceType.STORAGE));
assertThat(event10.type(), is(ClusterEvent.Type.INSTANCE_REMOVED));
assertThat(event10.subject(), is(cNode2));
assertThat(event10.instanceType(), is(ClusterEvent.InstanceType.ONOS));
assertThat(event11.type(), is(ClusterEvent.Type.INSTANCE_REMOVED));
assertThat(event11.subject(), is(cNode2));
assertThat(event11.instanceType(), is(ClusterEvent.InstanceType.STORAGE));
assertThat(event12.type(), is(ClusterEvent.Type.INSTANCE_ACTIVATED));
assertThat(event12.subject(), is(cNode1));
assertThat(event12.instanceType(), is(ClusterEvent.InstanceType.ONOS));
assertThat(event13.type(), is(ClusterEvent.Type.INSTANCE_ACTIVATED));
assertThat(event13.subject(), is(cNode1));
assertThat(event13.instanceType(), is(ClusterEvent.InstanceType.STORAGE));
assertThat(event14.type(), is(ClusterEvent.Type.INSTANCE_READY));
assertThat(event14.subject(), is(cNode1));
assertThat(event14.instanceType(), is(ClusterEvent.InstanceType.ONOS));
assertThat(event15.type(), is(ClusterEvent.Type.INSTANCE_READY));
assertThat(event15.subject(), is(cNode1));
assertThat(event15.instanceType(), is(ClusterEvent.InstanceType.STORAGE));
assertThat(event16.type(), is(ClusterEvent.Type.INSTANCE_DEACTIVATED));
assertThat(event16.subject(), is(cNode1));
assertThat(event16.instanceType(), is(ClusterEvent.InstanceType.ONOS));
assertThat(event17.type(), is(ClusterEvent.Type.INSTANCE_DEACTIVATED));
assertThat(event17.subject(), is(cNode1));
assertThat(event17.instanceType(), is(ClusterEvent.InstanceType.STORAGE));
}
|
public boolean setImpacts(DefaultIssue issue, Map<SoftwareQuality, Severity> previousImpacts, IssueChangeContext context) {
Map<SoftwareQuality, Severity> currentImpacts = new EnumMap<>(issue.impacts());
if (!previousImpacts.equals(currentImpacts)) {
issue.replaceImpacts(currentImpacts);
issue.setUpdateDate(context.date());
issue.setChanged(true);
return true;
}
return false;
}
|
@Test
void setImpacts_whenImpactAdded_shouldBeUpdated() {
Map<SoftwareQuality, Severity> currentImpacts = Map.of(SoftwareQuality.RELIABILITY, Severity.LOW);
Map<SoftwareQuality, Severity> newImpacts = Map.of(SoftwareQuality.MAINTAINABILITY, Severity.HIGH);
issue.replaceImpacts(newImpacts);
boolean updated = underTest.setImpacts(issue, currentImpacts, context);
assertThat(updated).isTrue();
assertThat(issue.impacts()).isEqualTo(newImpacts);
}
|
void allocateCollectionField( Object object, BeanInjectionInfo beanInjectionInfo, String fieldName ) {
BeanInjectionInfo.Property property = getProperty( beanInjectionInfo, fieldName );
String groupName = ( property != null ) ? property.getGroupName() : null;
if ( groupName == null ) {
return;
}
List<BeanInjectionInfo.Property> groupProperties;
groupProperties = getGroupProperties( beanInjectionInfo, groupName );
Integer maxGroupSize = getMaxSize( groupProperties, object );
// not able to get numeric size
if ( maxGroupSize == null ) {
return;
}
// guaranteed to get at least one field for constant
allocateCollectionField( property, object, Math.max( 1, maxGroupSize ) );
}
|
@Test
public void allocateCollectionField_List() {
BeanInjector bi = new BeanInjector(null );
BeanInjectionInfo bii = new BeanInjectionInfo( MetaBeanLevel1.class );
MetaBeanLevel1 mbl1 = new MetaBeanLevel1();
mbl1.setSub( new MetaBeanLevel2() );
// should set other field based on this size
mbl1.getSub().setFilenames( new String[] {"one", "two", "three", "four"} );
assertNull( mbl1.getSub().getAscending() );
bi.allocateCollectionField( mbl1.getSub(), bii, "ASCENDING_LIST" );
assertEquals(4, mbl1.getSub().getAscending().size() );
}
|
public static Document parseXml(final InputStream is) throws Exception {
return parseXml(is, null);
}
|
@Test
public void testParseCamelContextForceNamespace() throws Exception {
InputStream fis = Files.newInputStream(Paths.get("src/test/resources/org/apache/camel/util/camel-context.xml"));
Document dom = XmlLineNumberParser.parseXml(fis, null, "camelContext", "http://camel.apache.org/schema/spring");
assertNotNull(dom);
NodeList list = dom.getElementsByTagName("camelContext");
assertEquals(1, list.getLength());
Node node = list.item(0);
String lineNumber = (String) node.getUserData(XmlLineNumberParser.LINE_NUMBER);
String lineNumberEnd = (String) node.getUserData(XmlLineNumberParser.LINE_NUMBER_END);
String ns = node.getNamespaceURI();
assertEquals("http://camel.apache.org/schema/spring", ns);
assertEquals("29", lineNumber);
assertEquals("47", lineNumberEnd);
// and there are two routes
list = dom.getElementsByTagName("route");
assertEquals(2, list.getLength());
Node node1 = list.item(0);
Node node2 = list.item(1);
String lineNumber1 = (String) node1.getUserData(XmlLineNumberParser.LINE_NUMBER);
String lineNumberEnd1 = (String) node1.getUserData(XmlLineNumberParser.LINE_NUMBER_END);
assertEquals("31", lineNumber1);
assertEquals("37", lineNumberEnd1);
String lineNumber2 = (String) node2.getUserData(XmlLineNumberParser.LINE_NUMBER);
String lineNumberEnd2 = (String) node2.getUserData(XmlLineNumberParser.LINE_NUMBER_END);
assertEquals("39", lineNumber2);
assertEquals("45", lineNumberEnd2);
}
|
@Override
public List<Input> allOfThisNode(final String nodeId) {
final List<BasicDBObject> query = ImmutableList.of(
new BasicDBObject(MessageInput.FIELD_NODE_ID, nodeId),
new BasicDBObject(MessageInput.FIELD_GLOBAL, true));
final List<DBObject> ownInputs = query(InputImpl.class, new BasicDBObject("$or", query));
final ImmutableList.Builder<Input> inputs = ImmutableList.builder();
for (final DBObject o : ownInputs) {
inputs.add(createFromDbObject(o));
}
return inputs.build();
}
|
@Test
@MongoDBFixtures("InputServiceImplTest.json")
public void allOfThisNodeReturnsGlobalInputsIfNodeIDDoesNotExist() {
final List<Input> inputs = inputService.allOfThisNode("cd03ee44-b2a7-0000-0000-000000000000");
assertThat(inputs).hasSize(1);
}
|
@Override
public List<MenuDO> getMenuList() {
return menuMapper.selectList();
}
|
@Test
public void testGetMenuList() {
// mock data
MenuDO menuDO = randomPojo(MenuDO.class, o -> o.setName("芋艿").setStatus(CommonStatusEnum.ENABLE.getStatus()));
menuMapper.insert(menuDO);
// test case where status does not match
menuMapper.insert(cloneIgnoreId(menuDO, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
// test case where name does not match
menuMapper.insert(cloneIgnoreId(menuDO, o -> o.setName("艿")));
// prepare parameters
MenuListReqVO reqVO = new MenuListReqVO().setName("芋").setStatus(CommonStatusEnum.ENABLE.getStatus());
// invoke
List<MenuDO> result = menuService.getMenuList(reqVO);
// assert
assertEquals(1, result.size());
assertPojoEquals(menuDO, result.get(0));
}
|
@Override
public PageResult<DiyPageDO> getDiyPagePage(DiyPagePageReqVO pageReqVO) {
return diyPageMapper.selectPage(pageReqVO);
}
|
@Test
@Disabled // TODO replace null with the required values, then remove the @Disabled annotation
public void testGetDiyPagePage() {
// mock data
DiyPageDO dbDiyPage = randomPojo(DiyPageDO.class, o -> { // expected to be returned by the query later
o.setName(null);
o.setCreateTime(null);
});
diyPageMapper.insert(dbDiyPage);
// test case where name does not match
diyPageMapper.insert(cloneIgnoreId(dbDiyPage, o -> o.setName(null)));
// test case where createTime does not match
diyPageMapper.insert(cloneIgnoreId(dbDiyPage, o -> o.setCreateTime(null)));
// prepare parameters
DiyPagePageReqVO reqVO = new DiyPagePageReqVO();
reqVO.setName(null);
reqVO.setCreateTime(buildBetweenTime(2023, 2, 1, 2023, 2, 28));
// invoke
PageResult<DiyPageDO> pageResult = diyPageService.getDiyPagePage(reqVO);
// assert
assertEquals(1, pageResult.getTotal());
assertEquals(1, pageResult.getList().size());
assertPojoEquals(dbDiyPage, pageResult.getList().get(0));
}
|
public Node parse() throws ScanException {
return E();
}
|
@Test
public void testKeyword() throws Exception {
{
Parser<Object> p = new Parser<>("hello%xyz");
Node t = p.parse();
Node witness = new Node(Node.LITERAL, "hello");
witness.next = new SimpleKeywordNode("xyz");
Assertions.assertEquals(witness, t);
}
{
Parser<Object> p = new Parser<>("hello%xyz{x}");
Node t = p.parse();
Node witness = new Node(Node.LITERAL, "hello");
SimpleKeywordNode n = new SimpleKeywordNode("xyz");
List<String> optionList = new ArrayList<String>();
optionList.add("x");
n.setOptions(optionList);
witness.next = n;
Assertions.assertEquals(witness, t);
}
}
|
@Override
public long freeze() {
finalizeSnapshotWithFooter();
appendBatches(accumulator.drain());
snapshot.freeze();
accumulator.close();
return snapshot.sizeInBytes();
}
|
@Test
void testBuilderKRaftVersion1WithoutVoterSet() {
OffsetAndEpoch snapshotId = new OffsetAndEpoch(100, 10);
int maxBatchSize = 1024;
AtomicReference<ByteBuffer> buffer = new AtomicReference<>(null);
RecordsSnapshotWriter.Builder builder = new RecordsSnapshotWriter.Builder()
.setKraftVersion(KRaftVersion.KRAFT_VERSION_1)
.setVoterSet(Optional.empty())
.setTime(new MockTime())
.setMaxBatchSize(maxBatchSize)
.setRawSnapshotWriter(
new MockRawSnapshotWriter(snapshotId, buffer::set)
);
try (RecordsSnapshotWriter<String> snapshot = builder.build(STRING_SERDE)) {
snapshot.freeze();
}
try (RecordsSnapshotReader<String> reader = RecordsSnapshotReader.of(
new MockRawSnapshotReader(snapshotId, buffer.get()),
STRING_SERDE,
BufferSupplier.NO_CACHING,
maxBatchSize,
true
)
) {
// Consume the control record batch
Batch<String> batch = reader.next();
assertEquals(2, batch.controlRecords().size());
// Check snapshot header control record
assertEquals(ControlRecordType.SNAPSHOT_HEADER, batch.controlRecords().get(0).type());
assertEquals(new SnapshotHeaderRecord(), batch.controlRecords().get(0).message());
// Check kraft version control record
assertEquals(ControlRecordType.KRAFT_VERSION, batch.controlRecords().get(1).type());
assertEquals(new KRaftVersionRecord().setKRaftVersion((short) 1), batch.controlRecords().get(1).message());
// Consume the reader until we find a control record
do {
batch = reader.next();
}
while (batch.controlRecords().isEmpty());
// Check snapshot footer control record
assertEquals(1, batch.controlRecords().size());
assertEquals(ControlRecordType.SNAPSHOT_FOOTER, batch.controlRecords().get(0).type());
assertEquals(new SnapshotFooterRecord(), batch.controlRecords().get(0).message());
// Snapshot footer must be last record
assertFalse(reader.hasNext());
}
}
|
public static Ip4Prefix valueOf(int address, int prefixLength) {
return new Ip4Prefix(Ip4Address.valueOf(address), prefixLength);
}
|
@Test(expected = NullPointerException.class)
public void testInvalidValueOfNullArrayIPv4() {
Ip4Prefix ipPrefix;
byte[] value;
value = null;
ipPrefix = Ip4Prefix.valueOf(value, 24);
}
|
public static int bytesToIntLE(byte[] bytes, int off) {
return (bytes[off + 3] << 24) + ((bytes[off + 2] & 255) << 16)
+ ((bytes[off + 1] & 255) << 8) + (bytes[off] & 255);
}
|
@Test
public void testBytesToIntLE() {
assertEquals(-12345,
ByteUtils.bytesToIntLE(INT_12345_LE, 0));
}
|
public static String format(Object x) {
if (x != null) {
return format(x.toString());
} else {
return StrUtil.EMPTY;
}
}
|
@Test
public void testFormatDecimal() {
// test the case where a decimal value is passed in
String result = NumberWordFormatter.format(1234.56);
assertEquals("ONE THOUSAND TWO HUNDRED AND THIRTY FOUR AND CENTS FIFTY SIX ONLY", result);
}
|
@Override
public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, long initialDelay, long delay, TimeUnit unit) {
ObjectUtil.checkNotNull(command, "command");
ObjectUtil.checkNotNull(unit, "unit");
if (initialDelay < 0) {
throw new IllegalArgumentException(
String.format("initialDelay: %d (expected: >= 0)", initialDelay));
}
if (delay <= 0) {
throw new IllegalArgumentException(
String.format("delay: %d (expected: > 0)", delay));
}
validateScheduled0(initialDelay, unit);
validateScheduled0(delay, unit);
return schedule(new ScheduledFutureTask<Void>(
this, command, deadlineNanos(getCurrentTimeNanos(), unit.toNanos(initialDelay)), -unit.toNanos(delay)));
}
|
@Test
public void testScheduleWithFixedDelayNegative() {
final TestScheduledEventExecutor executor = new TestScheduledEventExecutor();
assertThrows(IllegalArgumentException.class, new Executable() {
@Override
public void execute() {
executor.scheduleWithFixedDelay(TEST_RUNNABLE, 0, -1, TimeUnit.DAYS);
}
});
}
|
@SuppressWarnings("unchecked")
public <T> T convert(DocString docString, Type targetType) {
if (DocString.class.equals(targetType)) {
return (T) docString;
}
List<DocStringType> docStringTypes = docStringTypeRegistry.lookup(docString.getContentType(), targetType);
if (docStringTypes.isEmpty()) {
if (docString.getContentType() == null) {
throw new CucumberDocStringException(format(
"It appears you did not register docstring type for %s",
targetType.getTypeName()));
}
throw new CucumberDocStringException(format(
"It appears you did not register docstring type for '%s' or %s",
docString.getContentType(),
targetType.getTypeName()));
}
if (docStringTypes.size() > 1) {
List<String> suggestedContentTypes = suggestedContentTypes(docStringTypes);
if (docString.getContentType() == null) {
throw new CucumberDocStringException(format(
"Multiple converters found for type %s, add one of the following content types to your docstring %s",
targetType.getTypeName(),
suggestedContentTypes));
}
throw new CucumberDocStringException(format(
"Multiple converters found for type %s, and the content type '%s' did not match any of the registered types %s. Change the content type of the docstring or register a docstring type for '%s'",
targetType.getTypeName(),
docString.getContentType(),
suggestedContentTypes,
docString.getContentType()));
}
return (T) docStringTypes.get(0).transform(docString.getContent());
}
|
@Test
void throws_when_conversion_fails() {
registry.defineDocStringType(jsonNodeForJsonThrowsException);
DocString docString = DocString.create("{\"hello\":\"world\"}", "json");
CucumberDocStringException exception = assertThrows(
CucumberDocStringException.class,
() -> converter.convert(docString, JsonNode.class));
assertThat(exception.getMessage(), is(equalToCompressingWhiteSpace("" +
"'json' could not transform\n" +
" \"\"\"json\n" +
" {\"hello\":\"world\"}\n" +
" \"\"\"")));
}
|
public Set<? extends AuthenticationRequest> getRequest(final Host bookmark, final LoginCallback prompt)
throws LoginCanceledException {
final StringBuilder url = new StringBuilder();
url.append(bookmark.getProtocol().getScheme().toString()).append("://");
url.append(bookmark.getHostname());
if(!(bookmark.getProtocol().getScheme().getPort() == bookmark.getPort())) {
url.append(":").append(bookmark.getPort());
}
final String context = PathNormalizer.normalize(bookmark.getProtocol().getContext());
// Custom authentication context
url.append(context);
if(bookmark.getProtocol().getDefaultHostname().endsWith("identity.api.rackspacecloud.com")
|| bookmark.getHostname().endsWith("identity.api.rackspacecloud.com")) {
return Collections.singleton(new Authentication20RAXUsernameKeyRequest(
URI.create(url.toString()),
bookmark.getCredentials().getUsername(), bookmark.getCredentials().getPassword(), null)
);
}
final LoginOptions options = new LoginOptions(bookmark.getProtocol()).password(false).anonymous(false).publickey(false);
if(context.contains("1.0")) {
return Collections.singleton(new Authentication10UsernameKeyRequest(URI.create(url.toString()),
bookmark.getCredentials().getUsername(), bookmark.getCredentials().getPassword()));
}
else if(context.contains("1.1")) {
return Collections.singleton(new Authentication11UsernameKeyRequest(URI.create(url.toString()),
bookmark.getCredentials().getUsername(), bookmark.getCredentials().getPassword()));
}
else if(context.contains("2.0")) {
// Prompt for tenant
final String user;
final String tenant;
if(StringUtils.contains(bookmark.getCredentials().getUsername(), ':')) {
final String[] parts = StringUtils.splitPreserveAllTokens(bookmark.getCredentials().getUsername(), ':');
tenant = parts[0];
user = parts[1];
}
else {
user = bookmark.getCredentials().getUsername();
tenant = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
LocaleFactory.localizedString("Tenant Name", "Mosso"), options
.usernamePlaceholder(LocaleFactory.localizedString("Tenant Name", "Mosso"))).getUsername();
// Save tenant in username
bookmark.getCredentials().setUsername(String.format("%s:%s", tenant, bookmark.getCredentials().getUsername()));
}
final Set<AuthenticationRequest> requests = new LinkedHashSet<>();
requests.add(new Authentication20UsernamePasswordRequest(
URI.create(url.toString()),
user, bookmark.getCredentials().getPassword(), tenant)
);
requests.add(new Authentication20UsernamePasswordTenantIdRequest(
URI.create(url.toString()),
user, bookmark.getCredentials().getPassword(), tenant)
);
requests.add(new Authentication20AccessKeySecretKeyRequest(
URI.create(url.toString()),
user, bookmark.getCredentials().getPassword(), tenant));
return requests;
}
else if(context.contains("3")) {
// Prompt for project
final String user;
final String project;
final String domain;
if(StringUtils.contains(bookmark.getCredentials().getUsername(), ':')) {
final String[] parts = StringUtils.splitPreserveAllTokens(bookmark.getCredentials().getUsername(), ':');
if(parts.length == 3) {
project = parts[0];
domain = parts[1];
user = parts[2];
}
else {
project = parts[0];
user = parts[1];
domain = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
LocaleFactory.localizedString("Project Domain Name", "Mosso"), options
.usernamePlaceholder(LocaleFactory.localizedString("Project Domain Name", "Mosso"))).getUsername();
// Save project name and domain in username
bookmark.getCredentials().setUsername(String.format("%s:%s:%s", project, domain, bookmark.getCredentials().getUsername()));
}
}
else {
user = bookmark.getCredentials().getUsername();
final Credentials projectName = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
LocaleFactory.localizedString("Project Name", "Mosso"), options
.usernamePlaceholder(LocaleFactory.localizedString("Project Name", "Mosso")));
if(StringUtils.contains(bookmark.getCredentials().getUsername(), ':')) {
final String[] parts = StringUtils.splitPreserveAllTokens(projectName.getUsername(), ':');
project = parts[0];
domain = parts[1];
}
else {
project = projectName.getUsername();
domain = prompt.prompt(bookmark, bookmark.getCredentials().getUsername(),
LocaleFactory.localizedString("Provide additional login credentials", "Credentials"),
LocaleFactory.localizedString("Project Domain Name", "Mosso"), options
.usernamePlaceholder(LocaleFactory.localizedString("Project Domain Name", "Mosso"))).getUsername();
}
// Save project name and domain in username
bookmark.getCredentials().setUsername(String.format("%s:%s:%s", project, domain, bookmark.getCredentials().getUsername()));
}
final Set<AuthenticationRequest> requests = new LinkedHashSet<>();
requests.add(new Authentication3UsernamePasswordProjectRequest(
URI.create(url.toString()),
user, bookmark.getCredentials().getPassword(), project, domain)
);
return requests;
}
else {
log.warn(String.format("Unknown context version in %s. Default to v1 authentication.", context));
// Default to 1.0
return Collections.singleton(new Authentication10UsernameKeyRequest(URI.create(url.toString()),
bookmark.getCredentials().getUsername(), bookmark.getCredentials().getPassword()));
}
}
|
@Test(expected = LoginCanceledException.class)
public void testGetDefault2NoTenant() throws Exception {
final SwiftAuthenticationService s = new SwiftAuthenticationService();
final Credentials credentials = new Credentials("u", "P");
final SwiftProtocol protocol = new SwiftProtocol() {
@Override
public String getContext() {
return "/v2.0/tokens";
}
};
assertEquals(Client.AuthVersion.v20,
s.getRequest(new Host(protocol, "region-b.geo-1.identity.hpcloudsvc.com", credentials),
new DisabledLoginCallback()).iterator().next().getVersion());
}
|
@Override
public Authentication attemptAuthentication(HttpServletRequest request, HttpServletResponse response) throws AuthenticationException, IOException, ServletException {
if (!Strings.isNullOrEmpty(request.getParameter("error"))) {
// there's an error coming back from the server, need to handle this
handleError(request, response);
return null; // no auth, response is sent to display page or something
} else if (!Strings.isNullOrEmpty(request.getParameter("code"))) {
// we got back the code, need to process this to get our tokens
Authentication auth = handleAuthorizationCodeResponse(request, response);
return auth;
} else {
// not an error, not a code, must be an initial login of some type
handleAuthorizationRequest(request, response);
return null; // no auth, response redirected to the server's Auth Endpoint (or possibly to the account chooser)
}
}
|
@Test
public void attemptAuthentication_error() throws Exception {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.when(request.getParameter("error")).thenReturn("Error");
Mockito.when(request.getParameter("error_description")).thenReturn("Description");
Mockito.when(request.getParameter("error_uri")).thenReturn("http://example.com");
try {
filter.attemptAuthentication(request, mock(HttpServletResponse.class));
fail("AuthorizationEndpointException expected.");
}
catch (AuthorizationEndpointException exception) {
assertThat(exception.getMessage(),
is("Error from Authorization Endpoint: Error Description http://example.com"));
assertThat(exception.getError(), is("Error"));
assertThat(exception.getErrorDescription(), is("Description"));
assertThat(exception.getErrorURI(), is("http://example.com"));
assertThat(exception, is(instanceOf(AuthenticationServiceException.class)));
}
}
|
public void registerStrategy(BatchingStrategy<?, ?, ?> strategy) {
_strategies.add(strategy);
}
|
@Test
public void testSingletonsInvoked() {
RecordingStrategy<Integer, Integer, String> strategy =
new RecordingStrategy<>((key, promise) -> promise.done(String.valueOf(key)), key -> key);
_batchingSupport.registerStrategy(strategy);
Task<String> task = Task.par(strategy.batchable(0), strategy.batchable(1))
.map("concat", (s0, s1) -> s0 + s1);
String result = runAndWait("TestBatchingSupport.testSingletonsInvoked", task);
assertEquals(result, "01");
assertTrue(strategy.getClassifiedKeys().contains(0));
assertTrue(strategy.getClassifiedKeys().contains(1));
assertEquals(strategy.getExecutedBatches().size(), 0);
assertEquals(strategy.getExecutedSingletons().size(), 2);
}
|
public static AzureStoragePath parseAzureStoragePath(String path) {
try {
URI uri = new URI(path);
String rawAuthority = uri.getRawAuthority();
if (rawAuthority == null) {
throw new URISyntaxException(path, "Illegal azure storage path");
}
String[] parts = uri.getRawAuthority().split("@");
if (parts.length < 2) {
throw new URISyntaxException(path, "Illegal azure storage path");
}
if (!path.contains(".blob.core.windows.net") && !path.contains(".dfs.core.windows.net")) {
throw new URISyntaxException(path, "Illegal azure storage path");
}
String container = parts[0];
if (container.isEmpty()) {
throw new URISyntaxException(path, "Empty container name in azure storage path");
}
String[] leftParts = parts[1].split("\\.");
String storageAccount = leftParts[0];
if (storageAccount.isEmpty()) {
throw new URISyntaxException(path, "Empty storage account in azure storage path");
}
return new AzureStoragePath(storageAccount, container);
} catch (URISyntaxException exception) {
LOG.debug(exception.getMessage());
}
// Return empty AzureStoragePath
return new AzureStoragePath("", "");
}
|
@Test
public void testAzurePathParseWithWASB() {
String uri = "wasb://bottle@smith.blob.core.windows.net/path/1/2";
AzureStoragePath path = CredentialUtil.parseAzureStoragePath(uri);
Assert.assertEquals(path.getContainer(), "bottle");
Assert.assertEquals(path.getStorageAccount(), "smith");
uri = "wasbs://bottle@smith.blob.core.windows.net/path/1/2";
path = CredentialUtil.parseAzureStoragePath(uri);
Assert.assertEquals("bottle", path.getContainer());
Assert.assertEquals("smith", path.getStorageAccount());
uri = "wasb://a@.blob.core.windows.net/path/1/2";
path = CredentialUtil.parseAzureStoragePath(uri);
Assert.assertEquals("", path.getContainer());
Assert.assertEquals("", path.getStorageAccount());
uri = "wasb://a@p/path/1/2";
path = CredentialUtil.parseAzureStoragePath(uri);
Assert.assertEquals("", path.getContainer());
Assert.assertEquals("", path.getStorageAccount());
}
|
public static HashingAlgorithm getHashingAlgorithm(String password)
{
if (password.startsWith("$2y")) {
if (getBCryptCost(password) < BCRYPT_MIN_COST) {
throw new HashedPasswordException("Minimum cost of BCrypt password must be " + BCRYPT_MIN_COST);
}
return HashingAlgorithm.BCRYPT;
}
if (password.contains(":")) {
if (getPBKDF2Iterations(password) < PBKDF2_MIN_ITERATIONS) {
throw new HashedPasswordException("Minimum iterations of PBKDF2 password must be " + PBKDF2_MIN_ITERATIONS);
}
return HashingAlgorithm.PBKDF2;
}
throw new HashedPasswordException("Password hashing algorithm cannot be determined");
}
|
@Test
public void testHashingAlgorithmPBKDF2()
{
String password = "1000:5b4240333032306164:f38d165fce8ce42f59d366139ef5d9e1ca1247f0e06e503ee1a611dd9ec40876bb5edb8409f5abe5504aab6628e70cfb3d3a18e99d70357d295002c3d0a308a0";
assertEquals(getHashingAlgorithm(password), PBKDF2);
}
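// A minimal standalone sketch of the detection rules above. HashingAlgorithm and
// HashedPasswordException are project types, so plain JDK stand-ins are used, the cost/iteration
// checks are omitted, and both sample hashes are illustrative values, not taken from the source.
public class HashedPasswordSketch {

    enum Algo { BCRYPT, PBKDF2 }

    static Algo detect(String password) {
        if (password.startsWith("$2y")) {
            return Algo.BCRYPT;  // bcrypt hashes are prefixed with "$2y$<cost>$"
        }
        if (password.contains(":")) {
            return Algo.PBKDF2;  // PBKDF2 entries look like "<iterations>:<salt>:<hash>"
        }
        throw new IllegalArgumentException("Password hashing algorithm cannot be determined");
    }

    public static void main(String[] args) {
        System.out.println(detect("$2y$10$abcdefghijklmnopqrstuv"));    // BCRYPT (illustrative value)
        System.out.println(detect("1000:73616c74:686173686861736868")); // PBKDF2 (illustrative value)
    }
}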
|
static String asErrorJson(String message) {
try {
return Jackson.mapper().writeValueAsString(Map.of("error", message));
} catch (JsonProcessingException e) {
log.log(WARNING, "Could not encode error message to json:", e);
return "Could not encode error message to json, check the log for details.";
}
}
|
@Test
void error_message_is_wrapped_in_json_object() {
var json = ErrorResponse.asErrorJson("bad");
assertEquals("{\"error\":\"bad\"}", json);
}
|
static int encodeTrailingString(
final UnsafeBuffer encodingBuffer, final int offset, final int remainingCapacity, final String value)
{
final int maxLength = remainingCapacity - SIZE_OF_INT;
if (value.length() <= maxLength)
{
return encodingBuffer.putStringAscii(offset, value, LITTLE_ENDIAN);
}
else
{
encodingBuffer.putInt(offset, maxLength, LITTLE_ENDIAN);
encodingBuffer.putStringWithoutLengthAscii(offset + SIZE_OF_INT, value, 0, maxLength - 3);
encodingBuffer.putStringWithoutLengthAscii(offset + SIZE_OF_INT + maxLength - 3, "...");
return remainingCapacity;
}
}
|
@Test
void encodeTrailingStringAsAsciiWhenPayloadIsSmallerThanMaxMessageSizeWithoutHeader()
{
final int offset = 17;
final int remainingCapacity = 22;
final int encodedLength = encodeTrailingString(buffer, offset, remainingCapacity, "ab©d️");
assertEquals(SIZE_OF_INT + 5, encodedLength);
assertEquals("ab?d?", buffer.getStringAscii(offset));
}
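// A hedged sketch of the truncation branch above, which the test does not exercise. The enclosing
// class is not shown in this excerpt, so the branch body is inlined. Assumes Agrona on the
// classpath and, like the test, a little-endian platform for getStringAscii.
import static java.nio.ByteOrder.LITTLE_ENDIAN;
import static org.agrona.BitUtil.SIZE_OF_INT;

import org.agrona.concurrent.UnsafeBuffer;

public class TrailingStringTruncationSketch {
    public static void main(String[] args) {
        UnsafeBuffer buffer = new UnsafeBuffer(new byte[64]);
        int offset = 0;
        int remainingCapacity = SIZE_OF_INT + 8; // length prefix plus room for 8 ASCII chars
        String value = "0123456789";             // 10 chars, so it does not fit
        int maxLength = remainingCapacity - SIZE_OF_INT;
        // Same steps as the else-branch: claim maxLength bytes, then a truncated body ending in "..."
        buffer.putInt(offset, maxLength, LITTLE_ENDIAN);
        buffer.putStringWithoutLengthAscii(offset + SIZE_OF_INT, value, 0, maxLength - 3);
        buffer.putStringWithoutLengthAscii(offset + SIZE_OF_INT + maxLength - 3, "...");
        System.out.println(buffer.getStringAscii(offset)); // prints "01234..."
    }
}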
|
@Override
public void initialize(ServiceConfiguration config) throws IOException, IllegalArgumentException {
String prefix = (String) config.getProperty(CONF_TOKEN_SETTING_PREFIX);
if (null == prefix) {
prefix = "";
}
this.confTokenSecretKeySettingName = prefix + CONF_TOKEN_SECRET_KEY;
this.confTokenPublicKeySettingName = prefix + CONF_TOKEN_PUBLIC_KEY;
this.confTokenAuthClaimSettingName = prefix + CONF_TOKEN_AUTH_CLAIM;
this.confTokenPublicAlgSettingName = prefix + CONF_TOKEN_PUBLIC_ALG;
this.confTokenAudienceClaimSettingName = prefix + CONF_TOKEN_AUDIENCE_CLAIM;
this.confTokenAudienceSettingName = prefix + CONF_TOKEN_AUDIENCE;
this.confTokenAllowedClockSkewSecondsSettingName = prefix + CONF_TOKEN_ALLOWED_CLOCK_SKEW_SECONDS;
// we need to fetch the algorithm before we fetch the key
this.publicKeyAlg = getPublicKeyAlgType(config);
this.validationKey = getValidationKey(config);
this.roleClaim = getTokenRoleClaim(config);
this.audienceClaim = getTokenAudienceClaim(config);
this.audience = getTokenAudience(config);
long allowedSkew = getConfTokenAllowedClockSkewSeconds(config);
this.parser = Jwts.parserBuilder()
.setAllowedClockSkewSeconds(allowedSkew)
.setSigningKey(this.validationKey)
.build();
if (audienceClaim != null && audience == null) {
throw new IllegalArgumentException("Token Audience Claim [" + audienceClaim
+ "] configured, but Audience stands for this broker not.");
}
}
|
@Test(expectedExceptions = IOException.class)
public void testValidationKeyWhenBlankPublicKeyIsPassed() throws IOException {
Properties properties = new Properties();
properties.setProperty(AuthenticationProviderToken.CONF_TOKEN_PUBLIC_KEY, " ");
ServiceConfiguration conf = new ServiceConfiguration();
conf.setProperties(properties);
AuthenticationProviderToken provider = new AuthenticationProviderToken();
provider.initialize(conf);
}
|
public static final void loadAttributesMap( DataNode dataNode, AttributesInterface attributesInterface )
throws KettleException {
loadAttributesMap( dataNode, attributesInterface, NODE_ATTRIBUTE_GROUPS );
}
|
@Test
public void testLoadAttributesMap_CustomTag() throws Exception {
try ( MockedStatic<AttributesMapUtil> mockedAttributesMapUtil = mockStatic( AttributesMapUtil.class ) ) {
mockedAttributesMapUtil.when( () -> AttributesMapUtil.loadAttributesMap( any( DataNode.class ),
any( AttributesInterface.class ) ) ).thenCallRealMethod();
mockedAttributesMapUtil.when( () -> AttributesMapUtil.loadAttributesMap( any( DataNode.class ),
any( AttributesInterface.class ), anyString() ) ).thenCallRealMethod();
DataNode dataNode = new DataNode( CNST_DUMMY );
DataNode groupsDataNode = dataNode.addNode( CUSTOM_TAG );
DataNode aGroupDataNode = groupsDataNode.addNode( A_GROUP );
aGroupDataNode.setProperty( A_KEY, A_VALUE );
JobEntryCopy jobEntryCopy = new JobEntryCopy();
AttributesMapUtil.loadAttributesMap( dataNode, jobEntryCopy, CUSTOM_TAG );
assertNotNull( jobEntryCopy.getAttributesMap() );
assertNotNull( jobEntryCopy.getAttributes( A_GROUP ) );
assertEquals( A_VALUE, jobEntryCopy.getAttribute( A_GROUP, A_KEY ) );
}
}
|
public CompletableFuture<LookupResult> createLookupResult(String candidateBroker, boolean authoritativeRedirect,
final String advertisedListenerName) {
CompletableFuture<LookupResult> lookupFuture = new CompletableFuture<>();
try {
checkArgument(StringUtils.isNotBlank(candidateBroker), "Lookup broker can't be null %s", candidateBroker);
String path = LoadManager.LOADBALANCE_BROKERS_ROOT + "/" + candidateBroker;
localBrokerDataCache.get(path).thenAccept(reportData -> {
if (reportData.isPresent()) {
LocalBrokerData lookupData = reportData.get();
if (StringUtils.isNotBlank(advertisedListenerName)) {
AdvertisedListener listener = lookupData.getAdvertisedListeners().get(advertisedListenerName);
if (listener == null) {
lookupFuture.completeExceptionally(
new PulsarServerException(
"the broker do not have " + advertisedListenerName + " listener"));
} else {
URI url = listener.getBrokerServiceUrl();
URI urlTls = listener.getBrokerServiceUrlTls();
lookupFuture.complete(new LookupResult(lookupData.getWebServiceUrl(),
lookupData.getWebServiceUrlTls(), url == null ? null : url.toString(),
urlTls == null ? null : urlTls.toString(), authoritativeRedirect));
}
} else {
lookupFuture.complete(new LookupResult(lookupData.getWebServiceUrl(),
lookupData.getWebServiceUrlTls(), lookupData.getPulsarServiceUrl(),
lookupData.getPulsarServiceUrlTls(), authoritativeRedirect));
}
} else {
lookupFuture.completeExceptionally(new MetadataStoreException.NotFoundException(path));
}
}).exceptionally(ex -> {
lookupFuture.completeExceptionally(ex);
return null;
});
} catch (Exception e) {
lookupFuture.completeExceptionally(e);
}
return lookupFuture;
}
|
@Test
public void testCreateLookupResult() throws Exception {
final String candidateBroker = "localhost:8080";
final String brokerUrl = "pulsar://localhost:6650";
final String listenerUrl = "pulsar://localhost:7000";
final String listenerUrlTls = "pulsar://localhost:8000";
final String listener = "listenerName";
Map<String, AdvertisedListener> advertisedListeners = new HashMap<>();
advertisedListeners.put(listener, AdvertisedListener.builder().brokerServiceUrl(new URI(listenerUrl)).brokerServiceUrlTls(new URI(listenerUrlTls)).build());
LocalBrokerData ld = new LocalBrokerData("http://" + candidateBroker, null, brokerUrl, null, advertisedListeners);
String path = String.format("%s/%s", LoadManager.LOADBALANCE_BROKERS_ROOT, candidateBroker);
pulsar.getLocalMetadataStore().put(path,
ObjectMapperFactory.getMapper().writer().writeValueAsBytes(ld),
Optional.empty(),
EnumSet.of(CreateOption.Ephemeral)).join();
LookupResult noListener = pulsar.getNamespaceService().createLookupResult(candidateBroker, false, null).get();
LookupResult withListener = pulsar.getNamespaceService().createLookupResult(candidateBroker, false, listener).get();
Assert.assertEquals(noListener.getLookupData().getBrokerUrl(), brokerUrl);
Assert.assertEquals(withListener.getLookupData().getBrokerUrl(), listenerUrl);
Assert.assertEquals(withListener.getLookupData().getBrokerUrlTls(), listenerUrlTls);
System.out.println(withListener);
}
|
public static TypeTransformation legacyRawToTypeInfoRaw() {
return LegacyRawTypeTransformation.INSTANCE;
}
|
@Test
void testLegacyRawToTypeInfoRaw() {
DataType dataType =
DataTypes.ROW(
DataTypes.FIELD("a", DataTypes.STRING()),
DataTypes.FIELD("b", DataTypes.DECIMAL(10, 3)),
DataTypes.FIELD("c", createLegacyRaw()),
DataTypes.FIELD("d", DataTypes.ARRAY(createLegacyRaw())));
TypeInformation<TypeTransformationsTest> typeInformation =
TypeExtractor.getForClass(TypeTransformationsTest.class);
DataType rawDataType = new AtomicDataType(new TypeInformationRawType<>(typeInformation));
DataType expected =
DataTypes.ROW(
DataTypes.FIELD("a", DataTypes.STRING()),
DataTypes.FIELD("b", DataTypes.DECIMAL(10, 3)),
DataTypes.FIELD("c", rawDataType),
DataTypes.FIELD("d", DataTypes.ARRAY(rawDataType)));
assertThat(DataTypeUtils.transform(dataType, legacyRawToTypeInfoRaw())).isEqualTo(expected);
}
|
@Override
@MethodNotAvailable
public void loadAll(boolean replaceExistingValues) {
throw new MethodNotAvailableException();
}
|
@Test(expected = MethodNotAvailableException.class)
public void testLoadAll() {
adapter.loadAll(true);
}
|
@Override
public String getProviderMethodProperty(String service, String method, String key) {
return config.getProperty(DynamicConfigKeyHelper.buildProviderMethodProKey(service, method, key),
DynamicHelper.DEFAULT_DYNAMIC_VALUE);
}
|
@Test
public void getProviderMethodProperty() {
}
|
@VisibleForTesting
static List<String> tokenizeArguments(@Nullable final String args) {
if (args == null) {
return Collections.emptyList();
}
final Matcher matcher = ARGUMENTS_TOKENIZE_PATTERN.matcher(args);
final List<String> tokens = new ArrayList<>();
while (matcher.find()) {
tokens.add(matcher.group().trim().replace("\"", "").replace("\'", ""));
}
return tokens;
}
|
@Test
void testTokenizeSingleQuoted() {
final List<String> arguments = JarHandlerUtils.tokenizeArguments("--foo 'bar baz '");
assertThat(arguments.get(0)).isEqualTo("--foo");
assertThat(arguments.get(1)).isEqualTo("bar baz ");
}
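// A hedged sketch of the tokenization above. ARGUMENTS_TOKENIZE_PATTERN is not shown in this
// excerpt, so the regex below is an assumption chosen to reproduce the quoted/unquoted behaviour
// the test expects; the trim and quote-stripping steps mirror the method body.
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TokenizeArgumentsSketch {

    private static final Pattern ASSUMED_PATTERN = Pattern.compile("([^\"']\\S*|\".+?\"|'.+?')\\s*");

    static List<String> tokenize(String args) {
        Matcher matcher = ASSUMED_PATTERN.matcher(args);
        List<String> tokens = new ArrayList<>();
        while (matcher.find()) {
            tokens.add(matcher.group().trim().replace("\"", "").replace("'", ""));
        }
        return tokens;
    }

    public static void main(String[] args) {
        // Unquoted, double-quoted and single-quoted arguments in one line
        System.out.println(tokenize("--foo \"bar baz \" --flag 'x y'"));
        // Expected under the assumed pattern: [--foo, bar baz , --flag, x y]
    }
}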
|
public static PDImageXObject createFromFile(PDDocument document, File file)
throws IOException
{
return createFromFile(document, file, 0);
}
|
@Test
void testByteShortPaddedWithGarbage() throws IOException
{
try (PDDocument document = new PDDocument())
{
String basePath = "src/test/resources/org/apache/pdfbox/pdmodel/graphics/image/ccittg3-garbage-padded-fields";
for (String ext : Arrays.asList(".tif", "-bigendian.tif"))
{
String tiffPath = basePath + ext;
PDImageXObject ximage3 = CCITTFactory.createFromFile(document, new File(tiffPath));
validate(ximage3, 1, 344, 287, "tiff", PDDeviceGray.INSTANCE.getName());
}
}
}
|
@Override
public T deserialize(final String topic, final byte[] bytes) {
try {
if (bytes == null) {
return null;
}
// don't use the JsonSchemaConverter to read this data because
// we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS,
// which is not currently available in the standard converters
final JsonNode value = isJsonSchema
? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class)
: MAPPER.readTree(bytes);
final Object coerced = enforceFieldType(
"$",
new JsonValueContext(value, schema)
);
if (LOG.isTraceEnabled()) {
LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced);
}
return SerdeUtils.castToTargetType(coerced, targetType);
} catch (final Exception e) {
// Clear location in order to avoid logging data, for security reasons
if (e instanceof JsonParseException) {
((JsonParseException) e).clearLocation();
}
throw new SerializationException(
"Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e);
}
}
|
@Test
public void shouldDeserializedJsonText() {
// Given:
final KsqlJsonDeserializer<String> deserializer =
givenDeserializerForSchema(Schema.OPTIONAL_STRING_SCHEMA, String.class);
final Map<String, String> validCoercions = ImmutableMap.<String, String>builder()
.put("true", "true")
.put("42", "42")
.put("42.000", "42.000")
.put("42.001", "42.001")
.put("\"just a string\"", "just a string")
.put("{\"json\": \"object\"}", "{\"json\":\"object\"}")
.put("[\"json\", \"array\"]", "[json, array]")
.build();
validCoercions.forEach((jsonValue, expectedValue) -> {
final byte[] bytes = addMagic(jsonValue.getBytes(StandardCharsets.UTF_8));
// When:
final Object result = deserializer.deserialize(SOME_TOPIC, bytes);
// Then:
assertThat(result, is(expectedValue));
});
}
|
public static int getSessionCount() {
if (!instanceCreated) {
return -1;
}
// we could simply use SESSION_MAP_BY_ID.size()
// but SESSION_COUNT is enough to obtain this value
// (SESSION_MAP_BY_ID will be used for the invalidateAllSessions function, among others)
return SESSION_COUNT.get();
}
|
@Test
public void testGetSessionCount() {
sessionListener.sessionCreated(createSessionEvent());
if (SessionListener.getSessionCount() != 1) {
fail("getSessionCount");
}
}
|
public R apply(R record) {
if (predicate == null || negate ^ predicate.test(record)) {
return transformation.apply(record);
}
return record;
}
|
@Test
public void apply() {
applyAndAssert(true, false, transformed);
applyAndAssert(true, true, initial);
applyAndAssert(false, false, initial);
applyAndAssert(false, true, transformed);
}
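// A minimal standalone sketch of the gating logic above: the transformation runs when there is no
// predicate, or when negate XOR predicate.test(record) is true. Connect's Predicate/Transformation
// types are replaced by java.util.function equivalents for illustration.
import java.util.function.Predicate;
import java.util.function.UnaryOperator;

public class PredicatedApplySketch {

    static String apply(String record, Predicate<String> predicate, boolean negate,
                        UnaryOperator<String> transformation) {
        if (predicate == null || negate ^ predicate.test(record)) {
            return transformation.apply(record);
        }
        return record;
    }

    public static void main(String[] args) {
        Predicate<String> startsWithA = s -> s.startsWith("a");
        UnaryOperator<String> upper = String::toUpperCase;
        System.out.println(apply("abc", startsWithA, false, upper)); // ABC (predicate true, not negated)
        System.out.println(apply("abc", startsWithA, true, upper));  // abc (predicate true, negated)
        System.out.println(apply("xyz", startsWithA, false, upper)); // xyz (predicate false)
        System.out.println(apply("xyz", startsWithA, true, upper));  // XYZ (predicate false, negated)
        System.out.println(apply("xyz", null, false, upper));        // XYZ (no predicate: always apply)
    }
}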
|
public static TimestampExtractionPolicy create(
final KsqlConfig ksqlConfig,
final LogicalSchema schema,
final Optional<TimestampColumn> timestampColumn
) {
if (!timestampColumn.isPresent()) {
return new MetadataTimestampExtractionPolicy(getDefaultTimestampExtractor(ksqlConfig));
}
final ColumnName col = timestampColumn.get().getColumn();
final Optional<String> timestampFormat = timestampColumn.get().getFormat();
final Column column = schema.findColumn(col)
.orElseThrow(() -> new KsqlException(
"The TIMESTAMP column set in the WITH clause does not exist in the schema: '"
+ col.toString(FormatOptions.noEscape()) + "'"));
final SqlBaseType tsColumnType = column.type().baseType();
if (tsColumnType == SqlBaseType.STRING) {
final String format = timestampFormat.orElseThrow(() -> new KsqlException(
"A String timestamp field has been specified without"
+ " also specifying the "
+ CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()));
return new StringTimestampExtractionPolicy(col, format);
}
if (timestampFormat.isPresent()) {
throw new KsqlException("'" + CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY
+ "' set in the WITH clause can only be used "
+ "when the timestamp column is of type STRING.");
}
if (tsColumnType == SqlBaseType.BIGINT) {
return new LongColumnTimestampExtractionPolicy(col);
}
if (tsColumnType == SqlBaseType.TIMESTAMP) {
return new TimestampColumnTimestampExtractionPolicy(col);
}
throw new KsqlException(
"Timestamp column, " + col + ", should be LONG(INT64), TIMESTAMP,"
+ " or a String with a "
+ CommonCreateConfigs.TIMESTAMP_FORMAT_PROPERTY.toLowerCase()
+ " specified.");
}
|
@Test
public void shouldThrowIfLongTimestampTypeAndFormatIsSupplied() {
// Given:
final String timestamp = "timestamp";
final LogicalSchema schema = schemaBuilder2
.valueColumn(ColumnName.of(timestamp.toUpperCase()), SqlTypes.BIGINT)
.build();
// When:
assertThrows(
KsqlException.class,
() -> TimestampExtractionPolicyFactory
.create(ksqlConfig,
schema,
Optional.of(
new TimestampColumn(
ColumnName.of(timestamp.toUpperCase()),
Optional.of("b")
)
)
)
);
}
|
@Udf
public Map<String, String> splitToMap(
@UdfParameter(
description = "The string to split into map entries") final String input,
@UdfParameter(
description = "The delimiter that separates the map entries") final String entryDelimiter,
@UdfParameter(
description = "The delimiter that separates each key from its value") final String kvDelimiter) {
if (input == null || entryDelimiter == null || kvDelimiter == null) {
return null;
}
if (entryDelimiter.isEmpty() || kvDelimiter.isEmpty() || entryDelimiter.equals(kvDelimiter)) {
return null;
}
final Iterable<String> entries = Splitter.on(entryDelimiter).omitEmptyStrings().split(input);
return StreamSupport.stream(entries.spliterator(), false)
.filter(e -> e.contains(kvDelimiter))
.map(kv -> Splitter.on(kvDelimiter).split(kv).iterator())
.collect(Collectors.toMap(
Iterator::next,
Iterator::next,
(v1, v2) -> v2));
}
|
@Test
public void shouldReturnEmptyForInputWithoutDelimiters() {
Map<String, String> result = udf.splitToMap("cherry", "/", ":=");
assertThat(result, is(Collections.EMPTY_MAP));
}
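// A standalone sketch mirroring the happy path of splitToMap above (the null/empty-delimiter
// guards are omitted). Assumes Guava on the classpath; entries without the kvDelimiter are
// dropped, and duplicate keys keep the last value because of the (v1, v2) -> v2 merge function.
import com.google.common.base.Splitter;

import java.util.Iterator;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

public class SplitToMapSketch {

    static Map<String, String> splitToMap(String input, String entryDelimiter, String kvDelimiter) {
        Iterable<String> entries = Splitter.on(entryDelimiter).omitEmptyStrings().split(input);
        return StreamSupport.stream(entries.spliterator(), false)
                .filter(e -> e.contains(kvDelimiter))
                .map(kv -> Splitter.on(kvDelimiter).split(kv).iterator())
                .collect(Collectors.toMap(Iterator::next, Iterator::next, (v1, v2) -> v2));
    }

    public static void main(String[] args) {
        // "pear" has no ":=" and is dropped; "cherry" appears twice and keeps its last value
        System.out.println(splitToMap("apple:=green/cherry:=blue/pear/cherry:=red", "/", ":="));
        // Expected entries (map order may vary): apple=green, cherry=red
    }
}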
|
public Statement buildStatement(final ParserRuleContext parseTree) {
return build(Optional.of(getSources(parseTree)), parseTree);
}
|
@Test
public void shouldHandleAliasQualifiedSelectStarOnRightJoinSource() {
// Given:
final SingleStatementContext stmt =
givenQuery("SELECT T2.* FROM TEST1 JOIN TEST2 T2 WITHIN 1 SECOND ON TEST1.ID = T2.ID;");
// When:
final Query result = (Query) builder.buildStatement(stmt);
// Then:
assertThat(result.getSelect(),
is(new Select(ImmutableList.of(new AllColumns(Optional.of(SourceName.of("T2")))))));
}
|
public static Coordinate wgs84ToBd09(double lng, double lat) {
final Coordinate gcj02 = wgs84ToGcj02(lng, lat);
return gcj02ToBd09(gcj02.lng, gcj02.lat);
}
|
@Test
public void wgs84toBd09Test2() {
// https://tool.lu/coordinate/
final CoordinateUtil.Coordinate coordinate = CoordinateUtil.wgs84ToBd09(122.99395597D, 44.99804071D);
assertEquals(123.00636516028885D, coordinate.getLng(), 0.00000000000001D);
// Precision differs across JDK versions and CPU architectures, so the values are not exactly equal; a delta is used here to control the precision
// Reference: "Looking at the Arm vs x86 differences through the underlying implementation of Java Math": https://yikun.github.io/2020/04/10/%E4%BB%8EJava-Math%E5%BA%95%E5%B1%82%E5%AE%9E%E7%8E%B0%E7%9C%8BArm%E4%B8%8Ex86%E7%9A%84%E5%B7%AE%E5%BC%82/
assertEquals(45.00636909189589D, coordinate.getLat(), 0.00000000000001D);
}
|
@SuppressWarnings("unchecked")
public static void validateFormat(Object offsetData) {
if (offsetData == null)
return;
if (!(offsetData instanceof Map))
throw new DataException("Offsets must be specified as a Map");
validateFormat((Map<Object, Object>) offsetData);
}
|
@Test
public void testValidateFormatNotMap() {
DataException e = assertThrows(DataException.class, () -> OffsetUtils.validateFormat(new Object()));
assertThat(e.getMessage(), containsString("Offsets must be specified as a Map"));
}
|
public static Profiler createIfTrace(Logger logger) {
if (logger.isTraceEnabled()) {
return create(logger);
}
return NullProfiler.NULL_INSTANCE;
}
|
@Test
public void create_null_profiler_if_trace_level_is_disabled() {
tester.setLevel(LoggerLevel.TRACE);
Profiler profiler = Profiler.createIfTrace(LoggerFactory.getLogger("foo"));
assertThat(profiler).isInstanceOf(DefaultProfiler.class);
tester.setLevel(LoggerLevel.DEBUG);
profiler = Profiler.createIfTrace(LoggerFactory.getLogger("foo"));
assertThat(profiler).isInstanceOf(NullProfiler.class);
}
|
@Override
public TCreatePartitionResult createPartition(TCreatePartitionRequest request) throws TException {
LOG.info("Receive create partition: {}", request);
TCreatePartitionResult result;
try {
if (partitionRequestNum.incrementAndGet() >= Config.thrift_server_max_worker_threads / 4) {
result = new TCreatePartitionResult();
TStatus errorStatus = new TStatus(SERVICE_UNAVAILABLE);
errorStatus.setError_msgs(Lists.newArrayList(
String.format("Too many create partition requests, please try again later txn_id=%d",
request.getTxn_id())));
result.setStatus(errorStatus);
return result;
}
result = createPartitionProcess(request);
} catch (Exception t) {
LOG.warn(DebugUtil.getStackTrace(t));
result = new TCreatePartitionResult();
TStatus errorStatus = new TStatus(RUNTIME_ERROR);
errorStatus.setError_msgs(Lists.newArrayList(String.format("txn_id=%d failed. %s",
request.getTxn_id(), t.getMessage())));
result.setStatus(errorStatus);
} finally {
partitionRequestNum.decrementAndGet();
}
return result;
}
|
@Test
public void testCreatePartitionWithRollup() throws TException {
new MockUp<GlobalTransactionMgr>() {
@Mock
public TransactionState getTransactionState(long dbId, long transactionId) {
return new TransactionState();
}
};
Database db = GlobalStateMgr.getCurrentState().getDb("test");
Table table = db.getTable("site_access_day");
((OlapTable) table).setState(OlapTable.OlapTableState.ROLLUP);
List<List<String>> partitionValues = Lists.newArrayList();
List<String> values = Lists.newArrayList();
values.add("1990-04-24");
partitionValues.add(values);
FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv);
TCreatePartitionRequest request = new TCreatePartitionRequest();
request.setDb_id(db.getId());
request.setTable_id(table.getId());
request.setPartition_values(partitionValues);
TCreatePartitionResult partition = impl.createPartition(request);
Assert.assertEquals(TStatusCode.RUNTIME_ERROR, partition.getStatus().getStatus_code());
((OlapTable) table).setState(OlapTable.OlapTableState.NORMAL);
}
|
public Result<Boolean> deleteConfig(ConfigInfo request) {
Result<Boolean> checkResult = checkConnection(request);
if (!checkResult.isSuccess()) {
return checkResult;
}
String group = request.getGroup();
ConfigClient client = getConfigClient(request.getNamespace());
if (client instanceof NacosClient) {
group = NacosUtils.rebuildGroup(request.getGroup());
}
boolean result = client.removeConfig(request.getKey(), group);
if (result) {
return new Result<>(ResultCodeType.SUCCESS.getCode(), null, true);
}
return new Result<>(ResultCodeType.DELETE_FAIL.getCode(), null, false);
}
|
@Test
public void deleteConfig() {
ConfigInfo configInfo = new ConfigInfo();
configInfo.setGroup(GROUP);
configInfo.setKey(KEY);
Result<Boolean> result = configService.deleteConfig(configInfo);
Assert.assertTrue(result.isSuccess());
Assert.assertTrue(result.getData());
}
|
public void isEqualTo(@Nullable Object expected) {
standardIsEqualTo(expected);
}
|
@Test
public void isEqualToFailureWithDifferentTypesAndSameToString() {
Object a = "true";
Object b = true;
expectFailure.whenTesting().that(a).isEqualTo(b);
assertFailureKeys("expected", "an instance of", "but was", "an instance of");
assertFailureValue("expected", "true");
assertFailureValueIndexed("an instance of", 0, "java.lang.Boolean");
assertFailureValue("but was", "(non-equal value with same string representation)");
assertFailureValueIndexed("an instance of", 1, "java.lang.String");
}
|
public ClientSession toClientSession()
{
return new ClientSession(
parseServer(server),
user,
source,
Optional.empty(),
parseClientTags(clientTags),
clientInfo,
catalog,
schema,
TimeZone.getDefault().getID(),
Locale.getDefault(),
toResourceEstimates(resourceEstimates),
toProperties(sessionProperties),
emptyMap(),
emptyMap(),
toExtraCredentials(extraCredentials),
null,
clientRequestTimeout,
disableCompression,
emptyMap(),
emptyMap(),
validateNextUriSource);
}
|
@Test(expectedExceptions = IllegalArgumentException.class)
public void testInvalidServer()
{
ClientOptions options = new ClientOptions();
options.server = "x:y";
options.toClientSession();
}
|
@Override
public <T> @Nullable Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
checkForDynamicType(typeDescriptor);
return ProtoSchemaTranslator.getSchema((Class<Message>) typeDescriptor.getRawType());
}
|
@Test
public void testOneOfSchema() {
Schema schema = new ProtoMessageSchema().schemaFor(TypeDescriptor.of(OneOf.class));
assertEquals(ONEOF_SCHEMA, schema);
}
|
public CsvReader ignoreFirstLine() {
skipFirstLineAsHeader = true;
return this;
}
|
@Test
void testIgnoreHeaderConfigure() {
CsvReader reader = getCsvReader();
reader.ignoreFirstLine();
assertThat(reader.skipFirstLineAsHeader).isTrue();
}
|
public void createDir(File dir) {
Path dirPath = requireNonNull(dir, "dir can not be null").toPath();
if (dirPath.toFile().exists()) {
checkState(dirPath.toFile().isDirectory(), "%s is not a directory", dirPath);
} else {
try {
createDirectories(dirPath);
} catch (IOException e) {
throw new IllegalStateException(format("Failed to create directory %s", dirPath), e);
}
}
}
|
@Test
public void createDir_creates_specified_directory_and_missing_parents() throws IOException {
File dir1 = new File(temp.newFolder(), "dir1");
File dir2 = new File(dir1, "dir2");
File dir = new File(dir2, "someDir");
assertThat(dir1).doesNotExist();
assertThat(dir2).doesNotExist();
assertThat(dir).doesNotExist();
underTest.createDir(dir);
assertThat(dir1).exists();
assertThat(dir2).exists();
assertThat(dir).exists();
}
|
@Override
public String getValue() {
return value();
}
|
@Test
void testSetValue() {
StringNode n = new StringNode();
n.setValue("\"foo\"");
assertEquals("foo", n.getValue());
n.setValue("foo");
assertEquals("foo", n.getValue());
}
|
static String headerLine(CSVFormat csvFormat) {
return String.join(String.valueOf(csvFormat.getDelimiter()), csvFormat.getHeader());
}
|
@Test
public void givenCustomQuoteCharacter_includesSpecialCharacters() {
CSVFormat csvFormat = csvFormat().withQuote(':');
PCollection<String> input =
pipeline.apply(Create.of(headerLine(csvFormat), ":a,:,1,1.1", "b,2,2.2", "c,3,3.3"));
CsvIOStringToCsvRecord underTest = new CsvIOStringToCsvRecord(csvFormat);
CsvIOParseResult<List<String>> result = input.apply(underTest);
PAssert.that(result.getOutput())
.containsInAnyOrder(
Arrays.asList(
Arrays.asList("a,", "1", "1.1"),
Arrays.asList("b", "2", "2.2"),
Arrays.asList("c", "3", "3.3")));
PAssert.that(result.getErrors()).empty();
pipeline.run();
}
|
@Override
public double score(int[] truth, int[] prediction) {
return of(truth, prediction);
}
|
@Test
public void test() {
System.out.println("specificity");
int[] truth = {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
int[] prediction = {
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
Specificity instance = new Specificity();
double expResult = 0.9714;
double result = instance.score(truth, prediction);
assertEquals(expResult, result, 1E-4);
}
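// Worked check of expResult: specificity = TN / (TN + FP). The truth array has 70 negatives
// (indices 30-99) and the prediction flags two of them (indices 30 and 31) as positive, so
// TN = 68, FP = 2 and 68 / 70 = 0.971428..., matching the asserted 0.9714 within the 1E-4 delta.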
|
@VisibleForTesting
void saveApprove(Long userId, Integer userType, String clientId,
String scope, Boolean approved, LocalDateTime expireTime) {
// Try the update first
OAuth2ApproveDO approveDO = new OAuth2ApproveDO().setUserId(userId).setUserType(userType)
.setClientId(clientId).setScope(scope).setApproved(approved).setExpiresTime(expireTime);
if (oauth2ApproveMapper.update(approveDO) == 1) {
return;
}
// If the update fails, the record does not exist yet, so insert it instead
oauth2ApproveMapper.insert(approveDO);
}
|
@Test
public void testSaveApprove_insert() {
// Prepare parameters
Long userId = randomLongId();
Integer userType = randomEle(UserTypeEnum.values()).getValue();
String clientId = randomString();
String scope = randomString();
Boolean approved = randomBoolean();
LocalDateTime expireTime = LocalDateTime.ofInstant(randomDay(1, 30).toInstant(), ZoneId.systemDefault());
// Mock methods
// Invoke
oauth2ApproveService.saveApprove(userId, userType, clientId,
scope, approved, expireTime);
// Assert
List<OAuth2ApproveDO> result = oauth2ApproveMapper.selectList();
assertEquals(1, result.size());
assertEquals(userId, result.get(0).getUserId());
assertEquals(userType, result.get(0).getUserType());
assertEquals(clientId, result.get(0).getClientId());
assertEquals(scope, result.get(0).getScope());
assertEquals(approved, result.get(0).getApproved());
assertEquals(expireTime, result.get(0).getExpiresTime());
}
|
public static NamingSelector newClusterSelector(Collection<String> clusters) {
if (CollectionUtils.isNotEmpty(clusters)) {
final Set<String> set = new HashSet<>(clusters);
Predicate<Instance> filter = instance -> set.contains(instance.getClusterName());
String clusterString = getUniqueClusterString(clusters);
return new ClusterSelector(filter, clusterString);
} else {
return EMPTY_SELECTOR;
}
}
|
@Test
public void testNewClusterSelector1() {
Instance ins1 = new Instance();
ins1.setClusterName("a");
Instance ins2 = new Instance();
ins2.setClusterName("b");
Instance ins3 = new Instance();
ins3.setClusterName("c");
NamingContext namingContext = mock(NamingContext.class);
when(namingContext.getInstances()).thenReturn(Arrays.asList(ins1, ins2, ins3));
NamingSelector namingSelector1 = NamingSelectorFactory.newClusterSelector(Collections.singletonList("a"));
NamingResult result1 = namingSelector1.select(namingContext);
assertEquals("a", result1.getResult().get(0).getClusterName());
NamingSelector namingSelector2 = NamingSelectorFactory.newClusterSelector(Collections.emptyList());
NamingResult result2 = namingSelector2.select(namingContext);
assertEquals(3, result2.getResult().size());
}
|
public synchronized NumaResourceAllocation allocateNumaNodes(
Container container) throws ResourceHandlerException {
NumaResourceAllocation allocation = allocate(container.getContainerId(),
container.getResource());
if (allocation != null) {
try {
// Update state store.
context.getNMStateStore().storeAssignedResources(container,
NUMA_RESOURCE_TYPE, Arrays.asList(allocation));
} catch (IOException e) {
releaseNumaResource(container.getContainerId());
throw new ResourceHandlerException(e);
}
}
return allocation;
}
|
@Test
public void testAllocateNumaNodeWithMultipleNodesForCpus() throws Exception {
NumaResourceAllocation nodeInfo = numaResourceAllocator
.allocateNumaNodes(getContainer(
ContainerId.fromString("container_1481156246874_0001_01_000001"),
Resource.newInstance(2048, 6)));
Assert.assertEquals("0", String.join(",", nodeInfo.getMemNodes()));
Assert.assertEquals("0,1", String.join(",", nodeInfo.getCpuNodes()));
}
|
@Override
public KTable<Windowed<K>, V> reduce(final Reducer<V> reducer) {
return reduce(reducer, NamedInternal.empty());
}
|
@Test
@SuppressWarnings("unchecked")
public void shouldThrowNullPointerOnMaterializedReduceIfMaterializedIsNull() {
assertThrows(NullPointerException.class, () -> windowedStream.reduce(MockReducer.STRING_ADDER, (Materialized) null));
}
|
@Override
public boolean schemaExists(SnowflakeIdentifier schema) {
Preconditions.checkArgument(
schema.type() == SnowflakeIdentifier.Type.SCHEMA,
"schemaExists requires a SCHEMA identifier, got '%s'",
schema);
if (!databaseExists(SnowflakeIdentifier.ofDatabase(schema.databaseName()))) {
return false;
}
final String finalQuery = "SHOW TABLES IN SCHEMA IDENTIFIER(?) LIMIT 1";
try {
connectionPool.run(
conn ->
queryHarness.query(
conn, finalQuery, TABLE_RESULT_SET_HANDLER, schema.toIdentifierString()));
} catch (SQLException e) {
if (SCHEMA_NOT_FOUND_ERROR_CODES.contains(e.getErrorCode())) {
return false;
}
throw new UncheckedSQLException(e, "Failed to check if schema '%s' exists", schema);
} catch (InterruptedException e) {
throw new UncheckedInterruptedException(
e, "Interrupted while checking if schema '%s' exists", schema);
}
return true;
}
|
@SuppressWarnings("unchecked")
@Test
public void testSchemaExists() throws SQLException {
when(mockResultSet.next())
.thenReturn(true)
.thenReturn(false)
.thenReturn(true)
.thenReturn(false);
when(mockResultSet.getString("name")).thenReturn("DB1").thenReturn("SCHEMA1");
when(mockResultSet.getString("database_name")).thenReturn("DB1");
when(mockResultSet.getString("schema_name")).thenReturn("SCHEMA1");
assertThat(snowflakeClient.schemaExists(SnowflakeIdentifier.ofSchema("DB1", "SCHEMA1")))
.isTrue();
verify(mockQueryHarness)
.query(
eq(mockConnection),
eq("SHOW SCHEMAS IN DATABASE IDENTIFIER(?) LIMIT 1"),
any(JdbcSnowflakeClient.ResultSetParser.class),
eq("DB1"));
verify(mockQueryHarness)
.query(
eq(mockConnection),
eq("SHOW TABLES IN SCHEMA IDENTIFIER(?) LIMIT 1"),
any(JdbcSnowflakeClient.ResultSetParser.class),
eq("DB1.SCHEMA1"));
}
|
@Override
public void addJobStorageOnChangeListener(StorageProviderChangeListener listener) {
onChangeListeners.add(listener);
startTimerToSendUpdates();
}
|
@Test
void metadataChangeListenersAreNotifiedOfMetadataChanges() {
final JobRunrMetadata jobRunrMetadata = new JobRunrMetadata(SOME_METADATA_NAME, "some owner", "some value");
storageProvider.saveMetadata(jobRunrMetadata);
final MetadataChangeListenerForTest changeListener = new MetadataChangeListenerForTest();
storageProvider.addJobStorageOnChangeListener(changeListener);
await()
.untilAsserted(() -> assertThat(changeListener.metadataList).isNotNull());
}
|
@Override
public String getName() {
return getLogger().getName();
}
|
@Test
public void getName() throws Exception {
Logger loggerFromClass = new SLF4JLoggerImpl(SLF4JLoggerImplTest.class);
Logger logger = new SLF4JLoggerImpl(SLF4JLoggerImplTest.class.getCanonicalName());
Assert.assertEquals(loggerFromClass.getName(), logger.getName());
String appName = "app";
if (logger.isDebugEnabled()) {
logger.debug("debug");
logger.debug("debug {}", "1");
logger.debug("debug {} {} {}", "1", "2", "3");
logger.debug("debug", new RuntimeException("runtime"));
}
if (logger.isDebugEnabled(appName)) {
logger.debugWithApp(appName, "debug");
logger.debugWithApp(appName, "debug {}", "1");
logger.debugWithApp(appName, "debug {} {} {}", "1", "2", "3");
logger.debugWithApp(appName, "debug", new RuntimeException("runtime"));
}
if (logger.isInfoEnabled()) {
logger.info("info");
logger.info("info {}", "1");
logger.info("info {} {} {}", "1", "2", "3");
logger.info("info", new RuntimeException("runtime"));
}
if (logger.isInfoEnabled(appName)) {
logger.infoWithApp(appName, "info");
logger.infoWithApp(appName, "info {}", "1");
logger.infoWithApp(appName, "info {} {} {}", "1", "2", "3");
logger.infoWithApp(appName, "info", new RuntimeException("runtime"));
}
if (logger.isWarnEnabled()) {
logger.warn("warn");
logger.warn("warn {}", "1");
logger.warn("warn {} {} {}", "1", "2", "3");
logger.warn("warn", new RuntimeException("runtime"));
}
if (logger.isWarnEnabled(appName)) {
logger.warn(appName, "warn");
logger.warnWithApp(appName, "warn {}", "1");
logger.warnWithApp(appName, "warn {} {} {}", "1", "2", "3");
logger.warnWithApp(appName, "warn", new RuntimeException("runtime"));
}
if (logger.isErrorEnabled()) {
logger.error("error");
logger.error("error {}", "1");
logger.error("error {} {} {}", "1", "2", "3");
logger.error("error", new RuntimeException("runtime"));
}
if (logger.isErrorEnabled(appName)) {
logger.errorWithApp(appName, "error");
logger.errorWithApp(appName, "error {}", "1");
logger.errorWithApp(appName, "error {} {} {}", "1", "2", "3");
logger.errorWithApp(appName, "error", new RuntimeException("runtime"));
}
}
|
@Override
public List<SmsReceiveRespDTO> parseSmsReceiveStatus(String text) {
List<SmsReceiveStatus> statuses = JsonUtils.parseArray(text, SmsReceiveStatus.class);
return convertList(statuses, status -> new SmsReceiveRespDTO().setSuccess(status.getSuccess())
.setErrorCode(status.getErrCode()).setErrorMsg(status.getErrMsg())
.setMobile(status.getPhoneNumber()).setReceiveTime(status.getReportTime())
.setSerialNo(status.getBizId()).setLogId(Long.valueOf(status.getOutId())));
}
|
@Test
public void testParseSmsReceiveStatus() {
// Prepare parameters
String text = "[\n" +
" {\n" +
" \"phone_number\" : \"13900000001\",\n" +
" \"send_time\" : \"2017-01-01 11:12:13\",\n" +
" \"report_time\" : \"2017-02-02 22:23:24\",\n" +
" \"success\" : true,\n" +
" \"err_code\" : \"DELIVERED\",\n" +
" \"err_msg\" : \"用户接收成功\",\n" +
" \"sms_size\" : \"1\",\n" +
" \"biz_id\" : \"12345\",\n" +
" \"out_id\" : \"67890\"\n" +
" }\n" +
"]";
// Mock methods
// Invoke
List<SmsReceiveRespDTO> statuses = smsClient.parseSmsReceiveStatus(text);
// Assert
assertEquals(1, statuses.size());
assertTrue(statuses.get(0).getSuccess());
assertEquals("DELIVERED", statuses.get(0).getErrorCode());
assertEquals("用户接收成功", statuses.get(0).getErrorMsg());
assertEquals("13900000001", statuses.get(0).getMobile());
assertEquals(LocalDateTime.of(2017, 2, 2, 22, 23, 24),
statuses.get(0).getReceiveTime());
assertEquals("12345", statuses.get(0).getSerialNo());
assertEquals(67890L, statuses.get(0).getLogId());
}
|
public static CommandExecutor newInstance(final CommandPacketType commandPacketType, final PostgreSQLCommandPacket commandPacket,
final ConnectionSession connectionSession, final PortalContext portalContext) throws SQLException {
if (commandPacket instanceof SQLReceivedPacket) {
log.debug("Execute packet type: {}, sql: {}", commandPacketType, ((SQLReceivedPacket) commandPacket).getSQL());
} else {
log.debug("Execute packet type: {}", commandPacketType);
}
if (!(commandPacket instanceof PostgreSQLAggregatedCommandPacket)) {
return getCommandExecutor(commandPacketType, commandPacket, connectionSession, portalContext);
}
PostgreSQLAggregatedCommandPacket aggregatedCommandPacket = (PostgreSQLAggregatedCommandPacket) commandPacket;
if (aggregatedCommandPacket.isContainsBatchedStatements() && aggregatedCommandPacket.getPackets().stream().noneMatch(OpenGaussComBatchBindPacket.class::isInstance)) {
return new PostgreSQLAggregatedCommandExecutor(getExecutorsOfAggregatedBatchedStatements(aggregatedCommandPacket, connectionSession, portalContext));
}
List<CommandExecutor> result = new ArrayList<>(aggregatedCommandPacket.getPackets().size());
for (PostgreSQLCommandPacket each : aggregatedCommandPacket.getPackets()) {
result.add(getCommandExecutor((CommandPacketType) each.getIdentifier(), each, connectionSession, portalContext));
}
return new PostgreSQLAggregatedCommandExecutor(result);
}
|
@Test
void assertNewPostgreSQLSimpleQueryExecutor() throws SQLException {
PostgreSQLComQueryPacket queryPacket = mock(PostgreSQLComQueryPacket.class);
when(queryPacket.getSQL()).thenReturn("");
CommandExecutor actual = OpenGaussCommandExecutorFactory.newInstance(PostgreSQLCommandPacketType.SIMPLE_QUERY, queryPacket, connectionSession, portalContext);
assertThat(actual, instanceOf(OpenGaussComQueryExecutor.class));
}
|
@SuppressWarnings("unchecked")
public static <S, F> S visit(final SqlType type, final SqlTypeWalker.Visitor<S, F> visitor) {
final BiFunction<SqlTypeWalker.Visitor<?, ?>, SqlType, Object> handler = HANDLER
.get(type.baseType());
if (handler == null) {
throw new UnsupportedOperationException("Unsupported schema type: " + type.baseType());
}
return (S) handler.apply(visitor, type);
}
|
@Test
public void shouldVisitBigInt() {
// Given:
final SqlPrimitiveType type = SqlTypes.BIGINT;
when(visitor.visitBigInt(any())).thenReturn("Expected");
// When:
final String result = SqlTypeWalker.visit(type, visitor);
// Then:
verify(visitor).visitBigInt(same(type));
assertThat(result, is("Expected"));
}
|
public List<S> loadInstanceListSorted() {
load();
return createInstanceList(sortedClassList);
}
|
@Test
public void testLoadInstanceListSorted() {
List<ProcessorSlot> sortedSlots = SpiLoader.of(ProcessorSlot.class).loadInstanceListSorted();
assertNotNull(sortedSlots);
// Total 9 default slots in sentinel-core
assertEquals(9, sortedSlots.size());
// Verify the order of slot
int index = 0;
assertTrue(sortedSlots.get(index++) instanceof NodeSelectorSlot);
assertTrue(sortedSlots.get(index++) instanceof ClusterBuilderSlot);
assertTrue(sortedSlots.get(index++) instanceof LogSlot);
assertTrue(sortedSlots.get(index++) instanceof StatisticSlot);
assertTrue(sortedSlots.get(index++) instanceof AuthoritySlot);
assertTrue(sortedSlots.get(index++) instanceof SystemSlot);
assertTrue(sortedSlots.get(index++) instanceof FlowSlot);
assertTrue(sortedSlots.get(index++) instanceof DefaultCircuitBreakerSlot);
assertTrue(sortedSlots.get(index++) instanceof DegradeSlot);
}
|
@Override
public byte[] randomKey(RedisClusterNode node) {
RedisClient entry = getEntry(node);
RFuture<byte[]> f = executorService.readRandomAsync(entry, ByteArrayCodec.INSTANCE, RedisCommands.RANDOM_KEY);
return syncFuture(f);
}
|
@Test
public void testRandomKey() {
testInCluster(connection -> {
RedissonClient redisson = (RedissonClient) connection.getNativeConnection();
StringRedisTemplate redisTemplate = new StringRedisTemplate();
redisTemplate.setConnectionFactory(new RedissonConnectionFactory(redisson));
redisTemplate.afterPropertiesSet();
for (int i = 0; i < 10; i++) {
redisTemplate.opsForValue().set("i" + i, "i" + i);
}
for (RedisClusterNode clusterNode : redisTemplate.getConnectionFactory().getClusterConnection().clusterGetNodes()) {
String key = redisTemplate.opsForCluster().randomKey(clusterNode);
assertThat(key).isNotNull();
}
});
}
|
@Override
public void handleRequest(RestRequest request, RequestContext requestContext, final Callback<RestResponse> callback)
{
if (HttpMethod.POST != HttpMethod.valueOf(request.getMethod()))
{
_log.error("POST is expected, but " + request.getMethod() + " received");
callback.onError(RestException.forError(HttpStatus.S_405_METHOD_NOT_ALLOWED.getCode(), "Invalid method"));
return;
}
// Disable server-side latency instrumentation for multiplexed requests
requestContext.putLocalAttr(TimingContextUtil.TIMINGS_DISABLED_KEY_NAME, true);
IndividualRequestMap individualRequests;
try
{
individualRequests = extractIndividualRequests(request);
if (_multiplexerSingletonFilter != null) {
individualRequests = _multiplexerSingletonFilter.filterRequests(individualRequests);
}
}
catch (RestException e)
{
_log.error("Invalid multiplexed request", e);
callback.onError(e);
return;
}
catch (Exception e)
{
_log.error("Invalid multiplexed request", e);
callback.onError(RestException.forError(HttpStatus.S_400_BAD_REQUEST.getCode(), e));
return;
}
// prepare the map of individual responses to be collected
final IndividualResponseMap individualResponses = new IndividualResponseMap(individualRequests.size());
final Map<String, HttpCookie> responseCookies = new HashMap<>();
// all tasks are Void and side effect based, that will be useful when we add streaming
Task<?> requestProcessingTask = createParallelRequestsTask(request, requestContext, individualRequests, individualResponses, responseCookies);
Task<Void> responseAggregationTask = Task.action("send aggregated response", () ->
{
RestResponse aggregatedResponse = aggregateResponses(individualResponses, responseCookies);
callback.onSuccess(aggregatedResponse);
}
);
_engine.run(requestProcessingTask.andThen(responseAggregationTask), MUX_PLAN_CLASS);
}
|
@Test(dataProvider = "multiplexerConfigurations")
public void testHandleTooManyParallelRequests(MultiplexerRunMode multiplexerRunMode) throws Exception
{
// MultiplexedRequestHandlerImpl is created with the request limit set to 2
MultiplexedRequestHandlerImpl multiplexer = createMultiplexer(null, multiplexerRunMode);
RestRequest request = fakeMuxRestRequest(ImmutableMap.of("0", fakeIndRequest(FOO_URL),
"1", fakeIndRequest(FOO_URL),
"2", fakeIndRequest(FOO_URL)));
FutureCallback<RestResponse> callback = new FutureCallback<>();
multiplexer.handleRequest(request, new RequestContext(), callback);
assertEquals(getErrorStatus(callback), HttpStatus.S_400_BAD_REQUEST);
}
|
public CoercedExpressionResult coerce() {
final Class<?> leftClass = left.getRawClass();
final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass);
final Class<?> rightClass = right.getRawClass();
final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass);
boolean sameClass = leftClass == rightClass;
boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression;
if (sameClass || isUnificationExpression) {
return new CoercedExpressionResult(left, right);
}
if (!canCoerce()) {
throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass));
}
if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) {
CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression());
return new CoercedExpressionResult(
new TypedExpression(castExpression, double.class, left.getType()),
right,
false);
}
final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass );
final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass);
boolean rightAsStaticField = false;
final Expression rightExpression = right.getExpression();
final TypedExpression coercedRight;
if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) {
final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass);
coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType);
coercedRight.setType( leftClass );
} else if (shouldCoerceBToString(left, right)) {
coercedRight = coerceToString(right);
} else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) {
coercedRight = castToClass(leftClass);
} else if (leftClass == long.class && rightClass == int.class) {
coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression()));
} else if (leftClass == Date.class && rightClass == String.class) {
coercedRight = coerceToDate(right);
rightAsStaticField = true;
} else if (leftClass == LocalDate.class && rightClass == String.class) {
coercedRight = coerceToLocalDate(right);
rightAsStaticField = true;
} else if (leftClass == LocalDateTime.class && rightClass == String.class) {
coercedRight = coerceToLocalDateTime(right);
rightAsStaticField = true;
} else if (shouldCoerceBToMap()) {
coercedRight = castToClass(toNonPrimitiveType(leftClass));
} else if (isBoolean(leftClass) && !isBoolean(rightClass)) {
coercedRight = coerceBoolean(right);
} else {
coercedRight = right;
}
final TypedExpression coercedLeft;
if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) {
coercedLeft = coerceToString(left);
} else {
coercedLeft = left;
}
return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField);
}
|
@Test
public void castToShort() {
final TypedExpression left = expr(THIS_PLACEHOLDER + ".getAgeAsShort()", java.lang.Short.class);
final TypedExpression right = expr("40", int.class);
final CoercedExpression.CoercedExpressionResult coerce = new CoercedExpression(left, right, false).coerce();
assertThat(coerce.getCoercedRight()).isEqualTo(expr("(short)40", int.class));
}
|
protected List<ProviderInfo> resolveDomain(String directUrl) {
List<ProviderInfo> providerInfos = domainCache.get(directUrl);
if (providerInfos != null) {
return providerInfos;
}
ProviderInfo providerInfo = convertToProviderInfo(directUrl);
List<ProviderInfo> result = directUrl2IpUrl(providerInfo, domainCache.get(directUrl));
domainCache.put(directUrl, result);
return result;
}
|
@Test
public void testResolveDomain() {
String mockKey = "mock";
List<ProviderInfo> value = new ArrayList<>();
domainRegistry.domainCache.put(mockKey, value);
assertSame(value, domainRegistry.resolveDomain(mockKey));
String local = "127.0.0.1";
List<ProviderInfo> actual = domainRegistry.resolveDomain(local);
assertEquals(1, actual.size());
assertEquals(local, actual.get(0).getHost());
}
|
public static String convertToHtml(String input) {
return new Markdown().convert(StringEscapeUtils.escapeHtml4(input));
}
|
@Test
public void shouldSupportEmptyQuoteLineWithAndWithoutLeadingSpace() {
assertThat(Markdown.convertToHtml("""
>just some quotation without leading space
>
>
> continue quotation"""
)).isEqualTo("""
<blockquote>just some quotation without leading space<br/>
<br/>
<br/>
continue quotation<br/></blockquote>""");
}
@Test
public void shouldConvertSingleGreaterThanChar() {
assertThat(Markdown.convertToHtml(">")).isEqualTo("<blockquote><br/></blockquote>");
}
|
public String getName() {
return name;
}
|
@Test
public void testGetName_ShouldReturnCorrectName() {
assertEquals("testAttribute", attribute.getName());
}
|
@EventListener(ApplicationEvent.class)
void onApplicationEvent(ApplicationEvent event) {
if (AnnotationUtils.findAnnotation(event.getClass(), SharedEvent.class) == null) {
return;
}
// we should copy the plugins list to avoid ConcurrentModificationException
var startedPlugins = new ArrayList<>(pluginManager.getStartedPlugins());
// broadcast event to all started plugins except the publisher
for (var startedPlugin : startedPlugins) {
var plugin = startedPlugin.getPlugin();
if (!(plugin instanceof SpringPlugin springPlugin)) {
continue;
}
var context = springPlugin.getApplicationContext();
// make sure the context is running before publishing the event
if (context instanceof Lifecycle lifecycle && lifecycle.isRunning()) {
context.publishEvent(new HaloSharedEventDelegator(this, event));
}
}
}
|
@Test
void shouldUnwrapPluginSharedEventAndRepublish() {
var event = new PluginSharedEventDelegator(this, new FakeSharedEvent(this));
dispatcher.onApplicationEvent(event);
verify(publisher).publishEvent(event.getDelegate());
}
|
public void validate(CreateReviewAnswerRequest request) {
validateNotContainingText(request);
Question question = questionRepository.findById(request.questionId())
.orElseThrow(() -> new SubmittedQuestionNotFoundException(request.questionId()));
OptionGroup optionGroup = optionGroupRepository.findByQuestionId(question.getId())
.orElseThrow(() -> new OptionGroupNotFoundByQuestionIdException(question.getId()));
validateRequiredQuestion(request, question);
validateOnlyIncludingProvidedOptionItem(request, optionGroup);
validateCheckedOptionItemCount(request, optionGroup);
}
|
@Test
void 저장되지_않은_옵션그룹에_대해_응답하면_예외가_발생한다() {
// given
CreateReviewAnswerRequest request = new CreateReviewAnswerRequest(
savedQuestion.getId(), List.of(1L), null
);
// when, then
assertThatCode(() -> createCheckBoxAnswerRequestValidator.validate(request))
.isInstanceOf(OptionGroupNotFoundByQuestionIdException.class);
}
|
@Override
public void batchRegisterInstance(String serviceName, String groupName, List<Instance> instances)
throws NacosException {
NamingUtils.batchCheckInstanceIsLegal(instances);
batchCheckAndStripGroupNamePrefix(instances, groupName);
clientProxy.batchRegisterService(serviceName, groupName, instances);
}
|
@Test
void testBatchRegisterInstance() throws NacosException {
Instance instance = new Instance();
String serviceName = "service1";
String ip = "1.1.1.1";
int port = 10000;
instance.setServiceName(serviceName);
instance.setEphemeral(true);
instance.setPort(port);
instance.setIp(ip);
List<Instance> instanceList = new ArrayList<>();
instanceList.add(instance);
//when
client.batchRegisterInstance(serviceName, Constants.DEFAULT_GROUP, instanceList);
//then
verify(proxy, times(1)).batchRegisterService(eq(serviceName), eq(Constants.DEFAULT_GROUP),
argThat(instances -> CollectionUtils.isEqualCollection(instanceList, instances)));
}
|
public static void warn(final Logger logger, final String format, final Supplier<Object> supplier) {
if (logger.isWarnEnabled()) {
logger.warn(format, supplier.get());
}
}
|
@Test
public void testAtLeastOnceWarnWithFormat() {
when(logger.isWarnEnabled()).thenReturn(true);
LogUtils.warn(logger, "testWarn: {}", supplier);
verify(supplier, atLeastOnce()).get();
}
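// A hedged usage sketch of the warn(...) helper above: the Supplier defers building the argument
// until the WARN level is confirmed enabled. Assumes an SLF4J Logger and that the LogUtils class
// shown above is importable; expensiveSummary() is a hypothetical helper used only for illustration.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogUtilsUsageSketch {

    private static final Logger LOG = LoggerFactory.getLogger(LogUtilsUsageSketch.class);

    static String expensiveSummary() {
        return "summary"; // stands in for work you only want to do when the message is actually logged
    }

    public static void main(String[] args) {
        // The lambda is not evaluated unless LOG.isWarnEnabled() returns true
        LogUtils.warn(LOG, "state dump: {}", () -> expensiveSummary());
    }
}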
|
@Override
public String toSpec() {
return String.format("plain:%s:%s", algorithm.toString().toLowerCase(), Base64.getEncoder().encodeToString(plainKey));
}
|
@Test
public void testToSpec() {
String base64Key = Base64.getEncoder().encodeToString(normalKey.getPlainKey());
String expectedSpec = "plain:aes_128:" + base64Key;
assertEquals(expectedSpec, normalKey.toSpec());
}
|
public static String getSonarqubeVersion() {
if (sonarqubeVersion == null) {
loadVersion();
}
return sonarqubeVersion;
}
|
@Test
public void getSonarQubeVersion_must_always_return_same_value() {
String sonarqubeVersion = SonarQubeVersionHelper.getSonarqubeVersion();
for (int i = 0; i < 3; i++) {
assertThat(SonarQubeVersionHelper.getSonarqubeVersion()).isEqualTo(sonarqubeVersion);
}
}
|