focal_method | test_case |
---|---|
@Override
public void addHeatMapActivity(Class<?> activity) {
} | @Test
public void addHeatMapActivity() {
mSensorsAPI.addHeatMapActivity(EmptyActivity.class);
Assert.assertFalse(mSensorsAPI.isHeatMapActivity(EmptyActivity.class));
} |
public String destinationURL(File rootPath, File file) {
return destinationURL(rootPath, file, getSrc(), getDest());
} | @Test
public void shouldProvideAppendFilePathToDestWhenUsingSingleStarToMatchFile() {
ArtifactPlan artifactPlan = new ArtifactPlan(ArtifactPlanType.file, "test/a/b/*.log", "logs");
assertThat(artifactPlan.destinationURL(new File("pipelines/pipelineA"),
new File("pipelines/pipelineA/test/a/b/a.log"))).isEqualTo("logs");
} |
public void clean(final Date now) {
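// First delete files past their expiry, then enforce the total size cap (when one is configured), then sweep directories left empty.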
List<String> files = this.findFiles();
List<String> expiredFiles = this.filterFiles(files, this.createExpiredFileFilter(now));
for (String f : expiredFiles) {
this.delete(new File(f));
}
if (this.totalSizeCap != CoreConstants.UNBOUNDED_TOTAL_SIZE_CAP && this.totalSizeCap > 0) {
this.capTotalSize(files);
}
List<String> emptyDirs = this.findEmptyDirs();
for (String dir : emptyDirs) {
this.delete(new File(dir));
}
} | @Test
public void removesOnlyExpiredFiles() {
remover.clean(EXPIRY);
for (File f : expiredFiles) {
verify(fileProvider).deleteFile(f);
}
for (File f : recentFiles) {
verify(fileProvider, never()).deleteFile(f);
}
} |
@Override
public Iterator<LongStatistic> getLongStatistics() {
return new LongStatisticIterator(stats.getData());
} | @Test
public void testGetLongStatistics() {
Iterator<LongStatistic> iter = storageStatistics.getLongStatistics();
while (iter.hasNext()) {
final LongStatistic longStat = iter.next();
assertNotNull(longStat);
final long expectedStat = getStatisticsValue(longStat.getName());
LOG.info("{}: FileSystem.Statistics={}, FileSystemStorageStatistics={}",
longStat.getName(), expectedStat, longStat.getValue());
assertEquals(expectedStat, longStat.getValue());
}
} |
public static int getKeyMetaLen(int level) {
Preconditions.checkArgument(
level >= 0 && level < KEY_META_LEN_BY_LEVEL_ARRAY.length,
"level " + level + " out of range [0, " + KEY_META_LEN_BY_LEVEL_ARRAY.length + ")");
return KEY_META_LEN_BY_LEVEL_ARRAY[level];
} | @Test
void testKeySpacePutAndGet() {
for (int level = 0; level <= MAX_LEVEL; level++) {
int keyLen = ThreadLocalRandom.current().nextInt(100) + 1;
KeySpace keySpace = createKeySpace(level, keyLen);
int keyMetaLen = SkipListUtils.getKeyMetaLen(level);
int totalKeySpaceLen = keyMetaLen + keyLen;
int offset = 100;
MemorySegment segment =
MemorySegmentFactory.allocateUnpooledSegment(totalKeySpaceLen + offset);
putKeySpace(keySpace, segment, offset);
verifyGetKeySpace(keySpace, segment, offset);
}
} |
public static Iterator<Integer> lineOffsetIterator(String input) {
return new LineOffsetIterator(input);
} | @Test
public void terminalOffset() {
Iterator<Integer> it = Newlines.lineOffsetIterator("foo\nbar\n");
it.next();
it.next();
it.next();
try {
it.next();
fail();
} catch (NoSuchElementException e) {
// expected
}
it = Newlines.lineOffsetIterator("foo\nbar");
it.next();
it.next();
try {
it.next();
fail();
} catch (NoSuchElementException e) {
// expected
}
} |
static <T> T copy(T object, DataComplexTable alreadyCopied) throws CloneNotSupportedException
{
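// Deep copy: complex values are cloned once and recorded in alreadyCopied, so shared references (and cycles) resolve to the same clone; primitives are returned unchanged.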
if (object == null)
{
return null;
}
else if (isComplex(object))
{
DataComplex src = (DataComplex) object;
@SuppressWarnings("unchecked")
T found = (T) alreadyCopied.get(src);
if (found != null)
{
return found;
}
else
{
DataComplex clone = src.clone();
alreadyCopied.put(src, clone);
if (clone instanceof DataMap)
{
((DataMap)clone).copyReferencedObjects(alreadyCopied);
}
else if (clone instanceof DataList)
{
((DataList)clone).copyReferencedObjects(alreadyCopied);
}
@SuppressWarnings("unchecked")
T converted = (T) clone;
return converted;
}
}
else if (isPrimitive(object))
{
return object;
}
else
{
throw new CloneNotSupportedException("Illegal value encountered: " + object);
}
} | @Test
public void testDeepCopy() throws CloneNotSupportedException
{
DataMap root = new DataMap();
DataMap a = new DataMap();
a.put("key", "a");
DataMap b = a.copy();
b.put("key", "b");
root.put("a", a);
root.put("b", b);
DataMap copy = root.copy();
assertEquals(root, copy);
((DataMap)copy.get("a")).put("key", "A");
((DataMap)copy.get("b")).put("key", "B");
DataMap rootA = (DataMap)root.get("a");
DataMap rootB = (DataMap)root.get("b");
DataMap copyA = (DataMap)copy.get("a");
DataMap copyB = (DataMap)copy.get("b");
assertEquals(rootA.get("key"), ("a"));
assertEquals(rootB.get("key"), ("b"));
assertEquals(copyA.get("key"), ("A"));
assertEquals(copyB.get("key"), ("B"));
} |
CachedLayer writeCompressed(Blob compressedLayerBlob) throws IOException {
// Creates the layers directory if it doesn't exist.
Files.createDirectories(cacheStorageFiles.getLayersDirectory());
// Creates the temporary directory.
Files.createDirectories(cacheStorageFiles.getTemporaryDirectory());
try (TempDirectoryProvider tempDirectoryProvider = new TempDirectoryProvider()) {
Path temporaryLayerDirectory =
tempDirectoryProvider.newDirectory(cacheStorageFiles.getTemporaryDirectory());
// Writes the layer file to the temporary directory.
WrittenLayer writtenLayer =
writeCompressedLayerBlobToDirectory(compressedLayerBlob, temporaryLayerDirectory);
// Moves the temporary directory to the final location.
moveIfDoesNotExist(
temporaryLayerDirectory, cacheStorageFiles.getLayerDirectory(writtenLayer.layerDigest));
// Updates cachedLayer with the blob information.
Path layerFile =
cacheStorageFiles.getLayerFile(writtenLayer.layerDigest, writtenLayer.layerDiffId);
return CachedLayer.builder()
.setLayerDigest(writtenLayer.layerDigest)
.setLayerDiffId(writtenLayer.layerDiffId)
.setLayerSize(writtenLayer.layerSize)
.setLayerBlob(Blobs.from(layerFile))
.build();
}
} | @Test
public void testWriteCompressed() throws IOException {
Blob uncompressedLayerBlob = Blobs.from("uncompressedLayerBlob");
Blob compressedLayerBlob = compress(uncompressedLayerBlob);
CachedLayer cachedLayer = cacheStorageWriter.writeCompressed(compressedLayerBlob);
verifyCachedLayer(cachedLayer, uncompressedLayerBlob, compressedLayerBlob);
} |
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
@Override
public ClusterInfo get() {
return getClusterInfo();
} | @Test
public void testClusterSchedulerOverviewFifo() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("scheduler-overview").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
verifyClusterSchedulerOverView(json, "Fifo Scheduler");
} |
public void execute() {
Optional<String> login = configuration.get(CoreProperties.LOGIN);
Optional<String> password = configuration.get(CoreProperties.PASSWORD);
String warningMessage = null;
if (password.isPresent()) {
warningMessage = PASSWORD_WARN_MESSAGE;
} else if (login.isPresent()) {
warningMessage = LOGIN_WARN_MESSAGE;
}
if (warningMessage != null) {
if (isScannerDotNet()) {
warningMessage += SCANNER_DOTNET_WARN_MESSAGE;
}
LOG.warn(warningMessage);
analysisWarnings.addUnique(warningMessage);
}
} | @Test
public void execute_whenUsingLogin_shouldAddWarning() {
settings.setProperty(CoreProperties.LOGIN, "test");
underTest.execute();
verify(analysisWarnings, times(1)).addUnique(LOGIN_WARN_MESSAGE);
Assertions.assertThat(logger.logs(Level.WARN)).contains(LOGIN_WARN_MESSAGE);
} |
public Map<String, List<TopicPartitionInfo>> getTopicPartitionInfo(final Set<String> topics) {
log.debug("Starting to describe topics {} in partition assignor.", topics);
long currentWallClockMs = time.milliseconds();
final long deadlineMs = currentWallClockMs + retryTimeoutMs;
final Set<String> topicsToDescribe = new HashSet<>(topics);
final Map<String, List<TopicPartitionInfo>> topicPartitionInfo = new HashMap<>();
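// Describe the topics that are still unresolved, retrying with back-off until they all succeed or the deadline passes.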
while (!topicsToDescribe.isEmpty()) {
final Map<String, List<TopicPartitionInfo>> existed = getTopicPartitionInfo(topicsToDescribe, null);
topicPartitionInfo.putAll(existed);
topicsToDescribe.removeAll(topicPartitionInfo.keySet());
if (!topicsToDescribe.isEmpty()) {
currentWallClockMs = time.milliseconds();
if (currentWallClockMs >= deadlineMs) {
final String timeoutError = String.format(
"Could not create topics within %d milliseconds. " +
"This can happen if the Kafka cluster is temporarily not available.",
retryTimeoutMs);
log.error(timeoutError);
throw new TimeoutException(timeoutError);
}
log.info(
"Topics {} could not be describe fully. Will retry in {} milliseconds. Remaining time in milliseconds: {}",
topics,
retryBackOffMs,
deadlineMs - currentWallClockMs
);
Utils.sleep(retryBackOffMs);
}
}
log.debug("Completed describing topics");
return topicPartitionInfo;
} | @Test
public void shouldThrowTimeoutExceptionIfGetPartitionInfoHasTopicDescriptionTimeout() {
mockAdminClient.timeoutNextRequest(1);
final InternalTopicManager internalTopicManager =
new InternalTopicManager(time, mockAdminClient, new StreamsConfig(config));
try {
final Set<String> topic1set = new HashSet<>(Collections.singletonList(topic1));
internalTopicManager.getTopicPartitionInfo(topic1set, null);
} catch (final TimeoutException expected) {
assertEquals(TimeoutException.class, expected.getCause().getClass());
}
mockAdminClient.timeoutNextRequest(1);
try {
final Set<String> topic2set = new HashSet<>(Collections.singletonList(topic2));
internalTopicManager.getTopicPartitionInfo(topic2set, null);
} catch (final TimeoutException expected) {
assertEquals(TimeoutException.class, expected.getCause().getClass());
}
} |
public static boolean getBooleanValue(String key, boolean defaultValue) {
return getBooleanValue(null, key, defaultValue);
} | @Test
public void getBooleanValue() throws Exception {
} |
@VisibleForTesting
void validateMenu(Long parentId, String name, Long id) {
MenuDO menu = menuMapper.selectByParentIdAndName(parentId, name);
if (menu == null) {
return;
}
// If id is null, there is no need to check whether the duplicate is the menu with the same id
if (id == null) {
throw exception(MENU_NAME_DUPLICATE);
}
if (!menu.getId().equals(id)) {
throw exception(MENU_NAME_DUPLICATE);
}
} | @Test
public void testValidateMenu_sonMenuNameDuplicate() {
// mock parent and child menus
MenuDO sonMenu = createParentAndSonMenu();
// prepare parameters
Long parentId = sonMenu.getParentId();
Long otherSonMenuId = randomLongId();
String otherSonMenuName = sonMenu.getName(); // same name
// invoke and assert the expected exception
assertServiceException(() -> menuService.validateMenu(parentId, otherSonMenuName, otherSonMenuId),
MENU_NAME_DUPLICATE);
} |
@Override
public Serde<GenericKey> create(
final FormatInfo format,
final PersistenceSchema schema,
final KsqlConfig ksqlConfig,
final Supplier<SchemaRegistryClient> schemaRegistryClientFactory,
final String loggerNamePrefix,
final ProcessingLogContext processingLogContext,
final Optional<TrackedCallback> tracker
) {
return createInner(
format,
schema,
ksqlConfig,
schemaRegistryClientFactory,
loggerNamePrefix,
processingLogContext,
tracker
);
} | @Test
public void shouldReturnedSessionWindowedSerdeForSessionWindowed() {
// When:
final Serde<Windowed<GenericKey>> result = factory
.create(format, SESSION_WND, schema, config, srClientFactory, LOGGER_PREFIX,
processingLogCxt,
Optional.empty());
// Then:
assertThat(result, is(instanceOf(SessionWindowedSerde.class)));
} |
public static <T> T clone(T value) {
return ObjectMapperWrapper.INSTANCE.clone(value);
} | @Test
public void cloneDeserializeStepErrorTest() {
MyEntity entity = new MyEntity();
entity.setValue("some value");
entity.setPojos(Arrays.asList(
createMyPojo("first value", MyType.A, "1.1", createOtherPojo("USD")),
createMyPojo("second value", MyType.B, "1.2", createOtherPojo("BRL"))
));
MyEntity clone = JacksonUtil.clone(entity);
assertEquals(clone, entity);
List<MyPojo> clonePojos = JacksonUtil.clone(entity.getPojos());
assertEquals(clonePojos, entity.getPojos());
} |
@Override
public void register(@NonNull ThreadPoolPlugin plugin) {
mainLock.runWithWriteLock(() -> {
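// The write lock makes the duplicate-id check, the index update and the start call atomic with respect to other registrations.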
String id = plugin.getId();
Assert.isTrue(!isRegistered(id), "The plugin with id [" + id + "] has been registered");
registeredPlugins.put(id, plugin);
forQuickIndexes(quickIndex -> quickIndex.addIfPossible(plugin));
plugin.start();
});
} | @Test
public void testRegister() {
manager.register(new TestShutdownAwarePlugin());
Assert.assertEquals(1, manager.getAllPlugins().size());
} |
@Override
public Type classify(final Throwable e) {
Type type = Type.UNKNOWN;
if (e instanceof KsqlFunctionException
|| (e instanceof StreamsException
&& ExceptionUtils.getRootCause(e) instanceof KsqlFunctionException)) {
type = Type.USER;
}
if (type == Type.USER) {
LOG.info(
"Classified error as USER error based on invalid user input. Query ID: {} Exception: {}",
queryId,
e);
}
return type;
} | @Test
public void shouldClassifyWrappedStreamsExceptionWithoutKsqlFunctionExceptionAsUnknownError() {
// Given:
final Exception e = new StreamsException(new ArithmeticException());
// When:
final Type type = new KsqlFunctionClassifier("").classify(e);
// Then:
assertThat(type, is(Type.UNKNOWN));
} |
KubernetesApiProvider buildKubernetesApiUrlProvider() {
try {
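// Probe the EndpointSlices API first; any failure means it is unavailable, so fall back to the legacy Endpoints API.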
String endpointSlicesUrlString =
String.format("%s/apis/discovery.k8s.io/v1/namespaces/%s/endpointslices", kubernetesMaster, namespace);
callGet(endpointSlicesUrlString);
LOGGER.finest("Using EndpointSlices API to discover endpoints.");
} catch (Exception e) {
LOGGER.finest("EndpointSlices are not available, using Endpoints API to discover endpoints.");
return new KubernetesApiEndpointProvider();
}
return new KubernetesApiEndpointSlicesProvider();
} | @Test
public void buildKubernetesApiUrlProviderReturnsEndpointProvider() {
//language=JSON
String endpointSlicesResponse = """
{
"kind": "Status",
"apiVersion": "v1",
"metadata": {
\s
},
"status": "Failure",
"message": "the server could not find the requested resource",
"reason":"NotFound",
"details": {
\s
},
"code": 404
}""";
stub(String.format("/apis/discovery.k8s.io/v1/namespaces/%s/endpointslices", NAMESPACE),
HttpURLConnection.HTTP_NOT_FOUND, endpointSlicesResponse);
assertThat(kubernetesClient.buildKubernetesApiUrlProvider()).isInstanceOf(KubernetesApiEndpointProvider.class);
} |
public ClassInfo get(Class<?> clz) {
return get(clz.getClassLoader(), clz.getName());
} | @Test
void getClazz() {
ClassInfo ci = instance.get(String.class);
assertNotNull(ci);
ClassInfo ci1 = instance.get(String.class);
assertEquals(ci1, ci);
} |
@Override
public SarifImportResults importSarif(SarifSchema210 sarif210) {
int successFullyImportedIssues = 0;
int successFullyImportedRuns = 0;
int failedRuns = 0;
List<Run> runs = requireNonNull(sarif210.getRuns(), "The runs section of the Sarif report is null");
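// Each run either imports fully (ad hoc rules and issues saved) or is counted as failed; nothing is partially saved for a failed run.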
for (Run run : runs) {
RunMapperResult runMapperResult = tryMapRun(run);
if (runMapperResult.isSuccess()) {
List<NewAdHocRule> newAdHocRules = runMapperResult.getNewAdHocRules();
newAdHocRules.forEach(NewAdHocRule::save);
List<NewExternalIssue> newExternalIssues = runMapperResult.getNewExternalIssues();
successFullyImportedRuns += 1;
successFullyImportedIssues += newExternalIssues.size();
newExternalIssues.forEach(NewExternalIssue::save);
} else {
failedRuns += 1;
}
}
return SarifImportResults.builder()
.successFullyImportedIssues(successFullyImportedIssues)
.successFullyImportedRuns(successFullyImportedRuns)
.failedRuns(failedRuns)
.build();
} | @Test
public void importSarif_shouldDelegateRunMapping_toRunMapper() {
SarifSchema210 sarif210 = mock(SarifSchema210.class);
Run run1 = mock(Run.class);
Run run2 = mock(Run.class);
when(sarif210.getRuns()).thenReturn(List.of(run1, run2));
NewExternalIssue issue1run1 = mock(NewExternalIssue.class);
NewExternalIssue issue2run1 = mock(NewExternalIssue.class);
NewExternalIssue issue1run2 = mock(NewExternalIssue.class);
when(runMapper.mapRun(run1)).thenReturn(new RunMapperResult().newExternalIssues(List.of(issue1run1, issue2run1)));
when(runMapper.mapRun(run2)).thenReturn(new RunMapperResult().newExternalIssues(List.of(issue1run2)));
SarifImportResults sarifImportResults = sarif210Importer.importSarif(sarif210);
assertThat(sarifImportResults.getSuccessFullyImportedIssues()).isEqualTo(3);
assertThat(sarifImportResults.getSuccessFullyImportedRuns()).isEqualTo(2);
assertThat(sarifImportResults.getFailedRuns()).isZero();
verify(issue1run1).save();
verify(issue2run1).save();
verify(issue1run2).save();
} |
@Override
public void debug(String msg) {
logger.debug(msg);
} | @Test
void testDebugWithFormat3() {
jobRunrDashboardLogger.debug("Debug with {} {} {}", "format1", "format2", "format3");
verify(slfLogger).debug("Debug with {} {} {}", "format1", "format2", "format3");
} |
@Override
protected HttpApiSpecificInfo doParse(final ApiBean.ApiDefinition apiDefinition) {
String produce = ShenyuClientConstants.MEDIA_TYPE_ALL_VALUE;
String consume = ShenyuClientConstants.MEDIA_TYPE_ALL_VALUE;
List<ApiHttpMethodEnum> apiHttpMethodEnums = Lists.newArrayList(ApiHttpMethodEnum.NOT_HTTP);
return new HttpApiSpecificInfo(produce, consume, apiHttpMethodEnums);
} | @Test
public void testDoParse() {
final TestApiBeanAnnotatedClassAndMethod bean = new TestApiBeanAnnotatedClassAndMethod();
ApiBean apiBean = new ApiBean(RpcTypeEnum.HTTP.getName(), "bean", bean);
apiBean.addApiDefinition(null, null);
AbstractApiDocRegistrar.HttpApiSpecificInfo httpApiSpecificInfo =
noHttpApiDocRegistrar.doParse(apiBean.getApiDefinitions().get(0));
assertThat(httpApiSpecificInfo.getApiHttpMethodEnums().get(0), is(ApiHttpMethodEnum.NOT_HTTP));
assertThat(httpApiSpecificInfo.getConsume(), is(ShenyuClientConstants.MEDIA_TYPE_ALL_VALUE));
assertThat(httpApiSpecificInfo.getProduce(), is(ShenyuClientConstants.MEDIA_TYPE_ALL_VALUE));
} |
public static void deleteIfExists(final File file)
{
try
{
Files.deleteIfExists(file.toPath());
}
catch (final IOException ex)
{
LangUtil.rethrowUnchecked(ex);
}
} | @Test
void deleteIfExistsErrorHandlerNonExistingFile()
{
final ErrorHandler errorHandler = mock(ErrorHandler.class);
final Path file = tempDir.resolve("non-existing.txt");
IoUtil.deleteIfExists(file.toFile(), errorHandler);
assertFalse(Files.exists(file));
verifyNoInteractions(errorHandler);
} |
public static HttpHeaders parseFromString(String headersString) {
HttpHeaders headers = new HttpHeaders(headersString);
if (StringUtils.isNotEmpty(headersString)) {
try (BufferedReader reader = new BufferedReader(new StringReader(headersString))) {
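// Each header line must look like "Name: value"; a line without a colon is rejected outright.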
String line = reader.readLine();
while (line != null) {
int colonIndex = line.indexOf(':');
if (colonIndex > 0) {
String headerName = line.substring(0, colonIndex);
if (line.length() > colonIndex + 2) {
headers.add(headerName, StringUtils.strip(line.substring(colonIndex + 1)));
} else {
headers.add(headerName, "");
}
line = reader.readLine();
} else {
throw new FlowableIllegalArgumentException("Header line '" + line + "' is invalid");
}
}
} catch (IOException ex) {
throw new FlowableException("IO exception occurred", ex);
}
}
return headers;
} | @Test
void parseFromInvalidStringHeader() {
assertThatThrownBy(() -> HttpHeaders.parseFromString("Content-Type"))
.isInstanceOf(FlowableIllegalArgumentException.class)
.hasMessage("Header line 'Content-Type' is invalid");
} |
private EidStatInfoBuilder() {
} | @Test
public void eidstatinfoBuilder_validParameters_infoCorrect() throws SoapValidationException {
EIDSTATINFO result = eidStatInfoBuilder.eidstatinfoBuilder("PPPPPPPPP", "PPPPPPPPPPPP");
assertEquals("PPPPPPPPP", result.getEIDSTATAGEG().getBURGSERVNRA().toString());
assertEquals("PPPPPPPPPPPP", result.getEIDSTATAGEG().getEIDVOLGNRA().toString());
assertEquals("RIJBEWIJS", result.getEIDSTATAGEG().getEIDDOCTYPE().toString());
} |
public static <E> E checkInstanceOf(Class<E> type, Object object, String errorMessage) {
isNotNull(type, "type");
if (!type.isInstance(object)) {
throw new IllegalArgumentException(errorMessage);
}
return (E) object;
} | @Test(expected = IllegalArgumentException.class)
public void test_checkInstanceOf_whenSuppliedObjectIsNotInstanceOfExpectedType() {
checkInstanceOf(Integer.class, BigInteger.ONE, "argumentName");
} |
@Override
public Type type() {
return Type.ACTION_PROFILE_GROUP_ID;
} | @Test
public void testMethods() {
assertThat(piActionGroupId1, is(notNullValue()));
assertThat(piActionGroupId1.type(), is(PiTableAction.Type.ACTION_PROFILE_GROUP_ID));
assertThat(piActionGroupId1.id(), is(10));
} |
public int compare(Logger l1, Logger l2) {
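// The ROOT logger always sorts first; all other loggers sort lexicographically by name.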
if (l1.getName().equals(l2.getName())) {
return 0;
}
if (l1.getName().equals(Logger.ROOT_LOGGER_NAME)) {
return -1;
}
if (l2.getName().equals(Logger.ROOT_LOGGER_NAME)) {
return 1;
}
return l1.getName().compareTo(l2.getName());
} | @Test
public void testSmoke() {
assertEquals(0, comparator.compare(a, a));
assertEquals(-1, comparator.compare(a, b));
assertEquals(1, comparator.compare(b, a));
assertEquals(-1, comparator.compare(root, a));
// following two tests failed before bug #127 was fixed
assertEquals(1, comparator.compare(a, root));
assertEquals(0, comparator.compare(root, root));
} |
public boolean isDisableOptionalRecord() {
return disableOptionalRecord;
} | @Test
void disableOptionalRecord() {
assertThat(builder.build().isDisableOptionalRecord()).isFalse();
builder.disableOptionalRecord(true);
assertThat(builder.build().isDisableOptionalRecord()).isTrue();
} |
@Override
public Object convertDataUsingConversionMetaData( Object data ) throws KettleValueException {
if ( conversionMetadata == null ) {
throw new KettleValueException(
"API coding error: please specify the conversion metadata before attempting to convert value " + name );
}
// Suppose we have an Integer 123, length 5
// The string variation of this is " 00123"
// To convert this back to an Integer we use the storage metadata
// Specifically, in method convertStringToInteger() we consult the
// storageMetaData to get the correct conversion mask
// That way we're always sure that a conversion works both ways.
//
switch ( conversionMetadata.getType() ) {
case TYPE_STRING:
return getString( data );
case TYPE_INTEGER:
return getInteger( data );
case TYPE_NUMBER:
return getNumber( data );
case TYPE_DATE:
return getDate( data );
case TYPE_BIGNUMBER:
return getBigNumber( data );
case TYPE_BOOLEAN:
return getBoolean( data );
case TYPE_BINARY:
return getBinary( data );
case TYPE_TIMESTAMP:
return getDate( data );
default:
throw new KettleValueException( toString() + " : I can't convert the specified value to data type : "
+ conversionMetadata.getType() );
}
} | @Test
public void testConvertDataUsingConversionMetaData() throws KettleValueException, ParseException {
ValueMetaString base = new ValueMetaString();
double DELTA = 1e-15;
base.setConversionMetadata( new ValueMetaString( "STRING" ) );
Object defaultStringData = "STRING DATA";
String convertedStringData = (String) base.convertDataUsingConversionMetaData( defaultStringData );
assertEquals( "STRING DATA", convertedStringData );
base.setConversionMetadata( new ValueMetaInteger( "INTEGER" ) );
Object defaultIntegerData = "1";
long convertedIntegerData = (long) base.convertDataUsingConversionMetaData( defaultIntegerData );
assertEquals( 1, convertedIntegerData );
base.setConversionMetadata( new ValueMetaNumber( "NUMBER" ) );
Object defaultNumberData = "1.999";
double convertedNumberData = (double) base.convertDataUsingConversionMetaData( defaultNumberData );
assertEquals( 1.999, convertedNumberData, DELTA );
ValueMetaInterface dateConversionMeta = new ValueMetaDate( "DATE" );
dateConversionMeta.setDateFormatTimeZone( TimeZone.getTimeZone( "CST" ) );
base.setConversionMetadata( dateConversionMeta );
Object defaultDateData = "1990/02/18 00:00:00.000";
Date date1 = new Date( 635320800000L );
Date convertedDateData = (Date) base.convertDataUsingConversionMetaData( defaultDateData );
assertEquals( date1, convertedDateData );
base.setConversionMetadata( new ValueMetaBigNumber( "BIG_NUMBER" ) );
Object defaultBigNumber = String.valueOf( BigDecimal.ONE );
BigDecimal convertedBigNumber = (BigDecimal) base.convertDataUsingConversionMetaData( defaultBigNumber );
assertEquals( BigDecimal.ONE, convertedBigNumber );
base.setConversionMetadata( new ValueMetaBoolean( "BOOLEAN" ) );
Object defaultBoolean = "true";
boolean convertedBoolean = (boolean) base.convertDataUsingConversionMetaData( defaultBoolean );
assertEquals( true, convertedBoolean );
} |
@Override public String getLegacyColumnName( DatabaseMetaData dbMetaData, ResultSetMetaData rsMetaData, int index ) throws KettleDatabaseException {
if ( dbMetaData == null ) {
throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoDBMetaDataException" ) );
}
if ( rsMetaData == null ) {
throw new KettleDatabaseException( BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameNoRSMetaDataException" ) );
}
try {
return rsMetaData.getColumnLabel( index );
} catch ( Exception e ) {
throw new KettleDatabaseException( String.format( "%s: %s", BaseMessages.getString( PKG, "MySQLDatabaseMeta.Exception.LegacyColumnNameException" ), e.getMessage() ), e );
}
} | @Test( expected = KettleDatabaseException.class )
public void testGetLegacyColumnNameDatabaseException() throws Exception {
new MariaDBDatabaseMeta().getLegacyColumnName( mock( DatabaseMetaData.class ), getResultSetMetaDataException(), 1 );
} |
public CompletionStage<E> add(CompletionStage<E> future) throws InterruptedException {
checkNotNull(future, "future can't be null");
this.thread = Thread.currentThread();
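// down() is expected to block while the pipeline is at capacity, which is what makes add() interruptible in the paired test.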
down();
futures.add(future);
future.whenCompleteAsync((response, t) -> up(), CALLER_RUNS);
return future;
} | @Test
public void testInterrupt() throws Exception {
final Pipelining<String> pipelining = new Pipelining<>(1);
pipelining.add(mock(CompletionStage.class));
TestThread t = new TestThread() {
@Override
public void doRun() throws Throwable {
pipelining.add(mock(CompletionStage.class));
}
};
t.start();
t.interrupt();
t.assertFailsEventually(InterruptedException.class);
} |
@Override
public void commitOffset(Offset offset) {
if (!committer.isRunning()) {
throw new IllegalStateException(
"Committer not running when commitOffset called.", committer.failureCause());
}
try {
committer.commitOffset(offset).get(1, TimeUnit.MINUTES);
} catch (Exception e) {
throw toCanonical(e).underlying;
}
} | @Test
public void commit() {
doReturn(ApiFutures.immediateFuture(null)).when(fakeCommitter).commitOffset(Offset.of(42));
committer.commitOffset(Offset.of(42));
} |
@Override
public Long clusterCountKeysInSlot(int slot) {
RedisClusterNode node = clusterGetNodeForSlot(slot);
MasterSlaveEntry entry = executorService.getConnectionManager().getEntry(new InetSocketAddress(node.getHost(), node.getPort()));
RFuture<Long> f = executorService.readAsync(entry, StringCodec.INSTANCE, RedisCommands.CLUSTER_COUNTKEYSINSLOT, slot);
return syncFuture(f);
} | @Test
public void testClusterCountKeysInSlot() {
Long t = connection.clusterCountKeysInSlot(1);
assertThat(t).isZero();
} |
public static Criterion matchMplsLabel(MplsLabel mplsLabel) {
return new MplsCriterion(mplsLabel);
} | @Test
public void testMatchMplsLabelMethod() {
Criterion matchMplsLabel = Criteria.matchMplsLabel(mpls1);
MplsCriterion mplsCriterion =
checkAndConvert(matchMplsLabel,
Criterion.Type.MPLS_LABEL,
MplsCriterion.class);
assertThat(mplsCriterion.label(), is(equalTo(mpls1)));
} |
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
super.onDataReceived(device, data);
final Integer offset = readTimeZone(data, 0);
if (offset == null) {
onInvalidDataReceived(device, data);
return;
}
if (offset == -128) {
onUnknownTimeZoneReceived(device);
} else if (offset < -48 || offset > 56) {
onInvalidDataReceived(device, data);
} else {
onTimeZoneReceived(device, offset * 15);
}
} | @Test
public void onTimeZoneReceived_unknown() {
final Data data = new Data(new byte[] { -128 });
callback.onDataReceived(null, data);
assertTrue(unknownTimeZone);
} |
@Override
public Object evaluate(EvaluationContext ctx) {
try {
ctx.enterFrame();
List<Object> toReturn = new ArrayList<>();
ctx.setValue("partial", toReturn);
populateToReturn(0, ctx, toReturn);
LOG.trace("returning {}", toReturn);
return toReturn;
} catch (EndpointOfRangeNotValidTypeException | EndpointOfRangeOfDifferentTypeException e) {
// ast error already reported
return null;
} finally {
ctx.exitFrame();
}
} | @Test
void evaluateNestedArray() {
Map<String, List<String>> firstIterationContext = new LinkedHashMap<>();
firstIterationContext.put("1, 2", Arrays.asList("1", "2"));
firstIterationContext.put("3, 4", Arrays.asList("3", "4"));
IterationContextNode x = getIterationContextNode("x", getNestedListNode("[ [1, 2], [3, 4] ]", firstIterationContext), "x in [ [1, 2], [3, 4] ]");
IterationContextNode y = getIterationContextNode("y", getNameRefNode(BuiltInType.UNKNOWN, "x"), "y in x");
ForExpressionNode forExpressionNode = new ForExpressionNode(Arrays.asList(x, y), getNameRefNode(BuiltInType.UNKNOWN, "y"), "for x in [ [1, 2], [3, 4] ], y in x return y");
Object retrieved = forExpressionNode.evaluate(CodegenTestUtil.newEmptyEvaluationContext());
assertThat(retrieved).isInstanceOf(List.class).asList().
containsExactly(BigDecimal.ONE, BigDecimal.valueOf(2), BigDecimal.valueOf(3), BigDecimal.valueOf(4));
} |
@Override
public NotifyTemplateDO getNotifyTemplate(Long id) {
return notifyTemplateMapper.selectById(id);
} | @Test
public void testGetNotifyTemplate() {
// mock data
NotifyTemplateDO dbNotifyTemplate = randomPojo(NotifyTemplateDO.class);
notifyTemplateMapper.insert(dbNotifyTemplate);
// prepare parameters
Long id = dbNotifyTemplate.getId();
// invoke
NotifyTemplateDO notifyTemplate = notifyTemplateService.getNotifyTemplate(id);
// assert
assertPojoEquals(dbNotifyTemplate, notifyTemplate);
} |
static boolean unprotectedSetTimes(
FSDirectory fsd, INodesInPath iip, long mtime, long atime, boolean force)
throws FileNotFoundException {
assert fsd.hasWriteLock();
boolean status = false;
INode inode = iip.getLastINode();
if (inode == null) {
throw new FileNotFoundException("File/Directory " + iip.getPath() +
" does not exist.");
}
int latest = iip.getLatestSnapshotId();
if (mtime >= 0) {
inode = inode.setModificationTime(mtime, latest);
status = true;
}
// if the last access time update was within the last precision interval,
// then no need to store access time
if (atime >= 0 && (status || force
|| atime > inode.getAccessTime() + fsd.getAccessTimePrecision())) {
inode.setAccessTime(atime, latest,
fsd.getFSNamesystem().getSnapshotManager().
getSkipCaptureAccessTimeOnlyChange());
status = true;
}
return status;
} | @Test
public void testUnprotectedSetTimes() throws Exception {
// atime < access time + precision
assertFalse("SetTimes should not update access time "
+ "because it's within the last precision interval",
unprotectedSetTimes(100, 0, 1000, -1, false));
// atime = access time + precision
assertFalse("SetTimes should not update access time "
+ "because it's within the last precision interval",
unprotectedSetTimes(1000, 0, 1000, -1, false));
// atime > access time + precision
assertTrue("SetTimes should update access time",
unprotectedSetTimes(1011, 10, 1000, -1, false));
// atime < access time + precision, but force is set
assertTrue("SetTimes should update access time",
unprotectedSetTimes(100, 0, 1000, -1, true));
// atime < access time + precision, but mtime is set
assertTrue("SetTimes should update access time",
unprotectedSetTimes(100, 0, 1000, 1, false));
} |
public static void checkParam(String dataId, String group, String content) throws NacosException {
checkKeyParam(dataId, group);
if (StringUtils.isBlank(content)) {
throw new NacosException(NacosException.CLIENT_INVALID_PARAM, CONTENT_INVALID_MSG);
}
} | @Test
void testCheckParamFail() throws NacosException {
Throwable exception = assertThrows(NacosException.class, () -> {
String dataId = "b";
String group = "c";
String content = "";
ParamUtils.checkParam(dataId, group, content);
});
assertTrue(exception.getMessage().contains("content invalid"));
} |
public boolean evaluate( RowMetaInterface rowMeta, Object[] r ) {
// Start of evaluate
boolean retval = false;
// If we have 0 items in the list, evaluate the current condition
// Otherwise, evaluate all sub-conditions
//
try {
if ( isAtomic() ) {
if ( function == FUNC_TRUE ) {
return !negate;
}
// Get fieldnrs left value
//
// Check out the fieldnrs if we don't have them...
if ( leftValuename != null && leftValuename.length() > 0 ) {
leftFieldnr = rowMeta.indexOfValue( leftValuename );
}
// Get fieldnrs right value
//
if ( rightValuename != null && rightValuename.length() > 0 ) {
rightFieldnr = rowMeta.indexOfValue( rightValuename );
}
// Get fieldnrs left field
ValueMetaInterface fieldMeta = null;
Object field = null;
if ( leftFieldnr >= 0 ) {
fieldMeta = rowMeta.getValueMeta( leftFieldnr );
field = r[ leftFieldnr ];
} else {
return false; // no fields to evaluate
}
// Get fieldnrs right exact
ValueMetaInterface fieldMeta2 = rightExact != null ? rightExact.getValueMeta() : null;
Object field2 = rightExact != null ? rightExact.getValueData() : null;
if ( field2 == null && rightFieldnr >= 0 ) {
fieldMeta2 = rowMeta.getValueMeta( rightFieldnr );
field2 = r[ rightFieldnr ];
}
// Evaluate
switch ( function ) {
case FUNC_EQUAL:
retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) == 0 );
break;
case FUNC_NOT_EQUAL:
retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) != 0 );
break;
case FUNC_SMALLER:
// Added this if/else to accommodate for CUST-270
if ( "Y".equalsIgnoreCase( System.getProperty( Const.KETTLE_FILTER_TREAT_NULLS_AS_NOT_ZERO, "N" ) )
&& fieldMeta.isNull( field ) ) {
retval = false;
} else {
retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) < 0 );
}
break;
case FUNC_SMALLER_EQUAL:
// Added this if/else to accommodate for CUST-270
if ( "Y".equalsIgnoreCase( System.getProperty( Const.KETTLE_FILTER_TREAT_NULLS_AS_NOT_ZERO, "N" ) )
&& fieldMeta.isNull( field ) ) {
retval = false;
} else {
retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) <= 0 );
}
break;
case FUNC_LARGER:
retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) > 0 );
break;
case FUNC_LARGER_EQUAL:
retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) >= 0 );
break;
case FUNC_REGEXP:
if ( fieldMeta.isNull( field ) || field2 == null ) {
retval = false;
} else {
retval =
Pattern
.matches( fieldMeta2.getCompatibleString( field2 ), fieldMeta.getCompatibleString( field ) );
}
break;
case FUNC_NULL:
retval = ( fieldMeta.isNull( field ) );
break;
case FUNC_NOT_NULL:
retval = ( !fieldMeta.isNull( field ) );
break;
case FUNC_IN_LIST:
// performance reason: create the array first or again when it is against a field and not a constant
//
if ( inList == null || rightFieldnr >= 0 ) {
inList = Const.splitString( fieldMeta2.getString( field2 ), ';', true );
for ( int i = 0; i < inList.length; i++ ) {
inList[i] = inList[i] == null ? null : inList[i].replace( "\\", "" );
}
Arrays.sort( inList );
}
String searchString = fieldMeta.getCompatibleString( field );
int inIndex = -1;
if ( searchString != null ) {
inIndex = Arrays.binarySearch( inList, searchString );
}
retval = inIndex >= 0;
break;
case FUNC_CONTAINS:
String fm2CompatibleContains = fieldMeta2.getCompatibleString( field2 );
retval = Optional.ofNullable( fieldMeta.getCompatibleString( field ) )
.filter( s -> s.contains( fm2CompatibleContains ) ).isPresent();
break;
case FUNC_STARTS_WITH:
String fm2CompatibleStarts = fieldMeta2.getCompatibleString( field2 );
retval = Optional.ofNullable( fieldMeta.getCompatibleString( field ) )
.filter( s -> s.startsWith( fm2CompatibleStarts ) ).isPresent();
break;
case FUNC_ENDS_WITH:
String string = fieldMeta.getCompatibleString( field );
if ( !Utils.isEmpty( string ) ) {
if ( rightString == null && field2 != null ) {
rightString = fieldMeta2.getCompatibleString( field2 );
}
if ( rightString != null ) {
retval = string.endsWith( fieldMeta2.getCompatibleString( field2 ) );
} else {
retval = false;
}
} else {
retval = false;
}
break;
case FUNC_LIKE:
// Converts to a regular expression
// TODO: optimize the patterns and String replacements
//
if ( fieldMeta.isNull( field ) || field2 == null ) {
retval = false;
} else {
String regex = fieldMeta2.getCompatibleString( field2 );
regex = regex.replace( "%", ".*" );
regex = regex.replace( "?", "." );
retval = Pattern.matches( regex, fieldMeta.getCompatibleString( field ) );
}
break;
default:
break;
}
// Only NOT makes sense, the rest doesn't, so ignore!!!!
// Optionally negate
//
if ( isNegated() ) {
retval = !retval;
}
} else {
// Composite : get first
Condition cb0 = list.get( 0 );
retval = cb0.evaluate( rowMeta, r );
// Loop over the conditions listed below.
//
for ( int i = 1; i < list.size(); i++ ) {
// Composite : #i
// Get right hand condition
Condition cb = list.get( i );
// Evaluate the right hand side of the condition cb.evaluate() within
// the switch statement
// because the condition may be short-circuited due to the left hand
// side (retval)
switch ( cb.getOperator() ) {
case Condition.OPERATOR_OR:
retval = retval || cb.evaluate( rowMeta, r );
break;
case Condition.OPERATOR_AND:
retval = retval && cb.evaluate( rowMeta, r );
break;
case Condition.OPERATOR_OR_NOT:
retval = retval || ( !cb.evaluate( rowMeta, r ) );
break;
case Condition.OPERATOR_AND_NOT:
retval = retval && ( !cb.evaluate( rowMeta, r ) );
break;
case Condition.OPERATOR_XOR:
retval = retval ^ cb.evaluate( rowMeta, r );
break;
default:
break;
}
}
// Composite: optionally negate
if ( isNegated() ) {
retval = !retval;
}
}
} catch ( Exception e ) {
throw new RuntimeException( "Unexpected error evaluation condition [" + toString() + "]", e );
}
return retval;
} | @Test
public void testZeroLargerThanNull() {
String left = "left";
String right = "right";
Long leftValue = 0L;
Long rightValue = null;
RowMetaInterface rowMeta = new RowMeta();
rowMeta.addValueMeta( new ValueMetaInteger( left ) );
rowMeta.addValueMeta( new ValueMetaInteger( right ) );
Condition condition = new Condition( left, Condition.FUNC_LARGER, right, null );
assertTrue( condition.evaluate( rowMeta, new Object[] { leftValue, rightValue } ) );
} |
@Override
public long extractWatermark(IcebergSourceSplit split) {
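// The watermark is the smallest lower-bound statistic of the event-time column across the split's files, converted to milliseconds.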
return split.task().files().stream()
.map(
scanTask -> {
Preconditions.checkArgument(
scanTask.file().lowerBounds() != null
&& scanTask.file().lowerBounds().get(eventTimeFieldId) != null,
"Missing statistics for column name = %s in file = %s",
eventTimeFieldName,
eventTimeFieldId,
scanTask.file());
return timeUnit.toMillis(
Conversions.fromByteBuffer(
Types.LongType.get(), scanTask.file().lowerBounds().get(eventTimeFieldId)));
})
.min(Comparator.comparingLong(l -> l))
.get();
} | @TestTemplate
public void testTimeUnit() throws IOException {
assumeThat(columnName).isEqualTo("long_column");
ColumnStatsWatermarkExtractor extractor =
new ColumnStatsWatermarkExtractor(SCHEMA, columnName, TimeUnit.MICROSECONDS);
assertThat(extractor.extractWatermark(split(0)))
.isEqualTo(MIN_VALUES.get(0).get(columnName) / 1000L);
} |
@Override
public Result invoke(Invocation invocation) throws RpcException {
Result result;
String value = getUrl().getMethodParameter(
RpcUtils.getMethodName(invocation), MOCK_KEY, Boolean.FALSE.toString())
.trim();
if (ConfigUtils.isEmpty(value)) {
// no mock
result = this.invoker.invoke(invocation);
} else if (value.startsWith(FORCE_KEY)) {
if (logger.isWarnEnabled()) {
logger.warn(
CLUSTER_FAILED_MOCK_REQUEST,
"force mock",
"",
"force-mock: " + RpcUtils.getMethodName(invocation) + " force-mock enabled , url : "
+ getUrl());
}
// force:direct mock
result = doMockInvoke(invocation, null);
} else {
// fail-mock
try {
result = this.invoker.invoke(invocation);
// fix:#4585
if (result.getException() != null && result.getException() instanceof RpcException) {
RpcException rpcException = (RpcException) result.getException();
if (rpcException.isBiz()) {
throw rpcException;
} else {
result = doMockInvoke(invocation, rpcException);
}
}
} catch (RpcException e) {
if (e.isBiz()) {
throw e;
}
if (logger.isWarnEnabled()) {
logger.warn(
CLUSTER_FAILED_MOCK_REQUEST,
"failed to mock invoke",
"",
"fail-mock: " + RpcUtils.getMethodName(invocation) + " fail-mock enabled , url : "
+ getUrl(),
e);
}
result = doMockInvoke(invocation, e);
}
}
return result;
} | @Test
void testMockInvokerInvoke_forcemock_defaultreturn() {
URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
.addParameter(
REFER_KEY, URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "mock=force"));
Invoker<IHelloService> cluster = getClusterInvoker(url);
URL mockUrl = URL.valueOf("mock://localhost/" + IHelloService.class.getName()
+ "?getSomething.mock=return aa&getSomething3xx.mock=return xx&sayHello.mock=return ")
.addParameters(url.getParameters());
Protocol protocol = new MockProtocol();
Invoker<IHelloService> mInvoker1 = protocol.refer(IHelloService.class, mockUrl);
invokers.add(mInvoker1);
RpcInvocation invocation = new RpcInvocation();
invocation.setMethodName("sayHello");
Result ret = cluster.invoke(invocation);
Assertions.assertNull(ret.getValue());
} |
@Modified
public void modified(ComponentContext context) {
if (context == null) {
log.info("No component configuration");
return;
} else {
Dictionary<?, ?> properties = context.getProperties();
int newPollFrequency = getNewPollFrequency(properties, linkPollFrequencySeconds);
int newPollDelay = getNewPollDealy(properties, linkPollDelaySeconds);
int newDiscoveryTimeout = getNewDiscoveryTimeout(properties, linkDiscoveryTimeoutSeconds);
if (newPollFrequency != linkPollFrequencySeconds ||
newPollDelay != linkPollDelaySeconds ||
newDiscoveryTimeout != linkDiscoveryTimeoutSeconds) {
linkPollFrequencySeconds = newPollFrequency;
linkPollDelaySeconds = newPollDelay;
linkDiscoveryTimeoutSeconds = newDiscoveryTimeout;
//stops the old scheduled task
scheduledTask.cancel(true);
//schedules new task at the new polling rate
scheduledTask = schedulePolling();
}
}
log.info("Modified");
} | @Test
@Ignore("FIXME: fails intermittently; suspecting insufficient time and race condition")
public void linksTestForStoredDevice() {
provider.modified(CONTEXT);
providerService.discoveredLinkDescriptions().put(LINKKEY1, LINK1);
providerService.discoveredLinkDescriptions().put(LINKKEY2, LINK2);
providerService.discoveredLinkDescriptions().put(LINKKEY4, LINK4);
testLink.addLinkDesc(LINK2);
testLink.addLinkDesc(LINK3);
assertAfter(1100, () -> {
assertEquals("Total number of link must be 3", 3, providerService.discoveredLinkDescriptions().size());
assertFalse("Link1 should be removed",
providerService.discoveredLinkDescriptions().containsKey(LINKKEY1));
assertTrue("Link2 should be present",
providerService.discoveredLinkDescriptions().containsKey(LINKKEY2));
assertTrue("Link3 should be added",
providerService.discoveredLinkDescriptions().containsKey(LINKKEY3));
assertEquals("Link3 should be annotated", SCHEME_NAME.toUpperCase(),
providerService.discoveredLinkDescriptions()
.get(LINKKEY3).annotations().value(AnnotationKeys.PROTOCOL));
assertTrue("Link4 should be present because it is not related to the LinkDiscovery",
providerService.discoveredLinkDescriptions().containsKey(LINKKEY4));
});
clear();
} |
static Set<Dependency> tryCreateFromField(JavaField field) {
return tryCreateDependency(field, "has type", field.getRawType());
} | @Test
@UseDataProvider("field_array_types")
public void Dependencies_from_field_with_component_type(Field reflectionArrayField) {
Class<?> reflectionDeclaringClass = reflectionArrayField.getDeclaringClass();
JavaField field = new ClassFileImporter().importClasses(reflectionDeclaringClass).get(reflectionDeclaringClass).getField(reflectionArrayField.getName());
Set<Dependency> dependencies = Dependency.tryCreateFromField(field);
DependenciesAssertion.ExpectedDependencies expectedDependencies = from(reflectionDeclaringClass).to(reflectionArrayField.getType())
.withDescriptionContaining("Field <%s> has type <%s>", field.getFullName(), reflectionArrayField.getType().getName())
.inLocation(DependencyTest.class, 0);
Class<?> expectedComponentType = reflectionArrayField.getType().getComponentType();
while (expectedComponentType != null) {
expectedDependencies.from(reflectionDeclaringClass).to(expectedComponentType)
.withDescriptionContaining("Field <%s> depends on component type <%s>", field.getFullName(), expectedComponentType.getName())
.inLocation(DependencyTest.class, 0);
expectedComponentType = expectedComponentType.getComponentType();
}
assertThatDependencies(dependencies).containOnly(expectedDependencies);
} |
public static <E> List<E> executePaginatedRequest(String request, OAuth20Service scribe, OAuth2AccessToken accessToken, Function<String, List<E>> function) {
List<E> result = new ArrayList<>();
readPage(result, scribe, accessToken, addPerPageQueryParameter(request, DEFAULT_PAGE_SIZE), function);
return result;
} | @Test
public void execute_paginated_request_with_query_parameter() throws InterruptedException {
mockWebServer.enqueue(new MockResponse()
.setHeader("Link", "<" + serverUrl + "/test?param=value&per_page=100&page=2>; rel=\"next\", <" + serverUrl + "/test?param=value&per_page=100&page=2>; rel=\"last\"")
.setBody("A"));
mockWebServer.enqueue(new MockResponse()
.setHeader("Link", "<" + serverUrl + "/test?param=value&per_page=100&page=1>; rel=\"prev\", <" + serverUrl + "/test?param=value&per_page=100&page=1>; rel=\"first\"")
.setBody("B"));
List<String> response = executePaginatedRequest(serverUrl + "/test?param=value", oAuth20Service, auth2AccessToken, Arrays::asList);
assertThat(response).contains("A", "B");
assertThat(mockWebServer.takeRequest().getPath()).isEqualTo("/test?param=value&per_page=100");
assertThat(mockWebServer.takeRequest().getPath()).isEqualTo("/test?param=value&per_page=100&page=2");
} |
byte[] readFromChannel(StreamSourceChannel source) throws IOException {
final ByteArrayOutputStream out = new ByteArrayOutputStream();
final ByteBuffer buffer = ByteBuffer.wrap(new byte[1024]);
ReadableByteChannel blockingSource = new BlockingReadableByteChannel(source);
for (;;) {
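// Read until end-of-stream (-1); a return of 0 should be impossible with the blocking wrapper, so it is logged as an error.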
int res = blockingSource.read(buffer);
if (res == -1) {
return out.toByteArray();
} else if (res == 0) {
LOG.error("Channel did not block");
} else {
cast(buffer).flip();
out.write(buffer.array(), buffer.arrayOffset() + cast(buffer).position(),
buffer.arrayOffset() + cast(buffer).limit());
cast((Buffer) buffer).clear();
}
}
} | @Timeout(10)
@Test
public void readEntireMultiDelayedWithPausePayload() throws Exception {
String[] delayedPayloads = new String[] {
"",
"first ",
"",
"second",
};
StreamSourceChannel source = source(delayedPayloads);
DefaultUndertowHttpBinding binding = new DefaultUndertowHttpBinding();
String result = new String(binding.readFromChannel(source));
checkResult(result, delayedPayloads);
} |
@Override
public Optional<String> validate(String password) {
final String LENGTH_REASONING = "must be of length between " + minimumLength + " and " + MAXIMUM_LENGTH;
return Strings.isNullOrEmpty(password) || password.length() < minimumLength
? Optional.of(LENGTH_REASONING)
: Optional.empty();
} | @Test
public void testValidateSuccess() {
Optional<String> result = lengthValidator.validate("Password123");
Assert.assertFalse(result.isPresent());
} |
public boolean includes(String ipAddress) {
if (all) {
return true;
}
if (ipAddress == null) {
throw new IllegalArgumentException("ipAddress is null.");
}
try {
return includes(addressFactory.getByName(ipAddress));
} catch (UnknownHostException e) {
return false;
}
} | @Test
public void testHostNamesReverserIpMatch() throws UnknownHostException {
// create MachineList with a list of of Hostnames
TestAddressFactory addressFactory = new TestAddressFactory();
addressFactory.put("1.2.3.1", "host1");
addressFactory.put("1.2.3.4", "host4");
addressFactory.put("1.2.3.5", "host5");
MachineList ml = new MachineList(
StringUtils.getTrimmedStringCollection(HOST_LIST), addressFactory );
//test for inclusion with an known IP
assertTrue(ml.includes("1.2.3.4"));
//test for exclusion with an unknown IP
assertFalse(ml.includes("1.2.3.5"));
} |
public static String toString(Object obj) {
if (null == obj) {
return StrUtil.NULL;
}
if (obj instanceof Map) {
return obj.toString();
}
return Convert.toStr(obj);
} | @Test
public void toStringTest() {
ArrayList<String> strings = CollUtil.newArrayList("1", "2");
String result = ObjectUtil.toString(strings);
assertEquals("[1, 2]", result);
} |
@Override
public void run() {
try { // make sure we call afterRun() even on crashes
// and operate countdown latches, else we may hang the parallel runner
if (steps == null) {
beforeRun();
}
if (skipped) {
return;
}
int count = steps.size();
int index = 0;
while ((index = nextStepIndex()) < count) {
currentStep = steps.get(index);
execute(currentStep);
if (currentStepResult != null) { // can be null if debug step-back or hook skip
result.addStepResult(currentStepResult);
}
}
} catch (Exception e) {
if (currentStepResult != null) {
result.addStepResult(currentStepResult);
}
logError("scenario [run] failed\n" + StringUtils.throwableToString(e));
currentStepResult = result.addFakeStepResult("scenario [run] failed", e);
} finally {
if (!skipped) {
afterRun();
if (isFailed() && engine.getConfig().isAbortSuiteOnFailure()) {
featureRuntime.suite.abort();
}
}
if (caller.isNone()) {
logAppender.close(); // reclaim memory
}
}
} | @Test
void testRepeat() {
run(
"def res1 = karate.repeat(3, i => i + 1 )",
"def res2 = karate.repeat(3, i => ({ a: 1 }))",
"def res3 = karate.repeat(3, i => ({ a: i + 1 }))"
);
matchVar("res1", "[1, 2, 3]");
matchVar("res2", "[{ a: 1 }, { a: 1 }, { a: 1 }]");
matchVar("res3", "[{ a: 1 }, { a: 2 }, { a: 3 }]");
} |
@Override
public void checkAppPermissions(GithubAppConfiguration githubAppConfiguration) {
AppToken appToken = appSecurity.createAppToken(githubAppConfiguration.getId(), githubAppConfiguration.getPrivateKey());
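// The app must hold write permission on checks and pull_requests and read permission on metadata.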
Map<String, String> permissions = new HashMap<>();
permissions.put("checks", WRITE_PERMISSION_NAME);
permissions.put("pull_requests", WRITE_PERMISSION_NAME);
permissions.put("metadata", READ_PERMISSION_NAME);
String endPoint = "/app";
GetResponse response;
try {
response = githubApplicationHttpClient.get(githubAppConfiguration.getApiEndpoint(), appToken, endPoint);
} catch (IOException e) {
LOG.warn(FAILED_TO_REQUEST_BEGIN_MSG + githubAppConfiguration.getApiEndpoint() + endPoint, e);
throw new IllegalArgumentException("Failed to validate configuration, check URL and Private Key");
}
if (response.getCode() == HTTP_OK) {
Map<String, String> perms = handleResponse(response, endPoint, GsonApp.class)
.map(GsonApp::getPermissions)
.orElseThrow(() -> new IllegalArgumentException("Failed to get app permissions, unexpected response body"));
List<String> missingPermissions = permissions.entrySet().stream()
.filter(permission -> !Objects.equals(permission.getValue(), perms.get(permission.getKey())))
.map(Map.Entry::getKey)
.toList();
if (!missingPermissions.isEmpty()) {
String message = missingPermissions.stream()
.map(perm -> perm + " is '" + perms.get(perm) + "', should be '" + permissions.get(perm) + "'")
.collect(Collectors.joining(", "));
throw new IllegalArgumentException("Missing permissions; permission granted on " + message);
}
} else if (response.getCode() == HTTP_UNAUTHORIZED || response.getCode() == HTTP_FORBIDDEN) {
throw new IllegalArgumentException("Authentication failed, verify the Client Id, Client Secret and Private Key fields");
} else {
throw new IllegalArgumentException("Failed to check permissions with Github, check the configuration");
}
} | @Test
public void checkAppPermissions() throws IOException {
AppToken appToken = mockAppToken();
String json = "{"
+ " \"permissions\": {\n"
+ " \"checks\": \"write\",\n"
+ " \"metadata\": \"read\",\n"
+ " \"pull_requests\": \"write\"\n"
+ " }\n"
+ "}";
when(githubApplicationHttpClient.get(appUrl, appToken, "/app")).thenReturn(new OkGetResponse(json));
assertThatCode(() -> underTest.checkAppPermissions(githubAppConfiguration)).isNull();
} |
public static ConfigurableResource parseResourceConfigValue(String value)
throws AllocationConfigurationException {
return parseResourceConfigValue(value, Long.MAX_VALUE);
} | @Test
public void testDuplicateVcoresDefinitionAbsolute() throws Exception {
expectInvalidResource("vcores");
parseResourceConfigValue("1024 mb, 2 4 vcores");
} |
@Override
public ExecuteContext doBefore(ExecuteContext context) {
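// Fetch instances from the plugin register center; if the origin register center is unavailable or flagged to close, skip the original call with an empty placeholder result, presumably so only the plugin-discovered instances survive the merge in doAfter.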
String serviceId = (String) context.getArguments()[0];
final RegisterCenterService service = PluginServiceManager.getPluginService(RegisterCenterService.class);
final List<MicroServiceInstance> microServiceInstances = service.getServerList(serviceId);
if (microServiceInstances.isEmpty()) {
return context;
}
context.setLocalFieldValue(SERVICE_ID, serviceId);
context.setLocalFieldValue(MICRO_SERVICE_INSTANCES, microServiceInstances);
if (RegisterContext.INSTANCE.isAvailable()
&& !RegisterDynamicConfig.INSTANCE.isNeedCloseOriginRegisterCenter()) {
return context;
}
final Object target = context.getObject();
context.skip(isWebfLux(target) ? Flux.fromIterable(Collections.emptyList())
: Collections.emptyList());
return context;
} | @Test
public void doBefore() throws NoSuchMethodException {
Mockito.when(registerCenterService.getServerList(serviceName)).thenReturn(instanceList);
// A normal scenario where isEmpty is false and isAvailable is true
RegisterContext.INSTANCE.setAvailable(true);
REGISTER_CONFIG.setEnableSpringRegister(true);
REGISTER_CONFIG.setOpenMigration(true);
RegisterDynamicConfig.INSTANCE.setClose(false);
final ExecuteContext context = interceptor.doBefore(
buildContext(client, new Object[]{serviceName}, zkInstanceList));
final ExecuteContext contextResult = interceptor.doAfter(context);
Assert.assertTrue(contextResult.getResult() instanceof List);
Assert.assertEquals(((List<?>) contextResult.getResult()).size(), zkInstanceList.size() + instanceList.size());
// IsWebfLux scenario where isEmpty is false and isAvailable is true
final ExecuteContext fluxContext = interceptor.doBefore(buildContext(reactiveDiscoveryClient,
new Object[]{serviceName}, Flux.fromIterable(zkInstanceList)));
final ExecuteContext fluxContextResult = interceptor.doAfter(fluxContext);
Assert.assertTrue(fluxContextResult.getResult() instanceof Flux);
final List<?> block = ((Flux<?>) fluxContextResult.getResult()).collectList().block();
Assert.assertNotNull(block);
Assert.assertEquals(block.size(), zkInstanceList.size() + instanceList.size());
// Scenario where isEmpty is false and isAvailable is false
RegisterContext.INSTANCE.setAvailable(false);
final ExecuteContext notAvailableContext = interceptor.doBefore(
buildContext(client, new Object[]{serviceName}, zkInstanceList));
Assert.assertTrue(notAvailableContext.isSkip());
Assert.assertTrue(CollectionUtils.isEmpty((Collection<?>) notAvailableContext.getResult()));
final ExecuteContext notAvailableContextResult = interceptor.doAfter(notAvailableContext);
Assert.assertTrue(notAvailableContextResult.getResult() instanceof List);
Assert.assertEquals(((List<?>) notAvailableContextResult.getResult()).size(), instanceList.size());
// Scenario where isEmpty is true and isAvailable is true
RegisterContext.INSTANCE.setAvailable(true);
Mockito.when(registerCenterService.getServerList(serviceName)).thenReturn(Collections.emptyList());
final ExecuteContext contextWithEmptyList = interceptor.doBefore(
buildContext(client, new Object[]{serviceName}, zkInstanceList));
final ExecuteContext contextWithEmptyListResult = interceptor.doAfter(contextWithEmptyList);
Assert.assertTrue(contextWithEmptyListResult.getResult() instanceof List);
Assert.assertEquals(((List<?>) contextWithEmptyListResult.getResult()).size(), zkInstanceList.size());
// Scenario where isEmpty is true and isAvailable is false
Mockito.when(registerCenterService.getServerList(serviceName)).thenReturn(Collections.emptyList());
final ExecuteContext contextWithEmptyListx = interceptor.doBefore(
buildContext(client, new Object[]{serviceName}, zkInstanceList));
final ExecuteContext contextWithEmptyListResultx = interceptor.doAfter(contextWithEmptyListx);
Assert.assertTrue(contextWithEmptyListResultx.getResult() instanceof List);
Assert.assertEquals(((List<?>) contextWithEmptyListResultx.getResult()).size(), zkInstanceList.size());
REGISTER_CONFIG.setEnableSpringRegister(false);
REGISTER_CONFIG.setOpenMigration(false);
} |
public static Set<AttributeKvEntry> convertToAttributes(JsonElement element) {
Set<AttributeKvEntry> result = new HashSet<>();
long ts = System.currentTimeMillis();
result.addAll(parseValues(element.getAsJsonObject()).stream().map(kv -> new BaseAttributeKvEntry(kv, ts)).collect(Collectors.toList()));
return result;
} | @Test
public void testParseAttributesBigDecimalAsLong() {
var result = new ArrayList<>(JsonConverter.convertToAttributes(JsonParser.parseString("{\"meterReadingDelta\": 1E1}")));
Assertions.assertEquals(10L, result.get(0).getLongValue().get().longValue());
} |
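As a side note, a minimal standalone sketch (assuming only Gson, which the converter above already uses) of why 1E1 comes back as the long 10: the value parses to a BigDecimal with no fractional part, so an exact long conversion succeeds.

import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import java.math.BigDecimal;

public class BigDecimalAsLongSketch {
    public static void main(String[] args) {
        // Scientific notation in the payload still denotes an exact integer.
        JsonElement element = JsonParser.parseString("{\"meterReadingDelta\": 1E1}");
        BigDecimal raw = element.getAsJsonObject().get("meterReadingDelta").getAsBigDecimal();
        // longValueExact() only succeeds because 1E1 has no fractional part.
        System.out.println(raw.longValueExact()); // prints 10
    }
}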
public Task<T> getTask()
{
return _task;
} | @Test
public void testGetTaskOfParSeqBasedCompletionStage() throws Exception {
// Control: CompletableFuture with completed value
CompletionStage<String> completionStageCompletableFuture = CompletableFuture.completedFuture(TESTVALUE2);
testWithUnFinishedStage(completionStageCompletableFuture);
// Treatment: use a ParSeqBasedCompletionStage with a Task already resolved
CompletionStage<String> completionStageParSeq = createTestStage(TESTVALUE2, 0);
assert(completionStageParSeq instanceof ParSeqBasedCompletionStage);
CompletionStage<String> spyStage = Mockito.spy(completionStageParSeq);
testWithUnFinishedStage(spyStage);
verify((ParSeqBasedCompletionStage) spyStage, times(1)).getTask();
// Treatment: use a ParSeqBasedCompletionStage with a task that has not started
Task<String> testTask = Task.value(TESTVALUE2);
CompletionStage<String> completionStageParSeq2 = createStageFromTask(testTask);
assert(completionStageParSeq2 instanceof ParSeqBasedCompletionStage);
CompletionStage<String> spyStage2 = Mockito.spy(completionStageParSeq2);
testWithUnStartedStage(spyStage2, testTask);
// Treatment: use a ParSeqBasedCompletionStage that has started but will finish later
CompletionStage<String> completionStageParSeq3 = createTestStage(TESTVALUE2, 100);
assert(completionStageParSeq3 instanceof ParSeqBasedCompletionStage);
CompletionStage<String> spyStage3 = Mockito.spy(completionStageParSeq3);
testWithUnFinishedStage(spyStage3);
verify((ParSeqBasedCompletionStage) spyStage3, times(1)).getTask();
} |
public static SslContextFactory.Server createServerSideSslContextFactory(AbstractConfig config, String prefix) {
Map<String, Object> sslConfigValues = config.valuesWithPrefixAllOrNothing(prefix);
final SslContextFactory.Server ssl = new SslContextFactory.Server();
configureSslContextFactoryKeyStore(ssl, sslConfigValues);
configureSslContextFactoryTrustStore(ssl, sslConfigValues);
configureSslContextFactoryAlgorithms(ssl, sslConfigValues);
configureSslContextFactoryAuthentication(ssl, sslConfigValues);
return ssl;
} | @Test
public void testCreateServerSideSslContextFactoryDefaultValues() {
Map<String, String> configMap = new HashMap<>();
configMap.put("ssl.keystore.location", "/path/to/keystore");
configMap.put("ssl.keystore.password", "123456");
configMap.put("ssl.key.password", "123456");
configMap.put("ssl.truststore.location", "/path/to/truststore");
configMap.put("ssl.truststore.password", "123456");
configMap.put("ssl.provider", "SunJSSE");
configMap.put("ssl.cipher.suites", "SSL_RSA_WITH_RC4_128_SHA,SSL_RSA_WITH_RC4_128_MD5");
configMap.put("ssl.secure.random.implementation", "SHA1PRNG");
RestServerConfig config = RestServerConfig.forPublic(null, configMap);
SslContextFactory.Server ssl = SSLUtils.createServerSideSslContextFactory(config);
assertEquals(SslConfigs.DEFAULT_SSL_KEYSTORE_TYPE, ssl.getKeyStoreType());
assertEquals(SslConfigs.DEFAULT_SSL_TRUSTSTORE_TYPE, ssl.getTrustStoreType());
assertEquals(SslConfigs.DEFAULT_SSL_PROTOCOL, ssl.getProtocol());
assertArrayEquals(Arrays.asList(SslConfigs.DEFAULT_SSL_ENABLED_PROTOCOLS.split("\\s*,\\s*")).toArray(), ssl.getIncludeProtocols());
assertEquals(SslConfigs.DEFAULT_SSL_KEYMANGER_ALGORITHM, ssl.getKeyManagerFactoryAlgorithm());
assertEquals(SslConfigs.DEFAULT_SSL_TRUSTMANAGER_ALGORITHM, ssl.getTrustManagerFactoryAlgorithm());
assertFalse(ssl.getNeedClientAuth());
assertFalse(ssl.getWantClientAuth());
} |
public static <KLeftT, KRightT> KTableHolder<KLeftT> build(
final KTableHolder<KLeftT> left,
final KTableHolder<KRightT> right,
final ForeignKeyTableTableJoin<KLeftT, KRightT> join,
final RuntimeBuildContext buildContext
) {
final LogicalSchema leftSchema = left.getSchema();
final LogicalSchema rightSchema = right.getSchema();
final ProcessingLogger logger = buildContext.getProcessingLogger(
join.getProperties().getQueryContext()
);
final ExpressionEvaluator expressionEvaluator;
final CodeGenRunner codeGenRunner = new CodeGenRunner(
leftSchema,
buildContext.getKsqlConfig(),
buildContext.getFunctionRegistry()
);
final Optional<ColumnName> leftColumnName = join.getLeftJoinColumnName();
final Optional<Expression> leftJoinExpression = join.getLeftJoinExpression();
if (leftColumnName.isPresent()) {
expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree(
new UnqualifiedColumnReferenceExp(leftColumnName.get()),
"Left Join Expression"
);
} else if (leftJoinExpression.isPresent()) {
expressionEvaluator = codeGenRunner.buildCodeGenFromParseTree(
leftJoinExpression.get(),
"Left Join Expression"
);
} else {
throw new IllegalStateException("Both leftColumnName and leftJoinExpression are empty.");
}
final ForeignKeyJoinParams<KRightT> joinParams = ForeignKeyJoinParamsFactory
.create(expressionEvaluator, leftSchema, rightSchema, logger);
final Formats formats = join.getFormats();
final PhysicalSchema physicalSchema = PhysicalSchema.from(
joinParams.getSchema(),
formats.getKeyFeatures(),
formats.getValueFeatures()
);
final Serde<KLeftT> keySerde = left.getExecutionKeyFactory().buildKeySerde(
formats.getKeyFormat(),
physicalSchema,
join.getProperties().getQueryContext()
);
final Serde<GenericRow> valSerde = buildContext.buildValueSerde(
formats.getValueFormat(),
physicalSchema,
join.getProperties().getQueryContext()
);
final KTable<KLeftT, GenericRow> result;
switch (join.getJoinType()) {
case INNER:
result = left.getTable().join(
right.getTable(),
joinParams.getKeyExtractor(),
joinParams.getJoiner(),
buildContext.getMaterializedFactory().create(keySerde, valSerde)
);
break;
case LEFT:
result = left.getTable().leftJoin(
right.getTable(),
joinParams.getKeyExtractor(),
joinParams.getJoiner(),
buildContext.getMaterializedFactory().create(keySerde, valSerde)
);
break;
default:
throw new IllegalStateException("invalid join type: " + join.getJoinType());
}
return KTableHolder.unmaterialized(
result,
joinParams.getSchema(),
left.getExecutionKeyFactory()
);
} | @Test
public void shouldReturnCorrectSchema() {
// Given:
givenInnerJoin(left, JOIN_COLUMN);
// When:
final KTableHolder<Struct> result = join.build(planBuilder, planInfo);
// Then:
assertThat(
result.getSchema(),
is(LogicalSchema.builder()
.keyColumns(LEFT_SCHEMA.key())
.valueColumns(LEFT_SCHEMA.value())
.valueColumns(RIGHT_SCHEMA.value())
.build()
)
);
} |
@Override
public ImportResult importItem(
UUID jobId,
IdempotentImportExecutor idempotentExecutor,
TokensAndUrlAuthData authData,
MediaContainerResource data)
throws Exception {
if (data == null) {
return ImportResult.OK;
}
for (PhotoModel photoModel: data.getPhotos()) {
monitor.debug(() -> "AppleMediaImporter received data",
AuditKeys.dataId, photoModel.getDataId(),
AuditKeys.updatedTimeInMs, photoModel.getUploadedTime());
}
AppleMediaInterface mediaInterface = factory
.getOrCreateMediaInterface(jobId, authData, appCredentials, exportingService, monitor);
final int albumCount =
mediaInterface.importAlbums(
jobId,
idempotentExecutor,
data.getAlbums(),
DataVertical.MEDIA.getDataType());
final Map<String, Long> importPhotosMap =
mediaInterface.importAllMedia(
jobId,
idempotentExecutor,
data.getPhotos(),
DataVertical.MEDIA.getDataType());
final Map<String, Long> importVideosResult =
mediaInterface.importAllMedia(
jobId,
idempotentExecutor,
data.getVideos(),
DataVertical.MEDIA.getDataType());
final Map<String, Integer> counts =
new ImmutableMap.Builder<String, Integer>()
.put(MediaContainerResource.ALBUMS_COUNT_DATA_NAME, albumCount)
.put(
MediaContainerResource.PHOTOS_COUNT_DATA_NAME,
importPhotosMap.getOrDefault(ApplePhotosConstants.COUNT_KEY, 0L).intValue())
.put(
MediaContainerResource.VIDEOS_COUNT_DATA_NAME,
importVideosResult.getOrDefault(ApplePhotosConstants.COUNT_KEY, 0L).intValue())
.build();
monitor.info(() -> "AppleMediaImporter imported batch",
MediaContainerResource.ALBUMS_COUNT_DATA_NAME, albumCount,
MediaContainerResource.PHOTOS_COUNT_DATA_NAME, importPhotosMap.getOrDefault(ApplePhotosConstants.COUNT_KEY, 0L).intValue(),
MediaContainerResource.VIDEOS_COUNT_DATA_NAME, importVideosResult.getOrDefault(ApplePhotosConstants.COUNT_KEY, 0L).intValue());
return ImportResult.OK
.copyWithBytes(
importPhotosMap.getOrDefault(ApplePhotosConstants.BYTES_KEY, 0L)
+ importVideosResult.getOrDefault(ApplePhotosConstants.BYTES_KEY, 0L))
.copyWithCounts(counts);
} | @Test
public void importPhotosVideosAndAlbums() throws Exception {
// set up albums
final int albumCount = 1;
final List<MediaAlbum> mediaAlbums =
createTestAlbums(albumCount).stream()
.map(MediaAlbum::photoToMediaAlbum)
.collect(Collectors.toList());
setUpCreateAlbumsResponse(
mediaAlbums.stream()
.collect(
Collectors.toMap(MediaAlbum::getId, photoAlbum -> SC_OK)));
// set up photos
final int photoCount = 2;
final List<PhotoModel> photos = createTestPhotos(photoCount);
final Map<String, Integer> dataIdToStatus =
photos.stream()
.collect(
Collectors.toMap(PhotoModel::getDataId, photoModel -> SC_OK));
// set up videos
final int videoCount = 3;
final List<VideoModel> videos = createTestVideos(videoCount);
dataIdToStatus.putAll(
videos.stream()
.collect(
Collectors.toMap(
VideoModel::getDataId, videoModel -> SC_OK)));
setUpGetUploadUrlResponse(dataIdToStatus);
setUpUploadContentResponse(dataIdToStatus);
setUpCreateMediaResponse(dataIdToStatus);
MediaContainerResource mediaData = new MediaContainerResource(mediaAlbums, photos, videos);
final ImportResult importResult =
appleMediaImporter.importItem(uuid, executor, authData, mediaData);
// verify correct methods were called
final List<String> photosDataIds =
photos.stream().map(PhotoModel::getDataId).collect(Collectors.toList());
final List<String> videosDataIds =
videos.stream().map(VideoModel::getDataId).collect(Collectors.toList());
verify(mediaInterface)
.createAlbums(uuid.toString(), DataVertical.MEDIA.getDataType(), mediaAlbums);
verify(mediaInterface)
.getUploadUrl(uuid.toString(), DataVertical.MEDIA.getDataType(), photosDataIds);
verify(mediaInterface)
.getUploadUrl(uuid.toString(), DataVertical.MEDIA.getDataType(), videosDataIds);
verify(mediaInterface, times(2)).uploadContent(anyMap(), anyList());
verify(mediaInterface, times(2)).createMedia(anyString(), anyString(), argThat(newMediaRequestList -> {
assertThat(newMediaRequestList).isNotNull();
assertThat(newMediaRequestList.stream().allMatch(newMediaRequest -> newMediaRequest.hasCreationDateInMillis())).isTrue();
assertThat(newMediaRequestList.stream().allMatch(newMediaRequest -> newMediaRequest.hasIsFavorite() && newMediaRequest.getIsFavorite() == 1L)).isTrue();
return true;
}));
// check the result
assertThat(importResult.getCounts().isPresent()).isTrue();
assertThat(
importResult.getCounts().get().get(PhotosContainerResource.ALBUMS_COUNT_DATA_NAME) == 1).isTrue();
assertThat(
importResult.getCounts().get().get(PhotosContainerResource.PHOTOS_COUNT_DATA_NAME)
== photoCount).isTrue();
assertThat(
importResult.getCounts().get().get(VideosContainerResource.VIDEOS_COUNT_DATA_NAME)
== videoCount).isTrue();
assertThat(
importResult.getBytes().get()
== photoCount * PHOTOS_FILE_SIZE + videoCount * VIDEOS_FILE_SIZE).isTrue();
final Map<String, Serializable> expectedKnownValue =
mediaAlbums.stream()
.collect(
Collectors.toMap(
MediaAlbum::getId, mediaAlbum -> ALBUM_RECORDID_BASE + mediaAlbum.getId()));
expectedKnownValue.putAll(
photos.stream()
.collect(
Collectors.toMap(
photoModel -> photoModel.getAlbumId() + "-" + photoModel.getDataId(),
photoModel -> MEDIA_RECORDID_BASE + photoModel.getDataId())));
expectedKnownValue.putAll(
videos.stream()
.collect(
Collectors.toMap(
videoModel -> videoModel.getAlbumId() + "-" + videoModel.getDataId(),
videoModel -> MEDIA_RECORDID_BASE + videoModel.getDataId())));
checkKnownValues(expectedKnownValue);
} |
public static JsonAsserter with(String json) {
return new JsonAsserterImpl(JsonPath.parse(json).json());
} | @Test
public void testAssertEqualsInteger() throws Exception {
with(getResourceAsStream("lotto.json")).assertEquals("lotto.winners[0].winnerId", 23);
} |
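A minimal sketch of the same lookup with plain Jayway json-path, using a hypothetical inline document in place of lotto.json:

import com.jayway.jsonpath.JsonPath;

public class JsonPathSketch {
    public static void main(String[] args) {
        // Hypothetical stand-in for the lotto.json resource.
        String json = "{\"lotto\":{\"winners\":[{\"winnerId\":23}]}}";
        // JsonPath accepts the same dotted path the asserter uses.
        int winnerId = JsonPath.read(json, "$.lotto.winners[0].winnerId");
        System.out.println(winnerId); // prints 23
    }
}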
public ColumnSizeCommand(Logger console) {
super(console);
} | @Test
public void testColumnSizeCommand() throws IOException {
File file = parquetFile();
ColumnSizeCommand command = new ColumnSizeCommand(createLogger());
command.target = file.getAbsolutePath();
command.setConf(new Configuration());
Assert.assertEquals(0, command.run());
} |
public String getFilepath() {
return filepath;
} | @Test
public void testConstructorThrowable() {
try {
throw new KettleFileNotFoundException( cause );
} catch ( KettleFileNotFoundException e ) {
assertEquals( cause, e.getCause() );
assertTrue( e.getMessage().contains( causeExceptionMessage ) );
assertEquals( null, e.getFilepath() );
}
} |
@Override
public void decode(final ChannelHandlerContext context, final ByteBuf in, final List<Object> out) {
while (isValidHeader(in.readableBytes())) {
if (startupPhase) {
handleStartupPhase(in, out);
return;
}
int payloadLength = in.getInt(in.readerIndex() + 1);
if (in.readableBytes() < MESSAGE_TYPE_LENGTH + payloadLength) {
return;
}
byte type = in.getByte(in.readerIndex());
CommandPacketType commandPacketType = OpenGaussCommandPacketType.valueOf(type);
if (requireAggregation(commandPacketType)) {
pendingMessages.add(in.readRetainedSlice(MESSAGE_TYPE_LENGTH + payloadLength));
} else if (pendingMessages.isEmpty()) {
out.add(in.readRetainedSlice(MESSAGE_TYPE_LENGTH + payloadLength));
} else {
handlePendingMessages(context, in, out, payloadLength);
}
}
} | @Test
void assertDecodeWithStickyPacket() {
List<Object> out = new LinkedList<>();
new OpenGaussPacketCodecEngine().decode(context, byteBuf, out);
assertTrue(out.isEmpty());
} |
@Override
public Set<String> getInputMetrics() {
return inputMetricKeys;
} | @Test
public void input_metrics_is_empty_when_not_set() {
MeasureComputer.MeasureComputerDefinition measureComputer = new MeasureComputerDefinitionImpl.BuilderImpl()
.setOutputMetrics("comment_density_1", "comment_density_2")
.build();
assertThat(measureComputer.getInputMetrics()).isEmpty();
} |
public static String sanitizeName(String name) {
return sanitizeName(name, MASK_FOR_INVALID_CHARS_IN_NAMES);
} | @Test
public void testSanitizeName() {
assertEquals("__23456", sanitizeName("123456"));
assertEquals("abcdef", sanitizeName("abcdef"));
assertEquals("_1", sanitizeName("_1"));
assertEquals("a*bc", sanitizeName("a.bc", "*"));
assertEquals("abcdef___", sanitizeName("abcdef_."));
assertEquals("__ab__cd__", sanitizeName("1ab*cd?"));
} |
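The expectations above are consistent with a rule where the first character must be a letter or underscore, later characters may also be digits, and every invalid character is replaced by the mask (apparently "__" by default). A speculative reconstruction, not the library's actual code:

public class SanitizeNameSketch {
    static String sanitizeName(String name, String mask) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < name.length(); i++) {
            char c = name.charAt(i);
            // Letters and '_' are always valid; digits only after the first position.
            boolean valid = c == '_' || Character.isLetter(c)
                    || (i > 0 && Character.isDigit(c));
            sb.append(valid ? String.valueOf(c) : mask);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(sanitizeName("123456", "__"));  // __23456
        System.out.println(sanitizeName("1ab*cd?", "__")); // __ab__cd__
        System.out.println(sanitizeName("a.bc", "*"));     // a*bc
    }
}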
public Host get(final String url) throws HostParserException {
final StringReader reader = new StringReader(url);
final Protocol parsedProtocol, protocol;
if((parsedProtocol = findProtocol(reader, factory)) != null) {
protocol = parsedProtocol;
}
else {
protocol = defaultScheme;
}
final Consumer<HostParserException> parsedProtocolDecorator = e -> e.withProtocol(parsedProtocol);
final Host host = new Host(protocol);
final URITypes uriType = findURIType(reader);
if(uriType == URITypes.Undefined) {
// scheme:
if(StringUtils.isBlank(protocol.getDefaultHostname())) {
throw decorate(new HostParserException(String.format("Missing hostname in URI %s", url)), parsedProtocolDecorator);
}
return host;
}
if(uriType == URITypes.Authority) {
if(host.getProtocol().isHostnameConfigurable()) {
parseAuthority(reader, host, parsedProtocolDecorator);
}
else {
parseRootless(reader, host, parsedProtocolDecorator);
}
}
else if(uriType == URITypes.Rootless) {
parseRootless(reader, host, parsedProtocolDecorator);
}
else if(uriType == URITypes.Absolute) {
parseAbsolute(reader, host, parsedProtocolDecorator);
}
if(log.isDebugEnabled()) {
log.debug(String.format("Parsed %s as %s", url, host));
}
return host;
} | @Test
public void parseEmptyHost() throws HostParserException {
final Host host = new HostParser(new ProtocolFactory(Collections.singleton(new TestProtocol(Scheme.https) {
@Override
public String getDefaultHostname() {
return "host";
}
@Override
public String getDefaultPath() {
return "/default-path";
}
@Override
public boolean isPathConfigurable() {
return false;
}
}))).get("https://");
assertEquals("host", host.getHostname());
assertEquals("/default-path", host.getDefaultPath());
} |
public String getShortMethodDescriptor(MethodReference methodReference) {
StringWriter writer = new StringWriter();
try {
getWriter(writer).writeShortMethodDescriptor(methodReference);
} catch (IOException e) {
throw new AssertionError("Unexpected IOException");
}
return writer.toString();
} | @Test
public void testGetShortMethodReference() throws IOException {
TestDexFormatter formatter = new TestDexFormatter();
Assert.assertEquals(
"short method descriptor",
formatter.getShortMethodDescriptor(mock(MethodReference.class)));
} |
public Object execute(ProceedingJoinPoint proceedingJoinPoint, Method method, String fallbackMethodValue, CheckedSupplier<Object> primaryFunction) throws Throwable {
String fallbackMethodName = spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue);
FallbackMethod fallbackMethod = null;
if (StringUtils.hasLength(fallbackMethodName)) {
try {
fallbackMethod = FallbackMethod
.create(fallbackMethodName, method, proceedingJoinPoint.getArgs(), proceedingJoinPoint.getTarget(), proceedingJoinPoint.getThis());
} catch (NoSuchMethodException ex) {
logger.warn("No fallback method match found", ex);
}
}
if (fallbackMethod == null) {
return primaryFunction.get();
} else {
return fallbackDecorators.decorate(fallbackMethod, primaryFunction).get();
}
} | @Test
public void testPrimaryMethodExecutionWithoutFallback() throws Throwable {
Method method = this.getClass().getMethod("getName", String.class);
final CheckedSupplier<Object> primaryFunction = () -> getName("Name");
final String fallbackMethodValue = "";
when(proceedingJoinPoint.getArgs()).thenReturn(new Object[]{});
when(proceedingJoinPoint.getTarget()).thenReturn(this);
when(spelResolver.resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue)).thenReturn(fallbackMethodValue);
when(fallbackDecorators.decorate(any(), eq(primaryFunction))).thenReturn(primaryFunction);
final Object result = fallbackExecutor.execute(proceedingJoinPoint, method, fallbackMethodValue, primaryFunction);
assertThat(result).isEqualTo("Name");
verify(spelResolver, times(1)).resolve(method, proceedingJoinPoint.getArgs(), fallbackMethodValue);
verify(fallbackDecorators, never()).decorate(any(), any());
} |
public static Optional<ESEventOriginContext> parseESContext(String url) {
if (url.startsWith(ES_EVENT) || url.startsWith(ES_MESSAGE)) {
final String[] tokens = url.split(":");
if (tokens.length != 6) {
return Optional.empty();
}
return Optional.of(ESEventOriginContext.create(tokens[4], tokens[5]));
} else {
return Optional.empty();
}
} | @Test
public void parseEventESContext() {
assertThat(EventOriginContext.parseESContext("urn:graylog:event:es:index-42:01DF13GB094MT6390TYQB2Q73Q"))
.isPresent()
.get()
.satisfies(context -> {
assertThat(context.indexName()).isEqualTo("index-42");
assertThat(context.messageId()).isEqualTo("01DF13GB094MT6390TYQB2Q73Q");
});
} |
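A minimal sketch of the token layout the parser relies on: six colon-separated fields, with the index name and message id in positions 4 and 5.

import java.util.Optional;

public class UrnParseSketch {
    // Expected shape: urn:graylog:<entity>:es:<indexName>:<messageId>
    static Optional<String[]> parseIndexAndMessageId(String urn) {
        String[] tokens = urn.split(":");
        if (tokens.length != 6) {
            return Optional.empty();
        }
        return Optional.of(new String[]{tokens[4], tokens[5]});
    }

    public static void main(String[] args) {
        parseIndexAndMessageId("urn:graylog:event:es:index-42:01DF13GB094MT6390TYQB2Q73Q")
                .ifPresent(t -> System.out.println(t[0] + " / " + t[1]));
        // prints: index-42 / 01DF13GB094MT6390TYQB2Q73Q
    }
}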
public static byte[] jsonNodeToBinary(JsonNode node, String about) {
try {
byte[] value = node.binaryValue();
if (value == null) {
throw new IllegalArgumentException(about + ": expected Base64-encoded binary data.");
}
return value;
} catch (IOException e) {
throw new UncheckedIOException(about + ": unable to retrieve Base64-encoded binary data", e);
}
} | @Test
public void testInvalidBinaryNode() {
assertThrows(
IllegalArgumentException.class,
() -> MessageUtil.jsonNodeToBinary(new IntNode(42), "Test int to binary")
);
assertThrows(
UncheckedIOException.class,
() -> MessageUtil.jsonNodeToBinary(new TextNode("This is not base64!"), "Test non-base64 to binary")
);
} |
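For illustration, a small sketch of the Jackson node behavior the helper depends on: a BinaryNode (or a Base64 TextNode) yields bytes, a non-binary node returns null, and malformed Base64 raises the IOException that gets rewrapped.

import com.fasterxml.jackson.databind.node.BinaryNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.TextNode;
import java.util.Base64;

public class BinaryNodeSketch {
    public static void main(String[] args) throws Exception {
        byte[] payload = {1, 2, 3};
        // A BinaryNode hands back its bytes directly.
        System.out.println(BinaryNode.valueOf(payload).binaryValue().length); // 3
        // A TextNode holding valid Base64 decodes to the same bytes.
        TextNode encoded = TextNode.valueOf(Base64.getEncoder().encodeToString(payload));
        System.out.println(encoded.binaryValue().length); // 3
        // Non-binary nodes yield null, which the helper turns into IllegalArgumentException.
        System.out.println(new IntNode(42).binaryValue()); // null
    }
}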
public static Class<?> getLiteral(String className, String literal) {
LiteralAnalyzer analyzer = ANALYZERS.get( className );
Class<?> result = null;
if ( analyzer != null ) {
analyzer.validate( literal );
result = analyzer.getLiteral();
}
return result;
} | @Test
public void testBooleanLiteralFromJLS() {
assertThat( getLiteral( boolean.class.getCanonicalName(), "true" ) ).isNotNull();
assertThat( getLiteral( boolean.class.getCanonicalName(), "false" ) ).isNotNull();
assertThat( getLiteral( boolean.class.getCanonicalName(), "FALSE" ) ).isNull();
} |
@Override
public void loadConfiguration(NacosLoggingProperties loggingProperties) {
Log4j2NacosLoggingPropertiesHolder.setProperties(loggingProperties);
String location = loggingProperties.getLocation();
loadConfiguration(location);
} | @Test
void testLoadConfigurationWithWrongLocation() {
assertThrows(IllegalStateException.class, () -> {
System.setProperty("nacos.logging.config", "http://localhost");
nacosLoggingProperties = new NacosLoggingProperties("classpath:nacos-log4j2.xml", System.getProperties());
log4J2NacosLoggingAdapter = new Log4J2NacosLoggingAdapter();
log4J2NacosLoggingAdapter.loadConfiguration(nacosLoggingProperties);
verify(propertyChangeListener, never()).propertyChange(any());
});
} |
@Override
public IndexMainType getMainType() {
checkState(mainType != null, "Main type has not been defined");
return mainType;
} | @Test
@UseDataProvider("indexes")
public void getMainType_fails_with_ISE_if_createTypeMapping_with_IndexMainType_has_not_been_called(Index index) {
NewRegularIndex newIndex = new NewRegularIndex(index, defaultSettingsConfiguration);
assertThatThrownBy(() -> newIndex.getMainType())
.isInstanceOf(IllegalStateException.class)
.hasMessage("Main type has not been defined");
} |
@Override
public void removeSubscriber(Subscriber subscriber) {
removeSubscriber(subscriber, subscriber.subscribeType());
} | @Test
void testRemoveSubscriber() {
traceEventPublisher.addSubscriber(subscriber, TraceTestEvent.TraceTestEvent1.class);
traceEventPublisher.addSubscriber(smartSubscriber, TraceTestEvent.TraceTestEvent1.class);
TraceTestEvent.TraceTestEvent1 traceTestEvent1 = new TraceTestEvent.TraceTestEvent1();
traceEventPublisher.publish(traceTestEvent1);
ThreadUtils.sleep(2000L);
verify(subscriber).onEvent(traceTestEvent1);
verify(smartSubscriber).onEvent(traceTestEvent1);
traceEventPublisher.removeSubscriber(smartSubscriber, TraceTestEvent.TraceTestEvent1.class);
traceTestEvent1 = new TraceTestEvent.TraceTestEvent1();
traceEventPublisher.publish(traceTestEvent1);
ThreadUtils.sleep(500L);
verify(subscriber).onEvent(traceTestEvent1);
verify(smartSubscriber, never()).onEvent(traceTestEvent1);
reset(subscriber);
when(subscriber.subscribeType()).thenReturn(TraceTestEvent.TraceTestEvent1.class);
traceEventPublisher.removeSubscriber(subscriber);
traceEventPublisher.publish(traceTestEvent1);
ThreadUtils.sleep(500L);
verify(subscriber, never()).onEvent(traceTestEvent1);
verify(smartSubscriber, never()).onEvent(traceTestEvent1);
} |
@Override
public void init(DatabaseMetaData metaData) throws SQLException {
checkState(!initialized, "onInit() must be called once");
Version version = checkDbVersion(metaData, MIN_SUPPORTED_VERSION);
supportsNullNotDistinct = version.compareTo(MIN_NULL_NOT_DISTINCT_VERSION) >= 0;
initialized = true;
} | @Test
void postgresql_9_2_is_not_supported() {
assertThatThrownBy(() -> {
DatabaseMetaData metadata = newMetadata(9, 2);
underTest.init(metadata);
})
.isInstanceOf(MessageException.class)
.hasMessage("Unsupported postgresql version: 9.2. Minimal supported version is 11.0.");
} |
public final boolean offer(int queueIndex, E item) {
return offer(queues[queueIndex], item);
} | @Test
public void when_offerToGivenQueue_then_poll() {
// when
boolean didOffer = conveyor.offer(defaultQ, item1);
// then
assertTrue(didOffer);
assertSame(item1, defaultQ.poll());
} |
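A minimal sketch of the index-addressed queue pattern behind offer(queueIndex, item), using plain ArrayDeques in place of the conveyor's queues:

import java.util.ArrayDeque;
import java.util.Queue;

public class ConveyorSketch {
    private final Queue<String>[] queues;

    @SuppressWarnings("unchecked")
    ConveyorSketch(int queueCount) {
        queues = new Queue[queueCount];
        for (int i = 0; i < queueCount; i++) {
            queues[i] = new ArrayDeque<>();
        }
    }

    // Delegates to the queue selected by index, as the conveyor does.
    boolean offer(int queueIndex, String item) {
        return queues[queueIndex].offer(item);
    }

    String poll(int queueIndex) {
        return queues[queueIndex].poll();
    }

    public static void main(String[] args) {
        ConveyorSketch conveyor = new ConveyorSketch(2);
        System.out.println(conveyor.offer(0, "item1")); // true
        System.out.println(conveyor.poll(0));           // item1
    }
}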
@EventListener(ApplicationEvent.class)
void onApplicationEvent(ApplicationEvent event) {
if (AnnotationUtils.findAnnotation(event.getClass(), SharedEvent.class) == null) {
return;
}
// we should copy the plugins list to avoid ConcurrentModificationException
var startedPlugins = new ArrayList<>(pluginManager.getStartedPlugins());
// broadcast event to all started plugins except the publisher
for (var startedPlugin : startedPlugins) {
var plugin = startedPlugin.getPlugin();
if (!(plugin instanceof SpringPlugin springPlugin)) {
continue;
}
var context = springPlugin.getApplicationContext();
// make sure the context is running before publishing the event
if (context instanceof Lifecycle lifecycle && lifecycle.isRunning()) {
context.publishEvent(new HaloSharedEventDelegator(this, event));
}
}
} | @Test
void shouldNotDispatchEventIfNotSharedEvent() {
dispatcher.onApplicationEvent(new FakeEvent(this));
verify(pluginManager, never()).getStartedPlugins();
} |
@Override
public ByteString data()
{
return _data;
} | @Test
public void testWrapping()
throws InstantiationException, IllegalAccessException
{
String input = "12345";
ByteString input1 = ByteString.copyAvroString(input, false);
Fixed5 fixed1 = DataTemplateUtil.wrap(input1, Fixed5.class);
assertSame(input1, fixed1.data());
ByteString input2 = ByteString.copyAvroString("67890", false);
Fixed5 fixed2 = DataTemplateUtil.wrap(input2, Fixed5.SCHEMA, Fixed5.class);
assertSame(input2, fixed2.data());
Fixed5 fixed3 = DataTemplateUtil.wrap(input, Fixed5.class);
assertEquals(fixed1, fixed3);
Fixed5 fixed4 = DataTemplateUtil.wrap(input, Fixed5.SCHEMA, Fixed5.class);
assertEquals(fixed3, fixed4);
} |
@Override
public boolean isValid(K key, UUID ticket) {
Timeout<K> timeout = timeouts.get(key);
return timeout != null && timeout.getTicket().equals(ticket);
} | @Test
void testIsValidInitiallyReturnsFalse() {
final DefaultTimerService<AllocationID> timerService = createAndStartTimerService();
assertThat(timerService.isValid(new AllocationID(), UUID.randomUUID())).isFalse();
} |
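A minimal sketch of the ticket check: a key is valid only while the presented UUID matches the most recently registered one, so re-registering silently invalidates older tickets.

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

public class TicketValiditySketch {
    private final Map<String, UUID> tickets = new HashMap<>();

    UUID register(String key) {
        UUID ticket = UUID.randomUUID();
        tickets.put(key, ticket); // replaces (and thus invalidates) any older ticket
        return ticket;
    }

    boolean isValid(String key, UUID ticket) {
        UUID current = tickets.get(key);
        return current != null && current.equals(ticket);
    }

    public static void main(String[] args) {
        TicketValiditySketch service = new TicketValiditySketch();
        System.out.println(service.isValid("k", UUID.randomUUID())); // false: never registered
        UUID ticket = service.register("k");
        System.out.println(service.isValid("k", ticket));            // true
        service.register("k");
        System.out.println(service.isValid("k", ticket));            // false: superseded
    }
}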
public ExecuteJobServlet() {
} | @Test
public void testExecuteJobServletTest()
throws ServletException, IOException, KettleException {
try ( MockedStatic<Encr> encrMockedStatic = mockStatic( Encr.class ) ) {
encrMockedStatic.when( () -> Encr.decryptPasswordOptionallyEncrypted( eq( PASSWORD ) ) ).thenReturn( PASSWORD );
doReturn( ExecuteJobServlet.CONTEXT_PATH ).when( mockHttpServletRequest ).getContextPath();
doReturn( REPOSITORY_NAME ).when( mockHttpServletRequest ).getParameter( "rep" );
doReturn( AUTHORIZED_USER ).when( mockHttpServletRequest ).getParameter( "user" );
doReturn( PASSWORD ).when( mockHttpServletRequest ).getParameter( "pass" );
doReturn( JOB_NAME ).when( mockHttpServletRequest ).getParameter( "job" );
doReturn( LEVEL ).when( mockHttpServletRequest ).getParameter( "level" );
doReturn( repository ).when( spyExecuteJobServlet ).openRepository( REPOSITORY_NAME, AUTHORIZED_USER, PASSWORD );
JobMeta jobMeta = buildJobMeta();
RepositoryDirectoryInterface repositoryDirectoryInterface = mock( RepositoryDirectoryInterface.class );
doReturn( repositoryDirectoryInterface ).when( repository ).loadRepositoryDirectoryTree();
doReturn( mock( RepositoryDirectoryInterface.class ) ).when( repositoryDirectoryInterface )
.findDirectory( anyString() );
doReturn( mock( ObjectId.class ) ).when( repository )
.getJobId( anyString(), any( RepositoryDirectoryInterface.class ) );
doReturn( jobMeta ).when( repository ).loadJob( any( ObjectId.class ), nullable( String.class ) );
doReturn( Collections.emptyEnumeration() ).when( mockHttpServletRequest ).getParameterNames();
StringWriter out = mockWriter();
spyExecuteJobServlet.doGet( mockHttpServletRequest, spyHttpServletResponse );
assertTrue( out.toString().contains( WebResult.STRING_OK ) );
assertTrue( out.toString().contains( "Job started" ) );
}
} |
@Override
public <VR> KTable<K, VR> mapValues(final ValueMapper<? super V, ? extends VR> mapper) {
Objects.requireNonNull(mapper, "mapper can't be null");
return doMapValues(withKey(mapper), NamedInternal.empty(), null);
} | @Test
public void shouldNotAllowNullMapperOnMapValues() {
assertThrows(NullPointerException.class, () -> table.mapValues((ValueMapper) null));
} |
boolean hasReceivedNewVersionFromZooKeeper() {
return currentVersion <= lastZooKeeperVersion;
} | @Test
void new_version_from_zk_predicate_initially_false() {
final StateVersionTracker versionTracker = createWithMockedMetrics();
assertFalse(versionTracker.hasReceivedNewVersionFromZooKeeper());
} |
static public boolean notMarkedWithNoAutoStart(Object o) {
if (o == null) {
return false;
}
Class<?> clazz = o.getClass();
NoAutoStart a = clazz.getAnnotation(NoAutoStart.class);
return a == null;
} | @Test
public void commonObject() {
Object o = new Object();
assertTrue(NoAutoStartUtil.notMarkedWithNoAutoStart(o));
} |
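A self-contained sketch of the annotation lookup, with a hypothetical @NoAutoStart standing in for the real one; the key detail is that the annotation needs runtime retention for getAnnotation to see it.

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

public class NoAutoStartSketch {
    // Hypothetical stand-in for the real NoAutoStart annotation.
    @Retention(RetentionPolicy.RUNTIME)
    @interface NoAutoStart {
    }

    @NoAutoStart
    static class Suppressed {
    }

    static boolean notMarkedWithNoAutoStart(Object o) {
        return o != null && o.getClass().getAnnotation(NoAutoStart.class) == null;
    }

    public static void main(String[] args) {
        System.out.println(notMarkedWithNoAutoStart(new Object()));     // true
        System.out.println(notMarkedWithNoAutoStart(new Suppressed())); // false
    }
}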
static Optional<ExecutorService> lookupExecutorServiceRef(
CamelContext camelContext, String name, Object source, String executorServiceRef) {
ExecutorServiceManager manager = camelContext.getExecutorServiceManager();
ObjectHelper.notNull(manager, ESM_NAME);
ObjectHelper.notNull(executorServiceRef, "executorServiceRef");
// lookup in registry first and use existing thread pool if exists,
// or create a new thread pool, assuming that the executor service ref is a thread pool ID
return lookupByNameAndType(camelContext, executorServiceRef, ExecutorService.class)
.or(() -> Optional.ofNullable(manager.newThreadPool(source, name, executorServiceRef)));
} | @Test
void testLookupExecutorServiceRefWithInvalidRef() {
String name = "ThreadPool";
Object source = new Object();
String executorServiceRef = "InvalidRef";
when(camelContext.getExecutorServiceManager()).thenReturn(manager);
when(camelContext.getRegistry()).thenReturn(mockRegistry);
Optional<ExecutorService> executorService
= DynamicRouterRecipientListHelper.lookupExecutorServiceRef(camelContext, name, source, executorServiceRef);
Assertions.assertFalse(executorService.isPresent());
} |
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
if (value == null) {
return null;
}
return cast(value.longValue(), precision, scale);
} | @Test
public void shouldNotCastStringTooBig() {
// When:
final Exception e = assertThrows(
ArithmeticException.class,
() -> cast("10", 2, 1)
);
// Then:
assertThat(e.getMessage(), containsString("Numeric field overflow"));
} |
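A rough sketch of the overflow rule the test exercises, assuming the usual DECIMAL(precision, scale) semantics: after rounding to the target scale, the value must fit in precision - scale integer digits. This is an approximation of the real cast, not its source.

import java.math.BigDecimal;
import java.math.RoundingMode;

public class DecimalCastSketch {
    static BigDecimal cast(String value, int precision, int scale) {
        BigDecimal d = new BigDecimal(value).setScale(scale, RoundingMode.HALF_UP);
        // Integer digits left of the point must fit in precision - scale.
        if (d.precision() - d.scale() > precision - scale) {
            throw new ArithmeticException("Numeric field overflow");
        }
        return d;
    }

    public static void main(String[] args) {
        System.out.println(cast("1.23", 3, 1)); // 1.2 fits DECIMAL(3, 1)
        cast("10", 2, 1);                       // throws: 10.0 needs two integer digits
    }
}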
@Override
public MapperResult listGroupKeyMd5ByPageFetchRows(MapperContext context) {
return new MapperResult(" SELECT t.id,data_id,group_id,tenant_id,app_name,type,md5,gmt_modified "
+ "FROM ( SELECT id FROM config_info ORDER BY id OFFSET " + context.getStartRow() + " ROWS FETCH NEXT "
+ context.getPageSize() + " ROWS ONLY ) g, config_info t WHERE g.id = t.id", Collections.emptyList());
} | @Test
void testListGroupKeyMd5ByPageFetchRows() {
MapperResult mapperResult = configInfoMapperByDerby.listGroupKeyMd5ByPageFetchRows(context);
assertEquals(mapperResult.getSql(),
" SELECT t.id,data_id,group_id,tenant_id,app_name,type,md5,gmt_modified FROM ( SELECT id FROM config_info "
+ "ORDER BY id OFFSET " + startRow + " ROWS FETCH NEXT " + pageSize
+ " ROWS ONLY ) g, config_info t WHERE g.id = t.id");
assertArrayEquals(mapperResult.getParamList().toArray(), emptyObjs);
} |
@ScalarOperator(BETWEEN)
@SqlType(StandardTypes.BOOLEAN)
public static boolean between(@SqlType(StandardTypes.TINYINT) long value, @SqlType(StandardTypes.TINYINT) long min, @SqlType(StandardTypes.TINYINT) long max)
{
return min <= value && value <= max;
} | @Test
public void testBetween()
{
assertFunction("TINYINT'37' BETWEEN TINYINT'37' AND TINYINT'37'", BOOLEAN, true);
assertFunction("TINYINT'37' BETWEEN TINYINT'37' AND TINYINT'17'", BOOLEAN, false);
assertFunction("TINYINT'37' BETWEEN TINYINT'17' AND TINYINT'37'", BOOLEAN, true);
assertFunction("TINYINT'37' BETWEEN TINYINT'17' AND TINYINT'17'", BOOLEAN, false);
assertFunction("TINYINT'17' BETWEEN TINYINT'37' AND TINYINT'37'", BOOLEAN, false);
assertFunction("TINYINT'17' BETWEEN TINYINT'37' AND TINYINT'17'", BOOLEAN, false);
assertFunction("TINYINT'17' BETWEEN TINYINT'17' AND TINYINT'37'", BOOLEAN, true);
assertFunction("TINYINT'17' BETWEEN TINYINT'17' AND TINYINT'17'", BOOLEAN, true);
} |
public void setValue(byte[] value) {
this.value = value;
} | @Test
public void testSetValue() {
restValue.setValue(PAYLOAD);
assertEquals(PAYLOAD, restValue.getValue());
assertContains(restValue.toString(), "value.length=" + PAYLOAD.length);
} |
public static String u2or4(int v) {
if (v == (char) v) {
return u2(v);
} else {
return u4(v);
}
} | @Test
public void testU2or4() {
Assert.assertEquals("0000", Hex.u2or4(0));
Assert.assertEquals("04d2", Hex.u2or4(1234));
Assert.assertEquals("0001e240", Hex.u2or4(123456));
Assert.assertEquals("00bc614e", Hex.u2or4(12345678));
Assert.assertEquals("499602d2", Hex.u2or4(1234567890));
} |
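A minimal reimplementation sketch of the width selection: values that round-trip through a 16-bit char print as four hex digits, everything else as eight.

public class HexWidthSketch {
    static String u2or4(int v) {
        // (char) v preserves v exactly iff 0 <= v <= 0xFFFF.
        return v == (char) v ? String.format("%04x", v) : String.format("%08x", v);
    }

    public static void main(String[] args) {
        System.out.println(u2or4(1234));       // 04d2
        System.out.println(u2or4(123456));     // 0001e240
        System.out.println(u2or4(1234567890)); // 499602d2
    }
}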
public T send() throws IOException {
return web3jService.send(this, responseType);
} | @Test
public void testShhNewFilter() throws Exception {
web3j.shhNewFilter(
new ShhFilter(
"0x04f96a5e25610293e42a73908e93ccc8c4d4dc0edcfa9fa872f50cb214e08ebf61a03e245533f97284d442460f2998cd41858798ddfd4d661997d3940272b717b1")
.addSingleTopic("0x12341234bf4b564f"))
.send();
verifyResult(
"{\"jsonrpc\":\"2.0\",\"method\":\"shh_newFilter\",\"params\":[{\"topics\":[\"0x12341234bf4b564f\"],\"to\":\"0x04f96a5e25610293e42a73908e93ccc8c4d4dc0edcfa9fa872f50cb214e08ebf61a03e245533f97284d442460f2998cd41858798ddfd4d661997d3940272b717b1\"}],\"id\":1}");
} |
public static NetFlowV5Packet parsePacket(ByteBuf bb) {
final int readableBytes = bb.readableBytes();
final NetFlowV5Header header = parseHeader(bb.slice(bb.readerIndex(), HEADER_LENGTH));
final int packetLength = HEADER_LENGTH + header.count() * RECORD_LENGTH;
if (header.count() <= 0 || readableBytes < packetLength) {
throw new CorruptFlowPacketException("Insufficient data (expected: " + packetLength + " bytes, actual: " + readableBytes + " bytes)");
}
final ImmutableList.Builder<NetFlowV5Record> records = ImmutableList.builder();
int offset = HEADER_LENGTH;
for (int i = 0; i < header.count(); i++) {
records.add(parseRecord(bb.slice(offset + bb.readerIndex(), RECORD_LENGTH)));
offset += RECORD_LENGTH;
}
return NetFlowV5Packet.create(header, records.build(), offset);
} | @Test
public void pcap_softflowd_NetFlowV5() throws Exception {
final List<NetFlowV5Record> allRecords = new ArrayList<>();
try (InputStream inputStream = Resources.getResource("netflow-data/netflow5.pcap").openStream()) {
final Pcap pcap = Pcap.openStream(inputStream);
pcap.loop(packet -> {
if (packet.hasProtocol(Protocol.UDP)) {
final UDPPacket udp = (UDPPacket) packet.getPacket(Protocol.UDP);
final ByteBuf byteBuf = Unpooled.wrappedBuffer(udp.getPayload().getArray());
final NetFlowV5Packet netFlowV5Packet = NetFlowV5Parser.parsePacket(byteBuf);
assertThat(netFlowV5Packet).isNotNull();
allRecords.addAll(netFlowV5Packet.records());
}
return true;
}
);
}
assertThat(allRecords).hasSize(4);
} |
public AstNode rewrite(final AstNode node, final C context) {
return rewriter.process(node, context);
} | @Test
public void shouldRewriteJoin() {
// Given:
final Join join = givenJoin(Optional.empty());
// When:
final AstNode rewritten = rewriter.rewrite(join, context);
// Then:
assertThat(
rewritten,
equalTo(
new Join(
location,
rewrittenRelation,
ImmutableList.of(new JoinedSource(
Optional.empty(),
rewrittenRightRelation,
Type.LEFT,
joinCriteria,
Optional.empty()))))
);
} |
@Override
public AppSettings load() {
Properties p = loadPropertiesFile(homeDir);
Set<String> keysOverridableFromEnv = stream(ProcessProperties.Property.values()).map(ProcessProperties.Property::getKey)
.collect(Collectors.toSet());
keysOverridableFromEnv.addAll(p.stringPropertyNames());
// 1st pass to load static properties
Props staticProps = reloadProperties(keysOverridableFromEnv, p);
keysOverridableFromEnv.addAll(getDynamicPropertiesKeys(staticProps));
// 2nd pass to load dynamic properties like `ldap.*.url` or `ldap.*.baseDn` which keys depend on values of static
// properties loaded in 1st step
Props props = reloadProperties(keysOverridableFromEnv, p);
new ProcessProperties(serviceLoaderWrapper).completeDefaults(props);
stream(consumers).forEach(c -> c.accept(props));
return new AppSettingsImpl(props);
} | @Test
public void command_line_arguments_are_included_to_settings() throws Exception {
File homeDir = temp.newFolder();
AppSettingsLoaderImpl underTest = new AppSettingsLoaderImpl(system, new String[] {"-Dsonar.foo=bar", "-Dhello=world"}, homeDir, serviceLoaderWrapper);
AppSettings settings = underTest.load();
assertThat(settings.getProps().rawProperties())
.contains(entry("sonar.foo", "bar"))
.contains(entry("hello", "world"));
} |
public static int[] computePhysicalIndices(
List<TableColumn> logicalColumns,
DataType physicalType,
Function<String, String> nameRemapping) {
Map<TableColumn, Integer> physicalIndexLookup =
computePhysicalIndices(logicalColumns.stream(), physicalType, nameRemapping);
return logicalColumns.stream().mapToInt(physicalIndexLookup::get).toArray();
} | @Test
void testNameMappingDoesNotExist() {
assertThatThrownBy(
() ->
TypeMappingUtils.computePhysicalIndices(
TableSchema.builder()
.field("f0", DataTypes.BIGINT())
.build()
.getTableColumns(),
ROW(FIELD("f0", DataTypes.BIGINT())),
str -> null))
.isInstanceOf(ValidationException.class)
.hasMessage("Field 'f0' could not be resolved by the field mapping.");
} |
public static MySQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) {
Preconditions.checkArgument(BINARY_PROTOCOL_VALUES.containsKey(binaryColumnType), "Cannot find MySQL type '%s' in column type when process binary protocol value", binaryColumnType);
return BINARY_PROTOCOL_VALUES.get(binaryColumnType);
} | @Test
void assertGetBinaryProtocolValueWithMySQLTypeVarString() {
assertThat(MySQLBinaryProtocolValueFactory.getBinaryProtocolValue(MySQLBinaryColumnType.VAR_STRING), instanceOf(MySQLStringLenencBinaryProtocolValue.class));
} |
public int completeName(String buffer, int cursor, List<InterpreterCompletion> candidates,
Map<String, String> aliases) {
CursorArgument cursorArgument = parseCursorArgument(buffer, cursor);
// find schema and table name if they are
String schema;
String table;
String column;
if (cursorArgument.getSchema() == null) { // process all
List<CharSequence> keywordsCandidates = new ArrayList<>();
List<CharSequence> schemaCandidates = new ArrayList<>();
int keywordsRes = completeKeyword(buffer, cursor, keywordsCandidates);
int schemaRes = completeSchema(buffer, cursor, schemaCandidates);
addCompletions(candidates, keywordsCandidates, CompletionType.keyword.name());
addCompletions(candidates, schemaCandidates, CompletionType.schema.name());
return NumberUtils.max(keywordsRes, schemaRes);
} else {
schema = cursorArgument.getSchema();
if (aliases.containsKey(schema)) { // process alias case
String alias = aliases.get(schema);
int pointPos = alias.indexOf('.');
schema = alias.substring(0, pointPos);
table = alias.substring(pointPos + 1);
column = cursorArgument.getColumn();
List<CharSequence> columnCandidates = new ArrayList<>();
int columnRes = completeColumn(schema, table, column, cursorArgument.getCursorPosition(),
columnCandidates);
addCompletions(candidates, columnCandidates, CompletionType.column.name());
return columnRes;
// process schema.table case
} else if (cursorArgument.getTable() != null && cursorArgument.getColumn() == null) {
List<CharSequence> tableCandidates = new ArrayList<>();
table = cursorArgument.getTable();
int tableRes = completeTable(schema, table, cursorArgument.getCursorPosition(),
tableCandidates);
addCompletions(candidates, tableCandidates, CompletionType.table.name());
return tableRes;
} else {
List<CharSequence> columnCandidates = new ArrayList<>();
table = cursorArgument.getTable();
column = cursorArgument.getColumn();
int columnRes = completeColumn(schema, table, column, cursorArgument.getCursorPosition(),
columnCandidates);
addCompletions(candidates, columnCandidates, CompletionType.column.name());
return columnRes;
}
}
} | @Test
void testCompleteName_SimpleColumn() {
String buffer = "prod_dds.financial_account.acc";
int cursor = 30;
List<InterpreterCompletion> candidates = new ArrayList<>();
Map<String, String> aliases = new HashMap<>();
sqlCompleter.completeName(buffer, cursor, candidates, aliases);
assertEquals(2, candidates.size());
assertTrue(candidates.contains(new InterpreterCompletion("account_rk", "account_rk",
CompletionType.column.name())));
assertTrue(candidates.contains(new InterpreterCompletion("account_id", "account_id",
CompletionType.column.name())));
} |
public RelDataType createRelDataTypeFromSchema(Schema schema) {
Builder builder = new Builder(this);
boolean enableNullHandling = schema.isEnableColumnBasedNullHandling();
for (Map.Entry<String, FieldSpec> entry : schema.getFieldSpecMap().entrySet()) {
builder.add(entry.getKey(), toRelDataType(entry.getValue(), enableNullHandling));
}
return builder.build();
} | @Test(dataProvider = "relDataTypeConversion")
public void testArrayTypes(FieldSpec.DataType dataType, RelDataType arrayType, boolean columnNullMode) {
TypeFactory typeFactory = new TypeFactory();
Schema testSchema = new Schema.SchemaBuilder()
.addMultiValueDimension("col", dataType)
.setEnableColumnBasedNullHandling(columnNullMode)
.build();
RelDataType relDataTypeFromSchema = typeFactory.createRelDataTypeFromSchema(testSchema);
List<RelDataTypeField> fieldList = relDataTypeFromSchema.getFieldList();
RelDataTypeField field = fieldList.get(0);
boolean nullable = isColNullable(testSchema);
RelDataType expectedType =
typeFactory.createTypeWithNullability(typeFactory.createArrayType(arrayType, -1), nullable);
Assert.assertEquals(field.getType(), expectedType);
} |
@Override
public String getKind(final String filename) {
if(StringUtils.isBlank(Path.getExtension(filename))) {
final String kind = this.kind(filename);
if(StringUtils.isBlank(kind)) {
return LocaleFactory.localizedString("Unknown");
}
return kind;
}
final String kind = this.kind(Path.getExtension(filename));
if(StringUtils.isBlank(kind)) {
return LocaleFactory.localizedString("Unknown");
}
return kind;
} | @Test
public void testGetKind() {
assertNotNull(new LaunchServicesFileDescriptor().getKind("/tmp/t.txt"));
} |
public static Select select(String fieldName) {
return new Select(fieldName);
}
void basic_and_andnot_or_offset_limit_param_order_by_and_contains() {
String q = Q.select("*")
.from("sd1")
.where("f1").contains("v1")
.and("f2").contains("v2")
.or("f3").contains("v3")
.andnot("f4").contains("v4")
.offset(1)
.limit(2)
.timeout(3)
.orderByDesc("f1")
.orderByAsc("f2")
.fix()
.param("paramk1", "paramv1")
.build();
assertEquals(q, "yql=select * from sd1 where f1 contains \"v1\" and f2 contains \"v2\" or f3 contains \"v3\" and !(f4 contains \"v4\") order by f1 desc, f2 asc limit 2 offset 1 timeout 3&paramk1=paramv1");
} |
@Override
public long read() {
return gaugeSource.read();
} | @Test
public void whenProbeRegisteredAfterGauge() {
LongGauge gauge = metricsRegistry.newLongGauge("foo.longField");
SomeObject someObject = new SomeObject();
metricsRegistry.registerStaticMetrics(someObject, "foo");
assertEquals(someObject.longField, gauge.read());
} |