focal_method | test_case
---|---
@Override
public String toString() {
return String.format("FixedThreadPoolBulkhead '%s'", this.name);
} | @Test
public void testToString() {
String result = bulkhead.toString();
assertThat(result).isEqualTo("FixedThreadPoolBulkhead 'test'");
} |
@Override
public Object decode(Response response, Type type) throws IOException, DecodeException {
if (response.status() == 404 || response.status() == 204)
return Util.emptyValueOf(type);
if (response.body() == null)
return null;
ContentHandlerWithResult.Factory<?> handlerFactory = handlerFactories.get(type);
checkState(handlerFactory != null, "type %s not in configured handlers %s", type,
handlerFactories.keySet());
ContentHandlerWithResult<?> handler = handlerFactory.create();
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
xmlReader.setFeature("http://xml.org/sax/features/namespaces", false);
xmlReader.setFeature("http://xml.org/sax/features/validation", false);
/* Explicitly control sax configuration to prevent XXE attacks */
xmlReader.setFeature("http://xml.org/sax/features/external-general-entities", false);
xmlReader.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
xmlReader.setFeature("http://apache.org/xml/features/disallow-doctype-decl", false);
xmlReader.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
xmlReader.setContentHandler(handler);
InputStream inputStream = response.body().asInputStream();
try {
xmlReader.parse(new InputSource(inputStream));
} finally {
ensureClosed(inputStream);
}
return handler.result();
} catch (SAXException e) {
throw new DecodeException(response.status(), e.getMessage(), response.request(), e);
}
} | @Test
void niceErrorOnUnconfiguredType() throws ParseException, IOException {
Throwable exception = assertThrows(IllegalStateException.class, () ->
decoder.decode(statusFailedResponse(), int.class));
assertThat(exception.getMessage()).contains("type int not in configured handlers");
} |
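A note on the SAX setup above: the XXE protection comes from disabling external general and parameter entities plus external DTD loading; disallow-doctype-decl is left false so responses that carry a DOCTYPE still parse. A minimal sketch of the same hardening through the non-deprecated SAXParserFactory API (this factory-based variant is an assumption for illustration, not part of the focal method):

import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.XMLReader;

final class SafeSax {
    // Sketch: builds an XMLReader with the same XXE hardening as the decoder
    // above, using SAXParserFactory instead of the deprecated XMLReaderFactory.
    static XMLReader newHardenedReader() throws Exception {
        SAXParserFactory factory = SAXParserFactory.newInstance();
        factory.setNamespaceAware(false);
        factory.setValidating(false);
        // Block external entity resolution (the actual XXE vector).
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        // Do not fetch external DTDs either.
        factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
        return factory.newSAXParser().getXMLReader();
    }
} |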
public long getYear() {
return year;
} | @Test
public void TwoDigitYear() {
boolean hasException = false;
try {
DateLiteral literal = new DateLiteral("1997-10-07", Type.DATE);
Assert.assertEquals(1997, literal.getYear());
DateLiteral literal2 = new DateLiteral("97-10-07", Type.DATE);
Assert.assertEquals(1997, literal2.getYear());
DateLiteral literal3 = new DateLiteral("0097-10-07", Type.DATE);
Assert.assertEquals(97, literal3.getYear());
DateLiteral literal4 = new DateLiteral("99-10-07", Type.DATE);
Assert.assertEquals(1999, literal4.getYear());
DateLiteral literal5 = new DateLiteral("70-10-07", Type.DATE);
Assert.assertEquals(1970, literal5.getYear());
DateLiteral literal6 = new DateLiteral("69-10-07", Type.DATE);
Assert.assertEquals(2069, literal6.getYear());
DateLiteral literal7 = new DateLiteral("00-10-07", Type.DATE);
Assert.assertEquals(2000, literal7.getYear());
} catch (AnalysisException e) {
e.printStackTrace();
hasException = true;
}
Assert.assertFalse(hasException);
} |
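The assertions above pin down a two-digit-year pivot at 70: 70-99 complete to 1970-1999, 00-69 to 2000-2069, while inputs written with more digits (such as "0097") are taken literally. A minimal sketch of that completion rule, under the assumption that digit count decides whether completion applies (the helper is hypothetical, not the DateLiteral API):

final class YearCompletion {
    // Hypothetical sketch of the rule implied by the test expectations:
    // 70..99 -> 1970..1999, 00..69 -> 2000..2069; "0097" keeps year 97.
    static long completeYear(long year, int digitCount) {
        if (digitCount > 2) {
            return year; // explicitly written years are not completed
        }
        return year >= 70 ? 1900 + year : 2000 + year;
    }
} |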
public void startFunction(FunctionRuntimeInfo functionRuntimeInfo) {
try {
FunctionMetaData functionMetaData = functionRuntimeInfo.getFunctionInstance().getFunctionMetaData();
FunctionDetails functionDetails = functionMetaData.getFunctionDetails();
int instanceId = functionRuntimeInfo.getFunctionInstance().getInstanceId();
log.info("{}/{}/{}-{} Starting function ...", functionDetails.getTenant(), functionDetails.getNamespace(),
functionDetails.getName(), instanceId);
String packageFile;
String transformFunctionPackageFile = null;
Function.PackageLocationMetaData pkgLocation = functionMetaData.getPackageLocation();
Function.PackageLocationMetaData transformFunctionPkgLocation =
functionMetaData.getTransformFunctionPackageLocation();
if (runtimeFactory.externallyManaged()) {
packageFile = pkgLocation.getPackagePath();
transformFunctionPackageFile = transformFunctionPkgLocation.getPackagePath();
} else {
packageFile = getPackageFile(functionMetaData, functionDetails, instanceId, pkgLocation,
InstanceUtils.calculateSubjectType(functionDetails));
if (!isEmpty(transformFunctionPkgLocation.getPackagePath())) {
transformFunctionPackageFile =
getPackageFile(functionMetaData, functionDetails, instanceId, transformFunctionPkgLocation,
FunctionDetails.ComponentType.FUNCTION);
}
}
// Setup for batch sources if necessary
setupBatchSource(functionDetails);
RuntimeSpawner runtimeSpawner = getRuntimeSpawner(functionRuntimeInfo.getFunctionInstance(),
packageFile, transformFunctionPackageFile);
functionRuntimeInfo.setRuntimeSpawner(runtimeSpawner);
runtimeSpawner.start();
} catch (Exception ex) {
FunctionDetails details = functionRuntimeInfo.getFunctionInstance()
.getFunctionMetaData().getFunctionDetails();
log.error("{}/{}/{} Error starting function", details.getTenant(), details.getNamespace(),
details.getName(), ex);
functionRuntimeInfo.setStartupException(ex);
}
} | @Test
public void testStartFunctionWithPackageUrl() throws Exception {
WorkerConfig workerConfig = new WorkerConfig();
workerConfig.setWorkerId("worker-1");
workerConfig.setFunctionRuntimeFactoryClassName(ThreadRuntimeFactory.class.getName());
workerConfig.setFunctionRuntimeFactoryConfigs(
ObjectMapperFactory.getMapper().getObjectMapper().convertValue(
new ThreadRuntimeFactoryConfig().setThreadGroupName("test"), Map.class));
workerConfig.setPulsarServiceUrl("pulsar://localhost:6650");
workerConfig.setStateStorageServiceUrl("foo");
workerConfig.setFunctionAssignmentTopicName("assignments");
String downloadDir = this.getClass().getProtectionDomain().getCodeSource().getLocation().getPath();
workerConfig.setDownloadDirectory(downloadDir);
RuntimeFactory factory = mock(RuntimeFactory.class);
Runtime runtime = mock(Runtime.class);
doReturn(runtime).when(factory).createContainer(any(), any(), any(), any(), any(), any());
doNothing().when(runtime).start();
Namespace dlogNamespace = mock(Namespace.class);
final String exceptionMsg = "dl namespace not-found";
doThrow(new IllegalArgumentException(exceptionMsg)).when(dlogNamespace).openLog(any());
PulsarAdmin pulsarAdmin = mock(PulsarAdmin.class);
Packages packages = mock(Packages.class);
doReturn(packages).when(pulsarAdmin).packages();
doNothing().when(packages).download(any(), any());
@SuppressWarnings("resource")
FunctionActioner actioner = new FunctionActioner(workerConfig, factory, dlogNamespace,
new ConnectorsManager(workerConfig), new FunctionsManager(workerConfig), pulsarAdmin,
mock(PackageUrlValidator.class));
// (1) Test with a file URL: the FunctionActioner should handle the file URL and be able to
// invoke the RuntimeSpawner
String pkgPathLocation = "function://public/default/test-function@latest";
startFunction(actioner, pkgPathLocation, pkgPathLocation);
verify(runtime, times(1)).start();
} |
public void removeOldLoadJob() {
// clean expired load job
long currentTimeMs = System.currentTimeMillis();
writeLock();
try {
// add load job to a sorted tree set
Set<LoadJob> jobs = new TreeSet<>(new Comparator<LoadJob>() {
@Override
public int compare(LoadJob o1, LoadJob o2) {
// sort by finish time ascending, so the earliest-finished jobs are removed first
return Long.compare(o1.getFinishTimestamp(), o2.getFinishTimestamp());
}
});
for (Map.Entry<Long, LoadJob> entry : idToLoadJob.entrySet()) {
LoadJob job = entry.getValue();
if (!job.isCompleted()) {
continue;
}
if (isJobExpired(job, currentTimeMs)) {
// remove expired job
LOG.info("remove expired job: {}", job.getLabel());
unprotectedRemoveJobReleatedMeta(job);
} else {
jobs.add(job);
}
}
// if there are still more jobs than LABEL_KEEP_MAX_NUM
// remove the ones that finished earlier
int numJobsToRemove = idToLoadJob.size() - Config.label_keep_max_num;
if (numJobsToRemove > 0) {
LOG.info("remove {} jobs from {}", numJobsToRemove, jobs.size());
Iterator<LoadJob> iterator = jobs.iterator();
for (int i = 0; i != numJobsToRemove && iterator.hasNext(); ++i) {
LoadJob job = iterator.next();
LOG.info("remove redundant job: {}", job.getLabel());
unprotectedRemoveJobReleatedMeta(job);
}
}
} finally {
writeUnlock();
}
} | @Test
public void testRemoveOldLoadJob(@Mocked GlobalStateMgr globalStateMgr,
@Injectable Database db) throws Exception {
new Expectations() {
{
globalStateMgr.getDb(anyLong);
result = db;
}
};
loadManager = new LoadMgr(new LoadJobScheduler());
int origLabelKeepMaxSecond = Config.label_keep_max_second;
int origLabelKeepMaxNum = Config.label_keep_max_num;
Map<Long, LoadJob> idToLoadJob = Deencapsulation.getField(loadManager, "idToLoadJob");
Map<Long, Map<String, List<LoadJob>>> dbIdToLabelToLoadJobs = Deencapsulation.getField(
loadManager, "dbIdToLabelToLoadJobs");
long currentTimeMs = System.currentTimeMillis();
// finished insert job
LoadJob job0 = new InsertLoadJob("job0", 0L, 1L, currentTimeMs - 101000, "", "", null);
job0.id = 10;
job0.finishTimestamp = currentTimeMs - 101000;
Deencapsulation.invoke(loadManager, "addLoadJob", job0);
// broker load job
// loading
LoadJob job1 = new BrokerLoadJob(1L, "job1", null, null, null);
job1.state = JobState.LOADING;
job1.id = 11;
Deencapsulation.invoke(loadManager, "addLoadJob", job1);
// cancelled
LoadJob job2 = new BrokerLoadJob(1L, "job2", null, null, null);
job2.finishTimestamp = currentTimeMs - 3000;
job2.state = JobState.CANCELLED;
job2.id = 16;
Deencapsulation.invoke(loadManager, "addLoadJob", job2);
// finished
LoadJob job22 = new BrokerLoadJob(1L, "job2", null, null, null);
job22.finishTimestamp = currentTimeMs - 1000;
job22.state = JobState.FINISHED;
job22.id = 12;
Deencapsulation.invoke(loadManager, "addLoadJob", job22);
// spark load job
// etl
LoadJob job3 = new SparkLoadJob(2L, "job3", null, null);
job3.state = JobState.ETL;
job3.id = 13;
Deencapsulation.invoke(loadManager, "addLoadJob", job3);
// cancelled
LoadJob job4 = new SparkLoadJob(2L, "job4", null, null);
job4.finishTimestamp = currentTimeMs - 51000;
job4.state = JobState.CANCELLED;
job4.id = 14;
Deencapsulation.invoke(loadManager, "addLoadJob", job4);
// finished
LoadJob job42 = new SparkLoadJob(2L, "job4", null, null);
job42.finishTimestamp = currentTimeMs - 2000;
job42.state = JobState.FINISHED;
job42.id = 15;
Deencapsulation.invoke(loadManager, "addLoadJob", job42);
Assert.assertEquals(7, idToLoadJob.size());
Assert.assertEquals(3, dbIdToLabelToLoadJobs.size());
// test remove jobs by label_keep_max_second
// remove db 0, job0
Config.label_keep_max_second = 100;
Config.label_keep_max_num = 10;
loadManager.removeOldLoadJob();
System.out.println(idToLoadJob);
Assert.assertEquals(6, idToLoadJob.size());
Assert.assertFalse(idToLoadJob.containsKey(10L));
Assert.assertEquals(2, dbIdToLabelToLoadJobs.size());
Assert.assertFalse(dbIdToLabelToLoadJobs.containsKey(0L));
// remove cancelled job4
Config.label_keep_max_second = 50;
Config.label_keep_max_num = 10;
loadManager.removeOldLoadJob();
System.out.println(idToLoadJob);
Assert.assertEquals(5, idToLoadJob.size());
Assert.assertFalse(idToLoadJob.containsKey(14L));
Assert.assertEquals(2, dbIdToLabelToLoadJobs.size());
Assert.assertEquals(1, dbIdToLabelToLoadJobs.get(2L).get("job4").size());
// test remove jobs by label_keep_max_num
// remove cancelled job2, finished job4
Config.label_keep_max_second = 50;
Config.label_keep_max_num = 3;
loadManager.removeOldLoadJob();
System.out.println(idToLoadJob);
Assert.assertEquals(3, idToLoadJob.size());
Assert.assertFalse(idToLoadJob.containsKey(15L));
Assert.assertFalse(idToLoadJob.containsKey(16L));
Assert.assertEquals(2, dbIdToLabelToLoadJobs.size());
Assert.assertEquals(1, dbIdToLabelToLoadJobs.get(1L).get("job2").size());
Assert.assertFalse(dbIdToLabelToLoadJobs.get(2L).containsKey("job4"));
// remove finished job2
Config.label_keep_max_second = 50;
Config.label_keep_max_num = 1;
loadManager.removeOldLoadJob();
System.out.println(idToLoadJob);
Assert.assertEquals(2, idToLoadJob.size());
Assert.assertFalse(idToLoadJob.containsKey(12L));
Assert.assertEquals(2, dbIdToLabelToLoadJobs.size());
Assert.assertFalse(dbIdToLabelToLoadJobs.get(1L).containsKey("job2"));
// recover config
Config.label_keep_max_second = origLabelKeepMaxSecond;
Config.label_keep_max_num = origLabelKeepMaxNum;
} |
@Override
protected String getAnalyzerEnabledSettingKey() {
return Settings.KEYS.ANALYZER_VERSION_FILTER_ENABLED;
} | @Test
public void testGetAnalyzerEnabledSettingKey() {
VersionFilterAnalyzer instance = new VersionFilterAnalyzer();
instance.initialize(getSettings());
String expResult = Settings.KEYS.ANALYZER_VERSION_FILTER_ENABLED;
String result = instance.getAnalyzerEnabledSettingKey();
assertEquals(expResult, result);
} |
public static Object getNestedFieldVal(GenericRecord record, String fieldName, boolean returnNullIfNotFound, boolean consistentLogicalTimestampEnabled) {
String[] parts = fieldName.split("\\.");
GenericRecord valueNode = record;
for (int i = 0; i < parts.length; i++) {
String part = parts[i];
Object val;
try {
val = HoodieAvroUtils.getFieldVal(valueNode, part, returnNullIfNotFound);
} catch (AvroRuntimeException e) {
if (returnNullIfNotFound) {
return null;
} else {
throw new HoodieException(
fieldName + "(Part -" + parts[i] + ") field not found in record. Acceptable fields were :"
+ valueNode.getSchema().getFields().stream().map(Field::name).collect(Collectors.toList()));
}
}
if (i == parts.length - 1) {
// return, if last part of name
if (val == null) {
return null;
} else {
Schema fieldSchema = valueNode.getSchema().getField(part).schema();
return convertValueForSpecificDataTypes(fieldSchema, val, consistentLogicalTimestampEnabled);
}
} else {
if (!(val instanceof GenericRecord)) {
if (returnNullIfNotFound) {
return null;
} else {
throw new HoodieException("Cannot find a record at part value :" + part);
}
} else {
valueNode = (GenericRecord) val;
}
}
}
// This can only be reached if the length of parts is 0
if (returnNullIfNotFound) {
return null;
} else {
throw new HoodieException(
fieldName + " field not found in record. Acceptable fields were :"
+ valueNode.getSchema().getFields().stream().map(Field::name).collect(Collectors.toList()));
}
} | @Test
public void testGetNestedFieldValWithNestedField() {
Schema nestedSchema = new Schema.Parser().parse(SCHEMA_WITH_NESTED_FIELD);
GenericRecord rec = new GenericData.Record(nestedSchema);
// test get .
assertEquals(". field not found in record. Acceptable fields were :[firstname, lastname, student]",
assertThrows(HoodieException.class, () ->
HoodieAvroUtils.getNestedFieldVal(rec, ".", false, false)).getMessage());
// test get fake_key
assertEquals("fake_key(Part -fake_key) field not found in record. Acceptable fields were :[firstname, lastname, student]",
assertThrows(HoodieException.class, () ->
HoodieAvroUtils.getNestedFieldVal(rec, "fake_key", false, false)).getMessage());
// test get student(null)
assertNull(HoodieAvroUtils.getNestedFieldVal(rec, "student", false, false));
// test get student
GenericRecord studentRecord = new GenericData.Record(rec.getSchema().getField("student").schema());
studentRecord.put("firstname", "person");
rec.put("student", studentRecord);
assertEquals(studentRecord, HoodieAvroUtils.getNestedFieldVal(rec, "student", false, false));
// test get student.fake_key
assertEquals("student.fake_key(Part -fake_key) field not found in record. Acceptable fields were :[firstname, lastname]",
assertThrows(HoodieException.class, () ->
HoodieAvroUtils.getNestedFieldVal(rec, "student.fake_key", false, false)).getMessage());
// test get student.firstname
assertEquals("person", HoodieAvroUtils.getNestedFieldVal(rec, "student.firstname", false, false));
// test get student.lastname(null)
assertNull(HoodieAvroUtils.getNestedFieldVal(rec, "student.lastname", false, false));
// test get student.firstname.fake_key
assertEquals("Cannot find a record at part value :firstname",
assertThrows(HoodieException.class, () ->
HoodieAvroUtils.getNestedFieldVal(rec, "student.firstname.fake_key", false, false)).getMessage());
// test get student.lastname(null).fake_key
assertEquals("Cannot find a record at part value :lastname",
assertThrows(HoodieException.class, () ->
HoodieAvroUtils.getNestedFieldVal(rec, "student.lastname.fake_key", false, false)).getMessage());
} |
@Override
public void updateIndices(SegmentDirectory.Writer segmentWriter)
throws Exception {
Map<String, List<Operation>> columnOperationsMap = computeOperations(segmentWriter);
if (columnOperationsMap.isEmpty()) {
return;
}
for (Map.Entry<String, List<Operation>> entry : columnOperationsMap.entrySet()) {
String column = entry.getKey();
List<Operation> operations = entry.getValue();
for (Operation operation : operations) {
switch (operation) {
case DISABLE_FORWARD_INDEX:
// Deletion of the forward index will be handled outside the index handler to ensure that other index
// handlers that need the forward index to construct their own indexes will have it available.
_tmpForwardIndexColumns.add(column);
break;
case ENABLE_FORWARD_INDEX:
ColumnMetadata columnMetadata = createForwardIndexIfNeeded(segmentWriter, column, false);
if (columnMetadata.hasDictionary()) {
if (!segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
throw new IllegalStateException(String.format(
"Dictionary should still exist after rebuilding forward index for dictionary column: %s", column));
}
} else {
if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
throw new IllegalStateException(
String.format("Dictionary should not exist after rebuilding forward index for raw column: %s",
column));
}
}
break;
case DISABLE_DICTIONARY:
Set<String> newForwardIndexDisabledColumns =
FieldIndexConfigsUtil.columnsWithIndexDisabled(_fieldIndexConfigs.keySet(), StandardIndexes.forward(),
_fieldIndexConfigs);
if (newForwardIndexDisabledColumns.contains(column)) {
removeDictionaryFromForwardIndexDisabledColumn(column, segmentWriter);
if (segmentWriter.hasIndexFor(column, StandardIndexes.dictionary())) {
throw new IllegalStateException(
String.format("Dictionary should not exist after disabling dictionary for column: %s", column));
}
} else {
disableDictionaryAndCreateRawForwardIndex(column, segmentWriter);
}
break;
case ENABLE_DICTIONARY:
createDictBasedForwardIndex(column, segmentWriter);
if (!segmentWriter.hasIndexFor(column, StandardIndexes.forward())) {
throw new IllegalStateException(String.format("Forward index was not created for column: %s", column));
}
break;
case CHANGE_INDEX_COMPRESSION_TYPE:
rewriteForwardIndexForCompressionChange(column, segmentWriter);
break;
default:
throw new IllegalStateException("Unsupported operation for column " + column);
}
}
}
} | @Test
public void testEnableForwardIndexInDictModeForMultipleForwardIndexDisabledColumns()
throws Exception {
SegmentMetadataImpl existingSegmentMetadata = new SegmentMetadataImpl(_segmentDirectory);
SegmentDirectory segmentLocalFSDirectory =
new SegmentLocalFSDirectory(_segmentDirectory, existingSegmentMetadata, ReadMode.mmap);
SegmentDirectory.Writer writer = segmentLocalFSDirectory.createWriter();
IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig(null, _tableConfig);
Random rand = new Random();
// Remove from forward index list but keep the inverted index enabled
String col1 = SV_FORWARD_INDEX_DISABLED_COLUMNS.get(rand.nextInt(SV_FORWARD_INDEX_DISABLED_COLUMNS.size()));
indexLoadingConfig.removeForwardIndexDisabledColumns(col1);
String col2 = MV_FORWARD_INDEX_DISABLED_COLUMNS.get(rand.nextInt(MV_FORWARD_INDEX_DISABLED_COLUMNS.size()));
indexLoadingConfig.removeForwardIndexDisabledColumns(col2);
ForwardIndexHandler fwdIndexHandler = new ForwardIndexHandler(segmentLocalFSDirectory, indexLoadingConfig, _schema);
fwdIndexHandler.updateIndices(writer);
fwdIndexHandler.postUpdateIndicesCleanup(writer);
// Tear down before validation, because columns.psf and index-map cleanup happens at segmentDirectory.close()
segmentLocalFSDirectory.close();
// Col1 validation.
ColumnMetadata metadata = existingSegmentMetadata.getColumnMetadataFor(col1);
validateIndexMap(col1, true, false);
validateForwardIndex(col1, null, metadata.isSorted());
// In column metadata, nothing should change.
validateMetadataProperties(col1, metadata.hasDictionary(), metadata.getColumnMaxLength(), metadata.getCardinality(),
metadata.getTotalDocs(), metadata.getDataType(), metadata.getFieldType(), metadata.isSorted(),
metadata.isSingleValue(), metadata.getMaxNumberOfMultiValues(), metadata.getTotalNumberOfEntries(),
metadata.isAutoGenerated(), metadata.getMinValue(), metadata.getMaxValue(), false);
// Col2 validation.
metadata = existingSegmentMetadata.getColumnMetadataFor(col2);
validateIndexMap(col2, true, false);
validateForwardIndex(col2, null, metadata.isSorted());
// In column metadata, nothing should change.
validateMetadataProperties(col2, metadata.hasDictionary(), metadata.getColumnMaxLength(), metadata.getCardinality(),
metadata.getTotalDocs(), metadata.getDataType(), metadata.getFieldType(), metadata.isSorted(),
metadata.isSingleValue(), metadata.getMaxNumberOfMultiValues(), metadata.getTotalNumberOfEntries(),
metadata.isAutoGenerated(), metadata.getMinValue(), metadata.getMaxValue(), false);
} |
public static Read read() {
return Read.create();
} | @Test
public void testReadValidationFailsMissingInstanceId() {
BigtableIO.Read read =
BigtableIO.read()
.withTableId("table")
.withProjectId("project")
.withBigtableOptions(BigtableOptions.builder().build());
thrown.expect(IllegalArgumentException.class);
read.expand(null);
} |
public static <T extends SpecificRecordBase> T dataMapToSpecificRecord(DataMap map, RecordDataSchema dataSchema,
Schema avroSchema) throws DataTranslationException {
DataMapToSpecificRecordTranslator translator = new DataMapToSpecificRecordTranslator();
try {
T avroRecord = translator.translate(map, dataSchema, avroSchema);
translator.checkMessageListForErrorsAndThrowDataTranslationException();
return avroRecord;
} catch (RuntimeException e) {
throw translator.dataTranslationException(e);
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
throw translator.dataTranslationException(new RuntimeException(e));
}
} | @Test
public void testArrayOfRecords() throws IOException {
RecordDataSchema recordDataSchema =
(RecordDataSchema) TestUtil.dataSchemaFromString(RecordArray.TEST_SCHEMA.toString());
DataMap stringRecord =
new DataMap(ImmutableMap.of("stringField", new DataMap(ImmutableMap.of("string", "stringVal"))));
DataList recordArray = new DataList(Arrays.asList(stringRecord));
RecordArray recordArrayEvent =
DataTranslator.dataMapToSpecificRecord(new DataMap(ImmutableMap.of("recordArray", recordArray)),
recordDataSchema, RecordArray.TEST_SCHEMA);
StringRecord stringRecordEvent = DataTranslator.dataMapToSpecificRecord(stringRecord,
(RecordDataSchema) TestUtil.dataSchemaFromString(StringRecord.TEST_SCHEMA.toString()), StringRecord.TEST_SCHEMA);
Assert.assertEquals(recordArrayEvent.get(0), Arrays.asList(stringRecordEvent));
} |
@RequestMapping(value = "/{product}/{cluster}", method = RequestMethod.GET)
public ResponseEntity<String> getCluster(@PathVariable String product, @PathVariable String cluster) {
String productName = addressServerBuilderManager.generateProductName(product);
String serviceName = addressServerBuilderManager.generateNacosServiceName(productName);
String serviceWithoutGroup = NamingUtils.getServiceName(serviceName);
String groupName = NamingUtils.getGroupName(serviceName);
Optional<Service> service = ServiceManager.getInstance()
.getSingletonIfExist(Constants.DEFAULT_NAMESPACE_ID, groupName, serviceWithoutGroup);
if (!service.isPresent()) {
return ResponseEntity.status(HttpStatus.NOT_FOUND).body("product=" + product + " not found.");
}
ClusterMetadata metadata = metadataManager.getServiceMetadata(service.get()).orElse(new ServiceMetadata())
.getClusters().get(cluster);
if (null == metadata) {
return ResponseEntity.status(HttpStatus.NOT_FOUND)
.body("product=" + product + ",cluster=" + cluster + " not found.");
}
ServiceInfo serviceInfo = serviceStorage.getData(service.get());
serviceInfo = ServiceUtil.selectInstances(serviceInfo, cluster, false);
return ResponseEntity.status(HttpStatus.OK)
.body(addressServerBuilderManager.generateResponseIps(serviceInfo.getHosts()));
} | @Test
void testGetCluster() throws Exception {
final Service service = Service
.newService(Constants.DEFAULT_NAMESPACE_ID, Constants.DEFAULT_GROUP, "nacos.as.default", false);
ServiceMetadata serviceMetadata = new ServiceMetadata();
serviceMetadata.getClusters().put("serverList", new ClusterMetadata());
when(metadataManager.getServiceMetadata(service)).thenReturn(Optional.of(serviceMetadata));
List<Instance> list = new ArrayList<>(2);
list.add(new Instance());
list.add(new Instance());
ServiceInfo serviceInfo = new ServiceInfo();
serviceInfo.setHosts(list);
when(serviceStorage.getData(service)).thenReturn(serviceInfo);
mockMvc.perform(get("/nacos/serverList")).andExpect(status().isOk());
} |
public final void hasSize(int expectedSize) {
checkArgument(expectedSize >= 0, "expectedSize(%s) must be >= 0", expectedSize);
int actualSize = size(checkNotNull(actual));
check("size()").that(actualSize).isEqualTo(expectedSize);
} | @Test
public void hasSizeFails() {
expectFailureWhenTestingThat(ImmutableList.of(1, 2, 3)).hasSize(4);
assertFailureValue("value of", "iterable.size()");
} |
public static <T> T loadWithSecrets(Map<String, Object> map, Class<T> clazz, SourceContext sourceContext) {
return loadWithSecrets(map, clazz, secretName -> sourceContext.getSecret(secretName));
} | @Test
public void testSourceLoadWithSecrets() {
Map<String, Object> configMap = new HashMap<>();
configMap.put("notSensitive", "foo");
TestConfig testConfig = IOConfigUtils.loadWithSecrets(configMap, TestConfig.class, new TestSourceContext());
Assert.assertEquals(testConfig.notSensitive, "foo");
Assert.assertEquals(testConfig.password, "my-source-password");
configMap = new HashMap<>();
configMap.put("notSensitive", "foo");
configMap.put("password", "another-password");
configMap.put("sensitiveLong", 5L);
testConfig = IOConfigUtils.loadWithSecrets(configMap, TestConfig.class, new TestSourceContext());
Assert.assertEquals(testConfig.notSensitive, "foo");
Assert.assertEquals(testConfig.password, "my-source-password");
Assert.assertEquals(testConfig.sensitiveLong, 5L);
// test derived classes
configMap = new HashMap<>();
configMap.put("notSensitive", "foo");
configMap.put("sensitiveLong", 5L);
DerivedConfig derivedConfig = IOConfigUtils.loadWithSecrets(configMap, DerivedConfig.class, new TestSourceContext());
Assert.assertEquals(derivedConfig.notSensitive, "foo");
Assert.assertEquals(derivedConfig.password, "my-source-password");
Assert.assertEquals(derivedConfig.sensitiveLong, 5L);
Assert.assertEquals(derivedConfig.moreSensitiveStuff, "more-sensitive-stuff");
configMap = new HashMap<>();
configMap.put("notSensitive", "foo");
configMap.put("sensitiveLong", 5L);
DerivedDerivedConfig derivedDerivedConfig = IOConfigUtils.loadWithSecrets(configMap, DerivedDerivedConfig.class, new TestSourceContext());
Assert.assertEquals(derivedDerivedConfig.notSensitive, "foo");
Assert.assertEquals(derivedDerivedConfig.password, "my-source-password");
Assert.assertEquals(derivedDerivedConfig.sensitiveLong, 5L);
Assert.assertEquals(derivedDerivedConfig.moreSensitiveStuff, "more-sensitive-stuff");
Assert.assertEquals(derivedDerivedConfig.derivedDerivedSensitive, "derived-derived-sensitive");
} |
@JsonProperty
public void setDepth(String depth) {
this.depth = depth;
} | @Test
void testSetDepth() {
ExceptionFormat ef = new ExceptionFormat();
assertThat(ef.getDepth()).isEqualTo("full");
ef.setDepth("short");
assertThat(ef.getDepth()).isEqualTo("short");
// Verify depth can be set to a number as well
ef.setDepth("25");
assertThat(ef.getDepth()).isEqualTo("25");
} |
public boolean isFound() {
return found;
} | @Test
public void testCalcInstructionsRoundabout2() {
roundaboutGraph.inverse3to6();
Weighting weighting = new SpeedWeighting(mixedCarSpeedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 8);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
List<String> tmpList = getTurnDescriptions(wayList);
assertEquals(List.of("continue onto MainStreet 1 2",
"At roundabout, take exit 2 onto 5-8",
"arrive at destination"),
tmpList);
// Test Radian
double delta = roundaboutGraph.getAngle(1, 2, 5, 8);
RoundaboutInstruction instr = (RoundaboutInstruction) wayList.get(1);
assertEquals(delta, instr.getTurnAngle(), 0.01);
roundaboutGraph.inverse3to6();
} |
public void writeRuntimeDependencies(Counter counter, Range range) throws IOException {
try {
final Document myDocument = pdfDocumentFactory.createDocument(true);
myDocument.open();
final String counterLabel = getString(counter.getName() + "Label");
final String paragraphTitle = getFormattedString("Dependance_compteur", counterLabel)
+ " - " + range.getLabel();
myDocument.add(pdfDocumentFactory.createParagraphElement(paragraphTitle,
counter.getIconName()));
new PdfRuntimeDependenciesReport(counter, myDocument).toPdf();
myDocument.close();
} catch (final DocumentException e) {
throw createIOException(e);
}
} | @Test
public void testWriteRuntimeDependencies() throws IOException {
final ByteArrayOutputStream output = new ByteArrayOutputStream();
final Counter sqlCounter = new Counter("sql", null);
final Counter counter = new Counter("services", null, sqlCounter);
counter.bindContextIncludingCpu("BeanA.test");
counter.bindContextIncludingCpu("BeanA.test2");
counter.bindContextIncludingCpu("BeanB.test");
counter.addRequestForCurrentContext(false);
counter.bindContextIncludingCpu("BeanB.test2");
counter.addRequestForCurrentContext(false);
counter.addRequestForCurrentContext(false);
counter.addRequestForCurrentContext(false);
counter.bindContextIncludingCpu("test");
counter.bindContextIncludingCpu("BeanA.test");
counter.addRequestForCurrentContext(false);
counter.addRequestForCurrentContext(false);
counter.bindContextIncludingCpu("test2");
sqlCounter.bindContextIncludingCpu("sql");
sqlCounter.addRequestForCurrentContext(false);
counter.addRequestForCurrentContext(false);
final PdfOtherReport pdfOtherReport = new PdfOtherReport(TEST_APP, output);
pdfOtherReport.writeRuntimeDependencies(counter, Period.TOUT.getRange());
assertNotEmptyAndClear(output);
} |
public TopicRouteData getDefaultTopicRouteInfoFromNameServer(final long timeoutMillis)
throws RemotingException, MQClientException, InterruptedException {
return getTopicRouteInfoFromNameServer(TopicValidator.AUTO_CREATE_TOPIC_KEY_TOPIC, timeoutMillis, false);
} | @Test
public void assertGetDefaultTopicRouteInfoFromNameServer() throws RemotingException, InterruptedException, MQClientException {
mockInvokeSync();
TopicRouteData responseBody = new TopicRouteData();
responseBody.getQueueDatas().add(new QueueData());
responseBody.getBrokerDatas().add(new BrokerData());
responseBody.getFilterServerTable().put("key", Collections.emptyList());
Map<String, TopicQueueMappingInfo> topicQueueMappingByBroker = new HashMap<>();
topicQueueMappingByBroker.put("key", new TopicQueueMappingInfo());
responseBody.setTopicQueueMappingByBroker(topicQueueMappingByBroker);
setResponseBody(responseBody);
TopicRouteData actual = mqClientAPI.getDefaultTopicRouteInfoFromNameServer(defaultTimeout);
assertNotNull(actual);
assertEquals(1, actual.getQueueDatas().size());
assertEquals(1, actual.getBrokerDatas().size());
assertEquals(1, actual.getFilterServerTable().size());
assertEquals(1, actual.getTopicQueueMappingByBroker().size());
} |
public static Set<Result> anaylze(String log) {
Set<Result> results = new HashSet<>();
for (Rule rule : Rule.values()) {
Matcher matcher = rule.pattern.matcher(log);
if (matcher.find()) {
results.add(new Result(rule, log, matcher));
}
}
return results;
} | @Test
public void modResolution() throws IOException {
CrashReportAnalyzer.Result result = findResultByRule(
CrashReportAnalyzer.anaylze(loadLog("/logs/mod_resolution.txt")),
CrashReportAnalyzer.Rule.MOD_RESOLUTION);
assertEquals(("Errors were found!\n" +
" - Mod test depends on mod {fabricloader @ [>=0.11.3]}, which is missing!\n" +
" - Mod test depends on mod {fabric @ [*]}, which is missing!\n" +
" - Mod test depends on mod {java @ [>=16]}, which is missing!\n").replaceAll("\\s+", ""),
result.getMatcher().group("reason").replaceAll("\\s+", ""));
} |
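The analyzer loops over every Rule, each of which owns a compiled pattern, and records a Result when the pattern matches; the test then reads a named "reason" capture group from the matcher. A sketch of the Rule shape that behavior implies (the regex text here is a hypothetical illustration, not HMCL's actual pattern):

import java.util.regex.Pattern;

// Hypothetical sketch: each rule carries its own compiled pattern, and
// MOD_RESOLUTION exposes a named "reason" group for the failure details.
enum Rule {
    MOD_RESOLUTION(Pattern.compile("(?<reason>Errors were found![\\s\\S]+?)\\n\\n"));

    final Pattern pattern;

    Rule(Pattern pattern) {
        this.pattern = pattern;
    }
} |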
public static void init(final Map<String, PluginConfiguration> pluginConfigs, final Collection<JarFile> pluginJars, final ClassLoader pluginClassLoader, final boolean isEnhancedForProxy) {
if (STARTED_FLAG.compareAndSet(false, true)) {
start(pluginConfigs, pluginClassLoader, isEnhancedForProxy);
Runtime.getRuntime().addShutdownHook(new Thread(() -> close(pluginJars)));
}
} | @Test
void assertInitPluginLifecycleServiceWithMap() {
Map<String, PluginConfiguration> pluginConfigs = Collections.singletonMap("Key", new PluginConfiguration("localhost", 8080, "random", new Properties()));
assertDoesNotThrow(() -> PluginLifecycleServiceManager.init(pluginConfigs, Collections.emptyList(), new MultipleParentClassLoader(Collections.emptyList()), true));
} |
public String getNormalizedErrorCode() {
// TODO: how to unify TStatusCode, ErrorCode, ErrType, ConnectContext.errorCode
if (StringUtils.isNotEmpty(errorCode)) {
// error happens in BE execution.
return errorCode;
}
if (state.getErrType() != QueryState.ErrType.UNKNOWN) {
// error happens in FE execution.
return state.getErrType().name();
}
return "";
} | @Test
public void testGetNormalizedErrorCode() {
ConnectContext ctx = new ConnectContext(socketChannel);
ctx.setState(new QueryState());
Status status = new Status(new TStatus(TStatusCode.MEM_LIMIT_EXCEEDED));
{
ctx.setErrorCodeOnce(status.getErrorCodeString());
ctx.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
Assert.assertEquals("MEM_LIMIT_EXCEEDED", ctx.getNormalizedErrorCode());
}
{
ctx.resetErrorCode();
Assert.assertEquals("ANALYSIS_ERR", ctx.getNormalizedErrorCode());
}
} |
public RandomForest trim(int ntrees) {
if (ntrees > models.length) {
throw new IllegalArgumentException("The new model size is larger than the current size.");
}
if (ntrees <= 0) {
throw new IllegalArgumentException("Invalid new model size: " + ntrees);
}
Arrays.sort(models, Comparator.comparingDouble(model -> -model.weight));
// The OOB metrics are still the old ones,
// as we don't have access to the training data here.
return new RandomForest(formula, k, Arrays.copyOf(models, ntrees), metrics, importance(models), classes);
} | @Test
public void testTrim() {
System.out.println("trim");
RandomForest model = RandomForest.fit(Segment.formula, Segment.train, 200, 16, SplitRule.GINI, 20, 100, 5, 1.0, null, Arrays.stream(seeds));
System.out.println(model.metrics());
assertEquals(200, model.size());
int[] prediction = model.predict(Segment.test);
int error = Error.of(Segment.testy, prediction);
System.out.println("Error = " + error);
assertEquals(34, error);
RandomForest trimmed = model.trim(100);
assertEquals(200, model.size());
assertEquals(100, trimmed.size());
double weight1 = Arrays.stream(model.models()).mapToDouble(m -> m.weight).min().getAsDouble();
double weight2 = Arrays.stream(trimmed.models()).mapToDouble(m -> m.weight).min().getAsDouble();
assertTrue(weight2 > weight1);
prediction = trimmed.predict(Segment.test);
error = Error.of(Segment.testy, prediction);
System.out.println("Error after trim = " + error);
assertEquals(32, error);
} |
public static Builder custom() {
return new Builder();
} | @Test
public void buildLimitRefreshPeriodIsNotWithinLimits() {
exception.expect(ThrowableCauseMatcher.hasCause(isA(ArithmeticException.class)));
exception.expectMessage("LimitRefreshPeriod too large");
RateLimiterConfig.custom()
.limitRefreshPeriod(Duration.ofSeconds(Long.MAX_VALUE));
} |
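The expected ArithmeticException comes from nanosecond conversion: a Duration of Long.MAX_VALUE seconds cannot be represented as a long of nanoseconds, and Duration.toNanos() throws ArithmeticException on overflow, which the builder is then expected to wrap with its "LimitRefreshPeriod too large" message. A standalone illustration of the underlying overflow (independent of the resilience4j builder):

import java.time.Duration;

public class DurationOverflowDemo {
    public static void main(String[] args) {
        // Converting an enormous Duration to nanoseconds overflows a long,
        // so toNanos() throws ArithmeticException ("long overflow").
        Duration huge = Duration.ofSeconds(Long.MAX_VALUE);
        try {
            huge.toNanos();
        } catch (ArithmeticException e) {
            System.out.println("overflow: " + e.getMessage());
        }
    }
} |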
public static CompressionProviderFactory getInstance() {
return INSTANCE;
} | @Test
public void testGetInstance() {
assertNotNull( factory );
} |
@Override
public void deletePost(Long id) {
// Validate that the post exists
validatePostExists(id);
// Delete the post
postMapper.deleteById(id);
} | @Test
public void testDeletePost_success() {
// mock 数据
PostDO postDO = randomPostDO();
postMapper.insert(postDO);
// 准备参数
Long id = postDO.getId();
// 调用
postService.deletePost(id);
assertNull(postMapper.selectById(id));
} |
@Override
public void onMsg(TbContext ctx, TbMsg msg) {
locks.computeIfAbsent(msg.getOriginator(), SemaphoreWithTbMsgQueue::new)
.addToQueueAndTryProcess(msg, ctx, this::processMsgAsync);
} | @Test
public void testExp4j() {
var node = initNodeWithCustomFunction("2a+3b",
new TbMathResult(TbMathArgumentType.MESSAGE_BODY, "${key1}", 2, false, false, null),
new TbMathArgument("a", TbMathArgumentType.MESSAGE_BODY, "${key2}"),
new TbMathArgument("b", TbMathArgumentType.MESSAGE_BODY, "$[key3]")
);
TbMsgMetaData metaData = new TbMsgMetaData();
metaData.putValue("key1", "firstMsgResult");
metaData.putValue("key2", "argumentA");
ObjectNode msgNode = JacksonUtil.newObjectNode()
.put("key3", "argumentB").put("argumentA", 2).put("argumentB", 2);
TbMsg msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, originator, metaData, msgNode.toString());
node.onMsg(ctx, msg);
metaData.putValue("key1", "secondMsgResult");
metaData.putValue("key2", "argumentC");
msgNode = JacksonUtil.newObjectNode()
.put("key3", "argumentD").put("argumentC", 4).put("argumentD", 3);
msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, originator, metaData, msgNode.toString());
node.onMsg(ctx, msg);
ArgumentCaptor<TbMsg> msgCaptor = ArgumentCaptor.forClass(TbMsg.class);
verify(ctx, timeout(TIMEOUT).times(2)).tellSuccess(msgCaptor.capture());
List<TbMsg> resultMsgs = msgCaptor.getAllValues();
assertFalse(resultMsgs.isEmpty());
assertEquals(2, resultMsgs.size());
for (int i = 0; i < resultMsgs.size(); i++) {
TbMsg outMsg = resultMsgs.get(i);
assertNotNull(outMsg);
assertNotNull(outMsg.getData());
var resultJson = JacksonUtil.toJsonNode(outMsg.getData());
String resultKey = i == 0 ? "firstMsgResult" : "secondMsgResult";
assertTrue(resultJson.has(resultKey));
assertEquals(i == 0 ? 10 : 17, resultJson.get(resultKey).asInt());
}
} |
@Nullable
@Override
public RecordAndPosition<E> next() {
if (records.hasNext()) {
recordAndPosition.setNext(records.next());
return recordAndPosition;
} else {
return null;
}
} | @Test
void testExhausted() {
final IteratorResultIterator<String> iter =
new IteratorResultIterator<>(Arrays.asList("1", "2").iterator(), 0L, 0L);
iter.next();
iter.next();
assertThat(iter.next()).isNull();
} |
public static double keepIn(double value, double min, double max) {
return Math.max(min, Math.min(value, max));
} | @Test
public void testKeepIn() {
assertEquals(2, Helper.keepIn(2, 1, 4), 1e-2);
assertEquals(3, Helper.keepIn(2, 3, 4), 1e-2);
assertEquals(3, Helper.keepIn(-2, 3, 4), 1e-2);
} |
public void removeMapping(String name, boolean ifExists) {
if (relationsStorage.removeMapping(name) != null) {
listeners.forEach(TableListener::onTableChanged);
} else if (!ifExists) {
throw QueryException.error("Mapping does not exist: " + name);
}
} | @Test
public void when_removesNonExistingMapping_then_throws() {
// given
String name = "name";
given(relationsStorage.removeMapping(name)).willReturn(null);
// when
// then
assertThatThrownBy(() -> catalog.removeMapping(name, false))
.isInstanceOf(QueryException.class)
.hasMessageContaining("Mapping does not exist: name");
verifyNoInteractions(listener);
} |
@VisibleForTesting
void validateTableInfo(TableInfo tableInfo) {
if (tableInfo == null) {
throw exception(CODEGEN_IMPORT_TABLE_NULL);
}
if (StrUtil.isEmpty(tableInfo.getComment())) {
throw exception(CODEGEN_TABLE_INFO_TABLE_COMMENT_IS_NULL);
}
if (CollUtil.isEmpty(tableInfo.getFields())) {
throw exception(CODEGEN_IMPORT_COLUMNS_NULL);
}
tableInfo.getFields().forEach(field -> {
if (StrUtil.isEmpty(field.getComment())) {
throw exception(CODEGEN_TABLE_INFO_COLUMN_COMMENT_IS_NULL, field.getName());
}
});
} | @Test
public void testValidateTableInfo() {
// Case 1: null table info
assertServiceException(() -> codegenService.validateTableInfo(null),
CODEGEN_IMPORT_TABLE_NULL);
// Case 2: missing table comment
TableInfo tableInfo = mock(TableInfo.class);
assertServiceException(() -> codegenService.validateTableInfo(tableInfo),
CODEGEN_TABLE_INFO_TABLE_COMMENT_IS_NULL);
// Case 3: no columns
when(tableInfo.getComment()).thenReturn("芋艿");
assertServiceException(() -> codegenService.validateTableInfo(tableInfo),
CODEGEN_IMPORT_COLUMNS_NULL);
// Case 4: missing column comment
TableField field = mock(TableField.class);
when(field.getName()).thenReturn("name");
when(tableInfo.getFields()).thenReturn(Collections.singletonList(field));
assertServiceException(() -> codegenService.validateTableInfo(tableInfo),
CODEGEN_TABLE_INFO_COLUMN_COMMENT_IS_NULL, field.getName());
} |
@Override
public CompletableFuture<Boolean> triggerCheckpointAsync(
CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions) {
checkForcedFullSnapshotSupport(checkpointOptions);
CompletableFuture<Boolean> result = new CompletableFuture<>();
mainMailboxExecutor.execute(
() -> {
try {
boolean noUnfinishedInputGates =
Arrays.stream(getEnvironment().getAllInputGates())
.allMatch(InputGate::isFinished);
if (noUnfinishedInputGates) {
result.complete(
triggerCheckpointAsyncInMailbox(
checkpointMetaData, checkpointOptions));
} else {
result.complete(
triggerUnfinishedChannelsCheckpoint(
checkpointMetaData, checkpointOptions));
}
} catch (Exception ex) {
// Report the failure both via the Future result but also to the mailbox
result.completeExceptionally(ex);
throw ex;
}
},
"checkpoint %s with %s",
checkpointMetaData,
checkpointOptions);
return result;
} | @Test
void testForceFullSnapshotOnIncompatibleStateBackend() throws Exception {
try (StreamTaskMailboxTestHarness<Integer> harness =
new StreamTaskMailboxTestHarnessBuilder<>(
OneInputStreamTask::new, BasicTypeInfo.INT_TYPE_INFO)
.modifyStreamConfig(
config -> config.setStateBackend(new OnlyIncrementalStateBackend()))
.addInput(BasicTypeInfo.INT_TYPE_INFO)
.setupOutputForSingletonOperatorChain(new StreamMap<>(value -> null))
.build()) {
assertThatThrownBy(
() ->
harness.streamTask.triggerCheckpointAsync(
new CheckpointMetaData(42L, 1L),
CheckpointOptions.forConfig(
CheckpointType.FULL_CHECKPOINT,
getDefault(),
true,
false,
0L)))
.isInstanceOf(IllegalStateException.class)
.hasMessage(
"Configured state backend (OnlyIncrementalStateBackend) does not"
+ " support enforcing a full snapshot. If you are restoring in"
+ " NO_CLAIM mode, please consider choosing CLAIM mode.");
}
} |
@Override
public String getParameter(String name) {
String[] values = stringMap.get(name);
if (values == null || values.length == 0) {
return null;
}
return values[0];
} | @Test
void testGetParameterEmpty() {
assertNull(reuseUploadFileHttpServletRequest.getParameter("nonExistentParam"));
} |
@SuppressWarnings("unchecked")
public <T extends Metric> T register(String name, T metric) throws IllegalArgumentException {
if (metric == null) {
throw new NullPointerException("metric == null");
}
if (metric instanceof MetricRegistry) {
final MetricRegistry childRegistry = (MetricRegistry) metric;
final String childName = name;
childRegistry.addListener(new MetricRegistryListener() {
@Override
public void onGaugeAdded(String name, Gauge<?> gauge) {
register(name(childName, name), gauge);
}
@Override
public void onGaugeRemoved(String name) {
remove(name(childName, name));
}
@Override
public void onCounterAdded(String name, Counter counter) {
register(name(childName, name), counter);
}
@Override
public void onCounterRemoved(String name) {
remove(name(childName, name));
}
@Override
public void onHistogramAdded(String name, Histogram histogram) {
register(name(childName, name), histogram);
}
@Override
public void onHistogramRemoved(String name) {
remove(name(childName, name));
}
@Override
public void onMeterAdded(String name, Meter meter) {
register(name(childName, name), meter);
}
@Override
public void onMeterRemoved(String name) {
remove(name(childName, name));
}
@Override
public void onTimerAdded(String name, Timer timer) {
register(name(childName, name), timer);
}
@Override
public void onTimerRemoved(String name) {
remove(name(childName, name));
}
});
} else if (metric instanceof MetricSet) {
registerAll(name, (MetricSet) metric);
} else {
final Metric existing = metrics.putIfAbsent(name, metric);
if (existing == null) {
onMetricAdded(name, metric);
} else {
throw new IllegalArgumentException("A metric named " + name + " already exists");
}
}
return metric;
} | @Test
public void registeringATimerTriggersANotification() {
assertThat(registry.register("thing", timer))
.isEqualTo(timer);
verify(listener).onTimerAdded("thing", timer);
} |
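The MetricRegistry branch above is what makes nested registries work: once a child registry is registered under a prefix, metrics added to the child are mirrored into the parent as "prefix.name", and removals propagate the same way. A small usage sketch of that behavior as implied by the listener wiring:

import com.codahale.metrics.MetricRegistry;

public class NestedRegistryDemo {
    public static void main(String[] args) {
        MetricRegistry parent = new MetricRegistry();
        MetricRegistry child = new MetricRegistry();
        parent.register("child", child);
        // Added to the child, surfaced in the parent as "child.requests".
        child.counter("requests").inc();
        System.out.println(parent.getCounters().keySet()); // [child.requests]
    }
} |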
public void prepareIndices(final String idField, final Collection<String> sortFields, final Collection<String> caseInsensitiveStringSortFields) {
if (!sortFields.containsAll(caseInsensitiveStringSortFields)) {
throw new IllegalArgumentException("Case Insensitive String Sort Fields should be a subset of all Sort Fields ");
}
final var existingIndices = db.listIndexes();
for (String sortField : sortFields) {
if (!sortField.equals(idField)) { //id has index by default
final var existingIndex = getExistingIndex(existingIndices, sortField);
if (caseInsensitiveStringSortFields.contains(sortField)) { //index string fields with collation for more efficient case-insensitive sorting
if (existingIndex.isEmpty()) {
createCaseInsensitiveStringIndex(sortField);
} else if (existingIndex.get().get(COLLATION_KEY) == null) {
//replace simple index with "collation" index
dropIndex(sortField);
createCaseInsensitiveStringIndex(sortField);
}
} else {
if (existingIndex.isEmpty()) {
createSingleFieldIndex(sortField);
} else if (existingIndex.get().get(COLLATION_KEY) != null) {
//replace "collation" index with simple one
dropIndex(sortField);
createSingleFieldIndex(sortField);
}
}
}
}
} | @Test
void createsCollationIndexIfDoesNotExists() {
toTest.prepareIndices("id", List.of("summary"), List.of("summary"));
verify(db).createIndex(eq(Indexes.ascending("summary")), argThat(indexOptions -> indexOptions.getCollation().getLocale().equals("en")));
} |
public void start(
@Nullable Object key,
Work work,
WindmillStateReader stateReader,
SideInputStateFetcher sideInputStateFetcher,
OperationalLimits operationalLimits,
Windmill.WorkItemCommitRequest.Builder outputBuilder) {
this.key = key;
this.work = work;
this.computationKey = WindmillComputationKey.create(computationId, work.getShardedKey());
this.sideInputStateFetcher = sideInputStateFetcher;
this.operationalLimits = operationalLimits;
this.outputBuilder = outputBuilder;
this.sideInputCache.clear();
clearSinkFullHint();
Instant processingTime = computeProcessingTime(work.getWorkItem().getTimers().getTimersList());
Collection<? extends StepContext> stepContexts = getAllStepContexts();
if (!stepContexts.isEmpty()) {
// This must only be created once for the workItem, as token validation will fail if the same
// work token is reused.
WindmillStateCache.ForKey cacheForKey =
stateCache.forKey(getComputationKey(), getWorkItem().getCacheToken(), getWorkToken());
for (StepContext stepContext : stepContexts) {
stepContext.start(stateReader, processingTime, cacheForKey, work.watermarks());
}
}
} | @Test(timeout = 2000)
public void stateSamplingInStreaming() {
// Test that when writing on one thread and reading from another, updates always eventually
// reach the reading thread.
StreamingModeExecutionState state =
new StreamingModeExecutionState(
NameContextsForTests.nameContextForTest(), "testState", null, NoopProfileScope.NOOP);
ExecutionStateSampler sampler = ExecutionStateSampler.newForTest();
try {
sampler.start();
ExecutionStateTracker tracker = new ExecutionStateTracker(sampler);
Thread executionThread = new Thread();
executionThread.setName("looping-thread-for-test");
tracker.activate(executionThread);
tracker.enterState(state);
// Wait for the state to be incremented 3 times
for (int i = 0; i < 3; i++) {
CounterUpdate update = null;
while (update == null) {
update = state.extractUpdate(false);
}
long newValue = splitIntToLong(update.getInteger());
assertThat(newValue, Matchers.greaterThan(0L));
}
} finally {
sampler.stop();
}
} |
public static String encodeReferenceToken(final String s) {
String encoded = s;
for (Map.Entry<String,String> sub : SUBSTITUTIONS.entrySet()) {
encoded = encoded.replace(sub.getKey(), sub.getValue());
}
return encoded;
} | @Test
public void testEncodeReferenceToken() {
assertThat(JsonPointerUtils.encodeReferenceToken("com/vsv#...?"), is("com~1vsv~2~3~3~3~4"));
assertThat(JsonPointerUtils.encodeReferenceToken("~1~2~01~3~4"), is("~01~02~001~03~04"));
} |
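The two assertions fully determine the substitution table and its order: from the first, / maps to ~1, # to ~2, . to ~3 and ? to ~4; from the second, a literal ~ must become ~0, and it has to be replaced first so the tildes produced by the other mappings are never re-escaped. A sketch of a SUBSTITUTIONS map consistent with both assertions (an inference from the test, not the actual source):

import java.util.LinkedHashMap;
import java.util.Map;

final class InferredSubstitutions {
    // Inferred from the test expectations: insertion order matters, so a
    // LinkedHashMap with '~' escaped first, before mappings that emit '~'.
    static final Map<String, String> SUBSTITUTIONS = new LinkedHashMap<>();
    static {
        SUBSTITUTIONS.put("~", "~0");
        SUBSTITUTIONS.put("/", "~1");
        SUBSTITUTIONS.put("#", "~2");
        SUBSTITUTIONS.put(".", "~3");
        SUBSTITUTIONS.put("?", "~4");
    }
} |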
@Override
public LogicalSchema getSchema() {
return getSource().getSchema();
} | @Test
public void shouldThrowKeyExpressionThatDoestCoverKey_multipleDisjuncts() {
// Given:
when(source.getSchema()).thenReturn(INPUT_SCHEMA);
final Expression keyExp1 = new ComparisonExpression(
Type.EQUAL,
new UnqualifiedColumnReferenceExp(ColumnName.of("WINDOWSTART")),
new IntegerLiteral(1)
);
final Expression keyExp2 = new ComparisonExpression(
Type.EQUAL,
new UnqualifiedColumnReferenceExp(ColumnName.of("K")),
new IntegerLiteral(2)
);
final Expression expression = new LogicalBinaryExpression(
LogicalBinaryExpression.Type.OR,
keyExp1,
keyExp2
);
// When:
final KsqlException e = assertThrows(
KsqlException.class,
() -> new QueryFilterNode(
NODE_ID,
source,
expression,
metaStore,
ksqlConfig,
true,
plannerOptions
));
// Then:
assertThat(e.getMessage(), containsString("WHERE clause missing key column for disjunct: "
+ "(WINDOWSTART = 1)"));
} |
@Override
public <R> List<R> queryMany(String sql, Object[] args, RowMapper<R> mapper) {
return queryMany(jdbcTemplate, sql, args, mapper);
} | @Test
void testQueryMany6() {
final String sql = "SELECT * FROM config_info WHERE id >= ? AND id <= ?";
final Object[] args = new Object[] {1, 2};
MockConfigInfo configInfo1 = new MockConfigInfo();
configInfo1.setId(1);
MockConfigInfo configInfo2 = new MockConfigInfo();
configInfo2.setId(2);
List<MockConfigInfo> configInfos = new ArrayList<>();
configInfos.add(configInfo1);
configInfos.add(configInfo2);
when(tempJdbcTemplate.query(eq(sql), eq(args), any(RowMapper.class))).thenReturn(configInfos);
assertEquals(operate.queryMany(tempJdbcTemplate, sql, args, rowMapper), configInfos);
} |
@Override
public ObjectNode encode(KubevirtNode node, CodecContext context) {
checkNotNull(node, "Kubevirt node cannot be null");
ObjectNode result = context.mapper().createObjectNode()
.put(HOST_NAME, node.hostname())
.put(TYPE, node.type().name())
.put(STATE, node.state().name())
.put(MANAGEMENT_IP, node.managementIp().toString());
// serialize integration bridge config
if (node.intgBridge() != null) {
result.put(INTEGRATION_BRIDGE, node.intgBridge().toString());
}
// serialize tunnel bridge config
if (node.tunBridge() != null) {
result.put(TUNNEL_BRIDGE, node.tunBridge().toString());
}
// serialize data IP only if it presents
if (node.dataIp() != null) {
result.put(DATA_IP, node.dataIp().toString());
}
// serialize physical interfaces, it is valid only if any of physical interface presents
if (node.phyIntfs() != null && !node.phyIntfs().isEmpty()) {
ArrayNode phyIntfs = context.mapper().createArrayNode();
node.phyIntfs().forEach(phyIntf -> {
ObjectNode phyIntfJson =
context.codec(KubevirtPhyInterface.class).encode(phyIntf, context);
phyIntfs.add(phyIntfJson);
});
result.set(PHYSICAL_INTERFACES, phyIntfs);
}
// serialize external bridge if exist
if (node.gatewayBridgeName() != null) {
result.put(GATEWAY_BRIDGE_NAME, node.gatewayBridgeName());
}
return result;
} | @Test
public void testKubevirtGatweayNodeEncode() {
KubevirtNode node = DefaultKubevirtNode.builder()
.hostname("gateway")
.type(KubevirtNode.Type.GATEWAY)
.state(KubevirtNodeState.INIT)
.managementIp(IpAddress.valueOf("10.10.10.1"))
.intgBridge(DeviceId.deviceId("br-int"))
.tunBridge(DeviceId.deviceId("br-tun"))
.dataIp(IpAddress.valueOf("20.20.20.2"))
.gatewayBridgeName("gateway")
.build();
ObjectNode nodeJson = kubevirtNodeCodec.encode(node, context);
assertThat(nodeJson, matchesKubevirtNode(node));
} |
public static MongoIndexRange create(ObjectId id,
String indexName,
DateTime begin,
DateTime end,
DateTime calculatedAt,
int calculationDuration,
List<String> streamIds) {
return new AutoValue_MongoIndexRange(id, indexName, begin, end, calculatedAt, calculationDuration, streamIds);
} | @Test
public void testCreate() throws Exception {
String indexName = "test";
DateTime begin = new DateTime(2015, 1, 1, 0, 0, DateTimeZone.UTC);
DateTime end = new DateTime(2015, 2, 1, 0, 0, DateTimeZone.UTC);
DateTime calculatedAt = new DateTime(2015, 2, 1, 0, 0, DateTimeZone.UTC);
int calculationDuration = 42;
MongoIndexRange indexRange = MongoIndexRange.create(indexName, begin, end, calculatedAt, calculationDuration);
assertThat(indexRange.indexName()).isEqualTo(indexName);
assertThat(indexRange.begin()).isEqualTo(begin);
assertThat(indexRange.end()).isEqualTo(end);
assertThat(indexRange.calculatedAt()).isEqualTo(calculatedAt);
assertThat(indexRange.calculationDuration()).isEqualTo(calculationDuration);
} |
public static Set<Result> anaylze(String log) {
Set<Result> results = new HashSet<>();
for (Rule rule : Rule.values()) {
Matcher matcher = rule.pattern.matcher(log);
if (matcher.find()) {
results.add(new Result(rule, log, matcher));
}
}
return results;
} | @Test
public void incompleteForgeInstallation3() throws IOException {
CrashReportAnalyzer.Result result = findResultByRule(
CrashReportAnalyzer.anaylze(loadLog("/logs/incomplete_forge_installation3.txt")),
CrashReportAnalyzer.Rule.INCOMPLETE_FORGE_INSTALLATION);
} |
@VisibleForTesting
public WeightedPolicyInfo getWeightedPolicyInfo() {
return weightedPolicyInfo;
} | @Test
public void testPolicyInfoSetCorrectly() throws Exception {
serializeAndDeserializePolicyManager(wfp, expectedPolicyManager,
expectedAMRMProxyPolicy, expectedRouterPolicy);
// check the policyInfo propagates through ser/der correctly
Assert.assertEquals(((WeightedHomePolicyManager) wfp)
.getWeightedPolicyInfo(), policyInfo);
} |
public static LocalCacheManager create(CacheManagerOptions options,
PageMetaStore pageMetaStore)
throws IOException {
LocalCacheManager manager = new LocalCacheManager(options, pageMetaStore);
List<PageStoreDir> pageStoreDirs = pageMetaStore.getStoreDirs();
if (manager.mInitService.isPresent()) {
manager.mInitService.get().submit(() -> {
try {
manager.restoreOrInit(pageStoreDirs);
} catch (IOException e) {
LOG.error("Failed to restore LocalCacheManager", e);
}
});
} else {
manager.restoreOrInit(pageStoreDirs);
}
return manager;
} | @Test
public void createNonexistentRootDirAsyncRestore() throws Exception {
mConf.set(PropertyKey.USER_CLIENT_CACHE_ASYNC_RESTORE_ENABLED, true);
mConf.set(PropertyKey.USER_CLIENT_CACHE_DIRS,
PathUtils.concatPath(mTemp.getRoot().getAbsolutePath(), UUID.randomUUID().toString()));
mPageMetaStore =
new DefaultPageMetaStore(PageStoreDir.createPageStoreDirs(mCacheManagerOptions));
assertNotNull(LocalCacheManager.create(mCacheManagerOptions, mPageMetaStore));
} |
protected List<FileStatus> listStatus(JobContext job
) throws IOException {
Path[] dirs = getInputPaths(job);
if (dirs.length == 0) {
throw new IOException("No input paths specified in job");
}
// get tokens for all the required FileSystems..
TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs,
job.getConfiguration());
// Whether we need to recursive look into the directory structure
boolean recursive = getInputDirRecursive(job);
// creates a MultiPathFilter with the hiddenFileFilter and the
// user provided one (if any).
List<PathFilter> filters = new ArrayList<PathFilter>();
filters.add(hiddenFileFilter);
PathFilter jobFilter = getInputPathFilter(job);
if (jobFilter != null) {
filters.add(jobFilter);
}
PathFilter inputFilter = new MultiPathFilter(filters);
List<FileStatus> result = null;
int numThreads = job.getConfiguration().getInt(LIST_STATUS_NUM_THREADS,
DEFAULT_LIST_STATUS_NUM_THREADS);
StopWatch sw = new StopWatch().start();
if (numThreads == 1) {
result = singleThreadedListStatus(job, dirs, inputFilter, recursive);
} else {
Iterable<FileStatus> locatedFiles = null;
try {
LocatedFileStatusFetcher locatedFileStatusFetcher = new LocatedFileStatusFetcher(
job.getConfiguration(), dirs, recursive, inputFilter, true);
locatedFiles = locatedFileStatusFetcher.getFileStatuses();
} catch (InterruptedException e) {
throw (IOException)
new InterruptedIOException(
"Interrupted while getting file statuses")
.initCause(e);
}
result = Lists.newArrayList(locatedFiles);
}
sw.stop();
if (LOG.isDebugEnabled()) {
LOG.debug("Time taken to get FileStatuses: "
+ sw.now(TimeUnit.MILLISECONDS));
}
LOG.info("Total input files to process : " + result.size());
return result;
} | @Test
public void testListStatusSimple() throws IOException {
Configuration conf = new Configuration();
conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);
List<Path> expectedPaths = configureTestSimple(conf, localFs);
Job job = Job.getInstance(conf);
FileInputFormat<?, ?> fif = new TextInputFormat();
List<FileStatus> statuses = fif.listStatus(job);
verifyFileStatuses(expectedPaths, statuses, localFs);
} |
static IndexComponentFilter findBestComponentFilter(
IndexType type,
List<IndexComponentCandidate> candidates,
QueryDataType converterType
) {
// First look for equality filters, assuming that they are more selective than ranges
IndexComponentFilter equalityComponentFilter = searchForEquality(candidates, converterType);
if (equalityComponentFilter != null) {
return equalityComponentFilter;
}
// Look for ranges filters
return searchForRange(type, candidates, converterType);
} | @Test
public void when_bothBoundsRangeFilterPresentAndNoBetterChoiceAndSortedIndex_then_itIsUsed() {
IndexComponentFilter bestFilter = IndexComponentFilterResolver.findBestComponentFilter(
indexType, WITH_BOTH_BOUNDS_AS_BEST_CANDIDATES, QUERY_DATA_TYPE
);
if (indexType == IndexType.SORTED) {
assertEquals(bestFilter.getFilter(), BOTH_BOUNDS_RANGE_CANDIDATE.getFilter());
} else {
assertNull(bestFilter);
}
} |
@Override
public void process(Tuple input) {
String key = filterMapper.getKeyFromTuple(input);
boolean found;
JedisCommandsContainer jedisCommand = null;
try {
jedisCommand = getInstance();
switch (dataType) {
case STRING:
found = jedisCommand.exists(key);
break;
case SET:
found = jedisCommand.sismember(additionalKey, key);
break;
case HASH:
found = jedisCommand.hexists(additionalKey, key);
break;
case SORTED_SET:
found = jedisCommand.zrank(additionalKey, key) != null;
break;
case HYPER_LOG_LOG:
found = jedisCommand.pfcount(key) > 0;
break;
case GEO:
List<GeoCoordinate> geopos = jedisCommand.geopos(additionalKey, key);
if (geopos == null || geopos.isEmpty()) {
found = false;
} else {
// If any entry is NOT null, then we have a match.
found = geopos.stream()
.anyMatch(Objects::nonNull);
}
break;
default:
throw new IllegalArgumentException("Cannot process such data type: " + dataType);
}
if (found) {
collector.emit(input, input.getValues());
}
collector.ack(input);
} catch (Exception e) {
this.collector.reportError(e);
this.collector.fail(input);
}
} | @Test
void smokeTest_sismember_notMember() {
// Define input key
final String setKey = "ThisIsMySet";
final String inputKey = "ThisIsMyKey";
// Create an input tuple
final Map<String, Object> values = new HashMap<>();
values.put("key", inputKey);
values.put("value", "ThisIsMyValue");
final Tuple tuple = new StubTuple(values);
final JedisPoolConfig config = configBuilder.build();
final TestMapper mapper = new TestMapper(SET, setKey);
final RedisFilterBolt bolt = new RedisFilterBolt(config, mapper);
bolt.prepare(new HashMap<>(), topologyContext, new OutputCollector(outputCollector));
bolt.process(tuple);
// Verify the bolt filtered the input tuple.
verifyTupleFiltered();
} |
public static JavaToSqlTypeConverter javaToSqlConverter() {
return JAVA_TO_SQL_CONVERTER;
} | @Test
public void shouldGetSqlArrayForImplementationsOfJavaList() {
ImmutableList.<Class<?>>of(
ArrayList.class,
ImmutableList.class
).forEach(javaType -> {
assertThat(javaToSqlConverter().toSqlType(javaType), is(SqlBaseType.ARRAY));
});
} |
public static <K, V> String joinIgnoreNull(Map<K, V> map, String separator, String keyValueSeparator, String... otherParams) {
return join(map, separator, keyValueSeparator, true, otherParams);
} | @Test
public void joinIgnoreNullTest() {
final Dict v1 = Dict.of().set("id", 12).set("name", "张三").set("age", null);
final String s = MapUtil.joinIgnoreNull(v1, ",", "=");
assertEquals("id=12,name=张三", s);
} |
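A minimal usage sketch with a plain JDK map (values are hypothetical); only the joinIgnoreNull signature shown above is assumed.

static String joinExample() {
    java.util.Map<String, Object> params = new java.util.LinkedHashMap<>();
    params.put("id", 12);
    params.put("name", null); // skipped: entries with null values are ignored
    return MapUtil.joinIgnoreNull(params, ",", "="); // -> "id=12"
}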
public void setSourcePort(int sourcePort) {
this.sourcePort = sourcePort;
} | @Test
void testSetSourcePort() {
assertEquals(0, addressContext.getSourcePort());
addressContext.setSourcePort(8080);
assertEquals(8080, addressContext.getSourcePort());
} |
public String getShardIterator(SimplifiedKinesisClient kinesisClient)
throws TransientKinesisException {
if (checkpointIsInTheMiddleOfAUserRecord()) {
return kinesisClient.getShardIterator(
streamName, shardId, AT_SEQUENCE_NUMBER, sequenceNumber, null);
}
return kinesisClient.getShardIterator(
streamName, shardId, shardIteratorType, sequenceNumber, timestamp);
} | @Test
public void testProvidingShardIterator() throws IOException, TransientKinesisException {
assertThat(checkpoint(AT_SEQUENCE_NUMBER, "100", null).getShardIterator(client))
.isEqualTo(AT_SEQUENCE_SHARD_IT);
assertThat(checkpoint(AFTER_SEQUENCE_NUMBER, "100", null).getShardIterator(client))
.isEqualTo(AFTER_SEQUENCE_SHARD_IT);
assertThat(checkpoint(AT_SEQUENCE_NUMBER, "100", 10L).getShardIterator(client))
.isEqualTo(AT_SEQUENCE_SHARD_IT);
assertThat(checkpoint(AFTER_SEQUENCE_NUMBER, "100", 10L).getShardIterator(client))
.isEqualTo(AT_SEQUENCE_SHARD_IT);
} |
@Override
public Path move(final Path source, final Path target, final TransferStatus status, final Delete.Callback callback,
final ConnectionCallback connectionCallback) throws BackgroundException {
if(containerService.isContainer(source)) {
if(new SimplePathPredicate(source.getParent()).test(target.getParent())) {
// Rename only
return proxy.move(source, target, status, callback, connectionCallback);
}
}
if(new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(source) ^ new SDSTripleCryptEncryptorFeature(session, nodeid).isEncrypted(containerService.getContainer(target))) {
// Moving into or from an encrypted room
final Copy copy = new SDSDelegatingCopyFeature(session, nodeid, new SDSCopyFeature(session, nodeid));
if(log.isDebugEnabled()) {
log.debug(String.format("Move %s to %s using copy feature %s", source, target, copy));
}
final Path c = copy.copy(source, target, status, connectionCallback, new DisabledStreamListener());
// Delete source file after copy is complete
final Delete delete = new SDSDeleteFeature(session, nodeid);
if(delete.isSupported(source)) {
log.warn(String.format("Delete source %s copied to %s", source, target));
delete.delete(Collections.singletonMap(source, status), connectionCallback, callback);
}
return c;
}
else {
return proxy.move(source, target, status, callback, connectionCallback);
}
} | @Test(expected = NotfoundException.class)
public void testMoveNotFound() throws Exception {
final Path room = new Path(
new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume));
final Path test = new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session);
new SDSMoveFeature(session, nodeid).move(test, new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
} |
public static Counter allocate(
final Aeron aeron,
final MutableDirectBuffer tempBuffer,
final int typeId,
final String name,
final long archiveId)
{
int index = 0;
tempBuffer.putLong(index, archiveId);
index += SIZE_OF_LONG;
final int keyLength = index;
index += tempBuffer.putStringWithoutLengthAscii(index, name);
index += appendArchiveIdLabel(tempBuffer, index, archiveId);
return aeron.addCounter(typeId, tempBuffer, 0, keyLength, tempBuffer, keyLength, index - keyLength);
} | @Test
void allocateCounterUsingAeronClientIdAsArchiveIdentifier()
{
final int typeId = 999;
final String name = "<test counter>";
final long archiveId = -1832178932131546L;
final String expectedLabel = name + " - archiveId=" + archiveId;
final Aeron aeron = mock(Aeron.class);
final MutableDirectBuffer tempBuffer = new UnsafeBuffer(new byte[200]);
final Counter counter = mock(Counter.class);
when(aeron.clientId()).thenReturn(archiveId);
when(aeron.addCounter(typeId, tempBuffer, 0, SIZE_OF_LONG, tempBuffer, SIZE_OF_LONG, expectedLabel.length()))
.thenReturn(counter);
final Counter result = ArchiveCounters.allocate(aeron, tempBuffer, typeId, name, aeron.clientId());
assertSame(counter, result);
final InOrder inOrder = inOrder(aeron);
inOrder.verify(aeron).clientId();
inOrder.verify(aeron).addCounter(anyInt(), any(), anyInt(), anyInt(), any(), anyInt(), anyInt());
inOrder.verifyNoMoreInteractions();
assertEquals(archiveId, tempBuffer.getLong(0));
assertEquals(expectedLabel, tempBuffer.getStringWithoutLengthAscii(SIZE_OF_LONG, expectedLabel.length()));
} |
@Nullable
public Float getFloatValue(@FloatFormat final int formatType,
@IntRange(from = 0) final int offset) {
if ((offset + getTypeLen(formatType)) > size()) return null;
switch (formatType) {
case FORMAT_SFLOAT -> {
if (mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFE)
return Float.POSITIVE_INFINITY;
if ((mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFF) ||
(mValue[offset + 1] == 0x08 && mValue[offset] == 0x00) ||
(mValue[offset + 1] == 0x08 && mValue[offset] == 0x01))
return Float.NaN;
if (mValue[offset + 1] == 0x08 && mValue[offset] == 0x02)
return Float.NEGATIVE_INFINITY;
return bytesToFloat(mValue[offset], mValue[offset + 1]);
}
case FORMAT_FLOAT -> {
if (mValue[offset + 3] == 0x00) {
if (mValue[offset + 2] == 0x7F && mValue[offset + 1] == (byte) 0xFF) {
if (mValue[offset] == (byte) 0xFE)
return Float.POSITIVE_INFINITY;
if (mValue[offset] == (byte) 0xFF)
return Float.NaN;
} else if (mValue[offset + 2] == (byte) 0x80 && mValue[offset + 1] == 0x00) {
if (mValue[offset] == 0x00 || mValue[offset] == 0x01)
return Float.NaN;
if (mValue[offset] == 0x02)
return Float.NEGATIVE_INFINITY;
}
}
return bytesToFloat(mValue[offset], mValue[offset + 1],
mValue[offset + 2], mValue[offset + 3]);
}
}
return null;
} | @Test
public void setValue_SFLOAT_positiveInfinity() {
final MutableData data = new MutableData(new byte[2]);
data.setValue(Float.POSITIVE_INFINITY, Data.FORMAT_SFLOAT, 0);
final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 0);
assertEquals(Float.POSITIVE_INFINITY, value, 0.00);
} |
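A hedged sketch of the SFLOAT reserved values handled above; it assumes MutableData inherits getFloatValue from Data, as the test's usage suggests. Byte order is little-endian, matching the offset/offset+1 accesses in the implementation.

static void sfloatReservedValues() {
    MutableData inf = new MutableData(new byte[] { (byte) 0xFE, 0x07 }); // 0x07FE
    Float v1 = inf.getFloatValue(Data.FORMAT_SFLOAT, 0); // Float.POSITIVE_INFINITY
    MutableData nan = new MutableData(new byte[] { (byte) 0xFF, 0x07 }); // 0x07FF
    Float v2 = nan.getFloatValue(Data.FORMAT_SFLOAT, 0); // Float.NaN
}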
public static SQLException toSQLException(final Exception cause, final DatabaseType databaseType) {
if (cause instanceof SQLException) {
return (SQLException) cause;
}
if (cause instanceof ShardingSphereSQLException) {
return ((ShardingSphereSQLException) cause).toSQLException();
}
if (cause instanceof SQLDialectException) {
if (cause instanceof DatabaseProtocolException) {
return new DatabaseProtocolSQLException(cause.getMessage()).toSQLException();
}
Optional<SQLDialectExceptionMapper> dialectExceptionMapper = DatabaseTypedSPILoader.findService(SQLDialectExceptionMapper.class, databaseType);
if (dialectExceptionMapper.isPresent()) {
return dialectExceptionMapper.get().convert((SQLDialectException) cause);
}
}
if (cause instanceof ShardingSphereServerException) {
return new ServerSQLException(cause).toSQLException();
}
return new UnknownSQLException(cause).toSQLException();
} | @Test
void assertToSQLExceptionWithSQLException() {
SQLException cause = new SQLException("");
assertThat(SQLExceptionTransformEngine.toSQLException(cause, databaseType), is(cause));
} |
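A small sketch of the fall-through branch, assuming a DatabaseType instance is available as in the test above.

static java.sql.SQLException wrapUnknown(DatabaseType databaseType) {
    // A plain RuntimeException matches none of the dedicated branches,
    // so it is wrapped in an UnknownSQLException and converted.
    return SQLExceptionTransformEngine.toSQLException(new RuntimeException("boom"), databaseType);
}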
public void createPipe(CreatePipeStmt stmt) throws DdlException {
try {
lock.writeLock().lock();
Pair<Long, String> dbIdAndName = resolvePipeNameUnlock(stmt.getPipeName());
boolean existed = nameToId.containsKey(dbIdAndName);
if (existed) {
if (!stmt.isIfNotExists() && !stmt.isReplace()) {
ErrorReport.reportSemanticException(ErrorCode.ERR_PIPE_EXISTS);
}
if (stmt.isIfNotExists()) {
return;
} else if (stmt.isReplace()) {
LOG.info("Pipe {} already exist, replace it with a new one", stmt.getPipeName());
Pipe pipe = pipeMap.get(nameToId.get(dbIdAndName));
dropPipeImpl(pipe);
}
}
// Add pipe
long id = GlobalStateMgr.getCurrentState().getNextId();
Pipe pipe = Pipe.fromStatement(id, stmt);
putPipe(pipe);
repo.addPipe(pipe);
} finally {
lock.writeLock().unlock();
}
} | @Test
public void resumeAfterError() throws Exception {
final String pipeName = "p3";
String sql = "create pipe p3 as insert into tbl1 select * from files('path'='fake://pipe', 'format'='parquet')";
createPipe(sql);
mockPollError(1);
Pipe p3 = getPipe(pipeName);
p3.poll();
// get error
Assert.assertEquals(Pipe.State.ERROR, p3.getState());
// resume after error
resumePipe(pipeName);
Assert.assertEquals(Pipe.State.RUNNING, p3.getState());
Assert.assertEquals(0, p3.getFailedTaskExecutionCount());
} |
@Deprecated
@NonNull
public static WriteRequest newWriteRequest(
@Nullable final BluetoothGattCharacteristic characteristic,
@Nullable final byte[] value) {
return new WriteRequest(Type.WRITE, characteristic, value, 0,
value != null ? value.length : 0,
characteristic != null ?
characteristic.getWriteType() :
BluetoothGattCharacteristic.WRITE_TYPE_DEFAULT);
} | @Test
public void split_highMtu() {
final int MTU_HIGH = 276;
final WriteRequest request = Request.newWriteRequest(characteristic, text.getBytes(), BluetoothGattCharacteristic.WRITE_TYPE_NO_RESPONSE)
.split();
chunk = request.getData(MTU_HIGH);
// Verify the chunk
assertNotNull(chunk);
assertEquals(MTU_HIGH - 3, chunk.length);
final String expected = text.substring(0, MTU_HIGH - 3);
assertArrayEquals(expected.getBytes(), chunk);
} |
public static String resultToDelimitedString(SampleEvent event) {
return resultToDelimitedString(event, event.getResult().getSaveConfig()
.getDelimiter());
} | @Test
// sample format should not change unexpectedly
// if this test fails, check whether the default was intentionally changed or not
public void testSample() throws MalformedURLException {
final String RESULT = "1,2,3,4,5,6,7,true,,8,9,10,11,https://jmeter.apache.org,12,13,14";
SampleResult result = new SampleResult();
result.setSaveConfig(new SampleSaveConfiguration());
result.setStampAndTime(1, 2);
result.setSampleLabel("3");
result.setResponseCode("4");
result.setResponseMessage("5");
result.setThreadName("6");
result.setDataType("7");
result.setSuccessful(true);
result.setBytes(8L);
result.setURL(new URL("https://jmeter.apache.org"));
result.setSentBytes(9);
result.setGroupThreads(10);
result.setAllThreads(11);
result.setLatency(12);
result.setIdleTime(13);
result.setConnectTime(14);
assertEquals(RESULT, CSVSaveService.resultToDelimitedString(new SampleEvent(result,"")), "Result text has changed");
} |
@WithSpan
@Override
public SearchType.Result doExtractResult(SearchJob job, Query query, MessageList searchType, org.graylog.shaded.opensearch2.org.opensearch.action.search.SearchResponse result, Aggregations aggregations, OSGeneratedQueryContext queryContext) {
final List<ResultMessageSummary> messages = StreamSupport.stream(result.getHits().spliterator(), false)
.map(this::resultMessageFromSearchHit)
.map((resultMessage) -> ResultMessageSummary.create(resultMessage.highlightRanges, resultMessage.getMessage().getFields(), resultMessage.getIndex()))
.collect(Collectors.toList());
final String queryString = query.query().queryString();
final DateTime from = query.effectiveTimeRange(searchType).getFrom();
final DateTime to = query.effectiveTimeRange(searchType).getTo();
final SearchResponse searchResponse = SearchResponse.create(
queryString,
queryString,
Collections.emptySet(),
messages,
Collections.emptySet(),
0,
result.getHits().getTotalHits().value,
from,
to
);
final SearchResponse decoratedSearchResponse = decoratorProcessor.decorateSearchResponse(searchResponse, searchType.decorators());
final MessageList.Result.Builder resultBuilder = MessageList.Result.result(searchType.id())
.messages(decoratedSearchResponse.messages())
.effectiveTimerange(AbsoluteRange.create(from, to))
.totalResults(decoratedSearchResponse.totalResults());
return searchType.name().map(resultBuilder::name).orElse(resultBuilder).build();
} | @Test
public void includesCustomNameInResultIfPresent() {
final OSMessageList esMessageList = new OSMessageList(new LegacyDecoratorProcessor.Fake(),
new TestResultMessageFactory(), false);
final MessageList messageList = someMessageList().toBuilder().name("customResult").build();
final org.graylog.shaded.opensearch2.org.opensearch.action.search.SearchResponse result =
mock(org.graylog.shaded.opensearch2.org.opensearch.action.search.SearchResponse.class);
when(result.getHits()).thenReturn(SearchHits.empty());
final SearchType.Result searchTypeResult = esMessageList.doExtractResult(null, someQuery(), messageList, result, null, null);
assertThat(searchTypeResult.name()).contains("customResult");
} |
public void readWithKnownLength(DataInput in, int len) throws IOException {
ensureCapacity(len);
in.readFully(bytes, 0, len);
length = len;
textLength = -1;
} | @Test
public void testReadWithKnownLength() throws IOException {
String line = "hello world";
byte[] inputBytes = line.getBytes(StandardCharsets.UTF_8);
DataInputBuffer in = new DataInputBuffer();
Text text = new Text();
in.reset(inputBytes, inputBytes.length);
text.readWithKnownLength(in, 5);
assertEquals("hello", text.toString());
assertEquals(5, text.getTextLength());
// Read longer length, make sure it lengthens
in.reset(inputBytes, inputBytes.length);
text.readWithKnownLength(in, 7);
assertEquals("hello w", text.toString());
assertEquals(7, text.getTextLength());
// Read shorter length, make sure it shortens
in.reset(inputBytes, inputBytes.length);
text.readWithKnownLength(in, 2);
assertEquals("he", text.toString());
assertEquals(2, text.getTextLength());
} |
public void computeBeScanRanges() {
List<ComputeNode> nodeList;
if (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA) {
long warehouseId = ConnectContext.get().getCurrentWarehouseId();
List<Long> computeNodeIds = GlobalStateMgr.getCurrentState().getWarehouseMgr().getAllComputeNodeIds(warehouseId);
nodeList = computeNodeIds.stream()
.map(id -> GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo().getBackendOrComputeNode(id))
.collect(Collectors.toList());
} else {
nodeList = Lists.newArrayList();
nodeList.addAll(GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo().getBackends());
nodeList.addAll(GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo().getComputeNodes());
}
for (ComputeNode node : nodeList) {
            // if the user specifies a BE id, we try to scan all BEs (including bad ones)
            // if the user doesn't specify a BE id, we only scan live BEs
if ((node.isAlive() && beId == null) || (beId != null && beId.equals(node.getId()))) {
if (beScanRanges == null) {
beScanRanges = Lists.newArrayList();
}
TScanRangeLocations scanRangeLocations = new TScanRangeLocations();
TScanRangeLocation location = new TScanRangeLocation();
location.setBackend_id(node.getId());
location.setServer(new TNetworkAddress(node.getHost(), node.getBePort()));
scanRangeLocations.addToLocations(location);
TScanRange scanRange = new TScanRange();
scanRangeLocations.setScan_range(scanRange);
beScanRanges.add(scanRangeLocations);
}
}
} | @Test
public void testComputeBeScanRanges() {
new MockUp<RunMode>() {
@Mock
public RunMode getCurrentRunMode() {
return RunMode.SHARED_DATA;
}
};
new MockUp<WarehouseManager>() {
@Mock
public List<Long> getAllComputeNodeIds(long warehouseId) {
return Lists.newArrayList(1L);
}
};
new MockUp<SystemInfoService>() {
@Mock
public ComputeNode getBackendOrComputeNode(long nodeId) {
ComputeNode computeNode = new ComputeNode(1L, "127.0.0.1", 9030);
computeNode.setAlive(true);
return computeNode;
}
};
TupleDescriptor desc = new TupleDescriptor(new TupleId(0));
SystemTable table = new SystemTable(0, "fe_metrics", null, null, null);
desc.setTable(table);
SchemaScanNode scanNode = new SchemaScanNode(new PlanNodeId(0), desc);
scanNode.computeBeScanRanges();
} |
public static <T extends Metric> T getOrRegister(MetricRegistry metricRegistry, String name, T newMetric) {
final Metric metric = metricRegistry.getMetrics().get(name);
if (metric != null) {
//noinspection unchecked
return (T) metric;
}
try {
return metricRegistry.register(name, newMetric);
} catch (IllegalArgumentException ignored) {
//noinspection unchecked
return (T) metricRegistry.getMetrics().get(name);
}
} | @Test
public void getOrRegister() {
final MetricRegistry metricRegistry = new MetricRegistry();
final Counter newMetric1 = new Counter();
final Counter newMetric2 = new Counter();
assertThat(MetricUtils.getOrRegister(metricRegistry, "test1", newMetric1)).isEqualTo(newMetric1);
assertThat(MetricUtils.getOrRegister(metricRegistry, "test1", newMetric2)).isEqualTo(newMetric1);
} |
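A usage sketch of the register-or-get idiom above, using Dropwizard's Timer (any Metric works); the metric name is hypothetical.

static Timer requestTimer(MetricRegistry registry) {
    // The first call registers the new Timer; concurrent or later calls with the
    // same name return the instance already held by the registry.
    return MetricUtils.getOrRegister(registry, "requests.latency", new Timer());
}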
public static String readFile(String fileName, Class<?> clazz) throws FileNotFoundException {
return readFile(fileName, clazz, ResourceLocation.CLASSPATH);
} | @Test
public void testReadFile_ClasspathRoot() {
assertThrows(FileNotFoundException.class, () -> {
EmbeddedResourceLoader.readFile("nonexistent.txt", this.getClass(),
EmbeddedResourceLoader.ResourceLocation.CLASSPATH_ROOT);
});
} |
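A hedged happy-path sketch (the resource name and caller class are hypothetical); the two-argument overload above delegates with ResourceLocation.CLASSPATH.

static String loadFixture() throws FileNotFoundException {
    // Resolves "fixture.txt" on the classpath relative to the given class
    return EmbeddedResourceLoader.readFile("fixture.txt", CallerClass.class);
}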
@Override
public Messages process(Messages messages) {
for (final MessageFilter filter : filterRegistry) {
for (Message msg : messages) {
final String timerName = name(filter.getClass(), "executionTime");
final Timer timer = metricRegistry.timer(timerName);
final Timer.Context timerContext = timer.time();
try {
LOG.trace("Applying filter [{}] on message <{}>.", filter.getName(), msg.getId());
if (filter.filter(msg)) {
LOG.debug("Filter [{}] marked message <{}> to be discarded. Dropping message.",
filter.getName(),
msg.getId());
msg.setFilterOut(true);
filteredOutMessages.mark();
messageQueueAcknowledger.acknowledge(msg);
}
} catch (Exception e) {
final String shortError = String.format(Locale.US, "Could not apply filter [%s] on message <%s>",
filter.getName(), msg.getId());
if (LOG.isDebugEnabled()) {
LOG.error("{}:", shortError, e);
} else {
LOG.error("{}:\n{}", shortError, ExceptionUtils.getShortenedStackTrace(e));
}
msg.addProcessingError(new Message.ProcessingError(ProcessingFailureCause.MessageFilterException,
shortError, ExceptionUtils.getRootCauseMessage(e)));
} finally {
final long elapsedNanos = timerContext.stop();
msg.recordTiming(serverStatus, timerName, elapsedNanos);
}
}
}
return messages;
} | @Test
public void testMessagesRecordProcessingFailures() {
final MessageFilter first = new ExceptingMessageFilter();
final Set<MessageFilter> filters = ImmutableSet.of(first);
final MessageFilterChainProcessor processor = new MessageFilterChainProcessor(new MetricRegistry(),
filters,
acknowledger,
serverStatus);
final Message message = messageFactory.createMessage("message", "source", new DateTime(2016, 1, 1, 0, 0, DateTimeZone.UTC));
final Messages result = processor.process(message);
assertThat(result).hasSize(1);
// passed message is mutated, so we can assert on that
assertThat(message.processingErrors()).hasSize(1);
assertThat(message.processingErrors().get(0)).satisfies(pe -> {
assertThat(pe.getCause()).isEqualTo(ProcessingFailureCause.MessageFilterException);
assertThat(pe.getMessage()).startsWith("Could not apply filter [Excepting filter] on message <");
assertThat(pe.getDetails()).isEqualTo("BOOM!");
});
} |
public FEELFnResult<List<Object>> invoke(@ParameterName("list") Object[] lists) {
if ( lists == null ) {
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null"));
}
// spec requires us to return a new list
final List<Object> result = new ArrayList<>();
for ( Object list : lists ) {
if ( list == null ) {
                // TODO review according to spec; original behavior was: return null;
return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "lists", "one of the elements in the list is null"));
} else if ( list instanceof Collection ) {
result.addAll( (Collection) list );
} else {
result.add( list );
}
}
return FEELFnResult.ofResult( result );
} | @Test
void invokeArrayWithList() {
FunctionTestUtil.assertResultList(concatenateFunction.invoke(new Object[]{"test", 2, Arrays.asList(2, 3)}),
Arrays.asList("test", 2, 2, 3));
} |
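A sketch of the flattening behavior, assuming the enclosing class is named ConcatenateFunction as the test field suggests.

static void concatenateSketch(ConcatenateFunction concatenateFunction) {
    // Collections are expanded one level; scalars are appended as-is.
    FEELFnResult<List<Object>> ok = concatenateFunction.invoke(new Object[]{ Arrays.asList(1, 2), 3 });
    // ok holds [1, 2, 3]
    // A null element yields an InvalidParametersEvent error instead of a list.
    FEELFnResult<List<Object>> err = concatenateFunction.invoke(new Object[]{ null });
}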
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
ReflectionUtils.doWithMethods(bean.getClass(), recurringJobFinderMethodCallback);
return bean;
} | @Test
void beansWithMethodsAnnotatedWithRecurringAnnotationWillAutomaticallyBeRegistered() {
// GIVEN
final RecurringJobPostProcessor recurringJobPostProcessor = getRecurringJobPostProcessor();
// WHEN
recurringJobPostProcessor.postProcessAfterInitialization(new MyServiceWithRecurringJob(), "not important");
// THEN
verify(jobScheduler).scheduleRecurrently(eq("my-recurring-job"), jobDetailsArgumentCaptor.capture(), eq(CronExpression.create("0 0/15 * * *")), any(ZoneId.class));
final JobDetails actualJobDetails = jobDetailsArgumentCaptor.getValue();
assertThat(actualJobDetails)
.isCacheable()
.hasClassName(MyServiceWithRecurringJob.class.getName())
.hasMethodName("myRecurringMethod")
.hasNoArgs();
} |
static <T> CompletionStage<T> executeCompletionStageSupplier(Observation observation,
Supplier<CompletionStage<T>> supplier) {
return decorateCompletionStageSupplier(observation, supplier).get();
} | @Test
public void shouldExecuteCompletionStageAndReturnWithExceptionAtASyncStage() throws Throwable {
given(helloWorldService.returnHelloWorld()).willThrow(new HelloWorldException());
Supplier<CompletionStage<String>> completionStageSupplier =
() -> CompletableFuture.supplyAsync(helloWorldService::returnHelloWorld);
CompletionStage<String> stringCompletionStage = Observations
.executeCompletionStageSupplier(observation, completionStageSupplier);
assertThatThrownBy(() -> stringCompletionStage.toCompletableFuture().get())
.isInstanceOf(ExecutionException.class).hasCause(new HelloWorldException());
Awaitility.await()
.until(() -> assertThat(observationRegistry)
.hasSingleObservationThat()
.hasNameEqualTo(ObservationsTest.class.getName())
.hasBeenStarted()
.hasBeenStopped()
.assertThatError()
.isInstanceOf(CompletionException.class).hasCause(new HelloWorldException()));
then(helloWorldService).should().returnHelloWorld();
} |
public Collection<Member> listNodes(String address, NodeState nodeState) throws NacosException {
Collection<Member> members = memberManager.allMembers();
Collection<Member> result = new ArrayList<>();
for (Member member : members) {
if (StringUtils.isNoneBlank(address) && !StringUtils.startsWith(member.getAddress(), address)) {
continue;
}
if (nodeState != null && member.getState() != nodeState) {
continue;
}
result.add(member);
}
return result;
} | @Test
void testListNodes() throws NacosException {
Member member1 = new Member();
member1.setIp("1.1.1.1");
member1.setPort(8848);
member1.setState(NodeState.DOWN);
Member member2 = new Member();
member2.setIp("2.2.2.2");
member2.setPort(8848);
List<Member> members = Arrays.asList(member1, member2);
when(serverMemberManager.allMembers()).thenReturn(members);
Collection<Member> result1 = nacosClusterOperationService.listNodes("1.1.1.1", null);
assertTrue(result1.stream().findFirst().isPresent());
assertEquals("1.1.1.1:8848", result1.stream().findFirst().get().getAddress());
Collection<Member> result2 = nacosClusterOperationService.listNodes(null, NodeState.UP);
assertTrue(result2.stream().findFirst().isPresent());
assertEquals("2.2.2.2:8848", result2.stream().findFirst().get().getAddress());
} |
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public NodeInfo get() {
return getNodeInfo();
} | @Test
public void testGetYarnGpuResourceInfo()
throws YarnException, JSONException {
setupMockPluginsWithGpuResourceInfo();
WebResource r = resource();
ClientResponse response = getNMResourceResponse(r, "resource-1");
assertEquals("MediaType of the response is not the expected!",
MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
response.getType().toString());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("Unexpected driverVersion in the json response!",
"1.2.3",
json.getJSONObject("gpuDeviceInformation").get("driverVersion"));
assertEquals("Unexpected totalGpuDevices in the json response!",
3, json.getJSONArray("totalGpuDevices").length());
assertEquals("Unexpected assignedGpuDevices in the json response!",
2, json.getJSONArray("assignedGpuDevices").length());
} |
static String encodeNumeric(NumericType numericType) {
byte[] rawValue = toByteArray(numericType);
byte paddingValue = getPaddingValue(numericType);
byte[] paddedRawValue = new byte[MAX_BYTE_LENGTH];
if (paddingValue != 0) {
for (int i = 0; i < paddedRawValue.length; i++) {
paddedRawValue[i] = paddingValue;
}
}
System.arraycopy(
rawValue, 0, paddedRawValue, MAX_BYTE_LENGTH - rawValue.length, rawValue.length);
return Numeric.toHexStringNoPrefix(paddedRawValue);
} | @Test
public void testUintEncode() {
Uint zero8 = new Uint8(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero8),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max8 = new Uint8(255);
assertEquals(
TypeEncoder.encodeNumeric(max8),
"00000000000000000000000000000000000000000000000000000000000000ff");
Uint zero16 = new Uint16(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero16),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max16 = new Uint16(65535);
assertEquals(
TypeEncoder.encodeNumeric(max16),
"000000000000000000000000000000000000000000000000000000000000ffff");
Uint zero24 = new Uint24(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero24),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max24 = new Uint24(16777215);
assertEquals(
TypeEncoder.encodeNumeric(max24),
"0000000000000000000000000000000000000000000000000000000000ffffff");
Uint zero32 = new Uint32(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero32),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max32 = new Uint32(BigInteger.valueOf(4294967295L));
assertEquals(
TypeEncoder.encodeNumeric(max32),
"00000000000000000000000000000000000000000000000000000000ffffffff");
Uint zero40 = new Uint40(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero40),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max40 = new Uint40(BigInteger.valueOf(1099511627775L));
assertEquals(
TypeEncoder.encodeNumeric(max40),
"000000000000000000000000000000000000000000000000000000ffffffffff");
Uint zero48 = new Uint48(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero48),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max48 = new Uint48(BigInteger.valueOf(281474976710655L));
assertEquals(
TypeEncoder.encodeNumeric(max48),
"0000000000000000000000000000000000000000000000000000ffffffffffff");
Uint zero56 = new Uint56(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero56),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max56 = new Uint56(BigInteger.valueOf(72057594037927935L));
assertEquals(
TypeEncoder.encodeNumeric(max56),
"00000000000000000000000000000000000000000000000000ffffffffffffff");
Uint zero64 = new Uint64(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero64),
("0000000000000000000000000000000000000000000000000000000000000000"));
Uint maxLong = new Uint64(BigInteger.valueOf(java.lang.Long.MAX_VALUE));
assertEquals(
TypeEncoder.encodeNumeric(maxLong),
("0000000000000000000000000000000000000000000000007fffffffffffffff"));
Uint maxValue64 =
new Uint(
new BigInteger(
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
16));
assertEquals(
TypeEncoder.encodeNumeric(maxValue64),
("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"));
Uint largeValue =
new Uint(
new BigInteger(
"fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe",
16));
assertEquals(
TypeEncoder.encodeNumeric(largeValue),
("fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe"));
Uint zero72 = new Uint72(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero72),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max72 = new Uint72(new BigInteger("4722366482869645213695"));
assertEquals(
TypeEncoder.encodeNumeric(max72),
"0000000000000000000000000000000000000000000000ffffffffffffffffff");
Uint zero80 = new Uint80(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero80),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max80 = new Uint80(new BigInteger("1208925819614629174706175"));
assertEquals(
TypeEncoder.encodeNumeric(max80),
"00000000000000000000000000000000000000000000ffffffffffffffffffff");
Uint zero88 = new Uint88(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero88),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max88 = new Uint88(new BigInteger("309485009821345068724781055"));
assertEquals(
TypeEncoder.encodeNumeric(max88),
"000000000000000000000000000000000000000000ffffffffffffffffffffff");
Uint zero96 = new Uint96(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero96),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max96 = new Uint96(new BigInteger("79228162514264337593543950335"));
assertEquals(
TypeEncoder.encodeNumeric(max96),
"0000000000000000000000000000000000000000ffffffffffffffffffffffff");
Uint zero104 = new Uint104(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero104),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max104 = new Uint104(new BigInteger("20282409603651670423947251286015"));
assertEquals(
TypeEncoder.encodeNumeric(max104),
"00000000000000000000000000000000000000ffffffffffffffffffffffffff");
Uint zero112 = new Uint112(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero112),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max112 = new Uint112(new BigInteger("5192296858534827628530496329220095"));
assertEquals(
TypeEncoder.encodeNumeric(max112),
"000000000000000000000000000000000000ffffffffffffffffffffffffffff");
Uint zero120 = new Uint120(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero120),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max120 = new Uint120(new BigInteger("1329227995784915872903807060280344575"));
assertEquals(
TypeEncoder.encodeNumeric(max120),
"0000000000000000000000000000000000ffffffffffffffffffffffffffffff");
Uint zero128 = new Uint128(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero128),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max128 = new Uint128(new BigInteger("340282366920938463463374607431768211455"));
assertEquals(
TypeEncoder.encodeNumeric(max128),
"00000000000000000000000000000000ffffffffffffffffffffffffffffffff");
Uint zero136 = new Uint136(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero136),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max136 = new Uint136(new BigInteger("87112285931760246646623899502532662132735"));
assertEquals(
TypeEncoder.encodeNumeric(max136),
"000000000000000000000000000000ffffffffffffffffffffffffffffffffff");
Uint zero144 = new Uint144(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero144),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max144 = new Uint144(new BigInteger("22300745198530623141535718272648361505980415"));
assertEquals(
TypeEncoder.encodeNumeric(max144),
"0000000000000000000000000000ffffffffffffffffffffffffffffffffffff");
Uint zero152 = new Uint152(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero152),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max152 = new Uint152(new BigInteger("5708990770823839524233143877797980545530986495"));
assertEquals(
TypeEncoder.encodeNumeric(max152),
"00000000000000000000000000ffffffffffffffffffffffffffffffffffffff");
Uint zero160 = new Uint160(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero160),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max160 =
new Uint160(new BigInteger("1461501637330902918203684832716283019655932542975"));
assertEquals(
TypeEncoder.encodeNumeric(max160),
"000000000000000000000000ffffffffffffffffffffffffffffffffffffffff");
Uint zero168 = new Uint168(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero168),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max168 =
new Uint168(new BigInteger("374144419156711147060143317175368453031918731001855"));
assertEquals(
TypeEncoder.encodeNumeric(max168),
"0000000000000000000000ffffffffffffffffffffffffffffffffffffffffff");
Uint zero176 = new Uint176(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero176),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max176 =
new Uint176(
new BigInteger("95780971304118053647396689196894323976171195136475135"));
assertEquals(
TypeEncoder.encodeNumeric(max176),
"00000000000000000000ffffffffffffffffffffffffffffffffffffffffffff");
Uint zero184 = new Uint184(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero184),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max184 =
new Uint184(
new BigInteger("24519928653854221733733552434404946937899825954937634815"));
assertEquals(
TypeEncoder.encodeNumeric(max184),
"000000000000000000ffffffffffffffffffffffffffffffffffffffffffffff");
Uint zero192 = new Uint192(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero192),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max192 =
new Uint192(
new BigInteger(
"6277101735386680763835789423207666416102355444464034512895"));
assertEquals(
TypeEncoder.encodeNumeric(max192),
"0000000000000000ffffffffffffffffffffffffffffffffffffffffffffffff");
Uint zero200 = new Uint200(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero200),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max200 =
new Uint200(
new BigInteger(
"1606938044258990275541962092341162602522202993782792835301375"));
assertEquals(
TypeEncoder.encodeNumeric(max200),
"00000000000000ffffffffffffffffffffffffffffffffffffffffffffffffff");
Uint zero208 = new Uint208(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero208),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max208 =
new Uint208(
new BigInteger(
"411376139330301510538742295639337626245683966408394965837152255"));
assertEquals(
TypeEncoder.encodeNumeric(max208),
"000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffff");
Uint zero216 = new Uint216(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero216),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max216 =
new Uint216(
new BigInteger(
"105312291668557186697918027683670432318895095400549111254310977535"));
assertEquals(
TypeEncoder.encodeNumeric(max216),
"0000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffff");
Uint zero224 = new Uint224(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero224),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max224 =
new Uint224(
new BigInteger(
"26959946667150639794667015087019630673637144422540572481103610249215"));
assertEquals(
TypeEncoder.encodeNumeric(max224),
"00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
Uint zero232 = new Uint232(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero232),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max232 =
new Uint232(
new BigInteger(
"6901746346790563787434755862277025452451108972170386555162524223799295"));
assertEquals(
TypeEncoder.encodeNumeric(max232),
"000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
        Uint zero240 = new Uint240(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero240),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max240 =
new Uint240(
new BigInteger(
"1766847064778384329583297500742918515827483896875618958121606201292619775"));
assertEquals(
TypeEncoder.encodeNumeric(max240),
"0000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
Uint zero248 = new Uint248(BigInteger.ZERO);
assertEquals(
TypeEncoder.encodeNumeric(zero248),
"0000000000000000000000000000000000000000000000000000000000000000");
Uint max248 =
new Uint248(
new BigInteger(
"452312848583266388373324160190187140051835877600158453279131187530910662655"));
assertEquals(
TypeEncoder.encodeNumeric(max248),
"00ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff");
} |
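A hedged sketch of the signed case, which the test above does not cover: getPaddingValue is not shown here, so left-padding negative values with 0xff (sign extension) is an assumption.

static void signedPaddingSketch() {
    // -1 is a single 0xff byte in two's complement; with 0xff padding the full
    // 32-byte encoding is all ff's (assumed sign-extension behavior).
    String hex = TypeEncoder.encodeNumeric(new Int8(BigInteger.valueOf(-1)));
    // expected: "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
}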
public void setSendFullErrorException(boolean sendFullErrorException) {
this.sendFullErrorException = sendFullErrorException;
} | @Test
void handleUnknownExceptionWithoutSendFullErrorException() throws Exception {
testController.exceptionSupplier = () -> new RuntimeException("Some unknown message");
handlerAdvice.setSendFullErrorException(false);
String body = mockMvc.perform(get("/"))
.andExpect(status().isInternalServerError())
.andReturn()
.getResponse()
.getContentAsString();
assertThatJson(body)
.isEqualTo("{"
+ " message: 'Internal server error',"
+ " exception: '${json-unit.any-string}'"
+ "}");
assertThatJson(body)
.inPath("exception").asString().startsWith("Error with ID: ");
} |
public static Builder builder() {
return new Builder();
} | @Test
public void testBuilder() throws Exception {
String expectedBaseImageServerUrl = "someserver";
String expectedBaseImageName = "baseimage";
String expectedBaseImageTag = "baseimagetag";
String expectedTargetServerUrl = "someotherserver";
String expectedTargetImageName = "targetimage";
String expectedTargetTag = "targettag";
Set<String> additionalTargetImageTags = ImmutableSet.of("tag1", "tag2", "tag3");
Set<String> expectedTargetImageTags = ImmutableSet.of("targettag", "tag1", "tag2", "tag3");
List<CredentialRetriever> credentialRetrievers =
Collections.singletonList(() -> Optional.of(Credential.from("username", "password")));
Instant expectedCreationTime = Instant.ofEpochSecond(10000);
List<String> expectedEntrypoint = Arrays.asList("some", "entrypoint");
List<String> expectedProgramArguments = Arrays.asList("arg1", "arg2");
Map<String, String> expectedEnvironment = ImmutableMap.of("key", "value");
Set<Port> expectedExposedPorts = ImmutableSet.of(Port.tcp(1000), Port.tcp(2000));
Map<String, String> expectedLabels = ImmutableMap.of("key1", "value1", "key2", "value2");
Class<? extends BuildableManifestTemplate> expectedTargetFormat = OciManifestTemplate.class;
Path expectedApplicationLayersCacheDirectory = Paths.get("application/layers");
Path expectedBaseImageLayersCacheDirectory = Paths.get("base/image/layers");
List<FileEntriesLayer> expectedLayerConfigurations =
Collections.singletonList(
FileEntriesLayer.builder()
.addEntry(Paths.get("sourceFile"), AbsoluteUnixPath.get("/path/in/container"))
.build());
String expectedCreatedBy = "createdBy";
ListMultimap<String, String> expectedRegistryMirrors =
ImmutableListMultimap.of("some.registry", "mirror1", "some.registry", "mirror2");
ImageConfiguration baseImageConfiguration =
ImageConfiguration.builder(
ImageReference.of(
expectedBaseImageServerUrl, expectedBaseImageName, expectedBaseImageTag))
.build();
ImageConfiguration targetImageConfiguration =
ImageConfiguration.builder(
ImageReference.of(
expectedTargetServerUrl, expectedTargetImageName, expectedTargetTag))
.setCredentialRetrievers(credentialRetrievers)
.build();
ContainerConfiguration containerConfiguration =
ContainerConfiguration.builder()
.setCreationTime(expectedCreationTime)
.setEntrypoint(expectedEntrypoint)
.setProgramArguments(expectedProgramArguments)
.setEnvironment(expectedEnvironment)
.setExposedPorts(expectedExposedPorts)
.setLabels(expectedLabels)
.build();
BuildContext.Builder buildContextBuilder =
BuildContext.builder()
.setBaseImageConfiguration(baseImageConfiguration)
.setTargetImageConfiguration(targetImageConfiguration)
.setAdditionalTargetImageTags(additionalTargetImageTags)
.setContainerConfiguration(containerConfiguration)
.setApplicationLayersCacheDirectory(expectedApplicationLayersCacheDirectory)
.setBaseImageLayersCacheDirectory(expectedBaseImageLayersCacheDirectory)
.setTargetFormat(ImageFormat.OCI)
.setEnablePlatformTags(true)
.setAllowInsecureRegistries(true)
.setLayerConfigurations(expectedLayerConfigurations)
.setToolName(expectedCreatedBy)
.setRegistryMirrors(expectedRegistryMirrors);
BuildContext buildContext = buildContextBuilder.build();
Assert.assertEquals(
expectedCreationTime, buildContext.getContainerConfiguration().getCreationTime());
Assert.assertEquals(
expectedBaseImageServerUrl, buildContext.getBaseImageConfiguration().getImageRegistry());
Assert.assertEquals(
expectedBaseImageName, buildContext.getBaseImageConfiguration().getImageRepository());
Assert.assertEquals(
expectedBaseImageTag, buildContext.getBaseImageConfiguration().getImageQualifier());
Assert.assertEquals(
expectedTargetServerUrl, buildContext.getTargetImageConfiguration().getImageRegistry());
Assert.assertEquals(
expectedTargetImageName, buildContext.getTargetImageConfiguration().getImageRepository());
Assert.assertEquals(
expectedTargetTag, buildContext.getTargetImageConfiguration().getImageQualifier());
Assert.assertEquals(expectedTargetImageTags, buildContext.getAllTargetImageTags());
Assert.assertEquals(
Credential.from("username", "password"),
buildContext
.getTargetImageConfiguration()
.getCredentialRetrievers()
.get(0)
.retrieve()
.orElseThrow(AssertionError::new));
Assert.assertEquals(
expectedProgramArguments, buildContext.getContainerConfiguration().getProgramArguments());
Assert.assertEquals(
expectedEnvironment, buildContext.getContainerConfiguration().getEnvironmentMap());
Assert.assertEquals(
expectedExposedPorts, buildContext.getContainerConfiguration().getExposedPorts());
Assert.assertEquals(expectedLabels, buildContext.getContainerConfiguration().getLabels());
Assert.assertEquals(expectedTargetFormat, buildContext.getTargetFormat());
Assert.assertEquals(
expectedApplicationLayersCacheDirectory,
buildContextBuilder.getApplicationLayersCacheDirectory());
Assert.assertEquals(
expectedBaseImageLayersCacheDirectory,
buildContextBuilder.getBaseImageLayersCacheDirectory());
Assert.assertEquals(expectedLayerConfigurations, buildContext.getLayerConfigurations());
Assert.assertEquals(
expectedEntrypoint, buildContext.getContainerConfiguration().getEntrypoint());
Assert.assertEquals(expectedCreatedBy, buildContext.getToolName());
Assert.assertEquals(expectedRegistryMirrors, buildContext.getRegistryMirrors());
Assert.assertNotNull(buildContext.getExecutorService());
Assert.assertTrue(buildContext.getEnablePlatformTags());
} |
@ExceptionHandler({HttpMessageNotReadableException.class})
@ResponseStatus(HttpStatus.BAD_REQUEST)
protected ResponseEntity<RestError> handleHttpMessageNotReadableException(HttpMessageNotReadableException httpMessageNotReadableException) {
String exceptionMessage = getExceptionMessage(httpMessageNotReadableException);
return new ResponseEntity<>(new RestError(exceptionMessage), HttpStatus.BAD_REQUEST);
} | @Test
public void handleHttpMessageNotReadableException_whenCauseIsInvalidFormatException_shouldUseMessageFromCause() {
InvalidFormatException cause = mock(InvalidFormatException.class);
when(cause.getOriginalMessage()).thenReturn("Cause message");
HttpMessageNotReadableException exception = new HttpMessageNotReadableException("Message not readable", cause);
ResponseEntity<RestError> responseEntity = underTest.handleHttpMessageNotReadableException(exception);
assertThat(responseEntity.getStatusCode()).isEqualTo(HttpStatus.BAD_REQUEST);
assertThat(responseEntity.getBody().message()).isEqualTo("Cause message");
} |
@Deprecated
@Override
public void init(final ProcessorContext context,
final StateStore root) {
this.context = context instanceof InternalProcessorContext ? (InternalProcessorContext<?, ?>) context : null;
taskId = context.taskId();
initStoreSerde(context);
streamsMetrics = (StreamsMetricsImpl) context.metrics();
registerMetrics();
final Sensor restoreSensor =
StateStoreMetrics.restoreSensor(taskId.toString(), metricsScope, name(), streamsMetrics);
// register and possibly restore the state from the logs
maybeMeasureLatency(() -> super.init(context, root), time, restoreSensor);
} | @Test
public void testMetrics() {
setUp();
init();
final JmxReporter reporter = new JmxReporter();
final MetricsContext metricsContext = new KafkaMetricsContext("kafka.streams");
reporter.contextChange(metricsContext);
metrics.addReporter(reporter);
assertTrue(reporter.containsMbean(String.format(
"kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
STORE_LEVEL_GROUP,
THREAD_ID_TAG_KEY,
threadId,
taskId,
STORE_TYPE,
STORE_NAME
)));
} |
public ApiError error() {
return error;
} | @Test
public void testError() {
ResultOrError<Integer> resultOrError =
new ResultOrError<>(Errors.INVALID_REQUEST, "missing foobar");
assertTrue(resultOrError.isError());
assertFalse(resultOrError.isResult());
assertNull(resultOrError.result());
assertEquals(new ApiError(Errors.INVALID_REQUEST, "missing foobar"),
resultOrError.error());
} |
public static ChannelBuffer dynamicBuffer() {
return dynamicBuffer(DEFAULT_CAPACITY);
} | @Test
void testDynamicBuffer() {
ChannelBuffer channelBuffer = ChannelBuffers.dynamicBuffer();
Assertions.assertTrue(channelBuffer instanceof DynamicChannelBuffer);
Assertions.assertEquals(channelBuffer.capacity(), DEFAULT_CAPACITY);
channelBuffer = ChannelBuffers.dynamicBuffer(32, DirectChannelBufferFactory.getInstance());
Assertions.assertTrue(channelBuffer instanceof DynamicChannelBuffer);
Assertions.assertTrue(channelBuffer.isDirect());
Assertions.assertEquals(channelBuffer.capacity(), 32);
} |
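A growth sketch; writeBytes(byte[]) is assumed to exist on ChannelBuffer, mirroring the Netty 3 style API this abstraction follows.

static void growthSketch() {
    ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); // initial capacity = DEFAULT_CAPACITY
    buf.writeBytes(new byte[1024]); // writing past the initial capacity expands the buffer
}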
@Override
public PageResult<DictDataDO> getDictDataPage(DictDataPageReqVO pageReqVO) {
return dictDataMapper.selectPage(pageReqVO);
} | @Test
public void testGetDictDataPage() {
    // mock data
    DictDataDO dbDictData = randomPojo(DictDataDO.class, o -> { // to be matched by the query below
o.setLabel("芋艿");
o.setDictType("yunai");
o.setStatus(CommonStatusEnum.ENABLE.getStatus());
});
dictDataMapper.insert(dbDictData);
    // test: label does not match
dictDataMapper.insert(cloneIgnoreId(dbDictData, o -> o.setLabel("艿")));
    // test: dictType does not match
dictDataMapper.insert(cloneIgnoreId(dbDictData, o -> o.setDictType("nai")));
    // test: status does not match
dictDataMapper.insert(cloneIgnoreId(dbDictData, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
    // prepare parameters
DictDataPageReqVO reqVO = new DictDataPageReqVO();
reqVO.setLabel("芋");
reqVO.setDictType("yunai");
reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus());
    // invoke
PageResult<DictDataDO> pageResult = dictDataService.getDictDataPage(reqVO);
    // assert
assertEquals(1, pageResult.getTotal());
assertEquals(1, pageResult.getList().size());
assertPojoEquals(dbDictData, pageResult.getList().get(0));
} |
boolean convertDeviceProfileForVersion330(JsonNode profileData) {
boolean isUpdated = false;
if (profileData.has("alarms") && !profileData.get("alarms").isNull()) {
JsonNode alarms = profileData.get("alarms");
for (JsonNode alarm : alarms) {
if (alarm.has("createRules")) {
JsonNode createRules = alarm.get("createRules");
for (AlarmSeverity severity : AlarmSeverity.values()) {
if (createRules.has(severity.name())) {
JsonNode spec = createRules.get(severity.name()).get("condition").get("spec");
if (convertDeviceProfileAlarmRulesForVersion330(spec)) {
isUpdated = true;
}
}
}
}
if (alarm.has("clearRule") && !alarm.get("clearRule").isNull()) {
JsonNode spec = alarm.get("clearRule").get("condition").get("spec");
if (convertDeviceProfileAlarmRulesForVersion330(spec)) {
isUpdated = true;
}
}
}
}
return isUpdated;
} | @Test
void convertDeviceProfileAlarmRulesForVersion330SecondRun() throws IOException {
JsonNode spec = readFromResource("update/330/device_profile_001_out.json");
JsonNode expected = readFromResource("update/330/device_profile_001_out.json");
assertThat(service.convertDeviceProfileForVersion330(spec.get("profileData"))).isFalse();
assertThat(spec.toPrettyString()).isEqualTo(expected.toPrettyString()); // use IDE feature <Click to see difference>
} |
public static AvroGenericCoder of(Schema schema) {
return AvroGenericCoder.of(schema);
} | @Test
public void testDeterminismCollection() {
assertNonDeterministic(
AvroCoder.of(StringCollection.class),
reasonField(
StringCollection.class,
"stringCollection",
"java.util.Collection<java.lang.String> may not be deterministically ordered"));
} |
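A minimal sketch building a Schema with Avro's SchemaBuilder and wrapping it via the factory above; the record shape is hypothetical and the enclosing class is assumed to be AvroCoder, as the test suggests.

static AvroGenericCoder userCoder() {
    org.apache.avro.Schema schema = org.apache.avro.SchemaBuilder.record("User")
            .fields().requiredString("name").endRecord();
    return AvroCoder.of(schema); // delegates to AvroGenericCoder.of(schema)
}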
@Override
public boolean apply(InputFile inputFile) {
return originalPredicate.apply(inputFile) && InputFile.Status.SAME != inputFile.status();
} | @Test
public void apply_when_file_is_added_and_predicate_is_true() {
when(inputFile.status()).thenReturn(InputFile.Status.ADDED);
when(predicate.apply(inputFile)).thenReturn(true);
Assertions.assertThat(underTest.apply(inputFile)).isTrue();
verify(predicate, times(1)).apply(any());
verify(inputFile, times(1)).status();
} |
@Override
public ReactiveCache<E> getCache() {
if (cache != null) {
return cache;
}
if (cacheManager == null) {
return cache = UnSupportedReactiveCache.getInstance();
}
return cache = cacheManager.getCache(getCacheName());
} | @Test
public void test() {
TestEntity entity = TestEntity.of("test2",100,"testName");
entityService.insert(Mono.just(entity))
.as(StepVerifier::create)
.expectNext(1)
.verifyComplete();
entityService.findById(Mono.just(entity.getId()))
.map(TestEntity::getId)
.as(StepVerifier::create)
.expectNext(entity.getId())
.verifyComplete();
entityService.getCache()
.getMono("id:".concat(entity.getId()))
.map(TestEntity::getId)
.as(StepVerifier::create)
.expectNext(entity.getId())
.verifyComplete();
entityService.createUpdate()
.set("age",120)
.where("id",entity.getId())
.execute()
.as(StepVerifier::create)
.expectNext(1)
.verifyComplete();
entityService.getCache()
.getMono("id:".concat(entity.getId()))
.switchIfEmpty(Mono.error(NullPointerException::new))
.as(StepVerifier::create)
.expectError(NullPointerException.class)
.verify();
} |
public Future<KafkaVersionChange> reconcile() {
return getVersionFromController()
.compose(i -> getPods())
.compose(this::detectToAndFromVersions)
.compose(i -> prepareVersionChange());
} | @Test
public void testUpgradeFromStatefulSet(VertxTestContext context) {
String oldKafkaVersion = KafkaVersionTestUtils.PREVIOUS_KAFKA_VERSION;
String oldInterBrokerProtocolVersion = KafkaVersionTestUtils.PREVIOUS_PROTOCOL_VERSION;
String oldLogMessageFormatVersion = KafkaVersionTestUtils.PREVIOUS_FORMAT_VERSION;
String kafkaVersion = VERSIONS.defaultVersion().version();
String interBrokerProtocolVersion = VERSIONS.defaultVersion().protocolVersion();
String logMessageFormatVersion = VERSIONS.defaultVersion().messageVersion();
VersionChangeCreator vcc = mockVersionChangeCreator(
mockKafka(kafkaVersion, interBrokerProtocolVersion, logMessageFormatVersion),
mockNewCluster(
mockSts(oldKafkaVersion),
null,
mockUniformPods(oldKafkaVersion, oldInterBrokerProtocolVersion, oldLogMessageFormatVersion)
)
);
Checkpoint async = context.checkpoint();
vcc.reconcile().onComplete(context.succeeding(c -> context.verify(() -> {
assertThat(c.from(), is(VERSIONS.version(oldKafkaVersion)));
assertThat(c.to(), is(VERSIONS.defaultVersion()));
assertThat(c.interBrokerProtocolVersion(), is(oldInterBrokerProtocolVersion));
assertThat(c.logMessageFormatVersion(), is(oldLogMessageFormatVersion));
async.flag();
})));
} |
@Override
public void pickAddress() throws Exception {
if (publicAddress != null || bindAddress != null) {
return;
}
try {
AddressDefinition publicAddressDef = getPublicAddressByPortSearch();
if (publicAddressDef != null) {
publicAddress = createAddress(publicAddressDef, publicAddressDef.port);
logger.info("Using public address: " + publicAddress);
} else {
publicAddress = bindAddress;
logger.finest("Using public address the same as the bind address: " + publicAddress);
}
} catch (Exception e) {
ServerSocketChannel serverSocketChannel = getServerSocketChannel(endpointQualifier);
if (serverSocketChannel != null) {
serverSocketChannel.close();
}
logger.severe(e);
throw e;
}
} | @Test
public void testPublicAddress_whenBlankViaProperty() {
config.setProperty("hazelcast.local.publicAddress", " ");
addressPicker = new DefaultAddressPicker(config, logger);
assertThrows(IllegalArgumentException.class, () -> addressPicker.pickAddress());
} |
public List<File> process()
throws Exception {
try {
return doProcess();
} catch (Exception e) {
            // Clean up the output dir as processing has failed. File managers left over from the
            // map or reduce phase will be cleaned up in the respective phases.
FileUtils.deleteQuietly(_segmentsOutputDir);
throw e;
} finally {
FileUtils.deleteDirectory(_mapperOutputDir);
FileUtils.deleteDirectory(_reducerOutputDir);
}
} | @Test
public void testMultipleSegments()
throws Exception {
File workingDir = new File(TEMP_DIR, "multiple_segments_output");
FileUtils.forceMkdir(workingDir);
// Default configs
SegmentProcessorConfig config =
new SegmentProcessorConfig.Builder().setTableConfig(_tableConfig).setSchema(_schema).build();
SegmentProcessorFramework framework = new SegmentProcessorFramework(_multipleSegments, config, workingDir);
List<File> outputSegments = framework.process();
assertEquals(outputSegments.size(), 1);
String[] outputDirs = workingDir.list();
assertTrue(outputDirs != null && outputDirs.length == 1, Arrays.toString(outputDirs));
SegmentMetadata segmentMetadata = new SegmentMetadataImpl(outputSegments.get(0));
assertEquals(segmentMetadata.getTotalDocs(), 10);
assertEquals(segmentMetadata.getName(), "myTable_1597719600000_1597892400000_0");
FileUtils.cleanDirectory(workingDir);
rewindRecordReaders(_multipleSegments);
    // Time rounding + partitioning + rollup merge
config = new SegmentProcessorConfig.Builder().setTableConfig(_tableConfig).setSchema(_schema).setTimeHandlerConfig(
new TimeHandlerConfig.Builder(TimeHandler.Type.EPOCH).setRoundBucketMs(86400000).setPartitionBucketMs(86400000)
.build()).setMergeType(MergeType.ROLLUP).build();
framework = new SegmentProcessorFramework(_multipleSegments, config, workingDir);
outputSegments = framework.process();
assertEquals(outputSegments.size(), 3);
outputDirs = workingDir.list();
assertTrue(outputDirs != null && outputDirs.length == 1, Arrays.toString(outputDirs));
outputSegments.sort(null);
segmentMetadata = new SegmentMetadataImpl(outputSegments.get(0));
assertEquals(segmentMetadata.getTotalDocs(), 2);
assertEquals(segmentMetadata.getName(), "myTable_1597708800000_1597708800000_0");
segmentMetadata = new SegmentMetadataImpl(outputSegments.get(1));
assertEquals(segmentMetadata.getTotalDocs(), 3);
assertEquals(segmentMetadata.getName(), "myTable_1597795200000_1597795200000_1");
segmentMetadata = new SegmentMetadataImpl(outputSegments.get(2));
assertEquals(segmentMetadata.getTotalDocs(), 2);
assertEquals(segmentMetadata.getName(), "myTable_1597881600000_1597881600000_2");
FileUtils.cleanDirectory(workingDir);
rewindRecordReaders(_multipleSegments);
} |
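The process() method above pairs a catch block that removes the final output on failure with a finally block that always removes intermediate directories. Here is a minimal standalone sketch of that cleanup pattern, assuming commons-io is on the classpath; the directory parameters are illustrative.

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;

public class CleanupSketch {
    public static File run(File outputDir, File scratchDir) throws IOException {
        try {
            // ... do the actual work, writing results into outputDir ...
            return outputDir;
        } catch (IOException e) {
            // Output is incomplete; remove it so callers never see partial results.
            FileUtils.deleteQuietly(outputDir);
            throw e;
        } finally {
            // Intermediate data is useless either way; always remove it.
            FileUtils.deleteDirectory(scratchDir);
        }
    }
}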
@Override
public int getType() throws SQLException {
checkClosed();
return type;
} | @Test
void assertGetType() throws SQLException {
assertThat(databaseMetaDataResultSet.getType(), is(ResultSet.TYPE_FORWARD_ONLY));
} |
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) {
Cookie cookie = WebUtils.getCookie(request, Constants.LOCALE_LANGUAGE);
if (cookie != null) {
            // A locale cookie is present; let the cookie-based resolver handle it.
return true;
}
        // No cookie: fall back to resolving the locale from the request header.
String newLocale = request.getHeader(Constants.LOCALE_LANGUAGE);
if (newLocale != null) {
LocaleContextHolder.setLocale(parseLocaleValue(newLocale));
}
return true;
} | @Test
public void testPreHandle() {
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
        // Test the case where neither cookie nor header supplies a locale
Assertions.assertTrue(interceptor.preHandle(request, response, null));
} |
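The interceptor above only sets the locale when a header is present and no cookie exists; downstream code then reads it from Spring's LocaleContextHolder. A minimal sketch of that hand-off follows; the "zh-CN" header value is illustrative.

import java.util.Locale;
import org.springframework.context.i18n.LocaleContextHolder;

public class LocaleSketch {
    public static void main(String[] args) {
        // What the interceptor effectively does with a "zh-CN" header value:
        LocaleContextHolder.setLocale(Locale.forLanguageTag("zh-CN"));

        // Any code running later in the same request sees the resolved locale.
        Locale current = LocaleContextHolder.getLocale();
        System.out.println(current); // zh_CN
    }
}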
public static NetworkEndpoint forIp(String ipAddress) {
checkArgument(InetAddresses.isInetAddress(ipAddress), "'%s' is not an IP address.", ipAddress);
return NetworkEndpoint.newBuilder()
.setType(NetworkEndpoint.Type.IP)
.setIpAddress(
IpAddress.newBuilder()
.setAddressFamily(ipAddressFamily(ipAddress))
.setAddress(ipAddress))
.build();
} | @Test
public void forIp_withIpV4Address_returnsIpV4NetworkEndpoint() {
assertThat(NetworkEndpointUtils.forIp("1.2.3.4"))
.isEqualTo(
NetworkEndpoint.newBuilder()
.setType(NetworkEndpoint.Type.IP)
.setIpAddress(
IpAddress.newBuilder()
.setAddressFamily(AddressFamily.IPV4)
.setAddress("1.2.3.4"))
.build());
} |
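forIp delegates its validation to Guava's InetAddresses.isInetAddress, which accepts both IPv4 and IPv6 literals without performing DNS lookups. A small sketch of that check in isolation:

import com.google.common.net.InetAddresses;

public class IpCheckSketch {
    public static void main(String[] args) {
        // Literal addresses pass; host names do not, since no DNS lookup happens.
        System.out.println(InetAddresses.isInetAddress("1.2.3.4"));     // true
        System.out.println(InetAddresses.isInetAddress("2001:db8::1")); // true
        System.out.println(InetAddresses.isInetAddress("example.com")); // false
    }
}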
@Override
public boolean equals(final Object obj) {
if (!(obj instanceof Instance)) {
return false;
}
final Instance host = (Instance) obj;
return Instance.strEquals(host.toString(), toString());
} | @Test
void testEquals() {
Instance actual = new Instance();
setInstance(actual);
actual.setMetadata(new HashMap<>());
actual.addMetadata("a", "b");
assertNotEquals(actual, new Object());
Instance expected = new Instance();
setInstance(expected);
expected.setMetadata(new HashMap<>());
expected.addMetadata("a", "b");
assertEquals(actual, expected);
expected.addMetadata("a", "c");
assertNotEquals(actual, expected);
} |
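Note the design choice in the equals() above: equality is delegated entirely to toString(), so any field (including metadata entries) that changes the string form changes equality. A generic sketch of the same pattern, using a hypothetical class that is not part of Nacos:

import java.util.Objects;

// Hypothetical example class: equals() and hashCode() both derive from
// toString(), keeping the two trivially consistent with each other.
public class StringIdentified {
    private final String name;
    private final int port;

    public StringIdentified(String name, int port) {
        this.name = name;
        this.port = port;
    }

    @Override
    public String toString() {
        return name + ":" + port;
    }

    @Override
    public boolean equals(Object o) {
        return o instanceof StringIdentified && o.toString().equals(toString());
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(toString());
    }
}

The upside is that equality can never drift out of sync with the string form; the downside is that equals() is only as precise as toString().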
@Override
protected void handleAddSubscriptionToTxn(CommandAddSubscriptionToTxn command) {
checkArgument(state == State.Connected);
final TxnID txnID = new TxnID(command.getTxnidMostBits(), command.getTxnidLeastBits());
final long requestId = command.getRequestId();
final List<org.apache.pulsar.common.api.proto.Subscription> subscriptionsList = new ArrayList<>();
for (org.apache.pulsar.common.api.proto.Subscription sub : command.getSubscriptionsList()) {
subscriptionsList.add(new org.apache.pulsar.common.api.proto.Subscription().copyFrom(sub));
}
if (log.isDebugEnabled()) {
log.debug("Receive add published partition to txn request {} from {} with txnId {}",
requestId, remoteAddress, txnID);
}
final TransactionCoordinatorID tcId = TransactionCoordinatorID.get(command.getTxnidMostBits());
if (!checkTransactionEnableAndSendError(requestId)) {
return;
}
TransactionMetadataStoreService transactionMetadataStoreService =
service.pulsar().getTransactionMetadataStoreService();
verifyTxnOwnership(txnID)
.thenCompose(isOwner -> {
if (!isOwner) {
return failedFutureTxnNotOwned(txnID);
}
return transactionMetadataStoreService.addAckedPartitionToTxn(txnID,
MLTransactionMetadataStore.subscriptionToTxnSubscription(subscriptionsList));
})
.whenComplete((v, ex) -> {
if (ex == null) {
if (log.isDebugEnabled()) {
log.debug("Send response success for add published partition to txn request {}",
requestId);
}
writeAndFlush(Commands.newAddSubscriptionToTxnResponse(requestId,
txnID.getLeastSigBits(), txnID.getMostSigBits()));
} else {
ex = handleTxnException(ex, BaseCommand.Type.ADD_SUBSCRIPTION_TO_TXN.name(), requestId);
writeAndFlush(
Commands.newAddSubscriptionToTxnResponse(requestId, txnID.getLeastSigBits(),
txnID.getMostSigBits(), BrokerServiceException.getClientErrorCode(ex),
ex.getMessage()));
transactionMetadataStoreService.handleOpFail(ex, tcId);
}
});
} | @Test(expectedExceptions = IllegalArgumentException.class)
public void shouldFailHandleAddSubscriptionToTxn() throws Exception {
ServerCnx serverCnx = mock(ServerCnx.class, CALLS_REAL_METHODS);
Field stateUpdater = ServerCnx.class.getDeclaredField("state");
stateUpdater.setAccessible(true);
stateUpdater.set(serverCnx, ServerCnx.State.Failed);
serverCnx.handleAddSubscriptionToTxn(any());
} |
public static BigDecimal convertToDecimal(Schema schema, Object value, int scale) {
if (value == null) {
throw new DataException("Unable to convert a null value to a schema that requires a value");
}
return convertToDecimal(Decimal.schema(scale), value);
} | @Test
public void shouldConvertDecimalValues() {
// Various forms of the same number should all be parsed to the same BigDecimal
Number number = 1.0f;
String string = number.toString();
BigDecimal value = new BigDecimal(string);
byte[] bytes = Decimal.fromLogical(Decimal.schema(1), value);
ByteBuffer buffer = ByteBuffer.wrap(bytes);
assertEquals(value, Values.convertToDecimal(null, number, 1));
assertEquals(value, Values.convertToDecimal(null, string, 1));
assertEquals(value, Values.convertToDecimal(null, value, 1));
assertEquals(value, Values.convertToDecimal(null, bytes, 1));
assertEquals(value, Values.convertToDecimal(null, buffer, 1));
} |
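The test above leans on the fact that Values.convertToDecimal normalizes several input representations to one BigDecimal. A minimal sketch of that normalization, assuming Kafka Connect's data API is on the classpath (the schema argument is unused by this overload, as the focal method shows):

import java.math.BigDecimal;
import org.apache.kafka.connect.data.Values;

public class DecimalSketch {
    public static void main(String[] args) {
        BigDecimal expected = new BigDecimal("1.0");

        // A float, its string form, and the BigDecimal itself all normalize
        // to the same value at scale 1.
        System.out.println(expected.equals(Values.convertToDecimal(null, 1.0f, 1)));
        System.out.println(expected.equals(Values.convertToDecimal(null, "1.0", 1)));
        System.out.println(expected.equals(Values.convertToDecimal(null, expected, 1)));
    }
}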
public static <InputT, OutputT> DoFnInvoker<InputT, OutputT> invokerFor(
DoFn<InputT, OutputT> fn) {
return ByteBuddyDoFnInvokerFactory.only().newByteBuddyInvoker(fn);
} | @Test
public void testDefaultGetSizeWithoutHasProgress() throws Exception {
class MockFn extends DoFn<String, String> {
@ProcessElement
public void processElement(
ProcessContext c,
RestrictionTracker<RestrictionWithBoundedDefaultTracker, Void> tracker) {}
@GetInitialRestriction
public RestrictionWithBoundedDefaultTracker getInitialRestriction(@Element String element) {
return null;
}
}
MockFn fn = mock(MockFn.class);
DoFnInvoker<String, String> invoker = DoFnInvokers.invokerFor(fn);
assertEquals(0.0, invoker.invokeGetSize(mockArgumentProvider), 0.0001);
} |
public static CsvIOParse<Row> parseRows(Schema schema, CSVFormat csvFormat) {
CsvIOParseHelpers.validateCsvFormat(csvFormat);
CsvIOParseHelpers.validateCsvFormatWithSchema(csvFormat, schema);
RowCoder coder = RowCoder.of(schema);
CsvIOParseConfiguration.Builder<Row> builder = CsvIOParseConfiguration.builder();
builder.setCsvFormat(csvFormat).setSchema(schema).setCoder(coder).setFromRowFn(row -> row);
return CsvIOParse.<Row>builder().setConfigBuilder(builder).build();
} | @Test
public void givenStringToRecordError_emits() {
Pipeline pipeline = Pipeline.create();
PCollection<String> input = pipeline.apply(Create.of("true,\"1.1,3.141592,1,5,foo"));
Schema schema =
Schema.builder()
.addBooleanField("aBoolean")
.addDoubleField("aDouble")
.addFloatField("aFloat")
.addInt32Field("anInteger")
.addInt64Field("aLong")
.addStringField("aString")
.build();
CsvIOParse<Row> underTest = CsvIO.parseRows(schema, csvFormat().withQuote('"'));
CsvIOParseResult<Row> result = input.apply(underTest);
PAssert.thatSingleton(result.getErrors().apply("Total Errors", Count.globally())).isEqualTo(1L);
PAssert.thatSingleton(
stackTraceContains(result.getErrors(), CsvIOStringToCsvRecord.class.getName()))
.isEqualTo(1L);
pipeline.run();
} |
@Override
protected void serviceInit(Configuration conf) throws Exception {
String dirPath =
conf.get(YarnServiceConf.YARN_SERVICES_SYSTEM_SERVICE_DIRECTORY);
if (dirPath != null) {
systemServiceDir = new Path(dirPath);
LOG.info("System Service Directory is configured to {}",
systemServiceDir);
fs = systemServiceDir.getFileSystem(conf);
this.loginUGI = UserGroupInformation.isSecurityEnabled() ?
UserGroupInformation.getLoginUser() :
UserGroupInformation.getCurrentUser();
LOG.info("UserGroupInformation initialized to {}", loginUGI);
}
} | @Test
void testFileSystemCloseWhenCleanUpService() throws Exception {
FileSystem fs = null;
Path path = new Path("/tmp/servicedir");
HdfsConfiguration hdfsConfig = new HdfsConfiguration();
MiniDFSCluster hdfsCluster = new MiniDFSCluster.Builder(hdfsConfig)
.numDataNodes(1).build();
fs = hdfsCluster.getFileSystem();
if (!fs.exists(path)) {
fs.mkdirs(path);
}
SystemServiceManagerImpl serviceManager = new SystemServiceManagerImpl();
hdfsConfig.set(YarnServiceConf.YARN_SERVICES_SYSTEM_SERVICE_DIRECTORY,
path.toString());
serviceManager.init(hdfsConfig);
    // The FileSystem object owned by SystemServiceManager must not be closed
    // when cleaning up a service.
hdfsConfig.set("hadoop.registry.zk.connection.timeout.ms", "100");
hdfsConfig.set("hadoop.registry.zk.retry.times", "1");
ApiServiceClient asc = new ApiServiceClient();
asc.serviceInit(hdfsConfig);
asc.actionCleanUp("testapp", "testuser");
try {
serviceManager.start();
} catch (Exception e) {
if (e.getMessage().contains("Filesystem closed")) {
fail("SystemServiceManagerImpl failed to handle " +
"FileSystem close");
} else {
fail("Should not get any exceptions");
}
} finally {
serviceManager.stop();
fs = hdfsCluster.getFileSystem();
if (fs.exists(path)) {
fs.delete(path, true);
}
if (hdfsCluster != null) {
hdfsCluster.shutdown();
}
}
} |
@VisibleForTesting
AdminUserDO validateUserExists(Long id) {
if (id == null) {
return null;
}
AdminUserDO user = userMapper.selectById(id);
if (user == null) {
throw exception(USER_NOT_EXISTS);
}
return user;
} | @Test
public void testValidateUserExists_notExists() {
assertServiceException(() -> userService.validateUserExists(randomLongId()), USER_NOT_EXISTS);
} |
public void runPickle(Pickle pickle) {
try {
StepTypeRegistry stepTypeRegistry = createTypeRegistryForPickle(pickle);
snippetGenerators = createSnippetGeneratorsForPickle(stepTypeRegistry);
// Java8 step definitions will be added to the glue here
buildBackendWorlds();
glue.prepareGlue(stepTypeRegistry);
TestCase testCase = createTestCaseForPickle(pickle);
testCase.run(bus);
} finally {
glue.removeScenarioScopedGlue();
disposeBackendWorlds();
}
} | @Test
void steps_are_not_executed_on_dry_run() {
StubStepDefinition stepDefinition = new StubStepDefinition("some step");
Pickle pickle = createPickleMatchingStepDefinitions(stepDefinition);
RuntimeOptions runtimeOptions = new RuntimeOptionsBuilder().setDryRun().build();
TestRunnerSupplier runnerSupplier = new TestRunnerSupplier(bus, runtimeOptions) {
@Override
public void loadGlue(Glue glue, List<URI> gluePaths) {
glue.addStepDefinition(stepDefinition);
}
};
runnerSupplier.get().runPickle(pickle);
assertThat(stepDefinition.getArgs(), is(nullValue()));
} |
public static MessageType convert(Schema schema, String name) {
return new TypeToMessageType().convert(schema, name);
} | @Test
public void testLegacyTwoLevelListGenByParquetAvro() {
String messageType =
"message root {"
+ " optional group my_list (LIST) {"
+ " repeated group array {"
+ " required binary str (UTF8);"
+ " }"
+ " }"
+ "}";
    MessageType parquetSchema = MessageTypeParser.parseMessageType(messageType);
Schema expectedSchema =
new Schema(
optional(
1,
"my_list",
Types.ListType.ofRequired(
1001, Types.StructType.of(required(1000, "str", Types.StringType.get())))));
    Schema actualSchema = ParquetSchemaUtil.convert(parquetSchema);
assertThat(actualSchema.asStruct())
.as("Schema must match")
.isEqualTo(expectedSchema.asStruct());
} |
public List<Column> headers() {
return byNamespace()
.get(HEADERS);
} | @Test
public void shouldExposeExtractedHeaderColumns() {
assertThat(SCHEMA_WITH_EXTRACTED_HEADERS.headers(), contains(
headersColumn(H0, BYTES, Optional.of("key0")),
headersColumn(H1, BYTES, Optional.of("key1"))
));
} |
@Override
public HttpHeaders add(HttpHeaders headers) {
if (headers instanceof DefaultHttpHeaders) {
this.headers.add(((DefaultHttpHeaders) headers).headers);
return this;
} else {
return super.add(headers);
}
} | @Test
public void testStringKeyRetrievedAsAsciiString() {
final HttpHeaders headers = new DefaultHttpHeaders(false);
    // Test adding a String key and retrieving it using an AsciiString key
final String connection = "keep-alive";
headers.add(of("Connection"), connection);
    // Lookup via the String form of the header name succeeds
final String value = headers.getAsString(HttpHeaderNames.CONNECTION.toString());
assertNotNull(value);
assertEquals(connection, value);
    // Lookup via the AsciiString constant succeeds as well
final String value2 = headers.getAsString(HttpHeaderNames.CONNECTION);
assertNotNull(value2);
assertEquals(connection, value2);
} |
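The test above works because Netty's DefaultHttpHeaders treats String and AsciiString names as equivalent and matches them case-insensitively. A small standalone sketch of that lookup behavior:

import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaders;

public class HeaderLookupSketch {
    public static void main(String[] args) {
        HttpHeaders headers = new DefaultHttpHeaders();
        headers.add("Connection", "keep-alive");

        // Name lookups are case-insensitive and accept any CharSequence,
        // so the AsciiString constant and plain strings all match.
        System.out.println(headers.get(HttpHeaderNames.CONNECTION)); // keep-alive
        System.out.println(headers.get("connection"));               // keep-alive
        System.out.println(headers.getAsString("CONNECTION"));       // keep-alive
    }
}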
@SuppressWarnings("FutureReturnValueIgnored")
public void start() {
budgetRefreshExecutor.submit(this::subscribeToRefreshBudget);
} | @Test
public void testTriggeredAndScheduledBudgetRefresh_concurrent() throws InterruptedException {
CountDownLatch redistributeBudgetLatch = new CountDownLatch(2);
Runnable redistributeBudget = redistributeBudgetLatch::countDown;
GetWorkBudgetRefresher budgetRefresher = createBudgetRefresher(redistributeBudget);
budgetRefresher.start();
Thread budgetRefreshTriggerThread = new Thread(budgetRefresher::requestBudgetRefresh);
budgetRefreshTriggerThread.start();
budgetRefreshTriggerThread.join();
    // Wait for both the triggered and the scheduled budget redistribution to run.
redistributeBudgetLatch.await();
assertThat(redistributeBudgetLatch.getCount()).isEqualTo(0);
} |
@Override
public String getFullName() {
return nullToEmpty((String) fields.get(FULL_NAME));
} | @Test
public void testNoFullNameEmptyString() {
user = createUserImpl(null, null, null);
assertEquals("", user.getFullName());
} |
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DistroKey distroKey = (DistroKey) o;
        return Objects.equals(resourceKey, distroKey.resourceKey)
                && Objects.equals(resourceType, distroKey.resourceType)
                && Objects.equals(targetServer, distroKey.targetServer);
} | @Test
void testEquals() {
assertEquals(distroKey1, distroKey2);
} |
@VisibleForTesting
public int getQueueUserAclsFailedRetrieved() {
return numGetQueueUserAclsFailedRetrieved.value();
} | @Test
public void testQueueUserAclsFailed() {
long totalBadBefore = metrics.getQueueUserAclsFailedRetrieved();
badSubCluster.getQueueUserAcls();
Assert.assertEquals(totalBadBefore + 1, metrics.getQueueUserAclsFailedRetrieved());
} |