focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Resolves variable interpolations in {@code toInterpolate} using the context's
 * secret source resolver.
 *
 * @deprecated retained for binary compatibility only and marked {@code DoNotUse};
 *             call {@code context.getSecretSourceResolver().resolve(...)} directly.
 */
@Deprecated @Restricted(DoNotUse.class) public static String resolve(ConfigurationContext context, String toInterpolate) { return context.getSecretSourceResolver().resolve(toInterpolate); }
@Test
public void resolve_JsonWithNewlineBetweenTokens() {
    // The json: lookup must extract key "a" even when the JSON payload stored in
    // the FOO variable contains newlines between its tokens.
    String json = "{ \n \"a\": 1, \n \"b\": 2 }";
    environment.set("FOO", json);
    String resolved = resolve("${json:a:${FOO}}");
    assertThat(resolved, equalTo("1"));
}
/**
 * Thrift handler returning the status of tables (or views, when the request type is
 * {@code VIEW}) in the requested database.
 * <p>
 * Behavior visible in the code below:
 * <ul>
 *   <li>An optional MySQL-style name pattern is compiled up front; a malformed
 *       pattern is reported back as a {@code TException}.</li>
 *   <li>The current user is taken from {@code current_user_ident} when set,
 *       otherwise reconstructed from {@code user}/{@code user_ip}.</li>
 *   <li>The database is scanned under a READ lock, released in {@code finally}.</li>
 *   <li>Tables the user has no action privilege on are silently skipped; for views,
 *       the user must additionally be authorized on every base table, otherwise the
 *       whole view is skipped (the labeled {@code continue OUTER}).</li>
 *   <li>A {@code SemanticException} while resolving a view's base tables is ignored
 *       on purpose — the view may be invalid (e.g. a dropped base table) but its
 *       status is still returned.</li>
 *   <li>If {@code limit} is set and positive, at most that many rows are returned.</li>
 * </ul>
 * NOTE(review): an unknown database yields an empty (not error) result — confirm
 * that this is the intended contract for callers.
 */
@Override public TListTableStatusResult listTableStatus(TGetTablesParams params) throws TException { LOG.debug("get list table request: {}", params); TListTableStatusResult result = new TListTableStatusResult(); List<TTableStatus> tablesResult = Lists.newArrayList(); result.setTables(tablesResult); PatternMatcher matcher = null; boolean caseSensitive = CaseSensibility.TABLE.getCaseSensibility(); if (params.isSetPattern()) { try { matcher = PatternMatcher.createMysqlPattern(params.getPattern(), caseSensitive); } catch (SemanticException e) { throw new TException("Pattern is in bad format " + params.getPattern()); } } Database db = GlobalStateMgr.getCurrentState().getDb(params.db); long limit = params.isSetLimit() ? params.getLimit() : -1; UserIdentity currentUser; if (params.isSetCurrent_user_ident()) { currentUser = UserIdentity.fromThrift(params.current_user_ident); } else { currentUser = UserIdentity.createAnalyzedUserIdentWithIp(params.user, params.user_ip); } if (db != null) { Locker locker = new Locker(); locker.lockDatabase(db, LockType.READ); try { boolean listingViews = params.isSetType() && TTableType.VIEW.equals(params.getType()); List<Table> tables = listingViews ? 
db.getViews() : db.getTables(); OUTER: for (Table table : tables) { try { Authorizer.checkAnyActionOnTableLikeObject(currentUser, null, params.db, table); } catch (AccessDeniedException e) { continue; } if (!PatternMatcher.matchPattern(params.getPattern(), table.getName(), matcher, caseSensitive)) { continue; } TTableStatus status = new TTableStatus(); status.setName(table.getName()); status.setType(table.getMysqlType()); status.setEngine(table.getEngine()); status.setComment(table.getComment()); status.setCreate_time(table.getCreateTime()); status.setLast_check_time(table.getLastCheckTime()); if (listingViews) { View view = (View) table; String ddlSql = view.getInlineViewDef(); ConnectContext connectContext = new ConnectContext(); connectContext.setQualifiedUser(AuthenticationMgr.ROOT_USER); connectContext.setCurrentUserIdentity(UserIdentity.ROOT); connectContext.setCurrentRoleIds(Sets.newHashSet(PrivilegeBuiltinConstants.ROOT_ROLE_ID)); try { List<TableName> allTables = view.getTableRefs(); for (TableName tableName : allTables) { Table tbl = db.getTable(tableName.getTbl()); if (tbl != null) { try { Authorizer.checkAnyActionOnTableLikeObject(currentUser, null, db.getFullName(), tbl); } catch (AccessDeniedException e) { continue OUTER; } } } } catch (SemanticException e) { // ignore semantic exception because view maybe invalid } status.setDdl_sql(ddlSql); } tablesResult.add(status); // if user set limit, then only return limit size result if (limit > 0 && tablesResult.size() >= limit) { break; } } } finally { locker.unLockDatabase(db, LockType.READ); } } return result; }
/**
 * Verifies that listTableStatus still returns view metadata after the view's base
 * table has been dropped (the view is then semantically invalid but must not be
 * filtered out). The setup order matters: create table, create view, drop table,
 * then list. The DDL strings below are exact fixtures — do not reformat them.
 */
@Test public void testListViewStatusWithBaseTableDropped() throws Exception { starRocksAssert.useDatabase("test") .withTable("CREATE TABLE site_access_empty_for_view (\n" + " event_day DATETIME NOT NULL,\n" + " site_id INT DEFAULT '10',\n" + " city_code VARCHAR(100),\n" + " user_name VARCHAR(32) DEFAULT '',\n" + " pv BIGINT DEFAULT '0'\n" + ")\n" + "DUPLICATE KEY(event_day, site_id, city_code, user_name)\n" + "PARTITION BY date_trunc('day', event_day)\n" + "DISTRIBUTED BY HASH(event_day, site_id)\n" + "PROPERTIES(\n" + " \"replication_num\" = \"1\"\n" + ");"); starRocksAssert.withView("create view test.view11 as select * from test.site_access_empty_for_view"); // drop the base table referenced by test.view11 starRocksAssert.dropTable("test.site_access_empty_for_view"); FrontendServiceImpl impl = new FrontendServiceImpl(exeEnv); TListTableStatusResult result = impl.listTableStatus(buildListTableStatusParam()); System.out.println(result.tables.stream().map(TTableStatus::getName).collect(Collectors.toList())); Assert.assertEquals(8, result.tables.size()); starRocksAssert.dropView("test.view11"); }
/**
 * Returns the resource name for the invoker's interface, delegating to the
 * two-argument overload with {@code false} (i.e. without the group/version
 * decoration that overload can add).
 */
public static String getInterfaceName(Invoker invoker) { return getInterfaceName(invoker, false); }
@Test
public void testGetInterfaceName() {
    // With the group/version flag disabled, the resource name is the plain
    // fully-qualified interface name, ignoring the URL's group/version parameters.
    SentinelConfig.setConfig(DUBBO_INTERFACE_GROUP_VERSION_ENABLED, "false");
    URL url = URL.valueOf("dubbo://127.0.0.1:2181")
        .addParameter(CommonConstants.VERSION_KEY, "1.0.0")
        .addParameter(CommonConstants.GROUP_KEY, "grp1")
        .addParameter(CommonConstants.INTERFACE_KEY, DemoService.class.getName());
    Invoker mockInvoker = mock(Invoker.class);
    when(mockInvoker.getUrl()).thenReturn(url);
    when(mockInvoker.getInterface()).thenReturn(DemoService.class);
    assertEquals("com.alibaba.csp.sentinel.adapter.dubbo.provider.DemoService", DubboUtils.getInterfaceName(mockInvoker));
}
@ExecuteOn(TaskExecutors.IO)
@Get(uri = "logs/{executionId}/download", produces = MediaType.TEXT_PLAIN)
@Operation(tags = {"Logs"}, summary = "Download logs for a specific execution, taskrun or task")
public StreamedFile download(
    @Parameter(description = "The execution id") @PathVariable String executionId,
    @Parameter(description = "The min log level filter") @Nullable @QueryValue Level minLevel,
    @Parameter(description = "The taskrun id") @Nullable @QueryValue String taskRunId,
    @Parameter(description = "The task id") @Nullable @QueryValue String taskId,
    @Parameter(description = "The attempt number") @Nullable @QueryValue Integer attempt
) {
    // Narrowest filter wins: taskId, then taskRunId (optionally with attempt),
    // otherwise all logs of the execution.
    final List<LogEntry> entries;
    if (taskId != null) {
        entries = logRepository.findByExecutionIdAndTaskId(tenantService.resolveTenant(), executionId, taskId, minLevel);
    } else if (taskRunId == null) {
        entries = logRepository.findByExecutionId(tenantService.resolveTenant(), executionId, minLevel);
    } else if (attempt == null) {
        entries = logRepository.findByExecutionIdAndTaskRunId(tenantService.resolveTenant(), executionId, taskRunId, minLevel);
    } else {
        entries = logRepository.findByExecutionIdAndTaskRunIdAndAttempt(tenantService.resolveTenant(), executionId, taskRunId, minLevel, attempt);
    }
    // Serve the pretty-printed entries, one per line, as an attached "<id>.log" file.
    String content = entries.stream().map(LogEntry::toPrettyString).collect(Collectors.joining("\n"));
    InputStream inputStream = new ByteArrayInputStream(content.getBytes());
    return new StreamedFile(inputStream, MediaType.TEXT_PLAIN_TYPE).attach(executionId + ".log");
}
@Test
void download() {
    // Persist three log entries for the same execution, then download them
    // through the HTTP endpoint and check both messages are present.
    LogEntry first = logEntry(Level.INFO);
    LogEntry second = first.toBuilder().message("another message").build();
    LogEntry third = logEntry(Level.DEBUG);
    logRepository.save(first);
    logRepository.save(second);
    logRepository.save(third);
    String body = client.toBlocking().retrieve(
        HttpRequest.GET("/api/v1/logs/" + first.getExecutionId() + "/download"),
        String.class
    );
    assertThat(body, containsString("john doe"));
    assertThat(body, containsString("another message"));
}
/**
 * Returns the nameservice ID selected via the secondary NameNode HTTP address
 * key (delegates to {@code getNameServiceId} with that configuration key).
 */
public static String getSecondaryNameServiceId(Configuration conf) { return getNameServiceId(conf, DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY); }
@Test
public void getSecondaryNameServiceId() {
    // An address configured under the secondary-NN HTTP key should resolve to
    // nameservice "nn1".
    Configuration configuration = setupAddress(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY);
    assertEquals("nn1", DFSUtil.getSecondaryNameServiceId(configuration));
}
/**
 * Decides whether the installed SonarQube version is still considered "active".
 * <p>
 * As written below: the current LTA version (patch digits ignored) is always
 * active; the past LTA version stays active only during a 6-month grace window
 * counted from the release date of the initial release of the current LTA's
 * major line (missing date => IllegalStateException); any other version is
 * active iff it is at least the previous release (again ignoring patch digits).
 */
public boolean evaluateIfActiveVersion(UpdateCenter updateCenter) { Version installedVersion = Version.create(sonarQubeVersion.get().toString()); if (compareWithoutPatchVersion(installedVersion, updateCenter.getSonar().getLtaVersion().getVersion()) == 0) { return true; } SortedSet<Release> allReleases = updateCenter.getSonar().getAllReleases(); if (compareWithoutPatchVersion(installedVersion, updateCenter.getSonar().getPastLtaVersion().getVersion()) == 0) { Release initialLtaRelease = findInitialVersionOfMajorRelease(allReleases, updateCenter.getSonar().getLtaVersion().getVersion()); Date initialLtaReleaseDate = initialLtaRelease.getDate(); if (initialLtaReleaseDate == null) { throw new IllegalStateException("Initial Major release date is missing in releases"); } // date of the latest major release should be within 6 months Calendar c = Calendar.getInstance(); c.setTime(new Date(system2.now())); c.add(Calendar.MONTH, -6); return initialLtaReleaseDate.after(c.getTime()); } else { return compareWithoutPatchVersion(installedVersion, findPreviousReleaseIgnoringPatch(allReleases).getVersion()) >= 0; } }
@Test
void evaluateIfActiveVersion_whenInstalledVersionIsPastLtaAndReleaseDateIsMissing_shouldThrowIllegalStateException() {
    // 8.9.5 matches the past-LTA line; with no release date available the
    // 6-month grace window cannot be computed, so the call must fail loudly.
    when(sonarQubeVersion.get()).thenReturn(parse("8.9.5"));
    when(sonar.getAllReleases()).thenReturn(getReleases());
    assertThatThrownBy(() -> underTest.evaluateIfActiveVersion(updateCenter))
        .isInstanceOf(IllegalStateException.class)
        .hasMessageContaining("Initial Major release date is missing in releases");
}
/**
 * Logs a verbose-level message through the globally installed logger, passing
 * {@code args} straight through for the logger's own message formatting.
 */
public static void v(String tag, String message, Object... args) { sLogger.v(tag, message, args); }
@Test
public void verbose() {
    // LogManager.v must forward tag and message unchanged to the backing logger.
    String expectedTag = "TestTag";
    String expectedMessage = "Test message";
    LogManager.v(expectedTag, expectedMessage);
    verify(logger).v(expectedTag, expectedMessage);
}
/**
 * Always returns {@code null}: this object reports no parent in the logging
 * object hierarchy.
 */
@Override public LoggingObjectInterface getParent() { return null; }
@Test
public void testGetParent() {
    // The meta object must report no parent logging object.
    assertNull(meta.getParent());
}
/**
 * Offers the task to this queue by delegating directly to the superclass
 * {@code offer}; returns {@code false} when the queue rejects the task.
 */
@Override public boolean doOffer(final Runnable runnable) { return super.offer(runnable); }
@Test
public void testOfferWhenMemoryNotSufficient() {
    // A 1-byte memory budget cannot admit any task, so the offer is rejected.
    MemoryLimitedTaskQueue queue = new MemoryLimitedTaskQueue<>(1, instrumentation);
    Runnable noop = () -> { };
    assertFalse(queue.doOffer(noop));
}
/** Returns the node manager held by this instance. */
public NodeMgr getNodeMgr() { return nodeMgr; }
@Test(expected = DdlException.class) public void testUpdateFeNotFoundException() throws Exception { GlobalStateMgr globalStateMgr = mockGlobalStateMgr(); ModifyFrontendAddressClause clause = new ModifyFrontendAddressClause("test", "sandbox-fqdn"); // this case will occur [frontend does not exist] exception globalStateMgr.getNodeMgr().modifyFrontendHost(clause); }
/**
 * Handles the start of a {@code <property>} (or deprecated
 * {@code <substitutionProperty>}) configuration element.
 * <p>
 * Exactly one of three mutually exclusive sources is used, checked in this
 * order: a {@code file} attribute (properties loaded from disk), a
 * {@code resource} attribute (properties loaded via the class loader), or an
 * inline {@code name}/{@code value} pair (escaped, trimmed, substituted, then
 * set). Attribute values go through {@code ec.subst} before use. Any other
 * attribute combination is reported as invalid. I/O failures are reported via
 * {@code addError} rather than thrown.
 */
public void begin(InterpretationContext ec, String localName, Attributes attributes) { if ("substitutionProperty".equals(localName)) { addWarn("[substitutionProperty] element has been deprecated. Please use the [property] element instead."); } String name = attributes.getValue(NAME_ATTRIBUTE); String value = attributes.getValue(VALUE_ATTRIBUTE); String scopeStr = attributes.getValue(SCOPE_ATTRIBUTE); Scope scope = ActionUtil.stringToScope(scopeStr); if (checkFileAttributeSanity(attributes)) { String file = attributes.getValue(FILE_ATTRIBUTE); file = ec.subst(file); try { FileInputStream istream = new FileInputStream(file); loadAndSetProperties(ec, istream, scope); } catch (FileNotFoundException e) { addError("Could not find properties file [" + file + "].", e); } catch (IOException e1) { addError("Could not read properties file [" + file + "].", e1); } } else if (checkResourceAttributeSanity(attributes)) { String resource = attributes.getValue(RESOURCE_ATTRIBUTE); resource = ec.subst(resource); URL resourceURL = Loader.getResourceBySelfClassLoader(resource); if (resourceURL == null) { addError("Could not find resource [" + resource + "]."); } else { try { InputStream istream = resourceURL.openStream(); loadAndSetProperties(ec, istream, scope); } catch (IOException e) { addError("Could not read resource file [" + resource + "].", e); } } } else if (checkValueNameAttributesSanity(attributes)) { value = RegularEscapeUtil.basicEscape(value); // now remove both leading and trailing spaces value = value.trim(); value = ec.subst(value); ActionUtil.setProperty(ec, name, value, scope); } else { addError(INVALID_ATTRIBUTES); } }
@Test
public void noName() {
    // A property element carrying only a value (no name attribute) is invalid
    // and must produce exactly one error status.
    atts.setValue("value", "v1");
    propertyAction.begin(ec, null, atts);
    assertEquals(1, context.getStatusManager().getCount());
    assertTrue(checkError());
}
/**
 * Appends all content of {@code source} to {@code destination}.
 * <p>
 * The merge proceeds strictly in the order written below, and later steps depend
 * on earlier ones (e.g. pages must be cloned before the OpenAction page index can
 * be reassigned, and the structure-tree bookkeeping spans the page loop):
 * document information and version; OpenAction (remembering a page-destination
 * index for later reassignment, PDFBOX-3972); AcroForm; Threads; Names (with a
 * bogus /IDTree removed, PDFBOX-4417); named destinations; outline (appended
 * after the last existing sibling); page mode; page labels (indices shifted by
 * the destination page count); Metadata; OCProperties; output intents; logical
 * structure tree (a dummy root is created in the destination if absent so the
 * source tree gets cloned, PDFBOX-3999); then every source page is cloned with
 * crop/media box and rotation preserved, resources cloned or defaulted, and
 * StructParent entries either renumbered (when merging structure) or removed
 * (PDFBOX-4429); finally the parent tree, K entries, role map, ID tree, mark
 * info, language and viewer preferences are merged.
 * <p>
 * Preconditions: both documents must be open, and the source must not contain a
 * dynamic XFA form. Some read failures (invalid OpenAction, unreadable
 * Metadata) are logged and skipped rather than aborting the merge.
 *
 * @param destination document to receive the appended content
 * @param source document whose content is appended (not modified)
 * @throws IOException if either document is closed, the source has dynamic XFA
 *         content, or cloning fails
 */
public void appendDocument(PDDocument destination, PDDocument source) throws IOException { if (source.getDocument().isClosed()) { throw new IOException("Error: source PDF is closed."); } if (destination.getDocument().isClosed()) { throw new IOException("Error: destination PDF is closed."); } PDDocumentCatalog srcCatalog = source.getDocumentCatalog(); if (isDynamicXfa(srcCatalog.getAcroForm())) { throw new IOException("Error: can't merge source document containing dynamic XFA form content."); } PDDocumentInformation destInfo = destination.getDocumentInformation(); PDDocumentInformation srcInfo = source.getDocumentInformation(); mergeInto(srcInfo.getCOSObject(), destInfo.getCOSObject(), Collections.emptySet()); // use the highest version number for the resulting pdf float destVersion = destination.getVersion(); float srcVersion = source.getVersion(); if (destVersion < srcVersion) { destination.setVersion(srcVersion); } int pageIndexOpenActionDest = -1; PDDocumentCatalog destCatalog = destination.getDocumentCatalog(); if (destCatalog.getOpenAction() == null) { // PDFBOX-3972: get local dest page index, it must be reassigned after the page cloning PDDestinationOrAction openAction = null; try { openAction = srcCatalog.getOpenAction(); } catch (IOException ex) { // PDFBOX-4223 LOG.error("Invalid OpenAction ignored", ex); } PDDestination openActionDestination = null; if (openAction instanceof PDActionGoTo) { openActionDestination = ((PDActionGoTo) openAction).getDestination(); } else if (openAction instanceof PDDestination) { openActionDestination = (PDDestination) openAction; } // note that it can also be something else, e.g. 
PDActionJavaScript, then do nothing if (openActionDestination instanceof PDPageDestination) { PDPage page = ((PDPageDestination) openActionDestination).getPage(); if (page != null) { pageIndexOpenActionDest = srcCatalog.getPages().indexOf(page); } } destCatalog.setOpenAction(openAction); } PDFCloneUtility cloner = new PDFCloneUtility(destination); mergeAcroForm(cloner, destCatalog, srcCatalog); COSArray destThreads = destCatalog.getCOSObject().getCOSArray(COSName.THREADS); COSArray srcThreads = (COSArray) cloner.cloneForNewDocument(destCatalog.getCOSObject().getDictionaryObject( COSName.THREADS)); if (destThreads == null) { destCatalog.getCOSObject().setItem(COSName.THREADS, srcThreads); } else { destThreads.addAll(srcThreads); } PDDocumentNameDictionary destNames = destCatalog.getNames(); PDDocumentNameDictionary srcNames = srcCatalog.getNames(); if (srcNames != null) { if (destNames == null) { destCatalog.getCOSObject().setItem(COSName.NAMES, cloner.cloneForNewDocument(srcNames.getCOSObject())); } else { cloner.cloneMerge(srcNames, destNames); } } if (destNames != null && destNames.getCOSObject().containsKey(COSName.ID_TREE)) { // found in 001031.pdf from PDFBOX-4417 and doesn't belong there destNames.getCOSObject().removeItem(COSName.ID_TREE); LOG.warn("Removed /IDTree from /Names dictionary, doesn't belong there"); } PDDocumentNameDestinationDictionary srcDests = srcCatalog.getDests(); if (srcDests != null) { PDDocumentNameDestinationDictionary destDests = destCatalog.getDests(); if (destDests == null) { destCatalog.getCOSObject().setItem(COSName.DESTS, cloner.cloneForNewDocument(srcDests.getCOSObject())); } else { cloner.cloneMerge(srcDests, destDests); } } PDDocumentOutline srcOutline = srcCatalog.getDocumentOutline(); if (srcOutline != null) { PDDocumentOutline destOutline = destCatalog.getDocumentOutline(); if (destOutline == null || destOutline.getFirstChild() == null) { PDDocumentOutline cloned = new PDDocumentOutline( 
cloner.cloneForNewDocument(srcOutline.getCOSObject())); destCatalog.setDocumentOutline(cloned); } else { // search last sibling for dest, because /Last entry is sometimes wrong PDOutlineItem destLastOutlineItem = destOutline.getFirstChild(); while (true) { PDOutlineItem outlineItem = destLastOutlineItem.getNextSibling(); if (outlineItem == null) { break; } destLastOutlineItem = outlineItem; } for (PDOutlineItem item : srcOutline.children()) { // get each child, clone its dictionary, remove siblings info, // append outline item created from there COSDictionary clonedDict = cloner.cloneForNewDocument(item.getCOSObject()); clonedDict.removeItem(COSName.PREV); clonedDict.removeItem(COSName.NEXT); PDOutlineItem clonedItem = new PDOutlineItem(clonedDict); destLastOutlineItem.insertSiblingAfter(clonedItem); destLastOutlineItem = destLastOutlineItem.getNextSibling(); } } } PageMode destPageMode = destCatalog.getPageMode(); if (destPageMode == null) { PageMode srcPageMode = srcCatalog.getPageMode(); destCatalog.setPageMode(srcPageMode); } COSDictionary srcLabels = srcCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS); if (srcLabels != null) { int destPageCount = destination.getNumberOfPages(); COSArray destNums; COSDictionary destLabels = destCatalog.getCOSObject().getCOSDictionary(COSName.PAGE_LABELS); if (destLabels == null) { destLabels = new COSDictionary(); destNums = new COSArray(); destLabels.setItem(COSName.NUMS, destNums); destCatalog.getCOSObject().setItem(COSName.PAGE_LABELS, destLabels); } else { destNums = (COSArray) destLabels.getDictionaryObject(COSName.NUMS); } COSArray srcNums = (COSArray) srcLabels.getDictionaryObject(COSName.NUMS); if (srcNums != null) { int startSize = destNums.size(); for (int i = 0; i < srcNums.size(); i += 2) { COSBase base = srcNums.getObject(i); if (!(base instanceof COSNumber)) { LOG.error("page labels ignored, index {} should be a number, but is {}", i, base); // remove what we added while (destNums.size() > startSize) { 
destNums.remove(startSize); } break; } COSNumber labelIndex = (COSNumber) base; long labelIndexValue = labelIndex.intValue(); destNums.add(COSInteger.get(labelIndexValue + destPageCount)); destNums.add(cloner.cloneForNewDocument(srcNums.getObject(i + 1))); } } } COSStream destMetadata = destCatalog.getCOSObject().getCOSStream(COSName.METADATA); COSStream srcMetadata = srcCatalog.getCOSObject().getCOSStream(COSName.METADATA); if (destMetadata == null && srcMetadata != null) { try { PDStream newStream = new PDStream(destination, srcMetadata.createInputStream(), (COSName) null); mergeInto(srcMetadata, newStream.getCOSObject(), new HashSet<>(Arrays.asList(COSName.FILTER, COSName.LENGTH))); destCatalog.getCOSObject().setItem(COSName.METADATA, newStream); } catch (IOException ex) { // PDFBOX-4227 cleartext XMP stream with /Flate LOG.error("Metadata skipped because it could not be read", ex); } } COSDictionary destOCP = destCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES); COSDictionary srcOCP = srcCatalog.getCOSObject().getCOSDictionary(COSName.OCPROPERTIES); if (destOCP == null && srcOCP != null) { destCatalog.getCOSObject().setItem(COSName.OCPROPERTIES, cloner.cloneForNewDocument(srcOCP)); } else if (destOCP != null && srcOCP != null) { cloner.cloneMerge(srcOCP, destOCP); } mergeOutputIntents(cloner, srcCatalog, destCatalog); // merge logical structure hierarchy boolean mergeStructTree = false; int destParentTreeNextKey = -1; Map<Integer, COSObjectable> srcNumberTreeAsMap = null; Map<Integer, COSObjectable> destNumberTreeAsMap = null; PDStructureTreeRoot srcStructTree = srcCatalog.getStructureTreeRoot(); PDStructureTreeRoot destStructTree = destCatalog.getStructureTreeRoot(); if (destStructTree == null && srcStructTree != null) { // create a dummy structure tree in the destination, so that the source // tree is cloned. 
(We can't just copy the tree reference due to PDFBOX-3999) destStructTree = new PDStructureTreeRoot(); destCatalog.setStructureTreeRoot(destStructTree); destStructTree.setParentTree(new PDNumberTreeNode(PDParentTreeValue.class)); // PDFBOX-4429: remove bogus StructParent(s) for (PDPage page : destCatalog.getPages()) { page.getCOSObject().removeItem(COSName.STRUCT_PARENTS); for (PDAnnotation ann : page.getAnnotations()) { ann.getCOSObject().removeItem(COSName.STRUCT_PARENT); } } } if (destStructTree != null) { PDNumberTreeNode destParentTree = destStructTree.getParentTree(); destParentTreeNextKey = destStructTree.getParentTreeNextKey(); if (destParentTree != null) { destNumberTreeAsMap = getNumberTreeAsMap(destParentTree); if (destParentTreeNextKey < 0) { if (destNumberTreeAsMap.isEmpty()) { destParentTreeNextKey = 0; } else { destParentTreeNextKey = Collections.max(destNumberTreeAsMap.keySet()) + 1; } } if (destParentTreeNextKey >= 0 && srcStructTree != null) { PDNumberTreeNode srcParentTree = srcStructTree.getParentTree(); if (srcParentTree != null) { srcNumberTreeAsMap = getNumberTreeAsMap(srcParentTree); if (!srcNumberTreeAsMap.isEmpty()) { mergeStructTree = true; } } } } } Map<COSDictionary, COSDictionary> objMapping = new HashMap<>(); int pageIndex = 0; PDPageTree destinationPageTree = destination.getPages(); // cache PageTree for (PDPage page : srcCatalog.getPages()) { PDPage newPage = new PDPage(cloner.cloneForNewDocument(page.getCOSObject())); if (!mergeStructTree) { // PDFBOX-4429: remove bogus StructParent(s) newPage.getCOSObject().removeItem(COSName.STRUCT_PARENTS); for (PDAnnotation ann : newPage.getAnnotations()) { ann.getCOSObject().removeItem(COSName.STRUCT_PARENT); } } newPage.setCropBox(page.getCropBox()); newPage.setMediaBox(page.getMediaBox()); newPage.setRotation(page.getRotation()); PDResources resources = page.getResources(); if (resources != null) { // this is smart enough to just create references for resources that are used on multiple 
pages newPage.setResources(new PDResources( cloner.cloneForNewDocument(resources.getCOSObject()))); } else { newPage.setResources(new PDResources()); } if (mergeStructTree) { // add the value of the destination ParentTreeNextKey to every source element // StructParent(s) value so that these don't overlap with the existing values updateStructParentEntries(newPage, destParentTreeNextKey); objMapping.put(page.getCOSObject(), newPage.getCOSObject()); List<PDAnnotation> oldAnnots = page.getAnnotations(); List<PDAnnotation> newAnnots = newPage.getAnnotations(); for (int i = 0; i < oldAnnots.size(); i++) { objMapping.put(oldAnnots.get(i).getCOSObject(), newAnnots.get(i).getCOSObject()); } // TODO update mapping for XObjects } destinationPageTree.add(newPage); if (pageIndex == pageIndexOpenActionDest) { // PDFBOX-3972: reassign the page. // The openAction is either a PDActionGoTo or a PDPageDestination PDDestinationOrAction openAction = destCatalog.getOpenAction(); PDPageDestination pageDestination; if (openAction instanceof PDActionGoTo) { pageDestination = (PDPageDestination) ((PDActionGoTo) openAction).getDestination(); } else { pageDestination = (PDPageDestination) openAction; } pageDestination.setPage(newPage); } ++pageIndex; } if (mergeStructTree) { updatePageReferences(cloner, srcNumberTreeAsMap, objMapping); int maxSrcKey = -1; for (Map.Entry<Integer, COSObjectable> entry : srcNumberTreeAsMap.entrySet()) { int srcKey = entry.getKey(); maxSrcKey = Math.max(srcKey, maxSrcKey); destNumberTreeAsMap.put(destParentTreeNextKey + srcKey, cloner.cloneForNewDocument(entry.getValue().getCOSObject())); } destParentTreeNextKey += maxSrcKey + 1; PDNumberTreeNode newParentTreeNode = new PDNumberTreeNode(PDParentTreeValue.class); // Note that all elements are stored flatly. This could become a problem for large files // when these are opened in a viewer that uses the tagging information. 
// If this happens, then ​PDNumberTreeNode should be improved with a convenience method that // stores the map into a B+Tree, see https://en.wikipedia.org/wiki/B+_tree newParentTreeNode.setNumbers(destNumberTreeAsMap); destStructTree.setParentTree(newParentTreeNode); destStructTree.setParentTreeNextKey(destParentTreeNextKey); mergeKEntries(cloner, srcStructTree, destStructTree); mergeRoleMap(srcStructTree, destStructTree); mergeIDTree(cloner, srcStructTree, destStructTree); mergeMarkInfo(destCatalog, srcCatalog); mergeLanguage(destCatalog, srcCatalog); mergeViewerPreferences(destCatalog, srcCatalog); } }
/**
 * Regression test for PDFBOX-3999: merging a document's structure tree with a
 * copy of itself must not break the source document's streams. The flatten /
 * save / reload / merge / close sequence below is deliberate — before the fix,
 * src.close() after appendDocument() threw "COSStream has been closed and
 * cannot be read". The merged tree is expected to contain exactly twice the
 * element count of the single document (134 -> 268).
 */
@Test void testStructureTreeMerge2() throws IOException { PDFMergerUtility pdfMergerUtility = new PDFMergerUtility(); PDDocument doc = Loader .loadPDF(new File(TARGETPDFDIR, "PDFBOX-3999-GeneralForbearance.pdf")); doc.getDocumentCatalog().getAcroForm().flatten(); doc.save(new File(TARGETTESTDIR, "PDFBOX-3999-GeneralForbearance-flattened.pdf")); ElementCounter elementCounter = new ElementCounter(); elementCounter.walk(doc.getDocumentCatalog().getStructureTreeRoot().getK()); int singleCnt = elementCounter.cnt; int singleSetSize = elementCounter.set.size(); assertEquals(134, singleCnt); assertEquals(134, singleSetSize); doc.close(); PDDocument src = Loader .loadPDF(new File(TARGETTESTDIR, "PDFBOX-3999-GeneralForbearance-flattened.pdf")); PDDocument dst = Loader .loadPDF(new File(TARGETTESTDIR, "PDFBOX-3999-GeneralForbearance-flattened.pdf")); pdfMergerUtility.appendDocument(dst, src); // before solving PDFBOX-3999, the close() below brought // IOException: COSStream has been closed and cannot be read. src.close(); dst.save(new File(TARGETTESTDIR, "PDFBOX-3999-GeneralForbearance-flattened-merged.pdf")); dst.close(); doc = Loader.loadPDF( new File(TARGETTESTDIR, "PDFBOX-3999-GeneralForbearance-flattened-merged.pdf")); checkForPageOrphans(doc); // Assume that the merged tree has double element count elementCounter = new ElementCounter(); elementCounter.walk(doc.getDocumentCatalog().getStructureTreeRoot().getK()); assertEquals(singleCnt * 2, elementCounter.cnt); assertEquals(singleSetSize * 2, elementCounter.set.size()); doc.close(); }
/**
 * Validates that a {@code CSVFormat} is usable for parsing: a non-empty header
 * with no null/empty column names, no missing-column-name allowance, no
 * header-case ignoring, no duplicate header names, and no header-record
 * skipping (the header is already accounted for by the caller).
 * <p>
 * NOTE: the checks run in the order written, so the first violated rule
 * determines which IllegalArgumentException message the caller sees — tests
 * assert on these exact messages, do not reorder.
 *
 * @throws IllegalArgumentException if any rule above is violated
 */
static void validateCsvFormat(CSVFormat format) { String[] header = checkArgumentNotNull(format.getHeader(), "Illegal %s: header is required", CSVFormat.class); checkArgument(header.length > 0, "Illegal %s: header cannot be empty", CSVFormat.class); checkArgument( !format.getAllowMissingColumnNames(), "Illegal %s: cannot allow missing column names", CSVFormat.class); checkArgument( !format.getIgnoreHeaderCase(), "Illegal %s: cannot ignore header case", CSVFormat.class); checkArgument( !format.getAllowDuplicateHeaderNames(), "Illegal %s: cannot allow duplicate header names", CSVFormat.class); for (String columnName : header) { checkArgument( !Strings.isNullOrEmpty(columnName), "Illegal %s: column name is required", CSVFormat.class); } checkArgument( !format.getSkipHeaderRecord(), "Illegal %s: cannot skip header record because the header is already accounted for", CSVFormat.class); }
@Test
public void givenCSVFormatThatAllowsMissingColumnNames_throwsException() {
    // Allowing missing column names must be rejected with the exact message
    // produced by validateCsvFormat.
    CSVFormat format = csvFormatWithHeader().withAllowMissingColumnNames(true);
    IllegalArgumentException thrown =
        assertThrows(IllegalArgumentException.class, () -> CsvIOParseHelpers.validateCsvFormat(format));
    assertEquals(
        "Illegal class org.apache.commons.csv.CSVFormat: cannot allow missing column names",
        thrown.getMessage());
}
private void readPriorityFrame(ChannelHandlerContext ctx, ByteBuf payload, Http2FrameListener listener)
        throws Http2Exception {
    // First 4 bytes carry the E (exclusive) flag in the high bit and the
    // 31-bit stream dependency in the rest.
    long dependencyWord = payload.readUnsignedInt();
    int streamDependency = (int) (dependencyWord & 0x7FFFFFFFL);
    boolean exclusive = (dependencyWord & 0x80000000L) != 0;
    // A stream must never declare a dependency on itself (connection spec rule).
    if (streamDependency == streamId) {
        throw streamError(streamId, PROTOCOL_ERROR, "A stream cannot depend on itself.");
    }
    // Weight is transmitted as (value - 1); add one back to get the 1..256 range.
    short weight = (short) (payload.readUnsignedByte() + 1);
    listener.onPriorityRead(ctx, streamId, streamDependency, weight, exclusive);
}
@Test
public void readPriorityFrame() throws Http2Exception {
    // A well-formed PRIORITY frame (dep=1, non-exclusive, weight 10) must be
    // consumed without error; release the buffer even on failure.
    ByteBuf frame = Unpooled.buffer();
    try {
        writePriorityFrame(frame, 1, 0, 10);
        frameReader.readFrame(ctx, frame, listener);
    } finally {
        frame.release();
    }
}
/**
 * Replaces the global white-list of remote addresses by delegating to the ACL
 * plug-in engine; returns the engine's success flag.
 */
@Override public boolean updateGlobalWhiteAddrsConfig(List<String> globalWhiteAddrsList) { return aclPlugEngine.updateGlobalWhiteAddrsConfig(globalWhiteAddrsList); }
/**
 * Updates the global white-list of remote addresses and verifies that the ACL
 * config file's data version counter is bumped to 1. The backup file is copied
 * over the live config both before and after the update so the on-disk state is
 * restored for other tests — the restore order is essential, do not move it.
 * NOTE(review): the validator here is called with a two-argument overload
 * (addrs, null), unlike the single-argument focal method — confirm which
 * overload is under test.
 */
@Test public void updateGlobalWhiteRemoteAddressesTest() throws InterruptedException { String backupFileName = System.getProperty("rocketmq.home.dir") + File.separator + "conf/plain_acl_bak.yml".replace("/", File.separator); String targetFileName = System.getProperty("rocketmq.home.dir") + File.separator + "conf/plain_acl.yml".replace("/", File.separator); PlainAccessData backUpAclConfigMap = AclUtils.getYamlDataObject(backupFileName, PlainAccessData.class); AclUtils.writeDataObject(targetFileName, backUpAclConfigMap); List<String> globalWhiteAddrsList = new ArrayList<>(); globalWhiteAddrsList.add("192.168.1.*"); PlainAccessValidator plainAccessValidator = new PlainAccessValidator(); Assert.assertEquals(plainAccessValidator.updateGlobalWhiteAddrsConfig(globalWhiteAddrsList, null), true); String aclFileName = System.getProperty("rocketmq.home.dir") + File.separator + "conf/plain_acl.yml".replace("/", File.separator); PlainAccessData readableMap = AclUtils.getYamlDataObject(aclFileName, PlainAccessData.class); List<PlainAccessData.DataVersion> dataVersions = readableMap.getDataVersion(); Assert.assertEquals(1L, dataVersions.get(0).getCounter()); AclUtils.writeDataObject(targetFileName, backUpAclConfigMap); }
@Override
public Collection<SchemaMetaData> load(final MetaDataLoaderMaterial material) throws SQLException {
    // Wrap the raw connection so Oracle-specific metadata behavior is applied,
    // and make sure it is closed once the table metadata has been collected.
    Collection<TableMetaData> tables = new LinkedList<>();
    DatabaseType oracleType = TypedSPILoader.getService(DatabaseType.class, "Oracle");
    try (Connection connection = new MetaDataLoaderConnection(oracleType, material.getDataSource().getConnection())) {
        tables.addAll(getTableMetaDataList(connection, connection.getSchema(), material.getActualTableNames()));
    }
    // All tables live under the default schema of the material.
    return Collections.singletonList(new SchemaMetaData(material.getDefaultSchemaName(), tables));
}
/**
 * Loads table metadata against a mocked Oracle 11.2 connection ("condition 6"
 * column query variant) and verifies the resulting schema: the "id" column is
 * a primary-key INTEGER and "name" a VARCHAR with the expected flags. The mock
 * wiring order (columns, indexes, user name, primary keys, then DB version)
 * mirrors the loader's query sequence — keep it intact.
 */
@Test void assertLoadCondition6() throws SQLException { DataSource dataSource = mockDataSource(); ResultSet resultSet = mockTableMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ALL_TAB_COLUMNS_SQL_CONDITION6).executeQuery()).thenReturn(resultSet); ResultSet indexResultSet = mockIndexMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ALL_INDEXES_SQL).executeQuery()).thenReturn(indexResultSet); when(dataSource.getConnection().getMetaData().getUserName()).thenReturn("TEST"); ResultSet primaryKeys = mockPrimaryKeysMetaDataResultSet(); when(dataSource.getConnection().prepareStatement(ALL_CONSTRAINTS_SQL_WITH_TABLES).executeQuery()).thenReturn(primaryKeys); when(dataSource.getConnection().getMetaData().getDatabaseMajorVersion()).thenReturn(11); when(dataSource.getConnection().getMetaData().getDatabaseMinorVersion()).thenReturn(2); Collection<SchemaMetaData> actual = getDialectTableMetaDataLoader().load(new MetaDataLoaderMaterial(Collections.singleton("tbl"), dataSource, new OracleDatabaseType(), "sharding_db")); assertTableMetaDataMap(actual); TableMetaData actualTableMetaData = actual.iterator().next().getTables().iterator().next(); Iterator<ColumnMetaData> columnsIterator = actualTableMetaData.getColumns().iterator(); assertThat(columnsIterator.next(), is(new ColumnMetaData("id", Types.INTEGER, true, false, false, true, false, false))); assertThat(columnsIterator.next(), is(new ColumnMetaData("name", Types.VARCHAR, false, false, false, false, false, true))); }
/**
 * Concatenates two closeable iterators into one, delegating to the list-based
 * overload with the pair {@code [a, b]}.
 */
public static <T> CloseableIterator<T> concat(CloseableIterator<T> a, CloseableIterator<T> b) { return concat(Lists.newArrayList(a, b)); }
@Test
public void concatList() {
    // Concatenating three iterators must yield their elements in order, and
    // closing the concatenation must close every underlying iterator.
    ArrayList<Integer> first = Lists.newArrayList(1, 2);
    ArrayList<Integer> second = Lists.newArrayList(3, 4, 5);
    ArrayList<Integer> third = Lists.newArrayList(0);
    TestIterator closeable1 = new TestIterator(first);
    TestIterator closeable2 = new TestIterator(second);
    TestIterator closeable3 = new TestIterator(third);
    CloseableIterator<Integer> concatenated = CloseableIterator.concat(
        Lists.newArrayList(closeable1, closeable2, closeable3));
    Iterators.elementsEqual(Iterators.concat(first.iterator(), second.iterator(), third.iterator()), concatenated.get());
    concatenated.close();
    Assert.assertTrue(closeable1.isClosed());
    Assert.assertTrue(closeable2.isClosed());
    Assert.assertTrue(closeable3.isClosed());
}
/**
 * Writes a call site reference in smali syntax:
 * {@code name(methodName, proto, extraArgs...)@linkerMethod}.
 * <p>
 * Fix: the original validated the linker method handle only AFTER the name,
 * method name, proto and extra arguments had already been written, so an
 * invalid call site left partial output behind. The check is now performed
 * up front, before anything is emitted — the same IllegalArgumentException is
 * still thrown for a non-invoke-static handle.
 *
 * @throws IOException if the underlying writer fails
 * @throws IllegalArgumentException if the linker method handle is not invoke-static
 */
@Override public void writeCallSite(CallSiteReference callSiteReference) throws IOException {
    // Validate before emitting any output so a bad call site cannot corrupt the stream.
    MethodHandleReference methodHandle = callSiteReference.getMethodHandle();
    if (methodHandle.getMethodHandleType() != MethodHandleType.INVOKE_STATIC) {
        throw new IllegalArgumentException("The linker method handle for a call site must be of type invoke-static");
    }
    writeSimpleName(callSiteReference.getName());
    writer.write('(');
    writeQuotedString(callSiteReference.getMethodName());
    writer.write(", ");
    writeMethodProtoDescriptor(callSiteReference.getMethodProto());
    for (EncodedValue encodedValue : callSiteReference.getExtraArguments()) {
        writer.write(", ");
        writeEncodedValue(encodedValue);
    }
    writer.write(")@");
    writeMethodDescriptor((MethodReference) methodHandle.getMemberReference());
}
/**
 * Verifies that writeCallSite backtick-quotes every identifier containing spaces (call-site name,
 * method names, parameter/return types, field names) in the emitted smali text.
 */
@Test public void testWriteCallsite_withSpaces() throws IOException { BaksmaliWriter writer = new BaksmaliWriter(output); writer.writeCallSite(new ImmutableCallSiteReference( "callsiteName with spaces", getInvokeStaticMethodHandleReferenceForMethodWithSpaces(), "callSiteMethodName with spaces", getMethodProtoReferenceWithSpaces(), ImmutableList.of( new ImmutableFieldEncodedValue(getFieldReferenceWithSpaces()), new ImmutableMethodEncodedValue(getMethodReferenceWithSpaces())))); Assert.assertEquals( "`callsiteName with spaces`(\"callSiteMethodName with spaces\", " + "(L`param with spaces 1`;L`param with spaces 2`;)Lreturn/type/`with spaces`;, " + "Ldefining/class/`with spaces`;->`fieldName with spaces`:Lfield/`type with spaces`;, " + "Ldefining/class/`with spaces`;->`methodName with spaces`(" + "L`param with spaces 1`;L`param with spaces 2`;)Lreturn/type/`with spaces`;)@" + "Ldefining/class/`with spaces`;->`methodName with spaces`(" + "L`param with spaces 1`;L`param with spaces 2`;)Lreturn/type/`with spaces`;", output.toString()); }
/**
 * Builds a JAAS configuration entry of the form
 * {@code <moduleName> required key1="value1" key2="value2";}.
 *
 * @param moduleName login module name; must be non-empty and must not contain '=' or ';'
 * @param options    option map; keys must not contain '=' or ';' (values are quoted verbatim)
 * @return the formatted JAAS configuration string
 * @throws IllegalArgumentException if the module name or any option key is invalid
 * @throws NullPointerException if any option key or value is null
 */
public static String jaasConfig(String moduleName, Map<String, String> options) {
    // BUG FIX: the module-name validation used to live inside the options loop, so an empty
    // options map skipped validation entirely (and non-empty maps re-validated per entry).
    // Validate it up front, exactly once.
    if (moduleName.isEmpty() || moduleName.contains(";") || moduleName.contains("=")) {
        throw new IllegalArgumentException("module name must be not empty and must not contain '=' or ';'");
    }
    StringJoiner joiner = new StringJoiner(" ");
    for (Entry<String, String> entry : options.entrySet()) {
        String key = Objects.requireNonNull(entry.getKey());
        String value = Objects.requireNonNull(entry.getValue());
        if (key.contains("=") || key.contains(";")) {
            throw new IllegalArgumentException("Keys must not contain '=' or ';'");
        }
        // NOTE(review): values are not escaped; a value containing '"' or ';' would corrupt the
        // generated entry — confirm whether escaping should be enforced here as well.
        joiner.add(key + "=\"" + value + "\"");
    }
    return moduleName + " required " + joiner + ";";
}
/**
 * Verifies that jaasConfig rejects a module name containing ';' with IllegalArgumentException.
 */
@Test public void testModuleNameContainsSemicolon() { Map<String, String> options = new HashMap<>(); options.put("key1", "value1"); String moduleName = "Module;"; assertThrows(IllegalArgumentException.class, () -> AuthenticationUtils.jaasConfig(moduleName, options)); }
/**
 * Applies a batch of dynamic-configuration group changes: a DELETE event removes the OpenAPI
 * definition for that group item, any other event type stores the new value. After applying all
 * items, the endpoint grouping rules are rebuilt from the remaining OpenAPI definitions.
 */
@Override public void notifyGroup(final Map<String, ConfigChangeEvent> groupItems) { groupItems.forEach((groupItemName, event) -> { if (EventType.DELETE.equals(event.getEventType())) { this.openapiDefs.remove(groupItemName); log.info("EndpointNameGroupingRule4OpenapiWatcher removed groupItem: {}", groupItemName); } else { this.openapiDefs.put(groupItemName, event.getNewValue()); log.info("EndpointNameGroupingRule4OpenapiWatcher modified groupItem: {}", groupItemName); } }); this.grouping.setEndpointGroupingRule4Openapi(new EndpointGroupingRuleReader4Openapi(openapiDefs).read()); }
/**
 * End-to-end check of the OpenAPI endpoint-grouping watcher: initially the endpoint formats to
 * the default "/products/{id}" pattern; after a MODIFY event delivering an OpenAPI document whose
 * path was changed to "/products/{order-id}", formatting follows the new pattern; after a DELETE
 * event removes the definition, the raw endpoint name is returned unchanged.
 */
@Test public void testWatcher() throws FileNotFoundException { EndpointNameGrouping endpointNameGrouping = new EndpointNameGrouping(); EndpointNameGroupingRule4OpenapiWatcher watcher = new EndpointNameGroupingRule4OpenapiWatcher( new ModuleProvider() { @Override public String name() { return "test"; } @Override public Class<? extends ModuleDefine> module() { return CoreModule.class; } @Override public ConfigCreator newConfigCreator() { return null; } @Override public void prepare() throws ServiceNotProvidedException { } @Override public void start() throws ServiceNotProvidedException { } @Override public void notifyAfterCompleted() throws ServiceNotProvidedException { } @Override public String[] requiredModules() { return new String[0]; } }, endpointNameGrouping); Assertions.assertEquals("GET:/products/{id}", endpointNameGrouping.format("serviceA", "GET:/products/123")._1()); Map<String, ConfigChangeWatcher.ConfigChangeEvent> groupItems = new HashMap<>(); groupItems.put( "serviceA.productAPI-v1", new ConfigChangeWatcher .ConfigChangeEvent( "openapi: 3.0.0\n" + "\n" + "info:\n" + " description: OpenAPI definition for SkyWalking test.\n" + " version: v1\n" + " title: Product API\n" + "\n" + "tags:\n" + " - name: product\n" + " description: product\n" + " - name: relatedProducts\n" + " description: Related Products\n" + "\n" + "paths:\n" + " /products:\n" + " get:\n" + " tags:\n" + " - product\n" + " summary: Get all products list\n" + " description: Get all products list.\n" + " operationId: getProducts\n" + " responses:\n" + " \"200\":\n" + " description: Success\n" + " content:\n" + " application/json:\n" + " schema:\n" + " type: array\n" + " items:\n" + " $ref: \"#/components/schemas/Product\"\n" + " /products/{order-id}:\n" + //modified from /products/{id} " get:\n" + " tags:\n" + " - product\n" + " summary: Get product details\n" + " description: Get product details with the given id.\n" + " operationId: getProduct\n" + " parameters:\n" + " - name: id\n" + " in: 
path\n" + " description: Product id\n" + " required: true\n" + " schema:\n" + " type: integer\n" + " format: int64\n" + " responses:\n" + " \"200\":\n" + " description: successful operation\n" + " content:\n" + " application/json:\n" + " schema:\n" + " $ref: \"#/components/schemas/ProductDetails\"\n" + " \"400\":\n" + " description: Invalid product id\n" + " post:\n" + " tags:\n" + " - product\n" + " summary: Update product details\n" + " description: Update product details with the given id.\n" + " operationId: updateProduct\n" + " parameters:\n" + " - name: id\n" + " in: path\n" + " description: Product id\n" + " required: true\n" + " schema:\n" + " type: integer\n" + " format: int64\n" + " - name: name\n" + " in: query\n" + " description: Product name\n" + " required: true\n" + " schema:\n" + " type: string\n" + " responses:\n" + " \"200\":\n" + " description: successful operation\n" + " delete:\n" + " tags:\n" + " - product\n" + " summary: Delete product details\n" + " description: Delete product details with the given id.\n" + " operationId: deleteProduct\n" + " parameters:\n" + " - name: id\n" + " in: path\n" + " description: Product id\n" + " required: true\n" + " schema:\n" + " type: integer\n" + " format: int64\n" + " responses:\n" + " \"200\":\n" + " description: successful operation\n" + " /products/{id}/relatedProducts:\n" + " get:\n" + " tags:\n" + " - relatedProducts\n" + " summary: Get related products\n" + " description: Get related products with the given product id.\n" + " operationId: getRelatedProducts\n" + " parameters:\n" + " - name: id\n" + " in: path\n" + " description: Product id\n" + " required: true\n" + " schema:\n" + " type: integer\n" + " format: int64\n" + " responses:\n" + " \"200\":\n" + " description: successful operation\n" + " content:\n" + " application/json:\n" + " schema:\n" + " $ref: \"#/components/schemas/RelatedProducts\"\n" + " \"400\":\n" + " description: Invalid product id\n" + "\n" + "components:\n" + " schemas:\n" + " 
Product:\n" + " type: object\n" + " description: Product id and name\n" + " properties:\n" + " id:\n" + " type: integer\n" + " format: int64\n" + " description: Product id\n" + " name:\n" + " type: string\n" + " description: Product name\n" + " required:\n" + " - id\n" + " - name\n" + " ProductDetails:\n" + " type: object\n" + " description: Product details\n" + " properties:\n" + " id:\n" + " type: integer\n" + " format: int64\n" + " description: Product id\n" + " name:\n" + " type: string\n" + " description: Product name\n" + " description:\n" + " type: string\n" + " description: Product description\n" + " required:\n" + " - id\n" + " - name\n" + " RelatedProducts:\n" + " type: object\n" + " description: Related Products\n" + " properties:\n" + " id:\n" + " type: integer\n" + " format: int32\n" + " description: Product id\n" + " relatedProducts:\n" + " type: array\n" + " description: List of related products\n" + " items:\n" + " $ref: \"#/components/schemas/Product\"", ConfigChangeWatcher.EventType.MODIFY ) ); watcher.notifyGroup(groupItems); Assertions.assertEquals("GET:/products/{order-id}", endpointNameGrouping.format("serviceA", "GET:/products/123")._1()); groupItems.put("serviceA.productAPI-v1", new ConfigChangeWatcher.ConfigChangeEvent("", ConfigChangeWatcher.EventType.DELETE)); watcher.notifyGroup(groupItems); Assertions.assertEquals("GET:/products/123", endpointNameGrouping.format("serviceA", "GET:/products/123")._1()); }
/**
 * Merges validated ConfigValues with their ConfigKey definitions into a ConfigInfos result.
 * Values with no matching key are included first with a null key info; every known key is then
 * emitted with its (possibly absent) validated value. The error count totals the error messages
 * of all values that appear in the result.
 *
 * @param connType    connector class name used as the result name
 * @param configKeys  config definitions keyed by config name
 * @param configValues validated values (may contain names absent from configKeys)
 * @param groups      ordered list of config groups
 * @return the combined validation result
 */
public static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups) { int errorCount = 0; List<ConfigInfo> configInfoList = new LinkedList<>(); Map<String, ConfigValue> configValueMap = new HashMap<>(); for (ConfigValue configValue: configValues) { String configName = configValue.name(); configValueMap.put(configName, configValue); if (!configKeys.containsKey(configName)) { configInfoList.add(new ConfigInfo(null, convertConfigValue(configValue, null))); errorCount += configValue.errorMessages().size(); } } for (Map.Entry<String, ConfigKey> entry : configKeys.entrySet()) { String configName = entry.getKey(); ConfigKeyInfo configKeyInfo = convertConfigKey(entry.getValue()); Type type = entry.getValue().type; ConfigValueInfo configValueInfo = null; if (configValueMap.containsKey(configName)) { ConfigValue configValue = configValueMap.get(configName); configValueInfo = convertConfigValue(configValue, type); errorCount += configValue.errorMessages().size(); } configInfoList.add(new ConfigInfo(configKeyInfo, configValueInfo)); } return new ConfigInfos(connType, errorCount, groups, configInfoList); }
/**
 * Verifies generateResult when there are more values than keys and some values carry errors:
 * extra values appear with null key info, all values are preserved, and the error count equals
 * the total number of value error messages (2 here).
 */
@Test public void testGenerateResultWithConfigValuesMoreThanConfigKeysAndWithSomeErrors() { String name = "com.acme.connector.MyConnector"; Map<String, ConfigDef.ConfigKey> keys = new HashMap<>(); addConfigKey(keys, "config.a1", null); addConfigKey(keys, "config.b1", "group B"); addConfigKey(keys, "config.b2", "group B"); addConfigKey(keys, "config.c1", "group C"); List<String> groups = Arrays.asList("groupB", "group C"); List<ConfigValue> values = new ArrayList<>(); addValue(values, "config.a1", "value.a1"); addValue(values, "config.b1", "value.b1"); addValue(values, "config.b2", "value.b2"); addValue(values, "config.c1", "value.c1", "error c1"); addValue(values, "config.extra1", "value.extra1"); addValue(values, "config.extra2", "value.extra2", "error extra2"); ConfigInfos infos = AbstractHerder.generateResult(name, keys, values, groups); assertEquals(name, infos.name()); assertEquals(groups, infos.groups()); assertEquals(values.size(), infos.values().size()); assertEquals(2, infos.errorCount()); assertInfoKey(infos, "config.a1", null); assertInfoKey(infos, "config.b1", "group B"); assertInfoKey(infos, "config.b2", "group B"); assertInfoKey(infos, "config.c1", "group C"); assertNoInfoKey(infos, "config.extra1"); assertNoInfoKey(infos, "config.extra2"); assertInfoValue(infos, "config.a1", "value.a1"); assertInfoValue(infos, "config.b1", "value.b1"); assertInfoValue(infos, "config.b2", "value.b2"); assertInfoValue(infos, "config.c1", "value.c1", "error c1"); assertInfoValue(infos, "config.extra1", "value.extra1"); assertInfoValue(infos, "config.extra2", "value.extra2", "error extra2"); }
/**
 * Re-initializes this input to read from {@code data}, starting at {@code offset}.
 * A null buffer yields a size of zero.
 */
@Override
public void init(byte[] data, int offset) {
    this.data = data;
    if (data == null) {
        this.size = 0;
    } else {
        this.size = data.length;
    }
    this.pos = offset;
}
/**
 * Verifies init() stores the buffer, derives size from its length, and sets the read position.
 */
@Test public void testInit() { in.init(INIT_DATA, 2); assertArrayEquals(INIT_DATA, in.data); assertEquals(INIT_DATA.length, in.size); assertEquals(2, in.pos); }
/**
 * Convenience overload: coerces the expressions with no additional type mappings,
 * delegating to the three-argument variant with an empty map.
 */
static Result coerceUserList( final Collection<Expression> expressions, final ExpressionTypeManager typeManager ) { return coerceUserList(expressions, typeManager, Collections.emptyMap()); }
/**
 * Verifies coercion of a mixed list (int literal, numeric string with surrounding whitespace,
 * int expression) to a common INTEGER type, with the string rewritten as an IntegerLiteral.
 */
@Test public void shouldCoerceToInts() { // Given: final ImmutableList<Expression> expressions = ImmutableList.of( new IntegerLiteral(10), new StringLiteral("\t -100 \t"), INT_EXPRESSION ); // When: final Result result = CoercionUtil.coerceUserList(expressions, typeManager); // Then: assertThat(result.commonType(), is(Optional.of(SqlTypes.INTEGER))); assertThat(result.expressions(), is(ImmutableList.of( new IntegerLiteral(10), new IntegerLiteral(-100), INT_EXPRESSION ))); }
/**
 * Returns the lookup constraints for this node. Exposing the field directly is safe because,
 * per the SpotBugs justification, the list is immutable.
 */
@SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "lookupConstraints is ImmutableList") public List<LookupConstraint> getLookupConstraints() { return lookupConstraints; }
/**
 * Verifies that with table scans enabled, two ANDed equality expressions on the same key column
 * still collapse into a single KeyConstraint lookup.
 */
@SuppressWarnings("unchecked") @Test public void shouldExtractConstraintWithMultipleKeyExpressions_tableScan() { // Given: when(plannerOptions.getTableScansEnabled()).thenReturn(true); final Expression expression1 = new ComparisonExpression( Type.EQUAL, new UnqualifiedColumnReferenceExp(ColumnName.of("K")), new IntegerLiteral(1) ); final Expression expression2 = new ComparisonExpression( Type.EQUAL, new UnqualifiedColumnReferenceExp(ColumnName.of("K")), new IntegerLiteral(2) ); final Expression expression = new LogicalBinaryExpression( LogicalBinaryExpression.Type.AND, expression1, expression2 ); QueryFilterNode filterNode = new QueryFilterNode( NODE_ID, source, expression, metaStore, ksqlConfig, false, plannerOptions ); // Then: final List<LookupConstraint> keys = filterNode.getLookupConstraints(); assertThat(keys.size(), is(1)); assertThat(keys.get(0), instanceOf(KeyConstraint.class)); }
/**
 * Derives the JavaBean property name from a getter method: strips the "get"/"is" prefix and
 * lower-cases the first remaining character. Returns null when the method is not recognized
 * as a bean property read method, or when it starts with neither prefix.
 */
protected static String getPropertyNameFromBeanReadMethod(Method method) {
    if (!isBeanPropertyReadMethod(method)) {
        return null;
    }
    final String methodName = method.getName();
    final String stripped;
    if (methodName.startsWith("get")) {
        stripped = methodName.substring(3);
    } else if (methodName.startsWith("is")) {
        stripped = methodName.substring(2);
    } else {
        return null;
    }
    return stripped.substring(0, 1).toLowerCase() + stripped.substring(1);
}
/**
 * Verifies property-name derivation for "get"/"is" getters, including the bare "is" method
 * which must yield null.
 */
@Test public void testGetPropertyNameFromBeanReadMethod() throws Exception { Method method = TestReflect.class.getMethod("getS"); Assert.assertEquals("s", getPropertyNameFromBeanReadMethod(method)); method = TestReflect.class.getMethod("getName"); Assert.assertEquals("name", getPropertyNameFromBeanReadMethod(method)); method = TestReflect.class.getMethod("isB"); Assert.assertEquals("b", getPropertyNameFromBeanReadMethod(method)); method = TestReflect.class.getMethod("is"); Assert.assertNull(getPropertyNameFromBeanReadMethod(method)); }
/**
 * Deep-clones {@code object}. Strings are immutable and returned as-is. A collection or map whose
 * first non-null element/entry is not Serializable is round-tripped through Jackson using a
 * parametric JavaType built from the runtime classes of that element (or key/value pair).
 * JsonNode instances use deepCopy(). Serializable objects are cloned via Java serialization,
 * falling back to JSON cloning when the object graph contains non-serializable parts (the
 * SerializationException is deliberately swallowed for that reason). Everything else is
 * JSON-cloned.
 */
@Override public <T> T clone(T object) { if (object instanceof String) { return object; } else if (object instanceof Collection) { Object firstElement = findFirstNonNullElement((Collection) object); if (firstElement != null && !(firstElement instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass()); return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } else if (object instanceof Map) { Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object); if (firstEntry != null) { Object key = firstEntry.getKey(); Object value = firstEntry.getValue(); if (!(key instanceof Serializable) || !(value instanceof Serializable)) { JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass()); return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type); } } } else if (object instanceof JsonNode) { return (T) ((JsonNode) object).deepCopy(); } if (object instanceof Serializable) { try { return (T) SerializationHelper.clone((Serializable) object); } catch (SerializationException e) { //it is possible that object itself implements java.io.Serializable, but underlying structure does not //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization } } return jsonClone(object); }
/**
 * Verifies that a non-Serializable object is cloned to an equal but distinct instance
 * (exercising the JSON-clone fallback path).
 */
@Test public void should_clone_non_serializable_object() { Object original = new NonSerializableObject("value"); Object cloned = serializer.clone(original); assertEquals(original, cloned); assertNotSame(original, cloned); }
/**
 * Removes {@code removeLength} characters from {@code text} starting at {@code removeIndex}.
 * An out-of-range index or a non-positive length is treated as a no-op; a removal extending
 * past the end of the string is clamped to the string length.
 *
 * @param text         source string
 * @param removeIndex  index of the first character to remove
 * @param removeLength number of characters to remove
 * @return the text with the requested span removed
 */
@Nonnull
public static String remove(@Nonnull String text, int removeIndex, int removeLength) {
    if (removeIndex < 0 || removeIndex >= text.length()) return text;
    // BUG FIX: a negative removeLength previously made the tail start *before* removeIndex,
    // duplicating part of the string. Treat non-positive lengths as "remove nothing".
    if (removeLength <= 0) return text;
    // Compute the clamp in long arithmetic so removeIndex + removeLength cannot overflow int.
    int endExclusive = (int) Math.min((long) removeIndex + removeLength, (long) text.length());
    String pre = text.substring(0, removeIndex);
    String post = text.substring(endExclusive);
    return pre + post;
}
/**
 * Verifies remove(): out-of-range indices are no-ops, mid-string spans are excised, and spans
 * reaching or exceeding the end clamp to the string length.
 */
@Test void testRemove() { assertEquals("demo", StringUtil.remove("demo", -1, 4)); assertEquals("demo", StringUtil.remove("demo", 999, 4)); assertEquals("do", StringUtil.remove("demo", 1, 2)); assertEquals("", StringUtil.remove("demo", 0, 4)); assertEquals("", StringUtil.remove("demo", 0, 999)); }
/**
 * REST endpoint returning the level of the named logger, as reported by the herder.
 *
 * @throws NotFoundException (HTTP 404) when the herder has no level for that logger
 */
@GET @Path("/{logger}") @Operation(summary = "Get the log level for the specified logger") public Response getLogger(final @PathParam("logger") String namedLogger) { Objects.requireNonNull(namedLogger, "require non-null name"); LoggerLevel loggerLevel = herder.loggerLevel(namedLogger); if (loggerLevel == null) throw new NotFoundException("Logger " + namedLogger + " not found."); return Response.ok(loggerLevel).build(); }
/**
 * Verifies that an unknown logger name results in NotFoundException.
 */
@Test public void testGetLevelNotFound() { final String logger = "org.apache.rostropovich"; when(herder.loggerLevel(logger)).thenReturn(null); assertThrows( NotFoundException.class, () -> loggingResource.getLogger(logger) ); }
/**
 * Computes the elements contained in this set but absent from {@code other}. The result set is
 * allocated lazily, so null is returned when there is no difference at all. The sentinel
 * MISSING_VALUE slot is handled separately via the containsMissingValue flags.
 */
public IntHashSet difference(final IntHashSet other)
{
    IntHashSet result = null;

    for (final int candidate : this.values)
    {
        final boolean onlyInThis = MISSING_VALUE != candidate && !other.contains(candidate);
        if (onlyInThis)
        {
            if (result == null)
            {
                result = new IntHashSet();
            }
            result.add(candidate);
        }
    }

    if (containsMissingValue && !other.containsMissingValue)
    {
        if (result == null)
        {
            result = new IntHashSet();
        }
        result.add(MISSING_VALUE);
    }

    return result;
}
/**
 * Verifies that difference() returns null (not an empty set) when both sets hold the same elements.
 */
@Test void differenceReturnsNullIfBothSetsEqual() { addTwoElements(testSet); final IntHashSet other = new IntHashSet(100); addTwoElements(other); assertNull(testSet.difference(other)); }
/**
 * Cleans up all Neo4j resources: drops the database (unless it was supplied statically), then
 * closes the driver. Both steps are attempted even if the first fails; errors are collected and
 * reported as a single Neo4jResourceManagerException at the end, after which the parent cleanup
 * runs only on the success path.
 */
@Override public synchronized void cleanupAll() { LOG.info("Attempting to clean up Neo4j manager."); boolean producedError = false; // First, delete the database if it was not given as a static argument try { if (!usingStaticDatabase) { dropDatabase(databaseName, waitOption); } } catch (Exception e) { LOG.error("Failed to delete Neo4j database {}.", databaseName, e); producedError = true; } // Next, try to close the Neo4j client connection try { neo4jDriver.close(); } catch (Exception e) { LOG.error("Failed to delete Neo4j client.", e); producedError = true; } // Throw Exception at the end if there were any errors if (producedError) { throw new Neo4jResourceManagerException( "Failed to delete resources. Check above for errors."); } super.cleanupAll(); LOG.info("Neo4j manager successfully cleaned up."); }
/**
 * Verifies that a failure closing the Neo4j driver surfaces as Neo4jResourceManagerException.
 */
@Test public void testCleanupAllShouldThrowErrorWhenNeo4jDriverFailsToClose() { doThrow(RuntimeException.class).when(neo4jDriver).close(); assertThrows(Neo4jResourceManagerException.class, () -> testManager.cleanupAll()); }
/**
 * Executes a bot API request synchronously by delegating to the underlying API client.
 *
 * @return the typed response produced by the API client
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
/**
 * Live-API test of ForwardMessage: forwards messages from a privacy-restricted user (sender name
 * only), an open-account user (forwardFrom populated), a channel (signatures and origin chat),
 * and a group message carrying a text-mention entity, asserting the forward metadata each time.
 */
@Test public void forwardMessage() { SendResponse response = bot.execute(new ForwardMessage(chatId, chatId, forwardMessageId).disableNotification(true)); Message message = response.message(); MessageTest.checkMessage(message); assertNotNull(message.forwardDate()); assertNotNull(message.forwardSenderName()); assertNull(message.forwardFrom()); User viaBot = message.viaBot(); UserTest.checkUser(viaBot); assertEquals("gif", viaBot.username()); // message from user with open account message = bot.execute(new ForwardMessage(chatId, chatId, forwardMessageIdUser)).message(); MessageTest.checkMessage(message); assertNotNull(message.forwardDate()); assertNull(message.forwardSenderName()); assertNotNull(message.forwardFrom()); message = bot.execute(new ForwardMessage(channelName, channelName, 651)).message(); assertNotNull(message.authorSignature()); assertNotNull(message.forwardSignature()); assertEquals(Integer.valueOf(651), message.forwardFromMessageId()); Chat chat = message.forwardFromChat(); assertEquals(channelName, "@" + chat.username()); assertEquals(Chat.Type.channel, chat.type()); assertNull(message.forwardSenderName()); message = bot.execute(new ForwardMessage(chatId, groupId, 352).messageThreadId(0)).message(); assertEquals(MessageEntity.Type.text_mention, message.entities()[0].type()); assertNotNull(message.entities()[0].user()); assertNotNull(message.forwardSenderName()); }
/**
 * Builds the server-info payload (application version, Kafka cluster id, ksql service id and
 * current server status) and returns it wrapped in a 200 OK response.
 */
public EndpointResponse get() {
    final ServerInfo serverInfo = new ServerInfo(
        appVersion.get(),
        kafkaClusterId.get(),
        ksqlServiceId.get(),
        serverStatus.get().toString());
    return EndpointResponse.ok(serverInfo);
}
/**
 * Verifies that get() responds 200 with a ServerInfo carrying the version, cluster id,
 * service id and RUNNING status.
 */
@Test public void shouldReturnServerInfo() { // When: final EndpointResponse response = serverInfoResource.get(); // Then: assertThat(response.getStatus(), equalTo(200)); assertThat(response.getEntity(), instanceOf(ServerInfo.class)); final ServerInfo serverInfo = (ServerInfo)response.getEntity(); assertThat( serverInfo, equalTo(new ServerInfo(AppInfo.getVersion(), KAFKA_CLUSTER_ID, KSQL_SERVICE_ID, "RUNNING")) ); }
/**
 * Returns application reports from the history store filtered by the request's start-time range
 * — defaulting to [0, Long.MAX_VALUE] when no range is set — and capped by the request limit.
 */
@Override public GetApplicationsResponse getApplications(GetApplicationsRequest request) throws YarnException, IOException { long startedBegin = request.getStartRange() == null ? 0L : request.getStartRange() .getMinimum(); long startedEnd = request.getStartRange() == null ? Long.MAX_VALUE : request .getStartRange().getMaximum(); GetApplicationsResponse response = GetApplicationsResponse.newInstance(new ArrayList<ApplicationReport>( history.getApplications(request.getLimit(), startedBegin, startedEnd) .values())); return response; }
/**
 * Verifies getApplications(): with the default request both stored applications are returned;
 * with APPLICATION_HISTORY_MAX_APPS=1 only a single report is returned, and it is the one for
 * the second application id.
 */
@Test void testApplications() throws IOException, YarnException { ApplicationId appId = null; appId = ApplicationId.newInstance(0, 1); ApplicationId appId1 = ApplicationId.newInstance(0, 2); GetApplicationsRequest request = GetApplicationsRequest.newInstance(); GetApplicationsResponse response = clientService.getApplications(request); List<ApplicationReport> appReport = response.getApplicationList(); assertNotNull(appReport); assertEquals(appId, appReport.get(1).getApplicationId()); assertEquals(appId1, appReport.get(0).getApplicationId()); // Create a historyManager, and set the max_apps can be loaded // as 1. Configuration conf = new YarnConfiguration(); conf.setLong(YarnConfiguration.APPLICATION_HISTORY_MAX_APPS, 1); ApplicationHistoryManagerOnTimelineStore historyManager2 = new ApplicationHistoryManagerOnTimelineStore(dataManager, new ApplicationACLsManager(conf)); historyManager2.init(conf); historyManager2.start(); @SuppressWarnings("resource") ApplicationHistoryClientService clientService2 = new ApplicationHistoryClientService(historyManager2); response = clientService2.getApplications(request); appReport = response.getApplicationList(); assertNotNull(appReport); assertTrue(appReport.size() == 1); // Expected to get the appReport for application with appId1 assertEquals(appId1, appReport.get(0).getApplicationId()); }
/**
 * Blocks until the result is available by delegating to the timed get with an effectively
 * infinite timeout (Long.MAX_VALUE seconds). A TimeoutException — which should not occur in
 * practice with that timeout — is deliberately wrapped in ExecutionException so this method's
 * signature matches Future.get().
 */
@Override public V get() throws InterruptedException, ExecutionException { try { return get(Long.MAX_VALUE, TimeUnit.SECONDS); } catch (TimeoutException e) { throw new ExecutionException(e); } }
/**
 * Verifies that when the delegate completes before get() is invoked on the outer future,
 * a registered whenCompleteAsync callback runs exactly once with the value and no throwable.
 */
@Test public void completeDelegate_successfully_callbackBeforeGet_invokeGetOnOuter_callbacksRun() throws Exception { BiConsumer<String, Throwable> callback = getStringExecutionCallback(); delegateFuture.run(); outerFuture.whenCompleteAsync(callback, CALLER_RUNS); outerFuture.get(); verify(callback, times(1)).accept(any(String.class), isNull()); verify(callback, times(0)).accept(isNull(), any(Throwable.class)); verifyNoMoreInteractions(callback); }
/**
 * Trims leading and trailing whitespace (as defined by the local isWhitespace helper) from a
 * queue name. Returns null for null input. Behavior is identical to the previous version; the
 * redundant bounds checks in the loop conditions have been removed:
 * the first loop tested {@code start < name.length()} twice, and in the second loop
 * {@code end >= 0} is implied by {@code end > start} (start is never negative).
 *
 * @param name queue name, possibly null
 * @return the trimmed name, or null when the input was null
 */
public static String trimQueueName(String name) {
    if (name == null) {
        return null;
    }
    int start = 0;
    while (start < name.length() && isWhitespace(name.charAt(start))) {
        start++;
    }
    int end = name.length() - 1;
    while (end > start && isWhitespace(name.charAt(end))) {
        end--;
    }
    return name.substring(start, end + 1);
}
/**
 * Verifies trimQueueName() returns null for null input and fully trims a string consisting
 * solely of assorted Unicode whitespace characters.
 */
@Test public void testTrimQueueNamesEmpty() throws Exception { assertNull(trimQueueName(null)); final String spaces = "\u2002\u3000\r\u0085\u200A\u2005\u2000\u3000" + "\u2029\u000B\u3000\u2008\u2003\u205F\u3000\u1680" + "\u0009\u0020\u2006\u2001\u202F\u00A0\u000C\u2009" + "\u3000\u2004\u3000\u3000\u2028\n\u2007\u3000"; assertTrue(trimQueueName(spaces).isEmpty()); }
/**
 * Prints the rows as a table followed by a row-count footer; with no rows at all,
 * delegates to the empty-result printer instead.
 */
@Override
public void print(Iterator<RowData> it, PrintWriter printWriter) {
    if (it.hasNext()) {
        final long rowCount = printTable(it, printWriter);
        printFooter(printWriter, rowCount);
    } else {
        printEmptyResult(it, printWriter);
    }
}
/**
 * Verifies tableau printing with content-inferred column widths: three rows render with borders,
 * op column, per-column widths derived from the data, and a "3 rows in set" footer.
 */
@Test void testPrintWithMultipleRowsAndDeriveColumnWidthByContent() { PrintStyle.tableauWithDataInferredColumnWidths( getSchema(), getConverter(), PrintStyle.DEFAULT_MAX_COLUMN_WIDTH, true, true) .print(getData().subList(0, 3).iterator(), new PrintWriter(outContent)); assertThat(outContent.toString()) .isEqualTo( "+----+---------+------------+--------+---------+----------------+----------------------------+" + System.lineSeparator() + "| op | boolean | int | bigint | varchar | decimal(10, 5) | timestamp |" + System.lineSeparator() + "+----+---------+------------+--------+---------+----------------+----------------------------+" + System.lineSeparator() + "| +I | | 1 | 2 | abc | 1.23000 | 2020-03-01 18:39:14.000000 |" + System.lineSeparator() + "| +I | false | | 0 | | 1.00000 | 2020-03-01 18:39:14.100000 |" + System.lineSeparator() + "| -D | true | 2147483647 | | abcdefg | 12345.00000 | 2020-03-01 18:39:14.120000 |" + System.lineSeparator() + "+----+---------+------------+--------+---------+----------------+----------------------------+" + System.lineSeparator() + "3 rows in set" + System.lineSeparator()); }
/**
 * Returns the human-readable endpoint summary collected by the logging listener.
 */
public String getEndpointsInfo() { return loggingListener.getEndpointsInfo(); }
/**
 * Verifies that resource interfaces are excluded from the endpoint info log output.
 */
@Test void logsNoInterfaces() { rc.packages(getClass().getName()); runJersey(); assertThat(rc.getEndpointsInfo()).doesNotContain("io.dropwizard.jersey.DropwizardResourceConfigTest.ResourceInterface"); }
/**
 * Resolves and validates key or value fields for an Avro-based mapping. Top-level query paths
 * are rejected. An inline schema may not be combined with a schema-registry URL. When the user
 * supplied no columns, an inline schema is mandatory and the mapping fields are derived from it;
 * otherwise the user fields are checked against the schema (when one is present) and returned
 * unchanged.
 *
 * @throws QueryException if a top-level field is used or neither columns nor schema are given
 * @throws IllegalArgumentException if both an inline schema and a schema registry are configured
 */
@Override public Stream<MappingField> resolveAndValidateFields( boolean isKey, List<MappingField> userFields, Map<String, String> options, InternalSerializationService serializationService ) { Map<QueryPath, MappingField> fieldsByPath = extractFields(userFields, isKey); for (QueryPath path : fieldsByPath.keySet()) { if (path.isTopLevel()) { throw QueryException.error("Cannot use the '" + path + "' field with Avro serialization"); } } Schema schema = getSchema(fieldsByPath, options, isKey); if (schema != null && options.containsKey("schema.registry.url")) { throw new IllegalArgumentException("Inline schema cannot be used with schema registry"); } if (userFields.isEmpty()) { if (schema == null) { throw QueryException.error( "Either a column list or an inline schema is required to create Avro-based mapping"); } return resolveFields(schema, (name, type) -> new MappingField(name, type, new QueryPath(name, isKey).toString())); } else { if (schema != null) { validate(schema, getFields(fieldsByPath).collect(toList())); } return fieldsByPath.values().stream(); } }
/**
 * Verifies that two mapping fields sharing the same external name are rejected with a
 * QueryException naming the duplicate.
 */
@Test public void when_duplicateExternalName_then_throws() { assertThatThrownBy(() -> INSTANCE.resolveAndValidateFields( isKey, List.of( field("field1", QueryDataType.INT, prefix + ".field"), field("field2", QueryDataType.VARCHAR, prefix + ".field") ), emptyMap(), null )).isInstanceOf(QueryException.class) .hasMessageMatching("Duplicate external name: (__key|this).field"); }
/**
 * Returns the precomputed result metadata for this transform function.
 */
@Override public TransformResultMetadata getResultMetadata() { return _resultMetadata; }
/**
 * Verifies array_indexes_of_string('a', MV column): the function wraps into a scalar transform
 * with multi-value INT result metadata, and since every generated MV entry matches, the expected
 * index array for each row is simply [0..len-1].
 */
@Test public void testArrayIndexOfAllString() { ExpressionContext expression = RequestContextUtils.getExpression( String.format("array_indexes_of_string(%s, 'a')", STRING_ALPHANUM_MV_COLUMN_2)); TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap); assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper); assertEquals(transformFunction.getResultMetadata().getDataType(), DataType.INT); assertFalse(transformFunction.getResultMetadata().isSingleValue()); int[][] expectedValues = new int[NUM_ROWS][]; for (int i = 0; i < NUM_ROWS; i++) { int len = _stringAlphaNumericMV2Values[i].length; int[] expectedValue = new int[len]; for (int j = 0; j < len; j++) { expectedValue[j] = j; } expectedValues[i] = expectedValue; } testTransformFunctionMV(transformFunction, expectedValues); }
/**
 * Deserializes the Telegram API answer into a ChatAdministratorRights object via the generic
 * typed deserializer.
 */
@Override public ChatAdministratorRights deserializeResponse(String answer) throws TelegramApiRequestException { return deserializeResponse(answer, ChatAdministratorRights.class); }
/**
 * Verifies deserialization of a valid getMyDefaultAdministratorRights response: every rights
 * flag in the JSON payload maps to a true getter on the resulting ChatAdministratorRights.
 */
@Test public void testGetMyDefaultAdministratorRightsDeserializeValidResponse() { String responseText = "{\n" + " \"ok\": true,\n" + " \"result\": {\n" + " \"is_anonymous\": true,\n" + " \"can_manage_chat\": true,\n" + " \"can_delete_messages\": true,\n" + " \"can_manage_video_chats\": true,\n" + " \"can_restrict_members\": true,\n" + " \"can_promote_members\": true,\n" + " \"can_change_info\": true,\n" + " \"can_invite_users\": true,\n" + " \"can_post_messages\": true,\n" + " \"can_edit_messages\": true,\n" + " \"can_pin_messages\": true\n" + " }\n" + "}"; GetMyDefaultAdministratorRights getMyDefaultAdministratorRights = GetMyDefaultAdministratorRights .builder() .build(); try { ChatAdministratorRights result = getMyDefaultAdministratorRights.deserializeResponse(responseText); assertNotNull(result); assertEquals(true, result.getIsAnonymous()); assertEquals(true, result.getCanManageChat()); assertEquals(true, result.getCanPostMessages()); assertEquals(true, result.getCanEditMessages()); assertEquals(true, result.getCanDeleteMessages()); assertEquals(true, result.getCanManageVideoChats()); assertEquals(true, result.getCanRestrictMembers()); assertEquals(true, result.getCanPromoteMembers()); assertEquals(true, result.getCanChangeInfo()); assertEquals(true, result.getCanInviteUsers()); assertEquals(true, result.getCanPinMessages()); } catch (TelegramApiRequestException e) { fail(e.getMessage()); } }
/**
 * Sets the logical state to ACTIVE or INACTIVE; a LOCKED state is never overridden here.
 * When activating, the double-tap start time is reset to zero so LOCKED cannot be entered
 * without an actual user double-click, and the consumed flag is cleared.
 */
public void setActiveState(boolean active) { if (mLogicalState == LOCKED) return; mLogicalState = active ? ACTIVE : INACTIVE; if (mLogicalState == ACTIVE) { // setting the start time to zero, so LOCKED state will not // be activated without actual user's double-clicking mActiveStateStartTime = 0; mConsumed = false; } }
/**
 * Verifies that setActiveState(true) activates the key but does not allow a subsequent single
 * press/release to reach LOCKED — a real double-click is required; a second press/release cycle
 * keeps the key ACTIVE and unlocked.
 */
@Test public void testSetActiveState() throws Exception { long millis = 1000; setCurrentTimeMillis(++millis); ModifierKeyState state = new ModifierKeyState(true); Assert.assertFalse(state.isActive()); Assert.assertFalse(state.isLocked()); Assert.assertFalse(state.isPressed()); state.setActiveState(true); Assert.assertTrue(state.isActive()); Assert.assertFalse(state.isLocked()); Assert.assertFalse(state.isPressed()); state.onPress(); Assert.assertTrue(state.isActive()); Assert.assertFalse(state.isLocked()); Assert.assertTrue(state.isPressed()); setCurrentTimeMillis(++millis); state.onRelease(DOUBLE_TAP_TIMEOUT, LONG_PRESS_TIMEOUT); // although the state is ACTIVE before the press-release // sequence, we will not move to LOCKED state. // we can only move to LOCKED state if the user has double-clicked. Assert.assertFalse(state.isActive()); Assert.assertFalse(state.isLocked()); Assert.assertFalse(state.isPressed()); state.onPress(); Assert.assertTrue(state.isActive()); Assert.assertFalse(state.isLocked()); Assert.assertTrue(state.isPressed()); setCurrentTimeMillis(++millis); state.onRelease(DOUBLE_TAP_TIMEOUT, LONG_PRESS_TIMEOUT); Assert.assertTrue(state.isActive()); Assert.assertFalse(state.isLocked()); Assert.assertFalse(state.isPressed()); }
/**
 * Glob-matches {@code pattern} against {@code string}. The string is first asserted to contain
 * only single-byte characters; both arguments are then converted to US-ASCII bytes and matched
 * by the byte-array overload.
 */
public static boolean match(String pattern, String string) { assertSingleByte(string); return match(pattern.getBytes(StandardCharsets.US_ASCII), string.getBytes(StandardCharsets.US_ASCII)); }
/**
 * Verifies glob matching across wildcards and character classes, including a pathological
 * pattern that hangs when evaluated as a backtracking regex but must terminate here.
 */
@Test public void testValidInputs() { assertFalse(GlobMatcher.match("a*b", "aaa")); assertFalse(GlobMatcher.match("a*a*b", "aaaa")); // Hangs when parsing as regex. assertFalse(GlobMatcher.match("a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*b", "a".repeat(55))); assertTrue(GlobMatcher.match("a*a*b", "aaa7b")); assertTrue(GlobMatcher.match("a*a*b[", "aaa7b[")); assertFalse(GlobMatcher.match("a*a*b[", "aaa7b[c")); assertTrue(GlobMatcher.match("a*[0-9]b", "aH5b")); assertTrue(GlobMatcher.match("a*[k\\-0-9]b", "aHk5b")); assertTrue(GlobMatcher.match("a*[k\\-0-9]b", "aH-5b")); assertFalse(GlobMatcher.match("a*[k\\-0-9]b", "aHb")); assertFalse(GlobMatcher.match("a*[0-9]b", "aHRb")); assertTrue(GlobMatcher.match("a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*a*b", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab")); assertTrue(GlobMatcher.match("hello_[24]", "hello_4")); assertTrue(GlobMatcher.match("hello_[24]", "hello_2")); }
/**
 * Finds the index entry with the largest timestamp that is less than or equal to
 * {@code targetTimestamp}. Returns (NO_TIMESTAMP, baseOffset) when no such entry
 * exists (empty index or all entries newer than the target). Runs under the index
 * lock against a duplicate of the mmap'ed buffer so the shared buffer's position
 * is never mutated.
 */
public TimestampOffset lookup(long targetTimestamp) {
    return maybeLock(lock, () -> {
        ByteBuffer idx = mmap().duplicate();
        int slot = largestLowerBoundSlotFor(idx, targetTimestamp, IndexSearchType.KEY);
        if (slot == -1)
            return new TimestampOffset(RecordBatch.NO_TIMESTAMP, baseOffset());
        else
            return parseEntry(idx, slot);
    });
}
// lookup() must return (-1, baseOffset) for an empty index or a too-small timestamp,
// and the floor entry for in-between or exact timestamps.
@Test
public void testLookUp() {
    // Empty time index
    assertEquals(new TimestampOffset(-1L, baseOffset), idx.lookup(100L));
    // Add several time index entries.
    appendEntries(maxEntries - 1);
    // look for timestamp smaller than the earliest entry
    assertEquals(new TimestampOffset(-1L, baseOffset), idx.lookup(9));
    // look for timestamp in the middle of two entries.
    assertEquals(new TimestampOffset(20L, 65L), idx.lookup(25));
    // look for timestamp same as the one in the entry
    assertEquals(new TimestampOffset(30L, 75L), idx.lookup(30));
}
// Empty-headers implementation: there are never any entries, so containment is always false.
@Override
public boolean containsObject(K name, Object value) {
    return false;
}
// The empty HEADERS instance reports no containment for any name/value pair.
@Test
public void testContainsObject() {
    assertFalse(HEADERS.containsObject("name1", ""));
}
// Returns the shared factory instance that supplies this dialect's expression objects.
@Override
public IExpressionObjectFactory getExpressionObjectFactory() {
    return LINK_EXPRESSION_OBJECTS_FACTORY;
}
// The dialect must expose its registered name and a DefaultLinkExpressionFactory.
@Test
void getExpressionObjectFactory() {
    assertThat(linkExpressionObjectDialect.getName())
            .isEqualTo("themeLink");
    assertThat(linkExpressionObjectDialect.getExpressionObjectFactory())
            .isInstanceOf(DefaultLinkExpressionFactory.class);
}
// Intentionally empty in this implementation: login-with-key is a no-op here.
// NOTE(review): confirm this stub is deliberate (e.g. a disabled/empty API variant)
// rather than an unimplemented override.
@Override
public void loginWithKey(String loginIDKey, String loginId) {
}
// loginWithKey on this instance must be a no-op: no event may be tracked and
// no login id may be recorded.
@Test
public void testLoginWithKey() {
    mSensorsAPI.setTrackEventCallBack(new SensorsDataTrackEventCallBack() {
        @Override
        public boolean onTrackEvent(String eventName, JSONObject eventProperties) {
            // Any tracked event here means the no-op contract was violated.
            Assert.fail();
            return false;
        }
    });
    mSensorsAPI.loginWithKey("login_user", "login_user", new JSONObject());
    Assert.assertNull(mSensorsAPI.getLoginId());
}
/**
 * Atomically fetches and removes the entries between ranks {@code from} and
 * {@code to} (inclusive), returning them together with their scores. The Lua
 * script makes the read and the removal a single server-side operation; when
 * the range is empty the (empty) result is returned without issuing a removal.
 */
private <T> RFuture<T> pollEntry(int from, int to, RedisCommand<?> command) {
    return commandExecutor.evalWriteAsync(getRawName(), codec, command,
            "local v = redis.call('zrange', KEYS[1], ARGV[1], ARGV[2], 'withscores'); "
          + "if #v > 0 then "
          + "redis.call('zremrangebyrank', KEYS[1], ARGV[1], ARGV[2]); "
          + "return v; "
          + "end "
          + "return v;",
            Collections.singletonList(getRawName()), from, to);
}
// pollFirstEntry/pollLastEntry must return the lowest/highest-scored entries
// (with their scores) and remove them from the set.
@Test
public void testPollEntry() {
    RScoredSortedSet<String> set = redisson.getScoredSortedSet("test");
    set.add(1.1, "v1");
    set.add(1.2, "v2");
    set.add(1.3, "v3");
    ScoredEntry<String> e = set.pollFirstEntry();
    assertThat(e).isEqualTo(new ScoredEntry<>(1.1, "v1"));
    ScoredEntry<String> e2 = set.pollLastEntry();
    assertThat(e2).isEqualTo(new ScoredEntry<>(1.3, "v3"));
    // Only the middle element remains.
    assertThat(set.size()).isEqualTo(1);
}
@SuppressWarnings("unchecked") public <T extends Expression> T rewrite(final T expression, final C context) { return (T) rewriter.process(expression, context); }
// Rewriting a Type expression must yield an equal Type.
@Test
public void shouldRewriteType() {
    // Given:
    final Type type = new Type(SqlPrimitiveType.of("INTEGER"));
    // When:
    final Expression rewritten = expressionRewriter.rewrite(type, context);
    // Then:
    assertThat(rewritten, is(type));
}
// Returns the marker suffix this filesystem appends to pseudo-folder object keys.
@Override
protected String getFolderSuffix() {
    return FOLDER_SUFFIX;
}
// The OSS filesystem uses "/" as its folder suffix.
@Test
public void testGetFolderSuffix() {
    Assert.assertEquals("/", mOSSUnderFileSystem.getFolderSuffix());
}
// Groups imports by package type (TreeMap keeps the group keys ordered) with each
// group sorted by the organizer's comparator, then emits the groups in IDEA's
// order: non-static, then javax/java, then statics.
@Override
public OrganizedImports organizeImports(List<Import> imports) {
    Map<PackageType, ImmutableSortedSet<Import>> partitioned =
        imports.stream()
            .collect(
                Collectors.groupingBy(
                    IdeaImportOrganizer::getPackageType,
                    TreeMap::new,
                    toImmutableSortedSet(IdeaImportOrganizer::compareImport)));
    return new OrganizedImports()
        .addGroups(
            partitioned,
            ImmutableList.of(PackageType.NON_STATIC, PackageType.JAVAX_JAVA, PackageType.STATIC));
}
// The organized block must list non-static imports first, then the javax/java
// group, then all static imports, each group separated by a blank line.
@Test
public void staticLastOrdering() {
    IdeaImportOrganizer organizer = new IdeaImportOrganizer();
    ImportOrganizer.OrganizedImports organized = organizer.organizeImports(IMPORTS);
    assertThat(organized.asImportBlock())
        .isEqualTo(
            "import android.foo;\n"
                + "import com.android.blah;\n"
                + "import net.wilma;\n"
                + "import unknown.barney;\n"
                + "import unknown.fred;\n"
                + "\n"
                + "import javax.pong;\n"
                + "import java.ping;\n"
                + "\n"
                + "import static android.foo.bar;\n"
                + "import static com.android.blah.blah;\n"
                + "import static java.ping.pong;\n"
                + "import static javax.pong.ping;\n"
                + "import static net.wilma.flintstone;\n"
                + "import static unknown.fred.flintstone;\n");
}
// Plain accessor for the configured request interceptors.
// NOTE(review): returns the internal list directly — callers can mutate it; confirm
// whether an unmodifiable view is wanted.
public List<HttpClientRequestInterceptor> getInterceptors() {
    return interceptors;
}
// Interceptor list starts empty and reflects what the setter installs.
@Test
void testGetInterceptors() {
    assertTrue(restTemplate.getInterceptors().isEmpty());
    restTemplate.setInterceptors(Collections.singletonList(interceptor));
    assertEquals(1, restTemplate.getInterceptors().size());
}
/**
 * Extracts the database name from a statistics config-node path. The name is the
 * single trailing path segment (word characters and dashes) directly under the
 * ShardingSphere data node path; absent a match, returns {@link Optional#empty()}.
 */
public static Optional<String> getDatabaseName(final String configNodeFullPath) {
    final Pattern databaseNamePattern =
            Pattern.compile(getShardingSphereDataNodePath() + "/([\\w\\-]+)$", Pattern.CASE_INSENSITIVE);
    final Matcher matcher = databaseNamePattern.matcher(configNodeFullPath);
    if (!matcher.find()) {
        return Optional.empty();
    }
    return Optional.of(matcher.group(1));
}
// A path that stops at the databases node (no trailing db segment) yields empty.
@Test
void assertGetDatabaseNameDbNameNotFoundScenario() {
    assertThat(ShardingSphereDataNode.getDatabaseName("/statistics/databases"), is(Optional.empty()));
}
/**
 * Static factory for an enum schema. The symbol list is wrapped in a
 * LockableArrayList; the trailing {@code null} means no explicit enum default.
 */
public static Schema createEnum(String name, String doc, String namespace, List<String> values) {
    return new EnumSchema(new Name(name, namespace), doc, new LockableArrayList<>(values), null);
}
// A null enum symbol must be rejected at schema-creation time.
@Test
void enumSymbolAsNull() {
    assertThrows(SchemaParseException.class, () -> {
        Schema.createEnum("myField", "doc", "namespace", Collections.singletonList(null));
    });
}
// Releases the underlying file channel; the instance is unusable afterwards.
@Override
public void close() throws IOException {
    channel.close();
}
// Overwriting the file's version byte with an unsupported value ('2') must make
// the RecordIOReader constructor throw.
@Test(expected = RuntimeException.class)
public void testVersionMismatch() throws IOException {
    FileChannel channel = FileChannel.open(file, StandardOpenOption.WRITE);
    channel.write(ByteBuffer.wrap(new byte[] { '2' }));
    channel.close();
    RecordIOReader reader = new RecordIOReader(file);
}
// Static factory: starts a row Builder bound to the given schema.
public static Builder withSchema(Schema schema) {
    return new Builder(schema);
}
// Building a Row by field name (withFieldValue) must produce the same Row as
// building it positionally (addValues), across all supported field types.
@Test
public void testCreateWithNames() {
    Schema type =
        Stream.of(
                Schema.Field.of("f_str", FieldType.STRING),
                Schema.Field.of("f_byte", FieldType.BYTE),
                Schema.Field.of("f_short", FieldType.INT16),
                Schema.Field.of("f_int", FieldType.INT32),
                Schema.Field.of("f_long", FieldType.INT64),
                Schema.Field.of("f_float", FieldType.FLOAT),
                Schema.Field.of("f_double", FieldType.DOUBLE),
                Schema.Field.of("f_decimal", FieldType.DECIMAL),
                Schema.Field.of("f_boolean", FieldType.BOOLEAN),
                Schema.Field.of("f_datetime", FieldType.DATETIME),
                Schema.Field.of("f_bytes", FieldType.BYTES),
                Schema.Field.of("f_array", FieldType.array(FieldType.STRING)),
                Schema.Field.of("f_iterable", FieldType.iterable(FieldType.STRING)),
                Schema.Field.of("f_map", FieldType.map(FieldType.STRING, FieldType.STRING)))
            .collect(toSchema());
    DateTime dateTime =
        new DateTime().withDate(1979, 03, 14).withTime(1, 2, 3, 4).withZone(DateTimeZone.UTC);
    byte[] bytes = new byte[] {1, 2, 3, 4};
    Row row =
        Row.withSchema(type)
            .withFieldValue("f_str", "str1")
            .withFieldValue("f_byte", (byte) 42)
            .withFieldValue("f_short", (short) 43)
            .withFieldValue("f_int", (int) 44)
            .withFieldValue("f_long", (long) 45)
            .withFieldValue("f_float", (float) 3.14)
            .withFieldValue("f_double", (double) 3.141)
            .withFieldValue("f_decimal", new BigDecimal("3.1415"))
            .withFieldValue("f_boolean", true)
            .withFieldValue("f_datetime", dateTime)
            .withFieldValue("f_bytes", bytes)
            .withFieldValue("f_array", Lists.newArrayList("one", "two"))
            .withFieldValue("f_iterable", Lists.newArrayList("one", "two", "three"))
            .withFieldValue("f_map", ImmutableMap.of("hello", "goodbye", "here", "there"))
            .build();
    Row expectedRow =
        Row.withSchema(type)
            .addValues(
                "str1",
                (byte) 42,
                (short) 43,
                (int) 44,
                (long) 45,
                (float) 3.14,
                (double) 3.141,
                new BigDecimal("3.1415"),
                true,
                dateTime,
                bytes,
                Lists.newArrayList("one", "two"),
                Lists.newArrayList("one", "two", "three"),
                ImmutableMap.of("hello", "goodbye", "here", "there"))
            .build();
    assertEquals(expectedRow, row);
}
// Convenience overload rendering all 64 bits of the value.
public final String toBitString(long value) {
    return toBitString(value, 64);
}
// Checks both the full-64-bit overload and the explicit-width overload, including
// values whose interesting bits sit at the top/bottom of the word.
@Test
public void testToBitString() {
    assertEquals("0010101010101010101010101010101010101010101010101010101010101010", bitUtil.toBitString(Long.MAX_VALUE / 3));
    assertEquals("0111111111111111111111111111111111111111111111111111111111111111", bitUtil.toBitString(Long.MAX_VALUE));
    assertEquals("00101010101010101010101010101010", bitUtil.toBitString(bitUtil.fromInt(Integer.MAX_VALUE / 3)));
    assertEquals("10000000000000000000000000000000", bitUtil.toBitString(1L << 63, 32));
    assertEquals("00000000000000000000000000000001", bitUtil.toBitString((1L << 32), 32));
}
/**
 * Parses {@code javaInput} with javac and renders the formatted result into
 * {@code javaOutput}. Parse errors abort with a FormatterException; otherwise the
 * AST is walked into formatting ops, break points are computed against
 * MAX_LINE_LENGTH, and the document is written out.
 */
static void format(final JavaInput javaInput, JavaOutput javaOutput, JavaFormatterOptions options)
    throws FormatterException {
  Context context = new Context();
  DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
  context.put(DiagnosticListener.class, diagnostics);
  // Keep string concatenations in their source shape instead of letting javac fold them.
  Options.instance(context).put("allowStringFolding", "false");
  Options.instance(context).put("--enable-preview", "true");
  JCCompilationUnit unit;
  JavacFileManager fileManager = new JavacFileManager(context, true, UTF_8);
  try {
    fileManager.setLocation(StandardLocation.PLATFORM_CLASS_PATH, ImmutableList.of());
  } catch (IOException e) {
    // impossible
    throw new IOError(e);
  }
  // In-memory source object backed directly by the input text.
  SimpleJavaFileObject source =
      new SimpleJavaFileObject(URI.create("source"), JavaFileObject.Kind.SOURCE) {
        @Override
        public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
          return javaInput.getText();
        }
      };
  Log.instance(context).useSource(source);
  ParserFactory parserFactory = ParserFactory.instance(context);
  JavacParser parser =
      parserFactory.newParser(
          javaInput.getText(),
          /* keepDocComments= */ true,
          /* keepEndPos= */ true,
          /* keepLineMap= */ true);
  unit = parser.parseCompilationUnit();
  unit.sourcefile = source;
  javaInput.setCompilationUnit(unit);
  // Refuse to format input that javac could not parse cleanly.
  Iterable<Diagnostic<? extends JavaFileObject>> errorDiagnostics =
      Iterables.filter(diagnostics.getDiagnostics(), Formatter::errorDiagnostic);
  if (!Iterables.isEmpty(errorDiagnostics)) {
    throw FormatterException.fromJavacDiagnostics(errorDiagnostics);
  }
  OpsBuilder builder = new OpsBuilder(javaInput, javaOutput);
  // Output the compilation unit.
  JavaInputAstVisitor visitor;
  // Choose the AST visitor matching the running JDK's language level; the newer
  // visitors are loaded reflectively so the core stays compilable on older JDKs.
  if (Runtime.version().feature() >= 21) {
    visitor =
        createVisitor(
            "com.google.googlejavaformat.java.java21.Java21InputAstVisitor", builder, options);
  } else if (Runtime.version().feature() >= 17) {
    visitor =
        createVisitor(
            "com.google.googlejavaformat.java.java17.Java17InputAstVisitor", builder, options);
  } else {
    visitor = new JavaInputAstVisitor(builder, options.indentationMultiplier());
  }
  visitor.scan(unit, null);
  builder.sync(javaInput.getText().length());
  builder.drain();
  Doc doc = new DocBuilder().withOps(builder.build()).build();
  doc.computeBreaks(javaOutput.getCommentsHelper(), MAX_LINE_LENGTH, new Doc.State(+0, 0));
  doc.write(javaOutput);
  javaOutput.flush();
}
// --offset/--length extending past the end of the file must fail (exit code 1)
// with a diagnostic naming the offending range.
@Test
public void testFormatLengthOutOfRange() throws Exception {
    String input = "class Foo{}\n";
    Path tmpdir = testFolder.newFolder().toPath();
    Path path = tmpdir.resolve("Foo.java");
    Files.writeString(path, input);
    StringWriter out = new StringWriter();
    StringWriter err = new StringWriter();
    Main main = new Main(new PrintWriter(out, true), new PrintWriter(err, true), System.in);
    String[] args = {"--offset", "0", "--length", "9999", path.toString()};
    assertThat(main.format(args)).isEqualTo(1);
    assertThat(err.toString())
        .contains("error: invalid offset (0) or length (9999); offset + length (9999)");
}
/**
 * Closes a task's state manager under the state-directory lock. On an unclean close
 * with EOS enabled the task directory is wiped (the state is invalid). The first
 * exception from close/wipe is remembered and rethrown after the lock is released;
 * failure to acquire the lock is logged but not fatal.
 */
static void closeStateManager(final Logger log,
                              final String logPrefix,
                              final boolean closeClean,
                              final boolean eosEnabled,
                              final ProcessorStateManager stateMgr,
                              final StateDirectory stateDirectory,
                              final TaskType taskType) {
    // if EOS is enabled, wipe out the whole state store for unclean close since it is now invalid
    final boolean wipeStateStore = !closeClean && eosEnabled;
    final TaskId id = stateMgr.taskId();
    log.trace("Closing state manager for {} task {}", taskType, id);
    final AtomicReference<ProcessorStateException> firstException = new AtomicReference<>(null);
    try {
        if (stateDirectory.lock(id)) {
            try {
                stateMgr.close();
            } catch (final ProcessorStateException e) {
                firstException.compareAndSet(null, e);
            } finally {
                try {
                    if (wipeStateStore) {
                        log.debug("Wiping state stores for {} task {}", taskType, id);
                        // we can just delete the whole dir of the task, including the state store images and the checkpoint files,
                        // and then we write an empty checkpoint file indicating that the previous close is graceful and we just
                        // need to re-bootstrap the restoration from the beginning
                        Utils.delete(stateMgr.baseDir());
                    }
                } finally {
                    // Always release the directory lock, even if the wipe failed.
                    stateDirectory.unlock(id);
                }
            }
        } else {
            log.error("Failed to acquire lock while closing the state store for {} task {}", taskType, id);
        }
    } catch (final IOException e) {
        final ProcessorStateException exception = new ProcessorStateException(
            String.format("%sFatal error while trying to close the state manager for task %s", logPrefix, id), e
        );
        firstException.compareAndSet(null, exception);
    }
    // Rethrow the first failure only after cleanup has completed.
    final ProcessorStateException exception = firstException.get();
    if (exception != null) {
        throw exception;
    }
}
// A failure inside stateMgr.close() during a clean close must propagate the original
// ProcessorStateException unwrapped, and the directory lock must still be released.
@Test
public void testCloseStateManagerThrowsExceptionWhenClean() {
    when(stateManager.taskId()).thenReturn(taskId);
    when(stateDirectory.lock(taskId)).thenReturn(true);
    doThrow(new ProcessorStateException("state manager failed to close")).when(stateManager).close();
    final ProcessorStateException thrown = assertThrows(
        ProcessorStateException.class,
        () -> StateManagerUtil.closeStateManager(logger, "logPrefix:", true, false, stateManager, stateDirectory, TaskType.ACTIVE));
    // Thrown stateMgr exception will not be wrapped.
    assertEquals("state manager failed to close", thrown.getMessage());
    // The unlock logic should still be executed.
    verify(stateDirectory).unlock(taskId);
}
/**
 * Parses the optional Jetty options element under {@code node} into a map keyed by
 * the Carte/Jetty Kettle property constants.
 *
 * @param node the slave-server config node to inspect
 * @return the parsed options (possibly empty), or {@code null} when the
 *         jetty-options element is absent — callers rely on that distinction
 */
protected Map<String, String> parseJettyOptions( Node node ) {
  Map<String, String> jettyOptions = null;
  Node jettyOptionsNode = XMLHandler.getSubNode( node, XML_TAG_JETTY_OPTIONS );
  if ( jettyOptionsNode != null ) {
    jettyOptions = new HashMap<>();
    // Read each tag value once; the previous version called getTagValue twice per
    // option (once for the null check and again for the put).
    String acceptors = XMLHandler.getTagValue( jettyOptionsNode, XML_TAG_ACCEPTORS );
    if ( acceptors != null ) {
      jettyOptions.put( Const.KETTLE_CARTE_JETTY_ACCEPTORS, acceptors );
    }
    String acceptQueueSize = XMLHandler.getTagValue( jettyOptionsNode, XML_TAG_ACCEPT_QUEUE_SIZE );
    if ( acceptQueueSize != null ) {
      jettyOptions.put( Const.KETTLE_CARTE_JETTY_ACCEPT_QUEUE_SIZE, acceptQueueSize );
    }
    String lowResMaxIdleTime = XMLHandler.getTagValue( jettyOptionsNode, XML_TAG_LOW_RES_MAX_IDLE_TIME );
    if ( lowResMaxIdleTime != null ) {
      jettyOptions.put( Const.KETTLE_CARTE_JETTY_RES_MAX_IDLE_TIME, lowResMaxIdleTime );
    }
  }
  return jettyOptions;
}
// A config containing only the accept-queue-size option must yield a one-entry map
// with the expected Kettle property key and value.
@Test
public void testParseJettyOption_AcceptQueueSize() throws KettleXMLException {
    Node configNode = getConfigNode( getConfigWithAcceptQueueSizeOnlyOption() );
    Map<String, String> parseJettyOptions = slServerConfig.parseJettyOptions( configNode );
    assertNotNull( parseJettyOptions );
    assertEquals( 1, parseJettyOptions.size() );
    assertTrue( "Expected containing key=" + EXPECTED_ACCEPT_QUEUE_SIZE_KEY, parseJettyOptions
        .containsKey( EXPECTED_ACCEPT_QUEUE_SIZE_KEY ) );
    assertEquals( EXPECTED_ACCEPT_QUEUE_SIZE_VALUE, parseJettyOptions.get( EXPECTED_ACCEPT_QUEUE_SIZE_KEY ) );
}
/**
 * Merges pending timeline events and artifacts into this summary. DEFAULT-typed
 * artifacts replacing DEFAULT-typed artifacts are merged field-by-field; anything
 * else replaces the stored artifact wholesale. If any change was applied, the
 * summary is marked dirty ({@code synced = false}) and the runtime state's modify
 * time is refreshed.
 */
public void mergeRuntimeUpdate(
    List<TimelineEvent> pendingTimeline, Map<String, Artifact> pendingArtifacts) {
  if (timeline.addAll(pendingTimeline)) {
    synced = false;
  }
  if (pendingArtifacts != null && !pendingArtifacts.isEmpty()) {
    for (Map.Entry<String, Artifact> entry : pendingArtifacts.entrySet()) {
      String key = entry.getKey();
      // Only touch state when the incoming artifact actually differs.
      if (!entry.getValue().equals(artifacts.get(key))) {
        if (artifacts.containsKey(key)
            && artifacts.get(key).getType() == Artifact.Type.DEFAULT
            && entry.getValue().getType() == Artifact.Type.DEFAULT) {
          // DEFAULT-over-DEFAULT: merge the data maps instead of replacing.
          artifacts.get(key).asDefault().getData().putAll(entry.getValue().asDefault().getData());
        } else {
          artifacts.put(entry.getKey(), entry.getValue());
        }
        synced = false;
      }
    }
  }
  if (!synced) {
    runtimeState.setModifyTime(System.currentTimeMillis());
  }
}
// Merging artifacts that are identical to what the summary already holds must NOT
// flip the synced flag, across default, subworkflow, and foreach artifact types.
@Test
public void testNoChangeMerge() throws Exception {
    StepRuntimeSummary summary =
        loadObject(
            "fixtures/execution/sample-step-runtime-summary-1.json", StepRuntimeSummary.class);
    assertTrue(summary.isSynced());
    summary.mergeRuntimeUpdate(null, null);
    assertTrue(summary.isSynced());
    Map<String, Artifact> artifacts = new LinkedHashMap<>();
    DefaultArtifact artifact1 = new DefaultArtifact();
    artifact1.add("value", 1L);
    artifact1.add("foo", "bar");
    artifacts.put("artifact1", artifact1);
    summary.mergeRuntimeUpdate(null, artifacts);
    assertTrue(summary.isSynced());
    SubworkflowArtifact artifact2 = new SubworkflowArtifact();
    artifact2.setSubworkflowId("test-dag");
    artifact2.setSubworkflowVersionId(1L);
    artifact2.setSubworkflowInstanceId(1);
    artifact2.setSubworkflowRunId(1);
    artifact2.setSubworkflowUuid("foo-bar");
    artifact2.setSubworkflowOverview(
        WorkflowRuntimeOverview.of(
            1L,
            singletonEnumMap(StepInstance.Status.SUCCEEDED, WorkflowStepStatusSummary.of(1L)),
            null));
    artifacts.put(artifact2.getType().key(), artifact2);
    summary.mergeRuntimeUpdate(null, artifacts);
    assertTrue(summary.isSynced());
    summary =
        loadObject(
            "fixtures/execution/sample-step-runtime-summary-3.json", StepRuntimeSummary.class);
    artifacts.clear();
    ForeachArtifact artifact3 = new ForeachArtifact();
    artifact3.setForeachWorkflowId("inline-wf");
    artifact3.setForeachIdentity("foo");
    artifact3.setTotalLoopCount(10);
    artifact3.setNextLoopIndex(0);
    artifact3.setForeachOverview(new ForeachStepOverview());
    artifact3.getForeachOverview().setStats(new EnumMap<>(WorkflowInstance.Status.class));
    artifact3.getForeachOverview().setCheckpoint(6L);
    artifact3.getForeachOverview().getStats().put(WorkflowInstance.Status.CREATED, 5L);
    artifact3.getForeachOverview().getStats().put(WorkflowInstance.Status.SUCCEEDED, 1L);
    artifacts.put(artifact3.getType().key(), artifact3);
    summary.mergeRuntimeUpdate(null, artifacts);
    assertTrue(summary.isSynced());
}
/**
 * Enables downstream old-value forwarding. A node materialized under a queryable
 * name can serve old values itself; otherwise the request is delegated upstream
 * and old values are only enabled here if the parent enabled them.
 */
@Override
public boolean enableSendingOldValues(final boolean forceMaterialization) {
    if (queryableName != null) {
        sendOldValues = true;
        return true;
    }
    final boolean parentEnabled = parent.enableSendingOldValues(forceMaterialization);
    if (parentEnabled) {
        sendOldValues = true;
    }
    return sendOldValues;
}
// Enabling old values on the filtered table (without forcing materialization) must
// also enable them on the materialized upstream table.
@Test
public void shouldSendOldValuesWhenEnabledOnUpStreamMaterialization() {
    final StreamsBuilder builder = new StreamsBuilder();
    final String topic1 = "topic1";
    final KTableImpl<String, Integer, Integer> table1 =
        (KTableImpl<String, Integer, Integer>) builder.table(topic1, consumed, Materialized.as("store2"));
    final KTableImpl<String, Integer, Integer> table2 =
        (KTableImpl<String, Integer, Integer>) table1.filter(predicate);
    table2.enableSendingOldValues(false);
    assertThat(table1.sendingOldValueEnabled(), is(true));
    assertThat(table2.sendingOldValueEnabled(), is(true));
    doTestSendingOldValue(builder, table1, table2, topic1);
}
/**
 * Sums the total sync count across every registered distro record.
 *
 * @return the aggregate sync count at the moment of the call
 */
public long getTotalSyncCount() {
    // A LongStream reduction replaces the previous AtomicLong-accumulator-in-forEach
    // pattern: same result, no boxing of the accumulator into a side-effecting lambda.
    return distroRecords.values().stream()
            .mapToLong(record -> record.getTotalSyncCount())
            .sum();
}
// One successful sync on a record must raise the holder's aggregate count by exactly one.
@Test
void testGetTotalSyncCount() {
    long expected = DistroRecordsHolder.getInstance().getTotalSyncCount() + 1;
    DistroRecordsHolder.getInstance().getRecord("testGetTotalSyncCount").syncSuccess();
    assertEquals(expected, DistroRecordsHolder.getInstance().getTotalSyncCount());
}
/**
 * Derives the commit requirements for applying {@code metadataUpdates} on top of
 * {@code base}. Always requires the table UUID to match, plus whatever each
 * individual update contributes via the builder.
 *
 * @throws IllegalArgumentException if either argument is null
 */
public static List<UpdateRequirement> forUpdateTable(
    TableMetadata base, List<MetadataUpdate> metadataUpdates) {
  Preconditions.checkArgument(null != base, "Invalid table metadata: null");
  Preconditions.checkArgument(null != metadataUpdates, "Invalid metadata updates: null");
  Builder builder = new Builder(base, false);
  builder.require(new UpdateRequirement.AssertTableUUID(base.uuid()));
  for (MetadataUpdate update : metadataUpdates) {
    builder.update(update);
  }
  return builder.build();
}
// SetSnapshotRef requirements must fail when the ref was created concurrently,
// when it vanished, or when its snapshot id changed between base and updated metadata.
@Test
public void setSnapshotRefFailure() {
    long snapshotId = 14L;
    String refName = "random_branch";
    SnapshotRef snapshotRef = mock(SnapshotRef.class);
    when(snapshotRef.isBranch()).thenReturn(true);
    when(snapshotRef.snapshotId()).thenReturn(snapshotId);
    ImmutableList<MetadataUpdate> metadataUpdates =
        ImmutableList.of(
            new MetadataUpdate.SetSnapshotRef(
                refName, snapshotId, SnapshotRefType.BRANCH, 0, 0L, 0L));
    // Case 1: ref absent in base but present in updated -> created concurrently.
    when(metadata.ref(refName)).thenReturn(null);
    when(updated.ref(refName)).thenReturn(snapshotRef);
    assertThatThrownBy(
            () ->
                UpdateRequirements.forUpdateTable(metadata, metadataUpdates)
                    .forEach(req -> req.validate(updated)))
        .isInstanceOf(CommitFailedException.class)
        .hasMessage("Requirement failed: branch random_branch was created concurrently");
    // Case 2: ref present in base but missing in updated.
    when(metadata.ref(refName)).thenReturn(snapshotRef);
    when(updated.ref(refName)).thenReturn(null);
    assertThatThrownBy(
            () ->
                UpdateRequirements.forUpdateTable(metadata, metadataUpdates)
                    .forEach(req -> req.validate(updated)))
        .isInstanceOf(CommitFailedException.class)
        .hasMessage("Requirement failed: branch or tag random_branch is missing, expected 14");
    // Case 3: ref exists in both but now points at a different snapshot.
    SnapshotRef snapshotRefUpdated = mock(SnapshotRef.class);
    when(snapshotRefUpdated.isBranch()).thenReturn(true);
    when(snapshotRefUpdated.snapshotId()).thenReturn(snapshotId + 1);
    when(updated.ref(refName)).thenReturn(snapshotRefUpdated);
    assertThatThrownBy(
            () ->
                UpdateRequirements.forUpdateTable(metadata, metadataUpdates)
                    .forEach(req -> req.validate(updated)))
        .isInstanceOf(CommitFailedException.class)
        .hasMessage("Requirement failed: branch random_branch has changed: expected id 14 != 15");
}
// Thin delegation to the internal execute0 implementation.
@Override
public void execute(Runnable task) {
    execute0(task);
}
// The executor must spin up a worker thread on demand, let it die when idle,
// and start a fresh (different) thread for the next submitted task.
@Test
@Timeout(value = 5000, unit = TimeUnit.MILLISECONDS)
public void testAutomaticStartStop() throws Exception {
    final TestRunnable task = new TestRunnable(500);
    e.execute(task);
    // Ensure the new thread has started.
    Thread thread = e.thread;
    assertThat(thread, is(not(nullValue())));
    assertThat(thread.isAlive(), is(true));
    thread.join();
    assertThat(task.ran.get(), is(true));
    // Ensure another new thread starts again.
    task.ran.set(false);
    e.execute(task);
    assertThat(e.thread, not(sameInstance(thread)));
    thread = e.thread;
    thread.join();
    assertThat(task.ran.get(), is(true));
}
// Static factory for a failed Result carrying the given status.
public static <T> Result<T> error(Status status) {
    return new Result<>(status);
}
// Result.error must carry the status's code through to getCode().
@Test
public void error() {
    Result ret = Result.error(Status.ACCESS_TOKEN_NOT_EXIST);
    Assertions.assertEquals(Status.ACCESS_TOKEN_NOT_EXIST.getCode(), ret.getCode().intValue());
}
/**
 * Renders one row per colocate group under the read lock:
 * [group id, group name, table ids, table names, buckets, replication num,
 *  distribution column types, stable flag]. Tables whose name can no longer be
 * resolved (dropped) are shown with a '*' suffix on the id and "[deleted]" as
 * the name.
 */
public List<List<String>> getInfos() {
    List<List<String>> infos = Lists.newArrayList();
    readLock();
    try {
        for (Map.Entry<String, GroupId> entry : groupName2Id.entrySet()) {
            List<String> info = Lists.newArrayList();
            GroupId groupId = entry.getValue();
            info.add(groupId.toString());
            info.add(entry.getKey());
            StringJoiner tblIdJoiner = new StringJoiner(", ");
            StringJoiner tblNameJoiner = new StringJoiner(", ");
            for (Long tableId : group2Tables.get(groupId)) {
                Optional<String> tblName = getTableName(groupId.dbId, tableId);
                if (!tblName.isPresent()) {
                    // Table was dropped: keep the id (starred) but mark the name deleted.
                    tblIdJoiner.add(tableId + "*");
                    tblNameJoiner.add("[deleted]");
                } else {
                    tblIdJoiner.add(tableId.toString());
                    tblNameJoiner.add(tblName.get());
                }
            }
            info.add(tblIdJoiner.toString());
            info.add(tblNameJoiner.toString());
            ColocateGroupSchema groupSchema = group2Schema.get(groupId);
            info.add(String.valueOf(groupSchema.getBucketsNum()));
            info.add(String.valueOf(groupSchema.getReplicationNum()));
            List<String> cols = groupSchema.getDistributionColTypes().stream().map(
                    Type::toSql).collect(Collectors.toList());
            info.add(Joiner.on(", ").join(cols));
            info.add(String.valueOf(!isGroupUnstable(groupId)));
            infos.add(info);
        }
    } finally {
        readUnlock();
    }
    return infos;
}
@Test public void testDropTable() throws Exception { ConnectContext connectContext = UtFrameUtils.createDefaultCtx(); // create db1 String createDbStmtStr = "create database db1;"; CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseStmtWithNewParser(createDbStmtStr, connectContext); GlobalStateMgr.getCurrentState().getMetadata().createDb(createDbStmt.getFullDbName()); // create table1_1->group1 String sql = "CREATE TABLE db1.table1_1 (k1 int, k2 int, k3 varchar(32))\n" + "PRIMARY KEY(k1)\n" + "DISTRIBUTED BY HASH(k1)\n" + "BUCKETS 4\n" + "PROPERTIES(\"colocate_with\"=\"group1\", \"replication_num\" = \"1\");\n"; CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); StarRocksAssert.utCreateTableWithRetry(createTableStmt); List<List<String>> infos = GlobalStateMgr.getCurrentState().getColocateTableIndex().getInfos(); // group1->table1To1 Assert.assertEquals(1, infos.size()); Map<String, List<String>> map = groupByName(infos); Table table1To1 = GlobalStateMgr.getCurrentState().getDb("db1").getTable("table1_1"); Assert.assertEquals(String.format("%d", table1To1.getId()), map.get("group1").get(2)); LOG.info("after create db1.table1_1: {}", infos); // create table1_2->group1 sql = "CREATE TABLE db1.table1_2 (k1 int, k2 int, k3 varchar(32))\n" + "PRIMARY KEY(k1)\n" + "DISTRIBUTED BY HASH(k1)\n" + "BUCKETS 4\n" + "PROPERTIES(\"colocate_with\"=\"group1\", \"replication_num\" = \"1\");\n"; createTableStmt = (CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); StarRocksAssert.utCreateTableWithRetry(createTableStmt); // group1 -> table1To1, table1To2 infos = GlobalStateMgr.getCurrentState().getColocateTableIndex().getInfos(); Assert.assertEquals(1, infos.size()); map = groupByName(infos); Table table1To2 = GlobalStateMgr.getCurrentState().getDb("db1").getTable("table1_2"); Assert.assertEquals(String.format("%d, %d", table1To1.getId(), table1To2.getId()), map.get("group1").get(2)); 
LOG.info("after create db1.table1_2: {}", infos); // create db2 createDbStmtStr = "create database db2;"; createDbStmt = (CreateDbStmt) UtFrameUtils.parseStmtWithNewParser(createDbStmtStr, connectContext); GlobalStateMgr.getCurrentState().getMetadata().createDb(createDbStmt.getFullDbName()); // create table2_1 -> group2 sql = "CREATE TABLE db2.table2_1 (k1 int, k2 int, k3 varchar(32))\n" + "PRIMARY KEY(k1)\n" + "DISTRIBUTED BY HASH(k1)\n" + "BUCKETS 4\n" + "PROPERTIES(\"colocate_with\"=\"group2\", \"replication_num\" = \"1\");\n"; createTableStmt = (CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); StarRocksAssert.utCreateTableWithRetry(createTableStmt); // group1 -> table1_1, table1_2 // group2 -> table2_l infos = GlobalStateMgr.getCurrentState().getColocateTableIndex().getInfos(); Assert.assertEquals(2, infos.size()); map = groupByName(infos); Assert.assertEquals(String.format("%d, %d", table1To1.getId(), table1To2.getId()), map.get("group1").get(2)); Table table2To1 = GlobalStateMgr.getCurrentState().getDb("db2").getTable("table2_1"); Assert.assertEquals(String.format("%d", table2To1.getId()), map.get("group2").get(2)); LOG.info("after create db2.table2_1: {}", infos); // drop db1.table1_1 sql = "DROP TABLE db1.table1_1;"; DropTableStmt dropTableStmt = (DropTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().getLocalMetastore().dropTable(dropTableStmt); // group1 -> table1_1*, table1_2 // group2 -> table2_l infos = GlobalStateMgr.getCurrentState().getColocateTableIndex().getInfos(); map = groupByName(infos); Assert.assertEquals(2, infos.size()); Assert.assertEquals(String.format("%d*, %d", table1To1.getId(), table1To2.getId()), map.get("group1").get(2)); Assert.assertEquals(String.format("%d", table2To1.getId()), map.get("group2").get(2)); LOG.info("after drop db1.table1_1: {}", infos); // drop db1.table1_2 sql = "DROP TABLE db1.table1_2;"; dropTableStmt = (DropTableStmt) 
UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().getLocalMetastore().dropTable(dropTableStmt); // group1 -> table1_1*, table1_2* // group2 -> table2_l infos = GlobalStateMgr.getCurrentState().getColocateTableIndex().getInfos(); map = groupByName(infos); Assert.assertEquals(2, infos.size()); Assert.assertEquals(String.format("%d*, %d*", table1To1.getId(), table1To2.getId()), map.get("group1").get(2)); Assert.assertEquals(String.format("[deleted], [deleted]", table1To1.getId(), table1To2.getId()), map.get("group1").get(3)); Assert.assertEquals(String.format("%d", table2To1.getId()), map.get("group2").get(2)); Assert.assertEquals(String.format("table2_1", table2To1.getId()), map.get("group2").get(3)); LOG.info("after drop db1.table1_2: {}", infos); // drop db2 sql = "DROP DATABASE db2;"; DropDbStmt dropDbStmt = (DropDbStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().getMetadata().dropDb(dropDbStmt.getDbName(), dropDbStmt.isForceDrop()); // group1 -> table1_1*, table1_2* // group2 -> table2_l* infos = GlobalStateMgr.getCurrentState().getColocateTableIndex().getInfos(); map = groupByName(infos); Assert.assertEquals(2, infos.size()); Assert.assertEquals(String.format("%d*, %d*", table1To1.getId(), table1To2.getId()), map.get("group1").get(2)); Assert.assertEquals(String.format("%d*", table2To1.getId()), map.get("group2").get(2)); LOG.info("after drop db2: {}", infos); // create & drop db2 again createDbStmtStr = "create database db2;"; createDbStmt = (CreateDbStmt) UtFrameUtils.parseStmtWithNewParser(createDbStmtStr, connectContext); GlobalStateMgr.getCurrentState().getMetadata().createDb(createDbStmt.getFullDbName()); // create table2_1 -> group2 sql = "CREATE TABLE db2.table2_3 (k1 int, k2 int, k3 varchar(32))\n" + "PRIMARY KEY(k1)\n" + "DISTRIBUTED BY HASH(k1)\n" + "BUCKETS 4\n" + "PROPERTIES(\"colocate_with\"=\"group3\", \"replication_num\" = \"1\");\n"; createTableStmt = 
(CreateTableStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); StarRocksAssert.utCreateTableWithRetry(createTableStmt); Table table2To3 = GlobalStateMgr.getCurrentState().getDb("db2").getTable("table2_3"); sql = "DROP DATABASE db2;"; dropDbStmt = (DropDbStmt) UtFrameUtils.parseStmtWithNewParser(sql, connectContext); GlobalStateMgr.getCurrentState().getMetadata().dropDb(dropDbStmt.getDbName(), dropDbStmt.isForceDrop()); infos = GlobalStateMgr.getCurrentState().getColocateTableIndex().getInfos(); map = groupByName(infos); LOG.info("after create & drop db2: {}", infos); Assert.assertEquals(3, infos.size()); Assert.assertEquals(String.format("%d*, %d*", table1To1.getId(), table1To2.getId()), map.get("group1").get(2)); Assert.assertEquals("[deleted], [deleted]", map.get("group1").get(3)); Assert.assertEquals(String.format("%d*", table2To1.getId()), map.get("group2").get(2)); Assert.assertEquals(String.format("[deleted], [deleted]", table1To1.getId(), table1To2.getId()), map.get("group1").get(3)); Assert.assertEquals(String.format("%d*", table2To3.getId()), map.get("group3").get(2)); Assert.assertEquals(String.format("[deleted], [deleted]", table1To1.getId(), table1To2.getId()), map.get("group1").get(3)); }
// No-arg constructor: delegates with a null configuration argument.
public Tree() {
    this(null);
}
// Builds a tree from the flat node list with custom key names and a depth limit of 2,
// attaching extra attributes per node; two root-level subtrees are expected.
@Test
public void treeTest() {
    //配置
    TreeNodeConfig treeNodeConfig = new TreeNodeConfig();
    // 自定义属性名 都要默认值的
    treeNodeConfig.setWeightKey("order");
    treeNodeConfig.setIdKey("rid");
    treeNodeConfig.setDeep(2);
    //转换器
    List<Tree<String>> treeNodes = TreeUtil.build(nodeList, "0", treeNodeConfig,
            (treeNode, tree) -> {
                tree.setId(treeNode.getId());
                tree.setParentId(treeNode.getParentId());
                tree.setWeight(treeNode.getWeight());
                tree.setName(treeNode.getName());
                // 扩展属性 ...
                tree.putExtra("extraField", 666);
                tree.putExtra("other", new Object());
            });
    assertEquals(treeNodes.size(), 2);
}
// Plain accessor for the header's data offset field.
public long getDataOffset() {
    return data_offset;
}
// The parsed ITSF header must report the fixture's expected data offset.
@Test
public void getDataOffset() {
    assertEquals(TestParameters.VP_DATA_OFFSET_LENGTH, chmItsfHeader.getDataOffset());
}
/**
 * Parses a single migration SQL string: validates the statement type, substitutes
 * variables into the parsed statement, then re-parses the substituted text to
 * classify it. Plain STATEMENTs carry no AST; everything else gets a built AST.
 *
 * @throws MigrationException when the initial parse fails
 */
// CHECKSTYLE_RULES.OFF: CyclomaticComplexity
public static ParsedCommand parse(
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql, final Map<String, String> variables) {
  validateSupportedStatementType(sql);
  final String substituted;
  try {
    substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
  } catch (ParseFailedException e) {
    throw new MigrationException(String.format(
        "Failed to parse the statement. Statement: %s. Reason: %s",
        sql, e.getMessage()));
  }
  // Second parse runs on the text with variables already substituted.
  final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
      .get(0).getStatement();
  final boolean isStatement = StatementType.get(statementContext.statement().getClass())
      == StatementType.STATEMENT;
  return new ParsedCommand(substituted,
      isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
          .buildStatement(statementContext)));
}
@Test
public void shouldParseDropSourceStatement() {
    // When:
    List<CommandParser.ParsedCommand> commands = parse("drop stream foo;");
    // Then: DROP is a plain statement, so no AST node is attached.
    assertThat(commands.size(), is(1));
    assertThat(commands.get(0).getStatement().isPresent(), is (false));
    assertThat(commands.get(0).getCommand(), is("drop stream foo;"));
}
/** Returns the URI schemes handled by this protocol: {@code davs} and {@code https}. */
@Override
public String[] getSchemes() {
    return new String[]{Scheme.davs.name(), Scheme.https.name()};
}
@Test
public void testSchemes() {
    // The secure WebDAV protocol must advertise the https scheme.
    assertTrue(Arrays.asList(new DAVSSLProtocol().getSchemes()).contains(Scheme.https.name()));
}
/**
 * FEEL string() function: converts the argument to its FEEL string
 * representation. A null input yields a successful result carrying null.
 */
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    return val == null
            ? FEELFnResult.ofResult(null)
            : FEELFnResult.ofResult(TypeUtil.formatValue(val, false));
}
@Test
void invokeDurationZero() {
    // Duration.ZERO is rendered using ISO-8601 duration syntax.
    FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ZERO), "PT0S");
}
/**
 * Decodes a raw Palo Alto syslog message into a {@link Message}, parsing the
 * type-specific fields with the template matching the PAN log type.
 * Returns null when the syslog header cannot be parsed.
 */
@Nullable
@Override
public Message decode(@Nonnull RawMessage rawMessage) {
    String s = new String(rawMessage.getPayload(), StandardCharsets.UTF_8);
    LOG.trace("Received raw message: {}", s);
    String timezoneID = configuration.getString(CK_TIMEZONE);
    // previously existing PA inputs after updating will not have a Time Zone configured, default to UTC
    DateTimeZone timezone = timezoneID != null ? DateTimeZone.forID(timezoneID) : DateTimeZone.UTC;
    LOG.trace("Configured time zone: {}", timezone);
    PaloAltoMessageBase p = parser.parse(s, timezone);
    // Return when error occurs parsing syslog header.
    if (p == null) {
        return null;
    }
    Message message = messageFactory.createMessage(p.payload(), p.source(), p.timestamp());
    // Each PAN log type has its own field template; pick the matching parser.
    switch (p.panType()) {
        case "THREAT":
            final PaloAltoTypeParser parserThreat = new PaloAltoTypeParser(templates.getThreatMessageTemplate());
            message.addFields(parserThreat.parseFields(p.fields(), timezone));
            break;
        case "SYSTEM":
            final PaloAltoTypeParser parserSystem = new PaloAltoTypeParser(templates.getSystemMessageTemplate());
            message.addFields(parserSystem.parseFields(p.fields(), timezone));
            break;
        case "TRAFFIC":
            final PaloAltoTypeParser parserTraffic = new PaloAltoTypeParser(templates.getTrafficMessageTemplate());
            message.addFields(parserTraffic.parseFields(p.fields(), timezone));
            break;
        default:
            // Unknown types keep the base message but gain no parsed fields.
            LOG.error("Unsupported PAN type [{}]. Not adding any parsed fields.", p.panType());
    }
    LOG.trace("Successfully processed [{}] message with [{}] fields.", p.panType(), message.getFieldCount());
    return message;
}
@Test
public void testMoreSyslogFormats() {
    // Test an extra list of messages.
    for (String threatString : MORE_SYSLOG_THREAT_MESSAGES) {
        PaloAltoCodec codec = new PaloAltoCodec(Configuration.EMPTY_CONFIGURATION, messageFactory);
        Message message = codec.decode(new RawMessage(threatString.getBytes(StandardCharsets.UTF_8)));
        // Every sample must be recognised as a THREAT log.
        assertEquals("THREAT", message.getField("type"));
    }
}
/**
 * Builds the "System" section of the system-info protobuf: server identity,
 * edition, code statistics, container/HA flags and authentication settings.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder section = ProtobufSystemInfo.Section.newBuilder();
    section.setName("System");
    setAttribute(section, "Server ID", server.getId());
    setAttribute(section, "Edition", sonarRuntime.getEdition().getLabel());
    setAttribute(section, NCLOC.getName(), statisticsSupport.getLinesOfCode());
    setAttribute(section, "Container", containerSupport.isRunningInContainer());
    setAttribute(section, "High Availability", true);
    setAttribute(section, "External Users and Groups Provisioning",
        commonSystemInformation.getManagedInstanceProviderName());
    setAttribute(section, "External User Authentication",
        commonSystemInformation.getExternalUserAuthentication());
    // Empty provider lists are omitted entirely.
    addIfNotEmpty(section, "Accepted external identity providers",
        commonSystemInformation.getEnabledIdentityProviders());
    addIfNotEmpty(section, "External identity providers whose users are allowed to sign themselves up",
        commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders());
    setAttribute(section, "Force authentication", commonSystemInformation.getForceAuthentication());
    return section.build();
}
@Test
@UseDataProvider("trueOrFalse")
public void toProtobuf_whenRunningOrNotRunningInContainer_shouldReturnCorrectFlag(boolean flag) {
    // The "Container" attribute must mirror the container-support detection.
    when(containerSupport.isRunningInContainer()).thenReturn(flag);
    ProtobufSystemInfo.Section protobuf = underTest.toProtobuf();
    assertThatAttributeIs(protobuf, "Container", flag);
}
/**
 * Static factory for {@link ExpressionEvaluatorFactory}.
 *
 * @param classLoader class loader used to resolve evaluated types
 * @param type        scenario simulation type the factory serves
 */
public static ExpressionEvaluatorFactory create(ClassLoader classLoader, Type type) {
    return new ExpressionEvaluatorFactory(classLoader, type);
}
@Test
public void create() {
    // The factory must be constructible for RULE scenarios.
    assertThat(ExpressionEvaluatorFactory.create(classLoader, ScenarioSimulationModel.Type.RULE)).isNotNull();
}
/**
 * Tells whether the given Maven group id belongs to the Vespa project itself:
 * either "com.yahoo.vespa" / "ai.vespa" exactly, or a group id nested beneath
 * one of them (e.g. "ai.vespa.hosted"). A trailing dot alone does not match.
 */
static boolean isVespaParent(String groupId) {
    // Exact Vespa group ids; "(\\..+)?" additionally accepts nested group ids.
    String vespaGroupIds = "com\\.yahoo\\.vespa|ai\\.vespa";
    return groupId.matches("(" + vespaGroupIds + ")(\\..+)?");
}
@Test
public void testRegex() {
    // Exact Vespa group ids and nested group ids match.
    assertTrue(ApplicationMojo.isVespaParent("ai.vespa"));
    assertTrue(ApplicationMojo.isVespaParent("ai.vespa.hosted"));
    assertTrue(ApplicationMojo.isVespaParent("com.yahoo.vespa"));
    assertTrue(ApplicationMojo.isVespaParent("com.yahoo.vespa.hosted"));
    // Prefixes, trailing dots, and lookalike ids do not.
    assertFalse(ApplicationMojo.isVespaParent("ai"));
    assertFalse(ApplicationMojo.isVespaParent("ai.vespa."));
    assertFalse(ApplicationMojo.isVespaParent("ai.vespaxxx."));
    assertFalse(ApplicationMojo.isVespaParent("com.yahoo"));
    assertFalse(ApplicationMojo.isVespaParent("com.yahoo.vespa."));
    assertFalse(ApplicationMojo.isVespaParent("com.yahoo.vespaxxx"));
}
/** Returns this endpoint's Service Bus configuration. */
public ServiceBusConfiguration getConfiguration() {
    return configuration;
}
@Test
void testCreateEndpointWithFqnsAndCredential() throws Exception {
    final String uri = "azure-servicebus://testTopicOrQueue";
    final String remaining = "testTopicOrQueue";
    final String fullyQualifiedNamespace = "namespace.servicebus.windows.net";
    final TokenCredential credential = new DefaultAzureCredentialBuilder().build();
    final Map<String, Object> params = new HashMap<>();
    params.put("serviceBusType", ServiceBusType.topic);
    params.put("prefetchCount", 10);
    params.put("fullyQualifiedNamespace", fullyQualifiedNamespace);
    params.put("tokenCredential", credential);
    final ServiceBusEndpoint endpoint = (ServiceBusEndpoint) context.getComponent("azure-servicebus", ServiceBusComponent.class)
        .createEndpoint(uri, remaining, params);
    // The endpoint configuration must reflect every supplied parameter.
    assertEquals(ServiceBusType.topic, endpoint.getConfiguration().getServiceBusType());
    assertEquals("testTopicOrQueue", endpoint.getConfiguration().getTopicOrQueueName());
    assertEquals(10, endpoint.getConfiguration().getPrefetchCount());
    assertEquals(fullyQualifiedNamespace, endpoint.getConfiguration().getFullyQualifiedNamespace());
    assertEquals(credential, endpoint.getConfiguration().getTokenCredential());
}
/**
 * Lazily loads the PUR repository implementation through the plugin class
 * loader and caches it. Returns null (after logging) when the PUR plugin is
 * not installed or its class cannot be instantiated.
 */
protected Repository getDelegate() {
    if ( this.delegate != null ) {
        return this.delegate;
    }
    Repository repository = null;
    try {
        repository = (Repository) purPluginClassLoader.loadClass( "org.pentaho.di.repository.pur.PurRepository" ).newInstance();
    } catch ( Exception e ) {
        // Fix: include the exception so the stack trace is not lost — without it,
        // plugin-load failures were impossible to diagnose from the log alone.
        logger.error( "Unable to load delegate class for plugin id \"{}\". PUR plugin is most likely not installed or "
            + "changed id.", PUR_PLUGIN_ID, e );
    }
    // Cache even a null result; repeated lookups would fail the same way.
    return this.delegate = repository;
}
@Test
public void getDelegateTest() {
    Repository repository = null;
    try {
        // Make the mocked plugin class loader resolve the real PUR repository class.
        Mockito.<Class<?>>when( mockClassLoader.loadClass( anyString() ) )
            .thenReturn( Class.forName( "org.pentaho" + ".di.repository.pur.PurRepository" ) );
    } catch ( ClassNotFoundException e ) {
        e.printStackTrace();
    }
    repository = proxy.getDelegate();
    assertNotNull( repository );
}
/**
 * Reports whether this bounding box and the given one overlap. Boxes that
 * merely touch on an edge or corner are considered intersecting.
 */
public boolean intersects(BoundingBox boundingBox) {
    if (boundingBox == this) {
        return true;
    }
    // Overlap requires both the latitude ranges and the longitude ranges to overlap.
    boolean latitudesOverlap = this.maxLatitude >= boundingBox.minLatitude
            && this.minLatitude <= boundingBox.maxLatitude;
    boolean longitudesOverlap = this.maxLongitude >= boundingBox.minLongitude
            && this.minLongitude <= boundingBox.maxLongitude;
    return latitudesOverlap && longitudesOverlap;
}
@Test
public void intersectsTest() {
    BoundingBox boundingBox1 = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE, MAX_LONGITUDE);
    BoundingBox boundingBox2 = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE, MAX_LONGITUDE);
    // Touching at a corner counts as intersecting.
    BoundingBox boundingBox3 = new BoundingBox(0, 0, MIN_LATITUDE, MIN_LONGITUDE);
    // A strictly containing box intersects.
    BoundingBox boundingBox4 = new BoundingBox(MIN_LATITUDE - 1, MIN_LONGITUDE - 1, MAX_LATITUDE + 1, MAX_LONGITUDE + 1);
    BoundingBox boundingBox5 = new BoundingBox(0, 0, 0, 0);
    BoundingBox boundingBox6 = new BoundingBox(-4, -3, -2, -1);
    assertIntersection(boundingBox1, boundingBox1);
    assertIntersection(boundingBox1, boundingBox2);
    assertIntersection(boundingBox1, boundingBox3);
    assertIntersection(boundingBox1, boundingBox4);
    assertNoIntersection(boundingBox1, boundingBox5);
    assertNoIntersection(boundingBox1, boundingBox6);
    assertNoIntersection(boundingBox5, boundingBox6);
}
/**
 * Returns the variance of the exponential distribution, 1 / lambda².
 */
@Override
public double variance() {
    return 1 / (lambda * lambda);
}
@Test
public void testVariance() {
    System.out.println("variance");
    ExponentialDistribution instance = new ExponentialDistribution(1.0);
    instance.rand();
    // variance = 1 / lambda^2 for each rate parameter below.
    assertEquals(1.0, instance.variance(), 1E-7);
    instance.rand();
    instance = new ExponentialDistribution(2.0);
    instance.rand();
    assertEquals(0.25, instance.variance(), 1E-7);
    instance = new ExponentialDistribution(3.0);
    instance.rand();
    assertEquals(1.0/9, instance.variance(), 1E-7);
    instance = new ExponentialDistribution(4.0);
    instance.rand();
    assertEquals(1.0/16, instance.variance(), 1E-7);
}
/**
 * Writes the smali text form of a dex encoded value: numeric types as hex,
 * booleans/floats/doubles in decimal form, strings quoted, and reference
 * types via their descriptor writers.
 *
 * @throws IOException              if the underlying writer fails
 * @throws IllegalArgumentException for an unknown value type
 */
public void writeEncodedValue(EncodedValue encodedValue) throws IOException {
    switch (encodedValue.getValueType()) {
        case ValueType.BOOLEAN:
            writer.write(Boolean.toString(((BooleanEncodedValue) encodedValue).getValue()));
            break;
        case ValueType.BYTE:
            writer.write(String.format("0x%x", ((ByteEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.CHAR:
            // Chars are written as the hex of their integer code point.
            writer.write(String.format("0x%x", (int)((CharEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.SHORT:
            writer.write(String.format("0x%x", ((ShortEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.INT:
            writer.write(String.format("0x%x", ((IntEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.LONG:
            writer.write(String.format("0x%x", ((LongEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.FLOAT:
            writer.write(Float.toString(((FloatEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.DOUBLE:
            writer.write(Double.toString(((DoubleEncodedValue)encodedValue).getValue()));
            break;
        case ValueType.ANNOTATION:
            writeAnnotation((AnnotationEncodedValue)encodedValue);
            break;
        case ValueType.ARRAY:
            writeArray((ArrayEncodedValue)encodedValue);
            break;
        case ValueType.STRING:
            writeQuotedString(((StringEncodedValue)encodedValue).getValue());
            break;
        case ValueType.FIELD:
            writeFieldDescriptor(((FieldEncodedValue)encodedValue).getValue());
            break;
        case ValueType.ENUM:
            // Enum constants are written as field references.
            writeFieldDescriptor(((EnumEncodedValue)encodedValue).getValue());
            break;
        case ValueType.METHOD:
            writeMethodDescriptor(((MethodEncodedValue)encodedValue).getValue());
            break;
        case ValueType.TYPE:
            writeType(((TypeEncodedValue)encodedValue).getValue());
            break;
        case ValueType.METHOD_TYPE:
            writeMethodProtoDescriptor(((MethodTypeEncodedValue)encodedValue).getValue());
            break;
        case ValueType.METHOD_HANDLE:
            writeMethodHandle(((MethodHandleEncodedValue)encodedValue).getValue());
            break;
        case ValueType.NULL:
            writer.write("null");
            break;
        default:
            throw new IllegalArgumentException("Unknown encoded value type");
    }
}
@Test
public void testWriteEncodedValue_char() throws IOException {
    DexFormattedWriter writer = new DexFormattedWriter(output);
    // 'a' is code point 0x61; chars are rendered as hex.
    writer.writeEncodedValue(new ImmutableCharEncodedValue('a'));
    Assert.assertEquals("0x61", output.toString());
}
/**
 * Builds the cluster statistics table: one row per database with counts of
 * tables, partitions, indices, tablets, replicas and unhealthy/inconsistent/
 * cloning/error-state tablets, followed by a "Total" summary row.
 *
 * @throws AnalysisException declared by the proc-node interface
 */
@Override
public ProcResult fetchResult() throws AnalysisException {
    Preconditions.checkNotNull(globalStateMgr);
    BaseProcResult result = new BaseProcResult();
    result.setNames(TITLE_NAMES);
    List<Long> dbIds = globalStateMgr.getLocalMetastore().getDbIds();
    if (dbIds == null || dbIds.isEmpty()) {
        // empty
        return result;
    }
    SystemInfoService infoService = GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo();
    int totalDbNum = 0;
    int totalTableNum = 0;
    int totalPartitionNum = 0;
    int totalIndexNum = 0;
    int totalTabletNum = 0;
    int totalReplicaNum = 0;
    // Reset cached per-invocation state before recomputing.
    unhealthyTabletIds.clear();
    inconsistentTabletIds.clear();
    errorStateTabletIds.clear();
    cloningTabletIds = AgentTaskQueue.getTabletIdsByType(TTaskType.CLONE);
    List<List<Comparable>> lines = new ArrayList<List<Comparable>>();
    for (Long dbId : dbIds) {
        if (dbId == 0) {
            // skip information_schema database
            continue;
        }
        Database db = globalStateMgr.getDb(dbId);
        if (db == null) {
            continue;
        }
        ++totalDbNum;
        List<Long> aliveBeIdsInCluster = infoService.getBackendIds(true);
        Locker locker = new Locker();
        locker.lockDatabase(db, LockType.READ);
        try {
            int dbTableNum = 0;
            int dbPartitionNum = 0;
            int dbIndexNum = 0;
            int dbTabletNum = 0;
            int dbReplicaNum = 0;
            for (Table table : db.getTables()) {
                if (!table.isNativeTableOrMaterializedView()) {
                    continue;
                }
                ++dbTableNum;
                OlapTable olapTable = (OlapTable) table;
                for (Partition partition : olapTable.getAllPartitions()) {
                    short replicationNum = olapTable.getPartitionInfo().getReplicationNum(partition.getId());
                    ++dbPartitionNum;
                    for (PhysicalPartition physicalParition : partition.getSubPartitions()) {
                        for (MaterializedIndex materializedIndex : physicalParition
                                .getMaterializedIndices(IndexExtState.VISIBLE)) {
                            ++dbIndexNum;
                            for (Tablet tablet : materializedIndex.getTablets()) {
                                ++dbTabletNum;
                                // Replica health checks only apply to local (non cloud-native) tablets.
                                if (table.isCloudNativeTableOrMaterializedView()) {
                                    continue;
                                }
                                LocalTablet localTablet = (LocalTablet) tablet;
                                dbReplicaNum += localTablet.getImmutableReplicas().size();
                                if (localTablet.getErrorStateReplicaNum() > 0) {
                                    errorStateTabletIds.put(dbId, tablet.getId());
                                }
                                Pair<TabletHealthStatus, Priority> res = TabletChecker.getTabletHealthStatusWithPriority(
                                        localTablet, infoService, physicalParition.getVisibleVersion(),
                                        replicationNum, aliveBeIdsInCluster, olapTable.getLocation());
                                // here we treat REDUNDANT as HEALTHY, for user-friendly.
                                if (res.first != TabletHealthStatus.HEALTHY
                                        && res.first != TabletHealthStatus.REDUNDANT
                                        && res.first != TabletHealthStatus.COLOCATE_REDUNDANT
                                        && res.first != TabletHealthStatus.NEED_FURTHER_REPAIR) {
                                    unhealthyTabletIds.put(dbId, tablet.getId());
                                }
                                if (!localTablet.isConsistent()) {
                                    inconsistentTabletIds.put(dbId, tablet.getId());
                                }
                            } // end for tablets
                        } // end for indices
                    }
                } // end for partitions
            } // end for tables
            List<Comparable> oneLine = new ArrayList<Comparable>(TITLE_NAMES.size());
            oneLine.add(dbId);
            oneLine.add(db.getFullName());
            oneLine.add(dbTableNum);
            oneLine.add(dbPartitionNum);
            oneLine.add(dbIndexNum);
            oneLine.add(dbTabletNum);
            oneLine.add(dbReplicaNum);
            oneLine.add(unhealthyTabletIds.get(dbId).size());
            oneLine.add(inconsistentTabletIds.get(dbId).size());
            oneLine.add(cloningTabletIds.get(dbId).size());
            oneLine.add(errorStateTabletIds.get(dbId).size());
            lines.add(oneLine);
            totalTableNum += dbTableNum;
            totalPartitionNum += dbPartitionNum;
            totalIndexNum += dbIndexNum;
            totalTabletNum += dbTabletNum;
            totalReplicaNum += dbReplicaNum;
        } finally {
            locker.unLockDatabase(db, LockType.READ);
        }
    } // end for dbs
    // sort by dbName
    ListComparator<List<Comparable>> comparator = new ListComparator<List<Comparable>>(1);
    Collections.sort(lines, comparator);
    // add sum line after sort
    List<Comparable> finalLine = new ArrayList<Comparable>(TITLE_NAMES.size());
    finalLine.add("Total");
    finalLine.add(totalDbNum);
    finalLine.add(totalTableNum);
    finalLine.add(totalPartitionNum);
    finalLine.add(totalIndexNum);
    finalLine.add(totalTabletNum);
    finalLine.add(totalReplicaNum);
    finalLine.add(unhealthyTabletIds.size());
    finalLine.add(inconsistentTabletIds.size());
    finalLine.add(cloningTabletIds.size());
    finalLine.add(errorStateTabletIds.size());
    lines.add(finalLine);
    // add result
    for (List<Comparable> line : lines) {
        List<String> row = new ArrayList<String>(line.size());
        for (Comparable comparable : line) {
            row.add(comparable.toString());
        }
        result.addRow(row);
    }
    return result;
}
@Test
public void testFetchResult() throws AnalysisException {
    // Smoke test: building the statistics table must not throw.
    new StatisticProcDir(GlobalStateMgr.getCurrentState()).fetchResult();
}
/** This analyzer never acts as an input step. */
@Override
public boolean isInput() {
    return false;
}
@Test
public void testIsInput() throws Exception {
    // The analyzer must report itself as a non-input step.
    assertFalse( analyzer.isInput() );
}
/**
 * Looks up the value stored for {@code key}, or null when the key is null or
 * not present.
 */
@Nullable
protected V get(K key) {
    if (key == null) return null;
    Object[] state = state();
    // state() is a flat array of alternating key/value entries; the value sits
    // immediately after its key, so index i holds the key and i + 1 the value.
    int i = indexOfExistingKey(state, key);
    return i != -1 ? (V) state[i + 1] : null;
}
@Test
void get_ignored_if_unconfigured() {
    // Unconfigured keys resolve to null rather than throwing.
    assertThat(extra.get("three")).isNull();
}
/**
 * Renders the result rows as a table followed by a row-count footer, or the
 * dedicated empty-result output when the iterator yields nothing.
 */
@Override
public void print(Iterator<RowData> it, PrintWriter printWriter) {
    if (it.hasNext()) {
        long numRows = printTable(it, printWriter);
        printFooter(printWriter, numRows);
    } else {
        printEmptyResult(it, printWriter);
    }
}
@Test
void testPrintWithEmptyResultAndDeriveColumnWidthByContent() {
    PrintStyle.tableauWithTypeInferredColumnWidths(
            getSchema(), getConverter(), PrintStyle.DEFAULT_MAX_COLUMN_WIDTH, true, false)
        .print(Collections.emptyIterator(), new PrintWriter(outContent));
    // With no rows, only the "Empty set" line is printed.
    assertThat(outContent.toString()).isEqualTo("Empty set" + System.lineSeparator());
}
/**
 * Compares two lists element by element, treating missing trailing elements
 * of the shorter list as {@code fillValue}. Returns the first non-zero
 * element comparison, or 0 when the (padded) lists compare equal throughout.
 */
static <T extends Comparable<? super T>> int compareListWithFillValue(
        List<T> left, List<T> right, T fillValue) {
    int paddedLength = Math.max(left.size(), right.size());
    for (int index = 0; index < paddedLength; index++) {
        // Out-of-range positions on either side read as the fill value.
        T leftValue = index < left.size() ? left.get(index) : fillValue;
        T rightValue = index < right.size() ? right.get(index) : fillValue;
        int comparison = leftValue.compareTo(rightValue);
        if (comparison != 0) {
            return comparison;
        }
    }
    return 0;
}
@Test
public void compareWithFillValue_bothEmptyListWithPositiveFillValue_returnsZero() {
    // Two empty lists compare equal regardless of the fill value.
    assertThat(
        ComparisonUtility.compareListWithFillValue(
            Lists.newArrayList(), Lists.newArrayList(), 1))
        .isEqualTo(0);
}
/**
 * Creates the Spark batch read for this scan; the hash code ties the batch
 * back to this scan instance's configuration.
 */
@Override
public Batch toBatch() {
    return new SparkBatch(
        sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
}
@Test
public void testPartitionedYears() throws Exception {
    createPartitionedTable(spark, tableName, "years(ts)");
    SparkScanBuilder builder = scanBuilder();
    YearsFunction.TimestampToYearsFunction function = new YearsFunction.TimestampToYearsFunction();
    UserDefinedScalarFunc udf = toUDF(function, expressions(fieldRef("ts")));
    Predicate predicate = new Predicate(
        "=",
        expressions(
            udf, intLit(timestampStrToYearOrdinal("2017-11-22T00:00:00.000000+00:00"))));
    pushFilters(builder, predicate);
    Batch scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(5);
    // NOT Equal
    builder = scanBuilder();
    predicate = new Not(predicate);
    pushFilters(builder, predicate);
    scan = builder.build().toBatch();
    assertThat(scan.planInputPartitions().length).isEqualTo(5);
}
/**
 * Creates a builder that layers baggage propagation on top of the given
 * delegate propagation factory.
 */
public static FactoryBuilder newFactoryBuilder(Propagation.Factory delegate) {
    return new FactoryBuilder(delegate);
}
@Test
void newFactory_sharingRemoteName() {
    BaggagePropagation.FactoryBuilder builder = newFactoryBuilder(B3Propagation.FACTORY);
    SingleBaggageField userName = SingleBaggageField.newBuilder(BaggageField.create("userName")).addKeyName("baggage").build();
    SingleBaggageField userId = SingleBaggageField.newBuilder(BaggageField.create("userId")).addKeyName("baggage").build();
    builder.add(userName);
    // A second field reusing the same remote key name must be rejected.
    assertThatThrownBy(() -> builder.add(userId))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Propagation key already in use: baggage");
}
/**
 * Case-insensitive enum lookup without aliases: delegates to the alias-aware
 * overload with an empty alias map.
 */
public static <T extends Enum<T>> T getForNameIgnoreCase(final @Nullable String value, final @NotNull Class<T> enumType) {
    return getForNameIgnoreCase(value, enumType, Map.of());
}
@Test
void shouldGetEnumForNameIgnoreCaseForExisting() {
    // Lookup is case-insensitive: "enum1" resolves to ENUM1.
    TestEnum result = Enums.getForNameIgnoreCase("enum1", TestEnum.class);
    Assertions.assertEquals(TestEnum.ENUM1, result);
}
/**
 * Returns the live collection of group administrators. Callers mutate this
 * collection directly (see tests), so it is intentionally not wrapped in an
 * unmodifiable view.
 */
public Collection<JID> getAdmins() {
    return administrators;
}
@Test
public void testOverrideBareJidWithFullJid() throws Exception {
    // Setup test fixture.
    final String groupName = "unit-test-group-l";
    final Group group = groupManager.createGroup(groupName);
    final JID fullJid = new JID("unit-test-user-l", "example.org", "unit-test-resource-l");
    final JID bareJid = fullJid.asBareJID();
    group.getAdmins().add(bareJid);
    // Execute system under test.
    final boolean result = group.getAdmins().add(fullJid);
    // Verify results: the bare JID already covers the full JID, so nothing is added.
    assertFalse(result);
}
/**
 * Strips deep-link bookkeeping entries from the given JSON object: every key
 * starting with "$latest" or "_latest" is removed in place. Null-safe; any
 * unexpected failure is logged and swallowed.
 */
public static void removeDeepLinkInfo(JSONObject jsonObject) {
    if (jsonObject == null) {
        return;
    }
    try {
        Iterator<String> keyIterator = jsonObject.keys();
        while (keyIterator.hasNext()) {
            String key = keyIterator.next();
            if (key.startsWith("$latest") || key.startsWith("_latest")) {
                // Removing through the iterator keeps traversal valid.
                keyIterator.remove();
            }
        }
    } catch (Exception ex) {
        SALog.printStackTrace(ex);
    }
}
@Test
public void removeDeepLinkInfo() {
    JSONObject jsonObject = new JSONObject();
    try {
        jsonObject.put("$latest_abc", "abc");
        jsonObject.put("normal", "abc_normal");
    } catch (JSONException e) {
        e.printStackTrace();
    }
    ChannelUtils.removeDeepLinkInfo(jsonObject);
    // Only "$latest"/"_latest"-prefixed keys are stripped.
    Assert.assertFalse(jsonObject.has("$latest_abc"));
    Assert.assertTrue(jsonObject.has("normal"));
}
/** Returns the renderer used to format throwables in this layout. */
public IThrowableRenderer<ILoggingEvent> getThrowableRenderer() {
    return throwableRenderer;
}
@Test
public void testAppendThrowable() throws Exception {
    StringBuilder buf = new StringBuilder();
    PubThrowableProxy tp = new PubThrowableProxy();
    tp.setClassName("test1");
    tp.setMessage("msg1");
    StackTraceElement ste1 = new StackTraceElement("c1", "m1", "f1", 1);
    StackTraceElement ste2 = new StackTraceElement("c2", "m2", "f2", 2);
    StackTraceElementProxy[] stepArray = { new StackTraceElementProxy(ste1), new StackTraceElementProxy(ste2) };
    tp.setStackTraceElementProxyArray(stepArray);
    DefaultThrowableRenderer renderer = (DefaultThrowableRenderer) layout.getThrowableRenderer();
    renderer.render(buf, tp);
    System.out.println(buf.toString());
    String[] result = buf.toString().split(CoreConstants.LINE_SEPARATOR);
    System.out.println(result[0]);
    // First line: "<class>: <message>"; following lines: prefixed stack frames.
    assertEquals("test1: msg1", result[0]);
    assertEquals(DefaultThrowableRenderer.TRACE_PREFIX + "at c1.m1(f1:1)", result[1]);
}
/**
 * True when the given canonical class name is {@link Collection} itself or
 * one of the recognised List types.
 */
public static boolean isCollection(String className) {
    boolean isCollectionInterface = Collection.class.getCanonicalName().equals(className);
    return isCollectionInterface || isList(className);
}
@Test
public void isCollection() {
    // All list-like names are collections; map names are not.
    assertThat(listValues).allMatch(ScenarioSimulationSharedUtils::isCollection);
    assertThat(mapValues).noneMatch(ScenarioSimulationSharedUtils::isCollection);
    assertThat(ScenarioSimulationSharedUtils.isCollectionOrMap(Collection.class.getCanonicalName())).isTrue();
}
/**
 * Computes the dot product with another vector of the same dimension.
 * Dense-dense uses a direct index loop; otherwise the sparse operand's
 * non-zero entries drive the accumulation.
 *
 * @throws IllegalArgumentException if the dimensions differ
 */
@Override
public double dot(SGDVector other) {
    if (other.size() != elements.length) {
        throw new IllegalArgumentException("Can't dot two vectors of different dimension, this = " + elements.length + ", other = " + other.size());
    }
    double score = 0.0;
    if (other instanceof DenseVector) {
        for (int i = 0; i < elements.length; i++) {
            score += get(i) * other.get(i);
        }
    } else {
        // else must be sparse — iterate only the non-zero entries.
        for (VectorTuple tuple : other) {
            score += get(tuple.index) * tuple.value;
        }
    }
    return score;
}
@Test
public void emptyDot() {
    DenseVector a = generateVectorA();
    DenseVector b = generateVectorB();
    DenseVector c = generateVectorC();
    DenseVector empty = generateEmptyVector();
    // Dot with the empty (zero) vector is symmetric and always 0.
    assertEquals(a.dot(empty),empty.dot(a),1e-10);
    assertEquals(0.0, a.dot(empty),1e-10);
    assertEquals(b.dot(empty),empty.dot(b),1e-10);
    assertEquals(0.0, b.dot(empty),1e-10);
    assertEquals(c.dot(empty),empty.dot(c),1e-10);
    assertEquals(0.0, c.dot(empty),1e-10);
}
/**
 * Stems a word with the Lancaster algorithm: lower-cases and strips
 * non-letter characters, optionally removes prefixes, then removes suffixes.
 * Words of length 3 or less are returned (cleaned) unchanged.
 */
@Override
public String stem(String word) {
    // Convert input to lowercase and remove all chars that are not a letter.
    word = cleanup(word.toLowerCase());
    // If the cleaned word is longer than 3 chars and prefix stripping is enabled, remove prefixes.
    if ((word.length() > 3) && (stripPrefix)) {
        word = stripPrefixes(word);
    }
    // If still longer than 3 chars, remove suffixes.
    if (word.length() > 3) {
        word = stripSuffixes(word);
    }
    return word;
}
@Test
public void testStem() {
    System.out.println("stem");
    // Input words paired index-for-index with their expected Lancaster stems.
    String[] words = {"consign", "consigned", "consigning", "consignment",
        "consist", "consisted", "consistency", "consistent", "consistently",
        "consisting", "consists", "consolation", "consolations", "consolatory",
        "console", "consoled", "consoles", "consolidate", "consolidated",
        "consolidating", "consoling", "consolingly", "consols", "consonant",
        "consort", "consorted", "consorting", "conspicuous", "conspicuously",
        "conspiracy", "conspirator", "conspirators", "conspire", "conspired",
        "conspiring", "constable", "constables", "constance", "constancy",
        "constant", "knack", "knackeries", "knacks", "knag", "knave",
        "knaves", "knavish", "kneaded", "kneading", "knee", "kneel", "kneeled",
        "kneeling", "kneels", "knees", "knell", "knelt", "knew", "knick",
        "knif", "knife", "knight", "knightly", "knights", "knit", "knits",
        "knitted", "knitting", "knives", "knob", "knobs", "knock", "knocked",
        "knocker", "knockers", "knocking", "knocks", "knopp", "knot", "knots"
    };
    String[] expResult = {"consign", "consign", "consign", "consign",
        "consist", "consist", "consist", "consist", "consist",
        "consist", "consist", "consol", "consol", "consol",
        "consol", "consol", "consol", "consolid", "consolid",
        "consolid", "consol", "consol", "consol", "conson",
        "consort", "consort", "consort", "conspicu", "conspicu",
        "conspir", "conspir", "conspir", "conspir", "conspir",
        "conspir", "const", "const", "const", "const",
        "const", "knack", "knackery", "knack", "knag", "knav",
        "knav", "knav", "knead", "knead", "kne", "kneel", "kneel",
        "kneel", "kneel", "kne", "knel", "knelt", "knew", "knick",
        "knif", "knif", "knight", "knight", "knight", "knit", "knit",
        "knit", "knit", "kniv", "knob", "knob", "knock", "knock",
        "knock", "knock", "knock", "knock", "knop", "knot", "knot"
    };
    LancasterStemmer instance = new LancasterStemmer();
    for (int i = 0; i < words.length; i++) {
        String result = instance.stem(words[i]);
        assertEquals(expResult[i], result);
    }
}
/**
 * Adapts a plain {@link Callable} into a named Task.
 *
 * @deprecated retained for source compatibility; prefer the Task-native
 *             callable factories.
 */
@Deprecated
public static <T> Task<T> callable(final String name, final Callable<? extends T> callable) {
    // Adapt through a statement lambda; exceptions from call() propagate unchanged.
    return Task.callable(name, () -> {
        return callable.call();
    });
}
@SuppressWarnings("deprecation")
@Test
public void testThrowableCallableWithError() throws InterruptedException {
    final Throwable throwable = new Throwable();
    final ThrowableCallable<Integer> callable = new ThrowableCallable<Integer>() {
        @Override
        public Integer call() throws Throwable {
            throw throwable;
        }
    };
    final Task<Integer> task = Tasks.callable("error", callable);
    getEngine().run(task);
    task.await(100, TimeUnit.MILLISECONDS);
    assertTrue(task.isDone());
    assertTrue(task.isFailed());
    // The original throwable must surface unwrapped as the task error.
    assertEquals("Throwable should not be wrapped", throwable, task.getError());
    assertEquals("error", task.getName());
}
/**
 * Reads attributes for a file or folder from the Box API: directories go
 * through the Folders endpoint, everything else through the Files endpoint.
 *
 * @throws BackgroundException mapped from any Box API failure
 */
@Override
public PathAttributes find(final Path file, final ListProgressListener listener) throws BackgroundException {
    try {
        if(file.isDirectory()) {
            return this.toAttributes(new FoldersApi(new BoxApiClient(session.getClient()))
                .getFoldersId(fileid.getFileId(file), DEFAULT_FIELDS, null, null));
        }
        return this.toAttributes(new FilesApi(new BoxApiClient(session.getClient()))
            .getFilesId(fileid.getFileId(file), StringUtils.EMPTY, DEFAULT_FIELDS, null, null));
    }
    catch(ApiException e) {
        throw new BoxExceptionMappingService(fileid).map("Failure to read attributes of {0}", e, file);
    }
}
@Test
public void testFindDirectory() throws Exception {
    final BoxFileidProvider fileid = new BoxFileidProvider(session);
    final Path folder = new BoxDirectoryFeature(session, fileid).mkdir(
        new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final Path test = new BoxDirectoryFeature(session, fileid).mkdir(new Path(folder,
        new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final BoxAttributesFinderFeature f = new BoxAttributesFinderFeature(session, fileid);
    final PathAttributes attributes = f.find(test);
    assertNotEquals(-1L, attributes.getSize());
    assertNotEquals(-1L, attributes.getModificationDate());
    assertNull(attributes.getChecksum().algorithm);
    assertNull(attributes.getETag());
    assertTrue(attributes.getPermission().isReadable());
    assertTrue(attributes.getPermission().isWritable());
    assertTrue(attributes.getPermission().isExecutable());
    // Test wrong type
    try {
        f.find(new Path(test.getAbsolute(), EnumSet.of(Path.Type.file)));
        fail();
    }
    catch(NotfoundException e) {
        // Expected
    }
    new BoxDeleteFeature(session, fileid).delete(Collections.singletonList(folder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Parses ls command-line flags and configures the display and sort state.
 * Remaining arguments are treated as paths; defaults to the current directory
 * when none are given.
 */
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
    CommandFormat cf = new CommandFormat(0, Integer.MAX_VALUE,
        OPTION_PATHONLY, OPTION_DIRECTORY, OPTION_HUMAN, OPTION_HIDENONPRINTABLE,
        OPTION_RECURSIVE, OPTION_REVERSE, OPTION_MTIME, OPTION_SIZE, OPTION_ATIME,
        OPTION_ECPOLICY);
    cf.parse(args);
    pathOnly = cf.getOpt(OPTION_PATHONLY);
    dirRecurse = !cf.getOpt(OPTION_DIRECTORY);
    // -R only takes effect when directories are being expanded (no -d).
    setRecursive(cf.getOpt(OPTION_RECURSIVE) && dirRecurse);
    humanReadable = cf.getOpt(OPTION_HUMAN);
    hideNonPrintable = cf.getOpt(OPTION_HIDENONPRINTABLE);
    orderReverse = cf.getOpt(OPTION_REVERSE);
    orderTime = cf.getOpt(OPTION_MTIME);
    // -t wins over -S when both are supplied.
    orderSize = !orderTime && cf.getOpt(OPTION_SIZE);
    useAtime = cf.getOpt(OPTION_ATIME);
    displayECPolicy = cf.getOpt(OPTION_ECPOLICY);
    if (args.isEmpty()) args.add(Path.CUR_DIR);
    initialiseOrderComparator();
}
@Test
public void processPathDirOrderAtime() throws IOException {
    TestFile testfile01 = new TestFile("testDirectory", "testFile01");
    TestFile testfile02 = new TestFile("testDirectory", "testFile02");
    TestFile testfile03 = new TestFile("testDirectory", "testFile03");
    TestFile testfile04 = new TestFile("testDirectory", "testFile04");
    TestFile testfile05 = new TestFile("testDirectory", "testFile05");
    TestFile testfile06 = new TestFile("testDirectory", "testFile06");
    // set file atime in different order to file names
    testfile01.setAtime(NOW.getTime() + 10);
    testfile02.setAtime(NOW.getTime() + 30);
    testfile03.setAtime(NOW.getTime() + 20);
    testfile04.setAtime(NOW.getTime() + 60);
    testfile05.setAtime(NOW.getTime() + 50);
    testfile06.setAtime(NOW.getTime() + 40);
    // set file mtime in different order to atime
    testfile01.setMtime(NOW.getTime() + 60);
    testfile02.setMtime(NOW.getTime() + 50);
    testfile03.setMtime(NOW.getTime() + 20);
    testfile04.setMtime(NOW.getTime() + 30);
    testfile05.setMtime(NOW.getTime() + 10);
    testfile06.setMtime(NOW.getTime() + 40);
    TestFile testDir = new TestFile("", "testDirectory");
    testDir.setIsDir(true);
    testDir.addContents(testfile01, testfile02, testfile03, testfile04, testfile05, testfile06);
    LinkedList<PathData> pathData = new LinkedList<PathData>();
    pathData.add(testDir.getPathData());
    PrintStream out = mock(PrintStream.class);
    Ls ls = new Ls();
    ls.out = out;
    // -t -u: sort by access time, newest first.
    LinkedList<String> options = new LinkedList<String>();
    options.add("-t");
    options.add("-u");
    ls.processOptions(options);
    String lineFormat = TestFile.computeLineFormat(pathData);
    ls.processArguments(pathData);
    InOrder inOrder = inOrder(out);
    // Expect descending atime order: 04, 05, 06, 02, 03, 01.
    inOrder.verify(out).println("Found 6 items");
    inOrder.verify(out).println(testfile04.formatLineAtime(lineFormat));
    inOrder.verify(out).println(testfile05.formatLineAtime(lineFormat));
    inOrder.verify(out).println(testfile06.formatLineAtime(lineFormat));
    inOrder.verify(out).println(testfile02.formatLineAtime(lineFormat));
    inOrder.verify(out).println(testfile03.formatLineAtime(lineFormat));
    inOrder.verify(out).println(testfile01.formatLineAtime(lineFormat));
    verifyNoMoreInteractions(out);
}
/**
 * Prints, for each row group of the single target Parquet file, the
 * dictionary page of the selected column (or a note when none exists).
 *
 * @return 0 on success
 * @throws IOException if the file cannot be read
 */
@Override
@SuppressWarnings("unchecked")
public int run() throws IOException {
    Preconditions.checkArgument(targets != null && targets.size() >= 1,
        "A Parquet file is required.");
    Preconditions.checkArgument(targets.size() == 1,
        "Cannot process multiple Parquet files.");
    String source = targets.get(0);
    try (ParquetFileReader reader = ParquetFileReader.open(getConf(), qualifiedPath(source))) {
        MessageType schema = reader.getFileMetaData().getSchema();
        ColumnDescriptor descriptor = Util.descriptor(column, schema);
        PrimitiveType type = Util.primitive(column, schema);
        Preconditions.checkNotNull(type);
        DictionaryPageReadStore dictionaryReader;
        int rowGroup = 0;
        while ((dictionaryReader = reader.getNextDictionaryReader()) != null) {
            DictionaryPage page = dictionaryReader.readDictionaryPage(descriptor);
            if (page != null) {
                console.info("\nRow group {} dictionary for \"{}\":", rowGroup, column);
                Dictionary dict = page.getEncoding().initDictionary(descriptor, page);
                printDictionary(dict, type);
            } else {
                console.info("\nRow group {} has no dictionary for \"{}\"", rowGroup, column);
            }
            // Advance the row group alongside the dictionary reader.
            reader.skipNextRowGroup();
            rowGroup += 1;
        }
    }
    console.info("");
    return 0;
}
@Test
public void testShowDirectoryCommand() throws IOException {
    File file = parquetFile();
    ShowDictionaryCommand command = new ShowDictionaryCommand(createLogger());
    command.targets = Arrays.asList(file.getAbsolutePath());
    command.column = BINARY_FIELD;
    command.setConf(new Configuration());
    // The command should complete successfully (exit code 0).
    Assert.assertEquals(0, command.run());
}
/** The resolver registers under the grpc plugin name as its default scheme. */
@Override
public String getDefaultScheme() {
    return PluginEnum.GRPC.getName();
}
@Test
public void getDefaultScheme() {
    // The default scheme must be the grpc plugin name.
    assertEquals(shenyuNameResolverProvider.getDefaultScheme(), PluginEnum.GRPC.getName());
}
/**
 * Schedules the command after the given delay, wrapping it so that
 * cancellation can detach the underlying runnable before it fires.
 */
@Override
public Cancellable schedule(final long delay, final TimeUnit unit, final Runnable command) {
    // The indirection lets IndirectCancellable neutralise the runnable on cancel.
    final IndirectRunnable wrappedCommand = new IndirectRunnable(command);
    return new IndirectCancellable(_executor.schedule(delay, unit, wrappedCommand), wrappedCommand);
}
@Test
public void testCancel() throws InterruptedException {
    final CountDownLatch latch = new CountDownLatch(1);
    final Cancellable cancellable = _executor.schedule(100, TimeUnit.MILLISECONDS, new Runnable() {
        @Override
        public void run() {
            latch.countDown();
        }
    });
    // First cancel succeeds; a second cancel is a no-op.
    assertTrue(cancellable.cancel(new Exception()));
    assertFalse(cancellable.cancel(new Exception()));
    // The cancelled task must never fire, so the latch stays closed.
    assertFalse(latch.await(150, TimeUnit.MILLISECONDS));
}