author (int64) | date (string, 19 chars) | timezone (int64, UTC offset in seconds) | hash (string, 40 chars) | message (string) | mods (list) | language (string, 20 classes) | license (string, 3 classes) | repo (string) | original_message (string)
---|---|---|---|---|---|---|---|---|---
95,143 | 29.10.2022 22:56:05 | -7,200 | e6e7693dfc0ec600f71fa678c4a24bf45c9a60be |
Add support for getting adjecent records by dates.
Instead of only supporting IDs for looking up adjecent records, allow
the various date fields (createdAt, publishedAt, depublishedAt,
modifiedAt) as well.
|
[
{
"change_type": "MODIFY",
"old_path": "src/Repository/ContentRepository.php",
"new_path": "src/Repository/ContentRepository.php",
"diff": "@@ -219,7 +219,7 @@ class ContentRepository extends ServiceEntityRepository\nreturn $paginator;\n}\n- public function findAdjacentBy(string $column, string $direction, int $currentValue, ?string $contentType = null): ?Content\n+ public function findAdjacentBy(string $column, string $direction, $currentValue, ?string $contentType = null): ?Content\n{\nif ($direction === 'next') {\n$order = 'ASC';\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Twig/ContentExtension.php",
"new_path": "src/Twig/ContentExtension.php",
"diff": "@@ -338,15 +338,29 @@ class ContentExtension extends AbstractExtension\nprivate function getAdjacentContent(Content $content, string $direction, string $byColumn = 'id', bool $sameContentType = true): ?Content\n{\n- if ($byColumn !== 'id') {\n- // @todo implement ordering by other columns/fields too\n- throw new \\RuntimeException('Ordering content by column other than ID is not yet implemented');\n+ switch ($byColumn) {\n+ case \"id\":\n+ $value = $content->getId();\n+ break;\n+ case \"createdAt\":\n+ $value = $content->getCreatedAt();\n+ case \"publishedAt\":\n+ $value = $content->getPublishedAt();\n+ break;\n+ case \"depublishedAt\":\n+ $value = $content->getDepublishedAt();\n+ break;\n+ case \"modifiedAt\":\n+ $value = $content->getModifiedAt();\n+ break;\n+ default:\n+ throw new \\RuntimeException('Ordering content by this column is not yet implemented');\n}\n- $byColumn = filter_var($byColumn, FILTER_SANITIZE_STRING);\n$contentType = $sameContentType ? $content->getContentType() : null;\n- return $this->contentRepository->findAdjacentBy($byColumn, $direction, $content->getId(), $contentType);\n+ return $this->contentRepository->findAdjacentBy($byColumn, $direction, $value, $contentType);\n+\n}\npublic function isCurrent(Environment $env, ?Content $content): bool\n"
}
] |
PHP | MIT License | bolt/core |
Add support for getting adjecent records by dates.
Instead of only supporting IDs for looking up adjecent records, allow
the various date fields (createdAt, publishedAt, depublishedAt,
modifiedAt) as well.
|
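Note on the ContentExtension.php diff in the row above: the new `switch` assigns `$value = $content->getCreatedAt()` for the `createdAt` case but has no `break`, so execution falls through and `$value` is overwritten by `getPublishedAt()`. Below is a minimal sketch of the presumably intended mapping with the missing `break` added; this is a hypothetical cleanup, not the committed code.

```php
<?php
// Hypothetical cleanup of the switch from the ContentExtension.php diff above.
// Assumption: each case is meant to map $byColumn to the matching getter on Content.
switch ($byColumn) {
    case 'id':
        $value = $content->getId();
        break;
    case 'createdAt':
        $value = $content->getCreatedAt();
        break; // missing in the committed diff, causing fall-through to publishedAt
    case 'publishedAt':
        $value = $content->getPublishedAt();
        break;
    case 'depublishedAt':
        $value = $content->getDepublishedAt();
        break;
    case 'modifiedAt':
        $value = $content->getModifiedAt();
        break;
    default:
        throw new \RuntimeException('Ordering content by this column is not yet implemented');
}
```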
95,143 | 30.10.2022 11:13:47 | -3,600 | e7aed19a05c18c812473d72f53daa5934b305d49 |
Remove superfluous newline.
|
[
{
"change_type": "MODIFY",
"old_path": "src/Twig/ContentExtension.php",
"new_path": "src/Twig/ContentExtension.php",
"diff": "@@ -362,7 +362,6 @@ class ContentExtension extends AbstractExtension\n$contentType = $sameContentType ? $content->getContentType() : null;\nreturn $this->contentRepository->findAdjacentBy($byColumn, $direction, $value, $contentType);\n-\n}\npublic function isCurrent(Environment $env, ?Content $content): bool\n"
}
] |
PHP | MIT License | bolt/core |
Remove superfluous newline.
|
95,144 | 04.11.2022 14:21:39 | -3,600 | 790b8b932934d2a0703c066729afccb3b52dd38e |
Update cypress-ci.json
|
[
{
"change_type": "MODIFY",
"old_path": "tests/cypress/cypress-ci.json",
"new_path": "tests/cypress/cypress-ci.json",
"diff": "\"defaultCommandTimeout\": 8000,\n\"viewportWidth\": 1920,\n\"viewportHeight\": 1080,\n- \"retries\": 2,\n- \"scrollBehavior\": false\n+ \"retries\": 0,\n+ \"scrollBehavior\": nearest\n}\n"
}
] |
PHP | MIT License | bolt/core |
Update cypress-ci.json
|
95,144 | 04.11.2022 15:29:00 | -3,600 | 67a93ed9d0ed239d05e6fb8fdc0c139bbec0a348 |
Update contenttype_permissions.spec.js
|
[
{
"change_type": "MODIFY",
"old_path": "tests/cypress/integration/contenttype_permissions.spec.js",
"new_path": "tests/cypress/integration/contenttype_permissions.spec.js",
"diff": "@@ -50,7 +50,7 @@ describe('Create content as editor and delete it as chief editor', () => {\ncy.get('#field-heading').type('Test heading');\ncy.get('button[name=\"save\"]').eq(1).scrollIntoView();\n- cy.get('button[name=\"save\"]').eq(1).click();\n+ cy.get('button[name=\"save\"]').eq(1).should('be.visible').click({force:true});\ncy.visit('/bolt/logout');\n"
}
] |
PHP | MIT License | bolt/core |
Update contenttype_permissions.spec.js
|
95,143 | 07.11.2022 21:45:42 | -3,600 | 61ee5bb5b1b827e4d708ca5b83f3bf789b13108b |
Use ParseDown Extra instead of plain old ParseDown.
|
[
{
"change_type": "MODIFY",
"old_path": "composer.json",
"new_path": "composer.json",
"diff": "\"drupol/composer-packages\": \"^2.0\",\n\"embed/embed\": \"^3.4\",\n\"erusev/parsedown\": \"^1.7\",\n+ \"erusev/parsedown-extra\": \"^0.8.1\",\n\"fakerphp/faker\": \"^1.16\",\n\"jasny/twig-extensions\": \"^1.3\",\n\"knplabs/doctrine-behaviors\": \"^2.1\",\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Utils/Markdown.php",
"new_path": "src/Utils/Markdown.php",
"diff": "@@ -4,16 +4,16 @@ declare(strict_types=1);\nnamespace Bolt\\Utils;\n-use Parsedown;\n+use ParsedownExtra;\nclass Markdown\n{\n- /** @var Parsedown */\n+ /** @var ParsedownExtra */\nprivate $parser;\npublic function __construct()\n{\n- $this->parser = new Parsedown();\n+ $this->parser = new ParsedownExtra();\n}\npublic function parse(string $text): string\n"
}
] |
PHP | MIT License | bolt/core |
Use ParseDown Extra instead of plain old ParseDown.
|
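The Markdown utility in the row above keeps its public API (`parse(string $text): string`) and only swaps the underlying parser class for `ParsedownExtra`, pulled in via the new `erusev/parsedown-extra` requirement in composer.json. A minimal usage sketch under those assumptions:

```php
<?php
// Minimal usage sketch of Bolt\Utils\Markdown after the switch to ParsedownExtra,
// assuming erusev/parsedown-extra is installed as declared in composer.json above.
require __DIR__ . '/vendor/autoload.php';

use Bolt\Utils\Markdown;

$markdown = new Markdown();

// Parsedown Extra understands Markdown Extra syntax such as definition lists.
$html = $markdown->parse("Term\n: Definition of the term");

echo $html;
```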
95,144 | 18.11.2022 14:25:52 | -3,600 | 6c8d8b7e1119c1e6918d696f361b4ba89a85e0a6 |
Prepare release 5.2.0-beta.8
|
[
{
"change_type": "MODIFY",
"old_path": "assets/js/version.js",
"new_path": "assets/js/version.js",
"diff": "// generated by genversion\n-export const version = '5.1.99.7';\n+export const version = '5.1.99.8';\n"
},
{
"change_type": "MODIFY",
"old_path": "package-lock.json",
"new_path": "package-lock.json",
"diff": "{\n\"name\": \"bolt\",\n- \"version\": \"5.1.99.7\",\n+ \"version\": \"5.1.99.8\",\n\"lockfileVersion\": 2,\n\"requires\": true,\n\"packages\": {\n\"\": {\n\"name\": \"bolt\",\n- \"version\": \"5.1.99.7\",\n+ \"version\": \"5.1.99.8\",\n\"license\": \"MIT\",\n\"dependencies\": {\n- \"@popperjs/core\": \"^2.11.5\",\n- \"@vue/cli-service\": \"^4.5.13\",\n- \"axios\": \"^0.21.1\",\n+ \"@popperjs/core\": \"^2.11.6\",\n+ \"@vue/cli-service\": \"^4.5.19\",\n+ \"axios\": \"^0.27.2\",\n\"baguettebox.js\": \"^1.11.1\",\n- \"bootstrap\": \"^5.2.*\",\n- \"browserslist\": \"^4.16.7\",\n+ \"bootstrap\": \"^5.2.2\",\n+ \"browserslist\": \"^4.21.4\",\n\"clipboard\": \"^2.0.11\",\n- \"codemirror\": \"^5.62.2\",\n- \"dropzone\": \"^5.9.2\",\n- \"flagpack-dart-sass\": \"^1.1\",\n- \"hotkeys-js\": \"^3.8.7\",\n- \"jquery\": \"^3.6.0\",\n- \"locutus\": \"^2.0.14\",\n+ \"codemirror\": \"^5.65.9\",\n+ \"dropzone\": \"^5.9.3\",\n+ \"flagpack-dart-sass\": \"^1.4.0\",\n+ \"hotkeys-js\": \"^3.10.0\",\n+ \"jquery\": \"^3.6.1\",\n+ \"locutus\": \"^2.0.15\",\n\"luxon\": \"^1.28.0\",\n\"no-scroll\": \"^2.1.1\",\n\"node-vibrant\": \"^3.1.6\",\n\"register-service-worker\": \"^1.7.2\",\n\"selectize\": \"^0.12.6\",\n\"stylelint-config-recommended-scss\": \"^4.3.0\",\n- \"stylelint-scss\": \"^3.20.1\",\n+ \"stylelint-scss\": \"^3.21.0\",\n\"terser\": \"^4.8.1\",\n\"tinycolor2\": \"^1.4.2\",\n- \"vue\": \"^2.6.14\",\n+ \"vue\": \"^2.7.14\",\n\"vue-easymde\": \"^2.0.0\",\n\"vue-flatpickr-component\": \"^8.1.7\",\n\"vue-multiselect\": \"^2.1.6\",\n\"zxcvbn\": \"^4.4.2\"\n},\n\"devDependencies\": {\n- \"@babel/core\": \"^7.15.0\",\n- \"@babel/plugin-transform-regenerator\": \"^7.14.5\",\n- \"@babel/plugin-transform-runtime\": \"^7.15.0\",\n+ \"@babel/core\": \"^7.20.2\",\n+ \"@babel/plugin-transform-regenerator\": \"^7.18.6\",\n+ \"@babel/plugin-transform-runtime\": \"^7.19.6\",\n\"@babel/polyfill\": \"^7.12.1\",\n- \"@babel/preset-env\": \"^7.15.0\",\n+ \"@babel/preset-env\": \"^7.20.2\",\n\"@fortawesome/fontawesome-free\": \"^5.15.4\",\n- \"@symfony/stimulus-bridge\": \"^2.0.0\",\n- \"@symfony/webpack-encore\": \"^1.5.0\",\n- \"@testing-library/cypress\": \"^7.0.6\",\n- \"@vue/test-utils\": \"^1.2.2\",\n+ \"@symfony/stimulus-bridge\": \"^2.1.0\",\n+ \"@symfony/webpack-encore\": \"^1.8.2\",\n+ \"@testing-library/cypress\": \"^7.0.7\",\n+ \"@vue/test-utils\": \"^1.3.3\",\n\"ajv-keywords\": \"^3.5.2\",\n- \"autoprefixer\": \"^10.4.5\",\n+ \"autoprefixer\": \"^10.4.13\",\n\"babel-core\": \"^7.0.0-bridge.0\",\n\"babel-eslint\": \"^10.1.0\",\n\"babel-jest\": \"^25.5.1\",\n\"clean-webpack-plugin\": \"^4.0.0\",\n- \"core-js\": \"^3.0.0\",\n- \"cypress\": \"^7.7.0\",\n+ \"core-js\": \"^3.26.1\",\n+ \"cypress\": \"^7.6.0\",\n\"eslint\": \"^6.8.0\",\n\"eslint-config-prettier\": \"^6.15.0\",\n- \"eslint-plugin-prettier\": \"^3.4.0\",\n+ \"eslint-plugin-prettier\": \"^3.4.1\",\n\"eslint-plugin-standard\": \"^4.1.0\",\n\"eslint-plugin-vue\": \"^6.2.2\",\n\"file-loader\": \"^6.2.0\",\n\"html-webpack-plugin\": \"^5.5.0\",\n\"jest\": \"^25.5.4\",\n\"jest-serializer-vue\": \"^2.0.2\",\n- \"pa11y-ci\": \"^2.4.0\",\n+ \"pa11y-ci\": \"^2.4.2\",\n\"postcss-loader\": \"^4.3.0\",\n- \"postcss-preset-env\": \"^7.4.1\",\n+ \"postcss-preset-env\": \"^7.8.3\",\n\"prettier\": \"^1.19.1\",\n- \"regenerator-runtime\": \"^0.13.9\",\n- \"sass\": \"^1.37.5\",\n+ \"regenerator-runtime\": \"^0.13.11\",\n+ \"sass\": \"^1.56.1\",\n\"sass-loader\": \"^9.0.3\",\n\"source-sans-pro\": \"^3.6.0\",\n\"stimulus\": \"^2.0.0\",\n\"stylelint\": 
\"^13.13.1\",\n\"stylelint-config-standard\": \"^19.0.0\",\n\"vue-jest\": \"^3.0.7\",\n- \"vue-loader\": \"^15.9.8\",\n- \"vue-router\": \"^3.5.1\",\n- \"vue-template-compiler\": \"^2.6.14\",\n- \"webpack-notifier\": \"^1.6.0\",\n- \"webpackbar\": \"https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.0-3.tgz\",\n- \"workbox-webpack-plugin\": \"^6.2.0\"\n+ \"vue-loader\": \"^15.10.1\",\n+ \"vue-router\": \"^3.6.5\",\n+ \"vue-template-compiler\": \"^2.7.14\",\n+ \"webpack-notifier\": \"^1.15.0\",\n+ \"webpackbar\": \"^5.0.2\",\n+ \"workbox-webpack-plugin\": \"^6.5.4\"\n}\n},\n\"node_modules/@achrinza/node-ipc\": {\n- \"version\": \"9.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/@achrinza/node-ipc/-/node-ipc-9.2.2.tgz\",\n- \"integrity\": \"sha512-b90U39dx0cU6emsOvy5hxU4ApNXnE3+Tuo8XQZfiKTGelDwpMwBVgBP7QX6dGTcJgu/miyJuNJ/2naFBliNWEw==\",\n+ \"version\": \"9.2.6\",\n+ \"resolved\": \"https://registry.npmjs.org/@achrinza/node-ipc/-/node-ipc-9.2.6.tgz\",\n+ \"integrity\": \"sha512-ULSIYPy4ZPM301dfCxRz0l2GJjOwIo/PqmWonIu1bLml7UmnVQmH+juJcoyXp6E8gIRRNAjGYftJnNQlfy4vPg==\",\n\"dependencies\": {\n\"@node-ipc/js-queue\": \"2.0.3\",\n\"event-pubsub\": \"4.3.0\",\n\"js-message\": \"1.0.7\"\n},\n\"engines\": {\n- \"node\": \"8 || 10 || 12 || 14 || 16 || 17\"\n+ \"node\": \"8 || 9 || 10 || 11 || 12 || 13 || 14 || 15 || 16 || 17 || 18 || 19\"\n}\n},\n\"node_modules/@ampproject/remapping\": {\n\"node\": \">= 6\"\n}\n},\n+ \"node_modules/@cypress/request/node_modules/form-data\": {\n+ \"version\": \"2.3.3\",\n+ \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz\",\n+ \"integrity\": \"sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==\",\n+ \"dev\": true,\n+ \"dependencies\": {\n+ \"asynckit\": \"^0.4.0\",\n+ \"combined-stream\": \"^1.0.6\",\n+ \"mime-types\": \"^2.1.12\"\n+ },\n+ \"engines\": {\n+ \"node\": \">= 0.12\"\n+ }\n+ },\n\"node_modules/@cypress/xvfb\": {\n\"version\": \"1.2.4\",\n\"resolved\": \"https://registry.npmjs.org/@cypress/xvfb/-/xvfb-1.2.4.tgz\",\n}\n},\n\"node_modules/@types/estree\": {\n- \"version\": \"0.0.51\",\n- \"resolved\": \"https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz\",\n- \"integrity\": \"sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==\"\n+ \"version\": \"1.0.0\",\n+ \"resolved\": \"https://registry.npmjs.org/@types/estree/-/estree-1.0.0.tgz\",\n+ \"integrity\": \"sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ==\"\n},\n\"node_modules/@types/express\": {\n\"version\": \"4.17.14\",\n}\n},\n\"node_modules/axios\": {\n- \"version\": \"0.21.4\",\n- \"resolved\": \"https://registry.npmjs.org/axios/-/axios-0.21.4.tgz\",\n- \"integrity\": \"sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==\",\n+ \"version\": \"0.27.2\",\n+ \"resolved\": \"https://registry.npmjs.org/axios/-/axios-0.27.2.tgz\",\n+ \"integrity\": \"sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==\",\n\"dependencies\": {\n- \"follow-redirects\": \"^1.14.0\"\n+ \"follow-redirects\": \"^1.14.9\",\n+ \"form-data\": \"^4.0.0\"\n}\n},\n\"node_modules/babel-code-frame\": {\n\"marked\": \"^4.1.0\"\n}\n},\n+ \"node_modules/easymde/node_modules/marked\": {\n+ \"version\": \"4.2.2\",\n+ \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.2.tgz\",\n+ \"integrity\": 
\"sha512-JjBTFTAvuTgANXx82a5vzK9JLSMoV6V3LBVn4Uhdso6t7vXrGx7g1Cd2r6NYSsxrYbQGFCMqBDhFHyK5q2UvcQ==\",\n+ \"bin\": {\n+ \"marked\": \"bin/marked.js\"\n+ },\n+ \"engines\": {\n+ \"node\": \">= 12\"\n+ }\n+ },\n\"node_modules/ecc-jsbn\": {\n\"version\": \"0.1.2\",\n\"resolved\": \"https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz\",\n}\n},\n\"node_modules/form-data\": {\n- \"version\": \"2.3.3\",\n- \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz\",\n- \"integrity\": \"sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==\",\n+ \"version\": \"4.0.0\",\n+ \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz\",\n+ \"integrity\": \"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==\",\n\"dependencies\": {\n\"asynckit\": \"^0.4.0\",\n- \"combined-stream\": \"^1.0.6\",\n+ \"combined-stream\": \"^1.0.8\",\n\"mime-types\": \"^2.1.12\"\n},\n\"engines\": {\n- \"node\": \">= 0.12\"\n+ \"node\": \">= 6\"\n}\n},\n\"node_modules/forwarded\": {\n}\n},\n\"node_modules/marked\": {\n- \"version\": \"4.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.2.tgz\",\n- \"integrity\": \"sha512-JjBTFTAvuTgANXx82a5vzK9JLSMoV6V3LBVn4Uhdso6t7vXrGx7g1Cd2r6NYSsxrYbQGFCMqBDhFHyK5q2UvcQ==\",\n+ \"version\": \"2.1.3\",\n+ \"resolved\": \"https://registry.npmjs.org/marked/-/marked-2.1.3.tgz\",\n+ \"integrity\": \"sha512-/Q+7MGzaETqifOMWYEA7HVMaZb4XbcRfaOzcSsHZEith83KGlvaSG33u0SKu89Mj5h+T8V2hM+8O45Qc5XTgwA==\",\n\"bin\": {\n- \"marked\": \"bin/marked.js\"\n+ \"marked\": \"bin/marked\"\n},\n\"engines\": {\n- \"node\": \">= 12\"\n+ \"node\": \">= 10\"\n}\n},\n\"node_modules/mathml-tag-names\": {\n\"request\": \"^2.34\"\n}\n},\n+ \"node_modules/request/node_modules/form-data\": {\n+ \"version\": \"2.3.3\",\n+ \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz\",\n+ \"integrity\": \"sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==\",\n+ \"dependencies\": {\n+ \"asynckit\": \"^0.4.0\",\n+ \"combined-stream\": \"^1.0.6\",\n+ \"mime-types\": \"^2.1.12\"\n+ },\n+ \"engines\": {\n+ \"node\": \">= 0.12\"\n+ }\n+ },\n\"node_modules/request/node_modules/http-signature\": {\n\"version\": \"1.2.0\",\n\"resolved\": \"https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz\",\n}\n},\n\"node_modules/std-env\": {\n- \"version\": \"2.3.1\",\n- \"resolved\": \"https://registry.npmjs.org/std-env/-/std-env-2.3.1.tgz\",\n- \"integrity\": \"sha512-eOsoKTWnr6C8aWrqJJ2KAReXoa7Vn5Ywyw6uCXgA/xDhxPoaIsBa5aNJmISY04dLwXPBnDHW4diGM7Sn5K4R/g==\",\n- \"dev\": true,\n- \"dependencies\": {\n- \"ci-info\": \"^3.1.1\"\n- }\n+ \"version\": \"3.3.1\",\n+ \"resolved\": \"https://registry.npmjs.org/std-env/-/std-env-3.3.1.tgz\",\n+ \"integrity\": \"sha512-3H20QlwQsSm2OvAxWIYhs+j01MzzqwMwGiiO1NQaJYZgJZFPuAbf95/DiKRBSTYIJ2FeGUc+B/6mPGcWP9dO3Q==\",\n+ \"dev\": true\n},\n\"node_modules/stealthy-require\": {\n\"version\": \"1.1.1\",\n\"marked\": \"^2.1.3\"\n}\n},\n- \"node_modules/vue-easymde/node_modules/marked\": {\n- \"version\": \"2.1.3\",\n- \"resolved\": \"https://registry.npmjs.org/marked/-/marked-2.1.3.tgz\",\n- \"integrity\": \"sha512-/Q+7MGzaETqifOMWYEA7HVMaZb4XbcRfaOzcSsHZEith83KGlvaSG33u0SKu89Mj5h+T8V2hM+8O45Qc5XTgwA==\",\n- \"bin\": {\n- \"marked\": \"bin/marked\"\n- },\n- \"engines\": {\n- \"node\": \">= 10\"\n- }\n- },\n\"node_modules/vue-eslint-parser\": {\n\"version\": \"7.11.0\",\n\"resolved\": 
\"https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-7.11.0.tgz\",\n\"source-map\": \"~0.6.1\"\n}\n},\n+ \"node_modules/webpack/node_modules/@types/estree\": {\n+ \"version\": \"0.0.51\",\n+ \"resolved\": \"https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz\",\n+ \"integrity\": \"sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==\"\n+ },\n\"node_modules/webpack/node_modules/glob-to-regexp\": {\n\"version\": \"0.4.1\",\n\"resolved\": \"https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz\",\n}\n},\n\"node_modules/webpackbar\": {\n- \"version\": \"5.0.0-3\",\n- \"resolved\": \"https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.0-3.tgz\",\n- \"integrity\": \"sha512-viW6KCYjMb0NPoDrw2jAmLXU2dEOhRrtku28KmOfeE1vxbfwCYuTbTaMhnkrCZLFAFyY9Q49Z/jzYO80Dw5b8g==\",\n+ \"version\": \"5.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz\",\n+ \"integrity\": \"sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ==\",\n\"dev\": true,\n- \"license\": \"MIT\",\n\"dependencies\": {\n- \"ansi-escapes\": \"^4.3.1\",\n\"chalk\": \"^4.1.0\",\n- \"consola\": \"^2.15.0\",\n- \"figures\": \"^3.2.0\",\n+ \"consola\": \"^2.15.3\",\n\"pretty-time\": \"^1.1.0\",\n- \"std-env\": \"^2.2.1\",\n- \"text-table\": \"^0.2.0\",\n- \"wrap-ansi\": \"^7.0.0\"\n+ \"std-env\": \"^3.0.1\"\n},\n\"engines\": {\n- \"node\": \">=10\"\n+ \"node\": \">=12\"\n},\n\"peerDependencies\": {\n\"webpack\": \"3 || 4 || 5\"\n\"node\": \">=8\"\n}\n},\n- \"node_modules/webpackbar/node_modules/wrap-ansi\": {\n- \"version\": \"7.0.0\",\n- \"resolved\": \"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz\",\n- \"integrity\": \"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==\",\n- \"dev\": true,\n- \"dependencies\": {\n- \"ansi-styles\": \"^4.0.0\",\n- \"string-width\": \"^4.1.0\",\n- \"strip-ansi\": \"^6.0.0\"\n- },\n- \"engines\": {\n- \"node\": \">=10\"\n- },\n- \"funding\": {\n- \"url\": \"https://github.com/chalk/wrap-ansi?sponsor=1\"\n- }\n- },\n\"node_modules/websocket-driver\": {\n\"version\": \"0.7.4\",\n\"resolved\": \"https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz\",\n},\n\"dependencies\": {\n\"@achrinza/node-ipc\": {\n- \"version\": \"9.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/@achrinza/node-ipc/-/node-ipc-9.2.2.tgz\",\n- \"integrity\": \"sha512-b90U39dx0cU6emsOvy5hxU4ApNXnE3+Tuo8XQZfiKTGelDwpMwBVgBP7QX6dGTcJgu/miyJuNJ/2naFBliNWEw==\",\n+ \"version\": \"9.2.6\",\n+ \"resolved\": \"https://registry.npmjs.org/@achrinza/node-ipc/-/node-ipc-9.2.6.tgz\",\n+ \"integrity\": \"sha512-ULSIYPy4ZPM301dfCxRz0l2GJjOwIo/PqmWonIu1bLml7UmnVQmH+juJcoyXp6E8gIRRNAjGYftJnNQlfy4vPg==\",\n\"requires\": {\n\"@node-ipc/js-queue\": \"2.0.3\",\n\"event-pubsub\": \"4.3.0\",\n\"tough-cookie\": \"~2.5.0\",\n\"tunnel-agent\": \"^0.6.0\",\n\"uuid\": \"^8.3.2\"\n+ },\n+ \"dependencies\": {\n+ \"form-data\": {\n+ \"version\": \"2.3.3\",\n+ \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz\",\n+ \"integrity\": \"sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==\",\n+ \"dev\": true,\n+ \"requires\": {\n+ \"asynckit\": \"^0.4.0\",\n+ \"combined-stream\": \"^1.0.6\",\n+ \"mime-types\": \"^2.1.12\"\n+ }\n+ }\n}\n},\n\"@cypress/xvfb\": {\n}\n},\n\"@types/estree\": {\n- \"version\": \"0.0.51\",\n- \"resolved\": 
\"https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz\",\n- \"integrity\": \"sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==\"\n+ \"version\": \"1.0.0\",\n+ \"resolved\": \"https://registry.npmjs.org/@types/estree/-/estree-1.0.0.tgz\",\n+ \"integrity\": \"sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ==\"\n},\n\"@types/express\": {\n\"version\": \"4.17.14\",\n\"resolved\": \"https://registry.npmjs.org/@vue/cli-shared-utils/-/cli-shared-utils-4.5.19.tgz\",\n\"integrity\": \"sha512-JYpdsrC/d9elerKxbEUtmSSU6QRM60rirVubOewECHkBHj+tLNznWq/EhCjswywtePyLaMUK25eTqnTSZlEE+g==\",\n\"requires\": {\n- \"@achrinza/node-ipc\": \"9.2.2\",\n+ \"@achrinza/node-ipc\": \"^9.2.6\",\n\"@hapi/joi\": \"^15.0.1\",\n\"chalk\": \"^2.4.2\",\n\"execa\": \"^1.0.0\",\n\"dev\": true\n},\n\"axios\": {\n- \"version\": \"0.21.4\",\n- \"resolved\": \"https://registry.npmjs.org/axios/-/axios-0.21.4.tgz\",\n- \"integrity\": \"sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==\",\n+ \"version\": \"0.27.2\",\n+ \"resolved\": \"https://registry.npmjs.org/axios/-/axios-0.27.2.tgz\",\n+ \"integrity\": \"sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==\",\n\"requires\": {\n- \"follow-redirects\": \"^1.14.0\"\n+ \"follow-redirects\": \"^1.14.9\",\n+ \"form-data\": \"^4.0.0\"\n}\n},\n\"babel-code-frame\": {\n\"codemirror\": \"^5.63.1\",\n\"codemirror-spell-checker\": \"1.1.2\",\n\"marked\": \"^4.1.0\"\n+ },\n+ \"dependencies\": {\n+ \"marked\": {\n+ \"version\": \"4.2.2\",\n+ \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.2.tgz\",\n+ \"integrity\": \"sha512-JjBTFTAvuTgANXx82a5vzK9JLSMoV6V3LBVn4Uhdso6t7vXrGx7g1Cd2r6NYSsxrYbQGFCMqBDhFHyK5q2UvcQ==\"\n+ }\n}\n},\n\"ecc-jsbn\": {\n\"integrity\": \"sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw==\"\n},\n\"form-data\": {\n- \"version\": \"2.3.3\",\n- \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz\",\n- \"integrity\": \"sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==\",\n+ \"version\": \"4.0.0\",\n+ \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz\",\n+ \"integrity\": \"sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==\",\n\"requires\": {\n\"asynckit\": \"^0.4.0\",\n- \"combined-stream\": \"^1.0.6\",\n+ \"combined-stream\": \"^1.0.8\",\n\"mime-types\": \"^2.1.12\"\n}\n},\n}\n},\n\"marked\": {\n- \"version\": \"4.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.2.tgz\",\n- \"integrity\": \"sha512-JjBTFTAvuTgANXx82a5vzK9JLSMoV6V3LBVn4Uhdso6t7vXrGx7g1Cd2r6NYSsxrYbQGFCMqBDhFHyK5q2UvcQ==\"\n+ \"version\": \"2.1.3\",\n+ \"resolved\": \"https://registry.npmjs.org/marked/-/marked-2.1.3.tgz\",\n+ \"integrity\": \"sha512-/Q+7MGzaETqifOMWYEA7HVMaZb4XbcRfaOzcSsHZEith83KGlvaSG33u0SKu89Mj5h+T8V2hM+8O45Qc5XTgwA==\"\n},\n\"mathml-tag-names\": {\n\"version\": \"2.1.3\",\n\"uuid\": \"^3.3.2\"\n},\n\"dependencies\": {\n+ \"form-data\": {\n+ \"version\": \"2.3.3\",\n+ \"resolved\": \"https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz\",\n+ \"integrity\": \"sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==\",\n+ \"requires\": {\n+ \"asynckit\": \"^0.4.0\",\n+ \"combined-stream\": \"^1.0.6\",\n+ \"mime-types\": \"^2.1.12\"\n+ }\n+ 
},\n\"http-signature\": {\n\"version\": \"1.2.0\",\n\"resolved\": \"https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz\",\n\"integrity\": \"sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==\"\n},\n\"std-env\": {\n- \"version\": \"2.3.1\",\n- \"resolved\": \"https://registry.npmjs.org/std-env/-/std-env-2.3.1.tgz\",\n- \"integrity\": \"sha512-eOsoKTWnr6C8aWrqJJ2KAReXoa7Vn5Ywyw6uCXgA/xDhxPoaIsBa5aNJmISY04dLwXPBnDHW4diGM7Sn5K4R/g==\",\n- \"dev\": true,\n- \"requires\": {\n- \"ci-info\": \"^3.1.1\"\n- }\n+ \"version\": \"3.3.1\",\n+ \"resolved\": \"https://registry.npmjs.org/std-env/-/std-env-3.3.1.tgz\",\n+ \"integrity\": \"sha512-3H20QlwQsSm2OvAxWIYhs+j01MzzqwMwGiiO1NQaJYZgJZFPuAbf95/DiKRBSTYIJ2FeGUc+B/6mPGcWP9dO3Q==\",\n+ \"dev\": true\n},\n\"stealthy-require\": {\n\"version\": \"1.1.1\",\n\"requires\": {\n\"easymde\": \"^2.15.0\",\n\"marked\": \"^2.1.3\"\n- },\n- \"dependencies\": {\n- \"marked\": {\n- \"version\": \"2.1.3\",\n- \"resolved\": \"https://registry.npmjs.org/marked/-/marked-2.1.3.tgz\",\n- \"integrity\": \"sha512-/Q+7MGzaETqifOMWYEA7HVMaZb4XbcRfaOzcSsHZEith83KGlvaSG33u0SKu89Mj5h+T8V2hM+8O45Qc5XTgwA==\"\n- }\n}\n},\n\"vue-eslint-parser\": {\n\"webpack-sources\": \"^3.2.3\"\n},\n\"dependencies\": {\n+ \"@types/estree\": {\n+ \"version\": \"0.0.51\",\n+ \"resolved\": \"https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz\",\n+ \"integrity\": \"sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==\"\n+ },\n\"glob-to-regexp\": {\n\"version\": \"0.4.1\",\n\"resolved\": \"https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz\",\n}\n},\n\"webpackbar\": {\n- \"version\": \"https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.0-3.tgz\",\n- \"integrity\": \"sha512-viW6KCYjMb0NPoDrw2jAmLXU2dEOhRrtku28KmOfeE1vxbfwCYuTbTaMhnkrCZLFAFyY9Q49Z/jzYO80Dw5b8g==\",\n+ \"version\": \"5.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz\",\n+ \"integrity\": \"sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ==\",\n\"dev\": true,\n\"requires\": {\n- \"ansi-escapes\": \"^4.3.1\",\n\"chalk\": \"^4.1.0\",\n- \"consola\": \"^2.15.0\",\n- \"figures\": \"^3.2.0\",\n+ \"consola\": \"^2.15.3\",\n\"pretty-time\": \"^1.1.0\",\n- \"std-env\": \"^2.2.1\",\n- \"text-table\": \"^0.2.0\",\n- \"wrap-ansi\": \"^7.0.0\"\n+ \"std-env\": \"^3.0.1\"\n},\n\"dependencies\": {\n\"ansi-styles\": {\n\"requires\": {\n\"has-flag\": \"^4.0.0\"\n}\n- },\n- \"wrap-ansi\": {\n- \"version\": \"7.0.0\",\n- \"resolved\": \"https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz\",\n- \"integrity\": \"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==\",\n- \"dev\": true,\n- \"requires\": {\n- \"ansi-styles\": \"^4.0.0\",\n- \"string-width\": \"^4.1.0\",\n- \"strip-ansi\": \"^6.0.0\"\n- }\n}\n}\n},\n"
},
{
"change_type": "MODIFY",
"old_path": "package.json",
"new_path": "package.json",
"diff": "{\n\"name\": \"bolt\",\n- \"version\": \"5.1.99.7\",\n+ \"version\": \"5.1.99.8\",\n\"homepage\": \"https://boltcms.io\",\n\"author\": \"Bob den Otter <bob@twokings.nl> (https://boltcms.io)\",\n\"license\": \"MIT\",\n\"type\": \"git\",\n\"url\": \"git://github.com/bolt/core.git\"\n},\n+ \"overrides\": {\n+ \"@achrinza/node-ipc\": \"^9.2.6\"\n+ },\n\"dependencies\": {\n- \"@popperjs/core\": \"^2.11.5\",\n- \"@vue/cli-service\": \"^4.5.13\",\n- \"axios\": \"^0.21.1\",\n+ \"@popperjs/core\": \"^2.11.6\",\n+ \"@vue/cli-service\": \"^4.5.19\",\n+ \"axios\": \"^0.27.2\",\n\"baguettebox.js\": \"^1.11.1\",\n- \"bootstrap\": \"^5.2.*\",\n- \"browserslist\": \"^4.16.7\",\n+ \"bootstrap\": \"^5.2.2\",\n+ \"browserslist\": \"^4.21.4\",\n\"clipboard\": \"^2.0.11\",\n- \"codemirror\": \"^5.62.2\",\n- \"dropzone\": \"^5.9.2\",\n- \"flagpack-dart-sass\": \"^1.1\",\n- \"hotkeys-js\": \"^3.8.7\",\n- \"jquery\": \"^3.6.0\",\n- \"locutus\": \"^2.0.14\",\n+ \"codemirror\": \"^5.65.9\",\n+ \"dropzone\": \"^5.9.3\",\n+ \"flagpack-dart-sass\": \"^1.4.0\",\n+ \"hotkeys-js\": \"^3.10.0\",\n+ \"jquery\": \"^3.6.1\",\n+ \"locutus\": \"^2.0.15\",\n\"luxon\": \"^1.28.0\",\n\"no-scroll\": \"^2.1.1\",\n\"node-vibrant\": \"^3.1.6\",\n\"register-service-worker\": \"^1.7.2\",\n\"selectize\": \"^0.12.6\",\n\"stylelint-config-recommended-scss\": \"^4.3.0\",\n- \"stylelint-scss\": \"^3.20.1\",\n+ \"stylelint-scss\": \"^3.21.0\",\n\"terser\": \"^4.8.1\",\n\"tinycolor2\": \"^1.4.2\",\n- \"vue\": \"^2.6.14\",\n+ \"vue\": \"^2.7.14\",\n\"vue-easymde\": \"^2.0.0\",\n\"vue-flatpickr-component\": \"^8.1.7\",\n\"vue-multiselect\": \"^2.1.6\",\n\"zxcvbn\": \"^4.4.2\"\n},\n\"devDependencies\": {\n- \"@babel/core\": \"^7.15.0\",\n- \"@babel/plugin-transform-regenerator\": \"^7.14.5\",\n- \"@babel/plugin-transform-runtime\": \"^7.15.0\",\n+ \"@babel/core\": \"^7.20.2\",\n+ \"@babel/plugin-transform-regenerator\": \"^7.18.6\",\n+ \"@babel/plugin-transform-runtime\": \"^7.19.6\",\n\"@babel/polyfill\": \"^7.12.1\",\n- \"@babel/preset-env\": \"^7.15.0\",\n+ \"@babel/preset-env\": \"^7.20.2\",\n\"@fortawesome/fontawesome-free\": \"^5.15.4\",\n- \"@symfony/stimulus-bridge\": \"^2.0.0\",\n- \"@symfony/webpack-encore\": \"^1.5.0\",\n- \"@testing-library/cypress\": \"^7.0.6\",\n- \"@vue/test-utils\": \"^1.2.2\",\n+ \"@symfony/stimulus-bridge\": \"^2.1.0\",\n+ \"@symfony/webpack-encore\": \"^1.8.2\",\n+ \"@testing-library/cypress\": \"^7.0.7\",\n+ \"@vue/test-utils\": \"^1.3.3\",\n\"ajv-keywords\": \"^3.5.2\",\n- \"autoprefixer\": \"^10.4.5\",\n+ \"autoprefixer\": \"^10.4.13\",\n\"babel-core\": \"^7.0.0-bridge.0\",\n\"babel-eslint\": \"^10.1.0\",\n\"babel-jest\": \"^25.5.1\",\n\"clean-webpack-plugin\": \"^4.0.0\",\n- \"core-js\": \"^3.0.0\",\n- \"cypress\": \"^7.7.0\",\n+ \"core-js\": \"^3.26.1\",\n+ \"cypress\": \"^7.6.0\",\n\"eslint\": \"^6.8.0\",\n\"eslint-config-prettier\": \"^6.15.0\",\n- \"eslint-plugin-prettier\": \"^3.4.0\",\n+ \"eslint-plugin-prettier\": \"^3.4.1\",\n\"eslint-plugin-standard\": \"^4.1.0\",\n\"eslint-plugin-vue\": \"^6.2.2\",\n\"file-loader\": \"^6.2.0\",\n\"html-webpack-plugin\": \"^5.5.0\",\n\"jest\": \"^25.5.4\",\n\"jest-serializer-vue\": \"^2.0.2\",\n- \"pa11y-ci\": \"^2.4.0\",\n+ \"pa11y-ci\": \"^2.4.2\",\n\"postcss-loader\": \"^4.3.0\",\n- \"postcss-preset-env\": \"^7.4.1\",\n+ \"postcss-preset-env\": \"^7.8.3\",\n\"prettier\": \"^1.19.1\",\n- \"regenerator-runtime\": \"^0.13.9\",\n- \"sass\": \"^1.37.5\",\n+ \"regenerator-runtime\": \"^0.13.11\",\n+ \"sass\": \"^1.56.1\",\n\"sass-loader\": 
\"^9.0.3\",\n\"source-sans-pro\": \"^3.6.0\",\n\"stimulus\": \"^2.0.0\",\n\"stylelint\": \"^13.13.1\",\n\"stylelint-config-standard\": \"^19.0.0\",\n\"vue-jest\": \"^3.0.7\",\n- \"vue-loader\": \"^15.9.8\",\n- \"vue-router\": \"^3.5.1\",\n- \"vue-template-compiler\": \"^2.6.14\",\n- \"webpack-notifier\": \"^1.6.0\",\n- \"webpackbar\": \"https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.0-3.tgz\",\n- \"workbox-webpack-plugin\": \"^6.2.0\"\n+ \"vue-loader\": \"^15.10.1\",\n+ \"vue-router\": \"^3.6.5\",\n+ \"vue-template-compiler\": \"^2.7.14\",\n+ \"webpack-notifier\": \"^1.15.0\",\n+ \"webpackbar\": \"^5.0.2\",\n+ \"workbox-webpack-plugin\": \"^6.5.4\"\n},\n\"scripts\": {\n\"start\": \"encore dev-server --port=8234 --progress\",\n"
}
] |
PHP | MIT License | bolt/core |
Prepare release 5.2.0-beta.8
|
95,144 | 22.11.2022 16:16:21 | -3,600 | a2639c430707adf01dec4682b4186a35b0a9b863 |
Prevent jank when rendering list of thumbnails.
|
[
{
"change_type": "MODIFY",
"old_path": "assets/scss/modules/base/_modals.scss",
"new_path": "assets/scss/modules/base/_modals.scss",
"diff": "#resourcesModal {\n.card {\n- min-height: 209px;\n+ img, a.directory {\n+ aspect-ratio: 5/3;\n+ }\n}\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "package-lock.json",
"new_path": "package-lock.json",
"diff": "\"strip-ansi\": \"^6.0.0\"\n}\n},\n- \"node_modules/@vue/compiler-core\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.2.45.tgz\",\n- \"integrity\": \"sha512-rcMj7H+PYe5wBV3iYeUgbCglC+pbpN8hBLTJvRiK2eKQiWqu+fG9F+8sW99JdL4LQi7Re178UOxn09puSXvn4A==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"dependencies\": {\n- \"@babel/parser\": \"^7.16.4\",\n- \"@vue/shared\": \"3.2.45\",\n- \"estree-walker\": \"^2.0.2\",\n- \"source-map\": \"^0.6.1\"\n- }\n- },\n- \"node_modules/@vue/compiler-core/node_modules/estree-walker\": {\n- \"version\": \"2.0.2\",\n- \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n- \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n- \"optional\": true,\n- \"peer\": true\n- },\n- \"node_modules/@vue/compiler-dom\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.2.45.tgz\",\n- \"integrity\": \"sha512-tyYeUEuKqqZO137WrZkpwfPCdiiIeXYCcJ8L4gWz9vqaxzIQRccTSwSWZ/Axx5YR2z+LvpUbmPNXxuBU45lyRw==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"dependencies\": {\n- \"@vue/compiler-core\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\"\n- }\n- },\n\"node_modules/@vue/compiler-sfc\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.2.45.tgz\",\n- \"integrity\": \"sha512-1jXDuWah1ggsnSAOGsec8cFjT/K6TMZ0sPL3o3d84Ft2AYZi2jWJgRMjw4iaK0rBfA89L5gw427H4n1RZQBu6Q==\",\n- \"optional\": true,\n- \"peer\": true,\n+ \"version\": \"2.7.14\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n+ \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n\"dependencies\": {\n- \"@babel/parser\": \"^7.16.4\",\n- \"@vue/compiler-core\": \"3.2.45\",\n- \"@vue/compiler-dom\": \"3.2.45\",\n- \"@vue/compiler-ssr\": \"3.2.45\",\n- \"@vue/reactivity-transform\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\",\n- \"estree-walker\": \"^2.0.2\",\n- \"magic-string\": \"^0.25.7\",\n- \"postcss\": \"^8.1.10\",\n+ \"@babel/parser\": \"^7.18.4\",\n+ \"postcss\": \"^8.4.14\",\n\"source-map\": \"^0.6.1\"\n}\n},\n- \"node_modules/@vue/compiler-sfc/node_modules/estree-walker\": {\n- \"version\": \"2.0.2\",\n- \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n- \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n- \"optional\": true,\n- \"peer\": true\n- },\n- \"node_modules/@vue/compiler-ssr\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.2.45.tgz\",\n- \"integrity\": \"sha512-6BRaggEGqhWht3lt24CrIbQSRD5O07MTmd+LjAn5fJj568+R9eUD2F7wMQJjX859seSlrYog7sUtrZSd7feqrQ==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"dependencies\": {\n- \"@vue/compiler-dom\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\"\n- }\n- },\n\"node_modules/@vue/component-compiler-utils\": {\n\"version\": \"3.3.0\",\n\"resolved\": \"https://registry.npmjs.org/@vue/component-compiler-utils/-/component-compiler-utils-3.3.0.tgz\",\n\"webpack\": \">=4.0.0\"\n}\n},\n- \"node_modules/@vue/reactivity-transform\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/reactivity-transform/-/reactivity-transform-3.2.45.tgz\",\n- \"integrity\": 
\"sha512-BHVmzYAvM7vcU5WmuYqXpwaBHjsS8T63jlKGWVtHxAHIoMIlmaMyurUSEs1Zcg46M4AYT5MtB1U274/2aNzjJQ==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"dependencies\": {\n- \"@babel/parser\": \"^7.16.4\",\n- \"@vue/compiler-core\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\",\n- \"estree-walker\": \"^2.0.2\",\n- \"magic-string\": \"^0.25.7\"\n- }\n- },\n- \"node_modules/@vue/reactivity-transform/node_modules/estree-walker\": {\n- \"version\": \"2.0.2\",\n- \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n- \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n- \"optional\": true,\n- \"peer\": true\n- },\n- \"node_modules/@vue/shared\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/shared/-/shared-3.2.45.tgz\",\n- \"integrity\": \"sha512-Ewzq5Yhimg7pSztDV+RH1UDKBzmtqieXQlpTVm2AwraoRL/Rks96mvd8Vgi7Lj+h+TH8dv7mXD3FRZR3TUvbSg==\",\n- \"optional\": true,\n- \"peer\": true\n- },\n\"node_modules/@vue/test-utils\": {\n\"version\": \"1.3.3\",\n\"resolved\": \"https://registry.npmjs.org/@vue/test-utils/-/test-utils-1.3.3.tgz\",\n\"integrity\": \"sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==\"\n},\n\"node_modules/anymatch\": {\n- \"version\": \"3.1.2\",\n- \"resolved\": \"https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz\",\n- \"integrity\": \"sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==\",\n+ \"version\": \"3.1.3\",\n+ \"resolved\": \"https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz\",\n+ \"integrity\": \"sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==\",\n\"dependencies\": {\n\"normalize-path\": \"^3.0.0\",\n\"picomatch\": \"^2.0.4\"\n\"integrity\": \"sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==\"\n},\n\"node_modules/bootstrap\": {\n- \"version\": \"5.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/bootstrap/-/bootstrap-5.2.2.tgz\",\n- \"integrity\": \"sha512-dEtzMTV71n6Fhmbg4fYJzQsw1N29hJKO1js5ackCgIpDcGid2ETMGC6zwSYw09v05Y+oRdQ9loC54zB1La3hHQ==\",\n+ \"version\": \"5.2.3\",\n+ \"resolved\": \"https://registry.npmjs.org/bootstrap/-/bootstrap-5.2.3.tgz\",\n+ \"integrity\": \"sha512-cEKPM+fwb3cT8NzQZYEu4HilJ3anCrWqh3CHAok1p9jXqMPsPTBhU25fBckEJHJ/p+tTxTFTsFQGM+gaHpi3QQ==\",\n\"funding\": [\n{\n\"type\": \"github\",\n}\n},\n\"node_modules/caniuse-lite\": {\n- \"version\": \"1.0.30001431\",\n- \"resolved\": \"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001431.tgz\",\n- \"integrity\": \"sha512-zBUoFU0ZcxpvSt9IU66dXVT/3ctO1cy4y9cscs1szkPlcWb6pasYM144GqrUygUbT+k7cmUCW61cvskjcv0enQ==\",\n+ \"version\": \"1.0.30001434\",\n+ \"resolved\": \"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001434.tgz\",\n+ \"integrity\": \"sha512-aOBHrLmTQw//WFa2rcF1If9fa3ypkC1wzqqiKHgfdrXTWcU8C4gKVZT77eQAPWN1APys3+uQ0Df07rKauXGEYA==\",\n\"funding\": [\n{\n\"type\": \"opencollective\",\n}\n},\n\"node_modules/cheerio/node_modules/parse5\": {\n- \"version\": \"7.1.1\",\n- \"resolved\": \"https://registry.npmjs.org/parse5/-/parse5-7.1.1.tgz\",\n- \"integrity\": \"sha512-kwpuwzB+px5WUg9pyK0IcK/shltJN5/OVhQagxhCQNtT9Y9QRZqNY2e1cmbu/paRh5LMnz/oVTVLBpjFmMZhSg==\",\n+ \"version\": \"7.1.2\",\n+ \"resolved\": \"https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz\",\n+ \"integrity\": 
\"sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==\",\n\"dev\": true,\n\"dependencies\": {\n\"entities\": \"^4.4.0\"\n}\n},\n\"node_modules/codemirror\": {\n- \"version\": \"5.65.9\",\n- \"resolved\": \"https://registry.npmjs.org/codemirror/-/codemirror-5.65.9.tgz\",\n- \"integrity\": \"sha512-19Jox5sAKpusTDgqgKB5dawPpQcY+ipQK7xoEI+MVucEF9qqFaXpeqY1KaoyGBso/wHQoDa4HMMxMjdsS3Zzzw==\"\n+ \"version\": \"5.65.10\",\n+ \"resolved\": \"https://registry.npmjs.org/codemirror/-/codemirror-5.65.10.tgz\",\n+ \"integrity\": \"sha512-IXAG5wlhbgcTJ6rZZcmi4+sjWIbJqIGfeg3tNa3yX84Jb3T4huS5qzQAo/cUisc1l3bI47WZodpyf7cYcocDKg==\"\n},\n\"node_modules/codemirror-spell-checker\": {\n\"version\": \"1.1.2\",\n}\n},\n\"node_modules/easymde/node_modules/marked\": {\n- \"version\": \"4.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.2.tgz\",\n- \"integrity\": \"sha512-JjBTFTAvuTgANXx82a5vzK9JLSMoV6V3LBVn4Uhdso6t7vXrGx7g1Cd2r6NYSsxrYbQGFCMqBDhFHyK5q2UvcQ==\",\n+ \"version\": \"4.2.3\",\n+ \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.3.tgz\",\n+ \"integrity\": \"sha512-slWRdJkbTZ+PjkyJnE30Uid64eHwbwa1Q25INCAYfZlK4o6ylagBy/Le9eWntqJFoFT93ikUKMv47GZ4gTwHkw==\",\n\"bin\": {\n\"marked\": \"bin/marked.js\"\n},\n\"version\": \"0.25.9\",\n\"resolved\": \"https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz\",\n\"integrity\": \"sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==\",\n- \"devOptional\": true,\n+ \"dev\": true,\n\"dependencies\": {\n\"sourcemap-codec\": \"^1.4.8\"\n}\n}\n},\n\"node_modules/memfs\": {\n- \"version\": \"3.4.11\",\n- \"resolved\": \"https://registry.npmjs.org/memfs/-/memfs-3.4.11.tgz\",\n- \"integrity\": \"sha512-GvsCITGAyDCxxsJ+X6prJexFQEhOCJaIlUbsAvjzSI5o5O7j2dle3jWvz5Z5aOdpOxW6ol3vI1+0ut+641F1+w==\",\n+ \"version\": \"3.4.12\",\n+ \"resolved\": \"https://registry.npmjs.org/memfs/-/memfs-3.4.12.tgz\",\n+ \"integrity\": \"sha512-BcjuQn6vfqP+k100e0E9m61Hyqa//Brp+I3f0OBmN0ATHlFA8vx3Lt8z57R3u2bPqe3WGDBC+nF72fTH7isyEw==\",\n\"dev\": true,\n\"dependencies\": {\n\"fs-monkey\": \"^1.0.3\"\n}\n},\n\"node_modules/postcss-selector-parser\": {\n- \"version\": \"6.0.10\",\n- \"resolved\": \"https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz\",\n- \"integrity\": \"sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==\",\n+ \"version\": \"6.0.11\",\n+ \"resolved\": \"https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.11.tgz\",\n+ \"integrity\": \"sha512-zbARubNdogI9j7WY4nQJBiNqQf3sLS3wCP4WfOidu+p28LofJqDH1tcXypGrcmMHhDk2t9wGhCsYe/+szLTy1g==\",\n\"dependencies\": {\n\"cssesc\": \"^3.0.0\",\n\"util-deprecate\": \"^1.0.2\"\n\"version\": \"1.4.8\",\n\"resolved\": \"https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz\",\n\"integrity\": \"sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==\",\n- \"devOptional\": true\n+ \"dev\": true\n},\n\"node_modules/spdx-correct\": {\n\"version\": \"3.1.1\",\n\"vue\": \"^2.0.0\"\n}\n},\n- \"node_modules/vue/node_modules/@vue/compiler-sfc\": {\n- \"version\": \"2.7.14\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n- \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n- \"dependencies\": {\n- \"@babel/parser\": \"^7.18.4\",\n- \"postcss\": \"^8.4.14\",\n- 
\"source-map\": \"^0.6.1\"\n- }\n- },\n\"node_modules/vuedraggable\": {\n\"version\": \"2.24.3\",\n\"resolved\": \"https://registry.npmjs.org/vuedraggable/-/vuedraggable-2.24.3.tgz\",\n\"strip-ansi\": \"^6.0.0\"\n}\n},\n- \"@vue/compiler-core\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.2.45.tgz\",\n- \"integrity\": \"sha512-rcMj7H+PYe5wBV3iYeUgbCglC+pbpN8hBLTJvRiK2eKQiWqu+fG9F+8sW99JdL4LQi7Re178UOxn09puSXvn4A==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"requires\": {\n- \"@babel/parser\": \"^7.16.4\",\n- \"@vue/shared\": \"3.2.45\",\n- \"estree-walker\": \"^2.0.2\",\n- \"source-map\": \"^0.6.1\"\n- },\n- \"dependencies\": {\n- \"estree-walker\": {\n- \"version\": \"2.0.2\",\n- \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n- \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n- \"optional\": true,\n- \"peer\": true\n- }\n- }\n- },\n- \"@vue/compiler-dom\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.2.45.tgz\",\n- \"integrity\": \"sha512-tyYeUEuKqqZO137WrZkpwfPCdiiIeXYCcJ8L4gWz9vqaxzIQRccTSwSWZ/Axx5YR2z+LvpUbmPNXxuBU45lyRw==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"requires\": {\n- \"@vue/compiler-core\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\"\n- }\n- },\n\"@vue/compiler-sfc\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.2.45.tgz\",\n- \"integrity\": \"sha512-1jXDuWah1ggsnSAOGsec8cFjT/K6TMZ0sPL3o3d84Ft2AYZi2jWJgRMjw4iaK0rBfA89L5gw427H4n1RZQBu6Q==\",\n- \"optional\": true,\n- \"peer\": true,\n+ \"version\": \"2.7.14\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n+ \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n\"requires\": {\n- \"@babel/parser\": \"^7.16.4\",\n- \"@vue/compiler-core\": \"3.2.45\",\n- \"@vue/compiler-dom\": \"3.2.45\",\n- \"@vue/compiler-ssr\": \"3.2.45\",\n- \"@vue/reactivity-transform\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\",\n- \"estree-walker\": \"^2.0.2\",\n- \"magic-string\": \"^0.25.7\",\n- \"postcss\": \"^8.1.10\",\n+ \"@babel/parser\": \"^7.18.4\",\n+ \"postcss\": \"^8.4.14\",\n\"source-map\": \"^0.6.1\"\n- },\n- \"dependencies\": {\n- \"estree-walker\": {\n- \"version\": \"2.0.2\",\n- \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n- \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n- \"optional\": true,\n- \"peer\": true\n- }\n- }\n- },\n- \"@vue/compiler-ssr\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.2.45.tgz\",\n- \"integrity\": \"sha512-6BRaggEGqhWht3lt24CrIbQSRD5O07MTmd+LjAn5fJj568+R9eUD2F7wMQJjX859seSlrYog7sUtrZSd7feqrQ==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"requires\": {\n- \"@vue/compiler-dom\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\"\n}\n},\n\"@vue/component-compiler-utils\": {\n\"integrity\": \"sha512-LIZMuJk38pk9U9Ur4YzHjlIyMuxPlACdBIHH9/nGYVTsaGKOSnSuELiE8vS9wa+dJpIYspYUOqk+L1Q4pgHQHQ==\",\n\"requires\": {}\n},\n- \"@vue/reactivity-transform\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/reactivity-transform/-/reactivity-transform-3.2.45.tgz\",\n- \"integrity\": 
\"sha512-BHVmzYAvM7vcU5WmuYqXpwaBHjsS8T63jlKGWVtHxAHIoMIlmaMyurUSEs1Zcg46M4AYT5MtB1U274/2aNzjJQ==\",\n- \"optional\": true,\n- \"peer\": true,\n- \"requires\": {\n- \"@babel/parser\": \"^7.16.4\",\n- \"@vue/compiler-core\": \"3.2.45\",\n- \"@vue/shared\": \"3.2.45\",\n- \"estree-walker\": \"^2.0.2\",\n- \"magic-string\": \"^0.25.7\"\n- },\n- \"dependencies\": {\n- \"estree-walker\": {\n- \"version\": \"2.0.2\",\n- \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n- \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n- \"optional\": true,\n- \"peer\": true\n- }\n- }\n- },\n- \"@vue/shared\": {\n- \"version\": \"3.2.45\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/shared/-/shared-3.2.45.tgz\",\n- \"integrity\": \"sha512-Ewzq5Yhimg7pSztDV+RH1UDKBzmtqieXQlpTVm2AwraoRL/Rks96mvd8Vgi7Lj+h+TH8dv7mXD3FRZR3TUvbSg==\",\n- \"optional\": true,\n- \"peer\": true\n- },\n\"@vue/test-utils\": {\n\"version\": \"1.3.3\",\n\"resolved\": \"https://registry.npmjs.org/@vue/test-utils/-/test-utils-1.3.3.tgz\",\n\"integrity\": \"sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==\"\n},\n\"anymatch\": {\n- \"version\": \"3.1.2\",\n- \"resolved\": \"https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz\",\n- \"integrity\": \"sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==\",\n+ \"version\": \"3.1.3\",\n+ \"resolved\": \"https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz\",\n+ \"integrity\": \"sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==\",\n\"requires\": {\n\"normalize-path\": \"^3.0.0\",\n\"picomatch\": \"^2.0.4\"\n\"integrity\": \"sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==\"\n},\n\"bootstrap\": {\n- \"version\": \"5.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/bootstrap/-/bootstrap-5.2.2.tgz\",\n- \"integrity\": \"sha512-dEtzMTV71n6Fhmbg4fYJzQsw1N29hJKO1js5ackCgIpDcGid2ETMGC6zwSYw09v05Y+oRdQ9loC54zB1La3hHQ==\",\n+ \"version\": \"5.2.3\",\n+ \"resolved\": \"https://registry.npmjs.org/bootstrap/-/bootstrap-5.2.3.tgz\",\n+ \"integrity\": \"sha512-cEKPM+fwb3cT8NzQZYEu4HilJ3anCrWqh3CHAok1p9jXqMPsPTBhU25fBckEJHJ/p+tTxTFTsFQGM+gaHpi3QQ==\",\n\"requires\": {}\n},\n\"brace-expansion\": {\n}\n},\n\"caniuse-lite\": {\n- \"version\": \"1.0.30001431\",\n- \"resolved\": \"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001431.tgz\",\n- \"integrity\": \"sha512-zBUoFU0ZcxpvSt9IU66dXVT/3ctO1cy4y9cscs1szkPlcWb6pasYM144GqrUygUbT+k7cmUCW61cvskjcv0enQ==\"\n+ \"version\": \"1.0.30001434\",\n+ \"resolved\": \"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001434.tgz\",\n+ \"integrity\": \"sha512-aOBHrLmTQw//WFa2rcF1If9fa3ypkC1wzqqiKHgfdrXTWcU8C4gKVZT77eQAPWN1APys3+uQ0Df07rKauXGEYA==\"\n},\n\"capture-exit\": {\n\"version\": \"2.0.0\",\n},\n\"dependencies\": {\n\"parse5\": {\n- \"version\": \"7.1.1\",\n- \"resolved\": \"https://registry.npmjs.org/parse5/-/parse5-7.1.1.tgz\",\n- \"integrity\": \"sha512-kwpuwzB+px5WUg9pyK0IcK/shltJN5/OVhQagxhCQNtT9Y9QRZqNY2e1cmbu/paRh5LMnz/oVTVLBpjFmMZhSg==\",\n+ \"version\": \"7.1.2\",\n+ \"resolved\": \"https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz\",\n+ \"integrity\": \"sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==\",\n\"dev\": true,\n\"requires\": {\n\"entities\": \"^4.4.0\"\n}\n},\n\"codemirror\": {\n- 
\"version\": \"5.65.9\",\n- \"resolved\": \"https://registry.npmjs.org/codemirror/-/codemirror-5.65.9.tgz\",\n- \"integrity\": \"sha512-19Jox5sAKpusTDgqgKB5dawPpQcY+ipQK7xoEI+MVucEF9qqFaXpeqY1KaoyGBso/wHQoDa4HMMxMjdsS3Zzzw==\"\n+ \"version\": \"5.65.10\",\n+ \"resolved\": \"https://registry.npmjs.org/codemirror/-/codemirror-5.65.10.tgz\",\n+ \"integrity\": \"sha512-IXAG5wlhbgcTJ6rZZcmi4+sjWIbJqIGfeg3tNa3yX84Jb3T4huS5qzQAo/cUisc1l3bI47WZodpyf7cYcocDKg==\"\n},\n\"codemirror-spell-checker\": {\n\"version\": \"1.1.2\",\n},\n\"dependencies\": {\n\"marked\": {\n- \"version\": \"4.2.2\",\n- \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.2.tgz\",\n- \"integrity\": \"sha512-JjBTFTAvuTgANXx82a5vzK9JLSMoV6V3LBVn4Uhdso6t7vXrGx7g1Cd2r6NYSsxrYbQGFCMqBDhFHyK5q2UvcQ==\"\n+ \"version\": \"4.2.3\",\n+ \"resolved\": \"https://registry.npmjs.org/marked/-/marked-4.2.3.tgz\",\n+ \"integrity\": \"sha512-slWRdJkbTZ+PjkyJnE30Uid64eHwbwa1Q25INCAYfZlK4o6ylagBy/Le9eWntqJFoFT93ikUKMv47GZ4gTwHkw==\"\n}\n}\n},\n\"version\": \"0.25.9\",\n\"resolved\": \"https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz\",\n\"integrity\": \"sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==\",\n- \"devOptional\": true,\n+ \"dev\": true,\n\"requires\": {\n\"sourcemap-codec\": \"^1.4.8\"\n}\n\"integrity\": \"sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==\"\n},\n\"memfs\": {\n- \"version\": \"3.4.11\",\n- \"resolved\": \"https://registry.npmjs.org/memfs/-/memfs-3.4.11.tgz\",\n- \"integrity\": \"sha512-GvsCITGAyDCxxsJ+X6prJexFQEhOCJaIlUbsAvjzSI5o5O7j2dle3jWvz5Z5aOdpOxW6ol3vI1+0ut+641F1+w==\",\n+ \"version\": \"3.4.12\",\n+ \"resolved\": \"https://registry.npmjs.org/memfs/-/memfs-3.4.12.tgz\",\n+ \"integrity\": \"sha512-BcjuQn6vfqP+k100e0E9m61Hyqa//Brp+I3f0OBmN0ATHlFA8vx3Lt8z57R3u2bPqe3WGDBC+nF72fTH7isyEw==\",\n\"dev\": true,\n\"requires\": {\n\"fs-monkey\": \"^1.0.3\"\n}\n},\n\"postcss-selector-parser\": {\n- \"version\": \"6.0.10\",\n- \"resolved\": \"https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz\",\n- \"integrity\": \"sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==\",\n+ \"version\": \"6.0.11\",\n+ \"resolved\": \"https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.11.tgz\",\n+ \"integrity\": \"sha512-zbARubNdogI9j7WY4nQJBiNqQf3sLS3wCP4WfOidu+p28LofJqDH1tcXypGrcmMHhDk2t9wGhCsYe/+szLTy1g==\",\n\"requires\": {\n\"cssesc\": \"^3.0.0\",\n\"util-deprecate\": \"^1.0.2\"\n\"version\": \"1.4.8\",\n\"resolved\": \"https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz\",\n\"integrity\": \"sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==\",\n- \"devOptional\": true\n+ \"dev\": true\n},\n\"spdx-correct\": {\n\"version\": \"3.1.1\",\n\"requires\": {\n\"@vue/compiler-sfc\": \"2.7.14\",\n\"csstype\": \"^3.1.0\"\n- },\n- \"dependencies\": {\n- \"@vue/compiler-sfc\": {\n- \"version\": \"2.7.14\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n- \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n- \"requires\": {\n- \"@babel/parser\": \"^7.18.4\",\n- \"postcss\": \"^8.4.14\",\n- \"source-map\": \"^0.6.1\"\n- }\n- }\n}\n},\n\"vue-easymde\": {\n"
}
] |
PHP | MIT License | bolt/core |
Prevent jank when rendering list of thumbnails.
|
95,144 | 24.11.2022 09:39:09 | -3,600 | 7f61d7fc203cc0cca2721901915d42c0e63c368f |
Release 5.2.0-beta.9
|
[
{
"change_type": "MODIFY",
"old_path": "assets/js/version.js",
"new_path": "assets/js/version.js",
"diff": "// generated by genversion\n-export const version = '5.1.99.8';\n+export const version = '5.1.99.9';\n"
},
{
"change_type": "MODIFY",
"old_path": "package-lock.json",
"new_path": "package-lock.json",
"diff": "{\n\"name\": \"bolt\",\n- \"version\": \"5.1.99.8\",\n+ \"version\": \"5.1.99.9\",\n\"lockfileVersion\": 2,\n\"requires\": true,\n\"packages\": {\n\"\": {\n\"name\": \"bolt\",\n- \"version\": \"5.1.99.8\",\n+ \"version\": \"5.1.99.9\",\n\"license\": \"MIT\",\n\"dependencies\": {\n\"@popperjs/core\": \"^2.11.6\",\n\"integrity\": \"sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==\"\n},\n\"node_modules/@types/lodash\": {\n- \"version\": \"4.14.189\",\n- \"resolved\": \"https://registry.npmjs.org/@types/lodash/-/lodash-4.14.189.tgz\",\n- \"integrity\": \"sha512-kb9/98N6X8gyME9Cf7YaqIMvYGnBSWqEci6tiettE6iJWH1XdJz/PO8LB0GtLCG7x8dU3KWhZT+lA1a35127tA==\"\n+ \"version\": \"4.14.190\",\n+ \"resolved\": \"https://registry.npmjs.org/@types/lodash/-/lodash-4.14.190.tgz\",\n+ \"integrity\": \"sha512-5iJ3FBJBvQHQ8sFhEhJfjUP+G+LalhavTkYyrAYqz5MEJG+erSv0k9KJLb6q7++17Lafk1scaTIFXcMJlwK8Mw==\"\n},\n\"node_modules/@types/marked\": {\n\"version\": \"4.0.7\",\n\"strip-ansi\": \"^6.0.0\"\n}\n},\n+ \"node_modules/@vue/compiler-core\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.2.45.tgz\",\n+ \"integrity\": \"sha512-rcMj7H+PYe5wBV3iYeUgbCglC+pbpN8hBLTJvRiK2eKQiWqu+fG9F+8sW99JdL4LQi7Re178UOxn09puSXvn4A==\",\n+ \"optional\": true,\n+ \"peer\": true,\n+ \"dependencies\": {\n+ \"@babel/parser\": \"^7.16.4\",\n+ \"@vue/shared\": \"3.2.45\",\n+ \"estree-walker\": \"^2.0.2\",\n+ \"source-map\": \"^0.6.1\"\n+ }\n+ },\n+ \"node_modules/@vue/compiler-core/node_modules/estree-walker\": {\n+ \"version\": \"2.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n+ \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ },\n+ \"node_modules/@vue/compiler-dom\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.2.45.tgz\",\n+ \"integrity\": \"sha512-tyYeUEuKqqZO137WrZkpwfPCdiiIeXYCcJ8L4gWz9vqaxzIQRccTSwSWZ/Axx5YR2z+LvpUbmPNXxuBU45lyRw==\",\n+ \"optional\": true,\n+ \"peer\": true,\n+ \"dependencies\": {\n+ \"@vue/compiler-core\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\"\n+ }\n+ },\n\"node_modules/@vue/compiler-sfc\": {\n- \"version\": \"2.7.14\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n- \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.2.45.tgz\",\n+ \"integrity\": \"sha512-1jXDuWah1ggsnSAOGsec8cFjT/K6TMZ0sPL3o3d84Ft2AYZi2jWJgRMjw4iaK0rBfA89L5gw427H4n1RZQBu6Q==\",\n+ \"optional\": true,\n+ \"peer\": true,\n\"dependencies\": {\n- \"@babel/parser\": \"^7.18.4\",\n- \"postcss\": \"^8.4.14\",\n+ \"@babel/parser\": \"^7.16.4\",\n+ \"@vue/compiler-core\": \"3.2.45\",\n+ \"@vue/compiler-dom\": \"3.2.45\",\n+ \"@vue/compiler-ssr\": \"3.2.45\",\n+ \"@vue/reactivity-transform\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\",\n+ \"estree-walker\": \"^2.0.2\",\n+ \"magic-string\": \"^0.25.7\",\n+ \"postcss\": \"^8.1.10\",\n\"source-map\": \"^0.6.1\"\n}\n},\n+ \"node_modules/@vue/compiler-sfc/node_modules/estree-walker\": {\n+ \"version\": \"2.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n+ \"integrity\": 
\"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ },\n+ \"node_modules/@vue/compiler-ssr\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.2.45.tgz\",\n+ \"integrity\": \"sha512-6BRaggEGqhWht3lt24CrIbQSRD5O07MTmd+LjAn5fJj568+R9eUD2F7wMQJjX859seSlrYog7sUtrZSd7feqrQ==\",\n+ \"optional\": true,\n+ \"peer\": true,\n+ \"dependencies\": {\n+ \"@vue/compiler-dom\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\"\n+ }\n+ },\n\"node_modules/@vue/component-compiler-utils\": {\n\"version\": \"3.3.0\",\n\"resolved\": \"https://registry.npmjs.org/@vue/component-compiler-utils/-/component-compiler-utils-3.3.0.tgz\",\n\"webpack\": \">=4.0.0\"\n}\n},\n+ \"node_modules/@vue/reactivity-transform\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/reactivity-transform/-/reactivity-transform-3.2.45.tgz\",\n+ \"integrity\": \"sha512-BHVmzYAvM7vcU5WmuYqXpwaBHjsS8T63jlKGWVtHxAHIoMIlmaMyurUSEs1Zcg46M4AYT5MtB1U274/2aNzjJQ==\",\n+ \"optional\": true,\n+ \"peer\": true,\n+ \"dependencies\": {\n+ \"@babel/parser\": \"^7.16.4\",\n+ \"@vue/compiler-core\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\",\n+ \"estree-walker\": \"^2.0.2\",\n+ \"magic-string\": \"^0.25.7\"\n+ }\n+ },\n+ \"node_modules/@vue/reactivity-transform/node_modules/estree-walker\": {\n+ \"version\": \"2.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n+ \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ },\n+ \"node_modules/@vue/shared\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/shared/-/shared-3.2.45.tgz\",\n+ \"integrity\": \"sha512-Ewzq5Yhimg7pSztDV+RH1UDKBzmtqieXQlpTVm2AwraoRL/Rks96mvd8Vgi7Lj+h+TH8dv7mXD3FRZR3TUvbSg==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ },\n\"node_modules/@vue/test-utils\": {\n\"version\": \"1.3.3\",\n\"resolved\": \"https://registry.npmjs.org/@vue/test-utils/-/test-utils-1.3.3.tgz\",\n}\n},\n\"node_modules/ci-info\": {\n- \"version\": \"3.6.1\",\n- \"resolved\": \"https://registry.npmjs.org/ci-info/-/ci-info-3.6.1.tgz\",\n- \"integrity\": \"sha512-up5ggbaDqOqJ4UqLKZ2naVkyqSJQgJi5lwD6b6mM748ysrghDBX0bx/qJTUHzw7zu6Mq4gycviSF5hJnwceD8w==\",\n+ \"version\": \"3.6.2\",\n+ \"resolved\": \"https://registry.npmjs.org/ci-info/-/ci-info-3.6.2.tgz\",\n+ \"integrity\": \"sha512-lVZdhvbEudris15CLytp2u6Y0p5EKfztae9Fqa189MfNmln9F33XuH69v5fvNfiRN5/0eAUz2yJL3mo+nhaRKg==\",\n\"dev\": true,\n\"engines\": {\n\"node\": \">=8\"\n}\n},\n\"node_modules/enhanced-resolve\": {\n- \"version\": \"5.10.0\",\n- \"resolved\": \"https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz\",\n- \"integrity\": \"sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==\",\n+ \"version\": \"5.12.0\",\n+ \"resolved\": \"https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz\",\n+ \"integrity\": \"sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ==\",\n\"dependencies\": {\n\"graceful-fs\": \"^4.2.4\",\n\"tapable\": \"^2.2.0\"\n\"integrity\": \"sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==\"\n},\n\"node_modules/hotkeys-js\": {\n- \"version\": \"3.10.0\",\n- \"resolved\": 
\"https://registry.npmjs.org/hotkeys-js/-/hotkeys-js-3.10.0.tgz\",\n- \"integrity\": \"sha512-20xeVdOqcgTkMox0+BqFwADZP7+5dy/9CFPpAinSMh2d0s3b0Hs2V2D+lMh4Hphkf7VE9pwnOl58eP1te+REcg==\"\n+ \"version\": \"3.10.1\",\n+ \"resolved\": \"https://registry.npmjs.org/hotkeys-js/-/hotkeys-js-3.10.1.tgz\",\n+ \"integrity\": \"sha512-mshqjgTqx8ee0qryHvRgZaZDxTwxam/2yTQmQlqAWS3+twnq1jsY9Yng9zB7lWq6WRrjTbTOc7knNwccXQiAjQ==\"\n},\n\"node_modules/hpack.js\": {\n\"version\": \"2.1.6\",\n\"version\": \"0.25.9\",\n\"resolved\": \"https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz\",\n\"integrity\": \"sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==\",\n- \"dev\": true,\n+ \"devOptional\": true,\n\"dependencies\": {\n\"sourcemap-codec\": \"^1.4.8\"\n}\n\"version\": \"1.4.8\",\n\"resolved\": \"https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz\",\n\"integrity\": \"sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==\",\n- \"dev\": true\n+ \"devOptional\": true\n},\n\"node_modules/spdx-correct\": {\n\"version\": \"3.1.1\",\n\"vue\": \"^2.0.0\"\n}\n},\n+ \"node_modules/vue/node_modules/@vue/compiler-sfc\": {\n+ \"version\": \"2.7.14\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n+ \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n+ \"dependencies\": {\n+ \"@babel/parser\": \"^7.18.4\",\n+ \"postcss\": \"^8.4.14\",\n+ \"source-map\": \"^0.6.1\"\n+ }\n+ },\n\"node_modules/vuedraggable\": {\n\"version\": \"2.24.3\",\n\"resolved\": \"https://registry.npmjs.org/vuedraggable/-/vuedraggable-2.24.3.tgz\",\n\"integrity\": \"sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==\"\n},\n\"@types/lodash\": {\n- \"version\": \"4.14.189\",\n- \"resolved\": \"https://registry.npmjs.org/@types/lodash/-/lodash-4.14.189.tgz\",\n- \"integrity\": \"sha512-kb9/98N6X8gyME9Cf7YaqIMvYGnBSWqEci6tiettE6iJWH1XdJz/PO8LB0GtLCG7x8dU3KWhZT+lA1a35127tA==\"\n+ \"version\": \"4.14.190\",\n+ \"resolved\": \"https://registry.npmjs.org/@types/lodash/-/lodash-4.14.190.tgz\",\n+ \"integrity\": \"sha512-5iJ3FBJBvQHQ8sFhEhJfjUP+G+LalhavTkYyrAYqz5MEJG+erSv0k9KJLb6q7++17Lafk1scaTIFXcMJlwK8Mw==\"\n},\n\"@types/marked\": {\n\"version\": \"4.0.7\",\n\"strip-ansi\": \"^6.0.0\"\n}\n},\n+ \"@vue/compiler-core\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.2.45.tgz\",\n+ \"integrity\": \"sha512-rcMj7H+PYe5wBV3iYeUgbCglC+pbpN8hBLTJvRiK2eKQiWqu+fG9F+8sW99JdL4LQi7Re178UOxn09puSXvn4A==\",\n+ \"optional\": true,\n+ \"peer\": true,\n+ \"requires\": {\n+ \"@babel/parser\": \"^7.16.4\",\n+ \"@vue/shared\": \"3.2.45\",\n+ \"estree-walker\": \"^2.0.2\",\n+ \"source-map\": \"^0.6.1\"\n+ },\n+ \"dependencies\": {\n+ \"estree-walker\": {\n+ \"version\": \"2.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n+ \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ }\n+ }\n+ },\n+ \"@vue/compiler-dom\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.2.45.tgz\",\n+ \"integrity\": \"sha512-tyYeUEuKqqZO137WrZkpwfPCdiiIeXYCcJ8L4gWz9vqaxzIQRccTSwSWZ/Axx5YR2z+LvpUbmPNXxuBU45lyRw==\",\n+ \"optional\": true,\n+ \"peer\": 
true,\n+ \"requires\": {\n+ \"@vue/compiler-core\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\"\n+ }\n+ },\n\"@vue/compiler-sfc\": {\n- \"version\": \"2.7.14\",\n- \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n- \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.2.45.tgz\",\n+ \"integrity\": \"sha512-1jXDuWah1ggsnSAOGsec8cFjT/K6TMZ0sPL3o3d84Ft2AYZi2jWJgRMjw4iaK0rBfA89L5gw427H4n1RZQBu6Q==\",\n+ \"optional\": true,\n+ \"peer\": true,\n\"requires\": {\n- \"@babel/parser\": \"^7.18.4\",\n- \"postcss\": \"^8.4.14\",\n+ \"@babel/parser\": \"^7.16.4\",\n+ \"@vue/compiler-core\": \"3.2.45\",\n+ \"@vue/compiler-dom\": \"3.2.45\",\n+ \"@vue/compiler-ssr\": \"3.2.45\",\n+ \"@vue/reactivity-transform\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\",\n+ \"estree-walker\": \"^2.0.2\",\n+ \"magic-string\": \"^0.25.7\",\n+ \"postcss\": \"^8.1.10\",\n\"source-map\": \"^0.6.1\"\n+ },\n+ \"dependencies\": {\n+ \"estree-walker\": {\n+ \"version\": \"2.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n+ \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ }\n+ }\n+ },\n+ \"@vue/compiler-ssr\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.2.45.tgz\",\n+ \"integrity\": \"sha512-6BRaggEGqhWht3lt24CrIbQSRD5O07MTmd+LjAn5fJj568+R9eUD2F7wMQJjX859seSlrYog7sUtrZSd7feqrQ==\",\n+ \"optional\": true,\n+ \"peer\": true,\n+ \"requires\": {\n+ \"@vue/compiler-dom\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\"\n}\n},\n\"@vue/component-compiler-utils\": {\n\"integrity\": \"sha512-LIZMuJk38pk9U9Ur4YzHjlIyMuxPlACdBIHH9/nGYVTsaGKOSnSuELiE8vS9wa+dJpIYspYUOqk+L1Q4pgHQHQ==\",\n\"requires\": {}\n},\n+ \"@vue/reactivity-transform\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/reactivity-transform/-/reactivity-transform-3.2.45.tgz\",\n+ \"integrity\": \"sha512-BHVmzYAvM7vcU5WmuYqXpwaBHjsS8T63jlKGWVtHxAHIoMIlmaMyurUSEs1Zcg46M4AYT5MtB1U274/2aNzjJQ==\",\n+ \"optional\": true,\n+ \"peer\": true,\n+ \"requires\": {\n+ \"@babel/parser\": \"^7.16.4\",\n+ \"@vue/compiler-core\": \"3.2.45\",\n+ \"@vue/shared\": \"3.2.45\",\n+ \"estree-walker\": \"^2.0.2\",\n+ \"magic-string\": \"^0.25.7\"\n+ },\n+ \"dependencies\": {\n+ \"estree-walker\": {\n+ \"version\": \"2.0.2\",\n+ \"resolved\": \"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz\",\n+ \"integrity\": \"sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ }\n+ }\n+ },\n+ \"@vue/shared\": {\n+ \"version\": \"3.2.45\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/shared/-/shared-3.2.45.tgz\",\n+ \"integrity\": \"sha512-Ewzq5Yhimg7pSztDV+RH1UDKBzmtqieXQlpTVm2AwraoRL/Rks96mvd8Vgi7Lj+h+TH8dv7mXD3FRZR3TUvbSg==\",\n+ \"optional\": true,\n+ \"peer\": true\n+ },\n\"@vue/test-utils\": {\n\"version\": \"1.3.3\",\n\"resolved\": \"https://registry.npmjs.org/@vue/test-utils/-/test-utils-1.3.3.tgz\",\n\"integrity\": \"sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==\"\n},\n\"ci-info\": {\n- \"version\": \"3.6.1\",\n- \"resolved\": 
\"https://registry.npmjs.org/ci-info/-/ci-info-3.6.1.tgz\",\n- \"integrity\": \"sha512-up5ggbaDqOqJ4UqLKZ2naVkyqSJQgJi5lwD6b6mM748ysrghDBX0bx/qJTUHzw7zu6Mq4gycviSF5hJnwceD8w==\",\n+ \"version\": \"3.6.2\",\n+ \"resolved\": \"https://registry.npmjs.org/ci-info/-/ci-info-3.6.2.tgz\",\n+ \"integrity\": \"sha512-lVZdhvbEudris15CLytp2u6Y0p5EKfztae9Fqa189MfNmln9F33XuH69v5fvNfiRN5/0eAUz2yJL3mo+nhaRKg==\",\n\"dev\": true\n},\n\"cipher-base\": {\n}\n},\n\"enhanced-resolve\": {\n- \"version\": \"5.10.0\",\n- \"resolved\": \"https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz\",\n- \"integrity\": \"sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==\",\n+ \"version\": \"5.12.0\",\n+ \"resolved\": \"https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz\",\n+ \"integrity\": \"sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ==\",\n\"requires\": {\n\"graceful-fs\": \"^4.2.4\",\n\"tapable\": \"^2.2.0\"\n\"integrity\": \"sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==\"\n},\n\"hotkeys-js\": {\n- \"version\": \"3.10.0\",\n- \"resolved\": \"https://registry.npmjs.org/hotkeys-js/-/hotkeys-js-3.10.0.tgz\",\n- \"integrity\": \"sha512-20xeVdOqcgTkMox0+BqFwADZP7+5dy/9CFPpAinSMh2d0s3b0Hs2V2D+lMh4Hphkf7VE9pwnOl58eP1te+REcg==\"\n+ \"version\": \"3.10.1\",\n+ \"resolved\": \"https://registry.npmjs.org/hotkeys-js/-/hotkeys-js-3.10.1.tgz\",\n+ \"integrity\": \"sha512-mshqjgTqx8ee0qryHvRgZaZDxTwxam/2yTQmQlqAWS3+twnq1jsY9Yng9zB7lWq6WRrjTbTOc7knNwccXQiAjQ==\"\n},\n\"hpack.js\": {\n\"version\": \"2.1.6\",\n\"version\": \"0.25.9\",\n\"resolved\": \"https://registry.npmjs.org/magic-string/-/magic-string-0.25.9.tgz\",\n\"integrity\": \"sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==\",\n- \"dev\": true,\n+ \"devOptional\": true,\n\"requires\": {\n\"sourcemap-codec\": \"^1.4.8\"\n}\n\"version\": \"1.4.8\",\n\"resolved\": \"https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz\",\n\"integrity\": \"sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==\",\n- \"dev\": true\n+ \"devOptional\": true\n},\n\"spdx-correct\": {\n\"version\": \"3.1.1\",\n\"requires\": {\n\"@vue/compiler-sfc\": \"2.7.14\",\n\"csstype\": \"^3.1.0\"\n+ },\n+ \"dependencies\": {\n+ \"@vue/compiler-sfc\": {\n+ \"version\": \"2.7.14\",\n+ \"resolved\": \"https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-2.7.14.tgz\",\n+ \"integrity\": \"sha512-aNmNHyLPsw+sVvlQFQ2/8sjNuLtK54TC6cuKnVzAY93ks4ZBrvwQSnkkIh7bsbNhum5hJBS00wSDipQ937f5DA==\",\n+ \"requires\": {\n+ \"@babel/parser\": \"^7.18.4\",\n+ \"postcss\": \"^8.4.14\",\n+ \"source-map\": \"^0.6.1\"\n+ }\n+ }\n}\n},\n\"vue-easymde\": {\n"
},
{
"change_type": "MODIFY",
"old_path": "package.json",
"new_path": "package.json",
"diff": "{\n\"name\": \"bolt\",\n- \"version\": \"5.1.99.8\",\n+ \"version\": \"5.1.99.9\",\n\"homepage\": \"https://boltcms.io\",\n\"author\": \"Bob den Otter <bob@twokings.nl> (https://boltcms.io)\",\n\"license\": \"MIT\",\n"
},
{
"change_type": "MODIFY",
"old_path": "symfony.lock",
"new_path": "symfony.lock",
"diff": "\"laminas/laminas-code\": {\n\"version\": \"3.4.1\"\n},\n- \"laminas/laminas-eventmanager\": {\n- \"version\": \"3.2.1\"\n- },\n- \"laminas/laminas-zendframework-bridge\": {\n- \"version\": \"1.1.1\"\n- },\n\"league/flysystem\": {\n\"version\": \"1.0.64\"\n},\n\"pagerfanta/pagerfanta\": {\n\"version\": \"v2.3.0\"\n},\n- \"paragonie/random_compat\": {\n- \"version\": \"v9.99.100\"\n- },\n\"phar-io/manifest\": {\n\"version\": \"1.0.3\"\n},\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Release 5.2.0-beta.9
|
95,132 |
02.12.2022 11:11:35
| -3,600 |
bddf80bf91ad171dba803bd0bcce7f2381014693
|
Fix `excerpt` when content would be too short _after_ trimming
but would be long enough _before_ trimming in order to skip
returning the `fulltext` instead.
|
[
{
"change_type": "MODIFY",
"old_path": "src/Utils/Excerpt.php",
"new_path": "src/Utils/Excerpt.php",
"diff": "@@ -109,6 +109,7 @@ class Excerpt\n// if we are going to snip too much...\nif ($textlength - $startPos < $relLength) {\n$startPos -= (int) round(($textlength - $startPos) / 2);\n+ $startPos = max(0, $startPos);\n}\n$relText = mb_substr($fulltext, $startPos, $relLength);\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Fix `excerpt` when content would be too short _after_ trimming
but would be long enough _before_ trimming in order to skip
returning the `fulltext` instead.
|
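Note: a minimal PHP sketch (not part of the commit) of the edge case the added `max(0, $startPos)` clamp guards against. The sample text, match position and requested length are invented; only the two lines inside the `if` mirror the patched excerpt logic.

<?php
// When the requested excerpt is longer than the text remaining after the match,
// the original code could push $startPos below zero. mb_substr() treats a
// negative start as "count from the end of the string", so the excerpt would no
// longer surround the match. Clamping to 0 keeps it anchored at the text start.
$fulltext   = 'A fairly short piece of text that we want to excerpt.';
$textlength = mb_strlen($fulltext);
$startPos   = 10;   // invented position of the matched term
$relLength  = 100;  // requested excerpt length, longer than the whole text

if ($textlength - $startPos < $relLength) {
    $startPos -= (int) round(($textlength - $startPos) / 2); // may become negative here
    $startPos = max(0, $startPos);                           // the fix: clamp to 0
}

echo mb_substr($fulltext, $startPos, $relLength), PHP_EOL;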
95,145 |
02.12.2022 12:12:52
| -3,600 |
52fa54e991ae5ca16100a94c88d78165326f97eb
|
Made the isCurrent filter remove the current locale before comparing to the current route, to get consistent results across all languages
|
[
{
"change_type": "MODIFY",
"old_path": "src/Twig/FrontendMenuExtension.php",
"new_path": "src/Twig/FrontendMenuExtension.php",
"diff": "@@ -58,8 +58,11 @@ class FrontendMenuExtension extends AbstractExtension\npublic function isCurrent($item): bool\n{\n+ $currentRequest = $this->requestStack->getCurrentRequest();\n+ $currentLocale = $currentRequest->getLocale();\n$uri = $item['uri'] ?? '';\n+ $currentUrl = str_replace('/' . $currentLocale, '', $currentRequest->getPathInfo());\n- return $uri === $this->requestStack->getCurrentRequest()->getPathInfo();\n+ return $uri === $currentUrl;\n}\n}\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Made the isCurrent filter remove the current locale before comparing to the current route, to get consistent results across all languages
|
95,145 |
02.12.2022 13:20:18
| -3,600 |
08ae66aa4d338581447cfb1c16aec50b39e2acca
|
Improved the string replace of the current filter to avoid edge case errors
|
[
{
"change_type": "MODIFY",
"old_path": "src/Twig/FrontendMenuExtension.php",
"new_path": "src/Twig/FrontendMenuExtension.php",
"diff": "@@ -61,7 +61,7 @@ class FrontendMenuExtension extends AbstractExtension\n$currentRequest = $this->requestStack->getCurrentRequest();\n$currentLocale = $currentRequest->getLocale();\n$uri = $item['uri'] ?? '';\n- $currentUrl = str_replace('/' . $currentLocale, '', $currentRequest->getPathInfo());\n+ $currentUrl = str_replace('/' . $currentLocale . '/', '/', $currentRequest->getPathInfo());\nreturn $uri === $currentUrl;\n}\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Improved the string replace of the current filter to avoid edge case errors
|
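Note: a short, self-contained PHP illustration (paths and locale invented) of the edge case the stricter replacement above avoids: stripping a bare `/en` also mangles unrelated paths that merely start with the same letters, whereas replacing the full `/en/` segment only touches a real locale prefix.

<?php
$locale = 'en';

// Previous replacement: str_replace('/' . $locale, '', $path) also hits '/entries'.
echo str_replace('/' . $locale, '', '/entries/my-page'), PHP_EOL;        // "tries/my-page"    (broken)

// Stricter replacement from the commit: only a complete '/en/' segment is collapsed.
echo str_replace('/' . $locale . '/', '/', '/entries/my-page'), PHP_EOL; // "/entries/my-page" (left alone)
echo str_replace('/' . $locale . '/', '/', '/en/my-page'), PHP_EOL;      // "/my-page"         (locale stripped)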
95,175 |
07.01.2023 22:46:41
| -3,600 |
ebc691b60f923108b00fbc266c01483e24cc7a67
|
Thumbnail quality setting now actually works; Removed superfluous thumbnail settings; Brought GeneralParser's default quality config in line with the default YAML config
|
[
{
"change_type": "MODIFY",
"old_path": "config/bolt/config.yaml",
"new_path": "config/bolt/config.yaml",
"diff": "@@ -166,9 +166,6 @@ records_per_page: 8\n# Default settings for thumbnails.\n#\n# quality: Quality should be between 0 (horrible, small file) and 100 (best, huge file).\n-# fit: One of either none, crop (= crop-center), crop-top-left, crop-top, crop-top-right, crop-left, crop-right, crop-bottom-left, crop-bottom or crop-bottom-right.\n-# allow_upscale: Determines whether small images will be enlarged to fit\n-# the requested dimensions.\n# save_files: Save files in the thumbs/ folder, so subsequent requests will serve file directly. Great for performance\n#\n# Note: If you change these values, you might need to clear the cache before\n@@ -177,9 +174,7 @@ thumbnails:\ndefault_thumbnail: [ 320, 240 ]\ndefault_image: [ 1000, 750 ]\nquality: 80\n- cropping: crop\nsave_files: true\n- allow_upscale: false\n# File permissions for read/write/execute to set on folders and files that are\n# created. The exact permissions you should be setting depends on the system\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Configuration/Parser/GeneralParser.php",
"new_path": "src/Configuration/Parser/GeneralParser.php",
"diff": "@@ -84,7 +84,7 @@ class GeneralParser extends BaseParser\n'thumbnails' => [\n'default_thumbnail' => [160, 120],\n'default_image' => [1000, 750],\n- 'quality' => 75,\n+ 'quality' => 80,\n'cropping' => 'crop',\n'notfound_image' => 'bolt_assets://img/default_notfound.png',\n'error_image' => 'bolt_assets://img/default_error.png',\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Controller/ImageController.php",
"new_path": "src/Controller/ImageController.php",
"diff": "@@ -147,18 +147,19 @@ class ImageController\n'h' => is_numeric($raw[1]) ? (int) $raw[1] : 300,\n'fit' => 'default',\n'location' => 'files',\n+ 'q' => (is_numeric($raw[2]) && 0 <= $raw[2] && $raw[2] <= 100) ? (int) $raw[2] : 80\n];\n- if (isset($raw[3])) {\n- $this->parameters['fit'] = $this->parseFit($raw[2]);\n- $this->parameters['location'] = $raw[3];\n- } elseif (isset($raw[2])) {\n- $posible_fit = $this->parseFit($raw[2]);\n+ if (isset($raw[4])) {\n+ $this->parameters['fit'] = $this->parseFit($raw[3]);\n+ $this->parameters['location'] = $raw[4];\n+ } elseif (isset($raw[3])) {\n+ $posible_fit = $this->parseFit($raw[3]);\nif ($this->testFit($posible_fit)) {\n$this->parameters['fit'] = $posible_fit;\n} else {\n- $this->parameters['location'] = $raw[2];\n+ $this->parameters['location'] = $raw[3];\n}\n}\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Twig/ImageExtension.php",
"new_path": "src/Twig/ImageExtension.php",
"diff": "@@ -121,11 +121,11 @@ class ImageExtension extends AbstractExtension\n/**\n* @param ImageField|array|string $image\n*/\n- public function thumbnail($image, ?int $width = null, ?int $height = null, ?string $location = null, ?string $path = null, ?string $fit = null)\n+ public function thumbnail($image, ?int $width = null, ?int $height = null, ?string $location = null, ?string $path = null, ?string $fit = null, ?int $quality = null)\n{\n$filename = $this->getFilename($image, true);\n- return $this->thumbnailHelper->path($filename, $width, $height, $location, $path, $fit);\n+ return $this->thumbnailHelper->path($filename, $width, $height, $location, $path, $fit, $quality);\n}\n/**\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Thumbnail quality setting now actually works; Removed superfluous thumbnail settings; Brought GeneralParser's default quality config in line with the default YAML config
|
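Note: a tiny PHP sketch (input values invented) of how the `q` parameter added to ImageController above falls back to its default: only numeric segments between 0 and 100 are accepted as a quality, anything else keeps 80.

<?php
// Mirrors the ternary added in the commit above.
$quality = static function ($segment) {
    return (is_numeric($segment) && 0 <= $segment && $segment <= 100) ? (int) $segment : 80;
};

echo $quality('75'), PHP_EOL;   // 75 - valid quality segment
echo $quality('150'), PHP_EOL;  // 80 - numeric but out of range, default used
echo $quality('crop'), PHP_EOL; // 80 - not numeric (e.g. a fit keyword), default used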
95,144 |
27.01.2023 12:08:38
| -3,600 |
a9f7cbe959c36f0966f2ee594c52f508a4a7b244
|
Prepare release 5.1.21
|
[
{
"change_type": "MODIFY",
"old_path": "src/Security/AuthenticationEntryPointRedirector.php",
"new_path": "src/Security/AuthenticationEntryPointRedirector.php",
"diff": "@@ -4,11 +4,11 @@ namespace Bolt\\Security;\nuse Symfony\\Component\\HttpFoundation\\RedirectResponse;\nuse Symfony\\Component\\HttpFoundation\\Request;\n+use Symfony\\Component\\HttpFoundation\\Session\\Session;\nuse Symfony\\Component\\Routing\\Generator\\UrlGeneratorInterface;\nuse Symfony\\Component\\Security\\Core\\Exception\\AuthenticationException;\nuse Symfony\\Component\\Security\\Http\\EntryPoint\\AuthenticationEntryPointInterface;\nuse Symfony\\Contracts\\Translation\\TranslatorInterface;\n-use Symfony\\Component\\HttpFoundation\\Session\\Session;\nclass AuthenticationEntryPointRedirector implements AuthenticationEntryPointInterface\n{\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Prepare release 5.1.21
|
95,153 |
31.01.2023 09:48:55
| 0 |
95c70aed564bcdbc4aa19ab2e87732df8b2ac05d
|
Fix language switcher failing to set a default "type" value
|
[
{
"change_type": "MODIFY",
"old_path": "templates/helpers/_languageselect.html.twig",
"new_path": "templates/helpers/_languageselect.html.twig",
"diff": "@@ -13,8 +13,8 @@ The current locale is in the Request object, which is an instance of `Symfony\\Co\n#}\n-{# Make sure type is either 'list' or 'select' #}\n-{% if type|default('list') != 'list' %}{% set type = 'select' %}{% endif %}\n+{# Make sure type is either 'list' or 'select'. Default to 'select' if not defined. #}\n+{% if type|default('') != 'list' %}{% set type = 'select' %}{% endif %}\n{# Set `class` and `style` variables #}\n{% set class = 'class=\"languageselect_' ~ type ~ (class is defined ? ' ' ~ class) ~ '\"' %}\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Fix language switcher failing to set a default "type" value
|
95,144 |
03.02.2023 14:01:55
| -3,600 |
ccc1148feb426b89950133a84e323bc90a0f8bc3
|
Partial reverse of #3395, we really need those `\Iterator`s
|
[
{
"change_type": "MODIFY",
"old_path": "src/Entity/Field/CollectionField.php",
"new_path": "src/Entity/Field/CollectionField.php",
"diff": "@@ -17,7 +17,7 @@ use Doctrine\\ORM\\Mapping as ORM;\n/**\n* @ORM\\Entity\n*/\n-class CollectionField extends Field implements Excerptable, FieldInterface, FieldParentInterface, ListFieldInterface, RawPersistable\n+class CollectionField extends Field implements Excerptable, FieldInterface, FieldParentInterface, ListFieldInterface, RawPersistable, \\Iterator\n{\nuse FieldParentTrait;\nuse IterableFieldTrait;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Entity/Field/FilelistField.php",
"new_path": "src/Entity/Field/FilelistField.php",
"diff": "@@ -14,7 +14,7 @@ use Doctrine\\ORM\\Mapping as ORM;\n/**\n* @ORM\\Entity\n*/\n-class FilelistField extends Field implements FieldInterface, ListFieldInterface, RawPersistable\n+class FilelistField extends Field implements FieldInterface, ListFieldInterface, RawPersistable, \\Iterator\n{\nuse IterableFieldTrait;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Entity/Field/ImagelistField.php",
"new_path": "src/Entity/Field/ImagelistField.php",
"diff": "@@ -14,7 +14,7 @@ use Doctrine\\ORM\\Mapping as ORM;\n/**\n* @ORM\\Entity\n*/\n-class ImagelistField extends Field implements FieldInterface, ListFieldInterface, RawPersistable\n+class ImagelistField extends Field implements FieldInterface, ListFieldInterface, RawPersistable, \\Iterator\n{\nuse IterableFieldTrait;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Entity/Field/SelectField.php",
"new_path": "src/Entity/Field/SelectField.php",
"diff": "@@ -14,7 +14,7 @@ use Tightenco\\Collect\\Support\\Collection;\n/**\n* @ORM\\Entity\n*/\n-class SelectField extends Field implements FieldInterface, RawPersistable\n+class SelectField extends Field implements FieldInterface, RawPersistable, \\Iterator\n{\nuse IterableFieldTrait;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Entity/Field/SetField.php",
"new_path": "src/Entity/Field/SetField.php",
"diff": "@@ -18,7 +18,7 @@ use Tightenco\\Collect\\Support\\Collection;\n/**\n* @ORM\\Entity\n*/\n-class SetField extends Field implements Excerptable, FieldInterface, FieldParentInterface, ListFieldInterface, RawPersistable\n+class SetField extends Field implements Excerptable, FieldInterface, FieldParentInterface, ListFieldInterface, RawPersistable, \\Iterator\n{\nuse FieldParentTrait;\nuse IterableFieldTrait;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Security/AuthenticationEntryPointRedirector.php",
"new_path": "src/Security/AuthenticationEntryPointRedirector.php",
"diff": "@@ -9,7 +9,6 @@ use Symfony\\Component\\Routing\\Generator\\UrlGeneratorInterface;\nuse Symfony\\Component\\Security\\Core\\Exception\\AuthenticationException;\nuse Symfony\\Component\\Security\\Http\\EntryPoint\\AuthenticationEntryPointInterface;\nuse Symfony\\Contracts\\Translation\\TranslatorInterface;\n-use Symfony\\Component\\HttpFoundation\\Session\\Session;\nclass AuthenticationEntryPointRedirector implements AuthenticationEntryPointInterface\n{\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Partial reverse of #3395, we really need those `\Iterator`s
|
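Note: a simplified, self-contained sketch (invented names, not Bolt's actual trait or fields) of why `implements \Iterator` is re-added above even though `IterableFieldTrait` already supplies the iteration methods: PHP only routes `foreach` through `current()`/`next()`/`rewind()`/`valid()` when the class declares the interface; otherwise it silently iterates the object's public properties instead.

<?php
trait CursorTrait
{
    private array $items = ['one', 'two'];
    private int $cursor = 0;

    #[\ReturnTypeWillChange]
    public function current() { return $this->items[$this->cursor]; }
    public function key(): int { return $this->cursor; }
    public function next(): void { $this->cursor++; }
    public function rewind(): void { $this->cursor = 0; }
    public function valid(): bool { return isset($this->items[$this->cursor]); }
}

class WithoutInterface { use CursorTrait; }                   // trait only
class WithInterface implements \Iterator { use CursorTrait; } // trait + interface

foreach (new WithoutInterface() as $v) { echo $v, PHP_EOL; }  // prints nothing: no public properties
foreach (new WithInterface() as $v) { echo $v, PHP_EOL; }     // prints "one" and "two"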
95,145 |
07.02.2023 15:11:33
| -3,600 |
60fc2be8cf559b85074ff912ee8ddc597bc98a37
|
Added a thumbnail config option to set a default cropping method
|
[
{
"change_type": "MODIFY",
"old_path": "config/bolt/config.yaml",
"new_path": "config/bolt/config.yaml",
"diff": "@@ -173,6 +173,7 @@ records_per_page: 8\nthumbnails:\ndefault_thumbnail: [ 320, 240 ]\ndefault_image: [ 1000, 750 ]\n+ default_cropping: crop\nquality: 80\nsave_files: true\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Controller/ImageController.php",
"new_path": "src/Controller/ImageController.php",
"diff": "@@ -145,7 +145,7 @@ class ImageController\n$this->parameters = [\n'w' => is_numeric($raw[0]) ? (int) $raw[0] : 400,\n'h' => is_numeric($raw[1]) ? (int) $raw[1] : 300,\n- 'fit' => 'default',\n+ 'fit' => isset($raw[2]) ? $raw[2] : $this->config->get('general/thumbnails/default_cropping', 'default'),\n'location' => 'files',\n'q' => (is_numeric($raw[2]) && 0 <= $raw[2] && $raw[2] <= 100) ? (int) $raw[2] : 80\n];\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Added a thumbnail config option to set a default cropping method
|
95,174 |
08.02.2023 19:27:56
| -3,600 |
f08460dbaedc578716d1c422d421b0dd7472bd09
|
fix: IterableFieldTrait::current() return type
IterableFieldTrait::current() should return (a subtype of) mixed from 8.2 onwards, or it will give deprecation notices.
Temporarily fixing this (will break in 9.0) with #[\ReturnTypeWillChange].
|
[
{
"change_type": "MODIFY",
"old_path": "src/Entity/IterableFieldTrait.php",
"new_path": "src/Entity/IterableFieldTrait.php",
"diff": "@@ -32,6 +32,7 @@ trait IterableFieldTrait\n/**\n* @return Field|string\n*/\n+ #[\\ReturnTypeWillChange]\npublic function current()\n{\nreturn $this->fields[$this->iteratorCursor];\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
fix: IterableFieldTrait::current() return type
IterableFieldTrait::current() should return (a subtype of) mixed from 8.2 onwards, or it will give deprecation notices.
Temporarily fixing this (will break in 9.0) with #[\ReturnTypeWillChange].
|
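Note: a contrived example (class and data invented) of the deprecation silenced above. `Iterator::current()` carries a tentative `mixed` return type in recent PHP, so an implementation without a declared return type triggers a deprecation notice unless it is tagged `#[\ReturnTypeWillChange]`; as the commit notes, that tag is only a temporary measure.

<?php
class Numbers implements \Iterator
{
    private array $values = [1, 2, 3];
    private int $i = 0;

    // Option used in the commit: keep the old signature and suppress the
    // deprecation for now. The long-term fix is a real return type, e.g.
    // `public function current(): mixed`, once the minimum PHP version allows it.
    #[\ReturnTypeWillChange]
    public function current() { return $this->values[$this->i]; }

    public function key(): int { return $this->i; }
    public function next(): void { $this->i++; }
    public function rewind(): void { $this->i = 0; }
    public function valid(): bool { return isset($this->values[$this->i]); }
}

foreach (new Numbers() as $n) { echo $n, PHP_EOL; } // 1 2 3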
95,174 |
08.02.2023 19:39:37
| -3,600 |
61edd27b5631118fe2d8bfb4270df4d78570a0d8
|
fix: Using ${var} in strings is deprecated
Since PHP 8.2, PHP emits a deprecation notice for this pattern, where the dollar sign ($) is placed outside the curly braces. This is now fixed.
For more information: https://php.watch/versions/8.2/$%7Bvar%7D-string-interpolation-deprecated#dollar-outside.
|
[
{
"change_type": "MODIFY",
"old_path": "src/Configuration/PathResolver.php",
"new_path": "src/Configuration/PathResolver.php",
"diff": "@@ -75,7 +75,7 @@ class PathResolver\n*/\npublic function define(string $name, string $path): void\n{\n- if (mb_strpos($path, \"%${name}%\") !== false) {\n+ if (mb_strpos($path, \"%{$name}%\") !== false) {\nthrow new ConfigurationException('Paths cannot reference themselves.');\n}\n@@ -105,11 +105,11 @@ class PathResolver\n$alias = $match[1];\nif (! isset($this->paths[$alias])) {\n- throw new ConfigurationException(\"Failed to resolve path. Alias %${alias}% is not defined.\");\n+ throw new ConfigurationException(\"Failed to resolve path. Alias %{$alias}% is not defined.\");\n}\n// absolute if alias is at start of path\n- $absolute = mb_strpos($path, \"%${alias}%\") === 0;\n+ $absolute = mb_strpos($path, \"%{$alias}%\") === 0;\nif (isset($this->resolving[$alias])) {\nthrow new ConfigurationException('Failed to resolve path. Infinite recursion detected.');\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Security/GlobalVoter.php",
"new_path": "src/Security/GlobalVoter.php",
"diff": "@@ -58,7 +58,7 @@ class GlobalVoter extends Voter\n}\nif (! isset($this->globalPermissions[$attribute])) {\n- throw new \\DomainException(\"Global permission '${attribute}' not defined, check your security and permissions configuration.\");\n+ throw new \\DomainException(\"Global permission '{$attribute}' not defined, check your security and permissions configuration.\");\n}\n$rolesWithPermission = $this->globalPermissions[$attribute];\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Storage/Directive/OrderDirective.php",
"new_path": "src/Storage/Directive/OrderDirective.php",
"diff": "@@ -134,7 +134,7 @@ class OrderDirective\n$query->incrementIndex();\n} else {\n$this->notifications->warning('Incorrect OrderBy clause for field that does not exist',\n- \"A query with ordering on a Field or Taxonomy (`${order}`) that's not defined, will yield unexpected results. Update your `{% setcontent %}`-statement\");\n+ \"A query with ordering on a Field or Taxonomy (`{$order}`) that's not defined, will yield unexpected results. Update your `{% setcontent %}`-statement\");\n}\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Storage/QueryParameterParser.php",
"new_path": "src/Storage/QueryParameterParser.php",
"diff": "@@ -43,19 +43,19 @@ class QueryParameterParser\n$word = \"[\\p{L}\\p{N}_\\/]+\";\n// @codingStandardsIgnoreStart\n- $this->addValueMatcher(\"<\\s?(${word})\", [\n+ $this->addValueMatcher(\"<\\s?({$word})\", [\n'value' => '$1',\n'operator' => 'lt',\n]);\n- $this->addValueMatcher(\"<=\\s?(${word})\", [\n+ $this->addValueMatcher(\"<=\\s?({$word})\", [\n'value' => '$1',\n'operator' => 'lte',\n]);\n- $this->addValueMatcher(\">=\\s?(${word})\", [\n+ $this->addValueMatcher(\">=\\s?({$word})\", [\n'value' => '$1',\n'operator' => 'gte',\n]);\n- $this->addValueMatcher(\">\\s?(${word})\", [\n+ $this->addValueMatcher(\">\\s?({$word})\", [\n'value' => '$1',\n'operator' => 'gt',\n]);\n@@ -63,7 +63,7 @@ class QueryParameterParser\n'value' => '',\n'operator' => 'isNotNull',\n]);\n- $this->addValueMatcher(\"!\\s?(${word})\", [\n+ $this->addValueMatcher(\"!\\s?({$word})\", [\n'value' => '$1',\n'operator' => 'neq',\n]);\n@@ -79,11 +79,11 @@ class QueryParameterParser\n},\n'operator' => 'in',\n]);\n- $this->addValueMatcher(\"(%${word}|${word}%|%${word}%)\", [\n+ $this->addValueMatcher(\"(%{$word}|{$word}%|%{$word}%)\", [\n'value' => '$1',\n'operator' => 'like',\n]);\n- $this->addValueMatcher(\"(${word})\", [\n+ $this->addValueMatcher(\"({$word})\", [\n'value' => '$1',\n'operator' => 'eq',\n]);\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Twig/ContentExtension.php",
"new_path": "src/Twig/ContentExtension.php",
"diff": "@@ -604,7 +604,7 @@ class ContentExtension extends AbstractExtension\n$icon = str_replace('fa-', '', $icon);\n- return \"<i class='fas mr-2 fa-${icon}'></i>\";\n+ return \"<i class='fas mr-2 fa-{$icon}'></i>\";\n}\npublic function hasPath(Content $record, string $path): bool\n"
},
{
"change_type": "MODIFY",
"old_path": "src/Twig/HtmlExtension.php",
"new_path": "src/Twig/HtmlExtension.php",
"diff": "@@ -89,7 +89,7 @@ class HtmlExtension extends AbstractExtension\n*/\npublic function redirect(string $path): void\n{\n- header(\"Location: ${path}\");\n+ header(\"Location: {$path}\");\nexit();\n}\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
fix: Using ${var} in strings is deprecated
Since PHP 8.2, PHP emits a deprecation notice for this pattern, where the dollar sign ($) is placed outside the curly braces. This is now fixed.
For more information: https://php.watch/versions/8.2/$%7Bvar%7D-string-interpolation-deprecated#dollar-outside.
|
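Note: a compact stand-alone illustration (variable name arbitrary) of the string-interpolation change applied across the files above.

<?php
$name = 'theme';

// Deprecated as of PHP 8.2, dollar sign outside the braces:
//   "%${name}%"   triggers "Using ${var} in strings is deprecated"

// Equivalent spellings that stay valid, as used in the commit:
echo "%{$name}%", PHP_EOL;        // braces around the whole variable
echo '%' . $name . '%', PHP_EOL;  // or plain concatenation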
95,144 |
09.02.2023 17:25:10
| -3,600 |
d3822fea20e353f8dd2f3b4ddec7d07ceabc68b3
|
Fix PHP 8.2 compat in fix-bundles command
|
[
{
"change_type": "MODIFY",
"old_path": "bin/fix-bundles.php",
"new_path": "bin/fix-bundles.php",
"diff": "@@ -49,7 +49,7 @@ class BundleFixer\n{\nif (! array_key_exists($key, $this->bundles) && class_exists($key)) {\n$this->bundles[$key] = $value;\n- echo \" - Adding '${key}'.\\n\";\n+ echo \" - Adding '{$key}'.\\n\";\nreturn true;\n}\n@@ -61,7 +61,7 @@ class BundleFixer\n{\nif (array_key_exists($key, $this->bundles) && ! class_exists($key)) {\nunset($this->bundles[$key]);\n- echo \" - Removing '${key}'.\\n\";\n+ echo \" - Removing '{$key}'.\\n\";\nreturn true;\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "composer.json",
"new_path": "composer.json",
"diff": "\"allow-plugins\": {\n\"composer/package-versions-deprecated\": true,\n\"drupol/composer-packages\": true,\n- \"symfony/flex\": true\n+ \"symfony/flex\": true,\n+ \"php-http/discovery\": true\n}\n},\n\"extra\": {\n"
}
] |
PHP
|
MIT License
|
bolt/core
|
Fix PHP 8.2 compat in fix-bundles command
|
11,731 |
07.03.2017 15:50:22
| -7,200 |
70291626af0ca05185a49792a5944afd48ccc427
|
Fix for issue with Statement.SUCCESS_NO_INFO returned for preparedStatement.executeBatch()
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/sink/BufferedRecords.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/sink/BufferedRecords.java",
"diff": "@@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory;\nimport java.sql.Connection;\nimport java.sql.PreparedStatement;\nimport java.sql.SQLException;\n+import java.sql.Statement;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\n@@ -96,10 +97,15 @@ public class BufferedRecords {\npreparedStatementBinder.bindRecord(record);\n}\nint totalUpdateCount = 0;\n+ boolean successNoInfo = false;\nfor (int updateCount : preparedStatement.executeBatch()) {\n+ if (updateCount == Statement.SUCCESS_NO_INFO) {\n+ successNoInfo = true;\n+ continue;\n+ }\ntotalUpdateCount += updateCount;\n}\n- if (totalUpdateCount != records.size()) {\n+ if (totalUpdateCount != records.size() && !successNoInfo) {\nswitch (config.insertMode) {\ncase INSERT:\nthrow new ConnectException(String.format(\"Update count (%d) did not sum up to total number of records inserted (%d)\",\n@@ -108,6 +114,12 @@ public class BufferedRecords {\nlog.trace(\"Upserted records:{} resulting in in totalUpdateCount:{}\", records.size(), totalUpdateCount);\n}\n}\n+ if( successNoInfo ){\n+ log.info(\n+ \"Insert records:{} , but no count of the number of rows it affected is available\",\n+ records.size());\n+ }\n+\nfinal List<SinkRecord> flushedRecords = records;\nrecords = new ArrayList<>();\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/confluent/connect/jdbc/sink/BufferedRecordsTest.java",
"new_path": "src/test/java/io/confluent/connect/jdbc/sink/BufferedRecordsTest.java",
"diff": "package io.confluent.connect.jdbc.sink;\n+import io.confluent.connect.jdbc.sink.metadata.FieldsMetadata;\nimport org.apache.kafka.connect.data.Schema;\nimport org.apache.kafka.connect.data.SchemaBuilder;\nimport org.apache.kafka.connect.data.Struct;\n@@ -25,14 +26,20 @@ import org.junit.Before;\nimport org.junit.Test;\nimport java.io.IOException;\n+import java.sql.Connection;\n+import java.sql.PreparedStatement;\nimport java.sql.SQLException;\n+import java.sql.Statement;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport io.confluent.connect.jdbc.sink.dialect.DbDialect;\n+import org.mockito.Matchers;\nimport static org.junit.Assert.assertEquals;\n+import static org.mockito.Mockito.mock;\n+import static org.mockito.Mockito.when;\npublic class BufferedRecordsTest {\n@@ -92,4 +99,44 @@ public class BufferedRecordsTest {\nassertEquals(Collections.singletonList(recordA), buffer.flush());\n}\n+ @Test\n+ public void testFlushSuccessNoInfo() throws SQLException {\n+ final DbDialect dbDialect = DbDialect.fromConnectionString(sqliteHelper.sqliteUri());\n+ final HashMap<Object, Object> props = new HashMap<>();\n+ props.put(\"connection.url\", \"\");\n+ props.put(\"auto.create\", true);\n+ props.put(\"auto.evolve\", true);\n+ props.put(\"batch.size\", 1000);\n+ final JdbcSinkConfig config = new JdbcSinkConfig(props);\n+\n+ int[] batchResponse = new int[2];\n+ batchResponse[0] = Statement.SUCCESS_NO_INFO;\n+ batchResponse[1] = Statement.SUCCESS_NO_INFO;\n+\n+ final DbStructure dbStructureMock = mock(DbStructure.class);\n+ when(dbStructureMock.createOrAmendIfNecessary(Matchers.any(JdbcSinkConfig.class),\n+ Matchers.any(Connection.class), Matchers.anyString(), Matchers.any(FieldsMetadata.class))).thenReturn(\n+ true);\n+\n+ PreparedStatement preparedStatementMock = mock(PreparedStatement.class);\n+ when(preparedStatementMock.executeBatch()).thenReturn(batchResponse);\n+\n+ Connection connectionMock = mock(Connection.class);\n+ when(connectionMock.prepareStatement(Matchers.anyString())).thenReturn(preparedStatementMock);\n+\n+ final BufferedRecords buffer = new BufferedRecords(config, \"dummy\", dbDialect, dbStructureMock,\n+ connectionMock);\n+\n+ final Schema schemaA = SchemaBuilder.struct().field(\"name\", Schema.STRING_SCHEMA).build();\n+ final Struct valueA = new Struct(schemaA).put(\"name\", \"cuba\");\n+ final SinkRecord recordA = new SinkRecord(\"dummy\", 0, null, null, schemaA, valueA, 0);\n+ buffer.add(recordA);\n+\n+ final Schema schemaB = SchemaBuilder.struct().field(\"name\", Schema.STRING_SCHEMA).build();\n+ final Struct valueB = new Struct(schemaA).put(\"name\", \"cubb\");\n+ final SinkRecord recordB = new SinkRecord(\"dummy\", 0, null, null, schemaB, valueB, 0);\n+ buffer.add(recordB);\n+ buffer.flush();\n+\n+ }\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Fix for issue with Statement.SUCCESS_NO_INFO returned for preparedStatement.executeBatch()
|
11,731 |
08.03.2017 10:06:52
| -7,200 |
f09b7c23e5a3ceffbb3120e295a8f9fa0e29f144
|
Update log message to use config.insertMode
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/sink/BufferedRecords.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/sink/BufferedRecords.java",
"diff": "@@ -116,7 +116,7 @@ public class BufferedRecords {\n}\nif (successNoInfo) {\nlog.info(\n- \"Insert records:{} , but no count of the number of rows it affected is available\",\n+ config.insertMode + \" records:{} , but no count of the number of rows it affected is available\",\nrecords.size());\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Update log message to use config.insertMode
|
11,715 |
22.03.2017 17:48:01
| -7,200 |
69921cdcef521fb6bfc7e90fe7832d3ceba123e6
|
add dialect for HP Vertica
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/sink/dialect/DbDialect.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/sink/dialect/DbDialect.java",
"diff": "@@ -230,6 +230,10 @@ public abstract class DbDialect {\nreturn new HanaDialect();\n}\n+ if (url.startsWith(\"jdbc:vertica\")) {\n+ return new VerticaDialect();\n+ }\n+\nfinal String protocol = extractProtocolFromUrl(url).toLowerCase();\nswitch (protocol) {\ncase \"microsoft:sqlserver\":\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/main/java/io/confluent/connect/jdbc/sink/dialect/VerticaDialect.java",
"diff": "+/*\n+ * Copyright 2016 Confluent Inc.\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.confluent.connect.jdbc.sink.dialect;\n+\n+import org.apache.kafka.connect.data.Date;\n+import org.apache.kafka.connect.data.Decimal;\n+import org.apache.kafka.connect.data.Schema;\n+import org.apache.kafka.connect.data.Time;\n+import org.apache.kafka.connect.data.Timestamp;\n+\n+import java.util.ArrayList;\n+import java.util.Collection;\n+import java.util.Collections;\n+import java.util.List;\n+import java.util.Map;\n+\n+import io.confluent.connect.jdbc.sink.metadata.SinkRecordField;\n+\n+public class VerticaDialect extends DbDialect {\n+ public VerticaDialect() {\n+ super(\"\\\"\", \"\\\"\");\n+ }\n+\n+ @Override\n+ protected String getSqlType(String schemaName, Map<String, String> parameters, Schema.Type type) {\n+ if (schemaName != null) {\n+ switch (schemaName) {\n+ case Decimal.LOGICAL_NAME:\n+ return \"DECIMAL(18,\" + parameters.get(Decimal.SCALE_FIELD) + \")\";\n+ case Date.LOGICAL_NAME:\n+ return \"DATE\";\n+ case Time.LOGICAL_NAME:\n+ return \"TIME\";\n+ case Timestamp.LOGICAL_NAME:\n+ return \"TIMESTAMP\";\n+ }\n+ }\n+ switch (type) {\n+ case INT8:\n+ return \"INT\";\n+ case INT16:\n+ return \"INT\";\n+ case INT32:\n+ return \"INT\";\n+ case INT64:\n+ return \"INT\";\n+ case FLOAT32:\n+ return \"FLOAT\";\n+ case FLOAT64:\n+ return \"FLOAT\";\n+ case BOOLEAN:\n+ return \"BOOLEAN\";\n+ case STRING:\n+ return \"VARCHAR(1024)\";\n+ case BYTES:\n+ return \"VARBINARY(1024)\";\n+ }\n+ return super.getSqlType(schemaName, parameters, type);\n+ }\n+\n+ @Override\n+ public List<String> getAlterTable(String tableName, Collection<SinkRecordField> fields) {\n+ final List<String> queries = new ArrayList<>(fields.size());\n+ for (SinkRecordField field : fields) {\n+ queries.addAll(super.getAlterTable(tableName, Collections.singleton(field)));\n+ }\n+ return queries;\n+ }\n+}\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/java/io/confluent/connect/jdbc/sink/dialect/VerticaDialectTest.java",
"diff": "+/*\n+ * Copyright 2016 Confluent Inc.\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.confluent.connect.jdbc.sink.dialect;\n+\n+import org.apache.kafka.connect.data.Date;\n+import org.apache.kafka.connect.data.Decimal;\n+import org.apache.kafka.connect.data.Schema;\n+import org.apache.kafka.connect.data.Time;\n+import org.apache.kafka.connect.data.Timestamp;\n+import org.junit.Test;\n+\n+public class VerticaDialectTest extends BaseDialectTest {\n+\n+ public VerticaDialectTest() {\n+ super(new VerticaDialect());\n+ }\n+\n+ @Test\n+ public void dataTypeMappings() {\n+ verifyDataTypeMapping(\"INT\", Schema.INT8_SCHEMA);\n+ verifyDataTypeMapping(\"INT\", Schema.INT16_SCHEMA);\n+ verifyDataTypeMapping(\"INT\", Schema.INT32_SCHEMA);\n+ verifyDataTypeMapping(\"INT\", Schema.INT64_SCHEMA);\n+ verifyDataTypeMapping(\"FLOAT\", Schema.FLOAT32_SCHEMA);\n+ verifyDataTypeMapping(\"FLOAT\", Schema.FLOAT64_SCHEMA);\n+ verifyDataTypeMapping(\"BOOLEAN\", Schema.BOOLEAN_SCHEMA);\n+ verifyDataTypeMapping(\"VARCHAR(1024)\", Schema.STRING_SCHEMA);\n+ verifyDataTypeMapping(\"VARBINARY(1024)\", Schema.BYTES_SCHEMA);\n+ verifyDataTypeMapping(\"DECIMAL(18,0)\", Decimal.schema(0));\n+ verifyDataTypeMapping(\"DECIMAL(18,4)\", Decimal.schema(4));\n+ verifyDataTypeMapping(\"DATE\", Date.SCHEMA);\n+ verifyDataTypeMapping(\"TIME\", Time.SCHEMA);\n+ verifyDataTypeMapping(\"TIMESTAMP\", Timestamp.SCHEMA);\n+ }\n+\n+ @Test\n+ public void createOneColNoPk() {\n+ verifyCreateOneColNoPk(\n+ \"CREATE TABLE \\\"test\\\" (\" + System.lineSeparator() +\n+ \"\\\"col1\\\" INT NOT NULL)\");\n+ }\n+\n+ @Test\n+ public void createOneColOnePk() {\n+ verifyCreateOneColOnePk(\n+ \"CREATE TABLE \\\"test\\\" (\" + System.lineSeparator() +\n+ \"\\\"pk1\\\" INT NOT NULL,\" + System.lineSeparator() +\n+ \"PRIMARY KEY(\\\"pk1\\\"))\");\n+ }\n+\n+ @Test\n+ public void createThreeColTwoPk() {\n+ verifyCreateThreeColTwoPk(\n+ \"CREATE TABLE \\\"test\\\" (\" + System.lineSeparator() +\n+ \"\\\"pk1\\\" INT NOT NULL,\" + System.lineSeparator() +\n+ \"\\\"pk2\\\" INT NOT NULL,\" + System.lineSeparator() +\n+ \"\\\"col1\\\" INT NOT NULL,\" + System.lineSeparator() +\n+ \"PRIMARY KEY(\\\"pk1\\\",\\\"pk2\\\"))\"\n+ );\n+ }\n+\n+ @Test\n+ public void alterAddOneCol() {\n+ verifyAlterAddOneCol(\n+ \"ALTER TABLE \\\"test\\\" ADD \\\"newcol1\\\" INT NULL\"\n+ );\n+ }\n+\n+ @Test\n+ public void alterAddTwoCol() {\n+ verifyAlterAddTwoCols(\n+ \"ALTER TABLE \\\"test\\\" ADD \\\"newcol1\\\" INT NULL\",\n+ \"ALTER TABLE \\\"test\\\" ADD \\\"newcol2\\\" INT DEFAULT 42\"\n+ );\n+ }\n+}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
add dialect for HP Vertica
|
11,715 |
27.03.2017 14:06:14
| -10,800 |
1dc97f1d92706d86a364baace916a1cac688eb31
|
document Vertica type mapping
|
[
{
"change_type": "MODIFY",
"old_path": "docs/sink_connector.rst",
"new_path": "docs/sink_connector.rst",
"diff": "@@ -123,35 +123,35 @@ For both auto-creation and auto-evolution, the nullability of a column is based\nand default values are also specified based on the default value of the corresponding field if applicable.\nWe use the following mapping from Connect schema types to database-specific types:\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| Schema Type | MySQL | Oracle | PostgreSQL | SQLite | SQL Server |\n-+=============+=================+=================+==================+=========+================+\n-| INT8 | TINYINT | NUMBER(3,0) | SMALLINT | NUMERIC | TINYINT |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| INT16 | SMALLINT | NUMBER(5,0) | SMALLINT | NUMERIC | SMALLINT |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| INT32 | INT | NUMBER(10,0) | INT | NUMERIC | INT |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| INT64 | BIGINT | NUMBER(19,0) | BIGINT | NUMERIC | BIGINT |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| FLOAT32 | FLOAT | BINARY_FLOAT | REAL | REAL | REAL |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| FLOAT64 | DOUBLE | BINARY_DOUBLE | DOUBLE PRECISION | REAL | FLOAT |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| BOOLEAN | TINYINT | NUMBER(1,0) | BOOLEAN | NUMERIC | BIT |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| STRING | VARCHAR(256) | NCLOB | TEXT | TEXT | VARCHAR(MAX) |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| BYTES | VARBINARY(1024) | BLOB | BYTEA | BLOB | VARBINARY(MAX) |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| 'Decimal' | DECIMAL(65,s) | NUMBER(*,s) | DECIMAL | NUMERIC | DECIMAL(38,s) |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| 'Date' | DATE | DATE | DATE | NUMERIC | DATE |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| 'Time' | TIME(3) | DATE | TIME | NUMERIC | TIME |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n-| 'Timestamp' | TIMESTAMP(3) | TIMESTAMP | TIMESTAMP | NUMERIC | DATETIME2 |\n-+-------------+-----------------+-----------------+------------------+---------+----------------+\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| Schema Type | MySQL | Oracle | PostgreSQL | SQLite | SQL Server | Vertica |\n++=============+=================+=================+==================+=========+================+=================+\n+| INT8 | TINYINT | NUMBER(3,0) | SMALLINT | NUMERIC | TINYINT | INT |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| INT16 | SMALLINT | NUMBER(5,0) | SMALLINT | NUMERIC | SMALLINT | INT |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| INT32 | INT | NUMBER(10,0) | INT | NUMERIC | INT | INT 
|\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| INT64 | BIGINT | NUMBER(19,0) | BIGINT | NUMERIC | BIGINT | INT |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| FLOAT32 | FLOAT | BINARY_FLOAT | REAL | REAL | REAL | FLOAT |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| FLOAT64 | DOUBLE | BINARY_DOUBLE | DOUBLE PRECISION | REAL | FLOAT | FLOAT |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| BOOLEAN | TINYINT | NUMBER(1,0) | BOOLEAN | NUMERIC | BIT | BOOLEAN |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| STRING | VARCHAR(256) | NCLOB | TEXT | TEXT | VARCHAR(MAX) | VARCHAR(1024) |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| BYTES | VARBINARY(1024) | BLOB | BYTEA | BLOB | VARBINARY(MAX) | VARBINARY(1024) |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| 'Decimal' | DECIMAL(65,s) | NUMBER(*,s) | DECIMAL | NUMERIC | DECIMAL(38,s) | DECIMAL(18,s) |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| 'Date' | DATE | DATE | DATE | NUMERIC | DATE | DATE |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| 'Time' | TIME(3) | DATE | TIME | NUMERIC | TIME | TIME |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\n+| 'Timestamp' | TIMESTAMP(3) | TIMESTAMP | TIMESTAMP | NUMERIC | DATETIME2 | TIMESTAMP |\n++-------------+-----------------+-----------------+------------------+---------+----------------+-----------------+\nAuto-creation or auto-evolution is not supported for databases not mentioned here.\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
document Vertica type mapping
|
11,715 |
10.04.2017 09:29:48
| -10,800 |
91c844fd3d5d0018ff9e3756fc5e4409b81a9003
|
add vertica dialect detection test
|
[
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/confluent/connect/jdbc/sink/dialect/DbDialectTest.java",
"new_path": "src/test/java/io/confluent/connect/jdbc/sink/dialect/DbDialectTest.java",
"diff": "@@ -133,4 +133,9 @@ public class DbDialectTest {\nassertEquals(GenericDialect.class, DbDialect.fromConnectionString(\"jdbc:other://host:42\").getClass());\n}\n+ @Test\n+ public void detectVertica() {\n+ assertEquals(VerticaDialect.class, DbDialect.fromConnectionString(\"jdbc:vertica://host:5433/db\").getClass());\n+ }\n+\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
add vertica dialect detection test
|
11,717 |
25.03.2017 15:37:48
| 18,000 |
0686804ad0a67459de724ebd53ab6b007035b833
|
Issue Modified the JdbcSourceConnector to support a configurable number of retries and backoff, along with tests for validating the behavior.
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/JdbcSourceConnector.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/JdbcSourceConnector.java",
"diff": "@@ -75,7 +75,10 @@ public class JdbcSourceConnector extends SourceConnector {\nfinal String dbUrl = config.getString(JdbcSourceConnectorConfig.CONNECTION_URL_CONFIG);\nfinal String dbUser = config.getString(JdbcSourceConnectorConfig.CONNECTION_USER_CONFIG);\nfinal Password dbPassword = config.getPassword(JdbcSourceConnectorConfig.CONNECTION_PASSWORD_CONFIG);\n- cachedConnectionProvider = new CachedConnectionProvider(dbUrl, dbUser, dbPassword == null ? null : dbPassword.value());\n+ final int maxConnectionAttempts = config.getInt(JdbcSourceConnectorConfig.CONNECTION_ATTEMPTS_CONFIG);\n+ final long connectionRetryBackoff = config.getLong(JdbcSourceConnectorConfig.CONNECTION_BACKOFF_CONFIG);\n+ cachedConnectionProvider = new CachedConnectionProvider(dbUrl, dbUser,\n+ dbPassword == null ? null : dbPassword.value(), maxConnectionAttempts, connectionRetryBackoff);\n// Initial connection attempt\ncachedConnectionProvider.getValidConnection();\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/source/JdbcSourceConnectorConfig.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/source/JdbcSourceConnectorConfig.java",
"diff": "@@ -48,6 +48,16 @@ public class JdbcSourceConnectorConfig extends AbstractConfig {\nprivate static final String CONNECTION_PASSWORD_DOC = \"JDBC connection password.\";\nprivate static final String CONNECTION_PASSWORD_DISPLAY = \"JDBC Password\";\n+ public static final String CONNECTION_ATTEMPTS_CONFIG = \"connection.attempts\";\n+ private static final String CONNECTION_ATTEMPTS_DOC = \"Maximum number of attempts to retrieve a valid JDBC connection.\";\n+ private static final String CONNECTION_ATTEMPTS_DISPLAY = \"JDBC connection attempts\";\n+ public static final int CONNECTION_ATTEMPTS_DEFAULT = 3;\n+\n+ public static final String CONNECTION_BACKOFF_CONFIG = \"connection.backoff.ms\";\n+ private static final String CONNECTION_BACKOFF_DOC = \"Backoff time in milliseconds between connection attemps.\";\n+ private static final String CONNECTION_BACKOFF_DISPLAY = \"JDBC connection backoff in milliseconds\";\n+ public static final long CONNECTION_BACKOFF_DEFAULT = 10000L;\n+\npublic static final String POLL_INTERVAL_MS_CONFIG = \"poll.interval.ms\";\nprivate static final String POLL_INTERVAL_MS_DOC = \"Frequency in ms to poll for new data in \"\n+ \"each table.\";\n@@ -195,6 +205,8 @@ public class JdbcSourceConnectorConfig extends AbstractConfig {\n.define(CONNECTION_URL_CONFIG, Type.STRING, Importance.HIGH, CONNECTION_URL_DOC, DATABASE_GROUP, 1, Width.LONG, CONNECTION_URL_DISPLAY, Arrays.asList(TABLE_WHITELIST_CONFIG, TABLE_BLACKLIST_CONFIG))\n.define(CONNECTION_USER_CONFIG, Type.STRING, null, Importance.HIGH, CONNECTION_USER_DOC, DATABASE_GROUP, 2, Width.LONG, CONNECTION_USER_DISPLAY)\n.define(CONNECTION_PASSWORD_CONFIG, Type.PASSWORD, null, Importance.HIGH, CONNECTION_PASSWORD_DOC, DATABASE_GROUP, 3, Width.SHORT, CONNECTION_PASSWORD_DISPLAY)\n+ .define(CONNECTION_ATTEMPTS_CONFIG, Type.INT, CONNECTION_ATTEMPTS_DEFAULT, Importance.LOW, CONNECTION_ATTEMPTS_DOC, DATABASE_GROUP, 4, Width.SHORT, CONNECTION_ATTEMPTS_DISPLAY)\n+ .define(CONNECTION_BACKOFF_CONFIG, Type.LONG, CONNECTION_BACKOFF_DEFAULT, Importance.LOW, CONNECTION_BACKOFF_DOC, DATABASE_GROUP, 5, Width.SHORT, CONNECTION_BACKOFF_DISPLAY)\n.define(TABLE_WHITELIST_CONFIG, Type.LIST, TABLE_WHITELIST_DEFAULT, Importance.MEDIUM, TABLE_WHITELIST_DOC, DATABASE_GROUP, 4, Width.LONG, TABLE_WHITELIST_DISPLAY,\nTABLE_RECOMMENDER)\n.define(TABLE_BLACKLIST_CONFIG, Type.LIST, TABLE_BLACKLIST_DEFAULT, Importance.MEDIUM, TABLE_BLACKLIST_DOC, DATABASE_GROUP, 5, Width.LONG, TABLE_BLACKLIST_DISPLAY,\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/source/JdbcSourceTask.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/source/JdbcSourceTask.java",
"diff": "@@ -77,7 +77,10 @@ public class JdbcSourceTask extends SourceTask {\nfinal String dbUrl = config.getString(JdbcSourceConnectorConfig.CONNECTION_URL_CONFIG);\nfinal String dbUser = config.getString(JdbcSourceConnectorConfig.CONNECTION_USER_CONFIG);\nfinal Password dbPassword = config.getPassword(JdbcSourceConnectorConfig.CONNECTION_PASSWORD_CONFIG);\n- cachedConnectionProvider = new CachedConnectionProvider(dbUrl, dbUser, dbPassword == null ? null : dbPassword.value());\n+ final int maxConnectionAttempts = config.getInt(JdbcSourceConnectorConfig.CONNECTION_ATTEMPTS_CONFIG);\n+ final long connectionRetryBackoff = config.getLong(JdbcSourceConnectorConfig.CONNECTION_BACKOFF_CONFIG);\n+ cachedConnectionProvider = new CachedConnectionProvider(dbUrl, dbUser,\n+ dbPassword == null ? null : dbPassword.value(), maxConnectionAttempts, connectionRetryBackoff);\nList<String> tables = config.getList(JdbcSourceTaskConfig.TABLES_CONFIG);\nString query = config.getString(JdbcSourceTaskConfig.QUERY_CONFIG);\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/util/CachedConnectionProvider.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/util/CachedConnectionProvider.java",
"diff": "package io.confluent.connect.jdbc.util;\n+import io.confluent.connect.jdbc.source.JdbcSourceConnectorConfig;\nimport org.apache.kafka.connect.errors.ConnectException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n@@ -33,6 +34,8 @@ public class CachedConnectionProvider {\nprivate final String url;\nprivate final String username;\nprivate final String password;\n+ private final int maxConnectionAttempts;\n+ private final long connectionRetryBackoff;\nprivate Connection connection;\n@@ -41,9 +44,17 @@ public class CachedConnectionProvider {\n}\npublic CachedConnectionProvider(String url, String username, String password) {\n+ this(url, username, password, JdbcSourceConnectorConfig.CONNECTION_ATTEMPTS_DEFAULT,\n+ JdbcSourceConnectorConfig.CONNECTION_BACKOFF_DEFAULT);\n+ }\n+\n+ public CachedConnectionProvider(String url, String username, String password, int maxConnectionAttempts,\n+ long connectionRetryBackoff) {\nthis.url = url;\nthis.username = username;\nthis.password = password;\n+ this.maxConnectionAttempts = maxConnectionAttempts;\n+ this.connectionRetryBackoff = connectionRetryBackoff;\n}\npublic synchronized Connection getValidConnection() {\n@@ -62,18 +73,38 @@ public class CachedConnectionProvider {\n}\nprivate void newConnection() throws SQLException {\n+ int attempts = 0;\n+ while (attempts < maxConnectionAttempts) {\n+ try {\nlog.debug(\"Attempting to connect to {}\", url);\nconnection = DriverManager.getConnection(url, username, password);\nonConnect(connection);\n+ return;\n+ } catch (SQLException sqle) {\n+ attempts++;\n+ if (attempts < maxConnectionAttempts) {\n+ log.info(\"Unable to connect to database on attempt {}/{}. Will retry in {} ms.\", attempts,\n+ maxConnectionAttempts, connectionRetryBackoff, sqle);\n+ try {\n+ Thread.sleep(connectionRetryBackoff);\n+ } catch (InterruptedException e) {\n+ // this is ok because just woke up early\n+ }\n+ } else {\n+ throw sqle;\n+ }\n+ }\n+ }\n}\npublic synchronized void closeQuietly() {\nif (connection != null) {\ntry {\nconnection.close();\n- connection = null;\n} catch (SQLException sqle) {\nlog.warn(\"Ignoring error closing connection\", sqle);\n+ } finally {\n+ connection = null;\n}\n}\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/confluent/connect/jdbc/JdbcSourceConnectorTest.java",
"new_path": "src/test/java/io/confluent/connect/jdbc/JdbcSourceConnectorTest.java",
"diff": "@@ -98,7 +98,8 @@ public class JdbcSourceConnectorTest {\n@Test\npublic void testStartStop() throws Exception {\nCachedConnectionProvider mockCachedConnectionProvider = PowerMock.createMock(CachedConnectionProvider.class);\n- PowerMock.expectNew(CachedConnectionProvider.class, db.getUrl(), null, null).andReturn(mockCachedConnectionProvider);\n+ PowerMock.expectNew(CachedConnectionProvider.class, db.getUrl(), null, null,\n+ JdbcSourceConnectorConfig.CONNECTION_ATTEMPTS_DEFAULT, JdbcSourceConnectorConfig.CONNECTION_BACKOFF_DEFAULT).andReturn(mockCachedConnectionProvider);\n// Should request a connection, then should close it on stop()\nConnection conn = PowerMock.createMock(Connection.class);\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/confluent/connect/jdbc/source/JdbcSourceTaskLifecycleTest.java",
"new_path": "src/test/java/io/confluent/connect/jdbc/source/JdbcSourceTaskLifecycleTest.java",
"diff": "@@ -59,7 +59,8 @@ public class JdbcSourceTaskLifecycleTest extends JdbcSourceTaskTestBase {\npublic void testStartStop() throws Exception {\n// Minimal start/stop functionality\nCachedConnectionProvider mockCachedConnectionProvider = PowerMock.createMock(CachedConnectionProvider.class);\n- PowerMock.expectNew(CachedConnectionProvider.class, db.getUrl(), null, null).andReturn(mockCachedConnectionProvider);\n+ PowerMock.expectNew(CachedConnectionProvider.class, db.getUrl(), null, null,\n+ JdbcSourceConnectorConfig.CONNECTION_ATTEMPTS_DEFAULT, JdbcSourceConnectorConfig.CONNECTION_BACKOFF_DEFAULT).andReturn(mockCachedConnectionProvider);\n// Should request a connection, then should close it on stop()\nConnection conn = PowerMock.createMock(Connection.class);\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/confluent/connect/jdbc/source/TableMonitorThreadTest.java",
"new_path": "src/test/java/io/confluent/connect/jdbc/source/TableMonitorThreadTest.java",
"diff": "package io.confluent.connect.jdbc.source;\nimport org.apache.kafka.connect.connector.ConnectorContext;\n+import org.apache.kafka.connect.errors.ConnectException;\nimport org.easymock.EasyMock;\nimport org.easymock.IAnswer;\nimport org.junit.After;\n@@ -29,6 +30,7 @@ import org.powermock.core.classloader.annotations.PowerMockIgnore;\nimport org.powermock.core.classloader.annotations.PrepareForTest;\nimport org.powermock.modules.junit4.PowerMockRunner;\n+import java.sql.Connection;\nimport java.sql.SQLException;\nimport java.util.Arrays;\nimport java.util.Collections;\n@@ -201,4 +203,30 @@ public class TableMonitorThreadTest {\nPowerMock.verifyAll();\n}\n+\n+ @Test\n+ public void testInvalidConnection() throws Exception {\n+ CachedConnectionProvider provider = EasyMock.createMock(CachedConnectionProvider.class);\n+ tableMonitorThread = new TableMonitorThread(provider, context, null, POLL_INTERVAL, null, null, VIEW_TABLE_TYPES);\n+\n+ EasyMock.expect(provider.getValidConnection()).andAnswer(new IAnswer<Connection>() {\n+ @Override\n+ public Connection answer() throws Throwable {\n+ tableMonitorThread.shutdown();\n+ throw new ConnectException(\"Simulated error with the db.\");\n+ }\n+ });\n+ provider.closeQuietly();\n+ EasyMock.expectLastCall().anyTimes();\n+\n+ EasyMock.replay(provider);\n+\n+ tableMonitorThread.start();\n+ tableMonitorThread.join();\n+\n+ EasyMock.verify(provider);\n+ }\n+\n+\n+\n}\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/java/io/confluent/connect/jdbc/util/CachedConnectionProviderTest.java",
"diff": "+package io.confluent.connect.jdbc.util;\n+\n+import org.apache.kafka.connect.errors.ConnectException;\n+import org.easymock.EasyMock;\n+import org.junit.Before;\n+import org.junit.Test;\n+import org.junit.runner.RunWith;\n+import org.powermock.api.easymock.PowerMock;\n+import org.powermock.core.classloader.annotations.PowerMockIgnore;\n+import org.powermock.core.classloader.annotations.PrepareForTest;\n+import org.powermock.modules.junit4.PowerMockRunner;\n+\n+import java.sql.Connection;\n+import java.sql.DriverManager;\n+import java.sql.SQLException;\n+\n+import static org.easymock.EasyMock.anyObject;\n+import static org.junit.Assert.assertNotNull;\n+\n+@RunWith(PowerMockRunner.class)\n+@PrepareForTest({CachedConnectionProvider.class, DriverManager.class})\n+@PowerMockIgnore(\"javax.management.*\")\n+public class CachedConnectionProviderTest {\n+\n+\n+ @Before\n+ public void setup(){\n+ PowerMock.mockStatic(DriverManager.class);\n+ }\n+\n+ @Test\n+ public void retryTillFailure() throws SQLException {\n+ int retries = 15;\n+\n+ CachedConnectionProvider connectionProvider = new CachedConnectionProvider(\"url\", \"user\", \"password\",\n+ retries, 100L);\n+\n+ EasyMock.expect(DriverManager.getConnection(anyObject(String.class), anyObject(String.class), anyObject(String.class)))\n+ .andThrow(new SQLException()).times(retries);\n+\n+ PowerMock.replayAll();\n+\n+ try {\n+ connectionProvider.getValidConnection();\n+ }catch(ConnectException ce){\n+ assertNotNull(ce);\n+ }\n+\n+ PowerMock.verifyAll();\n+ }\n+\n+\n+@Test\n+ public void retryTillConnect() throws SQLException {\n+\n+ Connection connection = EasyMock.createMock(Connection.class);\n+ int retries = 15;\n+\n+ CachedConnectionProvider connectionProvider = new CachedConnectionProvider(\"url\", \"user\", \"password\",\n+ retries, 100L);\n+\n+ EasyMock.expect(DriverManager.getConnection(anyObject(String.class), anyObject(String.class), anyObject(String.class)))\n+ .andThrow(new SQLException()).times(retries-1).andReturn(connection);\n+\n+ PowerMock.replayAll();\n+\n+ assertNotNull(connectionProvider.getValidConnection());\n+\n+ PowerMock.verifyAll();\n+ }\n+\n+}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Issue #198: Modified the JdbcSourceConnector to support a configurable number of retries and backoff, along with tests for validating the behavior.
|
11,713 |
19.04.2017 16:46:01
| -7,200 |
2870f7a2264e9398e2488d21886ff7d7c53dea58
|
Fixed issue - get current timestamp on DB2 UDB for AS/400
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/util/JdbcUtils.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/util/JdbcUtils.java",
"diff": "@@ -189,7 +189,7 @@ public class JdbcUtils {\nString dbProduct = conn.getMetaData().getDatabaseProductName();\nif (\"Oracle\".equals(dbProduct))\nquery = \"select CURRENT_TIMESTAMP from dual\";\n- else if (\"Apache Derby\".equals(dbProduct))\n+ else if (\"Apache Derby\".equals(dbProduct) || \"DB2 UDB for AS/400\".equals(dbProduct))\nquery = \"values(CURRENT_TIMESTAMP)\";\nelse\nquery = \"select CURRENT_TIMESTAMP;\";\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Fixed issue #211 - get current timestamp on DB2 UDB for AS/400
|
11,711 |
11.11.2017 17:03:31
| -3,600 |
818898aadb7ca9f05b22ce2769d0996353ff47d0
|
Broader fix for issue
Target all product names starting with "DB2" regardless of platform.
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/util/JdbcUtils.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/util/JdbcUtils.java",
"diff": "@@ -243,7 +243,7 @@ public class JdbcUtils {\nString dbProduct = conn.getMetaData().getDatabaseProductName();\nif (\"Oracle\".equals(dbProduct)) {\nquery = \"select CURRENT_TIMESTAMP from dual\";\n- } else if (\"Apache Derby\".equals(dbProduct) || \"DB2 UDB for AS/400\".equals(dbProduct)) {\n+ } else if (\"Apache Derby\".equals(dbProduct) || dbProduct.startsWith(\"DB2\")) {\nquery = \"values(CURRENT_TIMESTAMP)\";\n} else {\nquery = \"select CURRENT_TIMESTAMP;\";\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Broader fix for issue #211
Target all product names starting with "DB2" regardless of platform.
|
11,711 |
12.11.2017 09:52:38
| -3,600 |
4ff6d8b0f6fde8eb862ce04f0b5a66ed6c1b72d0
|
Added suggested null check.
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/util/JdbcUtils.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/util/JdbcUtils.java",
"diff": "@@ -243,7 +243,7 @@ public class JdbcUtils {\nString dbProduct = conn.getMetaData().getDatabaseProductName();\nif (\"Oracle\".equals(dbProduct)) {\nquery = \"select CURRENT_TIMESTAMP from dual\";\n- } else if (\"Apache Derby\".equals(dbProduct) || dbProduct.startsWith(\"DB2\")) {\n+ } else if (\"Apache Derby\".equals(dbProduct) || (dbProduct != null && dbProduct.startsWith(\"DB2\")) ) {\nquery = \"values(CURRENT_TIMESTAMP)\";\n} else {\nquery = \"select CURRENT_TIMESTAMP;\";\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Added suggested null check.
|
11,722 |
28.11.2017 11:21:34
| 28,800 |
1c2c3961d2753fde41105cd3bd05fb285ecd6084
|
Log out all messages
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/sink/JdbcSinkTask.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/sink/JdbcSinkTask.java",
"diff": "@@ -74,14 +74,18 @@ public class JdbcSinkTask extends SinkTask {\nremainingRetries,\nsqle\n);\n+ String sqleAllMessages = \"\";\n+ for (Throwable e : sqle) {\n+ sqleAllMessages += e + System.lineSeparator();\n+ }\nif (remainingRetries == 0) {\n- throw new ConnectException(sqle);\n+ throw new ConnectException(sqleAllMessages);\n} else {\nwriter.closeQuietly();\ninitWriter();\nremainingRetries--;\ncontext.timeout(config.retryBackoffMs);\n- throw new RetriableException(sqle);\n+ throw new RetriableException(sqleAllMessages);\n}\n}\nremainingRetries = config.maxRetries;\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Log out all messages
|
11,722 |
29.11.2017 13:28:00
| 28,800 |
dab4814e498caad58ebe412622db17646329e7bb
|
Consistent SQLException
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/sink/JdbcSinkTask.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/sink/JdbcSinkTask.java",
"diff": "@@ -79,13 +79,13 @@ public class JdbcSinkTask extends SinkTask {\nsqleAllMessages += e + System.lineSeparator();\n}\nif (remainingRetries == 0) {\n- throw new ConnectException(sqleAllMessages);\n+ throw new ConnectException(new SQLException(sqleAllMessages));\n} else {\nwriter.closeQuietly();\ninitWriter();\nremainingRetries--;\ncontext.timeout(config.retryBackoffMs);\n- throw new RetriableException(sqleAllMessages);\n+ throw new RetriableException(new SQLException(sqleAllMessages));\n}\n}\nremainingRetries = config.maxRetries;\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Consistent SQLException
|
11,743 |
31.08.2018 17:11:40
| -7,200 |
3f5744e0d4c105f2d6e049d71bfbea4b7e19a8dd
|
Fix values(CURRENT_TIMESTAMP) not working on DB2 because of an invalid SQL statement. Replace with a SELECT statement of CURRENT_TIMESTAMP(12)
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/dialect/Db2DatabaseDialect.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/dialect/Db2DatabaseDialect.java",
"diff": "@@ -62,7 +62,7 @@ public class Db2DatabaseDialect extends GenericDatabaseDialect {\n@Override\nprotected String currentTimestampDatabaseQuery() {\n- return \"values(CURRENT_TIMESTAMP)\";\n+ return \"SELECT CURRENT_TIMESTAMP(12) FROM SYSIBM.SYSDUMMY1;\";\n}\n@Override\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Fix values(CURRENT_TIMESTAMP) not working on DB2 because invalid sql-statement. Replace with select-statement of CURRENT_TIMESTAMP(12)
|
11,726 |
22.10.2018 23:25:53
| -7,200 |
b0bb5fe59931cb30d47157aad89e02a51bbc8b54
|
added validator for the db.timezone configuration option
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/sink/JdbcSinkConfig.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/sink/JdbcSinkConfig.java",
"diff": "package io.confluent.connect.jdbc.sink;\n-import io.confluent.connect.jdbc.source.JdbcSourceConnectorConfig;\n-import io.confluent.connect.jdbc.util.DatabaseDialectRecommender;\n-import io.confluent.connect.jdbc.util.StringUtils;\nimport java.time.ZoneId;\n-import java.time.ZoneOffset;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\n@@ -29,6 +25,12 @@ import java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TimeZone;\n+\n+import io.confluent.connect.jdbc.source.JdbcSourceConnectorConfig;\n+import io.confluent.connect.jdbc.util.DatabaseDialectRecommender;\n+import io.confluent.connect.jdbc.util.StringUtils;\n+import io.confluent.connect.jdbc.util.TimeZoneValidator;\n+\nimport org.apache.kafka.common.config.AbstractConfig;\nimport org.apache.kafka.common.config.ConfigDef;\nimport org.apache.kafka.common.config.ConfigException;\n@@ -191,9 +193,9 @@ public class JdbcSinkConfig extends AbstractConfig {\npublic static final String DB_TIMEZONE_CONFIG = \"db.timezone\";\npublic static final String DB_TIMEZONE_DEFAULT = \"UTC\";\nprivate static final String DB_TIMEZONE_CONFIG_DOC =\n- \"Alternative time zone of the database, to be used by JDBC driver instead of UTC (default)\"\n- + \"when instantiating PreparedStatements.\";\n- private static final String DB_TIMEZONE_CONFIG_DISPLAY = \"DB time zone\";\n+ \"Name of the timezone that should be used in the connector when \"\n+ + \"specifying time-based criteria. Defaults to UTC.\";\n+ private static final String DB_TIMEZONE_CONFIG_DISPLAY = \"DB Time Zone\";\npublic static final ConfigDef CONFIG_DEF = new ConfigDef()\n// Connection\n@@ -315,7 +317,8 @@ public class JdbcSinkConfig extends AbstractConfig {\nDB_TIMEZONE_CONFIG,\nConfigDef.Type.STRING,\nDB_TIMEZONE_DEFAULT,\n- ConfigDef.Importance.HIGH,\n+ TimeZoneValidator.INSTANCE,\n+ ConfigDef.Importance.MEDIUM,\nDB_TIMEZONE_CONFIG_DOC,\nDATAMAPPING_GROUP,\n5,\n@@ -402,8 +405,7 @@ public class JdbcSinkConfig extends AbstractConfig {\ndialectName = getString(DIALECT_NAME_CONFIG);\nfieldsWhitelist = new HashSet<>(getList(FIELDS_WHITELIST));\nString dbTimeZone = getString(DB_TIMEZONE_CONFIG);\n- timeZone = dbTimeZone == null ? TimeZone.getTimeZone(ZoneOffset.UTC) :\n- TimeZone.getTimeZone(ZoneId.of(dbTimeZone));\n+ timeZone = TimeZone.getTimeZone(ZoneId.of(dbTimeZone));\n}\nprivate String getPasswordValue(String key) {\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/source/JdbcSourceConnectorConfig.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/source/JdbcSourceConnectorConfig.java",
"diff": "package io.confluent.connect.jdbc.source;\n-import java.time.ZoneId;\n-import java.time.ZoneOffset;\n-import java.util.TimeZone;\n-import org.apache.kafka.common.config.AbstractConfig;\n-import org.apache.kafka.common.config.ConfigDef;\n-import org.apache.kafka.common.config.ConfigDef.Importance;\n-import org.apache.kafka.common.config.ConfigDef.Recommender;\n-import org.apache.kafka.common.config.ConfigDef.Type;\n-import org.apache.kafka.common.config.ConfigDef.Width;\n-import org.apache.kafka.common.config.ConfigException;\n-import org.apache.kafka.common.utils.Time;\n-import org.slf4j.Logger;\n-import org.slf4j.LoggerFactory;\n-\nimport java.sql.Connection;\nimport java.sql.SQLException;\n+import java.time.ZoneId;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\n@@ -41,6 +28,7 @@ import java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.Set;\n+import java.util.TimeZone;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.atomic.AtomicReference;\n@@ -48,6 +36,18 @@ import io.confluent.connect.jdbc.dialect.DatabaseDialect;\nimport io.confluent.connect.jdbc.dialect.DatabaseDialects;\nimport io.confluent.connect.jdbc.util.DatabaseDialectRecommender;\nimport io.confluent.connect.jdbc.util.TableId;\n+import io.confluent.connect.jdbc.util.TimeZoneValidator;\n+\n+import org.apache.kafka.common.config.AbstractConfig;\n+import org.apache.kafka.common.config.ConfigDef;\n+import org.apache.kafka.common.config.ConfigDef.Importance;\n+import org.apache.kafka.common.config.ConfigDef.Recommender;\n+import org.apache.kafka.common.config.ConfigDef.Type;\n+import org.apache.kafka.common.config.ConfigDef.Width;\n+import org.apache.kafka.common.config.ConfigException;\n+import org.apache.kafka.common.utils.Time;\n+import org.slf4j.Logger;\n+import org.slf4j.LoggerFactory;\npublic class JdbcSourceConnectorConfig extends AbstractConfig {\n@@ -553,7 +553,8 @@ public class JdbcSourceConnectorConfig extends AbstractConfig {\nDB_TIMEZONE_CONFIG,\nType.STRING,\nDB_TIMEZONE_DEFAULT,\n- Importance.HIGH,\n+ TimeZoneValidator.INSTANCE,\n+ Importance.MEDIUM,\nDB_TIMEZONE_CONFIG_DOC,\nCONNECTOR_GROUP,\n++orderInGroup,\n@@ -783,8 +784,7 @@ public class JdbcSourceConnectorConfig extends AbstractConfig {\npublic TimeZone timeZone() {\nString dbTimeZone = getString(JdbcSourceTaskConfig.DB_TIMEZONE_CONFIG);\n- return dbTimeZone == null ? TimeZone.getTimeZone(ZoneOffset.UTC) :\n- TimeZone.getTimeZone(ZoneId.of(dbTimeZone));\n+ return TimeZone.getTimeZone(ZoneId.of(dbTimeZone));\n}\npublic static void main(String[] args) {\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/main/java/io/confluent/connect/jdbc/util/TimeZoneValidator.java",
"diff": "+package io.confluent.connect.jdbc.util;\n+\n+import java.time.DateTimeException;\n+import java.time.ZoneId;\n+import org.apache.kafka.common.config.ConfigDef;\n+import org.apache.kafka.common.config.ConfigException;\n+\n+public class TimeZoneValidator implements ConfigDef.Validator {\n+\n+ public static final TimeZoneValidator INSTANCE = new TimeZoneValidator();\n+\n+ @Override\n+ public void ensureValid(String name, Object value) {\n+ if (value != null) {\n+ try {\n+ ZoneId.of(value.toString());\n+ } catch (DateTimeException e) {\n+ throw new ConfigException(name, value, \"Invalid time zone identifier\");\n+ }\n+ }\n+ }\n+}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/confluent/connect/jdbc/source/JdbcSourceTaskUpdateTest.java",
"new_path": "src/test/java/io/confluent/connect/jdbc/source/JdbcSourceTaskUpdateTest.java",
"diff": "package io.confluent.connect.jdbc.source;\n-import io.confluent.connect.jdbc.util.DateTimeUtils;\n+import static org.hamcrest.MatcherAssert.assertThat;\n+import static org.hamcrest.core.IsEqual.equalTo;\n+import static org.junit.Assert.assertEquals;\n+import static org.junit.Assert.assertTrue;\n+import static org.junit.Assert.fail;\n+import java.sql.Timestamp;\nimport java.time.ZoneOffset;\n-import java.util.*;\n-\n+import java.util.Arrays;\n+import java.util.Collections;\n+import java.util.Date;\n+import java.util.HashMap;\n+import java.util.List;\n+import java.util.Map;\n+import java.util.TimeZone;\n+\n+import org.apache.kafka.common.config.ConfigException;\nimport org.apache.kafka.connect.data.Struct;\nimport org.apache.kafka.connect.errors.ConnectException;\nimport org.apache.kafka.connect.source.SourceRecord;\nimport org.junit.After;\n-import org.junit.Before;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.powermock.api.easymock.PowerMock;\n@@ -33,9 +44,7 @@ import org.powermock.core.classloader.annotations.PowerMockIgnore;\nimport org.powermock.core.classloader.annotations.PrepareForTest;\nimport org.powermock.modules.junit4.PowerMockRunner;\n-import java.sql.Timestamp;\n-\n-import static org.junit.Assert.assertEquals;\n+import io.confluent.connect.jdbc.util.DateTimeUtils;\n// Tests of polling that return data updates, i.e. verifies the different behaviors for getting\n// incremental data updates from the database\n@@ -340,6 +349,26 @@ public class JdbcSourceTaskUpdateTest extends JdbcSourceTaskTestBase {\nPowerMock.verifyAll();\n}\n+ @Test\n+ public void testTimestampInInvalidTimezone() throws Exception {\n+ String invalidTimeZoneID = \"Europe/Invalid\";\n+ // Manage these manually so we can verify the emitted values\n+ db.createTable(SINGLE_TABLE_NAME,\n+ \"modified\", \"TIMESTAMP NOT NULL\",\n+ \"id\", \"INT NOT NULL\");\n+\n+ try {\n+ startTask(\"modified\", \"id\", null, 0L, invalidTimeZoneID);\n+ fail(\"A ConfigException should have been thrown\");\n+ } catch (ConnectException e) {\n+ assertTrue(e.getCause() instanceof ConfigException);\n+ ConfigException configException = (ConfigException) e.getCause();\n+ assertThat(configException.getMessage(),\n+ equalTo(\n+ \"Invalid value Europe/Invalid for configuration db.timezone: Invalid time zone identifier\"));\n+ }\n+ }\n+\n@Test\npublic void testMultiColumnTimestampAndIncrementing() throws Exception {\nexpectInitializeNoOffsets(Arrays.asList(\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/java/io/confluent/connect/jdbc/util/TimeZoneValidatorTest.java",
"diff": "+package io.confluent.connect.jdbc.util;\n+\n+import java.util.stream.Stream;\n+\n+import org.apache.kafka.common.config.ConfigException;\n+import org.junit.Test;\n+\n+public class TimeZoneValidatorTest {\n+\n+ @Test\n+ public void testAccuracy() {\n+ String[] validTimeZones = new String[]{\n+ \"Europe/Vienna\",\n+ \"Asia/Tokyo\",\n+ \"America/Los_Angeles\",\n+ \"UTC\",\n+ \"GMT+01:00\",\n+ \"UTC\"\n+ };\n+\n+ Stream.of(validTimeZones)\n+ .forEach(timeZone -> TimeZoneValidator.INSTANCE.ensureValid(\"db.timezone\", timeZone));\n+ }\n+\n+ @Test\n+ public void testTimeZoneNotSpecified() {\n+ TimeZoneValidator.INSTANCE.ensureValid(\"db.timezone\", null);\n+ }\n+\n+ @Test(expected = ConfigException.class)\n+ public void testInvalidTimeZone() {\n+ TimeZoneValidator.INSTANCE.ensureValid(\"db.timezone\", \"invalid\");\n+ }\n+\n+ @Test(expected = ConfigException.class)\n+ public void testEmptyTimeZone() {\n+ TimeZoneValidator.INSTANCE.ensureValid(\"db.timezone\", \"\");\n+ }\n+}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
added validator for the db.timezone configuration option
|
11,726 |
22.10.2018 23:28:47
| -7,200 |
ef7b29038f4dce10b80d972cc218c51766f53a24
|
added copyright section for the newly added source files.
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/util/TimeZoneValidator.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/util/TimeZoneValidator.java",
"diff": "+/**\n+ * Copyright 2015 Confluent Inc.\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ **/\n+\npackage io.confluent.connect.jdbc.util;\nimport java.time.DateTimeException;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/confluent/connect/jdbc/util/TimeZoneValidatorTest.java",
"new_path": "src/test/java/io/confluent/connect/jdbc/util/TimeZoneValidatorTest.java",
"diff": "+/**\n+ * Copyright 2017 Confluent Inc.\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except\n+ * in compliance with the License. You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software distributed under the License\n+ * is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express\n+ * or implied. See the License for the specific language governing permissions and limitations under\n+ * the License.\n+ **/\n+\npackage io.confluent.connect.jdbc.util;\nimport java.util.stream.Stream;\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
added copyright section for the newly added source files.
|
11,745 |
04.11.2018 17:09:26
| 0 |
650e99c090b1175b9e83e2a4f513749f01221ab1
|
Fix update where with multiple keyColumns
fixes
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/dialect/GenericDatabaseDialect.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/dialect/GenericDatabaseDialect.java",
"diff": "@@ -1376,7 +1376,7 @@ public class GenericDatabaseDialect implements DatabaseDialect {\nif (!keyColumns.isEmpty()) {\nbuilder.append(\" WHERE \");\nbuilder.appendList()\n- .delimitedBy(\", \")\n+ .delimitedBy(\" AND \")\n.transformedBy(ExpressionBuilder.columnNamesWith(\" = ?\"))\n.of(keyColumns);\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Fix update where with multiple keyColumns
fixes https://github.com/confluentinc/kafka-connect-jdbc/issues/388
|
11,712 |
13.11.2018 14:38:51
| 18,000 |
7154dd6f2aae71ad0d734dcaf44bac9fb8d77f17
|
Update the SQL Server dialect to properly retrieve auto incremented column information.
|
[
{
"change_type": "MODIFY",
"old_path": "checkstyle/suppressions.xml",
"new_path": "checkstyle/suppressions.xml",
"diff": "files=\"(DataConverter|GenericDatabaseDialect|JdbcSourceTask).java\"/>\n<suppress checks=\"ParameterNumber\"\n- files=\"(ColumnDefinition|GenericDatabaseDialect).java\"/>\n+ files=\"(ColumnDefinition|GenericDatabaseDialect|SqlServerDatabaseDialect).java\"/>\n</suppressions>\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/confluent/connect/jdbc/dialect/SqlServerDatabaseDialect.java",
"new_path": "src/main/java/io/confluent/connect/jdbc/dialect/SqlServerDatabaseDialect.java",
"diff": "@@ -22,16 +22,21 @@ import org.apache.kafka.connect.data.Decimal;\nimport org.apache.kafka.connect.data.Time;\nimport org.apache.kafka.connect.data.Timestamp;\n+import java.sql.ResultSet;\n+import java.sql.SQLException;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport io.confluent.connect.jdbc.dialect.DatabaseDialectProvider.SubprotocolBasedProvider;\nimport io.confluent.connect.jdbc.sink.metadata.SinkRecordField;\n+import io.confluent.connect.jdbc.util.ColumnDefinition;\nimport io.confluent.connect.jdbc.util.ColumnId;\nimport io.confluent.connect.jdbc.util.ExpressionBuilder;\nimport io.confluent.connect.jdbc.util.IdentifierRules;\nimport io.confluent.connect.jdbc.util.TableId;\n+import io.confluent.connect.jdbc.util.ColumnDefinition.Mutability;\n+import io.confluent.connect.jdbc.util.ColumnDefinition.Nullability;\n/**\n* A {@link DatabaseDialect} for SQL Server.\n@@ -183,6 +188,57 @@ public class SqlServerDatabaseDialect extends GenericDatabaseDialect {\nreturn builder.toString();\n}\n+ @Override\n+ protected ColumnDefinition columnDefinition(\n+ ResultSet resultSet,\n+ ColumnId id,\n+ int jdbcType,\n+ String typeName,\n+ String classNameForType,\n+ Nullability nullability,\n+ Mutability mutability,\n+ int precision,\n+ int scale,\n+ Boolean signedNumbers,\n+ Integer displaySize,\n+ Boolean autoIncremented,\n+ Boolean caseSensitive,\n+ Boolean searchable,\n+ Boolean currency,\n+ Boolean isPrimaryKey\n+ ) {\n+ try {\n+ String isAutoIncremented = resultSet.getString(22);\n+\n+ if (\"yes\".equalsIgnoreCase(isAutoIncremented)) {\n+ autoIncremented = Boolean.TRUE;\n+ } else if (\"no\".equalsIgnoreCase(isAutoIncremented)) {\n+ autoIncremented = Boolean.FALSE;\n+ }\n+ } catch (SQLException e) {\n+ log.warn(\"Unable to get auto incrementing column information\", e);\n+ }\n+\n+ return super.columnDefinition(\n+ resultSet,\n+ id,\n+ jdbcType,\n+ typeName,\n+ classNameForType,\n+ nullability,\n+ mutability,\n+ precision,\n+ scale,\n+ signedNumbers,\n+ displaySize,\n+ autoIncremented,\n+ caseSensitive,\n+ searchable,\n+ currency,\n+ isPrimaryKey\n+ );\n+ }\n+\nprivate void transformAs(ExpressionBuilder builder, ColumnId col) {\nbuilder.append(\"target.\")\n.appendIdentifierQuoted(col.name())\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Update the SQL Server dialect to properly retrieve auto incremented column information.
|
11,748 |
10.06.2020 13:35:37
| -7,200 |
5618b6ca4e95fd586b99ed96f1fa14183693218b
|
Replace all non -A-Za-z0-9_ characters with _
for table name
|
[
{
"change_type": "MODIFY",
"old_path": "checkstyle/java.header",
"new_path": "checkstyle/java.header",
"diff": "/\\*\n- \\* Copyright 2019 Aiven Oy\n+ \\* Copyright 20(19|2[0-9]) Aiven Oy\n\\* Copyright 201[5-9] Confluent Inc.\n\\*\n\\* Licensed under the Apache License, Version 2.0 \\(the \"License\"\\);\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcDbWriter.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcDbWriter.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2016 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -22,6 +22,7 @@ import java.sql.SQLException;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.Map;\n+import java.util.regex.Pattern;\nimport org.apache.kafka.connect.errors.ConnectException;\nimport org.apache.kafka.connect.sink.SinkRecord;\n@@ -34,11 +35,17 @@ import org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\npublic class JdbcDbWriter {\n+\nprivate static final Logger log = LoggerFactory.getLogger(JdbcDbWriter.class);\n+ private static final Pattern NORMALIZE_TABLE_NAME_FOR_TOPIC = Pattern.compile(\"(?<!^)[^a-zA-Z0-9_-]\");\n+\nprivate final JdbcSinkConfig config;\n+\nprivate final DatabaseDialect dbDialect;\n+\nprivate final DbStructure dbStructure;\n+\nfinal CachedConnectionProvider cachedConnectionProvider;\nJdbcDbWriter(final JdbcSinkConfig config, final DatabaseDialect dbDialect, final DbStructure dbStructure) {\n@@ -83,7 +90,7 @@ public class JdbcDbWriter {\n}\nTableId destinationTable(final String topic) {\n- final String tableName = config.tableNameFormat.replace(\"${topic}\", topic);\n+ final String tableName = generateTableNameFor(topic);\nif (tableName.isEmpty()) {\nthrow new ConnectException(String.format(\n\"Destination table name for topic '%s' is empty using the format string '%s'\",\n@@ -93,4 +100,10 @@ public class JdbcDbWriter {\n}\nreturn dbDialect.parseTableIdentifier(tableName);\n}\n+\n+ public String generateTableNameFor(final String topic) {\n+ final String tableName = config.tableNameFormat.replace(\"${topic}\", topic);\n+ return NORMALIZE_TABLE_NAME_FOR_TOPIC.matcher(tableName).replaceAll(\"_\");\n+ }\n+\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2016 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -29,7 +29,6 @@ import java.util.TimeZone;\nimport org.apache.kafka.common.config.ConfigDef;\nimport org.apache.kafka.common.config.ConfigException;\n-import org.apache.kafka.common.config.types.Password;\nimport io.aiven.connect.jdbc.config.JdbcConfig;\nimport io.aiven.connect.jdbc.util.StringUtils;\n@@ -58,12 +57,15 @@ public class JdbcSinkConfig extends JdbcConfig {\n);\npublic static final String TABLE_NAME_FORMAT = \"table.name.format\";\n- private static final String TABLE_NAME_FORMAT_DEFAULT = \"${topic}\";\n+\n+ public static final String TABLE_NAME_FORMAT_DEFAULT = \"${topic}\";\n+\nprivate static final String TABLE_NAME_FORMAT_DOC =\n\"A format string for the destination table name, which may contain '${topic}' as a \"\n+ \"placeholder for the originating topic name.\\n\"\n+ \"For example, ``kafka_${topic}`` for the topic 'orders' will map to the table name \"\n- + \"'kafka_orders'.\";\n+ + \"'kafka_orders'. The alphanumeric characters (``a-z A-Z 0-9``) and ``_`` \"\n+ + \"will remain as is, others (like ``.``) will be replaced by ``_``.\";\nprivate static final String TABLE_NAME_FORMAT_DISPLAY = \"Table Name Format\";\npublic static final String MAX_RETRIES = \"max.retries\";\n@@ -324,14 +326,6 @@ public class JdbcSinkConfig extends JdbcConfig {\ntimeZone = TimeZone.getTimeZone(ZoneId.of(dbTimeZone));\n}\n- private String getPasswordValue(final String key) {\n- final Password password = getPassword(key);\n- if (password != null) {\n- return password.value();\n- }\n- return null;\n- }\n-\nprivate static class EnumValidator implements ConfigDef.Validator {\nprivate final List<String> canonicalValues;\nprivate final Set<String> validValues;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/sink/JdbcDbWriterTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/sink/JdbcDbWriterTest.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2016 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -77,6 +77,54 @@ public class JdbcDbWriterTest {\nreturn new JdbcDbWriter(config, dialect, dbStructure);\n}\n+ @Test\n+ public void shouldGenerateNormalizedTableNameForTopic() {\n+\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhost\");\n+ props.put(JdbcSinkConfig.TABLE_NAME_FORMAT, \"kafka_topic_${topic}\");\n+ final JdbcSinkConfig jdbcSinkConfig = new JdbcSinkConfig(props);\n+\n+ dialect = new SqliteDatabaseDialect(jdbcSinkConfig);\n+ final DbStructure dbStructure = new DbStructure(dialect);\n+ final JdbcDbWriter jdbcDbWriter = new JdbcDbWriter(jdbcSinkConfig, dialect, dbStructure);\n+\n+ assertEquals(\"kafka_topic_some_topic\",\n+ jdbcDbWriter.generateTableNameFor(\"some_topic\"));\n+\n+ assertEquals(\"kafka_topic_some-topic\",\n+ jdbcDbWriter.generateTableNameFor(\"some-topic\"));\n+\n+ assertEquals(\"kafka_topic_this_is_topic_with_dots\",\n+ jdbcDbWriter.generateTableNameFor(\"this.is.topic.with.dots\"));\n+\n+ assertEquals(\"kafka_topic_this_is_topic_with_dots_and_weired_characters___\",\n+ jdbcDbWriter.generateTableNameFor(\"this.is.topic.with.dots.and.weired.characters#$%\"));\n+\n+ assertEquals(\"kafka_topic_orders_topic__3\",\n+ jdbcDbWriter.generateTableNameFor(\"orders_topic_#3\"));\n+\n+ }\n+\n+\n+ @Test\n+ public void shouldGetNormalizedTableName() {\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhnost\");\n+ props.put(JdbcSinkConfig.TABLE_NAME_FORMAT, \"${topic}\");\n+\n+ final JdbcSinkConfig jdbcSinkConfig = new JdbcSinkConfig(props);\n+ dialect = new SqliteDatabaseDialect(jdbcSinkConfig);\n+ final DbStructure dbStructure = new DbStructure(dialect);\n+ final JdbcDbWriter writer = new JdbcDbWriter(jdbcSinkConfig, dialect, dbStructure);\n+\n+ TableId tableId = writer.destinationTable(\"this.is.my.topic\");\n+ assertEquals(\"this_is_my_topic\", tableId.tableName());\n+\n+ tableId = writer.destinationTable(\"the_topic\");\n+ assertEquals(\"the_topic\", tableId.tableName());\n+ }\n+\n@Test\npublic void autoCreateWithAutoEvolve() throws SQLException {\nfinal String topic = \"books\";\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Replace all non -A-Za-z0-9_ characters with _
for table name
|
11,748 |
16.06.2020 14:06:29
| -7,200 |
8611ff76fd1ba1efd943904939e7234f4939883d
|
add new parameter 'table.name.normalize'
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcDbWriter.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcDbWriter.java",
"diff": "@@ -38,7 +38,7 @@ public class JdbcDbWriter {\nprivate static final Logger log = LoggerFactory.getLogger(JdbcDbWriter.class);\n- private static final Pattern NORMALIZE_TABLE_NAME_FOR_TOPIC = Pattern.compile(\"(?<!^)[^a-zA-Z0-9_-]\");\n+ private static final Pattern NORMALIZE_TABLE_NAME_FOR_TOPIC = Pattern.compile(\"[^a-zA-Z0-9_]\");\nprivate final JdbcSinkConfig config;\n@@ -103,7 +103,9 @@ public class JdbcDbWriter {\npublic String generateTableNameFor(final String topic) {\nfinal String tableName = config.tableNameFormat.replace(\"${topic}\", topic);\n- return NORMALIZE_TABLE_NAME_FOR_TOPIC.matcher(tableName).replaceAll(\"_\");\n+ return config.tableNameNormalize\n+ ? NORMALIZE_TABLE_NAME_FOR_TOPIC.matcher(tableName).replaceAll(\"_\")\n+ : tableName;\n}\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"diff": "@@ -57,17 +57,23 @@ public class JdbcSinkConfig extends JdbcConfig {\n);\npublic static final String TABLE_NAME_FORMAT = \"table.name.format\";\n-\npublic static final String TABLE_NAME_FORMAT_DEFAULT = \"${topic}\";\n-\nprivate static final String TABLE_NAME_FORMAT_DOC =\n\"A format string for the destination table name, which may contain '${topic}' as a \"\n+ \"placeholder for the originating topic name.\\n\"\n+ \"For example, ``kafka_${topic}`` for the topic 'orders' will map to the table name \"\n- + \"'kafka_orders'. The alphanumeric characters (``a-z A-Z 0-9``) and ``_`` \"\n- + \"will remain as is, others (like ``.``) will be replaced by ``_``.\";\n+ + \"'kafka_orders'.\";\nprivate static final String TABLE_NAME_FORMAT_DISPLAY = \"Table Name Format\";\n+ public static final String TABLE_NAME_NORMALIZE = \"table.name.normalize\";\n+ public static final boolean TABLE_NAME_NORMALIZE_DEFAULT = false;\n+ private static final String TABLE_NAME_NORMALIZE_DOC =\n+ \"If set to ``true`` the alphanumeric characters (``a-z A-Z 0-9``) and ``_`` \"\n+ + \"in the destination table name for the particular topic will remain as is, \"\n+ + \"others (like ``.``) will be replaced by ``_``. \"\n+ + \"By default is set to ``false``.\";\n+ private static final String TABLE_NAME_NORMALIZE_DISPLAY = \"Table Name Normalize\";\n+\npublic static final String MAX_RETRIES = \"max.retries\";\nprivate static final int MAX_RETRIES_DEFAULT = 10;\nprivate static final String MAX_RETRIES_DOC =\n@@ -215,6 +221,17 @@ public class JdbcSinkConfig extends JdbcConfig {\nConfigDef.Width.LONG,\nTABLE_NAME_FORMAT_DISPLAY\n)\n+ .define(\n+ TABLE_NAME_NORMALIZE,\n+ ConfigDef.Type.BOOLEAN,\n+ TABLE_NAME_NORMALIZE_DEFAULT,\n+ ConfigDef.Importance.MEDIUM,\n+ TABLE_NAME_NORMALIZE_DOC,\n+ DATAMAPPING_GROUP,\n+ 2,\n+ ConfigDef.Width.LONG,\n+ TABLE_NAME_NORMALIZE_DISPLAY\n+ )\n.define(\nPK_MODE,\nConfigDef.Type.STRING,\n@@ -223,7 +240,7 @@ public class JdbcSinkConfig extends JdbcConfig {\nConfigDef.Importance.HIGH,\nPK_MODE_DOC,\nDATAMAPPING_GROUP,\n- 2,\n+ 3,\nConfigDef.Width.MEDIUM,\nPK_MODE_DISPLAY\n)\n@@ -234,7 +251,7 @@ public class JdbcSinkConfig extends JdbcConfig {\nConfigDef.Importance.MEDIUM,\nPK_FIELDS_DOC,\nDATAMAPPING_GROUP,\n- 3,\n+ 4,\nConfigDef.Width.LONG, PK_FIELDS_DISPLAY\n)\n.define(\n@@ -244,7 +261,7 @@ public class JdbcSinkConfig extends JdbcConfig {\nConfigDef.Importance.MEDIUM,\nFIELDS_WHITELIST_DOC,\nDATAMAPPING_GROUP,\n- 4,\n+ 5,\nConfigDef.Width.LONG,\nFIELDS_WHITELIST_DISPLAY\n);\n@@ -299,6 +316,7 @@ public class JdbcSinkConfig extends JdbcConfig {\n}\npublic final String tableNameFormat;\n+ public final boolean tableNameNormalize;\npublic final int batchSize;\npublic final int maxRetries;\npublic final int retryBackoffMs;\n@@ -313,6 +331,7 @@ public class JdbcSinkConfig extends JdbcConfig {\npublic JdbcSinkConfig(final Map<?, ?> props) {\nsuper(CONFIG_DEF, props);\ntableNameFormat = getString(TABLE_NAME_FORMAT).trim();\n+ tableNameNormalize = getBoolean(TABLE_NAME_NORMALIZE);\nbatchSize = getInt(BATCH_SIZE);\nmaxRetries = getInt(MAX_RETRIES);\nretryBackoffMs = getInt(RETRY_BACKOFF_MS);\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/sink/JdbcDbWriterTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/sink/JdbcDbWriterTest.java",
"diff": "@@ -79,20 +79,23 @@ public class JdbcDbWriterTest {\n@Test\npublic void shouldGenerateNormalizedTableNameForTopic() {\n-\n- final Map<String, String> props = new HashMap<>();\n+ final Map<String, Object> props = new HashMap<>();\nprops.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhost\");\nprops.put(JdbcSinkConfig.TABLE_NAME_FORMAT, \"kafka_topic_${topic}\");\n+ props.put(JdbcSinkConfig.TABLE_NAME_NORMALIZE, true);\nfinal JdbcSinkConfig jdbcSinkConfig = new JdbcSinkConfig(props);\ndialect = new SqliteDatabaseDialect(jdbcSinkConfig);\nfinal DbStructure dbStructure = new DbStructure(dialect);\nfinal JdbcDbWriter jdbcDbWriter = new JdbcDbWriter(jdbcSinkConfig, dialect, dbStructure);\n+ assertEquals(\"kafka_topic___some_topic\",\n+ jdbcDbWriter.generateTableNameFor(\"--some_topic\"));\n+\nassertEquals(\"kafka_topic_some_topic\",\njdbcDbWriter.generateTableNameFor(\"some_topic\"));\n- assertEquals(\"kafka_topic_some-topic\",\n+ assertEquals(\"kafka_topic_some_topic\",\njdbcDbWriter.generateTableNameFor(\"some-topic\"));\nassertEquals(\"kafka_topic_this_is_topic_with_dots\",\n@@ -106,25 +109,6 @@ public class JdbcDbWriterTest {\n}\n-\n- @Test\n- public void shouldGetNormalizedTableName() {\n- final Map<String, String> props = new HashMap<>();\n- props.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhnost\");\n- props.put(JdbcSinkConfig.TABLE_NAME_FORMAT, \"${topic}\");\n-\n- final JdbcSinkConfig jdbcSinkConfig = new JdbcSinkConfig(props);\n- dialect = new SqliteDatabaseDialect(jdbcSinkConfig);\n- final DbStructure dbStructure = new DbStructure(dialect);\n- final JdbcDbWriter writer = new JdbcDbWriter(jdbcSinkConfig, dialect, dbStructure);\n-\n- TableId tableId = writer.destinationTable(\"this.is.my.topic\");\n- assertEquals(\"this_is_my_topic\", tableId.tableName());\n-\n- tableId = writer.destinationTable(\"the_topic\");\n- assertEquals(\"the_topic\", tableId.tableName());\n- }\n-\n@Test\npublic void autoCreateWithAutoEvolve() throws SQLException {\nfinal String topic = \"books\";\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
add new parameter 'table.name.normalize'
|
11,748 |
03.06.2020 12:53:33
| -7,200 |
cd1f9a7abf71bb807152e4d8036d2a41e1d18a5c
|
Use DO NOTHING in the upsert query for PgSQL in the case of empty non-key columns
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"diff": "@@ -244,11 +244,16 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n.delimitedBy(\",\")\n.transformedBy(ExpressionBuilder.columnNames())\n.of(keyColumns);\n- builder.append(\") DO UPDATE SET \");\n+ builder.append(\") \");\n+ if (nonKeyColumns.isEmpty()) {\n+ builder.append(\"DO NOTHING\");\n+ } else {\n+ builder.append(\"DO UPDATE SET \");\nbuilder.appendList()\n.delimitedBy(\",\")\n.transformedBy(transform)\n.of(nonKeyColumns);\n+ }\nreturn builder.toString();\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"diff": "@@ -155,6 +155,18 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\nassertQueryEquals(expected, actual);\n}\n+ @Test\n+ public void upsertWithEmptyNonKeyColumns() {\n+ final String expected = readQueryResourceForThisTest(\"upsert2\");\n+ final TableId customer = tableId(\"Customer\");\n+ final String actual = dialect.buildUpsertQueryStatement(\n+ customer,\n+ columns(customer, \"id\", \"name\", \"salary\", \"address\"),\n+ columns(customer)\n+ );\n+ assertQueryEquals(expected, actual);\n+ }\n+\n@Test\npublic void shouldSanitizeUrlWithoutCredentialsInProperties() {\nassertSanitizedUrl(\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/upsert2-nonquoted.txt",
"diff": "+INSERT INTO Customer (id,name,salary,address) VALUES (?,?,?,?) ON CONFLICT (id,name,salary,address) DO NOTHING\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/upsert2-quoted.txt",
"diff": "+INSERT INTO \"Customer\" (\"id\",\"name\",\"salary\",\"address\") VALUES (?,?,?,?) ON CONFLICT (\"id\",\"name\",\"salary\",\"address\") DO NOTHING\n\\ No newline at end of file\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Use DO NOTHING in upsert query for PgSQL in the case of empty non key columns
|
11,748 |
10.06.2020 17:01:16
| -7,200 |
d3e6ca76d382a376b14df74a9ae3bcc3e7c762f3
|
Add new config parameter: topics.to.tables.mapping
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcDbWriter.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcDbWriter.java",
"diff": "@@ -23,6 +23,7 @@ import java.util.Collection;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.regex.Pattern;\n+import java.util.stream.Collectors;\nimport org.apache.kafka.connect.errors.ConnectException;\nimport org.apache.kafka.connect.sink.SinkRecord;\n@@ -91,21 +92,33 @@ public class JdbcDbWriter {\nTableId destinationTable(final String topic) {\nfinal String tableName = generateTableNameFor(topic);\n- if (tableName.isEmpty()) {\n- throw new ConnectException(String.format(\n- \"Destination table name for topic '%s' is empty using the format string '%s'\",\n- topic,\n- config.tableNameFormat\n- ));\n- }\nreturn dbDialect.parseTableIdentifier(tableName);\n}\npublic String generateTableNameFor(final String topic) {\n- final String tableName = config.tableNameFormat.replace(\"${topic}\", topic);\n- return config.tableNameNormalize\n- ? NORMALIZE_TABLE_NAME_FOR_TOPIC.matcher(tableName).replaceAll(\"_\")\n- : tableName;\n+ String tableName = config.tableNameFormat.replace(\"${topic}\", topic);\n+ if (config.tableNameNormalize) {\n+ tableName = NORMALIZE_TABLE_NAME_FOR_TOPIC.matcher(tableName).replaceAll(\"_\");\n+ }\n+ if (!config.topicsToTablesMapping.isEmpty()) {\n+ tableName = config.topicsToTablesMapping.getOrDefault(topic, \"\");\n+ }\n+ if (tableName.isEmpty()) {\n+ final String errorMessage =\n+ String.format(\n+ \"Destination table for the topic: '%s' \"\n+ + \"couldn't be found in the topics to tables mapping: '%s' \"\n+ + \"and couldn't be generated for the format string '%s'\",\n+ topic,\n+ config.topicsToTablesMapping\n+ .entrySet()\n+ .stream()\n+ .map(e -> String.join(\"->\", e.getKey(), e.getValue()))\n+ .collect(Collectors.joining(\",\")),\n+ config.tableNameFormat);\n+ throw new ConnectException(errorMessage);\n+ }\n+ return tableName;\n}\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"diff": "@@ -24,8 +24,10 @@ import java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\n+import java.util.Objects;\nimport java.util.Set;\nimport java.util.TimeZone;\n+import java.util.stream.Collectors;\nimport org.apache.kafka.common.config.ConfigDef;\nimport org.apache.kafka.common.config.ConfigException;\n@@ -57,7 +59,7 @@ public class JdbcSinkConfig extends JdbcConfig {\n);\npublic static final String TABLE_NAME_FORMAT = \"table.name.format\";\n- private static final String TABLE_NAME_FORMAT_DEFAULT = \"${topic}\";\n+ public static final String TABLE_NAME_FORMAT_DEFAULT = \"${topic}\";\nprivate static final String TABLE_NAME_FORMAT_DOC =\n\"A format string for the destination table name, which may contain '${topic}' as a \"\n+ \"placeholder for the originating topic name.\\n\"\n@@ -73,6 +75,14 @@ public class JdbcSinkConfig extends JdbcConfig {\n+ \"remain as is, others (like ``.``) are replaced with ``_``.\";\nprivate static final String TABLE_NAME_NORMALIZE_DISPLAY = \"Table Name Normalize\";\n+ public static final String TOPICS_TO_TABLES_MAPPING = \"topics.to.tables.mapping\";\n+ private static final String TOPICS_TO_TABLES_MAPPING_DOC =\n+ \"Kafka topics to database tables mapping. \"\n+ + \"Comma-separated list of topic to table mapping in the format: topic_name:table_name. \"\n+ + \"If the destination table found in the mapping, \"\n+ + \"it would override generated one defined in \" + TABLE_NAME_FORMAT + \".\";\n+ private static final String TOPICS_TO_TABLES_MAPPING_DISPLAY = \"Topics To Tables Mapping\";\n+\npublic static final String MAX_RETRIES = \"max.retries\";\nprivate static final int MAX_RETRIES_DEFAULT = 10;\nprivate static final String MAX_RETRIES_DOC =\n@@ -224,13 +234,43 @@ public class JdbcSinkConfig extends JdbcConfig {\nTABLE_NAME_NORMALIZE,\nConfigDef.Type.BOOLEAN,\nTABLE_NAME_NORMALIZE_DEFAULT,\n- ConfigDef.Importance.LOW,\n+ ConfigDef.Importance.MEDIUM,\nTABLE_NAME_NORMALIZE_DOC,\nDATAMAPPING_GROUP,\n2,\nConfigDef.Width.LONG,\nTABLE_NAME_NORMALIZE_DISPLAY\n)\n+ .define(\n+ TOPICS_TO_TABLES_MAPPING,\n+ ConfigDef.Type.LIST,\n+ null,\n+ new ConfigDef.Validator() {\n+ @Override\n+ public void ensureValid(final String name, final Object value) {\n+ if (Objects.isNull(value)\n+ || ConfigDef.NO_DEFAULT_VALUE == value\n+ || \"\".equals(value)) {\n+ return;\n+ }\n+ assert value instanceof List;\n+ try {\n+ final Map<String, String> mapping = topicToTableMapping((List<String>) value);\n+ if (Objects.isNull(mapping) || mapping.isEmpty()) {\n+ throw new ConfigException(name, value, \"Invalid topics to tables mapping\");\n+ }\n+ } catch (final ArrayIndexOutOfBoundsException e) {\n+ throw new ConfigException(name, value, \"Invalid topics to tables mapping\");\n+ }\n+ }\n+ },\n+ ConfigDef.Importance.MEDIUM,\n+ TOPICS_TO_TABLES_MAPPING_DOC,\n+ DATAMAPPING_GROUP,\n+ 3,\n+ ConfigDef.Width.LONG,\n+ TOPICS_TO_TABLES_MAPPING_DISPLAY\n+ )\n.define(\nPK_MODE,\nConfigDef.Type.STRING,\n@@ -239,7 +279,7 @@ public class JdbcSinkConfig extends JdbcConfig {\nConfigDef.Importance.HIGH,\nPK_MODE_DOC,\nDATAMAPPING_GROUP,\n- 3,\n+ 4,\nConfigDef.Width.MEDIUM,\nPK_MODE_DISPLAY\n)\n@@ -250,7 +290,7 @@ public class JdbcSinkConfig extends JdbcConfig {\nConfigDef.Importance.MEDIUM,\nPK_FIELDS_DOC,\nDATAMAPPING_GROUP,\n- 4,\n+ 5,\nConfigDef.Width.LONG, PK_FIELDS_DISPLAY\n)\n.define(\n@@ -260,7 +300,7 @@ public class JdbcSinkConfig extends JdbcConfig {\nConfigDef.Importance.MEDIUM,\nFIELDS_WHITELIST_DOC,\nDATAMAPPING_GROUP,\n- 5,\n+ 
6,\nConfigDef.Width.LONG,\nFIELDS_WHITELIST_DISPLAY\n);\n@@ -315,6 +355,7 @@ public class JdbcSinkConfig extends JdbcConfig {\n}\npublic final String tableNameFormat;\n+ public final Map<String, String> topicsToTablesMapping;\npublic final boolean tableNameNormalize;\npublic final int batchSize;\npublic final int maxRetries;\n@@ -331,6 +372,7 @@ public class JdbcSinkConfig extends JdbcConfig {\nsuper(CONFIG_DEF, props);\ntableNameFormat = getString(TABLE_NAME_FORMAT).trim();\ntableNameNormalize = getBoolean(TABLE_NAME_NORMALIZE);\n+ topicsToTablesMapping = topicToTableMapping(getList(TOPICS_TO_TABLES_MAPPING));\nbatchSize = getInt(BATCH_SIZE);\nmaxRetries = getInt(MAX_RETRIES);\nretryBackoffMs = getInt(RETRY_BACKOFF_MS);\n@@ -344,6 +386,14 @@ public class JdbcSinkConfig extends JdbcConfig {\ntimeZone = TimeZone.getTimeZone(ZoneId.of(dbTimeZone));\n}\n+ static Map<String, String> topicToTableMapping(final List<String> value) {\n+ return (Objects.nonNull(value))\n+ ? value.stream()\n+ .map(s -> s.split(\":\"))\n+ .collect(Collectors.toMap(e -> e[0], e -> e[1]))\n+ : Collections.emptyMap();\n+ }\n+\nprivate static class EnumValidator implements ConfigDef.Validator {\nprivate final List<String> canonicalValues;\nprivate final Set<String> validValues;\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/sink/JdbcDbWriterTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/sink/JdbcDbWriterTest.java",
"diff": "@@ -34,6 +34,7 @@ import org.apache.kafka.connect.data.SchemaBuilder;\nimport org.apache.kafka.connect.data.Struct;\nimport org.apache.kafka.connect.data.Time;\nimport org.apache.kafka.connect.data.Timestamp;\n+import org.apache.kafka.connect.errors.ConnectException;\nimport org.apache.kafka.connect.sink.SinkRecord;\nimport io.aiven.connect.jdbc.dialect.DatabaseDialect;\n@@ -109,6 +110,36 @@ public class JdbcDbWriterTest {\n}\n+ @Test\n+ public void shouldSelectTableFromMapping() {\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhnost\");\n+ props.put(JdbcSinkConfig.TABLE_NAME_FORMAT, \"${topic}\");\n+ props.put(JdbcSinkConfig.TOPICS_TO_TABLES_MAPPING, \"some_topic:same_table\");\n+\n+ final JdbcSinkConfig jdbcSinkConfig = new JdbcSinkConfig(props);\n+ dialect = new SqliteDatabaseDialect(jdbcSinkConfig);\n+ final DbStructure dbStructure = new DbStructure(dialect);\n+ final JdbcDbWriter writer = new JdbcDbWriter(jdbcSinkConfig, dialect, dbStructure);\n+\n+ final TableId tableId = writer.destinationTable(\"some_topic\");\n+ assertEquals(\"same_table\", tableId.tableName());\n+ }\n+\n+ @Test(expected = ConnectException.class)\n+ public void shouldThrowConnectExceptionForUnknownTopicToTableMapping() {\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhnost\");\n+ props.put(JdbcSinkConfig.TABLE_NAME_FORMAT, \"\");\n+ props.put(JdbcSinkConfig.TOPICS_TO_TABLES_MAPPING, \"some_topic:same_table,some_topic2:same_table2\");\n+\n+ final JdbcSinkConfig jdbcSinkConfig = new JdbcSinkConfig(props);\n+ dialect = new SqliteDatabaseDialect(jdbcSinkConfig);\n+ final DbStructure dbStructure = new DbStructure(dialect);\n+ final JdbcDbWriter writer = new JdbcDbWriter(jdbcSinkConfig, dialect, dbStructure);\n+ writer.generateTableNameFor(\"another_topic\");\n+ }\n+\n@Test\npublic void autoCreateWithAutoEvolve() throws SQLException {\nfinal String topic = \"books\";\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/java/io/aiven/connect/jdbc/sink/JdbcSinkConfigTest.java",
"diff": "+/*\n+ * Copyright 2020 Aiven Oy\n+ * Copyright 2016 Confluent Inc.\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.aiven.connect.jdbc.sink;\n+\n+import java.util.HashMap;\n+import java.util.Map;\n+\n+import org.apache.kafka.common.config.ConfigException;\n+\n+import org.junit.Test;\n+\n+import static junit.framework.TestCase.assertEquals;\n+import static junit.framework.TestCase.assertTrue;\n+\n+public class JdbcSinkConfigTest {\n+\n+ @Test\n+ public void shouldReturnEmptyMapForUndefinedMapping() {\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhost\");\n+ assertTrue(new JdbcSinkConfig(props).topicsToTablesMapping.isEmpty());\n+ }\n+\n+ @Test\n+ public void shouldParseTopicToTableMappings() {\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.CONNECTION_URL_CONFIG, \"jdbc://localhost\");\n+ props.put(JdbcSinkConfig.TOPICS_TO_TABLES_MAPPING, \"t0:tbl0,t1:tbl1\");\n+\n+ JdbcSinkConfig config = new JdbcSinkConfig(props);\n+\n+ assertEquals(config.topicsToTablesMapping.size(), 2);\n+ assertEquals(config.topicsToTablesMapping.get(\"t0\"), \"tbl0\");\n+ assertEquals(config.topicsToTablesMapping.get(\"t1\"), \"tbl1\");\n+\n+ props.put(JdbcSinkConfig.TOPICS_TO_TABLES_MAPPING, \"t3:tbl3\");\n+ config = new JdbcSinkConfig(props);\n+\n+ assertEquals(config.topicsToTablesMapping.size(), 1);\n+ assertEquals(config.topicsToTablesMapping.get(\"t3\"), \"tbl3\");\n+ }\n+\n+ @Test(expected = ConfigException.class)\n+ public void shouldThrowExceptionForWrongMappingFormat() {\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.TOPICS_TO_TABLES_MAPPING, \"asd:asd,asd\");\n+\n+ new JdbcSinkConfig(props);\n+ }\n+\n+ @Test(expected = ConfigException.class)\n+ public void shouldThrowExceptionForEmptyMappingFormat() {\n+ final Map<String, String> props = new HashMap<>();\n+ props.put(JdbcSinkConfig.TOPICS_TO_TABLES_MAPPING, \",,,,,,asd\");\n+\n+ new JdbcSinkConfig(props);\n+ }\n+\n+}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Add new config parameter: topics.to.tables.mapping
|
11,748 |
07.07.2020 16:32:34
| -7,200 |
489e5d12340d25ef6e07870c5c81e62a25ab735f
|
add jdbc drivers
snowflake jdbc driver
bump up version of postgresql driver to 42.2.14
bump up version of sqlite driver to 3.32.2
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -108,9 +108,10 @@ ext {\ndependencies {\ncompileOnly \"org.apache.kafka:connect-api:$kafkaVersion\"\n- runtimeOnly \"org.xerial:sqlite-jdbc:3.25.2\"\n- runtimeOnly \"org.postgresql:postgresql:42.2.9\"\n+ runtimeOnly \"org.xerial:sqlite-jdbc:3.32.3\"\n+ runtimeOnly \"org.postgresql:postgresql:42.2.14\"\nruntimeOnly \"net.sourceforge.jtds:jtds:1.3.1\"\n+ runtimeOnly \"net.snowflake:snowflake-jdbc:3.12.8\"\nimplementation \"com.google.guava:guava:27.1-jre\"\nimplementation \"org.slf4j:slf4j-api:$slf4jVersion\"\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
add jdbc drivers
- snowflake jdbc driver
- bump up version of postgresql driver to 42.2.14
- bump up version of sqlite driver to 3.32.2
|
11,748 |
29.07.2020 16:24:35
| -7,200 |
12560e74879573e61c0a6e7982d91dad4caac0c2
|
max length for SqlServer varchar pk is 900
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/SqlServerDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/SqlServerDatabaseDialect.java",
"diff": "@@ -104,7 +104,9 @@ public class SqlServerDatabaseDialect extends GenericDatabaseDialect {\ncase BOOLEAN:\nreturn \"bit\";\ncase STRING:\n- return \"varchar(max)\";\n+ //900 is a max size for a column on which SQL server builds an index\n+ //here is the docs: https://docs.microsoft.com/en-us/previous-versions/sql/sql-server-2008-r2/ms191241(v=sql.105)?redirectedfrom=MSDN\n+ return field.isPrimaryKey() ? \"varchar(900)\" : \"varchar(max)\";\ncase BYTES:\nreturn \"varbinary(max)\";\ndefault:\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/dialect/SqlServerDatabaseDialectTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/dialect/SqlServerDatabaseDialectTest.java",
"diff": "package io.aiven.connect.jdbc.dialect;\n+import java.util.Arrays;\nimport java.util.List;\nimport org.apache.kafka.connect.data.Date;\n@@ -26,6 +27,7 @@ import org.apache.kafka.connect.data.Schema.Type;\nimport org.apache.kafka.connect.data.Time;\nimport org.apache.kafka.connect.data.Timestamp;\n+import io.aiven.connect.jdbc.sink.metadata.SinkRecordField;\nimport io.aiven.connect.jdbc.util.TableId;\nimport org.junit.Test;\n@@ -126,6 +128,17 @@ public class SqlServerDatabaseDialectTest extends BaseDialectTest<SqlServerDatab\nverifyCreateOneColOnePk(expected);\n}\n+ @Test\n+ public void createOneColOneVarcharPk() {\n+ final String expected = readQueryResourceForThisTest(\"create_table_one_col_one_varchar_pk\");\n+ assertQueryEquals(expected,\n+ dialect.buildCreateTableStatement(\n+ tableId,\n+ Arrays.asList(new SinkRecordField(Schema.STRING_SCHEMA, \"pk1\", true))\n+ )\n+ );\n+ }\n+\n@Test\npublic void createThreeColTwoPk() {\nfinal String expected = readQueryResourceForThisTest(\"create_table_three_cols_two_pks\");\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/SqlServerDatabaseDialectTest/create_table_one_col_one_varchar_pk-nonquoted.txt",
"diff": "+CREATE TABLE myTable (\n+pk1 varchar(900) NOT NULL,\n+PRIMARY KEY(pk1))\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/SqlServerDatabaseDialectTest/create_table_one_col_one_varchar_pk-quoted.txt",
"diff": "+CREATE TABLE [myTable] (\n+[pk1] varchar(900) NOT NULL,\n+PRIMARY KEY([pk1]))\n\\ No newline at end of file\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
max length for SqlServer varchar pk is 900
|
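The comment in the diff gives the reason for the special case: SQL Server cannot build an index over a key column wider than 900 bytes, so a STRING field that is part of the primary key is capped at varchar(900), while ordinary string columns can stay varchar(max). A tiny standalone sketch of that decision follows (illustrative names only, not the dialect's API):

    // Standalone sketch: pick a SQL Server varchar type for a STRING field.
    // SQL Server limits index key columns (and thus primary keys) to 900 bytes,
    // so only non-key string columns can use varchar(max).
    public class SqlServerVarcharChoice {

        static String sqlTypeForString(final boolean isPrimaryKey) {
            return isPrimaryKey ? "varchar(900)" : "varchar(max)";
        }

        public static void main(final String[] args) {
            System.out.println(sqlTypeForString(true));  // varchar(900)
            System.out.println(sqlTypeForString(false)); // varchar(max)
        }
    }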
11,748 |
30.07.2020 11:06:21
| -7,200 |
271b4d5b41d6a730dde07b2e0fce8c1841670795
|
bump up tool version for checkstyle and jacoco
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -41,13 +41,13 @@ java {\n}\ncheckstyle {\n- toolVersion \"8.19\"\n+ toolVersion \"8.35\"\nconfigDir rootProject.file(\"checkstyle/\")\nprintln configFile\n}\njacoco {\n- toolVersion = \"0.8.3\"\n+ toolVersion = \"0.8.5\"\n}\ndistributions {\n"
},
{
"change_type": "MODIFY",
"old_path": "checkstyle/checkstyle.xml",
"new_path": "checkstyle/checkstyle.xml",
"diff": "<!-- See http://checkstyle.sourceforge.net/config_misc.html#NewlineAtEndOfFile -->\n<module name=\"NewlineAtEndOfFile\"/>\n+ <!-- See http://checkstyle.sourceforge.net/config_sizes.html#LineLength -->\n+ <module name=\"LineLength\">\n+ <property name=\"max\" value=\"120\"/>\n+ <property name=\"ignorePattern\" value=\"^package.*|^import.*|a href|href|http://|https://|ftp://\"/>\n+ </module>\n+\n<!-- See http://checkstyle.sourceforge.net/config.html#TreeWalker -->\n<module name=\"TreeWalker\">\n<property name=\"fileExtensions\" value=\"java\"/>\n<property name=\"max\" value=\"10\"/>\n</module>\n- <!-- See http://checkstyle.sourceforge.net/config_sizes.html#LineLength -->\n- <module name=\"LineLength\">\n- <property name=\"max\" value=\"120\"/>\n- <property name=\"ignorePattern\" value=\"^package.*|^import.*|a href|href|http://|https://|ftp://\"/>\n- </module>\n-\n<!-- See http://checkstyle.sourceforge.net/config_metrics.html#ClassDataAbstractionCoupling -->\n<module name=\"ClassDataAbstractionCoupling\"/>\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
bump up tool version for checkstyle and jacoco
|
11,748 |
29.07.2020 17:20:09
| -7,200 |
43793c7e8116808fe3a727e2c226c5790365f5bc
|
add ms sql jdbc driver
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -112,6 +112,7 @@ dependencies {\nruntimeOnly \"org.postgresql:postgresql:42.2.14\"\nruntimeOnly \"net.sourceforge.jtds:jtds:1.3.1\"\nruntimeOnly \"net.snowflake:snowflake-jdbc:3.12.8\"\n+ runtimeOnly \"com.microsoft.sqlserver:mssql-jdbc:8.2.1.jre11\"\nimplementation \"com.google.guava:guava:27.1-jre\"\nimplementation \"org.slf4j:slf4j-api:$slf4jVersion\"\n@@ -125,6 +126,7 @@ dependencies {\ntestImplementation \"commons-io:commons-io:2.6\"\ntestImplementation \"org.apache.derby:derby:10.11.1.1\"\ntestRuntime \"org.slf4j:slf4j-log4j12:$slf4jVersion\"\n+\n}\nprocessResources {\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/SqlServerDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/SqlServerDatabaseDialect.java",
"diff": "@@ -46,7 +46,10 @@ public class SqlServerDatabaseDialect extends GenericDatabaseDialect {\n*/\npublic static class Provider extends SubprotocolBasedProvider {\npublic Provider() {\n- super(SqlServerDatabaseDialect.class.getSimpleName(), \"microsoft:sqlserver\", \"sqlserver\",\n+ super(SqlServerDatabaseDialect.class.getSimpleName(),\n+ \"microsoft:sqlserver\",\n+ \"sqlserver\",\n+ \"jdbc:sqlserver\",\n\"jtds:sqlserver\");\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
add ms sql jdbc driver
|
11,748 |
04.08.2020 10:16:36
| -7,200 |
b25180180297a3a4273a1ccc067c2c29c5539f0d
|
downgrade PgSQL driver to version 42.2.10
New versions contain a bug which doesn't return tables with
partitions
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -109,7 +109,7 @@ dependencies {\ncompileOnly \"org.apache.kafka:connect-api:$kafkaVersion\"\nruntimeOnly \"org.xerial:sqlite-jdbc:3.32.3\"\n- runtimeOnly \"org.postgresql:postgresql:42.2.14\"\n+ runtimeOnly \"org.postgresql:postgresql:42.2.10\"\nruntimeOnly \"net.sourceforge.jtds:jtds:1.3.1\"\nruntimeOnly \"net.snowflake:snowflake-jdbc:3.12.8\"\nruntimeOnly \"com.microsoft.sqlserver:mssql-jdbc:8.2.1.jre11\"\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
downgrade PgSQL driver to version 42.2.10
New versions contain a bug which doesn't return tables with
partitions
|
11,748 |
04.08.2020 16:22:23
| -7,200 |
ae6db3eb20f05affe13907ec469de05e374d6629
|
Add licenses for drivers
MS SQL JDBC driver
PgSQL JDBC driver
SQLite JDBC driver
|
[
{
"change_type": "ADD",
"old_path": null,
"new_path": "licenses/LICENSE-mssql-jdbc-8-2-1.txt",
"diff": "+Copyright(c) 2019 Microsoft Corporation\n+All rights reserved.\n+\n+MIT License\n+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files(the \"Software\"),\n+to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,\n+and / or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions :\n+\n+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n+\n+THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n+IN THE SOFTWARE.\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "licenses/LICENSE-postgresql-42-2-10.txt",
"diff": "+Copyright (c) 1997, PostgreSQL Global Development Group\n+All rights reserved.\n+\n+Redistribution and use in source and binary forms, with or without\n+modification, are permitted provided that the following conditions are met:\n+\n+1. Redistributions of source code must retain the above copyright notice,\n+ this list of conditions and the following disclaimer.\n+2. Redistributions in binary form must reproduce the above copyright notice,\n+ this list of conditions and the following disclaimer in the documentation\n+ and/or other materials provided with the distribution.\n+\n+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE\n+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n+POSSIBILITY OF SUCH DAMAGE.\n\\ No newline at end of file\n"
},
{
"change_type": "DELETE",
"old_path": "licenses/LICENSE-postgresql-9.4-1206-jdbc41.txt",
"new_path": "licenses/LICENSE-postgresql-9.4-1206-jdbc41.txt",
"diff": ""
},
{
"change_type": "RENAME",
"old_path": "licenses/LICENSE-sqlite-jdbc-3.8.11.2.txt",
"new_path": "licenses/LICENSE-sqlite-jdbc-3-32-3.txt",
"diff": ""
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Add licenses for drivers
- MS SQL JDBC driver
- PgSQL JDBC driver
- SQLite JDBC driver
|
11,748 |
12.08.2020 10:02:53
| -7,200 |
ad711b29b529d3c039fdbb4ba22ea1e0f8047c5d
|
add MySQL JDBC driver
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -113,6 +113,7 @@ dependencies {\nruntimeOnly \"net.sourceforge.jtds:jtds:1.3.1\"\nruntimeOnly \"net.snowflake:snowflake-jdbc:3.12.8\"\nruntimeOnly \"com.microsoft.sqlserver:mssql-jdbc:8.2.1.jre11\"\n+ runtimeOnly \"mysql:mysql-connector-java:8.0.21\"\nimplementation \"com.google.guava:guava:27.1-jre\"\nimplementation \"org.slf4j:slf4j-api:$slf4jVersion\"\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "licenses/LICENSE-mysql-jdbc-8-0-21.txt",
"diff": "+\n+ Apache License\n+ Version 2.0, January 2004\n+ http://www.apache.org/licenses/\n+\n+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n+\n+ 1. Definitions.\n+\n+ \"License\" shall mean the terms and conditions for use, reproduction,\n+ and distribution as defined by Sections 1 through 9 of this document.\n+\n+ \"Licensor\" shall mean the copyright owner or entity authorized by\n+ the copyright owner that is granting the License.\n+\n+ \"Legal Entity\" shall mean the union of the acting entity and all\n+ other entities that control, are controlled by, or are under common\n+ control with that entity. For the purposes of this definition,\n+ \"control\" means (i) the power, direct or indirect, to cause the\n+ direction or management of such entity, whether by contract or\n+ otherwise, or (ii) ownership of fifty percent (50%) or more of the\n+ outstanding shares, or (iii) beneficial ownership of such entity.\n+\n+ \"You\" (or \"Your\") shall mean an individual or Legal Entity\n+ exercising permissions granted by this License.\n+\n+ \"Source\" form shall mean the preferred form for making modifications,\n+ including but not limited to software source code, documentation\n+ source, and configuration files.\n+\n+ \"Object\" form shall mean any form resulting from mechanical\n+ transformation or translation of a Source form, including but\n+ not limited to compiled object code, generated documentation,\n+ and conversions to other media types.\n+\n+ \"Work\" shall mean the work of authorship, whether in Source or\n+ Object form, made available under the License, as indicated by a\n+ copyright notice that is included in or attached to the work\n+ (an example is provided in the Appendix below).\n+\n+ \"Derivative Works\" shall mean any work, whether in Source or Object\n+ form, that is based on (or derived from) the Work and for which the\n+ editorial revisions, annotations, elaborations, or other modifications\n+ represent, as a whole, an original work of authorship. For the purposes\n+ of this License, Derivative Works shall not include works that remain\n+ separable from, or merely link (or bind by name) to the interfaces of,\n+ the Work and Derivative Works thereof.\n+\n+ \"Contribution\" shall mean any work of authorship, including\n+ the original version of the Work and any modifications or additions\n+ to that Work or Derivative Works thereof, that is intentionally\n+ submitted to Licensor for inclusion in the Work by the copyright owner\n+ or by an individual or Legal Entity authorized to submit on behalf of\n+ the copyright owner. For the purposes of this definition, \"submitted\"\n+ means any form of electronic, verbal, or written communication sent\n+ to the Licensor or its representatives, including but not limited to\n+ communication on electronic mailing lists, source code control systems,\n+ and issue tracking systems that are managed by, or on behalf of, the\n+ Licensor for the purpose of discussing and improving the Work, but\n+ excluding communication that is conspicuously marked or otherwise\n+ designated in writing by the copyright owner as \"Not a Contribution.\"\n+\n+ \"Contributor\" shall mean Licensor and any individual or Legal Entity\n+ on behalf of whom a Contribution has been received by Licensor and\n+ subsequently incorporated within the Work.\n+\n+ 2. Grant of Copyright License. 
Subject to the terms and conditions of\n+ this License, each Contributor hereby grants to You a perpetual,\n+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n+ copyright license to reproduce, prepare Derivative Works of,\n+ publicly display, publicly perform, sublicense, and distribute the\n+ Work and such Derivative Works in Source or Object form.\n+\n+ 3. Grant of Patent License. Subject to the terms and conditions of\n+ this License, each Contributor hereby grants to You a perpetual,\n+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n+ (except as stated in this section) patent license to make, have made,\n+ use, offer to sell, sell, import, and otherwise transfer the Work,\n+ where such license applies only to those patent claims licensable\n+ by such Contributor that are necessarily infringed by their\n+ Contribution(s) alone or by combination of their Contribution(s)\n+ with the Work to which such Contribution(s) was submitted. If You\n+ institute patent litigation against any entity (including a\n+ cross-claim or counterclaim in a lawsuit) alleging that the Work\n+ or a Contribution incorporated within the Work constitutes direct\n+ or contributory patent infringement, then any patent licenses\n+ granted to You under this License for that Work shall terminate\n+ as of the date such litigation is filed.\n+\n+ 4. Redistribution. You may reproduce and distribute copies of the\n+ Work or Derivative Works thereof in any medium, with or without\n+ modifications, and in Source or Object form, provided that You\n+ meet the following conditions:\n+\n+ (a) You must give any other recipients of the Work or\n+ Derivative Works a copy of this License; and\n+\n+ (b) You must cause any modified files to carry prominent notices\n+ stating that You changed the files; and\n+\n+ (c) You must retain, in the Source form of any Derivative Works\n+ that You distribute, all copyright, patent, trademark, and\n+ attribution notices from the Source form of the Work,\n+ excluding those notices that do not pertain to any part of\n+ the Derivative Works; and\n+\n+ (d) If the Work includes a \"NOTICE\" text file as part of its\n+ distribution, then any Derivative Works that You distribute must\n+ include a readable copy of the attribution notices contained\n+ within such NOTICE file, excluding those notices that do not\n+ pertain to any part of the Derivative Works, in at least one\n+ of the following places: within a NOTICE text file distributed\n+ as part of the Derivative Works; within the Source form or\n+ documentation, if provided along with the Derivative Works; or,\n+ within a display generated by the Derivative Works, if and\n+ wherever such third-party notices normally appear. The contents\n+ of the NOTICE file are for informational purposes only and\n+ do not modify the License. You may add Your own attribution\n+ notices within Derivative Works that You distribute, alongside\n+ or as an addendum to the NOTICE text from the Work, provided\n+ that such additional attribution notices cannot be construed\n+ as modifying the License.\n+\n+ You may add Your own copyright statement to Your modifications and\n+ may provide additional or different license terms and conditions\n+ for use, reproduction, or distribution of Your modifications, or\n+ for any such Derivative Works as a whole, provided Your use,\n+ reproduction, and distribution of the Work otherwise complies with\n+ the conditions stated in this License.\n+\n+ 5. Submission of Contributions. 
Unless You explicitly state otherwise,\n+ any Contribution intentionally submitted for inclusion in the Work\n+ by You to the Licensor shall be under the terms and conditions of\n+ this License, without any additional terms or conditions.\n+ Notwithstanding the above, nothing herein shall supersede or modify\n+ the terms of any separate license agreement you may have executed\n+ with Licensor regarding such Contributions.\n+\n+ 6. Trademarks. This License does not grant permission to use the trade\n+ names, trademarks, service marks, or product names of the Licensor,\n+ except as required for reasonable and customary use in describing the\n+ origin of the Work and reproducing the content of the NOTICE file.\n+\n+ 7. Disclaimer of Warranty. Unless required by applicable law or\n+ agreed to in writing, Licensor provides the Work (and each\n+ Contributor provides its Contributions) on an \"AS IS\" BASIS,\n+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n+ implied, including, without limitation, any warranties or conditions\n+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n+ PARTICULAR PURPOSE. You are solely responsible for determining the\n+ appropriateness of using or redistributing the Work and assume any\n+ risks associated with Your exercise of permissions under this License.\n+\n+ 8. Limitation of Liability. In no event and under no legal theory,\n+ whether in tort (including negligence), contract, or otherwise,\n+ unless required by applicable law (such as deliberate and grossly\n+ negligent acts) or agreed to in writing, shall any Contributor be\n+ liable to You for damages, including any direct, indirect, special,\n+ incidental, or consequential damages of any character arising as a\n+ result of this License or out of the use or inability to use the\n+ Work (including but not limited to damages for loss of goodwill,\n+ work stoppage, computer failure or malfunction, or any and all\n+ other commercial damages or losses), even if such Contributor\n+ has been advised of the possibility of such damages.\n+\n+ 9. Accepting Warranty or Additional Liability. While redistributing\n+ the Work or Derivative Works thereof, You may choose to offer,\n+ and charge a fee for, acceptance of support, warranty, indemnity,\n+ or other liability obligations and/or rights consistent with this\n+ License. However, in accepting such obligations, You may act only\n+ on Your own behalf and on Your sole responsibility, not on behalf\n+ of any other Contributor, and only if You agree to indemnify,\n+ defend, and hold each Contributor harmless for any liability\n+ incurred by, or claims asserted against, such Contributor by reason\n+ of your accepting any such warranty or additional liability.\n+\n+ END OF TERMS AND CONDITIONS\n+\n+ APPENDIX: How to apply the Apache License to your work.\n+\n+ To apply the Apache License to your work, attach the following\n+ boilerplate notice, with the fields enclosed by brackets \"[]\"\n+ replaced with your own identifying information. (Don't include\n+ the brackets!) The text should be enclosed in the appropriate\n+ comment syntax for the file format. 
We also recommend that a\n+ file or class name and description of purpose be included on the\n+ same \"printed page\" as the copyright notice for easier\n+ identification within third-party archives.\n+\n+ Copyright [yyyy] [name of copyright owner]\n+\n+ Licensed under the Apache License, Version 2.0 (the \"License\");\n+ you may not use this file except in compliance with the License.\n+ You may obtain a copy of the License at\n+\n+ http://www.apache.org/licenses/LICENSE-2.0\n+\n+ Unless required by applicable law or agreed to in writing, software\n+ distributed under the License is distributed on an \"AS IS\" BASIS,\n+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ See the License for the specific language governing permissions and\n+ limitations under the License.\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
add MySQL JDBC driver
|
11,723 |
20.10.2020 00:06:28
| -10,800 |
652c8b69263fb2a3b6f7b6c5e48b50535ccdc6a0
|
reduce time spent in poll
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/source/JdbcSourceTask.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/source/JdbcSourceTask.java",
"diff": "@@ -298,10 +298,11 @@ public class JdbcSourceTask extends SourceTask {\n// If not in the middle of an update, wait for next update time\nfinal long nextUpdate = querier.getLastUpdate()\n+ config.getInt(JdbcSourceTaskConfig.POLL_INTERVAL_MS_CONFIG);\n- final long untilNext = nextUpdate - time.milliseconds();\n- if (untilNext > 0) {\n- log.trace(\"Waiting {} ms to poll {} next\", untilNext, querier.toString());\n- time.sleep(untilNext);\n+ final long now = time.milliseconds();\n+ final long sleepMs = Math.min(nextUpdate - now, 100);\n+ if (sleepMs > 0) {\n+ log.trace(\"Waiting {} ms to poll {} next\", nextUpdate - now, querier.toString());\n+ time.sleep(sleepMs);\ncontinue; // Re-check stop flag before continuing\n}\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
reduce time spent in poll
|
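The change above bounds each nap at 100 ms so the source task re-checks its stop flag frequently instead of sleeping through the whole poll interval, which makes shutdown far more responsive. A standalone sketch of that bounded-wait pattern is below; the names are illustrative and it is not the task's actual code.

    import java.util.concurrent.atomic.AtomicBoolean;

    // Standalone sketch of the bounded-wait loop: instead of sleeping until the
    // next scheduled poll in one go, sleep at most 100 ms per iteration and
    // re-check the stop flag in between.
    public class BoundedPollWait {

        static void waitUntil(final long nextUpdateMs, final AtomicBoolean stopped)
                throws InterruptedException {
            while (!stopped.get()) {
                final long now = System.currentTimeMillis();
                final long sleepMs = Math.min(nextUpdateMs - now, 100L);
                if (sleepMs <= 0) {
                    return; // it is time to poll again
                }
                Thread.sleep(sleepMs); // short naps keep shutdown responsive
            }
        }

        public static void main(final String[] args) throws InterruptedException {
            final AtomicBoolean stopped = new AtomicBoolean(false);
            waitUntil(System.currentTimeMillis() + 300, stopped);
            System.out.println("poll interval elapsed");
        }
    }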
11,748 |
24.11.2020 19:53:17
| -3,600 |
e852dab18c52daa95e0137e25ccd1d0685ee3bc9
|
Add 3 new methods for insert, update and upsert
3 new methods, which take an additional parameter, TableDefinition, which contains information about column types for a predefined database table
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/DatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/DatabaseDialect.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2018 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -324,6 +324,24 @@ public interface DatabaseDialect extends ConnectionProvider {\nCollection<ColumnId> nonKeyColumns\n);\n+ /**\n+ * Build the INSERT prepared statement expression for the given table and its columns.\n+ *\n+ * @param table the identifier of the table; may not be null\n+ * @param tableDefinition the table definition; may be null if unknown\n+ * @param keyColumns the identifiers of the columns in the primary/unique key; may not be null\n+ * but may be empty\n+ * @param nonKeyColumns the identifiers of the other columns in the table; may not be null but may\n+ * be empty\n+ * @return the INSERT statement; may not be null\n+ */\n+ default String buildInsertStatement(TableId table,\n+ TableDefinition tableDefinition,\n+ Collection<ColumnId> keyColumns,\n+ Collection<ColumnId> nonKeyColumns) {\n+ return buildInsertStatement(table, keyColumns, nonKeyColumns);\n+ }\n+\n/**\n* Build the UPDATE prepared statement expression for the given table and its columns. Variables\n* for each key column should also appear in the WHERE clause of the statement.\n@@ -341,6 +359,25 @@ public interface DatabaseDialect extends ConnectionProvider {\nCollection<ColumnId> nonKeyColumns\n);\n+ /**\n+ * Build the UPDATE prepared statement expression for the given table and its columns. Variables\n+ * for each key column should also appear in the WHERE clause of the statement.\n+ *\n+ * @param table the identifier of the table; may not be null\n+ * @param tableDefinition the table definition; may be null if unknown\n+ * @param keyColumns the identifiers of the columns in the primary/unique key; may not be null\n+ * but may be empty\n+ * @param nonKeyColumns the identifiers of the other columns in the table; may not be null but may\n+ * be empty\n+ * @return the UPDATE statement; may not be null\n+ */\n+ default String buildUpdateStatement(TableId table,\n+ TableDefinition tableDefinition,\n+ Collection<ColumnId> keyColumns,\n+ Collection<ColumnId> nonKeyColumns) {\n+ return buildUpdateStatement(table, keyColumns, nonKeyColumns);\n+ }\n+\n/**\n* Build the UPSERT or MERGE prepared statement expression to either insert a new record into the\n* given table or update an existing record in that table Variables for each key column should\n@@ -360,6 +397,27 @@ public interface DatabaseDialect extends ConnectionProvider {\nCollection<ColumnId> nonKeyColumns\n);\n+ /**\n+ * Build the UPSERT or MERGE prepared statement expression to either insert a new record into the\n+ * given table or update an existing record in that table Variables for each key column should\n+ * also appear in the WHERE clause of the statement.\n+ *\n+ * @param table the identifier of the table; may not be null\n+ * @param tableDefinition the table definition; may be null if unknown\n+ * @param keyColumns the identifiers of the columns in the primary/unique key; may not be null\n+ * but may be empty\n+ * @param nonKeyColumns the identifiers of the other columns in the table; may not be null but may\n+ * be empty\n+ * @return the upsert/merge statement; may not be null\n+ * @throws UnsupportedOperationException if the dialect does not support upserts\n+ */\n+ default String buildUpsertQueryStatement(TableId table,\n+ TableDefinition tableDefinition,\n+ Collection<ColumnId> keyColumns,\n+ Collection<ColumnId> nonKeyColumns) {\n+ return buildUpsertQueryStatement(table, 
keyColumns, nonKeyColumns);\n+ }\n+\n/**\n* Build the DROP TABLE statement expression for the given table.\n*\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/BufferedRecords.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/BufferedRecords.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2016 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -35,6 +35,7 @@ import io.aiven.connect.jdbc.dialect.DatabaseDialect.StatementBinder;\nimport io.aiven.connect.jdbc.sink.metadata.FieldsMetadata;\nimport io.aiven.connect.jdbc.sink.metadata.SchemaPair;\nimport io.aiven.connect.jdbc.util.ColumnId;\n+import io.aiven.connect.jdbc.util.TableDefinition;\nimport io.aiven.connect.jdbc.util.TableId;\nimport org.slf4j.Logger;\n@@ -92,7 +93,8 @@ public class BufferedRecords {\nfieldsMetadata\n);\n- final String sql = getInsertSql();\n+ final TableDefinition tableDefinition = dbStructure.tableDefinitionFor(tableId, connection);\n+ final String sql = getInsertSql(tableDefinition);\nlog.debug(\n\"{} sql: {}\",\nconfig.insertMode,\n@@ -192,11 +194,12 @@ public class BufferedRecords {\n}\n}\n- private String getInsertSql() {\n+ private String getInsertSql(final TableDefinition tableDefinition) {\nswitch (config.insertMode) {\ncase INSERT:\nreturn dbDialect.buildInsertStatement(\ntableId,\n+ tableDefinition,\nasColumns(fieldsMetadata.keyFieldNames),\nasColumns(fieldsMetadata.nonKeyFieldNames)\n);\n@@ -211,6 +214,7 @@ public class BufferedRecords {\ntry {\nreturn dbDialect.buildUpsertQueryStatement(\ntableId,\n+ tableDefinition,\nasColumns(fieldsMetadata.keyFieldNames),\nasColumns(fieldsMetadata.nonKeyFieldNames)\n);\n@@ -224,6 +228,7 @@ public class BufferedRecords {\ncase UPDATE:\nreturn dbDialect.buildUpdateStatement(\ntableId,\n+ tableDefinition,\nasColumns(fieldsMetadata.keyFieldNames),\nasColumns(fieldsMetadata.nonKeyFieldNames)\n);\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/DbStructure.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/DbStructure.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2016 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -23,6 +23,7 @@ import java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\n+import java.util.Objects;\nimport java.util.Set;\nimport org.apache.kafka.connect.errors.ConnectException;\n@@ -48,6 +49,15 @@ public class DbStructure {\nthis.tableDefns = new TableDefinitions(dbDialect);\n}\n+ public TableDefinition tableDefinitionFor(final TableId tableId, final Connection connection) throws SQLException {\n+ final var tblDefinition = tableDefns.get(connection, tableId);\n+ if (Objects.nonNull(tblDefinition)) {\n+ return tblDefinition;\n+ } else {\n+ return tableDefns.refresh(connection, tableId);\n+ }\n+ }\n+\n/**\n* @return whether a DDL operation was performed\n* @throws SQLException if a DDL operation was deemed necessary but failed\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Add 3 new methods for insert, update and upsert
3 new methods, which take an additional parameter, TableDefinition, which contains information about column types for a predefined database table
|
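The new overloads are introduced as interface default methods that simply delegate to the existing signatures, so any dialect that knows nothing about TableDefinition keeps compiling and behaving as before, while a dialect that needs column metadata can override them. A stripped-down sketch of that interface-evolution pattern, with made-up names:

    // Standalone sketch of extending an interface with a default-method overload:
    // old implementations keep compiling, new ones may use the extra argument.
    public class DefaultOverloadSketch {

        interface Dialect {
            String buildInsert(String table);

            // New overload: extra metadata parameter, default falls back to the old method.
            default String buildInsert(final String table, final String tableMetadata) {
                return buildInsert(table);
            }
        }

        static class LegacyDialect implements Dialect {
            @Override
            public String buildInsert(final String table) {
                return "INSERT INTO " + table + " VALUES (?)";
            }
        }

        static class MetadataAwareDialect extends LegacyDialect {
            @Override
            public String buildInsert(final String table, final String tableMetadata) {
                // a metadata-aware dialect can tweak the statement, e.g. add a cast
                return "INSERT INTO " + table + " VALUES (?::" + tableMetadata + ")";
            }
        }

        public static void main(final String[] args) {
            System.out.println(new LegacyDialect().buildInsert("t", "json"));        // old behaviour
            System.out.println(new MetadataAwareDialect().buildInsert("t", "json")); // new behaviour
        }
    }

Here LegacyDialect ignores the metadata via the default method, while MetadataAwareDialect overrides the new overload, which is the same shape of change the PostgreSQL dialect uses in the next commit.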
11,748 |
24.11.2020 19:55:13
| -3,600 |
d9fe648e268797a501b625c48e9502355df9a8e9
|
JSON/JSONB support for PgSQL
Custom insert, update and upsert PgSQL SQL generators, with support for JSON/JSONB column types
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2018 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -23,6 +23,8 @@ import java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.sql.Types;\nimport java.util.Collection;\n+import java.util.List;\n+import java.util.Objects;\nimport org.apache.kafka.connect.data.Date;\nimport org.apache.kafka.connect.data.Decimal;\n@@ -39,6 +41,7 @@ import io.aiven.connect.jdbc.util.ColumnDefinition;\nimport io.aiven.connect.jdbc.util.ColumnId;\nimport io.aiven.connect.jdbc.util.ExpressionBuilder;\nimport io.aiven.connect.jdbc.util.IdentifierRules;\n+import io.aiven.connect.jdbc.util.TableDefinition;\nimport io.aiven.connect.jdbc.util.TableId;\n/**\n@@ -60,8 +63,11 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n}\n}\n- private static final String JSON_TYPE_NAME = \"json\";\n- private static final String JSONB_TYPE_NAME = \"jsonb\";\n+ protected static final String JSON_TYPE_NAME = \"json\";\n+\n+ protected static final String JSONB_TYPE_NAME = \"jsonb\";\n+\n+ private static final List<String> CAST_TYPES = List.of(JSON_TYPE_NAME, JSONB_TYPE_NAME);\n/**\n* Create a new dialect instance with the given connector configuration.\n@@ -195,7 +201,6 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n}\nswitch (field.schemaType()) {\ncase INT8:\n- return \"SMALLINT\";\ncase INT16:\nreturn \"SMALLINT\";\ncase INT32:\n@@ -218,11 +223,61 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n}\n@Override\n- public String buildUpsertQueryStatement(\n- final TableId table,\n+ public String buildInsertStatement(final TableId table,\n+ final TableDefinition tableDefinition,\nfinal Collection<ColumnId> keyColumns,\n- final Collection<ColumnId> nonKeyColumns\n- ) {\n+ final Collection<ColumnId> nonKeyColumns) {\n+ return expressionBuilder()\n+ .append(\"INSERT INTO \")\n+ .append(table)\n+ .append(\"(\")\n+ .appendList()\n+ .delimitedBy(\",\")\n+ .transformedBy(ExpressionBuilder.columnNames())\n+ .of(keyColumns, nonKeyColumns)\n+ .append(\") VALUES(\")\n+ .appendList()\n+ .delimitedBy(\",\")\n+ .transformedBy(transformColumn(tableDefinition))\n+ .of(keyColumns, nonKeyColumns)\n+ .append(\")\")\n+ .toString();\n+ }\n+\n+ @Override\n+ public String buildUpdateStatement(final TableId table,\n+ final TableDefinition tableDefinition,\n+ final Collection<ColumnId> keyColumns,\n+ final Collection<ColumnId> nonKeyColumns) {\n+ final ExpressionBuilder.Transform<ColumnId> columnTransform = (builder, columnId) -> {\n+ builder.append(columnId.name())\n+ .append(\"=?\")\n+ .append(cast(tableDefinition, columnId));\n+ };\n+\n+ final ExpressionBuilder builder = expressionBuilder();\n+ builder.append(\"UPDATE \")\n+ .append(table)\n+ .append(\" SET \")\n+ .appendList()\n+ .delimitedBy(\",\")\n+ .transformedBy(columnTransform)\n+ .of(nonKeyColumns);\n+ if (!keyColumns.isEmpty()) {\n+ builder.append(\" WHERE \");\n+ builder.appendList()\n+ .delimitedBy(\" AND \")\n+ .transformedBy(ExpressionBuilder.columnNamesWith(\" = ?\"))\n+ .of(keyColumns);\n+ }\n+ return builder.toString();\n+ }\n+\n+ @Override\n+ public String buildUpsertQueryStatement(final TableId table,\n+ final TableDefinition tableDefinition,\n+ final Collection<ColumnId> keyColumns,\n+ final Collection<ColumnId> nonKeyColumns) {\nfinal ExpressionBuilder.Transform<ColumnId> transform = (builder, col) -> {\nbuilder.appendIdentifier(col.name())\n.append(\"=EXCLUDED.\")\n@@ 
-230,17 +285,20 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n};\nfinal ExpressionBuilder builder = expressionBuilder();\n- builder.append(\"INSERT INTO \");\n- builder.append(table);\n- builder.append(\" (\");\n- builder.appendList()\n+ builder.append(\"INSERT INTO \")\n+ .append(table)\n+ .append(\" (\")\n+ .appendList()\n.delimitedBy(\",\")\n.transformedBy(ExpressionBuilder.columnNames())\n- .of(keyColumns, nonKeyColumns);\n- builder.append(\") VALUES (\");\n- builder.appendMultiple(\",\", \"?\", keyColumns.size() + nonKeyColumns.size());\n- builder.append(\") ON CONFLICT (\");\n- builder.appendList()\n+ .of(keyColumns, nonKeyColumns)\n+ .append(\") VALUES (\")\n+ .appendList()\n+ .delimitedBy(\",\")\n+ .transformedBy(transformColumn(tableDefinition))\n+ .of(keyColumns, nonKeyColumns)\n+ .append(\") ON CONFLICT (\")\n+ .appendList()\n.delimitedBy(\",\")\n.transformedBy(ExpressionBuilder.columnNames())\n.of(keyColumns);\n@@ -257,4 +315,24 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\nreturn builder.toString();\n}\n+ private ExpressionBuilder.Transform<ColumnId> transformColumn(final TableDefinition tableDefinition) {\n+ return (builder, column) -> {\n+ builder.append(\"?\");\n+ builder.append(cast(tableDefinition, column));\n+ };\n+ }\n+\n+ private String cast(final TableDefinition tableDfn, final ColumnId columnId) {\n+ if (Objects.nonNull(tableDfn)) {\n+ final var columnDef = tableDfn.definitionForColumn(columnId.name());\n+ final var typeName = columnDef.typeName();\n+ if (Objects.nonNull(typeName)) {\n+ if (CAST_TYPES.contains(typeName.toLowerCase())) {\n+ return \"::\" + typeName.toLowerCase();\n+ }\n+ }\n+ }\n+ return \"\";\n+ }\n+\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2020 Aiven Oy\n* Copyright 2017 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\npackage io.aiven.connect.jdbc.dialect;\n+import java.sql.Types;\nimport java.util.List;\nimport org.apache.kafka.connect.data.Date;\nimport org.apache.kafka.connect.data.Decimal;\n+import org.apache.kafka.connect.data.Field;\nimport org.apache.kafka.connect.data.Schema;\nimport org.apache.kafka.connect.data.Schema.Type;\nimport org.apache.kafka.connect.data.Time;\nimport org.apache.kafka.connect.data.Timestamp;\n+import io.aiven.connect.jdbc.source.ColumnMapping;\n+import io.aiven.connect.jdbc.util.ColumnDefinition;\n+import io.aiven.connect.jdbc.util.ColumnId;\nimport io.aiven.connect.jdbc.util.TableId;\nimport org.junit.Test;\n+import static org.junit.Assert.assertNotNull;\n+\npublic class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDatabaseDialect> {\n@Override\n@@ -37,6 +44,53 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\nreturn new PostgreSqlDatabaseDialect(sourceConfigWithUrl(\"jdbc:postgresql://something\"));\n}\n+ @Test\n+ public void shouldCreateConverterForJdbcTypes() {\n+ assertColumnConverter(\n+ Types.OTHER,\n+ PostgreSqlDatabaseDialect.JSON_TYPE_NAME,\n+ Schema.STRING_SCHEMA,\n+ String.class\n+ );\n+ assertColumnConverter(\n+ Types.OTHER,\n+ PostgreSqlDatabaseDialect.JSONB_TYPE_NAME,\n+ Schema.STRING_SCHEMA,\n+ String.class\n+ );\n+ }\n+\n+ protected <T> void assertColumnConverter(final int jdbcType,\n+ final String typeName,\n+ final Schema schemaType,\n+ final Class<T> clazz) {\n+ final var columnDefinition = new ColumnDefinition(\n+ new ColumnId(\n+ new TableId(\n+ \"test_catalog\",\n+ \"test\",\n+ \"test_table\"\n+ ),\n+ \"column\"\n+ ),\n+ jdbcType,\n+ typeName,\n+ clazz.getCanonicalName(),\n+ ColumnDefinition.Nullability.NOT_NULL,\n+ ColumnDefinition.Mutability.UNKNOWN,\n+ 0, 0, false, 1, false,\n+ false, false, false, false\n+ );\n+ assertNotNull(\n+ dialect.createColumnConverter(\n+ new ColumnMapping(\n+ columnDefinition, 1,\n+ new Field(\"a\", 1, schemaType)\n+ )\n+ )\n+ );\n+ }\n+\n@Test\npublic void shouldMapPrimitiveSchemaTypeToSqlTypes() {\nassertPrimitiveMapping(Type.INT8, \"SMALLINT\");\n@@ -109,7 +163,7 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\n@Test\npublic void shouldBuildUpsertStatement() {\nfinal String expected = readQueryResourceForThisTest(\"upsert0\");\n- final String actual = dialect.buildUpsertQueryStatement(tableId, pkColumns, columnsAtoD);\n+ final String actual = dialect.buildUpsertQueryStatement(tableId, null, pkColumns, columnsAtoD);\nassertQueryEquals(expected, actual);\n}\n@@ -148,7 +202,7 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\nfinal String expected = readQueryResourceForThisTest(\"upsert1\");\nfinal TableId customer = tableId(\"Customer\");\nfinal String actual = dialect.buildUpsertQueryStatement(\n- customer,\n+ customer, null,\ncolumns(customer, \"id\"),\ncolumns(customer, \"name\", \"salary\", \"address\")\n);\n@@ -160,7 +214,7 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\nfinal String expected = readQueryResourceForThisTest(\"upsert2\");\nfinal TableId customer = tableId(\"Customer\");\nfinal String actual = dialect.buildUpsertQueryStatement(\n- customer,\n+ customer, null,\ncolumns(customer, \"id\", \"name\", \"salary\", \"address\"),\ncolumns(customer)\n);\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
JSON/JSONB support for PgSQL
Custom insert, update and upsert PgSQL SQL generators, with support for JSON/JSONB column types
|
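With this change the PostgreSQL dialect appends an explicit cast to the bind placeholder whenever the destination column is json or jsonb, so a statement like INSERT INTO tbl(data) VALUES(?::jsonb) accepts a plain string parameter. A standalone sketch of that placeholder generation (illustrative names, not the connector's API):

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    // Standalone sketch: build "?::json" / "?::jsonb" placeholders for columns whose
    // database type needs an explicit cast, plain "?" otherwise.
    public class CastPlaceholderSketch {

        static String placeholder(final String columnTypeName) {
            final String type = columnTypeName == null ? "" : columnTypeName.toLowerCase();
            return ("json".equals(type) || "jsonb".equals(type)) ? "?::" + type : "?";
        }

        static String insertStatement(final String table, final Map<String, String> columnsToTypes) {
            final String columns = String.join(",", columnsToTypes.keySet());
            final String values = columnsToTypes.values().stream()
                    .map(CastPlaceholderSketch::placeholder)
                    .collect(Collectors.joining(","));
            return "INSERT INTO " + table + "(" + columns + ") VALUES(" + values + ")";
        }

        public static void main(final String[] args) {
            final Map<String, String> cols = new LinkedHashMap<>();
            cols.put("pk", "int4");
            cols.put("json_col", "json");
            cols.put("jsonb_col", "jsonb");
            // INSERT INTO cast_types_table(pk,json_col,jsonb_col) VALUES(?,?::json,?::jsonb)
            System.out.println(insertStatement("cast_types_table", cols));
        }
    }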
11,748 |
05.02.2021 14:17:11
| -3,600 |
e20f73a6d24ed44a8bd7d86697f290bfb87ce7e8
|
Add support for UUID column type
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"diff": "@@ -25,6 +25,7 @@ import java.sql.Types;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Objects;\n+import java.util.UUID;\nimport org.apache.kafka.connect.data.Date;\nimport org.apache.kafka.connect.data.Decimal;\n@@ -67,7 +68,9 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\nprotected static final String JSONB_TYPE_NAME = \"jsonb\";\n- private static final List<String> CAST_TYPES = List.of(JSON_TYPE_NAME, JSONB_TYPE_NAME);\n+ protected static final String UUID_TYPE_NAME = \"uuid\";\n+\n+ private static final List<String> CAST_TYPES = List.of(JSON_TYPE_NAME, JSONB_TYPE_NAME, UUID_TYPE_NAME);\n/**\n* Create a new dialect instance with the given connector configuration.\n@@ -133,6 +136,13 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n);\nreturn fieldName;\n}\n+ if (isUuidType(columnDefn)) {\n+ builder.field(\n+ fieldName,\n+ columnDefn.isOptional() ? Schema.OPTIONAL_STRING_SCHEMA : Schema.STRING_SCHEMA\n+ );\n+ return fieldName;\n+ }\nbreak;\n}\ndefault:\n@@ -168,6 +178,9 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\nif (isJsonType(columnDefn)) {\nreturn rs -> rs.getString(col);\n}\n+ if (isUuidType(columnDefn)) {\n+ return rs -> rs.getString(col);\n+ }\nbreak;\n}\ndefault:\n@@ -183,6 +196,10 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\nreturn JSON_TYPE_NAME.equalsIgnoreCase(typeName) || JSONB_TYPE_NAME.equalsIgnoreCase(typeName);\n}\n+ protected boolean isUuidType(final ColumnDefinition columnDefn) {\n+ return UUID.class.getName().equals(columnDefn.classNameForType());\n+ }\n+\n@Override\nprotected String getSqlType(final SinkRecordField field) {\nif (field.schemaName() != null) {\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/dialect/BaseDialectTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/dialect/BaseDialectTest.java",
"diff": "@@ -108,6 +108,7 @@ public abstract class BaseDialectTest<T extends GenericDatabaseDialect> {\nprotected ColumnId columnB;\nprotected ColumnId columnC;\nprotected ColumnId columnD;\n+\nprotected List<ColumnId> pkColumns;\nprotected List<ColumnId> columnsAtoD;\nprotected List<SinkRecordField> sinkRecordFields;\n@@ -138,6 +139,7 @@ public abstract class BaseDialectTest<T extends GenericDatabaseDialect> {\ncolumnB = new ColumnId(tableId, \"columnB\");\ncolumnC = new ColumnId(tableId, \"columnC\");\ncolumnD = new ColumnId(tableId, \"columnD\");\n+\npkColumns = Arrays.asList(columnPK1, columnPK2);\ncolumnsAtoD = Arrays.asList(columnA, columnB, columnC, columnD);\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Add support for UUID column type
|
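This commit extends the same casting mechanism to uuid columns and recognises them by the Java class the JDBC driver reports for the column (java.util.UUID), reading the value back as a plain string. A small standalone sketch of that detection, with illustrative names:

    import java.util.UUID;

    // Standalone sketch: a column is treated as UUID when the driver reports
    // java.util.UUID as its Java class; its value is then handled as a string
    // and bound with an explicit ?::uuid cast on write.
    public class UuidColumnSketch {

        static boolean isUuidColumn(final String classNameForType) {
            return UUID.class.getName().equals(classNameForType);
        }

        static String placeholderFor(final String classNameForType) {
            return isUuidColumn(classNameForType) ? "?::uuid" : "?";
        }

        public static void main(final String[] args) {
            System.out.println(isUuidColumn("java.util.UUID"));     // true
            System.out.println(placeholderFor("java.util.UUID"));   // ?::uuid
            System.out.println(placeholderFor("java.lang.String")); // ?
        }
    }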
11,748 |
05.02.2021 14:21:11
| -3,600 |
54f686cd719ea646de9ced3e108078b032f449ac
|
Extended tests
1. Added test for PgSQL UUID type
2. Added tests for INSERT, UPDATE and UPSERT SQL query generation for cast types like UUID, JSON and JSONB, which were previously missing
|
[
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"diff": "@@ -19,6 +19,7 @@ package io.aiven.connect.jdbc.dialect;\nimport java.sql.Types;\nimport java.util.List;\n+import java.util.UUID;\nimport org.apache.kafka.connect.data.Date;\nimport org.apache.kafka.connect.data.Decimal;\n@@ -31,14 +32,59 @@ import org.apache.kafka.connect.data.Timestamp;\nimport io.aiven.connect.jdbc.source.ColumnMapping;\nimport io.aiven.connect.jdbc.util.ColumnDefinition;\nimport io.aiven.connect.jdbc.util.ColumnId;\n+import io.aiven.connect.jdbc.util.TableDefinition;\nimport io.aiven.connect.jdbc.util.TableId;\n+import org.junit.Before;\nimport org.junit.Test;\nimport static org.junit.Assert.assertNotNull;\npublic class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDatabaseDialect> {\n+ private TableId castTypesTableId;\n+\n+ private ColumnId castTypesPkColumn;\n+\n+ private ColumnId columnUuid;\n+\n+ private ColumnId columnJson;\n+\n+ private ColumnId columnJsonb;\n+\n+ private TableDefinition castTypesTableDefinition;\n+\n+ @Before\n+ public void setupTest() {\n+ castTypesTableId = new TableId(null, null, \"cast_types_table\");\n+ castTypesPkColumn = new ColumnId(castTypesTableId, \"pk\");\n+ columnUuid = new ColumnId(castTypesTableId, \"uuid_col\");\n+ columnJson = new ColumnId(castTypesTableId, \"json_col\");\n+ columnJsonb = new ColumnId(castTypesTableId, \"jsonb_col\");\n+ castTypesTableDefinition =\n+ new TableDefinition(\n+ castTypesTableId,\n+ List.of(\n+ createColumnDefinition(\n+ castTypesPkColumn,\n+ Types.INTEGER,\n+ \"INT\", Integer.class, true),\n+ createColumnDefinition(\n+ columnUuid,\n+ Types.OTHER,\n+ PostgreSqlDatabaseDialect.UUID_TYPE_NAME, UUID.class),\n+ createColumnDefinition(\n+ columnJson,\n+ Types.OTHER,\n+ PostgreSqlDatabaseDialect.JSON_TYPE_NAME, String.class),\n+ createColumnDefinition(\n+ columnJsonb,\n+ Types.OTHER,\n+ PostgreSqlDatabaseDialect.JSONB_TYPE_NAME, String.class)\n+ )\n+ );\n+ }\n+\n@Override\nprotected PostgreSqlDatabaseDialect createDialect() {\nreturn new PostgreSqlDatabaseDialect(sourceConfigWithUrl(\"jdbc:postgresql://something\"));\n@@ -60,11 +106,24 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\n);\n}\n+ @Test\n+ public void shouldCreateConverterForUuidType() {\n+ assertColumnConverter(\n+ Types.OTHER,\n+ PostgreSqlDatabaseDialect.UUID_TYPE_NAME,\n+ Schema.STRING_SCHEMA,\n+ UUID.class\n+ );\n+ }\n+\nprotected <T> void assertColumnConverter(final int jdbcType,\nfinal String typeName,\nfinal Schema schemaType,\nfinal Class<T> clazz) {\n- final var columnDefinition = new ColumnDefinition(\n+ assertNotNull(\n+ dialect.createColumnConverter(\n+ new ColumnMapping(\n+ createColumnDefinition(\nnew ColumnId(\nnew TableId(\n\"test_catalog\",\n@@ -72,19 +131,7 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\n\"test_table\"\n),\n\"column\"\n- ),\n- jdbcType,\n- typeName,\n- clazz.getCanonicalName(),\n- ColumnDefinition.Nullability.NOT_NULL,\n- ColumnDefinition.Mutability.UNKNOWN,\n- 0, 0, false, 1, false,\n- false, false, false, false\n- );\n- assertNotNull(\n- dialect.createColumnConverter(\n- new ColumnMapping(\n- columnDefinition, 1,\n+ ), jdbcType, typeName, clazz), 1,\nnew Field(\"a\", 1, schemaType)\n)\n)\n@@ -160,6 +207,51 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\nassertStatements(expected, actual);\n}\n+ @Test\n+ public void shouldBuildInsertStatement() {\n+ final String expected = readQueryResourceForThisTest(\"insert0\");\n+ final String actual = dialect.buildInsertStatement(\n+ 
castTypesTableId,\n+ castTypesTableDefinition,\n+ List.of(castTypesPkColumn),\n+ List.of(columnUuid, columnJson, columnJsonb));\n+ assertQueryEquals(expected, actual);\n+ }\n+\n+ @Test\n+ public void shouldBuildUpdateStatement() {\n+ final String expected = readQueryResourceForThisTest(\"update0\");\n+ final String actual = dialect.buildUpdateStatement(\n+ castTypesTableId,\n+ castTypesTableDefinition,\n+ List.of(castTypesPkColumn),\n+ List.of(columnUuid, columnJson, columnJsonb));\n+ assertQueryEquals(expected, actual);\n+ }\n+\n+ private <T> ColumnDefinition createColumnDefinition(final ColumnId columnId,\n+ final int jdbcType,\n+ final String typeName,\n+ final Class<T> clazz) {\n+ return createColumnDefinition(columnId, jdbcType, typeName, clazz, false);\n+ }\n+\n+ private <T> ColumnDefinition createColumnDefinition(final ColumnId columnId,\n+ final int jdbcType,\n+ final String typeName,\n+ final Class<T> clazz, final boolean isPk) {\n+ return new ColumnDefinition(\n+ columnId,\n+ jdbcType,\n+ typeName,\n+ clazz.getName(),\n+ ColumnDefinition.Nullability.NOT_NULL,\n+ ColumnDefinition.Mutability.UNKNOWN,\n+ 0, 0, false, 1, false,\n+ false, false, false, isPk\n+ );\n+ }\n+\n@Test\npublic void shouldBuildUpsertStatement() {\nfinal String expected = readQueryResourceForThisTest(\"upsert0\");\n@@ -167,6 +259,17 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\nassertQueryEquals(expected, actual);\n}\n+ @Test\n+ public void shouldBuildUpsertStatementForCastTypes() {\n+ final String expected = readQueryResourceForThisTest(\"upsert_cast_types0\");\n+ final String actual = dialect.buildUpsertQueryStatement(\n+ castTypesTableId,\n+ castTypesTableDefinition,\n+ List.of(castTypesPkColumn),\n+ List.of(columnUuid, columnJson, columnJsonb));\n+ assertQueryEquals(expected, actual);\n+ }\n+\n@Test\npublic void createOneColNoPk() {\nfinal String expected = readQueryResourceForThisTest(\"create_table_one_col_no_pk\");\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/insert0-nonquoted.txt",
"diff": "+INSERT INTO cast_types_table(pk,uuid_col,json_col,jsonb_col) VALUES(?,?::uuid,?::json,?::jsonb)\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/insert0-quoted.txt",
"diff": "+INSERT INTO \"cast_types_table\"(\"pk\",\"uuid_col\",\"json_col\",\"jsonb_col\") VALUES(?,?::uuid,?::json,?::jsonb)\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/update0-nonquoted.txt",
"diff": "+UPDATE cast_types_table SET uuid_col=?::uuid,json_col=?::json,jsonb_col=?::jsonb WHERE pk = ?\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/update0-quoted.txt",
"diff": "+UPDATE \"cast_types_table\" SET uuid_col=?::uuid,json_col=?::json,jsonb_col=?::jsonb WHERE \"pk\" = ?\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/upsert_cast_types0-nonquoted.txt",
"diff": "+INSERT INTO cast_types_table (pk,uuid_col,json_col,jsonb_col) VALUES (?,?::uuid,?::json,?::jsonb) ON CONFLICT (pk) DO UPDATE SET uuid_col=EXCLUDED.uuid_col,json_col=EXCLUDED.json_col,jsonb_col=EXCLUDED.jsonb_col\n\\ No newline at end of file\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/test/resources/io.aiven.connect.jdbc.dialect/PostgreSqlDatabaseDialectTest/upsert_cast_types0-quoted.txt",
"diff": "+INSERT INTO \"cast_types_table\" (\"pk\",\"uuid_col\",\"json_col\",\"jsonb_col\") VALUES (?,?::uuid,?::json,?::jsonb) ON CONFLICT (\"pk\") DO UPDATE SET \"uuid_col\"=EXCLUDED.\"uuid_col\",\"json_col\"=EXCLUDED.\"json_col\",\"jsonb_col\"=EXCLUDED.\"jsonb_col\"\n\\ No newline at end of file\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Extended tests
1. Added test for PgSQL UUID type
2. Added tests for INSERT, UPDATE and UPSERT SQL query generation for cast types like UUID, JSON and JSONB, which were previously missing
|
11,748 |
09.02.2021 10:58:25
| -3,600 |
816a254bc75c8fb28ef8f3c4b082e5f15b3ef497
|
Setup jdk version in workflow
The default JDK version in the workflow build is 8 while we use 11.
This change sets the JDK version to 11 for the build
|
[
{
"change_type": "MODIFY",
"old_path": ".github/workflows/create_release.yml",
"new_path": ".github/workflows/create_release.yml",
"diff": "@@ -12,6 +12,11 @@ jobs:\nname: Create Release\nruns-on: ubuntu-latest\nsteps:\n+ - name: Setup Java SDK\n+ uses: actions/setup-java@v1.4.3\n+ with:\n+ java-version: 11\n+\n- name: Checkout code\nuses: actions/checkout@v2\nwith:\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Setup jdk version in workflow
The default JDK version in the workflow build is 8 while we use 11.
This change sets the JDK version to 11 for the build
|
11,732 |
12.02.2021 12:59:49
| -7,200 |
a275f66b0ccf5825424facd14f67576d790ca81b
|
Fix release workflow to use correct commit
By default (omitting commitish) actions/create-release uses the
repository's default branch, i.e. the last commit on the master branch. Fix
this by explicitly defining the commit to use in the release.
|
[
{
"change_type": "MODIFY",
"old_path": ".github/workflows/create_release.yml",
"new_path": ".github/workflows/create_release.yml",
"diff": "@@ -56,6 +56,7 @@ jobs:\nwith:\ntag_name: \"v${{ env.version }}\"\nrelease_name: \"v${{ env.version }}\"\n+ commitish: ${{ github.event.inputs.commit_hash }}\nbody: |\n*Fill in*\ndraft: true\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Fix release workflow to use correct commit
By default (omitting commitish) actions/create-release uses the
repository's default branch, i.e. the last commit on the master branch. Fix
this by explicitly defining the commit to use in the release.
|
11,732 |
12.02.2021 13:19:15
| -7,200 |
c20098a880eb9f8a9e69e4d82154eabd7e7a6fec
|
Delete no longer used TravisCI badge
|
[
{
"change_type": "MODIFY",
"old_path": "README.md",
"new_path": "README.md",
"diff": "# Kafka Connect JDBC Connector\n-[](https://travis-ci.org/aiven/aiven-kafka-connect-jdbc)\n-\nThis repository includes a Sink and Source\n[Kafka Connect](http://kafka.apache.org/documentation.html#connect)\nconnectors for JDBC-compatible databases.\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Delete no longer used TravisCI badge
|
11,732 |
12.02.2021 13:25:38
| -7,200 |
116a0bc3424abe99a3be22aa9cac5958b4f847fa
|
Remove cruft version.txt
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -58,7 +58,7 @@ distributions {\ninto(\"/\") {\nfrom projectDir\n- include \"version.txt\", \"README*\", \"LICENSE*\", \"NOTICE*\", \"licenses/\"\n+ include \"README*\", \"LICENSE*\", \"NOTICE*\", \"licenses/\"\ninclude \"config/\"\n}\n}\n"
},
{
"change_type": "DELETE",
"old_path": "version.txt",
"new_path": null,
"diff": "-##This file must be updated at package time to have a valid package##\n-invalid\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Remove cruft version.txt
|
11,748 |
22.03.2021 10:24:43
| -3,600 |
33f944721b8d7fbdb6e3f778cdcae44569fbc8cf
|
Record fields in original order
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/metadata/FieldsMetadata.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/metadata/FieldsMetadata.java",
"diff": "package io.aiven.connect.jdbc.sink.metadata;\n+import java.util.ArrayList;\n+import java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Iterator;\n+import java.util.LinkedHashMap;\nimport java.util.LinkedHashSet;\nimport java.util.List;\nimport java.util.Map;\n@@ -130,7 +133,33 @@ public class FieldsMetadata {\n);\n}\n- return new FieldsMetadata(keyFieldNames, nonKeyFieldNames, allFields);\n+ final var allFieldsOrdered = new LinkedHashMap<String, SinkRecordField>();\n+ for (final var fieldName : JdbcSinkConfig.DEFAULT_KAFKA_PK_NAMES) {\n+ if (allFields.containsKey(fieldName)) {\n+ allFieldsOrdered.put(fieldName, allFields.get(fieldName));\n+ }\n+ }\n+\n+ if (valueSchema != null) {\n+ for (final var field : valueSchema.fields()) {\n+ final var fieldName = field.name();\n+ if (allFields.containsKey(fieldName)) {\n+ allFieldsOrdered.put(fieldName, allFields.get(fieldName));\n+ }\n+ }\n+ }\n+\n+ if (allFieldsOrdered.size() < allFields.size()) {\n+ final var fieldKeys = new ArrayList<>(allFields.keySet());\n+ Collections.sort(fieldKeys);\n+ for (final var fieldName : fieldKeys) {\n+ if (!allFieldsOrdered.containsKey(fieldName)) {\n+ allFieldsOrdered.put(fieldName, allFields.get(fieldName));\n+ }\n+ }\n+ }\n+\n+ return new FieldsMetadata(keyFieldNames, nonKeyFieldNames, allFieldsOrdered);\n}\nprivate static void extractKafkaPk(\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/sink/metadata/FieldsMetadataTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/sink/metadata/FieldsMetadataTest.java",
"diff": "package io.aiven.connect.jdbc.sink.metadata;\n-import java.util.Arrays;\nimport java.util.Collections;\n-import java.util.HashSet;\nimport java.util.List;\nimport java.util.Set;\n@@ -29,6 +27,7 @@ import org.apache.kafka.connect.errors.ConnectException;\nimport io.aiven.connect.jdbc.sink.JdbcSinkConfig;\n+import com.google.common.collect.Lists;\nimport org.junit.Test;\nimport static org.junit.Assert.assertEquals;\n@@ -46,7 +45,7 @@ public class FieldsMetadataTest {\npublic void valueSchemaMustBePresentForPkModeRecordValue() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_VALUE,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nSIMPLE_PRIMITIVE_SCHEMA,\nnull\n);\n@@ -56,7 +55,7 @@ public class FieldsMetadataTest {\npublic void valueSchemaMustBeStructIfPresent() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.KAFKA,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nSIMPLE_PRIMITIVE_SCHEMA,\nSIMPLE_PRIMITIVE_SCHEMA\n);\n@@ -65,10 +64,10 @@ public class FieldsMetadataTest {\n@Test\npublic void missingValueSchemaCanBeOk() {\nassertEquals(\n- new HashSet<>(Collections.singletonList(\"name\")),\n+ Set.of(\"name\"),\nextract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_KEY,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nSIMPLE_STRUCT_SCHEMA,\nnull\n).allFields.keySet()\n@@ -77,13 +76,13 @@ public class FieldsMetadataTest {\n// this one is a bit weird, only columns being inserted would be kafka coords...\n// but not sure should explicitly disallow!\nassertEquals(\n- new HashSet<>(Arrays.asList(\"__connect_topic\", \"__connect_partition\", \"__connect_offset\")),\n- extract(\n+ List.of(\"__connect_topic\", \"__connect_partition\", \"__connect_offset\"),\n+ Lists.newArrayList(extract(\nJdbcSinkConfig.PrimaryKeyMode.KAFKA,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nnull,\nnull\n- ).allFields.keySet()\n+ ).allFields.keySet())\n);\n}\n@@ -91,7 +90,7 @@ public class FieldsMetadataTest {\npublic void metadataMayNotBeEmpty() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.NONE,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nnull,\nnull\n);\n@@ -101,14 +100,15 @@ public class FieldsMetadataTest {\npublic void kafkaPkMode() {\nfinal FieldsMetadata metadata = extract(\nJdbcSinkConfig.PrimaryKeyMode.KAFKA,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nnull,\nSIMPLE_STRUCT_SCHEMA\n);\nassertEquals(\n- new HashSet<>(Arrays.asList(\"__connect_topic\", \"__connect_partition\", \"__connect_offset\")),\n- metadata.keyFieldNames);\n- assertEquals(Collections.singleton(\"name\"), metadata.nonKeyFieldNames);\n+ List.of(\"__connect_topic\", \"__connect_partition\", \"__connect_offset\"),\n+ Lists.newArrayList(metadata.keyFieldNames)\n+ );\n+ assertEquals(Set.of(\"name\"), metadata.nonKeyFieldNames);\nfinal SinkRecordField topicField = metadata.allFields.get(\"__connect_topic\");\nassertEquals(Schema.Type.STRING, topicField.schemaType());\n@@ -128,14 +128,14 @@ public class FieldsMetadataTest {\n@Test\npublic void kafkaPkModeCustomNames() {\n- final List<String> customKeyNames = Arrays.asList(\"the_topic\", \"the_partition\", \"the_offset\");\n+ final List<String> customKeyNames = List.of(\"the_topic\", \"the_partition\", \"the_offset\");\nfinal FieldsMetadata metadata = extract(\nJdbcSinkConfig.PrimaryKeyMode.KAFKA,\ncustomKeyNames,\nnull,\nSIMPLE_STRUCT_SCHEMA\n);\n- assertEquals(new HashSet<>(customKeyNames), metadata.keyFieldNames);\n+ assertEquals(customKeyNames, 
Lists.newArrayList(metadata.keyFieldNames));\nassertEquals(Collections.singleton(\"name\"), metadata.nonKeyFieldNames);\n}\n@@ -143,7 +143,7 @@ public class FieldsMetadataTest {\npublic void kafkaPkModeBadFieldSpec() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.KAFKA,\n- Collections.singletonList(\"lone\"),\n+ List.of(\"lone\"),\nnull,\nSIMPLE_STRUCT_SCHEMA\n);\n@@ -158,7 +158,7 @@ public class FieldsMetadataTest {\npublic void recordKeyPkModePrimitiveKey() {\nfinal FieldsMetadata metadata = extract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_KEY,\n- Collections.singletonList(\"the_pk\"),\n+ List.of(\"the_pk\"),\nSIMPLE_PRIMITIVE_SCHEMA,\nSIMPLE_STRUCT_SCHEMA\n);\n@@ -180,7 +180,7 @@ public class FieldsMetadataTest {\npublic void recordKeyPkModeWithPrimitiveKeyButMultiplePkFieldsSpecified() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_KEY,\n- Arrays.asList(\"pk1\", \"pk2\"),\n+ List.of(\"pk1\", \"pk2\"),\nSIMPLE_PRIMITIVE_SCHEMA,\nSIMPLE_STRUCT_SCHEMA\n);\n@@ -190,7 +190,7 @@ public class FieldsMetadataTest {\npublic void recordKeyPkModeButKeySchemaMissing() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_KEY,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nnull,\nSIMPLE_STRUCT_SCHEMA\n);\n@@ -200,7 +200,7 @@ public class FieldsMetadataTest {\npublic void recordKeyPkModeButKeySchemaAsNonStructCompositeType() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_KEY,\n- Collections.<String>emptyList(),\n+ Collections.emptyList(),\nSIMPLE_MAP_SCHEMA,\nSIMPLE_STRUCT_SCHEMA\n);\n@@ -220,7 +220,7 @@ public class FieldsMetadataTest {\npublic void recordValuePkModeWithMissingPkField() {\nextract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_VALUE,\n- Collections.singletonList(\"nonexistent\"),\n+ List.of(\"nonexistent\"),\nSIMPLE_PRIMITIVE_SCHEMA,\nSIMPLE_STRUCT_SCHEMA\n);\n@@ -235,7 +235,7 @@ public class FieldsMetadataTest {\nSIMPLE_STRUCT_SCHEMA\n);\n- assertEquals(Collections.singleton(\"name\"), metadata.keyFieldNames);\n+ assertEquals(Set.of(\"name\"), metadata.keyFieldNames);\nassertEquals(Collections.emptySet(), metadata.nonKeyFieldNames);\nassertEquals(Schema.Type.STRING, metadata.allFields.get(\"name\").schemaType());\n@@ -256,13 +256,64 @@ public class FieldsMetadataTest {\nfinal FieldsMetadata metadata = extract(\nJdbcSinkConfig.PrimaryKeyMode.RECORD_VALUE,\nCollections.singletonList(\"field1\"),\n- new HashSet<>(Arrays.asList(\"field2\", \"field4\")),\n+ Set.of(\"field2\", \"field4\"),\n+ null,\n+ valueSchema\n+ );\n+\n+ assertEquals(List.of(\"field1\"), Lists.newArrayList(metadata.keyFieldNames));\n+ assertEquals(List.of(\"field2\", \"field4\"), Lists.newArrayList(metadata.nonKeyFieldNames));\n+ }\n+\n+ @Test\n+ public void recordValuePkModeWithFieldsInOriginalOrdering() {\n+ final Schema valueSchema =\n+ SchemaBuilder.struct()\n+ .field(\"field4\", Schema.INT64_SCHEMA)\n+ .field(\"field2\", Schema.INT64_SCHEMA)\n+ .field(\"field1\", Schema.INT64_SCHEMA)\n+ .field(\"field3\", Schema.INT64_SCHEMA)\n+ .build();\n+\n+ var metadata = extract(\n+ JdbcSinkConfig.PrimaryKeyMode.RECORD_VALUE,\n+ Collections.singletonList(\"field4\"),\n+ Set.of(\"field3\", \"field1\", \"field2\"),\nnull,\nvalueSchema\n);\n- assertEquals(Collections.singleton(\"field1\"), metadata.keyFieldNames);\n- assertEquals(new HashSet<>(Arrays.asList(\"field2\", \"field4\")), metadata.nonKeyFieldNames);\n+ assertEquals(\n+ List.of(\"field4\", \"field2\", \"field1\", \"field3\"),\n+ Lists.newArrayList(metadata.allFields.keySet())\n+ );\n+\n+ metadata = extract(\n+ JdbcSinkConfig.PrimaryKeyMode.RECORD_VALUE,\n+ 
Collections.singletonList(\"field1\"),\n+ Set.of(\"field4\", \"field3\"),\n+ null,\n+ valueSchema\n+ );\n+\n+ assertEquals(List.of(\"field4\", \"field1\", \"field3\"), Lists.newArrayList(metadata.allFields.keySet()));\n+\n+ final var keySchema =\n+ SchemaBuilder.struct()\n+ .field(\"field1\", Schema.INT64_SCHEMA)\n+ .field(\"field3\", Schema.INT64_SCHEMA)\n+ .field(\"field2\", Schema.INT64_SCHEMA)\n+ .build();\n+\n+ metadata = extract(\n+ JdbcSinkConfig.PrimaryKeyMode.RECORD_KEY,\n+ List.of(\"field2\", \"field3\", \"field1\"),\n+ Set.of(\"field3\", \"field1\"),\n+ keySchema,\n+ null\n+ );\n+\n+ assertEquals(List.of(\"field1\", \"field2\", \"field3\"), Lists.newArrayList(metadata.allFields.keySet()));\n}\nprivate static FieldsMetadata extract(final JdbcSinkConfig.PrimaryKeyMode pkMode,\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Record fields in original order
|
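The "Record fields in original order" commit above keeps key fields first, then value-schema fields in declared order, with any leftovers sorted by name. The following is a minimal, hypothetical sketch of that ordering idea using a plain LinkedHashMap; the field names and types are made up and it is not the connector's actual FieldsMetadata code.

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FieldOrderSketch {
    // Re-orders an unordered field map: key fields first, then the
    // value-schema declaration order, then any remaining fields sorted by name.
    static Map<String, String> order(Map<String, String> allFields,
                                     List<String> keyFields,
                                     List<String> valueSchemaOrder) {
        final Map<String, String> ordered = new LinkedHashMap<>();
        for (final String name : keyFields) {
            if (allFields.containsKey(name)) {
                ordered.put(name, allFields.get(name));
            }
        }
        for (final String name : valueSchemaOrder) {
            if (allFields.containsKey(name)) {
                ordered.put(name, allFields.get(name));
            }
        }
        if (ordered.size() < allFields.size()) {
            final List<String> rest = new ArrayList<>(allFields.keySet());
            Collections.sort(rest);
            for (final String name : rest) {
                ordered.putIfAbsent(name, allFields.get(name));
            }
        }
        return ordered;
    }

    public static void main(String[] args) {
        // Hypothetical example: the value schema declares field4, field2, field1, field3.
        final Map<String, String> fields = Map.of(
                "field1", "BIGINT", "field2", "BIGINT",
                "field3", "BIGINT", "field4", "BIGINT");
        System.out.println(order(fields, List.of("field4"),
                List.of("field4", "field2", "field1", "field3")).keySet());
        // prints: [field4, field2, field1, field3]
    }
}
```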
11,748 |
22.03.2021 14:50:28
| -3,600 |
28fc64ee210c55876e25d934f8c0951c565533da
|
Add annotated tags to create release workflow
|
[
{
"change_type": "MODIFY",
"old_path": ".github/workflows/create_release.yml",
"new_path": ".github/workflows/create_release.yml",
"diff": "@@ -48,6 +48,13 @@ jobs:\necho tar_path=`realpath ./build/distributions/${tar_file}` >> $GITHUB_ENV\necho zip_path=`realpath ./build/distributions/${zip_file}` >> $GITHUB_ENV\n+ - name: Create tag\n+ run: |\n+ git config --local user.name \"GitHub Action\"\n+ git config --local user.email \"action@github.com\"\n+ git tag -a \"v${{ env.version }}\" -m \"Release version ${{ env.version }}\"\n+ git push origin \"v${{ env.version }}\"\n+\n- name: Create release draft\nid: create_release\nuses: actions/create-release@v1\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Add annotated tags to create release workflow
|
11,720 |
09.06.2021 14:06:13
| -10,800 |
b12ea6a9d10c20f0c53194d06de7cd9da16cbb69
|
Support for PostgreSQL arrays
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2021 Aiven Oy\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n* you may not use this file except in compliance with the License.\n@@ -127,6 +127,7 @@ dependencies {\ntestImplementation \"commons-io:commons-io:2.6\"\ntestImplementation \"org.apache.derby:derby:10.11.1.1\"\ntestRuntime \"org.slf4j:slf4j-log4j12:$slf4jVersion\"\n+ testImplementation 'org.assertj:assertj-core:3.19.0'\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"diff": "package io.aiven.connect.jdbc.dialect;\n+import java.lang.reflect.Array;\nimport java.sql.Connection;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\n@@ -24,8 +25,10 @@ import java.sql.SQLException;\nimport java.sql.Types;\nimport java.util.Collection;\nimport java.util.List;\n+import java.util.Map;\nimport java.util.Objects;\nimport java.util.UUID;\n+import java.util.stream.Collectors;\nimport org.apache.kafka.connect.data.Date;\nimport org.apache.kafka.connect.data.Decimal;\n@@ -33,6 +36,7 @@ import org.apache.kafka.connect.data.Schema;\nimport org.apache.kafka.connect.data.SchemaBuilder;\nimport org.apache.kafka.connect.data.Time;\nimport org.apache.kafka.connect.data.Timestamp;\n+import org.apache.kafka.connect.errors.DataException;\nimport io.aiven.connect.jdbc.config.JdbcConfig;\nimport io.aiven.connect.jdbc.dialect.DatabaseDialectProvider.SubprotocolBasedProvider;\n@@ -50,6 +54,17 @@ import io.aiven.connect.jdbc.util.TableId;\n*/\npublic class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n+ private static final Map<Schema.Type, Class<?>> SUPPORTED_ARRAY_VALUE_TYPES_TO_JAVA = Map.of(\n+ Schema.Type.INT8, short.class,\n+ Schema.Type.INT16, short.class,\n+ Schema.Type.INT32, int.class,\n+ Schema.Type.INT64, long.class,\n+ Schema.Type.FLOAT32, float.class,\n+ Schema.Type.FLOAT64, double.class,\n+ Schema.Type.BOOLEAN, boolean.class,\n+ Schema.Type.STRING, String.class\n+ );\n+\n/**\n* The provider for {@link PostgreSqlDatabaseDialect}.\n*/\n@@ -202,8 +217,13 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n@Override\nprotected String getSqlType(final SinkRecordField field) {\n- if (field.schemaName() != null) {\n- switch (field.schemaName()) {\n+ final String sqlType = getSqlTypeFromSchema(field.schema());\n+ return sqlType != null ? 
sqlType : super.getSqlType(field);\n+ }\n+\n+ private String getSqlTypeFromSchema(final Schema schema) {\n+ if (schema.name() != null) {\n+ switch (schema.name()) {\ncase Decimal.LOGICAL_NAME:\nreturn \"DECIMAL\";\ncase Date.LOGICAL_NAME:\n@@ -216,7 +236,7 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\n// fall through to normal types\n}\n}\n- switch (field.schemaType()) {\n+ switch (schema.type()) {\ncase INT8:\ncase INT16:\nreturn \"SMALLINT\";\n@@ -234,8 +254,50 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\nreturn \"TEXT\";\ncase BYTES:\nreturn \"BYTEA\";\n+ case ARRAY:\n+ return getSqlTypeFromSchema(schema.valueSchema()) + \"[]\";\ndefault:\n- return super.getSqlType(field);\n+ return null;\n+ }\n+ }\n+\n+ @Override\n+ protected boolean maybeBindPrimitive(\n+ final PreparedStatement statement,\n+ final int index,\n+ final Schema schema,\n+ final Object value\n+ ) throws SQLException {\n+ if (schema.type() == Schema.Type.ARRAY) {\n+ return bindPrimitiveArray(statement, index, schema, value);\n+ } else {\n+ return super.maybeBindPrimitive(statement, index, schema, value);\n+ }\n+ }\n+\n+ private boolean bindPrimitiveArray(\n+ final PreparedStatement statement,\n+ final int index,\n+ final Schema schema,\n+ final Object value\n+ ) throws SQLException {\n+ final Schema.Type valueType = schema.valueSchema().type();\n+ final Class<?> componentType = SUPPORTED_ARRAY_VALUE_TYPES_TO_JAVA.get(valueType);\n+ if (componentType != null) {\n+ List<?> valueCollection = (List<?>) value;\n+ // Postgres does not have an 8-bit integer, using short as the best alternative.\n+ if (valueType == Schema.Type.INT8) {\n+ valueCollection = valueCollection.stream()\n+ .map(o -> ((Byte) o).shortValue()).collect(Collectors.toList());\n+ }\n+ final Object newValue = Array.newInstance(componentType, valueCollection.size());\n+ for (int i = 0; i < valueCollection.size(); i++) {\n+ Array.set(newValue, i, valueCollection.get(i));\n+ }\n+ statement.setObject(index, newValue, Types.ARRAY);\n+ return true;\n+ } else {\n+ throw new DataException(String.format(\"Unsupported schema type %s for ARRAY values\", valueType));\n}\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/metadata/SinkRecordField.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/metadata/SinkRecordField.java",
"diff": "/*\n- * Copyright 2019 Aiven Oy\n+ * Copyright 2021 Aiven Oy\n* Copyright 2016 Confluent Inc.\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n@@ -33,6 +33,10 @@ public class SinkRecordField {\nthis.isPrimaryKey = isPrimaryKey;\n}\n+ public Schema schema() {\n+ return schema;\n+ }\n+\npublic String schemaName() {\nreturn schema.name();\n}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/dialect/BaseDialectTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/dialect/BaseDialectTest.java",
"diff": "@@ -388,6 +388,7 @@ public abstract class BaseDialectTest<T extends GenericDatabaseDialect> {\nSchema.BOOLEAN_SCHEMA,\nSchema.BYTES_SCHEMA,\nSchema.STRING_SCHEMA,\n+ SchemaBuilder.array(Schema.INT32_SCHEMA),\nDecimal.schema(0),\nDate.SCHEMA,\nTime.SCHEMA,\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialectTest.java",
"diff": "package io.aiven.connect.jdbc.dialect;\n+import java.sql.PreparedStatement;\n+import java.sql.SQLException;\nimport java.sql.Types;\n+import java.util.Arrays;\n+import java.util.Collections;\nimport java.util.List;\nimport java.util.UUID;\n+import java.util.concurrent.ThreadLocalRandom;\nimport org.apache.kafka.connect.data.Date;\nimport org.apache.kafka.connect.data.Decimal;\nimport org.apache.kafka.connect.data.Field;\nimport org.apache.kafka.connect.data.Schema;\nimport org.apache.kafka.connect.data.Schema.Type;\n+import org.apache.kafka.connect.data.SchemaBuilder;\nimport org.apache.kafka.connect.data.Time;\nimport org.apache.kafka.connect.data.Timestamp;\n+import org.apache.kafka.connect.errors.DataException;\nimport io.aiven.connect.jdbc.source.ColumnMapping;\nimport io.aiven.connect.jdbc.util.ColumnDefinition;\n@@ -38,7 +45,9 @@ import io.aiven.connect.jdbc.util.TableId;\nimport org.junit.Before;\nimport org.junit.Test;\n+import static org.assertj.core.api.Assertions.assertThatThrownBy;\nimport static org.junit.Assert.assertNotNull;\n+import static org.mockito.Mockito.mock;\npublic class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDatabaseDialect> {\n@@ -339,4 +348,92 @@ public class PostgreSqlDatabaseDialectTest extends BaseDialectTest<PostgreSqlDat\n\"jdbc:postgresql://localhost/test?user=fred&password=****&ssl=true\"\n);\n}\n+\n+\n+ @Test\n+ @Override\n+ public void bindFieldArrayUnsupported() {\n+ }\n+\n+ @Test\n+ public void bindFieldArray() throws SQLException {\n+ int index = ThreadLocalRandom.current().nextInt();\n+\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.INT8_SCHEMA),\n+ Arrays.asList((byte) 1, (byte) 2, (byte) 3)\n+ ).setObject(index, new short[] {1, 2, 3}, Types.ARRAY);\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.INT16_SCHEMA),\n+ Arrays.asList((short) 1, (short) 2, (short) 3)\n+ ).setObject(index, new short[] {1, 2, 3}, Types.ARRAY);\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.INT32_SCHEMA),\n+ Arrays.asList(1, 2, 3)\n+ ).setObject(index, new int[] {1, 2, 3}, Types.ARRAY);\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.INT64_SCHEMA),\n+ Arrays.asList(1L, 2L, 3L)\n+ ).setObject(index, new long[] {1L, 2L, 3L}, Types.ARRAY);\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.FLOAT32_SCHEMA),\n+ Arrays.asList(1.23F, 2.34F, 3.45F)\n+ ).setObject(index, new float[] {1.23F, 2.34F, 3.45F}, Types.ARRAY);\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.FLOAT64_SCHEMA),\n+ Arrays.asList(1.23D, 2.34D, 3.45D)\n+ ).setObject(index, new double[] {1.23D, 2.34D, 3.45D}, Types.ARRAY);\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.STRING_SCHEMA),\n+ Arrays.asList(\"qwe\", \"asd\", \"zxc\")\n+ ).setObject(index, new String[] {\"qwe\", \"asd\", \"zxc\"}, Types.ARRAY);\n+ super.verifyBindField(\n+ ++index,\n+ SchemaBuilder.array(Schema.BOOLEAN_SCHEMA),\n+ Arrays.asList(true, false, true)\n+ ).setObject(index, new boolean[] {true, false, true}, Types.ARRAY);\n+ }\n+\n+ @Test\n+ public void bindFieldArrayOfStructsUnsupported() {\n+ final Schema structSchema = SchemaBuilder.struct().field(\"test\", Schema.BOOLEAN_SCHEMA).build();\n+ final Schema arraySchema = SchemaBuilder.array(structSchema);\n+ assertThatThrownBy(() -> dialect.bindField(mock(PreparedStatement.class), 1, arraySchema,\n+ Collections.singletonList(structSchema)))\n+ .isInstanceOf(DataException.class)\n+ .hasMessage(\"Unsupported schema type STRUCT 
for ARRAY values\");\n+ }\n+\n+ @Test\n+ public void bindFieldArrayOfArraysUnsupported() {\n+ final Schema arraySchema = SchemaBuilder.array(SchemaBuilder.array(Schema.INT8_SCHEMA));\n+ assertThatThrownBy(\n+ () -> dialect.bindField(mock(PreparedStatement.class), 1, arraySchema, Collections.emptyList()))\n+ .isInstanceOf(DataException.class)\n+ .hasMessage(\"Unsupported schema type ARRAY for ARRAY values\");\n+ }\n+\n+ @Test\n+ public void bindFieldArrayOfMapsUnsupported() {\n+ final Schema mapSchema = SchemaBuilder.array(SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA));\n+ assertThatThrownBy(() -> dialect.bindField(mock(PreparedStatement.class), 1, mapSchema, Collections.emptyMap()))\n+ .isInstanceOf(DataException.class)\n+ .hasMessage(\"Unsupported schema type MAP for ARRAY values\");\n+ }\n+\n+ @Test\n+ public void bindFieldMapUnsupported() {\n+ final Schema bytesSchema = SchemaBuilder.array(Schema.BYTES_SCHEMA);\n+ assertThatThrownBy(\n+ () -> dialect.bindField(mock(PreparedStatement.class), 1, bytesSchema, Collections.emptyMap()))\n+ .isInstanceOf(DataException.class)\n+ .hasMessage(\"Unsupported schema type BYTES for ARRAY values\");\n+ }\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Support for PostgreSQL arrays
|
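The array-support commit above binds Java arrays to PostgreSQL array columns via setObject(..., Types.ARRAY) inside the dialect. As a rough illustration of the same capability in plain JDBC, the sketch below uses the standard Connection.createArrayOf API; the connection URL, credentials, table and column names are assumptions and not taken from the connector.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class PgArrayBindSketch {
    public static void main(String[] args) throws SQLException {
        // Hypothetical connection and table; adjust URL, credentials and DDL as needed.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost/test", "fred", "secret");
             PreparedStatement ps = conn.prepareStatement(
                     "INSERT INTO readings (sensor_id, samples) VALUES (?, ?)")) {
            ps.setInt(1, 42);
            // createArrayOf maps a Java Integer[] onto a PostgreSQL integer[] column.
            ps.setArray(2, conn.createArrayOf("integer", new Integer[] {1, 2, 3}));
            ps.executeUpdate();
        }
    }
}
```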
11,748 |
06.07.2021 15:26:25
| -7,200 |
5e1b5128fe9abeeb92b9a8ed0468d54e22e005e5
|
Insecure parameters for MySQL DB JDBC connection
Three parameters:
allowLoadLocalInFile
allowUrlInLocalInfile
autoDeserialize
were forcefully set to false for any MySQL DB JDBC connection
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/MySqlDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/MySqlDatabaseDialect.java",
"diff": "@@ -22,6 +22,7 @@ import java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.util.Collection;\n+import java.util.Properties;\nimport org.apache.kafka.connect.data.Date;\nimport org.apache.kafka.connect.data.Decimal;\n@@ -123,6 +124,14 @@ public class MySqlDatabaseDialect extends GenericDatabaseDialect {\n}\n}\n+ @Override\n+ protected Properties addConnectionProperties(final Properties properties) {\n+ properties.put(\"allowLoadLocalInFile\", Boolean.FALSE.toString());\n+ properties.put(\"allowUrlInLocalInfile\", Boolean.FALSE.toString());\n+ properties.put(\"autoDeserialize\", Boolean.FALSE.toString());\n+ return properties;\n+ }\n+\n@Override\npublic String buildUpsertQueryStatement(\nfinal TableId table,\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Insecure parameters for MySQL DB JDBC connection
Three parameters:
- allowLoadLocalInFile
- allowUrlInLocalInfile
- autoDeserialize
were forcefully set to false for any MySQL DB JDBC connection
|
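The commit above forces three insecure MySQL driver flags to false inside the dialect. A minimal sketch of what that amounts to when opening a connection directly is shown below; the three property names come from the commit, while the URL and credentials are made-up placeholders.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

public class MySqlHardenedConnectionSketch {
    public static void main(String[] args) throws SQLException {
        final Properties props = new Properties();
        props.setProperty("user", "app");          // assumed credentials
        props.setProperty("password", "secret");
        // The three flags the commit forces to false for every connection:
        props.setProperty("allowLoadLocalInFile", "false");
        props.setProperty("allowUrlInLocalInfile", "false");
        props.setProperty("autoDeserialize", "false");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/test", props)) {
            System.out.println("connected with hardened properties");
        }
    }
}
```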
11,720 |
22.07.2021 11:19:04
| -10,800 |
1701288be143ba2f5f8689a73ffeb7c885bc0caa
|
Added documentation for PostgreSQL arrays
|
[
{
"change_type": "MODIFY",
"old_path": "docs/sink-connector.md",
"new_path": "docs/sink-connector.md",
"diff": "@@ -223,11 +223,12 @@ types are used.\n| `Date` | `DATE` | `DATE` | `DATE` |\n| `Time` | `TIME` | `TIME` | `TIME(3)` |\n| `Timestamp` | `TIMESTAMP` | `TIMESTAMP` | `DATETIME(3)` |\n+| `ARRAY` | `-` | `-` | `-` |\n_(continued)_\n| Connect schema type | Oracle | PostgreSQL | SAP HANA |\n-|:-------------------:|:-----------------:|:------------------:|:---------------:|\n+|:-------------------:|:-----------------:|:--------------------------------------------:|:---------------:|\n| `INT8` | `NUMBER(3,0)` | `SMALLINT` | `TINYINT` |\n| `INT16` | `NUMBER(5,0)` | `SMALLINT` | `SMALLINT` |\n| `INT32` | `NUMBER(10,0)` | `INT` | `INTEGER` |\n@@ -241,6 +242,7 @@ _(continued)_\n| `Date` | `DATE` | `DATE` | `DATE` |\n| `Time` | `DATE` | `TIME` | `DATE` |\n| `Timestamp` | `TIMESTAMP` | `TIMESTAMP` | `TIMESTAMP` |\n+| `ARRAY` | `-` |[`*[]`](#supported-array-types-for-postgresql)| `-` |\n_(continued)_\n@@ -259,6 +261,7 @@ _(continued)_\n| `Date` | `NUMERIC` | `DATE` |\n| `Time` | `NUMERIC` | `TIME` |\n| `Timestamp` | `NUMERIC` | `DATETIME2` |\n+| `ARRAY` | `-` | `-` |\n_(continued)_\n@@ -277,6 +280,28 @@ _(continued)_\n| `Date` | `DATE` | `DATE` |\n| `Time` | `TIME` | `TIME` |\n| `Timestamp` | `DATETIME` | `DATETIME` |\n+| `ARRAY` | `-` | `-` |\n+\n+### Supported array types for PostgreSQL\n+\n+| Connect schema type | PostgreSQL |\n+|:-------------------:|:-------------------:|\n+| `INT8` | `SMALLINT` |\n+| `INT16` | `SMALLINT` |\n+| `INT32` | `INTEGER` |\n+| `INT64` | `BIGINT` |\n+| `FLOAT32` | `FLOAT` |\n+| `FLOAT64` | `DOUBLE` |\n+| `BOOLEAN` | `SMALLINT` |\n+| `STRING` | `VARCHAR(32672)` |\n+| `BYTES` | `-` |\n+| `ARRAY` | `-` |\n+| `Decimal` | `-` |\n+| `Date` | `INTEGER` |\n+| `Time` | `INTEGER` |\n+| `Timestamp` | `BIGINT` |\n+| `MAP` | `-` |\n+| `STRUCT` | `-` |\n## Example\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Added documentation for PostgreSQL arrays
|
11,721 |
12.08.2021 15:31:31
| -7,200 |
d18531b8b9f09b6d31e636f73cede41fa69526d5
|
Fixed currentTimestampDatabase query for some DB2 driver/database pairs
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/Db2DatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/Db2DatabaseDialect.java",
"diff": "@@ -62,7 +62,7 @@ public class Db2DatabaseDialect extends GenericDatabaseDialect {\n@Override\nprotected String currentTimestampDatabaseQuery() {\n- return \"SELECT CURRENT_TIMESTAMP(12) FROM SYSIBM.SYSDUMMY1;\";\n+ return \"SELECT CURRENT_TIMESTAMP(12) FROM SYSIBM.SYSDUMMY1\";\n}\n@Override\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Fixed currentTimestampDatabase query for some DB2 driver/database pairs
|
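The fix above drops the trailing semicolon, which some DB2 driver/database pairs reject when the statement is sent through JDBC. A small, hypothetical sketch of running the corrected query follows; it assumes the IBM DB2 JDBC driver is on the classpath, and the connection details are invented.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class Db2TimestampSketch {
    public static void main(String[] args) throws SQLException {
        // Hypothetical DB2 connection details.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:db2://localhost:50000/SAMPLE", "db2inst1", "secret");
             Statement stmt = conn.createStatement();
             // Note: no trailing ';' -- some driver/database pairs reject it.
             ResultSet rs = stmt.executeQuery(
                     "SELECT CURRENT_TIMESTAMP(12) FROM SYSIBM.SYSDUMMY1")) {
            if (rs.next()) {
                System.out.println("database time: " + rs.getTimestamp(1));
            }
        }
    }
}
```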
11,708 |
16.12.2021 14:23:54
| -3,600 |
ad757774fc9295f877ea82ae2e106084cc1a3ad7
|
Add checksum to check gradle artifacts after downloading
Update gradle to 6.9.1
|
[
{
"change_type": "MODIFY",
"old_path": "gradle/wrapper/gradle-wrapper.jar",
"new_path": "gradle/wrapper/gradle-wrapper.jar",
"diff": "Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ\n"
},
{
"change_type": "MODIFY",
"old_path": "gradle/wrapper/gradle-wrapper.properties",
"new_path": "gradle/wrapper/gradle-wrapper.properties",
"diff": "-#Thu Apr 25 09:42:35 EEST 2019\ndistributionBase=GRADLE_USER_HOME\ndistributionPath=wrapper/dists\n+distributionUrl=https\\://services.gradle.org/distributions/gradle-6.9.1-all.zip\nzipStoreBase=GRADLE_USER_HOME\nzipStorePath=wrapper/dists\n-distributionUrl=https\\://services.gradle.org/distributions/gradle-6.5.1-all.zip\n+distributionSha256Sum=b13f5d97f08000996bf12d9dd70af3f2c6b694c2c663ab1b545e9695562ad1ee\n"
},
{
"change_type": "MODIFY",
"old_path": "gradlew",
"new_path": "gradlew",
"diff": "# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n-# http://www.apache.org/licenses/LICENSE-2.0\n+# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n@@ -82,6 +82,7 @@ esac\nCLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar\n+\n# Determine the Java command to use to start the JVM.\nif [ -n \"$JAVA_HOME\" ] ; then\nif [ -x \"$JAVA_HOME/jre/sh/java\" ] ; then\n@@ -125,10 +126,11 @@ if $darwin; then\nGRADLE_OPTS=\"$GRADLE_OPTS \\\"-Xdock:name=$APP_NAME\\\" \\\"-Xdock:icon=$APP_HOME/media/gradle.icns\\\"\"\nfi\n-# For Cygwin, switch paths to Windows format before running java\n-if $cygwin ; then\n+# For Cygwin or MSYS, switch paths to Windows format before running java\n+if [ \"$cygwin\" = \"true\" -o \"$msys\" = \"true\" ] ; then\nAPP_HOME=`cygpath --path --mixed \"$APP_HOME\"`\nCLASSPATH=`cygpath --path --mixed \"$CLASSPATH\"`\n+\nJAVACMD=`cygpath --unix \"$JAVACMD\"`\n# We build the pattern for arguments to be converted via cygpath\n@@ -154,19 +156,19 @@ if $cygwin ; then\nelse\neval `echo args$i`=\"\\\"$arg\\\"\"\nfi\n- i=$((i+1))\n+ i=`expr $i + 1`\ndone\ncase $i in\n- (0) set -- ;;\n- (1) set -- \"$args0\" ;;\n- (2) set -- \"$args0\" \"$args1\" ;;\n- (3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n- (4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n- (5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n- (6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n- (7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n- (8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n- (9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\n+ 0) set -- ;;\n+ 1) set -- \"$args0\" ;;\n+ 2) set -- \"$args0\" \"$args1\" ;;\n+ 3) set -- \"$args0\" \"$args1\" \"$args2\" ;;\n+ 4) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" ;;\n+ 5) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" ;;\n+ 6) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" ;;\n+ 7) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" ;;\n+ 8) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" ;;\n+ 9) set -- \"$args0\" \"$args1\" \"$args2\" \"$args3\" \"$args4\" \"$args5\" \"$args6\" \"$args7\" \"$args8\" ;;\nesac\nfi\n@@ -175,14 +177,9 @@ save () {\nfor i do printf %s\\\\n \"$i\" | sed \"s/'/'\\\\\\\\''/g;1s/^/'/;\\$s/\\$/' \\\\\\\\/\" ; done\necho \" \"\n}\n-APP_ARGS=$(save \"$@\")\n+APP_ARGS=`save \"$@\"`\n# Collect all arguments for the java command, following the shell quoting and substitution rules\neval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS \"\\\"-Dorg.gradle.appname=$APP_BASE_NAME\\\"\" -classpath \"\\\"$CLASSPATH\\\"\" org.gradle.wrapper.GradleWrapperMain \"$APP_ARGS\"\n-# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong\n-if [ \"$(uname)\" = \"Darwin\" ] && [ \"$HOME\" = \"$PWD\" ]; then\n- cd \"$(dirname \"$0\")\"\n-fi\n-\nexec \"$JAVACMD\" \"$@\"\n"
},
{
"change_type": "MODIFY",
"old_path": "gradlew.bat",
"new_path": "gradlew.bat",
"diff": "@rem you may not use this file except in compliance with the License.\n@rem You may obtain a copy of the License at\n@rem\n-@rem http://www.apache.org/licenses/LICENSE-2.0\n+@rem https://www.apache.org/licenses/LICENSE-2.0\n@rem\n@rem Unless required by applicable law or agreed to in writing, software\n@rem distributed under the License is distributed on an \"AS IS\" BASIS,\n@@ -29,6 +29,9 @@ if \"%DIRNAME%\" == \"\" set DIRNAME=.\nset APP_BASE_NAME=%~n0\nset APP_HOME=%DIRNAME%\n+@rem Resolve any \".\" and \"..\" in APP_HOME to make it shorter.\n+for %%i in (\"%APP_HOME%\") do set APP_HOME=%%~fi\n+\n@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.\nset DEFAULT_JVM_OPTS=\"-Xmx64m\" \"-Xms64m\"\n@@ -81,6 +84,7 @@ set CMD_LINE_ARGS=%*\nset CLASSPATH=%APP_HOME%\\gradle\\wrapper\\gradle-wrapper.jar\n+\n@rem Execute Gradle\n\"%JAVA_EXE%\" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% \"-Dorg.gradle.appname=%APP_BASE_NAME%\" -classpath \"%CLASSPATH%\" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Add checksum to check gradle artifacts after downloading
Update gradle to 6.9.1
|
11,708 |
16.02.2022 08:20:10
| -3,600 |
113f3b039819a86061917fbecd1cf372e4c75055
|
Bump JDBC PostgreSQL dependency to 42.2.25 because of CVE-2022-21724
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -109,7 +109,7 @@ dependencies {\ncompileOnly \"org.apache.kafka:connect-api:$kafkaVersion\"\nruntimeOnly \"org.xerial:sqlite-jdbc:3.32.3\"\n- runtimeOnly \"org.postgresql:postgresql:42.2.10\"\n+ runtimeOnly \"org.postgresql:postgresql:42.2.25\"\nruntimeOnly \"net.sourceforge.jtds:jtds:1.3.1\"\nruntimeOnly \"net.snowflake:snowflake-jdbc:3.12.8\"\nruntimeOnly \"com.microsoft.sqlserver:mssql-jdbc:8.2.1.jre11\"\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Bump JDBC PostgreSQL dependency to 42.2.25 because of CVE-2022-21724
|
11,730 |
23.02.2022 17:00:34
| -3,600 |
cbfec0d52e7eac202942940b1e0d657e22fe8d13
|
Make it possible to publish artifacts to local Maven repo
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "/*\n- * Copyright 2021 Aiven Oy\n+ * Copyright 2021 Aiven Oy and jdbc-connector-for-apache-kafka contributors\n*\n* Licensed under the Apache License, Version 2.0 (the \"License\");\n* you may not use this file except in compliance with the License.\n@@ -38,6 +38,7 @@ repositories {\njava {\nsourceCompatibility = JavaVersion.VERSION_11\ntargetCompatibility = JavaVersion.VERSION_11\n+ withSourcesJar()\n}\ncheckstyle {\n@@ -70,9 +71,11 @@ publishing {\nmaven(MavenPublication) {\n// Defaults, for clarity\ngroupId = getGroup()\n- artifactId = getName()\n+ artifactId = 'jdbc-connector-for-apache-kafka'\nversion = getVersion()\n+ from components.java\n+\npom {\nname = \"Aiven's JDBC Sink and Source Connectors for Apache Kafka\"\ndescription = \"A Kafka Connect JDBC connector for copying data between databases and Kafka.\"\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Make it possible to publish artifacts to local Maven repo
|
11,730 |
23.02.2022 17:04:05
| -3,600 |
2df30630969eec0bdeac919fd457fb0ac6ef77ff
|
Support multi-row inserts
|
[
{
"change_type": "MODIFY",
"old_path": "docs/sink-connector-config-options.rst",
"new_path": "docs/sink-connector-config-options.rst",
"diff": "@@ -58,6 +58,10 @@ Writes\nUse standard SQL ``INSERT`` statements.\n+ ``multi``\n+\n+ Use multi-row inserts, e.g. ``INSERT INTO table_name (column_list) VALUES (value_list_1), (value_list_2), ... (value_list_n);``\n+\n``upsert``\nUse the appropriate upsert semantics for the target database if it is supported by the connector, e.g. ``INSERT .. ON CONFLICT .. DO UPDATE SET ..``.\n@@ -68,7 +72,7 @@ Writes\n* Type: string\n* Default: insert\n- * Valid Values: [insert, upsert, update]\n+ * Valid Values: [insert, multi, upsert, update]\n* Importance: high\n``batch.size``\n"
},
{
"change_type": "MODIFY",
"old_path": "docs/sink-connector.md",
"new_path": "docs/sink-connector.md",
"diff": "@@ -77,6 +77,14 @@ from Kafka.\nThis mode is used by default. To enable it explicitly, set\n`insert.mode=insert`.\n+### Multi Mode\n+\n+In this mode, the connector executes an `INSERT` SQL query with multiple\n+values (effectively inserting multiple row/records per query).\n+Supported in `SqliteDatabaseDialect` and `PostgreSqlDatabaseDialect`.\n+\n+To use this mode, set `insert.mode=multi`\n+\n### Update Mode\nIn this mode, the connector executes `UPDATE` SQL query on each record\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/DatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/DatabaseDialect.java",
"diff": "@@ -324,6 +324,24 @@ public interface DatabaseDialect extends ConnectionProvider {\nCollection<ColumnId> nonKeyColumns\n);\n+ /**\n+ * Build an INSERT statement for multiple rows.\n+ *\n+ * @param table the identifier of the table; may not be null\n+ * @param records number of rows which will be inserted; must be a positive number\n+ * @param keyColumns the identifiers of the columns in the primary/unique key; may not be null\n+ * but may be empty\n+ * @param nonKeyColumns the identifiers of the other columns in the table; may not be null but may\n+ * be empty\n+ * @return the INSERT statement; may not be null\n+ */\n+ String buildMultiInsertStatement(\n+ TableId table,\n+ int records,\n+ Collection<ColumnId> keyColumns,\n+ Collection<ColumnId> nonKeyColumns\n+ );\n+\n/**\n* Build the INSERT prepared statement expression for the given table and its columns.\n*\n@@ -494,7 +512,18 @@ public interface DatabaseDialect extends ConnectionProvider {\n* @param record the sink record with values to be bound into the statement; never null\n* @throws SQLException if there is a problem binding values into the statement\n*/\n- void bindRecord(SinkRecord record) throws SQLException;\n+ default void bindRecord(SinkRecord record) throws SQLException {\n+ bindRecord(1, record);\n+ }\n+\n+ /**\n+ * Bind the values in the supplied record, starting at the specified index.\n+ *\n+ * @param index the index at which binding starts; must be positive\n+ * @param record the sink record with values to be bound into the statement; never null\n+ * @throws SQLException if there is a problem binding values into the statement\n+ */\n+ int bindRecord(int index, SinkRecord record) throws SQLException;\n}\n/**\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/GenericDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/GenericDatabaseDialect.java",
"diff": "@@ -51,6 +51,7 @@ import java.util.Set;\nimport java.util.TimeZone;\nimport java.util.concurrent.ConcurrentLinkedQueue;\nimport java.util.concurrent.atomic.AtomicReference;\n+import java.util.stream.Collectors;\nimport org.apache.kafka.common.config.types.Password;\nimport org.apache.kafka.connect.data.Date;\n@@ -85,6 +86,10 @@ import io.aiven.connect.jdbc.util.TableId;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n+import static io.aiven.connect.jdbc.util.CollectionUtils.isEmpty;\n+import static java.util.Objects.requireNonNull;\n+import static java.util.stream.IntStream.range;\n+\n/**\n* A {@link DatabaseDialect} implementation that provides functionality based upon JDBC and SQL.\n*\n@@ -1350,6 +1355,44 @@ public class GenericDatabaseDialect implements DatabaseDialect {\nreturn builder.toString();\n}\n+ @Override\n+ public String buildMultiInsertStatement(final TableId table,\n+ final int records,\n+ final Collection<ColumnId> keyColumns,\n+ final Collection<ColumnId> nonKeyColumns) {\n+\n+ if (records < 1) {\n+ throw new IllegalArgumentException(\"number of records must be a positive number, but got: \" + records);\n+ }\n+ if (isEmpty(keyColumns) && isEmpty(nonKeyColumns)) {\n+ throw new IllegalArgumentException(\"no columns specified\");\n+ }\n+ requireNonNull(table, \"table must not be null\");\n+\n+ final String insertStatement = expressionBuilder()\n+ .append(\"INSERT INTO \")\n+ .append(table)\n+ .append(\"(\")\n+ .appendList()\n+ .delimitedBy(\",\")\n+ .transformedBy(ExpressionBuilder.columnNames())\n+ .of(keyColumns, nonKeyColumns)\n+ .append(\") VALUES \")\n+ .toString();\n+\n+ final String singleRowPlaceholder = expressionBuilder()\n+ .append(\"(\")\n+ .appendMultiple(\",\", \"?\", keyColumns.size() + nonKeyColumns.size())\n+ .append(\")\")\n+ .toString();\n+\n+ final String allRowsPlaceholder = range(1, records + 1)\n+ .mapToObj(i -> singleRowPlaceholder)\n+ .collect(Collectors.joining(\",\"));\n+\n+ return insertStatement + allRowsPlaceholder;\n+ }\n+\n@Override\npublic String buildUpdateStatement(\nfinal TableId table,\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/BufferedRecords.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/BufferedRecords.java",
"diff": "@@ -41,6 +41,8 @@ import io.aiven.connect.jdbc.util.TableId;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n+import static io.aiven.connect.jdbc.sink.JdbcSinkConfig.InsertMode.MULTI;\n+\npublic class BufferedRecords {\nprivate static final Logger log = LoggerFactory.getLogger(BufferedRecords.class);\n@@ -53,6 +55,7 @@ public class BufferedRecords {\nprivate List<SinkRecord> records = new ArrayList<>();\nprivate SchemaPair currentSchemaPair;\nprivate FieldsMetadata fieldsMetadata;\n+ private TableDefinition tableDefinition;\nprivate PreparedStatement preparedStatement;\nprivate StatementBinder preparedStatementBinder;\n@@ -76,39 +79,10 @@ public class BufferedRecords {\nrecord.valueSchema()\n);\n- if (currentSchemaPair == null) {\n- currentSchemaPair = schemaPair;\n- // re-initialize everything that depends on the record schema\n- fieldsMetadata = FieldsMetadata.extract(\n- tableId.tableName(),\n- config.pkMode,\n- config.pkFields,\n- config.fieldsWhitelist,\n- currentSchemaPair\n- );\n- dbStructure.createOrAmendIfNecessary(\n- config,\n- connection,\n- tableId,\n- fieldsMetadata\n- );\n+ log.debug(\"buffered records in list {}\", records.size());\n- final TableDefinition tableDefinition = dbStructure.tableDefinitionFor(tableId, connection);\n- final String sql = getInsertSql(tableDefinition);\n- log.debug(\n- \"{} sql: {}\",\n- config.insertMode,\n- sql\n- );\n- close();\n- preparedStatement = connection.prepareStatement(sql);\n- preparedStatementBinder = dbDialect.statementBinder(\n- preparedStatement,\n- config.pkMode,\n- schemaPair,\n- fieldsMetadata,\n- config.insertMode\n- );\n+ if (currentSchemaPair == null) {\n+ reInitialize(schemaPair);\n}\nfinal List<SinkRecord> flushed;\n@@ -134,27 +108,77 @@ public class BufferedRecords {\nreturn flushed;\n}\n+ private void prepareStatement() throws SQLException {\n+ final String sql = writeSql();\n+\n+ log.debug(\"Prepared SQL {} for insert mode {}\", sql, config.insertMode);\n+\n+ close();\n+ preparedStatement = connection.prepareStatement(sql);\n+ preparedStatementBinder = dbDialect.statementBinder(\n+ preparedStatement,\n+ config.pkMode,\n+ currentSchemaPair,\n+ fieldsMetadata,\n+ config.insertMode\n+ );\n+ }\n+\n+ private String writeSql() {\n+ final String sql;\n+ log.debug(\"Generating query for insert mode {} and {} records\", config.insertMode, records.size());\n+ if (config.insertMode == MULTI) {\n+ sql = getMultiInsertSql(tableDefinition);\n+ } else {\n+ sql = getInsertSql(tableDefinition);\n+ }\n+ return sql;\n+ }\n+\n+ // re-initialize everything that depends on the record schema\n+ private void reInitialize(final SchemaPair schemaPair) throws SQLException {\n+ currentSchemaPair = schemaPair;\n+ fieldsMetadata = FieldsMetadata.extract(\n+ tableId.tableName(),\n+ config.pkMode,\n+ config.pkFields,\n+ config.fieldsWhitelist,\n+ currentSchemaPair\n+ );\n+ dbStructure.createOrAmendIfNecessary(\n+ config,\n+ connection,\n+ tableId,\n+ fieldsMetadata\n+ );\n+\n+ tableDefinition = dbStructure.tableDefinitionFor(tableId, connection);\n+ }\n+\npublic List<SinkRecord> flush() throws SQLException {\nif (records.isEmpty()) {\nlog.debug(\"Records is empty\");\nreturn new ArrayList<>();\n}\n- log.debug(\"Flushing {} buffered records\", records.size());\n- for (final SinkRecord record : records) {\n- preparedStatementBinder.bindRecord(record);\n- }\n+ prepareStatement();\n+ bindRecords();\n+\nint totalUpdateCount = 0;\nboolean successNoInfo = false;\n- for (final int updateCount : preparedStatement.executeBatch()) {\n+\n+ 
log.debug(\"Executing batch...\");\n+ for (final int updateCount : executeBatch()) {\nif (updateCount == Statement.SUCCESS_NO_INFO) {\nsuccessNoInfo = true;\ncontinue;\n}\ntotalUpdateCount += updateCount;\n}\n+ log.debug(\"Done executing batch.\");\nif (totalUpdateCount != records.size() && !successNoInfo) {\nswitch (config.insertMode) {\ncase INSERT:\n+ case MULTI:\nthrow new ConnectException(String.format(\n\"Update count (%d) did not sum up to total number of records inserted (%d)\",\ntotalUpdateCount,\n@@ -186,6 +210,30 @@ public class BufferedRecords {\nreturn flushedRecords;\n}\n+ private int[] executeBatch() throws SQLException {\n+ if (config.insertMode == MULTI) {\n+ preparedStatement.addBatch();\n+ }\n+ log.debug(\"Executing batch with insert mode {}\", config.insertMode);\n+ return preparedStatement.executeBatch();\n+ }\n+\n+ private void bindRecords() throws SQLException {\n+ log.debug(\"Binding {} buffered records\", records.size());\n+ int index = 1;\n+ for (final SinkRecord record : records) {\n+ if (config.insertMode == MULTI) {\n+ // All records are bound to the same prepared statement,\n+ // so when binding fields for record N (N > 0)\n+ // we need to start at the index where binding fields for record N - 1 stopped.\n+ index = preparedStatementBinder.bindRecord(index, record);\n+ } else {\n+ preparedStatementBinder.bindRecord(record);\n+ }\n+ }\n+ log.debug(\"Done binding records.\");\n+ }\n+\npublic void close() throws SQLException {\nlog.info(\"Closing BufferedRecords with preparedStatement: {}\", preparedStatement);\nif (preparedStatement != null) {\n@@ -194,6 +242,29 @@ public class BufferedRecords {\n}\n}\n+ private String getMultiInsertSql(final TableDefinition tableDefinition) {\n+ if (config.insertMode != MULTI) {\n+ throw new ConnectException(String.format(\n+ \"Multi-row first insert SQL unsupported by insert mode %s\",\n+ config.insertMode\n+ ));\n+ }\n+ try {\n+ return dbDialect.buildMultiInsertStatement(\n+ tableId,\n+ records.size(),\n+ asColumns(fieldsMetadata.keyFieldNames),\n+ asColumns(fieldsMetadata.nonKeyFieldNames)\n+ );\n+ } catch (final UnsupportedOperationException e) {\n+ throw new ConnectException(String.format(\n+ \"Write to table '%s' in MULTI mode is not supported with the %s dialect.\",\n+ tableId,\n+ dbDialect.name()\n+ ));\n+ }\n+ }\n+\nprivate String getInsertSql(final TableDefinition tableDefinition) {\nswitch (config.insertMode) {\ncase INSERT:\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/JdbcSinkConfig.java",
"diff": "@@ -39,6 +39,7 @@ public class JdbcSinkConfig extends JdbcConfig {\npublic enum InsertMode {\nINSERT,\n+ MULTI,\nUPSERT,\nUPDATE;\n}\n@@ -122,6 +123,8 @@ public class JdbcSinkConfig extends JdbcConfig {\n\"The insertion mode to use. Supported modes are:\\n\"\n+ \"``insert``\\n\"\n+ \" Use standard SQL ``INSERT`` statements.\\n\"\n+ + \"``multi``\\n\"\n+ + \" Use multi-row ``INSERT`` statements.\\n\"\n+ \"``upsert``\\n\"\n+ \" Use the appropriate upsert semantics for the target database if it is supported by \"\n+ \"the connector, e.g. ``INSERT .. ON CONFLICT .. DO UPDATE SET ..``.\\n\"\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/sink/PreparedStatementBinder.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/sink/PreparedStatementBinder.java",
"diff": "@@ -31,6 +31,8 @@ import io.aiven.connect.jdbc.dialect.DatabaseDialect.StatementBinder;\nimport io.aiven.connect.jdbc.sink.metadata.FieldsMetadata;\nimport io.aiven.connect.jdbc.sink.metadata.SchemaPair;\n+import static io.aiven.connect.jdbc.sink.JdbcSinkConfig.InsertMode.MULTI;\n+\npublic class PreparedStatementBinder implements StatementBinder {\nprivate final JdbcSinkConfig.PrimaryKeyMode pkMode;\n@@ -58,6 +60,12 @@ public class PreparedStatementBinder implements StatementBinder {\n@Override\npublic void bindRecord(final SinkRecord record) throws SQLException {\n+ // backwards compatibility\n+ bindRecord(1, record);\n+ }\n+\n+\n+ public int bindRecord(int index, final SinkRecord record) throws SQLException {\nfinal Struct valueStruct = (Struct) record.value();\n// Assumption: the relevant SQL has placeholders for keyFieldNames first followed by\n@@ -65,24 +73,29 @@ public class PreparedStatementBinder implements StatementBinder {\n// the relevant SQL has placeholders for nonKeyFieldNames first followed by\n// keyFieldNames, in iteration order for all UPDATE queries\n- int index = 1;\n+ final int nextIndex;\nswitch (insertMode) {\ncase INSERT:\n+ case MULTI:\ncase UPSERT:\nindex = bindKeyFields(record, index);\n- bindNonKeyFields(record, valueStruct, index);\n+ nextIndex = bindNonKeyFields(record, valueStruct, index);\nbreak;\ncase UPDATE:\nindex = bindNonKeyFields(record, valueStruct, index);\n- bindKeyFields(record, index);\n+ nextIndex = bindKeyFields(record, index);\nbreak;\ndefault:\nthrow new AssertionError();\n}\n+ // in a multi-row insert, all records are a single item in the batch\n+ if (insertMode != MULTI) {\nstatement.addBatch();\n}\n+ return nextIndex;\n+ }\nprotected int bindKeyFields(final SinkRecord record, int index) throws SQLException {\nswitch (pkMode) {\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/main/java/io/aiven/connect/jdbc/util/CollectionUtils.java",
"diff": "+/*\n+ * Copyright 2022 Aiven Oy and jdbc-connector-for-apache-kafka project contributors\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.aiven.connect.jdbc.util;\n+\n+import java.util.Collection;\n+\n+public final class CollectionUtils {\n+ private CollectionUtils() {\n+ }\n+\n+ public static <T> boolean isEmpty(final Collection<T> collection) {\n+ return collection == null || collection.isEmpty();\n+ }\n+}\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/sink/BufferedRecordsTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/sink/BufferedRecordsTest.java",
"diff": "@@ -21,10 +21,10 @@ import java.io.IOException;\nimport java.sql.Connection;\nimport java.sql.PreparedStatement;\nimport java.sql.SQLException;\n-import java.sql.Statement;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\n+import java.util.Map;\nimport org.apache.kafka.connect.data.Schema;\nimport org.apache.kafka.connect.data.SchemaBuilder;\n@@ -39,16 +39,22 @@ import io.aiven.connect.jdbc.util.TableId;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.Test;\n-import org.mockito.Matchers;\n-import org.mockito.Mockito;\n+import org.mockito.ArgumentCaptor;\n+import static java.sql.Statement.SUCCESS_NO_INFO;\nimport static org.junit.Assert.assertEquals;\n+import static org.mockito.Matchers.any;\n+import static org.mockito.Matchers.anyString;\n+import static org.mockito.Matchers.eq;\nimport static org.mockito.Mockito.mock;\n+import static org.mockito.Mockito.times;\n+import static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\npublic class BufferedRecordsTest {\nprivate final SqliteHelper sqliteHelper = new SqliteHelper(getClass().getSimpleName());\n+ private final String dbUrl = sqliteHelper.sqliteUri();\n@Before\npublic void setUp() throws IOException, SQLException {\n@@ -63,14 +69,13 @@ public class BufferedRecordsTest {\n@Test\npublic void correctBatching() throws SQLException {\nfinal HashMap<Object, Object> props = new HashMap<>();\n- props.put(\"connection.url\", sqliteHelper.sqliteUri());\n+ props.put(\"connection.url\", dbUrl);\nprops.put(\"auto.create\", true);\nprops.put(\"auto.evolve\", true);\nprops.put(\"batch.size\", 1000); // sufficiently high to not cause flushes due to buffer being full\nfinal JdbcSinkConfig config = new JdbcSinkConfig(props);\n- final String url = sqliteHelper.sqliteUri();\n- final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(url, config);\n+ final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(dbUrl, config);\nfinal DbStructure dbStructure = new DbStructure(dbDialect);\nfinal TableId tableId = new TableId(null, null, \"dummy\");\n@@ -82,7 +87,7 @@ public class BufferedRecordsTest {\n.build();\nfinal Struct valueA = new Struct(schemaA)\n.put(\"name\", \"cuba\");\n- final SinkRecord recordA = new SinkRecord(\"dummy\", 0, null, null, schemaA, valueA, 0);\n+ final SinkRecord recordA = wrapInSinkRecord(valueA);\nfinal Schema schemaB = SchemaBuilder.struct()\n.field(\"name\", Schema.STRING_SCHEMA)\n@@ -116,25 +121,22 @@ public class BufferedRecordsTest {\nprops.put(\"batch.size\", 1000);\nfinal JdbcSinkConfig config = new JdbcSinkConfig(props);\n- final String url = sqliteHelper.sqliteUri();\n- final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(url, config);\n+ final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(dbUrl, config);\n- final int[] batchResponse = new int[2];\n- batchResponse[0] = Statement.SUCCESS_NO_INFO;\n- batchResponse[1] = Statement.SUCCESS_NO_INFO;\n+ final int[] batchResponse = new int[] {SUCCESS_NO_INFO, SUCCESS_NO_INFO};\nfinal DbStructure dbStructureMock = mock(DbStructure.class);\n- when(dbStructureMock.createOrAmendIfNecessary(Matchers.any(JdbcSinkConfig.class),\n- Matchers.any(Connection.class),\n- Matchers.any(TableId.class),\n- Matchers.any(FieldsMetadata.class)))\n+ when(dbStructureMock.createOrAmendIfNecessary(any(JdbcSinkConfig.class),\n+ any(Connection.class),\n+ any(TableId.class),\n+ any(FieldsMetadata.class)))\n.thenReturn(true);\nfinal PreparedStatement preparedStatementMock = 
mock(PreparedStatement.class);\nwhen(preparedStatementMock.executeBatch()).thenReturn(batchResponse);\nfinal Connection connectionMock = mock(Connection.class);\n- when(connectionMock.prepareStatement(Matchers.anyString())).thenReturn(preparedStatementMock);\n+ when(connectionMock.prepareStatement(anyString())).thenReturn(preparedStatementMock);\nfinal TableId tableId = new TableId(null, null, \"dummy\");\nfinal BufferedRecords buffer = new BufferedRecords(config, tableId, dbDialect,\n@@ -142,18 +144,17 @@ public class BufferedRecordsTest {\nfinal Schema schemaA = SchemaBuilder.struct().field(\"name\", Schema.STRING_SCHEMA).build();\nfinal Struct valueA = new Struct(schemaA).put(\"name\", \"cuba\");\n- final SinkRecord recordA = new SinkRecord(\"dummy\", 0, null, null, schemaA, valueA, 0);\n+ final SinkRecord recordA = wrapInSinkRecord(valueA);\nbuffer.add(recordA);\nfinal Schema schemaB = SchemaBuilder.struct().field(\"name\", Schema.STRING_SCHEMA).build();\nfinal Struct valueB = new Struct(schemaA).put(\"name\", \"cubb\");\n- final SinkRecord recordB = new SinkRecord(\"dummy\", 0, null, null, schemaB, valueB, 0);\n+ final SinkRecord recordB = wrapInSinkRecord(valueB);\nbuffer.add(recordB);\nbuffer.flush();\n}\n-\n@Test\npublic void testInsertModeUpdate() throws SQLException {\nfinal HashMap<Object, Object> props = new HashMap<>();\n@@ -164,27 +165,131 @@ public class BufferedRecordsTest {\nprops.put(\"insert.mode\", \"update\");\nfinal JdbcSinkConfig config = new JdbcSinkConfig(props);\n- final String url = sqliteHelper.sqliteUri();\n- final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(url, config);\n+ final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(dbUrl, config);\nfinal DbStructure dbStructureMock = mock(DbStructure.class);\n- when(dbStructureMock.createOrAmendIfNecessary(Matchers.any(JdbcSinkConfig.class),\n- Matchers.any(Connection.class),\n- Matchers.any(TableId.class),\n- Matchers.any(FieldsMetadata.class)))\n+ when(dbStructureMock.createOrAmendIfNecessary(any(JdbcSinkConfig.class),\n+ any(Connection.class),\n+ any(TableId.class),\n+ any(FieldsMetadata.class)))\n.thenReturn(true);\nfinal Connection connectionMock = mock(Connection.class);\n+ final PreparedStatement preparedStatement = mock(PreparedStatement.class);\n+ when(connectionMock.prepareStatement(anyString())).thenReturn(preparedStatement);\n+ when(preparedStatement.executeBatch()).thenReturn(new int[1]);\n+\nfinal TableId tableId = new TableId(null, null, \"dummy\");\nfinal BufferedRecords buffer = new BufferedRecords(config, tableId, dbDialect, dbStructureMock,\nconnectionMock);\nfinal Schema schemaA = SchemaBuilder.struct().field(\"name\", Schema.STRING_SCHEMA).build();\nfinal Struct valueA = new Struct(schemaA).put(\"name\", \"cuba\");\n- final SinkRecord recordA = new SinkRecord(\"dummy\", 0, null, null, schemaA, valueA, 0);\n+ final SinkRecord recordA = wrapInSinkRecord(valueA);\nbuffer.add(recordA);\n+ buffer.flush();\n+\n+ verify(connectionMock).prepareStatement(eq(\"UPDATE \\\"dummy\\\" SET \\\"name\\\" = ?\"));\n+\n+ }\n+\n+ @Test\n+ public void testInsertModeMultiAutomaticFlush() throws SQLException {\n+ final JdbcSinkConfig config = multiModeConfig(2);\n+\n+ final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(dbUrl, config);\n+ final DbStructure dbStructureMock = mock(DbStructure.class);\n+ when(dbStructureMock.createOrAmendIfNecessary(any(JdbcSinkConfig.class),\n+ any(Connection.class),\n+ any(TableId.class),\n+ any(FieldsMetadata.class)))\n+ .thenReturn(true);\n+\n+ final 
Connection connection = mock(Connection.class);\n+ final PreparedStatement preparedStatement = mock(PreparedStatement.class);\n+ when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);\n+ when(preparedStatement.executeBatch()).thenReturn(new int[]{2});\n+\n+ final TableId tableId = new TableId(null, null, \"planets\");\n+ final BufferedRecords buffer = new BufferedRecords(config, tableId, dbDialect, dbStructureMock,\n+ connection);\n+\n+ final Schema schema = newPlanetSchema();\n+ for (int i = 1; i <= 5; i++) {\n+ buffer.add(wrapInSinkRecord(newPlanet(schema, 1, \"planet name \" + i)));\n+ }\n+\n+ final ArgumentCaptor<String> sqlCaptor = ArgumentCaptor.forClass(String.class);\n+ // Given the 5 records, and batch size of 2, we expect 2 inserts.\n+ // One record is still waiting in the buffer, and that is expected.\n+ verify(connection, times(2)).prepareStatement(sqlCaptor.capture());\n+ assertEquals(\n+ sqlCaptor.getAllValues().get(0),\n+ \"INSERT INTO \\\"planets\\\"(\\\"name\\\",\\\"planetid\\\") VALUES (?,?),(?,?)\"\n+ );\n+ assertEquals(\n+ sqlCaptor.getAllValues().get(1),\n+ \"INSERT INTO \\\"planets\\\"(\\\"name\\\",\\\"planetid\\\") VALUES (?,?),(?,?)\"\n+ );\n+ }\n+\n+ @Test\n+ public void testInsertModeMultiExplicitFlush() throws SQLException {\n+ final JdbcSinkConfig config = multiModeConfig(100);\n+\n+ final DatabaseDialect dbDialect = DatabaseDialects.findBestFor(dbUrl, config);\n+ final DbStructure dbStructureMock = mock(DbStructure.class);\n+ when(dbStructureMock.createOrAmendIfNecessary(any(JdbcSinkConfig.class),\n+ any(Connection.class),\n+ any(TableId.class),\n+ any(FieldsMetadata.class)))\n+ .thenReturn(true);\n+\n+ final Connection connection = mock(Connection.class);\n+ final PreparedStatement preparedStatement = mock(PreparedStatement.class);\n+ when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);\n+ when(preparedStatement.executeBatch()).thenReturn(new int[]{2});\n+\n+ final TableId tableId = new TableId(null, null, \"planets\");\n+ final BufferedRecords buffer = new BufferedRecords(config, tableId, dbDialect, dbStructureMock,\n+ connection);\n+\n+ final Schema schema = newPlanetSchema();\n+ final Struct valueA = newPlanet(schema, 1, \"mercury\");\n+ final Struct valueB = newPlanet(schema, 2, \"venus\");\n+ buffer.add(wrapInSinkRecord(valueA));\n+ buffer.add(wrapInSinkRecord(valueB));\n+ buffer.flush();\n+\n+ verify(connection).prepareStatement(\n+ \"INSERT INTO \\\"planets\\\"(\\\"name\\\",\\\"planetid\\\") VALUES (?,?),(?,?)\"\n+ );\n- Mockito.verify(connectionMock, Mockito.times(1))\n- .prepareStatement(Matchers.eq(\"UPDATE \\\"dummy\\\" SET \\\"name\\\" = ?\"));\n+ }\n+\n+ private Struct newPlanet(final Schema schema, final int id, final String name) {\n+ return new Struct(schema)\n+ .put(\"planetid\", id)\n+ .put(\"name\", name);\n+ }\n+\n+ private Schema newPlanetSchema() {\n+ return SchemaBuilder.struct()\n+ .field(\"name\", Schema.STRING_SCHEMA)\n+ .field(\"planetid\", Schema.INT32_SCHEMA)\n+ .build();\n+ }\n+\n+ private JdbcSinkConfig multiModeConfig(final int batchSize) {\n+ return new JdbcSinkConfig(Map.of(\n+ \"connection.url\", \"\",\n+ \"auto.create\", true,\n+ \"auto.evolve\", true,\n+ \"batch.size\", batchSize,\n+ \"insert.mode\", \"multi\"\n+ ));\n+ }\n+ private SinkRecord wrapInSinkRecord(final Struct value) {\n+ return new SinkRecord(\"dummy-topic\", 0, null, null, value.schema(), value, 0);\n}\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Support multi-row inserts
|
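The test changes above pin down the exact multi-row INSERT text expected from the connector, for example INSERT INTO "planets"("name","planetid") VALUES (?,?),(?,?). As a minimal, self-contained sketch of that idea (not the connector's actual statement builder; the class and helper names here are hypothetical), such a statement can be assembled by repeating one placeholder group per buffered record:

```java
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

// Illustrative sketch only: builds a multi-row INSERT of the shape asserted in the
// test above, e.g. INSERT INTO "planets"("name","planetid") VALUES (?,?),(?,?).
public final class MultiRowInsertSketch {

    static String buildMultiInsert(final String table, final List<String> columns, final int rows) {
        // Quoted column list: "name","planetid"
        final String columnList = columns.stream()
                .map(c -> "\"" + c + "\"")
                .collect(Collectors.joining(","));
        // One (?,?,...) group per buffered record.
        final String placeholderGroup =
                "(" + String.join(",", Collections.nCopies(columns.size(), "?")) + ")";
        final String allGroups = String.join(",", Collections.nCopies(rows, placeholderGroup));
        return "INSERT INTO \"" + table + "\"(" + columnList + ") VALUES " + allGroups;
    }

    public static void main(final String[] args) {
        // Prints: INSERT INTO "planets"("name","planetid") VALUES (?,?),(?,?)
        System.out.println(buildMultiInsert("planets", List.of("name", "planetid"), 2));
    }
}
```

In the real sink the dialect handles quoting and the per-record bind step; the sketch only shows the SQL shape the assertions above check for.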
11,732 |
03.03.2022 10:07:41
| -7,200 |
f7d222696d1fbe9b2b33c8fe518961b7592612ce
|
Fix regression with partitioned tables in PostgreSQL
Add handling for detecting existence of partitioned tables explicitly.
PostgreSQL JDBC Driver update from 42.2.10 to 42.2.25 in caused
partitioned tables to break due to an upstream change separating the types of
normal tables and partitioned tables, as explained in
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/GenericDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/GenericDatabaseDialect.java",
"diff": "@@ -496,12 +496,15 @@ public class GenericDatabaseDialect implements DatabaseDialect {\nfinal Connection connection,\nfinal TableId tableId\n) throws SQLException {\n+ final DatabaseMetaData metadata = connection.getMetaData();\n+ final String[] tableTypes = tableTypes(metadata, new HashSet<>(Arrays.asList(\"TABLE\", \"PARTITIONED TABLE\")));\n+\nlog.info(\"Checking {} dialect for existence of table {}\", this, tableId);\ntry (final ResultSet rs = connection.getMetaData().getTables(\ntableId.catalogName(),\ntableId.schemaName(),\ntableId.tableName(),\n- new String[]{\"TABLE\"}\n+ tableTypes\n)) {\nfinal boolean exists = rs.next();\nlog.info(\"Using {} dialect table {} {}\", this, tableId, exists ? \"present\" : \"absent\");\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Fix regression with partitioned tables in PostgreSQL
Add handling for detecting existence of partitioned tables explicitly.
PostgreSQL JDBC Driver update from 42.2.10 to 42.2.25 in #113 caused
partitioned tables to broke due upstream change of separating types of
normal table and partitioned table as explained in
https://github.com/pgjdbc/pgjdbc/pull/1708
|
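The record above describes the root cause: newer PostgreSQL JDBC drivers report declaratively partitioned tables with the table type PARTITIONED TABLE, so an existence check that asks DatabaseMetaData.getTables for the TABLE type only no longer sees them. A minimal standalone sketch of the corrected check (illustrative only, with placeholder connection details, not the dialect code itself):

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

// Standalone sketch: table-existence check that also matches PostgreSQL
// declaratively partitioned tables, which pgjdbc reports as "PARTITIONED TABLE".
public final class TableExistsSketch {

    static boolean tableExists(final Connection connection, final String tableName) throws SQLException {
        try (ResultSet rs = connection.getMetaData().getTables(
                null, null, tableName, new String[]{"TABLE", "PARTITIONED TABLE"})) {
            // A single row is enough to prove the table (or a partitioned parent) exists.
            return rs.next();
        }
    }

    public static void main(final String[] args) throws SQLException {
        // Connection details are placeholders for illustration only.
        try (Connection connection = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/postgres", "postgres", "postgres")) {
            System.out.println(tableExists(connection, "test_topic"));
        }
    }
}
```

The follow-up commit below narrows this further, moving the extra table type into the PostgreSQL dialect so other databases keep the plain TABLE default.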
11,732 |
03.03.2022 13:42:15
| -7,200 |
9fc5d7f67f7a95becd4a28876b036c79157bdb7a
|
Refactor PostgreSQL table types out from generic dialect
|
[
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/GenericDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/GenericDatabaseDialect.java",
"diff": "@@ -35,7 +35,6 @@ import java.sql.Statement;\nimport java.sql.Timestamp;\nimport java.sql.Types;\nimport java.util.ArrayList;\n-import java.util.Arrays;\nimport java.util.Calendar;\nimport java.util.Collection;\nimport java.util.Collections;\n@@ -166,7 +165,7 @@ public class GenericDatabaseDialect implements DatabaseDialect {\nif (config instanceof JdbcSinkConfig) {\ncatalogPattern = JdbcSourceTaskConfig.CATALOG_PATTERN_DEFAULT;\nschemaPattern = JdbcSourceTaskConfig.SCHEMA_PATTERN_DEFAULT;\n- tableTypes = new HashSet<>(Arrays.asList(JdbcSourceTaskConfig.TABLE_TYPE_DEFAULT));\n+ tableTypes = new HashSet<>(getDefaultSinkTableTypes());\n} else {\ncatalogPattern = config.getString(JdbcSourceTaskConfig.CATALOG_PATTERN_CONFIG);\nschemaPattern = config.getString(JdbcSourceTaskConfig.SCHEMA_PATTERN_CONFIG);\n@@ -182,6 +181,10 @@ public class GenericDatabaseDialect implements DatabaseDialect {\nquoteIdentifiers = config.isQuoteSqlIdentifiers();\n}\n+ protected List<String> getDefaultSinkTableTypes() {\n+ return List.of(JdbcSourceTaskConfig.TABLE_TYPE_DEFAULT);\n+ }\n+\n@Override\npublic String name() {\nreturn getClass().getSimpleName().replace(\"DatabaseDialect\", \"\");\n@@ -496,8 +499,7 @@ public class GenericDatabaseDialect implements DatabaseDialect {\nfinal Connection connection,\nfinal TableId tableId\n) throws SQLException {\n- final DatabaseMetaData metadata = connection.getMetaData();\n- final String[] tableTypes = tableTypes(metadata, new HashSet<>(Arrays.asList(\"TABLE\", \"PARTITIONED TABLE\")));\n+ final String[] tableTypes = tableTypes(connection.getMetaData(), this.tableTypes);\nlog.info(\"Checking {} dialect for existence of table {}\", this, tableId);\ntry (final ResultSet rs = connection.getMetaData().getTables(\n"
},
{
"change_type": "MODIFY",
"old_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"new_path": "src/main/java/io/aiven/connect/jdbc/dialect/PostgreSqlDatabaseDialect.java",
"diff": "@@ -65,6 +65,8 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\nSchema.Type.STRING, String.class\n);\n+ private static final List<String> SINK_TABLE_TYPE_DEFAULT = List.of(\"TABLE\", \"PARTITIONED TABLE\");\n+\n/**\n* The provider for {@link PostgreSqlDatabaseDialect}.\n*/\n@@ -414,4 +416,8 @@ public class PostgreSqlDatabaseDialect extends GenericDatabaseDialect {\nreturn \"\";\n}\n+ @Override\n+ protected List<String> getDefaultSinkTableTypes() {\n+ return SINK_TABLE_TYPE_DEFAULT;\n+ }\n}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Refactor PostgreSQL table types out from generic dialect
|
11,720 |
30.03.2022 16:56:10
| -10,800 |
87047ee6a75610c2e9f1c3dd339467dace4eac0c
|
Integration test for Postgres partitioned tables
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -29,10 +29,16 @@ plugins {\n// https://docs.gradle.org/current/userguide/publishing_maven.html\nid \"maven-publish\"\n+\n+ // https://docs.gradle.org/current/userguide/idea_plugin.html\n+ id 'idea'\n}\nrepositories {\nmavenCentral()\n+ maven {\n+ url \"https://packages.confluent.io/maven\"\n+ }\n}\njava {\n@@ -59,7 +65,7 @@ distributions {\ninto(\"/\") {\nfrom projectDir\n- include \"README*\", \"LICENSE*\", \"NOTICE*\", \"licenses/\"\n+ include \"version.txt\", \"README*\", \"LICENSE*\", \"NOTICE*\", \"licenses/\"\ninclude \"config/\"\n}\n}\n@@ -107,8 +113,48 @@ ext {\nkafkaVersion = \"2.2.0\"\nslf4jVersion = \"1.7.36\"\nderbyVersion = \"10.15.2.0\"\n+\n+ avroVersion = \"1.8.1\"\n+ // Version 1.8.1 brings Jackson 1.9.x/org.codehaus.jackson package for Avro and Confluent Platform 4.1.4.\n+ confluentPlatformVersion = \"4.1.4\" // For compatibility tests use version 4.1.4.\n+ hamcrestVersion = \"2.2\"\n+ jacksonVersion = \"2.13.1\" // This Jackson is used in the tests.\n+ jupiterVersion = \"5.8.2\"\n+ kafkaVersion = \"2.0.1\" // Oldest version supported, new versions are backwards compatible.\n+ jettyVersion = \"9.4.11.v20180605\"\n+ junit4Version = \"4.13.2\"\n+ jsr305Version = \"3.0.2\"\n+ log4jVersion = \"2.17.1\"\n+ mockitoVersion = '4.3.1'\n+ servletVersion = \"3.1.0\"\n+ spotbugsAnnotationsVersion = \"4.5.2\"\n+ testcontainersVersion = '1.16.3'\n+ assertjVersion = \"3.22.0\"\n+ awaitilityVersion = '4.2.0'\n}\n+sourceSets {\n+ integrationTest {\n+ java.srcDir file('src/integrationTest/java')\n+ resources.srcDir file('src/integrationTest/resources')\n+ compileClasspath += sourceSets.main.output + configurations.testRuntimeClasspath\n+ runtimeClasspath += output + compileClasspath\n+ }\n+}\n+\n+idea {\n+ module {\n+ testSourceDirs += project.sourceSets.integrationTest.java.srcDirs\n+ testSourceDirs += project.sourceSets.integrationTest.resources.srcDirs\n+ }\n+}\n+\n+configurations {\n+ integrationTestImplementation.extendsFrom testImplementation\n+ integrationTestRuntimeOnly.extendsFrom testRuntimeOnly\n+}\n+\n+\ndependencies {\ncompileOnly \"org.apache.kafka:connect-api:$kafkaVersion\"\n@@ -136,6 +182,48 @@ dependencies {\ntestImplementation 'org.assertj:assertj-core:3.22.0'\ntestRuntimeOnly \"org.slf4j:slf4j-log4j12:$slf4jVersion\"\n+\n+ integrationTestRuntimeOnly \"io.confluent:kafka-avro-serializer:$confluentPlatformVersion\"\n+ integrationTestRuntimeOnly \"io.confluent:kafka-connect-avro-converter:$confluentPlatformVersion\"\n+ integrationTestRuntimeOnly \"io.confluent:kafka-json-serializer:$confluentPlatformVersion\"\n+ integrationTestRuntimeOnly \"org.junit.jupiter:junit-jupiter:$jupiterVersion\"\n+ integrationTestRuntimeOnly \"org.apache.logging.log4j:log4j-slf4j-impl:$log4jVersion\"\n+\n+ integrationTestImplementation \"org.apache.kafka:connect-runtime:$kafkaVersion\"\n+ integrationTestImplementation \"com.fasterxml.jackson.core:jackson-annotations:$jacksonVersion\"\n+ integrationTestImplementation \"javax.servlet:javax.servlet-api:$servletVersion\"\n+ integrationTestImplementation \"org.apache.avro:avro:$avroVersion\"\n+ integrationTestImplementation \"org.apache.kafka:connect-runtime:$kafkaVersion\"\n+ integrationTestImplementation \"org.eclipse.jetty:jetty-http:$jettyVersion\"\n+ integrationTestImplementation \"org.eclipse.jetty:jetty-server:$jettyVersion\"\n+ integrationTestImplementation \"org.eclipse.jetty:jetty-util:$jettyVersion\"\n+ integrationTestImplementation \"junit:junit:$junit4Version\" // This is for testcontainers\n+ integrationTestImplementation 
\"org.testcontainers:junit-jupiter:$testcontainersVersion\"\n+ integrationTestImplementation \"org.testcontainers:kafka:$testcontainersVersion\" // this is not Kafka version\n+ integrationTestImplementation \"org.testcontainers:testcontainers:$testcontainersVersion\"\n+ integrationTestImplementation \"org.testcontainers:postgresql:$testcontainersVersion\"\n+ integrationTestImplementation \"org.awaitility:awaitility:$awaitilityVersion\"\n+ integrationTestImplementation(\"org.assertj:assertj-db:2.0.2\")\n+\n+ // Make test utils from 'test' available in 'integration-test'\n+ integrationTestImplementation sourceSets.test.output\n+}\n+\n+task integrationTest(type: Test) {\n+ description = 'Runs the integration tests.'\n+ group = 'verification'\n+ testClassesDirs = sourceSets.integrationTest.output.classesDirs\n+ classpath = sourceSets.integrationTest.runtimeClasspath\n+\n+ dependsOn test, distTar\n+\n+ useJUnitPlatform()\n+\n+ // Run always.\n+ outputs.upToDateWhen { false }\n+\n+ // Pass the distribution file path to the tests.\n+ systemProperty(\"integration-test.distribution.file.path\", distTar.archiveFile.get().asFile.path)\n}\nprocessResources {\n"
},
{
"change_type": "MODIFY",
"old_path": "checkstyle/suppressions.xml",
"new_path": "checkstyle/suppressions.xml",
"diff": "<suppress checks=\"ClassFanOutComplexity\"\nfiles=\"(DatabaseDialect|GenericDatabaseDialect).java\"/>\n- <suppress checks=\"ClassDataAbstractionCoupling\" files=\".*Test\\.java\"/>\n+ <suppress checks=\"ClassDataAbstractionCoupling\" files=\".*Test|ConnectRunner.java\"/>\n<suppress checks=\"JavaNCSS\" files=\".*Test\\.java\"/>\n<suppress checks=\"ClassFanOutComplexity\" files=\".*Test\\.java\"/>\n</suppressions>\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/integrationTest/java/io/aiven/kafka/connect/jdbc/AbstractIT.java",
"diff": "+/*\n+ * Copyright 2022 Aiven Oy and jdbc-connector-for-apache-kafka project contributors\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.aiven.kafka.connect.jdbc;\n+\n+import java.nio.file.Files;\n+import java.nio.file.Path;\n+import java.nio.file.Paths;\n+import java.util.HashMap;\n+import java.util.List;\n+import java.util.Map;\n+import java.util.Properties;\n+\n+import org.apache.kafka.clients.admin.AdminClient;\n+import org.apache.kafka.clients.admin.AdminClientConfig;\n+import org.apache.kafka.clients.admin.NewTopic;\n+import org.apache.kafka.clients.producer.KafkaProducer;\n+import org.apache.kafka.clients.producer.ProducerConfig;\n+\n+import org.apache.avro.generic.GenericRecord;\n+import org.junit.jupiter.api.AfterEach;\n+import org.junit.jupiter.api.BeforeEach;\n+import org.slf4j.Logger;\n+import org.slf4j.LoggerFactory;\n+import org.testcontainers.containers.KafkaContainer;\n+import org.testcontainers.containers.Network;\n+import org.testcontainers.junit.jupiter.Container;\n+import org.testcontainers.junit.jupiter.Testcontainers;\n+import org.testcontainers.utility.DockerImageName;\n+\n+@Testcontainers\n+public abstract class AbstractIT {\n+\n+ private static final Logger LOGGER = LoggerFactory.getLogger(AbstractIT.class);\n+ protected static final String TEST_TOPIC_NAME = \"test_topic\";\n+ private static final String DEFAULT_KAFKA_TAG = \"5.4.3\";\n+ private static final DockerImageName DEFAULT_IMAGE_NAME =\n+ DockerImageName.parse(\"confluentinc/cp-kafka\")\n+ .withTag(DEFAULT_KAFKA_TAG);\n+ protected static KafkaProducer<String, GenericRecord> producer;\n+ @Container\n+ protected KafkaContainer kafkaContainer = new KafkaContainer(DEFAULT_IMAGE_NAME)\n+ .withNetwork(Network.newNetwork())\n+ .withEnv(\"KAFKA_AUTO_CREATE_TOPICS_ENABLE\", \"false\");\n+\n+ @Container\n+ protected SchemaRegistryContainer schemaRegistryContainer =\n+ new SchemaRegistryContainer(kafkaContainer);\n+\n+ protected ConnectRunner connectRunner;\n+\n+ @BeforeEach\n+ void startKafka() throws Exception {\n+ LOGGER.info(\"Configure Kafka connect plugins\");\n+ setupKafka();\n+ final Path pluginDir = setupPluginDir();\n+ setupKafkaConnect(pluginDir);\n+ producer = createProducer();\n+ }\n+\n+ private static Path setupPluginDir() throws Exception {\n+ final Path testDir = Files.createTempDirectory(\"aiven-kafka-connect-jdbc-test-\");\n+ final Path distFile = Paths.get(System.getProperty(\"integration-test.distribution.file.path\"));\n+ assert Files.exists(distFile);\n+\n+ final var pluginDir = Paths.get(testDir.toString(), \"plugins/aiven-kafka-connect-jdbc/\");\n+ Files.createDirectories(pluginDir);\n+\n+ final String cmd = String.format(\"tar -xf %s --strip-components=1 -C %s\", distFile, pluginDir);\n+ final Process p = Runtime.getRuntime().exec(cmd);\n+ assert p.waitFor() == 0;\n+ return pluginDir;\n+ }\n+\n+ private void setupKafka() throws Exception {\n+ LOGGER.info(\"Setup Kafka\");\n+ try (final AdminClient adminClient = createAdminClient()) {\n+ 
LOGGER.info(\"Create topic {}\", TEST_TOPIC_NAME);\n+ final NewTopic newTopic = new NewTopic(TEST_TOPIC_NAME, 4, (short) 1);\n+ adminClient.createTopics(List.of(newTopic)).all().get();\n+ }\n+ }\n+\n+ protected AdminClient createAdminClient() {\n+ final Properties adminClientConfig = new Properties();\n+ adminClientConfig.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers());\n+ return AdminClient.create(adminClientConfig);\n+ }\n+\n+ private void setupKafkaConnect(final Path pluginDir) {\n+ LOGGER.info(\"Start Kafka Connect\");\n+ connectRunner = new ConnectRunner(kafkaContainer.getBootstrapServers(), pluginDir);\n+ connectRunner.start();\n+ }\n+\n+ protected KafkaProducer<String, GenericRecord> createProducer() {\n+ LOGGER.info(\"Create kafka producer\");\n+ final Map<String, Object> producerProps = new HashMap<>();\n+ producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaContainer.getBootstrapServers());\n+ producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,\n+ \"io.confluent.kafka.serializers.KafkaAvroSerializer\");\n+ producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,\n+ \"io.confluent.kafka.serializers.KafkaAvroSerializer\");\n+ producerProps.put(\"schema.registry.url\", schemaRegistryContainer.getSchemaRegistryUrl());\n+ return new KafkaProducer<>(producerProps);\n+ }\n+\n+ @AfterEach\n+ final void tearDown() {\n+ connectRunner.stop();\n+ producer.close();\n+\n+ connectRunner.awaitStop();\n+ }\n+}\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/integrationTest/java/io/aiven/kafka/connect/jdbc/ConnectRunner.java",
"diff": "+/*\n+ * Copyright 2022 Aiven Oy and jdbc-connector-for-apache-kafka project contributors\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.aiven.kafka.connect.jdbc;\n+\n+import java.nio.file.Path;\n+import java.util.Map;\n+import java.util.concurrent.ExecutionException;\n+\n+import org.apache.kafka.common.utils.Time;\n+import org.apache.kafka.connect.runtime.Connect;\n+import org.apache.kafka.connect.runtime.ConnectorConfig;\n+import org.apache.kafka.connect.runtime.Herder;\n+import org.apache.kafka.connect.runtime.Worker;\n+import org.apache.kafka.connect.runtime.isolation.Plugins;\n+import org.apache.kafka.connect.runtime.rest.RestServer;\n+import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;\n+import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;\n+import org.apache.kafka.connect.runtime.standalone.StandaloneHerder;\n+import org.apache.kafka.connect.storage.MemoryOffsetBackingStore;\n+import org.apache.kafka.connect.util.FutureCallback;\n+\n+import org.slf4j.Logger;\n+import org.slf4j.LoggerFactory;\n+\n+public final class ConnectRunner {\n+ private static final Logger LOGGER = LoggerFactory.getLogger(ConnectRunner.class);\n+ private final String bootstrapServers;\n+\n+ private final Path pluginDir;\n+\n+ private Herder herder;\n+\n+ private Connect connect;\n+\n+ public ConnectRunner(final String bootstrapServers, final Path pluginDir) {\n+ this.bootstrapServers = bootstrapServers;\n+ this.pluginDir = pluginDir;\n+ }\n+\n+ void start() {\n+ final Map<String, String> workerProps = Map.of(\n+ \"bootstrap.servers\", bootstrapServers,\n+ \"offset.flush.interval.ms\", Integer.toString(5000),\n+ // These don't matter much (each connector sets its own converters),\n+ // but need to be filled with valid classes.\n+ \"key.converter\", \"org.apache.kafka.connect.converters.ByteArrayConverter\",\n+ \"value.converter\", \"org.apache.kafka.connect.converters.ByteArrayConverter\",\n+ \"internal.key.converter\", \"org.apache.kafka.connect.json.JsonConverter\",\n+ \"internal.key.converter.schemas.enable\", \"false\",\n+ \"internal.value.converter\", \"org.apache.kafka.connect.json.JsonConverter\",\n+ \"internal.value.converter.schemas.enable\", \"false\",\n+ // Don't need it since we'll memory MemoryOffsetBackingStore.\n+ \"offset.storage.file.filename\", \"\",\n+ \"plugin.path\", pluginDir.toString());\n+\n+ final Time time = Time.SYSTEM;\n+ final String workerId = \"test-worker\";\n+ final String kafkaClusterId = \"test-cluster\";\n+\n+ final Plugins plugins = new Plugins(workerProps);\n+ final StandaloneConfig config = new StandaloneConfig(workerProps);\n+\n+\n+ final Worker worker = new Worker(workerId, time, plugins, config, new MemoryOffsetBackingStore());\n+ herder = new StandaloneHerder(worker, kafkaClusterId);\n+ connect = new Connect(herder, new RestServer(config));\n+\n+ connect.start();\n+ }\n+\n+ public void createConnector(final Map<String, String> config) throws ExecutionException, InterruptedException 
{\n+ assert herder != null;\n+\n+ final FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>(\n+ (error, info) -> {\n+ if (error != null) {\n+ LOGGER.error(\"Failed to create job\");\n+ } else {\n+ LOGGER.info(\"Created connector {}\", info.result().name());\n+ }\n+ });\n+ herder.putConnectorConfig(\n+ config.get(ConnectorConfig.NAME_CONFIG),\n+ config, false, cb\n+ );\n+\n+ final Herder.Created<ConnectorInfo> connectorInfoCreated = cb.get();\n+ assert connectorInfoCreated.created();\n+ }\n+\n+ void stop() {\n+ connect.stop();\n+ }\n+\n+ void awaitStop() {\n+ connect.awaitStop();\n+ }\n+}\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/integrationTest/java/io/aiven/kafka/connect/jdbc/SchemaRegistryContainer.java",
"diff": "+/*\n+ * Copyright 2022 Aiven Oy and jdbc-connector-for-apache-kafka project contributors\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.aiven.kafka.connect.jdbc;\n+\n+import org.testcontainers.containers.GenericContainer;\n+import org.testcontainers.containers.KafkaContainer;\n+import org.testcontainers.utility.Base58;\n+\n+public final class SchemaRegistryContainer extends GenericContainer<SchemaRegistryContainer> {\n+ public static final int SCHEMA_REGISTRY_PORT = 8081;\n+\n+ public SchemaRegistryContainer(final KafkaContainer kafka) {\n+ this(\"5.2.1\", kafka);\n+ }\n+\n+ public SchemaRegistryContainer(final String confluentPlatformVersion, final KafkaContainer kafka) {\n+ super(\"confluentinc/cp-schema-registry:\" + confluentPlatformVersion);\n+\n+ dependsOn(kafka);\n+ withNetwork(kafka.getNetwork());\n+ withNetworkAliases(\"schema-registry-\" + Base58.randomString(6));\n+\n+ withEnv(\"SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS\",\n+ String.format(\"PLAINTEXT://%s:%s\", kafka.getNetworkAliases().get(0), 9092));\n+\n+ withExposedPorts(SCHEMA_REGISTRY_PORT);\n+ withEnv(\"SCHEMA_REGISTRY_HOST_NAME\", \"localhost\");\n+ }\n+\n+ public String getSchemaRegistryUrl() {\n+ return String.format(\"http://%s:%s\", getContainerIpAddress(), getMappedPort(SCHEMA_REGISTRY_PORT));\n+ }\n+\n+}\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "src/integrationTest/java/io/aiven/kafka/connect/jdbc/postgres/PartitionedTableIntegrationTest.java",
"diff": "+/*\n+ * Copyright 2022 Aiven Oy and jdbc-connector-for-apache-kafka project contributors\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+package io.aiven.kafka.connect.jdbc.postgres;\n+\n+import javax.sql.DataSource;\n+\n+import java.sql.Connection;\n+import java.sql.SQLException;\n+import java.sql.Statement;\n+import java.time.Duration;\n+import java.util.ArrayList;\n+import java.util.HashMap;\n+import java.util.List;\n+import java.util.Map;\n+import java.util.concurrent.ExecutionException;\n+import java.util.concurrent.Future;\n+\n+import org.apache.kafka.clients.producer.ProducerRecord;\n+import org.apache.kafka.clients.producer.RecordMetadata;\n+\n+import io.aiven.connect.jdbc.JdbcSinkConnector;\n+import io.aiven.kafka.connect.jdbc.AbstractIT;\n+\n+import org.apache.avro.Schema;\n+import org.apache.avro.generic.GenericRecord;\n+import org.assertj.core.util.Arrays;\n+import org.assertj.db.type.Table;\n+import org.junit.jupiter.api.Test;\n+import org.postgresql.ds.PGSimpleDataSource;\n+import org.testcontainers.containers.PostgreSQLContainer;\n+import org.testcontainers.junit.jupiter.Container;\n+import org.testcontainers.junit.jupiter.Testcontainers;\n+import org.testcontainers.utility.DockerImageName;\n+\n+import static org.apache.avro.generic.GenericData.Record;\n+import static org.assertj.db.api.Assertions.assertThat;\n+import static org.awaitility.Awaitility.await;\n+\n+@Testcontainers\n+public class PartitionedTableIntegrationTest extends AbstractIT {\n+\n+ public static final String DEFAULT_POSTGRES_TAG = \"10.20\";\n+ private static final String CONNECTOR_NAME = \"test-sink-connector\";\n+ private static final int TEST_TOPIC_PARTITIONS = 1;\n+ private static final Schema VALUE_RECORD_SCHEMA =\n+ new Schema.Parser().parse(\"{\\n\"\n+ + \" \\\"type\\\": \\\"record\\\",\\n\"\n+ + \" \\\"name\\\": \\\"record\\\",\\n\"\n+ + \" \\\"fields\\\": [\\n\"\n+ + \" {\\n\"\n+ + \" \\\"name\\\": \\\"name\\\",\\n\"\n+ + \" \\\"type\\\": \\\"string\\\"\\n\"\n+ + \" },\\n\"\n+ + \" {\\n\"\n+ + \" \\\"name\\\": \\\"value\\\",\\n\"\n+ + \" \\\"type\\\": \\\"string\\\"\\n\"\n+ + \" }\\n\"\n+ + \" ]\\n\"\n+ + \"}\");\n+ private static final String CREATE_TABLE =\n+ \"create table \\\"\" + TEST_TOPIC_NAME + \"\\\"\\n\"\n+ + \"(\\n\"\n+ + \" name text not null,\\n\"\n+ + \" value text not null,\\n\"\n+ + \" date timestamp not null default '2022-03-04'\\n\"\n+ + \")\";\n+ private static final String CREATE_TABLE_WITH_PARTITION = CREATE_TABLE + \" partition by RANGE (date)\";\n+ private static final String CREATE_PARTITION =\n+ \"create table partition partition of \\\"\" + TEST_TOPIC_NAME\n+ + \"\\\" for values from ('2022-03-03') to ('2122-03-03');\";\n+ private static final DockerImageName DEFAULT_POSTGRES_IMAGE_NAME =\n+ DockerImageName.parse(\"postgres\")\n+ .withTag(DEFAULT_POSTGRES_TAG);\n+\n+ @Container\n+ private final PostgreSQLContainer<?> postgreSqlContainer = new PostgreSQLContainer<>(DEFAULT_POSTGRES_IMAGE_NAME);\n+\n+ @Test\n+ final void 
testBasicDelivery() throws ExecutionException, InterruptedException, SQLException {\n+ executeUpdate(CREATE_TABLE);\n+ connectRunner.createConnector(basicConnectorConfig());\n+\n+ sendTestData(1000);\n+\n+ await().atMost(Duration.ofSeconds(15)).pollInterval(Duration.ofMillis(100))\n+ .untilAsserted(() -> assertThat(new Table(getDatasource(), TEST_TOPIC_NAME)).hasNumberOfRows(1000));\n+ }\n+\n+ @Test\n+ final void testBasicDeliveryForPartitionedTable() throws ExecutionException, InterruptedException, SQLException {\n+ executeUpdate(CREATE_TABLE_WITH_PARTITION);\n+ executeUpdate(CREATE_PARTITION);\n+ connectRunner.createConnector(basicConnectorConfig());\n+\n+ sendTestData(1000);\n+\n+ await().atMost(Duration.ofSeconds(15)).pollInterval(Duration.ofMillis(100))\n+ .untilAsserted(() -> assertThat(new Table(getDatasource(), TEST_TOPIC_NAME)).hasNumberOfRows(1000));\n+ }\n+\n+ private void executeUpdate(final String updateStatement) throws SQLException {\n+ try (final Connection connection = getDatasource().getConnection();\n+ final Statement statement = connection.createStatement()) {\n+ statement.executeUpdate(updateStatement);\n+ }\n+ }\n+\n+ public DataSource getDatasource() {\n+ final PGSimpleDataSource pgSimpleDataSource = new PGSimpleDataSource();\n+ pgSimpleDataSource.setServerNames(Arrays.array(postgreSqlContainer.getHost()));\n+ pgSimpleDataSource.setPortNumbers(new int[] {postgreSqlContainer.getMappedPort(5432)});\n+ pgSimpleDataSource.setDatabaseName(postgreSqlContainer.getDatabaseName());\n+ pgSimpleDataSource.setUser(postgreSqlContainer.getUsername());\n+ pgSimpleDataSource.setPassword(postgreSqlContainer.getPassword());\n+ return pgSimpleDataSource;\n+ }\n+\n+ private void sendTestData(final int numberOfRecords) throws InterruptedException, ExecutionException {\n+ final List<Future<RecordMetadata>> sendFutures = new ArrayList<>();\n+ for (int i = 0; i < numberOfRecords; i++) {\n+ for (int partition = 0; partition < TEST_TOPIC_PARTITIONS; partition++) {\n+ final String key = \"key-\" + i;\n+ final String recordName = \"user-\" + i;\n+ final String recordValue = \"value-\" + i;\n+ final Record value = createRecord(recordName, recordValue);\n+ final ProducerRecord<String, GenericRecord> msg =\n+ new ProducerRecord<>(TEST_TOPIC_NAME, partition, key, value);\n+ sendFutures.add(producer.send(msg));\n+ }\n+ }\n+ producer.flush();\n+ for (final Future<RecordMetadata> sendFuture : sendFutures) {\n+ sendFuture.get();\n+ }\n+ }\n+\n+ private Record createRecord(final String name, final String value) {\n+ final Record valueRecord = new Record(VALUE_RECORD_SCHEMA);\n+ valueRecord.put(\"name\", name);\n+ valueRecord.put(\"value\", value);\n+ return valueRecord;\n+ }\n+\n+ private Map<String, String> basicConnectorConfig() {\n+ final HashMap<String, String> config = new HashMap<>();\n+ config.put(\"name\", CONNECTOR_NAME);\n+ config.put(\"connector.class\", JdbcSinkConnector.class.getName());\n+ config.put(\"topics\", TEST_TOPIC_NAME);\n+ config.put(\"key.converter\", \"io.confluent.connect.avro.AvroConverter\");\n+ config.put(\"key.converter.schema.registry.url\", schemaRegistryContainer.getSchemaRegistryUrl());\n+ config.put(\"value.converter\", \"io.confluent.connect.avro.AvroConverter\");\n+ config.put(\"value.converter.schema.registry.url\", schemaRegistryContainer.getSchemaRegistryUrl());\n+ config.put(\"tasks.max\", \"1\");\n+ config.put(\"connection.url\", postgreSqlContainer.getJdbcUrl());\n+ config.put(\"connection.user\", postgreSqlContainer.getUsername());\n+ 
config.put(\"connection.password\", postgreSqlContainer.getPassword());\n+ config.put(\"insert.mode\", \"insert\");\n+ config.put(\"dialect.name\", \"PostgreSqlDatabaseDialect\");\n+ return config;\n+ }\n+}\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Integration test for Postgres partitioned tables
|
11,720 |
01.04.2022 09:42:13
| -10,800 |
95ce574f6c78a74a1a8e5ae1195879eae0a4e5b0
|
Downgrading schema registry version
|
[
{
"change_type": "MODIFY",
"old_path": "src/integrationTest/java/io/aiven/kafka/connect/jdbc/SchemaRegistryContainer.java",
"new_path": "src/integrationTest/java/io/aiven/kafka/connect/jdbc/SchemaRegistryContainer.java",
"diff": "@@ -24,7 +24,7 @@ public final class SchemaRegistryContainer extends GenericContainer<SchemaRegist\npublic static final int SCHEMA_REGISTRY_PORT = 8081;\npublic SchemaRegistryContainer(final KafkaContainer kafka) {\n- this(\"5.2.1\", kafka);\n+ this(\"5.0.4\", kafka);\n}\npublic SchemaRegistryContainer(final String confluentPlatformVersion, final KafkaContainer kafka) {\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Downgrading schema registry version
|
11,720 |
06.02.2023 11:41:09
| -7,200 |
8f23196399aa994fcdccb4f6bf069e2c463b95cd
|
Replacing the outdated and deprecated mockito-all with mockito-core.
|
[
{
"change_type": "MODIFY",
"old_path": "build.gradle",
"new_path": "build.gradle",
"diff": "@@ -125,7 +125,7 @@ ext {\njunit4Version = \"4.13.2\"\njsr305Version = \"3.0.2\"\nlog4jVersion = \"2.19.0\"\n- mockitoVersion = '4.3.1'\n+ mockitoVersion = '5.1.1'\nservletVersion = \"4.0.1\"\nspotbugsAnnotationsVersion = \"4.5.2\"\ntestcontainersVersion = '1.17.6'\n@@ -172,7 +172,7 @@ dependencies {\ntestImplementation \"org.easymock:easymock:5.1.0\"\ntestImplementation \"org.powermock:powermock-module-junit4:2.0.9\"\ntestImplementation \"org.powermock:powermock-api-easymock:2.0.9\"\n- testImplementation \"org.mockito:mockito-all:1.10.19\"\n+ testImplementation \"org.mockito:mockito-core:$mockitoVersion\"\ntestImplementation \"org.apache.kafka:connect-api:$kafkaVersion\"\ntestImplementation \"commons-io:commons-io:2.11.0\"\ntestImplementation \"org.apache.derby:derby:$derbyVersion\"\n"
},
{
"change_type": "MODIFY",
"old_path": "src/test/java/io/aiven/connect/jdbc/sink/BufferedRecordsTest.java",
"new_path": "src/test/java/io/aiven/connect/jdbc/sink/BufferedRecordsTest.java",
"diff": "@@ -43,9 +43,9 @@ import org.mockito.ArgumentCaptor;\nimport static java.sql.Statement.SUCCESS_NO_INFO;\nimport static org.junit.Assert.assertEquals;\n-import static org.mockito.Matchers.any;\n-import static org.mockito.Matchers.anyString;\n-import static org.mockito.Matchers.eq;\n+import static org.mockito.Mockito.any;\n+import static org.mockito.Mockito.anyString;\n+import static org.mockito.Mockito.eq;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\n"
}
] |
Java
|
Apache License 2.0
|
aiven/jdbc-connector-for-apache-kafka
|
Replacing outdated and deprecate mockito-all with mockito-core. (#213)
|
499,300 |
30.10.2019 11:15:33
| -28,800 |
d4d6beb2469b438c9ff144c2dd14daf467c1b80e
|
Only check requirements in `configure.py`
to avoid cycle import when required packages are not installed
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/core/config/schema.py",
"new_path": "ppdet/core/config/schema.py",
"diff": "@@ -23,32 +23,13 @@ import re\ntry:\nfrom docstring_parser import parse as doc_parse\nexcept Exception:\n-\ndef doc_parse(*args):\n- if not doc_parse.__warning_sent__:\n- from ppdet.utils.cli import ColorTTY\n- color_tty = ColorTTY()\n- message = \"docstring_parser is not installed, \" \\\n- + \"argument description is not available\"\n- print(color_tty.yellow(message))\n- doc_parse.__warning_sent__ = True\n-\n- doc_parse.__warning_sent__ = False\n-\n+ pass\ntry:\nfrom typeguard import check_type\nexcept Exception:\n-\ndef check_type(*args):\n- if not check_type.__warning_sent__:\n- from ppdet.utils.cli import ColorTTY\n- color_tty = ColorTTY()\n- message = \"typeguard is not installed,\" \\\n- + \"type checking is not available\"\n- print(color_tty.yellow(message))\n- check_type.__warning_sent__ = True\n-\n- check_type.__warning_sent__ = False\n+ pass\n__all__ = ['SchemaValue', 'SchemaDict', 'SharedConfig', 'extract_schema']\n"
},
{
"change_type": "MODIFY",
"old_path": "tools/configure.py",
"new_path": "tools/configure.py",
"diff": "@@ -24,6 +24,20 @@ from ppdet.utils.cli import ColorTTY, print_total_cfg\ncolor_tty = ColorTTY()\n+try:\n+ from docstring_parser import parse as doc_parse\n+except Exception:\n+ message = \"docstring_parser is not installed, \" \\\n+ + \"argument description is not available\"\n+ print(color_tty.yellow(message))\n+\n+try:\n+ from typeguard import check_type\n+except Exception:\n+ message = \"typeguard is not installed,\" \\\n+ + \"type checking is not available\"\n+ print(color_tty.yellow(message))\n+\nMISC_CONFIG = {\n\"architecture\": \"<value>\",\n\"max_iters\": \"<value>\",\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Only check requirements in `configure.py` (#6)
to avoid cycle import when required packages are not installed
|
499,300 |
30.10.2019 12:06:35
| -28,800 |
20a611cf78944cecca0fa594c8749e6f94b7cbb8
|
Fix mixed precision training of senet backbone
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/backbones/senet.py",
"new_path": "ppdet/modeling/backbones/senet.py",
"diff": "@@ -21,6 +21,7 @@ import math\nfrom paddle import fluid\nfrom paddle.fluid.param_attr import ParamAttr\n+from ppdet.experimental import mixed_precision_global_state\nfrom ppdet.core.workspace import register, serializable\nfrom .resnext import ResNeXt\n@@ -72,12 +73,13 @@ class SENet(ResNeXt):\nself.dcn_v2_stages = dcn_v2_stages\ndef _squeeze_excitation(self, input, num_channels, name=None):\n+ mixed_precision_enabled = mixed_precision_global_state() is not None\npool = fluid.layers.pool2d(\ninput=input,\npool_size=0,\npool_type='avg',\nglobal_pooling=True,\n- use_cudnn=False)\n+ use_cudnn=mixed_precision_enabled)\nstdv = 1.0 / math.sqrt(pool.shape[1] * 1.0)\nsqueeze = fluid.layers.fc(\ninput=pool,\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Fix mixed precision training of senet backbone (#8)
|
499,385 |
01.11.2019 12:19:08
| -28,800 |
f10b52e86dd30da6f1bd7e636d6290ea11bdb882
|
Update date format in README
|
[
{
"change_type": "MODIFY",
"old_path": "README.md",
"new_path": "README.md",
"diff": "@@ -117,14 +117,14 @@ Advanced Features:\n- Add a series of models ralated modulated Deformable Convolution.\n-#### 7/29/2019\n+#### 29/7/2019\n- Update Chinese docs for PaddleDetection\n- Fix bug in R-CNN models when train and test at the same time\n- Add ResNext101-vd + Mask R-CNN + FPN models\n- Add YOLOv3 on VOC models\n-#### 7/3/2019\n+#### 3/7/2019\n- Initial release of PaddleDetection and detection model zoo\n- Models included: Faster R-CNN, Mask R-CNN, Faster R-CNN+FPN, Mask\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Update date format in README (#11)
|
499,313 |
01.11.2019 17:40:28
| -28,800 |
a69ca0eccfc1ca67ffd286a75fa54e9628d216f4
|
fix download.py
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/utils/download.py",
"new_path": "ppdet/utils/download.py",
"diff": "@@ -30,13 +30,13 @@ from .voc_utils import create_list\nimport logging\nlogger = logging.getLogger(__name__)\n-__all__ = ['get_weights_path', 'get_dataset_path']\n+__all__ = ['get_weights_path', 'get_dataset_path', 'download_dataset', 'create_voc_list']\nWEIGHTS_HOME = osp.expanduser(\"~/.cache/paddle/weights\")\nDATASET_HOME = osp.expanduser(\"~/.cache/paddle/dataset\")\n# dict of {dataset_name: (download_info, sub_dirs)}\n-# download info: (url, md5sum)\n+# download info: [(url, md5sum)]\nDATASETS = {\n'coco': ([\n(\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix download.py (#13)
|
499,313 |
01.11.2019 19:03:18
| -28,800 |
744cb882d261bd7ded00412d159df9ef75d78f22
|
model zoo YOLOv3 add paper mAP
|
[
{
"change_type": "MODIFY",
"old_path": "docs/MODEL_ZOO.md",
"new_path": "docs/MODEL_ZOO.md",
"diff": "@@ -95,10 +95,13 @@ The backbone models pretrained on ImageNet are available. All backbone models ar\n- Group Normalization reference from [Group Normalization](https://arxiv.org/abs/1803.08494).\n- Detailed configuration file in [configs/gn](https://github.com/PaddlePaddle/models/tree/develop/PaddleCV/PaddleDetection/configs/gn)\n-### Yolo v3\n+### YOLO v3\n| Backbone | Pretrain dataset | Size | deformable Conv | Image/gpu | Lr schd | Inf time (fps) | Box AP | Download |\n| :----------- | :--------: | :-----: | :-----: |:------------: |:----: | :-------: | :----: | :-------: |\n+| DarkNet53 (paper) | ImageNet | 608 | False | 8 | 270e | - | 33.0 | - |\n+| DarkNet53 (paper) | ImageNet | 416 | False | 8 | 270e | - | 31.0 | - |\n+| DarkNet53 (paper) | ImageNet | 320 | False | 8 | 270e | - | 28.2 | - |\n| DarkNet53 | ImageNet | 608 | False | 8 | 270e | 45.571 | 38.9 | [model](https://paddlemodels.bj.bcebos.com/object_detection/yolov3_darknet.tar) |\n| DarkNet53 | ImageNet | 416 | False | 8 | 270e | - | 37.5 | [model](https://paddlemodels.bj.bcebos.com/object_detection/yolov3_darknet.tar) |\n| DarkNet53 | ImageNet | 320 | False | 8 | 270e | - | 34.8 | [model](https://paddlemodels.bj.bcebos.com/object_detection/yolov3_darknet.tar) |\n@@ -111,8 +114,7 @@ The backbone models pretrained on ImageNet are available. All backbone models ar\n| ResNet50_vd | ImageNet | 608 | True | 8 | 270e | - | 39.1 | [model](https://paddlemodels.bj.bcebos.com/object_detection/yolov3_r50vd_dcn.tar) |\n| ResNet50_vd | Object365 | 608 | True | 8 | 270e | - | 41.4 | [model](https://paddlemodels.bj.bcebos.com/object_detection/yolov3_r50vd_dcn_obj365_pretrained_coco.tar) |\n-\n-### Yolo v3 on Pascal VOC\n+### YOLO v3 on Pascal VOC\n| Backbone | Size | Image/gpu | Lr schd | Inf time (fps) | Box AP | Download |\n| :----------- | :--: | :-------: | :-----: | :------------: | :----: | :----------------------------------------------------------: |\n@@ -126,8 +128,11 @@ The backbone models pretrained on ImageNet are available. All backbone models ar\n| ResNet34 | 416 | 8 | 270e | - | 81.9 | [model](https://paddlemodels.bj.bcebos.com/object_detection/yolov3_r34_voc.tar) |\n| ResNet34 | 320 | 8 | 270e | - | 80.1 | [model](https://paddlemodels.bj.bcebos.com/object_detection/yolov3_r34_voc.tar) |\n-**Notes:** Yolo v3 is trained in 8 GPU with total batch size as 64 and trained 270 epoches. Yolo v3 training data augmentations: mixup,\n-randomly color distortion, randomly cropping, randomly expansion, randomly interpolation method, randomly flippling. Yolo v3 used randomly\n+#### Notes:\n+- YOLOv3-DarkNet53 performance in paper [YOLOv3](https://arxiv.org/abs/1804.02767) is also provided above, our implements\n+improved performance mainly by using L1 loss in bounding box width and height regression, image mixup and label smooth.\n+- YOLO v3 is trained in 8 GPU with total batch size as 64 and trained 270 epoches. YOLO v3 training data augmentations: mixup,\n+randomly color distortion, randomly cropping, randomly expansion, randomly interpolation method, randomly flippling. YOLO v3 used randomly\nreshaped minibatch in training, inferences can be performed on different image sizes with the same model weights, and we provided evaluation\nresults of image size 608/416/320 above. Deformable conv is added on stage 5 of backbone.\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
model zoo YOLOv3 add paper mAP (#14)
|
499,313 |
02.11.2019 12:13:54
| -28,800 |
7aa11973396a6efb211467f14cac82e5811abf4d
|
fix download.py windows compatible
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/utils/download.py",
"new_path": "ppdet/utils/download.py",
"diff": "@@ -154,7 +154,7 @@ def create_voc_list(data_dir, devkit_subdir='VOCdevkit'):\ndef map_path(url, root_dir):\n# parse path after download to decompress under root_dir\n- fname = url.split('/')[-1]\n+ fname = osp.split(url)[-1]\nzip_formats = ['.zip', '.tar', '.gz']\nfpath = fname\nfor zip_format in zip_formats:\n@@ -186,7 +186,7 @@ def get_path(url, root_dir, md5sum=None, check_exist=True):\n}\nfor k, v in decompress_name_map.items():\nif fullpath.find(k) >= 0:\n- fullpath = '/'.join(fullpath.split('/')[:-1] + [v])\n+ fullpath = osp.join(osp.split(fullpath)[0], v)\nexist_flag = False\nif osp.exists(fullpath) and check_exist:\n@@ -247,7 +247,7 @@ def _download(url, path, md5sum=None):\nif not osp.exists(path):\nos.makedirs(path)\n- fname = url.split('/')[-1]\n+ fname = osp.split(url)[-1]\nfullname = osp.join(path, fname)\nretry_cnt = 0\n@@ -314,7 +314,7 @@ def _decompress(fname):\n# decompress to fpath_tmp directory firstly, if decompress\n# successed, move decompress files to fpath and delete\n# fpath_tmp and remove download compress file.\n- fpath = '/'.join(fname.split('/')[:-1])\n+ fpath = osp.split(fname)[0]\nfpath_tmp = osp.join(fpath, 'tmp')\nif osp.isdir(fpath_tmp):\nshutil.rmtree(fpath_tmp)\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix download.py windows compatible (#15)
|
499,333 |
04.11.2019 11:48:56
| -28,800 |
1f5b8d5c4c46ca52374723123268403d8f471008
|
refine fruit doc
|
[
{
"change_type": "MODIFY",
"old_path": "docs/QUICK_STARTED.md",
"new_path": "docs/QUICK_STARTED.md",
"diff": "@@ -26,7 +26,7 @@ Training:\npython -u tools/train.py -c configs/yolov3_mobilenet_v1_fruit.yml \\\n--use_tb=True \\\n--tb_log_dir=tb_fruit_dir/scalar \\\n- --eval \\\n+ --eval\n```\nUse `yolov3_mobilenet_v1` to fine-tune the model from COCO dataset. Meanwhile, loss and mAP can be observed on tensorboard.\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
refine fruit doc (#16)
|
499,333 |
05.11.2019 09:57:31
| -28,800 |
611f33240e451364dff78d342e6607fd0ce9df00
|
fix save inference model
|
[
{
"change_type": "MODIFY",
"old_path": "tools/export_model.py",
"new_path": "tools/export_model.py",
"diff": "@@ -97,7 +97,7 @@ def main():\ninfer_prog = fluid.Program()\nwith fluid.program_guard(infer_prog, startup_prog):\nwith fluid.unique_name.guard():\n- _, feed_vars = create_feed(test_feed, use_pyreader=False)\n+ _, feed_vars = create_feed(test_feed, iterable=True)\ntest_fetches = model.test(feed_vars)\ninfer_prog = infer_prog.clone(True)\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix save inference model (#17)
|
499,333 |
05.11.2019 20:46:50
| -28,800 |
df6cfd8db092335464a20654e051dce5235319f4
|
fix face_eval
|
[
{
"change_type": "MODIFY",
"old_path": "tools/eval.py",
"new_path": "tools/eval.py",
"diff": "@@ -107,6 +107,9 @@ def main():\nif 'weights' in cfg:\ncheckpoint.load_params(exe, eval_prog, cfg.weights)\n+ if cfg.metric == \"WIDERFACE\":\n+ raise ValueError(\"metric type {} does not support in tools/eval.py, \"\n+ \"please use tools/face_eval.py\".format(cfg.metric))\nassert cfg.metric in ['COCO', 'VOC'], \\\n\"unknown metric type {}\".format(cfg.metric)\nextra_keys = []\n"
},
{
"change_type": "MODIFY",
"old_path": "tools/face_eval.py",
"new_path": "tools/face_eval.py",
"diff": "@@ -235,7 +235,7 @@ def main():\neval_prog = fluid.Program()\nwith fluid.program_guard(eval_prog, startup_prog):\nwith fluid.unique_name.guard():\n- _, feed_vars = create_feed(eval_feed, use_pyreader=False)\n+ _, feed_vars = create_feed(eval_feed, iterable=True)\nfetches = model.eval(feed_vars)\neval_prog = eval_prog.clone(True)\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix face_eval (#18)
|
499,333 |
11.11.2019 21:38:41
| -28,800 |
c8a292b065a901fde816ccf16b3241b3e8fd678b
|
Minor fix for import
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/backbones/cb_resnet.py",
"new_path": "ppdet/modeling/backbones/cb_resnet.py",
"diff": "@@ -28,7 +28,7 @@ from ppdet.core.workspace import register, serializable\nfrom numbers import Integral\nfrom .name_adapter import NameAdapter\n-from .nonlocal_helper import add_space_nonlocal\n+from .nonlocal import add_space_nonlocal\n__all__ = ['CBResNet']\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Minor fix for import (#22)
|
499,333 |
12.11.2019 10:31:54
| -28,800 |
f137929f1abc40278f0b53571d3a3dd7d564e21b
|
refine focal loss
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/anchor_heads/retina_head.py",
"new_path": "ppdet/modeling/anchor_heads/retina_head.py",
"diff": "@@ -389,6 +389,7 @@ class RetinaHead(object):\nim_info=im_info,\nnum_classes=self.num_classes - 1)\nfg_num = fluid.layers.reduce_sum(fg_num, name='fg_num')\n+ score_tgt = fluid.layers.cast(score_tgt, 'int32')\nloss_cls = fluid.layers.sigmoid_focal_loss(\nx=score_pred,\nlabel=score_tgt,\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
refine focal loss (#23)
|
499,369 |
12.11.2019 14:39:56
| -28,800 |
a992fef40eeca9d7547253596eef3bdef39fad0b
|
Add ResizeImage in FasterRCNNTestFeed and MaskRCNNTestFeed
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/data_feed.py",
"new_path": "ppdet/data/data_feed.py",
"diff": "@@ -558,6 +558,7 @@ class FasterRCNNTestFeed(DataFeed):\nstd=[0.229, 0.224, 0.225],\nis_scale=True,\nis_channel_first=False),\n+ ResizeImage(target_size=800, max_size=1333, interp=1),\nPermute(to_bgr=False)\n],\nbatch_transforms=[PadBatch()],\n@@ -704,6 +705,7 @@ class MaskRCNNTestFeed(DataFeed):\nstd=[0.229, 0.224, 0.225],\nis_scale=True,\nis_channel_first=False),\n+ ResizeImage(target_size=800, max_size=1333, interp=1),\nPermute(to_bgr=False, channel_first=True)\n],\nbatch_transforms=[PadBatch()],\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Add ResizeImage in FasterRCNNTestFeed and MaskRCNNTestFeed (#25)
|
499,400 |
21.11.2019 13:13:33
| -28,800 |
f17b971d4157583b86ea8f519903fdc30cace4d3
|
Make demo of slim support COCO dataset.
|
[
{
"change_type": "MODIFY",
"old_path": "slim/prune/compress.py",
"new_path": "slim/prune/compress.py",
"diff": "@@ -52,7 +52,7 @@ logging.basicConfig(level=logging.INFO, format=FORMAT)\nlogger = logging.getLogger(__name__)\n-def eval_run(exe, compile_program, reader, keys, values, cls, test_feed):\n+def eval_run(exe, compile_program, reader, keys, values, cls, test_feed, cfg):\n\"\"\"\nRun evaluation program, return program outputs.\n\"\"\"\n@@ -75,9 +75,16 @@ def eval_run(exe, compile_program, reader, keys, values, cls, test_feed):\nfeed=feed_data,\nfetch_list=[values[0]],\nreturn_numpy=False)\n+\n+ if cfg.metric == 'VOC':\nouts.append(data['gt_box'])\nouts.append(data['gt_label'])\nouts.append(data['is_difficult'])\n+ elif cfg.metric == 'COCO':\n+ outs.append(data['im_info'])\n+ outs.append(data['im_id'])\n+ outs.append(data['im_shape'])\n+\nres = {\nk: (np.array(v), v.recursive_sequence_lengths())\nfor k, v in zip(keys, outs)\n@@ -195,7 +202,7 @@ def main():\n#place = fluid.CPUPlace()\n#exe = fluid.Executor(place)\nresults = eval_run(exe, program, eval_reader, eval_keys, eval_values,\n- eval_cls, test_data_feed)\n+ eval_cls, test_data_feed, cfg)\nresolution = None\nif 'mask' in results[0]:\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Make demo of slim support COCO dataset. (#33)
|
499,300 |
21.11.2019 13:16:45
| -28,800 |
703d379c07d2446c36b16290943686641bc86283
|
Remove leftover reference to `scale_box`
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/transform/operators.py",
"new_path": "ppdet/data/transform/operators.py",
"diff": "@@ -1032,7 +1032,6 @@ class Resize(BaseOperator):\nscale_x = dim / w\nscale_y = dim / h\nif 'gt_bbox' in sample and len(sample['gt_bbox']) > 0:\n- if self.scale_box or self.scale_box is None:\nscale_array = np.array([scale_x, scale_y] * 2,\ndtype=np.float32)\nsample['gt_bbox'] = np.clip(\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Remove leftover reference to `scale_box` (#31)
|
499,333 |
25.11.2019 13:54:04
| -28,800 |
9229209b04237645c06e74ad4d3936de75b7d82d
|
fix config for fluid.data
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/data_feed.py",
"new_path": "ppdet/data/data_feed.py",
"diff": "@@ -453,7 +453,7 @@ class FasterRCNNTrainFeed(DataFeed):\n'image', 'im_info', 'im_id', 'gt_box', 'gt_label',\n'is_crowd'\n],\n- image_shape=[None, 3, None, None],\n+ image_shape=[3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nRandomFlipImage(prob=0.5),\n@@ -505,7 +505,7 @@ class FasterRCNNEvalFeed(DataFeed):\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape', 'gt_box',\n'gt_label', 'is_difficult'],\n- image_shape=[None, 3, None, None],\n+ image_shape=[3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -552,7 +552,7 @@ class FasterRCNNTestFeed(DataFeed):\ndataset=SimpleDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[None, 3, None, None],\n+ image_shape=[3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -600,7 +600,7 @@ class MaskRCNNTrainFeed(DataFeed):\n'image', 'im_info', 'im_id', 'gt_box', 'gt_label',\n'is_crowd', 'gt_mask'\n],\n- image_shape=[None, 3, None, None],\n+ image_shape=[3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nRandomFlipImage(prob=0.5, is_mask_flip=True),\n@@ -646,7 +646,7 @@ class MaskRCNNEvalFeed(DataFeed):\ndataset=CocoDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[None, 3, None, None],\n+ image_shape=[3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -698,7 +698,7 @@ class MaskRCNNTestFeed(DataFeed):\ndataset=SimpleDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[None, 3, None, None],\n+ image_shape=[3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(\n@@ -743,7 +743,7 @@ class SSDTrainFeed(DataFeed):\ndef __init__(self,\ndataset=VocDataSet().__dict__,\nfields=['image', 'gt_box', 'gt_label'],\n- image_shape=[None, 3, 300, 300],\n+ image_shape=[3, 300, 300],\nsample_transforms=[\nDecodeImage(to_rgb=True, with_mixup=False),\nNormalizeBox(),\n@@ -802,7 +802,7 @@ class SSDEvalFeed(DataFeed):\ndataset=VocDataSet(VOC_VAL_ANNOTATION).__dict__,\nfields=['image', 'im_shape', 'im_id', 'gt_box',\n'gt_label', 'is_difficult'],\n- image_shape=[None, 3, 300, 300],\n+ image_shape=[3, 300, 300],\nsample_transforms=[\nDecodeImage(to_rgb=True, with_mixup=False),\nNormalizeBox(),\n@@ -847,7 +847,7 @@ class SSDTestFeed(DataFeed):\ndef __init__(self,\ndataset=SimpleDataSet(VOC_VAL_ANNOTATION).__dict__,\nfields=['image', 'im_id', 'im_shape'],\n- image_shape=[None, 3, 300, 300],\n+ image_shape=[3, 300, 300],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nResizeImage(target_size=300, use_cv2=False, interp=1),\n@@ -893,7 +893,7 @@ class YoloTrainFeed(DataFeed):\ndef __init__(self,\ndataset=CocoDataSet().__dict__,\nfields=['image', 'gt_box', 'gt_label', 'gt_score'],\n- image_shape=[None, 3, 608, 608],\n+ image_shape=[3, 608, 608],\nsample_transforms=[\nDecodeImage(to_rgb=True, with_mixup=True),\nMixupImage(alpha=1.5, beta=1.5),\n@@ -955,7 +955,7 @@ class YoloEvalFeed(DataFeed):\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_size', 'im_id', 'gt_box',\n'gt_label', 'is_difficult'],\n- image_shape=[None, 3, 608, 608],\n+ image_shape=[3, 608, 608],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nResizeImage(target_size=608, interp=2),\n@@ -1013,7 +1013,7 @@ class 
YoloTestFeed(DataFeed):\ndataset=SimpleDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_size', 'im_id'],\n- image_shape=[None, 3, 608, 608],\n+ image_shape=[3, 608, 608],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nResizeImage(target_size=608, interp=2),\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/model_input.py",
"new_path": "ppdet/modeling/model_input.py",
"diff": "@@ -40,7 +40,7 @@ feed_var_def = [\ndef create_feed(feed, iterable=False, sub_prog_feed=False):\n- image_shape = feed.image_shape\n+ image_shape = [None] + feed.image_shape\nfeed_var_map = {var['name']: var for var in feed_var_def}\nfeed_var_map['image'] = {\n'name': 'image',\n@@ -98,14 +98,14 @@ def create_feed(feed, iterable=False, sub_prog_feed=False):\n'lod_level': 0\n}\nimage_name_list.append(name)\n- feed_var_map['im_info']['shape'] = [feed.num_scale * 3]\n+ feed_var_map['im_info']['shape'] = [None, feed.num_scale * 3]\nfeed.fields = image_name_list + feed.fields[1:]\nif sub_prog_feed:\nbox_names = ['bbox', 'bbox_flip']\nfor box_name in box_names:\nsub_prog_feed = {\n'name': box_name,\n- 'shape': [6],\n+ 'shape': [None, 6],\n'dtype': 'float32',\n'lod_level': 1\n}\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix config for fluid.data (#37)
|
499,333 |
26.11.2019 19:16:56
| -28,800 |
50381edae1df4165281d00a56c701d349cbea19f
|
revert fluid.data to fluid.layers.data
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/data_feed.py",
"new_path": "ppdet/data/data_feed.py",
"diff": "@@ -453,7 +453,7 @@ class FasterRCNNTrainFeed(DataFeed):\n'image', 'im_info', 'im_id', 'gt_box', 'gt_label',\n'is_crowd'\n],\n- image_shape=[3, None, None],\n+ image_shape=[3, 800, 1333],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nRandomFlipImage(prob=0.5),\n@@ -505,7 +505,7 @@ class FasterRCNNEvalFeed(DataFeed):\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape', 'gt_box',\n'gt_label', 'is_difficult'],\n- image_shape=[3, None, None],\n+ image_shape=[3, 800, 1333],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -552,7 +552,7 @@ class FasterRCNNTestFeed(DataFeed):\ndataset=SimpleDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[3, None, None],\n+ image_shape=[3, 800, 1333],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -600,7 +600,7 @@ class MaskRCNNTrainFeed(DataFeed):\n'image', 'im_info', 'im_id', 'gt_box', 'gt_label',\n'is_crowd', 'gt_mask'\n],\n- image_shape=[3, None, None],\n+ image_shape=[3, 800, 1333],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nRandomFlipImage(prob=0.5, is_mask_flip=True),\n@@ -646,7 +646,7 @@ class MaskRCNNEvalFeed(DataFeed):\ndataset=CocoDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[3, None, None],\n+ image_shape=[3, 800, 1333],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -698,7 +698,7 @@ class MaskRCNNTestFeed(DataFeed):\ndataset=SimpleDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[3, None, None],\n+ image_shape=[3, 800, 1333],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/model_input.py",
"new_path": "ppdet/modeling/model_input.py",
"diff": "@@ -25,22 +25,22 @@ __all__ = ['create_feed']\n# yapf: disable\nfeed_var_def = [\n- {'name': 'im_info', 'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- {'name': 'im_id', 'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n- {'name': 'gt_box', 'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n- {'name': 'gt_label', 'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n- {'name': 'is_crowd', 'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n- {'name': 'gt_mask', 'shape': [None, 2], 'dtype': 'float32', 'lod_level': 3},\n- {'name': 'is_difficult', 'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n- {'name': 'gt_score', 'shape': [None, 1], 'dtype': 'float32', 'lod_level': 0},\n- {'name': 'im_shape', 'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- {'name': 'im_size', 'shape': [None, 2], 'dtype': 'int32', 'lod_level': 0},\n+ {'name': 'im_info', 'shape': [3], 'dtype': 'float32', 'lod_level': 0},\n+ {'name': 'im_id', 'shape': [1], 'dtype': 'int32', 'lod_level': 0},\n+ {'name': 'gt_box', 'shape': [4], 'dtype': 'float32', 'lod_level': 1},\n+ {'name': 'gt_label', 'shape': [1], 'dtype': 'int32', 'lod_level': 1},\n+ {'name': 'is_crowd', 'shape': [1], 'dtype': 'int32', 'lod_level': 1},\n+ {'name': 'gt_mask', 'shape': [2], 'dtype': 'float32', 'lod_level': 3},\n+ {'name': 'is_difficult', 'shape': [1], 'dtype': 'int32', 'lod_level': 1},\n+ {'name': 'gt_score', 'shape': [1], 'dtype': 'float32', 'lod_level': 0},\n+ {'name': 'im_shape', 'shape': [3], 'dtype': 'float32', 'lod_level': 0},\n+ {'name': 'im_size', 'shape': [2], 'dtype': 'int32', 'lod_level': 0},\n]\n# yapf: enable\ndef create_feed(feed, iterable=False, sub_prog_feed=False):\n- image_shape = [None] + feed.image_shape\n+ image_shape = feed.image_shape\nfeed_var_map = {var['name']: var for var in feed_var_def}\nfeed_var_map['image'] = {\n'name': 'image',\n@@ -52,10 +52,10 @@ def create_feed(feed, iterable=False, sub_prog_feed=False):\n# tensor padding with 0 is used instead of LoD tensor when\n# num_max_boxes is set\nif getattr(feed, 'num_max_boxes', None) is not None:\n- feed_var_map['gt_label']['shape'] = [None, feed.num_max_boxes]\n- feed_var_map['gt_score']['shape'] = [None, feed.num_max_boxes]\n- feed_var_map['gt_box']['shape'] = [None, feed.num_max_boxes, 4]\n- feed_var_map['is_difficult']['shape'] = [None, feed.num_max_boxes]\n+ feed_var_map['gt_label']['shape'] = [feed.num_max_boxes]\n+ feed_var_map['gt_score']['shape'] = [feed.num_max_boxes]\n+ feed_var_map['gt_box']['shape'] = [feed.num_max_boxes, 4]\n+ feed_var_map['is_difficult']['shape'] = [feed.num_max_boxes]\nfeed_var_map['gt_label']['lod_level'] = 0\nfeed_var_map['gt_score']['lod_level'] = 0\nfeed_var_map['gt_box']['lod_level'] = 0\n@@ -98,14 +98,14 @@ def create_feed(feed, iterable=False, sub_prog_feed=False):\n'lod_level': 0\n}\nimage_name_list.append(name)\n- feed_var_map['im_info']['shape'] = [None, feed.num_scale * 3]\n+ feed_var_map['im_info']['shape'] = [feed.num_scale * 3]\nfeed.fields = image_name_list + feed.fields[1:]\nif sub_prog_feed:\nbox_names = ['bbox', 'bbox_flip']\nfor box_name in box_names:\nsub_prog_feed = {\n'name': box_name,\n- 'shape': [None, 6],\n+ 'shape': [6],\n'dtype': 'float32',\n'lod_level': 1\n}\n@@ -113,7 +113,7 @@ def create_feed(feed, iterable=False, sub_prog_feed=False):\nfeed.fields = feed.fields + [box_name]\nfeed_var_map[box_name] = sub_prog_feed\n- feed_vars = OrderedDict([(key, fluid.data(\n+ feed_vars = OrderedDict([(key, 
fluid.layers.data(\nname=feed_var_map[key]['name'],\nshape=feed_var_map[key]['shape'],\ndtype=feed_var_map[key]['dtype'],\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
revert fluid.data to fluid.layers.data (#42)
|
499,333 |
28.11.2019 19:57:13
| -28,800 |
f24d6619c6f3e2eb5190c3ac8b0fdc6f1888dfa4
|
fix data_feed
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/data_feed.py",
"new_path": "ppdet/data/data_feed.py",
"diff": "@@ -453,7 +453,7 @@ class FasterRCNNTrainFeed(DataFeed):\n'image', 'im_info', 'im_id', 'gt_box', 'gt_label',\n'is_crowd'\n],\n- image_shape=[3, 800, 1333],\n+ image_shape=[None, 3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nRandomFlipImage(prob=0.5),\n@@ -505,7 +505,7 @@ class FasterRCNNEvalFeed(DataFeed):\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape', 'gt_box',\n'gt_label', 'is_difficult'],\n- image_shape=[3, 800, 1333],\n+ image_shape=[None, 3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -552,7 +552,7 @@ class FasterRCNNTestFeed(DataFeed):\ndataset=SimpleDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[3, 800, 1333],\n+ image_shape=[None, 3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -600,7 +600,7 @@ class MaskRCNNTrainFeed(DataFeed):\n'image', 'im_info', 'im_id', 'gt_box', 'gt_label',\n'is_crowd', 'gt_mask'\n],\n- image_shape=[3, 800, 1333],\n+ image_shape=[None, 3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nRandomFlipImage(prob=0.5, is_mask_flip=True),\n@@ -646,7 +646,7 @@ class MaskRCNNEvalFeed(DataFeed):\ndataset=CocoDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[3, 800, 1333],\n+ image_shape=[None, 3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(mean=[0.485, 0.456, 0.406],\n@@ -698,7 +698,7 @@ class MaskRCNNTestFeed(DataFeed):\ndataset=SimpleDataSet(COCO_VAL_ANNOTATION,\nCOCO_VAL_IMAGE_DIR).__dict__,\nfields=['image', 'im_info', 'im_id', 'im_shape'],\n- image_shape=[3, 800, 1333],\n+ image_shape=[None, 3, None, None],\nsample_transforms=[\nDecodeImage(to_rgb=True),\nNormalizeImage(\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix data_feed (#43)
|
499,323 |
29.11.2019 10:38:32
| -28,800 |
f469674d47cbf7ebfc5aeb326f6e8334e19cdd04
|
Fix coco eval error in compress.py
|
[
{
"change_type": "MODIFY",
"old_path": "slim/quantization/compress.py",
"new_path": "slim/quantization/compress.py",
"diff": "@@ -59,7 +59,7 @@ logging.basicConfig(level=logging.INFO, format=FORMAT)\nlogger = logging.getLogger(__name__)\n-def eval_run(exe, compile_program, reader, keys, values, cls, test_feed):\n+def eval_run(exe, compile_program, reader, keys, values, cls, test_feed, cfg):\n\"\"\"\nRun evaluation program, return program outputs.\n\"\"\"\n@@ -82,9 +82,12 @@ def eval_run(exe, compile_program, reader, keys, values, cls, test_feed):\nfeed=feed_data,\nfetch_list=[values[0]],\nreturn_numpy=False)\n+ if cfg.metric == 'VOC':\nouts.append(data['gt_box'])\nouts.append(data['gt_label'])\nouts.append(data['is_difficult'])\n+ elif cfg.metric == 'COCO':\n+ outs.append(data['im_id'])\nres = {\nk: (np.array(v), v.recursive_sequence_lengths())\nfor k, v in zip(keys, outs)\n@@ -208,7 +211,7 @@ def main():\n#place = fluid.CPUPlace()\n#exe = fluid.Executor(place)\nresults = eval_run(exe, program, eval_reader, eval_keys, eval_values,\n- eval_cls, test_data_feed)\n+ eval_cls, test_data_feed, cfg)\nresolution = None\nif 'mask' in results[0]:\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Fix coco eval error in compress.py (#51)
|
499,333 |
02.12.2019 11:24:56
| -28,800 |
acf2cb24d3e787cbc2fadee356b86728766bbddd
|
refine snapshot_iter in fruit_detection
|
[
{
"change_type": "MODIFY",
"old_path": "configs/yolov3_mobilenet_v1_fruit.yml",
"new_path": "configs/yolov3_mobilenet_v1_fruit.yml",
"diff": "@@ -6,7 +6,7 @@ use_gpu: true\nmax_iters: 20000\nlog_smooth_window: 20\nsave_dir: output\n-snapshot_iter: 200\n+snapshot_iter: 1000\nmetric: VOC\nmap_type: 11point\npretrain_weights: https://paddlemodels.bj.bcebos.com/object_detection/yolov3_mobilenet_v1.tar\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
refine snapshot_iter in fruit_detection (#55)
|
499,385 |
02.12.2019 16:09:40
| -28,800 |
506c7f0e6d825f67c6fbb20fd22d1f17517e5c9d
|
Fix pretrain_weights URL in yolov3_r50vd_dcn.yml
|
[
{
"change_type": "MODIFY",
"old_path": "configs/dcn/yolov3_r50vd_dcn.yml",
"new_path": "configs/dcn/yolov3_r50vd_dcn.yml",
"diff": "@@ -8,7 +8,7 @@ log_smooth_window: 20\nsave_dir: output\nsnapshot_iter: 20000\nmetric: COCO\n-pretrain_weights: /paddle-imagenet-models-name.bj.bcebos.com/ResNet50_vd_pretrained.tar\n+pretrain_weights: https://paddle-imagenet-models-name.bj.bcebos.com/ResNet50_vd_pretrained.tar\nweights: output/yolov3_r50vd_dcn/model_final\nnum_classes: 80\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Fix pretrain_weights URL in yolov3_r50vd_dcn.yml (#57)
|
499,333 |
10.12.2019 16:54:46
| -28,800 |
497e0e277e7191cf536fdc89cf7cdfe27f38be6b
|
fix im_info
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/transform/operators.py",
"new_path": "ppdet/data/transform/operators.py",
"diff": "@@ -287,8 +287,8 @@ class ResizeImage(BaseOperator):\nim_scale_x = im_scale\nim_scale_y = im_scale\n- resize_w = np.round(im_scale_x * float(im_shape[1]))\n- resize_h = np.round(im_scale_y * float(im_shape[0]))\n+ resize_w = im_scale_x * float(im_shape[1])\n+ resize_h = im_scale_y * float(im_shape[0])\nim_info = [resize_h, resize_w, im_scale]\nif 'im_info' in sample and sample['im_info'][2] != 1.:\nsample['im_info'] = np.append(\n@@ -311,8 +311,12 @@ class ResizeImage(BaseOperator):\nfy=im_scale_y,\ninterpolation=self.interp)\nelse:\n+ if self.max_size != 0:\n+ raise TypeError(\n+ 'If you set max_size to cap the maximum size of image,'\n+ 'please set use_cv2 to True to resize the image.')\nim = Image.fromarray(im)\n- im = im.resize((resize_w, resize_h), self.interp)\n+ im = im.resize((int(resize_w), int(resize_h)), self.interp)\nim = np.array(im)\nsample['image'] = im\n@@ -1009,9 +1013,8 @@ class Resize(BaseOperator):\n'random' (for randomized interpolation).\ndefault to `cv2.INTER_LINEAR`.\n\"\"\"\n- def __init__(self,\n- target_dim=[],\n- interp=cv2.INTER_LINEAR):\n+\n+ def __init__(self, target_dim=[], interp=cv2.INTER_LINEAR):\nsuper(Resize, self).__init__()\nself.target_dim = target_dim\nself.interp = interp # 'random' for yolov3\n@@ -1032,10 +1035,9 @@ class Resize(BaseOperator):\nscale_x = dim / w\nscale_y = dim / h\nif 'gt_bbox' in sample and len(sample['gt_bbox']) > 0:\n- scale_array = np.array([scale_x, scale_y] * 2,\n- dtype=np.float32)\n- sample['gt_bbox'] = np.clip(\n- sample['gt_bbox'] * scale_array, 0, dim - 1)\n+ scale_array = np.array([scale_x, scale_y] * 2, dtype=np.float32)\n+ sample['gt_bbox'] = np.clip(sample['gt_bbox'] * scale_array, 0,\n+ dim - 1)\nsample['h'] = resize_h\nsample['w'] = resize_w\n@@ -1060,6 +1062,7 @@ class ColorDistort(BaseOperator):\nrandom_apply (bool): whether to apply in random (yolo) or fixed (SSD)\norder.\n\"\"\"\n+\ndef __init__(self,\nhue=[-18, 18, 0.5],\nsaturation=[0.5, 1.5, 0.5],\n@@ -1084,14 +1087,10 @@ class ColorDistort(BaseOperator):\ndelta = np.random.uniform(low, high)\nu = np.cos(delta * np.pi)\nw = np.sin(delta * np.pi)\n- bt = np.array([[1.0, 0.0, 0.0],\n- [0.0, u, -w],\n- [0.0, w, u]])\n- tyiq = np.array([[0.299, 0.587, 0.114],\n- [0.596, -0.274, -0.321],\n+ bt = np.array([[1.0, 0.0, 0.0], [0.0, u, -w], [0.0, w, u]])\n+ tyiq = np.array([[0.299, 0.587, 0.114], [0.596, -0.274, -0.321],\n[0.211, -0.523, 0.311]])\n- ityiq = np.array([[1.0, 0.956, 0.621],\n- [1.0, -0.272, -0.647],\n+ ityiq = np.array([[1.0, 0.956, 0.621], [1.0, -0.272, -0.647],\n[1.0, -1.107, 1.705]])\nt = np.dot(np.dot(ityiq, bt), tyiq).T\nimg = np.dot(img, t)\n@@ -1135,10 +1134,8 @@ class ColorDistort(BaseOperator):\nimg = sample['image']\nif self.random_apply:\ndistortions = np.random.permutation([\n- self.apply_brightness,\n- self.apply_contrast,\n- self.apply_saturation,\n- self.apply_hue\n+ self.apply_brightness, self.apply_contrast,\n+ self.apply_saturation, self.apply_hue\n])\nfor func in distortions:\nimg = func(img)\n@@ -1167,6 +1164,7 @@ class NormalizePermute(BaseOperator):\nmean (list): mean values in RGB order.\nstd (list): std values in RGB order.\n\"\"\"\n+\ndef __init__(self,\nmean=[123.675, 116.28, 103.53],\nstd=[58.395, 57.120, 57.375]):\n@@ -1197,6 +1195,7 @@ class RandomExpand(BaseOperator):\nprob (float): probability to expand.\nfill_value (list): color value used to fill the canvas. 
in RGB order.\n\"\"\"\n+\ndef __init__(self, ratio=4., prob=0.5, fill_value=(127.5, ) * 3):\nsuper(RandomExpand, self).__init__()\nassert ratio > 1.01, \"expand ratio must be larger than 1.01\"\n@@ -1251,6 +1250,7 @@ class RandomCrop(BaseOperator):\nallow_no_crop (bool): allow return without actually cropping them.\ncover_all_box (bool): ensure all bboxes are covered in the final crop.\n\"\"\"\n+\ndef __init__(self,\naspect_ratio=[.5, 2.],\nthresholds=[.0, .1, .3, .5, .7, .9],\n@@ -1295,15 +1295,16 @@ class RandomCrop(BaseOperator):\nfor i in range(self.num_attempts):\nscale = np.random.uniform(*self.scaling)\nmin_ar, max_ar = self.aspect_ratio\n- aspect_ratio = np.random.uniform(max(min_ar, scale**2),\n- min(max_ar, scale**-2))\n+ aspect_ratio = np.random.uniform(\n+ max(min_ar, scale**2), min(max_ar, scale**-2))\ncrop_h = int(h * scale / np.sqrt(aspect_ratio))\ncrop_w = int(w * scale * np.sqrt(aspect_ratio))\ncrop_y = np.random.randint(0, h - crop_h)\ncrop_x = np.random.randint(0, w - crop_w)\ncrop_box = [crop_x, crop_y, crop_x + crop_w, crop_y + crop_h]\n- iou = self._iou_matrix(gt_bbox,\n- np.array([crop_box], dtype=np.float32))\n+ iou = self._iou_matrix(\n+ gt_bbox, np.array(\n+ [crop_box], dtype=np.float32))\nif iou.max() < thresh:\ncontinue\n@@ -1311,7 +1312,8 @@ class RandomCrop(BaseOperator):\ncontinue\ncropped_box, valid_ids = self._crop_box_with_center_constraint(\n- gt_bbox, np.array(crop_box, dtype=np.float32))\n+ gt_bbox, np.array(\n+ crop_box, dtype=np.float32))\nif valid_ids.size > 0:\nfound = True\nbreak\n@@ -1349,8 +1351,8 @@ class RandomCrop(BaseOperator):\ncropped_box[:, 2:] -= crop[:2]\ncenters = (box[:, :2] + box[:, 2:]) / 2\n- valid = np.logical_and(\n- crop[:2] <= centers, centers < crop[2:]).all(axis=1)\n+ valid = np.logical_and(crop[:2] <= centers,\n+ centers < crop[2:]).all(axis=1)\nvalid = np.logical_and(\nvalid, (cropped_box[:, :2] < cropped_box[:, 2:]).all(axis=1))\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix im_info (#93)
|
499,300 |
11.12.2019 10:38:23
| -28,800 |
cbdd3bc5cb4d7629e21afede1df21f83c7280ee3
|
Safeguard docstring parsing
so that it does not choke on non conforming docstrings
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/core/config/schema.py",
"new_path": "ppdet/core/config/schema.py",
"diff": "@@ -195,7 +195,11 @@ def extract_schema(cls):\ndocs = cls.__doc__\nif docs is None and getattr(cls, '__category__', None) == 'op':\ndocs = cls.__call__.__doc__\n+ try:\ndocstring = doc_parse(docs)\n+ except Exception:\n+ docstring = None\n+\nif docstring is None:\ncomments = {}\nelse:\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Safeguard docstring parsing (#40)
so that it does not choke on non conforming docstrings
|
499,322 |
12.12.2019 18:55:14
| -28,800 |
d626a675cb2ff413290a1e72dd2682fb3b99ca35
|
Fix hang bug in parallel_map.py when multiprocessing mode used and some consumer exit abnormally
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/dataset.py",
"new_path": "ppdet/data/dataset.py",
"diff": "@@ -25,6 +25,7 @@ class Dataset(object):\ndef __init__(self):\nself._epoch = -1\n+ self._pos = 0\ndef __next__(self):\nreturn self.next()\n@@ -33,8 +34,8 @@ class Dataset(object):\nreturn self\ndef __str__(self):\n- return \"{}(fname:{}, epoch:{:d}, size:{:d}, pos:{:d})\".format(\n- type(self).__name__, self._fname, self._epoch,\n+ return \"{}(epoch:{:d}, size:{:d}, pos:{:d})\".format(\n+ type(self).__name__, self._epoch,\nself.size(), self._pos)\ndef next(self):\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/data/source/roidb_source.py",
"new_path": "ppdet/data/source/roidb_source.py",
"diff": "@@ -82,8 +82,8 @@ class RoiDbSource(Dataset):\nself._imid2path = None\ndef __str__(self):\n- return 'RoiDbSource(fname:%s,epoch:%d,size:%d,pos:%d)' \\\n- % (self._fname, self._epoch, self.size(), self._pos)\n+ return 'RoiDbSource(epoch:%d,size:%d,pos:%d,fname:%s)' \\\n+ % (self._epoch, self.size(), self._pos, self._fname)\ndef next(self):\n\"\"\" load next sample\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "ppdet/data/tests/test_dataset.py",
"diff": "+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.\n+#\n+# Licensed under the Apache License, Version 2.0 (the \"License\");\n+# you may not use this file except in compliance with the License.\n+# You may obtain a copy of the License at\n+#\n+# http://www.apache.org/licenses/LICENSE-2.0\n+#\n+# Unless required by applicable law or agreed to in writing, software\n+# distributed under the License is distributed on an \"AS IS\" BASIS,\n+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+# See the License for the specific language governing permissions and\n+# limitations under the License.\n+import os\n+import time\n+import unittest\n+import sys\n+import logging\n+import random\n+import copy\n+\n+import set_env\n+\n+import ppdet.data.transform as tf\n+from ppdet.data.dataset import Dataset\n+\n+class MemorySource(Dataset):\n+ \"\"\" memory data source for testing\n+ \"\"\"\n+ def __init__(self, samples):\n+ super(MemorySource, self).__init__()\n+ self._epoch = -1\n+\n+ self._pos = -1\n+ self._drained = False\n+ self._samples = samples\n+\n+ def next(self):\n+ if self._epoch < 0:\n+ self.reset()\n+\n+ if self._pos >= self.size():\n+ self._drained = True\n+ raise StopIteration(\"no more data in \" + str(self))\n+ else:\n+ sample = copy.deepcopy(self._samples[self._pos])\n+ self._pos += 1\n+ return sample\n+\n+ def reset(self):\n+ if self._epoch < 0:\n+ self._epoch = 0\n+ else:\n+ self._epoch += 1\n+\n+ self._pos = 0\n+ self._drained = False\n+ random.shuffle(self._samples)\n+\n+ def size(self):\n+ return len(self._samples)\n+\n+ def drained(self):\n+ assert self._epoch >= 0, \"the first epoch has not started yet\"\n+ return self._pos >= self.size()\n+\n+ def epoch_id(self):\n+ return self._epoch\n+\n+\n+class TestDataset(unittest.TestCase):\n+ \"\"\"Test cases for ppdet.data.dataset\n+ \"\"\"\n+\n+ @classmethod\n+ def setUpClass(cls):\n+ \"\"\" setup\n+ \"\"\"\n+ pass\n+\n+ @classmethod\n+ def tearDownClass(cls):\n+ \"\"\" tearDownClass \"\"\"\n+ pass\n+\n+ def test_next(self):\n+ \"\"\" test next\n+ \"\"\"\n+ samples = list(range(10))\n+ mem_sc = MemorySource(samples)\n+\n+ for i, d in enumerate(mem_sc):\n+ self.assertTrue(d in samples)\n+\n+ def test_transform_with_abnormal_worker(self):\n+ \"\"\" test dataset transform with abnormally exit process\n+ \"\"\"\n+ samples = list(range(1000))\n+ ds = MemorySource(samples)\n+\n+ def _mapper(sample):\n+ if sample == 3:\n+ sys.exit(1)\n+\n+ return 2 * sample\n+\n+ worker_conf = {'WORKER_NUM': 2, 'use_process': True}\n+ mapped = tf.map(ds, _mapper, worker_conf)\n+\n+ ct = 0\n+ for i, d in enumerate(mapped):\n+ ct += 1\n+ self.assertTrue(d / 2 in samples)\n+\n+ self.assertEqual(len(samples) - 1, ct)\n+\n+ def test_transform_with_delay_worker(self):\n+ \"\"\" test dataset transform with delayed process\n+ \"\"\"\n+ samples = list(range(1000))\n+ ds = MemorySource(samples)\n+\n+ def _mapper(sample):\n+ if sample == 3:\n+ time.sleep(30)\n+\n+ return 2 * sample\n+\n+ worker_conf = {'WORKER_NUM': 2, 'use_process': True}\n+ mapped = tf.map(ds, _mapper, worker_conf)\n+\n+ ct = 0\n+ for i, d in enumerate(mapped):\n+ ct += 1\n+ self.assertTrue(d / 2 in samples)\n+\n+ self.assertEqual(len(samples), ct)\n+\n+\n+if __name__ == '__main__':\n+ logging.basicConfig()\n+ unittest.main()\n+\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/data/transform/parallel_map.py",
"new_path": "ppdet/data/transform/parallel_map.py",
"diff": "@@ -21,6 +21,11 @@ from __future__ import print_function\nimport sys\nimport six\n+if six.PY3:\n+ from queue import Empty\n+else:\n+ from Queue import Empty\n+\nimport uuid\nimport logging\nimport signal\n@@ -31,15 +36,19 @@ logger = logging.getLogger(__name__)\nclass EndSignal(object):\n- def __init__(self, errno=0, errmsg=''):\n+ \"\"\" signal used to notify worker to exit\n+ \"\"\"\n+ def __init__(self, id, errno=0, errmsg=''):\n+ self.id = id\nself.errno = errno\nself.errmsg = errmsg\nclass ParallelMappedDataset(ProxiedDataset):\n\"\"\"\n- Transform samples to mapped samples which is similar to 'basic.MappedDataset',\n- but multiple workers (threads or processes) will be used\n+ Transform samples to mapped samples which is similar to\n+ 'basic.MappedDataset', but multiple workers (threads or processes)\n+ will be used\nNotes:\nthis class is not thread-safe\n@@ -58,7 +67,8 @@ class ParallelMappedDataset(ProxiedDataset):\nargs.update(worker_args)\nif args['use_process'] and type(args['memsize']) is str:\nassert args['memsize'][-1].lower() == 'g', \\\n- \"invalid param for memsize[%s], should be ended with 'G' or 'g'\" % (args['memsize'])\n+ \"invalid param for memsize[{}], should \" \\\n+ \"be ended with 'G' or 'g'\".format(args['memsize'])\ngb = args['memsize'][:-1]\nargs['memsize'] = int(gb) * 1024 ** 3\n@@ -103,22 +113,25 @@ class ParallelMappedDataset(ProxiedDataset):\nself._producer.daemon = True\nself._consumers = []\n+ self._consumer_endsig = {}\nfor i in range(consumer_num):\n+ consumer_id = 'consumer-' + id + '-' + str(i)\np = Worker(\ntarget=self._consume,\n- args=('consumer-' + id + '_' + str(i), self._inq, self._outq,\n+ args=(consumer_id, self._inq, self._outq,\nself._mapper))\nself._consumers.append(p)\np.daemon = True\n+ setattr(p, 'id', consumer_id)\nself._epoch = -1\nself._feeding_ev = Event()\nself._produced = 0 # produced sample in self._produce\nself._consumed = 0 # consumed sample in self.next\n- self._stopped_consumers = 0\ndef _produce(self, id, source, inq):\n\"\"\"Fetch data from source and feed it to 'inq' queue\"\"\"\n+ endsig = EndSignal(id)\nwhile True:\nself._feeding_ev.wait()\nif self._exit:\n@@ -128,32 +141,38 @@ class ParallelMappedDataset(ProxiedDataset):\nself._produced += 1\nexcept StopIteration:\nself._feeding_ev.clear()\n- self._feeding_ev.wait() # wait other guy to wake up me\n- logger.debug(\"producer[{}] starts new epoch\".format(id))\n+ self._feeding_ev.wait()\nexcept Exception as e:\n- msg = \"producer[{}] failed with error: {}\".format(id, str(e))\n- inq.put(EndSignal(-1, msg))\n+ endsig.errno = -1\n+ endsig.errmsg = \"producer[{}] failed with error: {}\" \\\n+ .format(id, str(e))\n+ inq.put(endsig)\nbreak\n- logger.debug(\"producer[{}] exits\".format(id))\n-\ndef _consume(self, id, inq, outq, mapper):\n\"\"\"Fetch data from 'inq', process it and put result to 'outq'\"\"\"\n+ if self._worker_args['use_process']:\n+ # handle SIGTERM signal to exit to prevent print stack frame\n+ signal.signal(signal.SIGTERM, lambda signum, frame : sys.exit())\n+\n+ endsig = EndSignal(id)\nwhile True:\nsample = inq.get()\nif isinstance(sample, EndSignal):\n- sample.errmsg += \"[consumer[{}] exits]\".format(id)\n- outq.put(sample)\n- logger.debug(\"end signal received, \" +\n- \"consumer[{}] exits\".format(id))\n+ endsig.errno = sample.errno\n+ endsig.errmsg = \"consumer[{}] exits for reason[{}]\" \\\n+ .format(id, sample.errmsg)\n+ outq.put(endsig)\nbreak\ntry:\nresult = mapper(sample)\noutq.put(result)\nexcept Exception as e:\n- msg = 'failed to map 
consumer[%s], error: {}'.format(str(e), id)\n- outq.put(EndSignal(-1, msg))\n+ endsig.errno = -2\n+ endsig.errmsg = \"consumer[{}] failed to map with error:[{}]\" \\\n+ .format(id, str(e))\n+ outq.put(endsig)\nbreak\ndef drained(self):\n@@ -168,6 +187,25 @@ class ParallelMappedDataset(ProxiedDataset):\nfor _ in range(len(self._consumers)):\nself._inq.put(EndSignal(0, \"notify consumers to exit\"))\n+ def _consumer_healthy(self):\n+ abnormal_num = 0\n+ for w in self._consumers:\n+ if not w.is_alive() and w.id not in self._consumer_endsig:\n+ abnormal_num += 1\n+ if self._worker_args['use_process']:\n+ errmsg = \"consumer[{}] exit abnormally with exitcode[{}]\" \\\n+ .format(w.pid, w.exitcode)\n+ else:\n+ errmsg = \"consumer[{}] exit abnormally\".format(w.ident)\n+\n+ logger.warn(errmsg)\n+\n+ if abnormal_num > 0:\n+ logger.warn(\"{} consumers have exited abnormally!!!\" \\\n+ .format(abnormal_num))\n+\n+ return abnormal_num == 0\n+\ndef next(self):\n\"\"\" get next transformed sample\n\"\"\"\n@@ -177,41 +215,54 @@ class ParallelMappedDataset(ProxiedDataset):\nif self.drained():\nraise StopIteration()\n- while True:\n- sample = self._outq.get()\n+ while not self._exit:\n+ try:\n+ sample = self._outq.get(timeout=3)\n+ except Empty as e:\n+ if not self._consumer_healthy():\n+ raise StopIteration()\n+ else:\n+ continue\n+\nif isinstance(sample, EndSignal):\n- self._stopped_consumers += 1\n- if sample.errno != 0:\n- logger.warn(\"consumer failed with error: {}\".format(\n- sample.errmsg))\n+ self._consumer_endsig[sample.id] = sample\n+ logger.warn(\"recv endsignal from outq with errmsg[{}]\" \\\n+ .format(sample.errmsg))\n- if self._stopped_consumers < len(self._consumers):\n+ if len(self._consumer_endsig.keys()) < len(self._consumers):\nself._inq.put(sample)\nelse:\n- raise ValueError(\"all consumers exited, no more samples\")\n+ self._exit = True\n+ raise StopIteration(\"all consumers exited, no more samples\")\nelse:\nself._consumed += 1\nreturn sample\n+ raise StopIteration()\n+\ndef reset(self):\n\"\"\" reset for a new epoch of samples\n\"\"\"\n+ assert not self._exit, \"cannot reset for already stopped dataset\"\n+\nif self._epoch < 0:\nself._epoch = 0\n- for p in self._consumers:\n- p.start()\n+ for w in self._consumers:\n+ w.start()\nself._producer.start()\nelse:\n+ assert self._consumer_healthy(), \"cannot start another pass of data\" \\\n+ \" for some consumers exited abnormally before!!!\"\n+\nif not self.drained():\n- logger.warn(\"do not reset before epoch[%d] finishes\".format(\n- self._epoch))\n+ logger.warn(\"reset before epoch[{}] finishes\".format(self._epoch))\nself._produced = self._produced - self._consumed\nelse:\nself._produced = 0\nself._epoch += 1\n- assert self._stopped_consumers == 0, \"some consumers already exited,\" \\\n+ assert len(self._consumer_endsig.keys()) == 0, \"some consumers already exited,\" \\\n+ \" cannot start another epoch\"\nself._source.reset()\n@@ -221,9 +272,4 @@ class ParallelMappedDataset(ProxiedDataset):\n# FIXME(dengkaipeng): fix me if you have better impliment\n# handle terminate reader process, do not print stack frame\n-def _reader_exit(signum, frame):\n- logger.debug(\"Reader process exit.\")\n- sys.exit()\n-\n-\n-signal.signal(signal.SIGTERM, _reader_exit)\n+signal.signal(signal.SIGTERM, lambda signum, frame : sys.exit())\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/data/transform/shared_queue/queue.py",
"new_path": "ppdet/data/transform/shared_queue/queue.py",
"diff": "@@ -22,9 +22,11 @@ import six\nif six.PY3:\nimport pickle\nfrom io import BytesIO as StringIO\n+ from queue import Empty\nelse:\nimport cPickle as pickle\nfrom cStringIO import StringIO\n+ from Queue import Empty\nimport logging\nimport traceback\n@@ -87,6 +89,8 @@ class SharedQueue(Queue):\nbuff = super(SharedQueue, self).get(**kwargs)\ndata = buff.get()\nreturn pickle.load(StringIO(data))\n+ except Empty as e:\n+ raise e\nexcept Exception as e:\nstack_info = traceback.format_exc()\nerr_msg = 'failed to get element from SharedQueue '\\\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/data/transform/shared_queue/sharedmemory.py",
"new_path": "ppdet/data/transform/shared_queue/sharedmemory.py",
"diff": "@@ -316,8 +316,6 @@ class PageAllocator(object):\nstart_pos = pos\nflags = ''\nwhile True:\n- # maybe flags already has some '0' pages,\n- # so just check 'page_num - len(flags)' pages\nflags = self.get_page_status(pos, page_num, ret_flag=True)\nif flags.count('0') == page_num:\n@@ -343,10 +341,10 @@ class PageAllocator(object):\nif free_pages == 0:\nerr_msg = 'all pages have been used:%s' % (str(self))\nelse:\n- err_msg = 'not found available pages with page_status[%s] '\\\n- 'and %d free pages' % (str(page_status), free_pages)\n- err_msg = 'failed to malloc %d pages at pos[%d] for reason[%s] and allocator status[%s]' \\\n- % (page_num, pos, err_msg, str(self))\n+ err_msg = 'not found enough pages[avail:%d, expect:%d] '\\\n+ 'with total free pages[%d]' % (page_status[0], page_num, free_pages)\n+ err_msg = 'failed to malloc %d pages at pos[%d] for reason[%s] '\\\n+ 'and allocator status[%s]' % (page_num, pos, err_msg, str(self))\nraise MemoryFullError(err_msg)\nself.set_page_status(pos, page_num, '1')\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Fix hang bug in parallel_map.py when multiprocessing mode used and some consumer exit abnormally (#106)
|
499,385 |
13.12.2019 18:04:53
| -28,800 |
b42cfe4679598c328184d06bcc6edd06b8ba2f35
|
Fix README in face_detection
|
[
{
"change_type": "MODIFY",
"old_path": ".gitignore",
"new_path": ".gitignore",
"diff": "@@ -56,9 +56,17 @@ coverage.xml\n/docs/_build/\n*.json\n+*.tar\ndataset/coco/annotations\ndataset/coco/train2017\ndataset/coco/val2017\ndataset/voc/VOCdevkit\n+dataset/fruit/fruit-detection/\n+dataset/voc/test.txt\n+dataset/voc/trainval.txt\n+dataset/wider_face/WIDER_test\n+dataset/wider_face/WIDER_train\n+dataset/wider_face/WIDER_val\n+dataset/wider_face/wider_face_split\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Fix README in face_detection (#113)
|
499,313 |
24.12.2019 17:02:40
| -28,800 |
8a8b279ac1fc106d41822ff5958baefcb4aa84c3
|
fix voc use_default_reader
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/source/voc.py",
"new_path": "ppdet/data/source/voc.py",
"diff": "@@ -72,6 +72,7 @@ class VOCDataSet(DataSet):\nself.roidbs = None\n# 'cname2id' is a dict to map category name to class id\nself.cname2cid = None\n+ self.use_default_label = use_default_label\nself.label_list = label_list\ndef load_roidb_and_cname2cid(self):\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
fix voc use_default_reader (#125)
|
499,300 |
30.12.2019 19:41:46
| -28,800 |
f9d57fcf0dc5cab2e5e26f6cbea9680905fecc50
|
Fix error in config doc
`--minimal` is option for `generate` and should be positioned after it
|
[
{
"change_type": "MODIFY",
"old_path": "docs/CONFIG.md",
"new_path": "docs/CONFIG.md",
"diff": "@@ -191,7 +191,7 @@ A small utility (`tools/configure.py`) is included to simplify the configuration\nFor a minimal version, run:\n```shell\n- python tools/configure.py --minimal generate FasterRCNN BBoxHead\n+ python tools/configure.py generate --minimal FasterRCNN BBoxHead\n```\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Fix error in config doc (#145)
`--minimal` is option for `generate` and should be positioned after it
|
499,385 |
30.12.2019 21:13:22
| -28,800 |
6574f8b27147925b9318a28097acf5c3fc4b8680
|
Change dtype of im_id to int64
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/blazeface.py",
"new_path": "ppdet/modeling/architectures/blazeface.py",
"diff": "@@ -176,7 +176,7 @@ class BlazeFace(object):\n# yapf: disable\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n'im_shape': {'shape': [None, 3], 'dtype': 'int32', 'lod_level': 0},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/cascade_mask_rcnn.py",
"new_path": "ppdet/modeling/architectures/cascade_mask_rcnn.py",
"diff": "@@ -371,7 +371,7 @@ class CascadeMaskRCNN(object):\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n'im_info': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'im_shape': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/cascade_rcnn.py",
"new_path": "ppdet/modeling/architectures/cascade_rcnn.py",
"diff": "@@ -283,7 +283,7 @@ class CascadeRCNN(object):\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n'im_info': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n'im_shape': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n'is_crowd': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/cascade_rcnn_cls_aware.py",
"new_path": "ppdet/modeling/architectures/cascade_rcnn_cls_aware.py",
"diff": "@@ -176,7 +176,7 @@ class CascadeRCNNClsAware(object):\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n'im_info': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'im_shape': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/faceboxes.py",
"new_path": "ppdet/modeling/architectures/faceboxes.py",
"diff": "@@ -150,7 +150,7 @@ class FaceBoxes(object):\n# yapf: disable\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n'im_shape': {'shape': [None, 3], 'dtype': 'int32', 'lod_level': 0},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/faster_rcnn.py",
"new_path": "ppdet/modeling/architectures/faster_rcnn.py",
"diff": "@@ -194,7 +194,7 @@ class FasterRCNN(object):\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n'im_info': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'im_shape': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/mask_rcnn.py",
"new_path": "ppdet/modeling/architectures/mask_rcnn.py",
"diff": "@@ -274,7 +274,7 @@ class MaskRCNN(object):\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n'im_info': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'im_shape': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/retinanet.py",
"new_path": "ppdet/modeling/architectures/retinanet.py",
"diff": "@@ -88,7 +88,7 @@ class RetinaNet(object):\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n'im_info': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'im_shape': {'shape': [None, 3], 'dtype': 'float32', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/ssd.py",
"new_path": "ppdet/modeling/architectures/ssd.py",
"diff": "@@ -95,7 +95,7 @@ class SSD(object):\n# yapf: disable\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'gt_bbox': {'shape': [None, 4], 'dtype': 'float32', 'lod_level': 1},\n'gt_class': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 1},\n'im_shape': {'shape': [None, 3], 'dtype': 'int32', 'lod_level': 0},\n"
},
{
"change_type": "MODIFY",
"old_path": "ppdet/modeling/architectures/yolov3.py",
"new_path": "ppdet/modeling/architectures/yolov3.py",
"diff": "@@ -96,7 +96,7 @@ class YOLOv3(object):\ninputs_def = {\n'image': {'shape': im_shape, 'dtype': 'float32', 'lod_level': 0},\n'im_size': {'shape': [None, 2], 'dtype': 'int32', 'lod_level': 0},\n- 'im_id': {'shape': [None, 1], 'dtype': 'int32', 'lod_level': 0},\n+ 'im_id': {'shape': [None, 1], 'dtype': 'int64', 'lod_level': 0},\n'gt_bbox': {'shape': [None, num_max_boxes, 4], 'dtype': 'float32', 'lod_level': 0},\n'gt_class': {'shape': [None, num_max_boxes], 'dtype': 'int32', 'lod_level': 0},\n'gt_score': {'shape': [None, num_max_boxes], 'dtype': 'float32', 'lod_level': 0},\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Change dtype of im_id to int64 (#148)
|
499,400 |
31.12.2019 11:13:06
| -28,800 |
993b955b1ce53c9a1c9ac6772bce7d1af4ce09ef
|
Add tutorial for sensitive
|
[
{
"change_type": "ADD",
"old_path": "slim/sensitive/images/mobilev1_yolov3_voc_sensitives.png",
"new_path": "slim/sensitive/images/mobilev1_yolov3_voc_sensitives.png",
"diff": "Binary files /dev/null and b/slim/sensitive/images/mobilev1_yolov3_voc_sensitives.png differ\n"
},
{
"change_type": "ADD",
"old_path": null,
"new_path": "slim/sensitive/sensitive.py",
"diff": "+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.\n+#\n+# Licensed under the Apache License, Version 2.0 (the \"License\");\n+# you may not use this file except in compliance with the License.\n+# You may obtain a copy of the License at\n+#\n+# http://www.apache.org/licenses/LICENSE-2.0\n+#\n+# Unless required by applicable law or agreed to in writing, software\n+# distributed under the License is distributed on an \"AS IS\" BASIS,\n+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+# See the License for the specific language governing permissions and\n+# limitations under the License.\n+\n+from __future__ import absolute_import\n+from __future__ import division\n+from __future__ import print_function\n+\n+import os\n+import time\n+import numpy as np\n+import datetime\n+from collections import deque\n+\n+\n+def set_paddle_flags(**kwargs):\n+ for key, value in kwargs.items():\n+ if os.environ.get(key, None) is None:\n+ os.environ[key] = str(value)\n+\n+\n+# NOTE(paddle-dev): All of these flags should be set before\n+# `import paddle`. Otherwise, it would not take any effect.\n+set_paddle_flags(\n+ FLAGS_eager_delete_tensor_gb=0, # enable GC to save memory\n+)\n+\n+\n+from paddle import fluid\n+from ppdet.experimental import mixed_precision_context\n+from ppdet.core.workspace import load_config, merge_config, create\n+#from ppdet.data.data_feed import create_reader\n+\n+from ppdet.data.reader import create_reader\n+\n+from ppdet.utils.cli import print_total_cfg\n+from ppdet.utils import dist_utils\n+from ppdet.utils.eval_utils import parse_fetches, eval_run, eval_results\n+from ppdet.utils.stats import TrainingStats\n+from ppdet.utils.cli import ArgsParser\n+from ppdet.utils.check import check_gpu, check_version\n+import ppdet.utils.checkpoint as checkpoint\n+from ppdet.modeling.model_input import create_feed\n+from paddleslim.prune import sensitivity\n+import logging\n+FORMAT = '%(asctime)s-%(levelname)s: %(message)s'\n+logging.basicConfig(level=logging.INFO, format=FORMAT)\n+logger = logging.getLogger(__name__)\n+\n+\n+def main():\n+ env = os.environ\n+\n+ print(\"FLAGS.config: {}\".format(FLAGS.config))\n+ cfg = load_config(FLAGS.config)\n+ assert 'architecture' in cfg\n+ main_arch = cfg.architecture\n+\n+ merge_config(FLAGS.opt)\n+\n+ print_total_cfg(cfg)\n+\n+ place = fluid.CUDAPlace(0)\n+ exe = fluid.Executor(place)\n+\n+ # build program\n+ startup_prog = fluid.Program()\n+ eval_prog = fluid.Program()\n+ with fluid.program_guard(eval_prog, startup_prog):\n+ with fluid.unique_name.guard():\n+ model = create(main_arch)\n+ inputs_def = cfg['EvalReader']['inputs_def']\n+ feed_vars, eval_loader = model.build_inputs(**inputs_def)\n+ fetches = model.eval(feed_vars)\n+ eval_prog = eval_prog.clone(True)\n+\n+ if FLAGS.print_params:\n+ print(\"-------------------------All parameters in current graph----------------------\")\n+ for block in eval_prog.blocks:\n+ for param in block.all_parameters():\n+ print(\"parameter name: {}\\tshape: {}\".format(param.name, param.shape))\n+ print(\"------------------------------------------------------------------------------\")\n+ return\n+\n+ eval_reader = create_reader(cfg.EvalReader)\n+ eval_loader.set_sample_list_generator(eval_reader, place)\n+\n+ # parse eval fetches\n+ extra_keys = []\n+ if cfg.metric == 'COCO':\n+ extra_keys = ['im_info', 'im_id', 'im_shape']\n+ if cfg.metric == 'VOC':\n+ extra_keys = ['gt_box', 'gt_label', 'is_difficult']\n+ if cfg.metric == 'WIDERFACE':\n+ extra_keys = ['im_id', 
'im_shape', 'gt_box']\n+ eval_keys, eval_values, eval_cls = parse_fetches(fetches, eval_prog,\n+ extra_keys)\n+\n+ exe.run(startup_prog)\n+\n+ fuse_bn = getattr(model.backbone, 'norm_type', None) == 'affine_channel'\n+\n+ ignore_params = cfg.finetune_exclude_pretrained_params \\\n+ if 'finetune_exclude_pretrained_params' in cfg else []\n+\n+ start_iter = 0\n+\n+ if cfg.weights:\n+ checkpoint.load_params(exe, eval_prog, cfg.weights)\n+ else:\n+ logger.warn(\"Please set cfg.weights to load trained model.\")\n+\n+ # whether output bbox is normalized in model output layer\n+ is_bbox_normalized = False\n+ if hasattr(model, 'is_bbox_normalized') and \\\n+ callable(model.is_bbox_normalized):\n+ is_bbox_normalized = model.is_bbox_normalized()\n+\n+ # if map_type not set, use default 11point, only use in VOC eval\n+ map_type = cfg.map_type if 'map_type' in cfg else '11point'\n+\n+ def test(program):\n+\n+ compiled_eval_prog = fluid.compiler.CompiledProgram(program)\n+\n+ results = eval_run(exe, compiled_eval_prog, eval_loader,\n+ eval_keys, eval_values, eval_cls)\n+ resolution = None\n+ if 'mask' in results[0]:\n+ resolution = model.mask_head.resolution\n+ dataset = cfg['EvalReader']['dataset']\n+ box_ap_stats = eval_results(\n+ results,\n+ cfg.metric,\n+ cfg.num_classes,\n+ resolution,\n+ is_bbox_normalized,\n+ FLAGS.output_eval,\n+ map_type,\n+ dataset=dataset)\n+ return box_ap_stats[0]\n+\n+ pruned_params = FLAGS.pruned_params\n+\n+ assert (FLAGS.pruned_params is not None), \"FLAGS.pruned_params is empty!!! Please set it by '--pruned_params' option.\"\n+ pruned_params = FLAGS.pruned_params.strip().split(\",\")\n+ logger.info(\"pruned params: {}\".format(pruned_params))\n+ pruned_ratios = [float(n) for n in FLAGS.pruned_ratios.strip().split(\" \")]\n+ logger.info(\"pruned ratios: {}\".format(pruned_ratios))\n+ sensitivity(eval_prog,\n+ place,\n+ pruned_params,\n+ test,\n+ sensitivities_file=FLAGS.sensitivities_file,\n+ pruned_ratios=pruned_ratios)\n+\n+\n+if __name__ == '__main__':\n+ parser = ArgsParser()\n+ parser.add_argument(\n+ \"--output_eval\",\n+ default=None,\n+ type=str,\n+ help=\"Evaluation directory, default is current directory.\")\n+ parser.add_argument(\n+ \"-d\",\n+ \"--dataset_dir\",\n+ default=None,\n+ type=str,\n+ help=\"Dataset path, same as DataFeed.dataset.dataset_dir\")\n+ parser.add_argument(\n+ \"-s\",\n+ \"--sensitivities_file\",\n+ default=\"sensitivities.data\",\n+ type=str,\n+ help=\"The file used to save sensitivities.\")\n+ parser.add_argument(\n+ \"-p\",\n+ \"--pruned_params\",\n+ default=None,\n+ type=str,\n+ help=\"The parameters to be pruned when calculating sensitivities.\")\n+ parser.add_argument(\n+ \"-r\",\n+ \"--pruned_ratios\",\n+ default=\"0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9\",\n+ type=str,\n+ help=\"The ratios pruned iteratively for each parameter when calculating sensitivities.\")\n+ parser.add_argument(\n+ \"-P\",\n+ \"--print_params\",\n+ default=False,\n+ action='store_true',\n+ help=\"Whether to only print the parameters' names and shapes.\")\n+ FLAGS = parser.parse_args()\n+ main()\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Add tutorial for sensitive (#129)
|
499,333 |
31.12.2019 16:31:06
| -28,800 |
684fe1b662bec75f161d08f16696573fb7e6ec31
|
support use_default_label for infer
|
[
{
"change_type": "MODIFY",
"old_path": "ppdet/data/source/dataset.py",
"new_path": "ppdet/data/source/dataset.py",
"diff": "@@ -120,12 +120,11 @@ class ImageFolder(DataSet):\nanno_path=None,\nsample_num=-1,\nwith_background=True,\n+ use_default_label=None,\n**kwargs):\n- super(ImageFolder, self).__init__(image_dir, anno_path, sample_num,\n- dataset_dir, with_background)\n- self.anno_path = anno_path\n- self.sample_num = sample_num\n- self.with_background = with_background\n+ super(ImageFolder, self).__init__(dataset_dir, image_dir, anno_path,\n+ sample_num, with_background,\n+ use_default_label)\nself.roidbs = None\nself._imid2path = {}\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
support use_default_label for infer (#151)
|
499,400 |
02.01.2020 15:13:40
| -28,800 |
d941928d0ec0c4490f199ad7c564a70a0299c004
|
Add tutorial for pruning.
|
[
{
"change_type": "ADD",
"old_path": null,
"new_path": "slim/prune/prune.py",
"diff": "+# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.\n+#\n+# Licensed under the Apache License, Version 2.0 (the \"License\");\n+# you may not use this file except in compliance with the License.\n+# You may obtain a copy of the License at\n+#\n+# http://www.apache.org/licenses/LICENSE-2.0\n+#\n+# Unless required by applicable law or agreed to in writing, software\n+# distributed under the License is distributed on an \"AS IS\" BASIS,\n+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+# See the License for the specific language governing permissions and\n+# limitations under the License.\n+\n+from __future__ import absolute_import\n+from __future__ import division\n+from __future__ import print_function\n+\n+import os\n+import time\n+import numpy as np\n+import datetime\n+from collections import deque\n+from paddleslim.prune import Pruner\n+from paddleslim.analysis import flops\n+from paddle import fluid\n+from ppdet.experimental import mixed_precision_context\n+from ppdet.core.workspace import load_config, merge_config, create\n+from ppdet.data.reader import create_reader\n+from ppdet.utils.cli import print_total_cfg\n+from ppdet.utils import dist_utils\n+from ppdet.utils.eval_utils import parse_fetches, eval_run, eval_results\n+from ppdet.utils.stats import TrainingStats\n+from ppdet.utils.cli import ArgsParser\n+from ppdet.utils.check import check_gpu, check_version\n+import ppdet.utils.checkpoint as checkpoint\n+from ppdet.modeling.model_input import create_feed\n+\n+import logging\n+FORMAT = '%(asctime)s-%(levelname)s: %(message)s'\n+logging.basicConfig(level=logging.INFO, format=FORMAT)\n+logger = logging.getLogger(__name__)\n+\n+\n+def main():\n+ env = os.environ\n+ FLAGS.dist = 'PADDLE_TRAINER_ID' in env and 'PADDLE_TRAINERS_NUM' in env\n+ if FLAGS.dist:\n+ trainer_id = int(env['PADDLE_TRAINER_ID'])\n+ import random\n+ local_seed = (99 + trainer_id)\n+ random.seed(local_seed)\n+ np.random.seed(local_seed)\n+\n+ cfg = load_config(FLAGS.config)\n+ if 'architecture' in cfg:\n+ main_arch = cfg.architecture\n+ else:\n+ raise ValueError(\"'architecture' not specified in config file.\")\n+\n+ merge_config(FLAGS.opt)\n+\n+ if 'log_iter' not in cfg:\n+ cfg.log_iter = 20\n+\n+ # check if set use_gpu=True in paddlepaddle cpu version\n+ check_gpu(cfg.use_gpu)\n+ # check if paddlepaddle version is satisfied\n+ check_version()\n+ if not FLAGS.dist or trainer_id == 0:\n+ print_total_cfg(cfg)\n+\n+ if cfg.use_gpu:\n+ devices_num = fluid.core.get_cuda_device_count()\n+ else:\n+ devices_num = int(os.environ.get('CPU_NUM', 1))\n+\n+ if 'FLAGS_selected_gpus' in env:\n+ device_id = int(env['FLAGS_selected_gpus'])\n+ else:\n+ device_id = 0\n+ place = fluid.CUDAPlace(device_id) if cfg.use_gpu else fluid.CPUPlace()\n+ exe = fluid.Executor(place)\n+\n+ lr_builder = create('LearningRate')\n+ optim_builder = create('OptimizerBuilder')\n+\n+ # build program\n+ startup_prog = fluid.Program()\n+ train_prog = fluid.Program()\n+ with fluid.program_guard(train_prog, startup_prog):\n+ with fluid.unique_name.guard():\n+ model = create(main_arch)\n+ if FLAGS.fp16:\n+ assert (getattr(model.backbone, 'norm_type', None)\n+ != 'affine_channel'), \\\n+ '--fp16 currently does not support affine channel, ' \\\n+ ' please modify backbone settings to use batch norm'\n+\n+ with mixed_precision_context(FLAGS.loss_scale, FLAGS.fp16) as ctx:\n+ inputs_def = cfg['TrainReader']['inputs_def']\n+ feed_vars, train_loader = model.build_inputs(**inputs_def)\n+ train_fetches = 
model.train(feed_vars)\n+ loss = train_fetches['loss']\n+ if FLAGS.fp16:\n+ loss *= ctx.get_loss_scale_var()\n+ lr = lr_builder()\n+ optimizer = optim_builder(lr)\n+ optimizer.minimize(loss)\n+ if FLAGS.fp16:\n+ loss /= ctx.get_loss_scale_var()\n+\n+ # parse train fetches\n+ train_keys, train_values, _ = parse_fetches(train_fetches)\n+ train_values.append(lr)\n+\n+ if FLAGS.print_params:\n+ print(\"-------------------------All parameters in current graph----------------------\")\n+ for block in train_prog.blocks:\n+ for param in block.all_parameters():\n+ print(\"parameter name: {}\\tshape: {}\".format(param.name, param.shape))\n+ print(\"------------------------------------------------------------------------------\")\n+ return\n+\n+ if FLAGS.eval:\n+ eval_prog = fluid.Program()\n+ with fluid.program_guard(eval_prog, startup_prog):\n+ with fluid.unique_name.guard():\n+ model = create(main_arch)\n+ inputs_def = cfg['EvalReader']['inputs_def']\n+ feed_vars, eval_loader = model.build_inputs(**inputs_def)\n+ fetches = model.eval(feed_vars)\n+ eval_prog = eval_prog.clone(True)\n+\n+ eval_reader = create_reader(cfg.EvalReader)\n+ eval_loader.set_sample_list_generator(eval_reader, place)\n+\n+ # parse eval fetches\n+ extra_keys = []\n+ if cfg.metric == 'COCO':\n+ extra_keys = ['im_info', 'im_id', 'im_shape']\n+ if cfg.metric == 'VOC':\n+ extra_keys = ['gt_box', 'gt_label', 'is_difficult']\n+ if cfg.metric == 'WIDERFACE':\n+ extra_keys = ['im_id', 'im_shape', 'gt_box']\n+ eval_keys, eval_values, eval_cls = parse_fetches(fetches, eval_prog,\n+ extra_keys)\n+\n+ # compile program for multi-devices\n+ build_strategy = fluid.BuildStrategy()\n+ build_strategy.fuse_all_optimizer_ops = False\n+ build_strategy.fuse_elewise_add_act_ops = True\n+ # only enable sync_bn in multi GPU devices\n+ sync_bn = getattr(model.backbone, 'norm_type', None) == 'sync_bn'\n+ build_strategy.sync_batch_norm = sync_bn and devices_num > 1 \\\n+ and cfg.use_gpu\n+\n+ exec_strategy = fluid.ExecutionStrategy()\n+ # iteration number when CompiledProgram tries to drop local execution scopes.\n+ # Set it to be 1 to save memory usages, so that unused variables in\n+ # local execution scopes can be deleted after each iteration.\n+ exec_strategy.num_iteration_per_drop_scope = 1\n+ if FLAGS.dist:\n+ dist_utils.prepare_for_multi_process(exe, build_strategy, startup_prog,\n+ train_prog)\n+ exec_strategy.num_threads = 1\n+\n+ exe.run(startup_prog)\n+\n+ fuse_bn = getattr(model.backbone, 'norm_type', None) == 'affine_channel'\n+\n+ start_iter = 0\n+ if FLAGS.resume_checkpoint:\n+ checkpoint.load_checkpoint(exe, train_prog, FLAGS.resume_checkpoint)\n+ start_iter = checkpoint.global_step()\n+ elif cfg.pretrain_weights:\n+ checkpoint.load_params(\n+ exe, train_prog, cfg.pretrain_weights)\n+\n+\n+ pruned_params = FLAGS.pruned_params\n+ assert (FLAGS.pruned_params is not None), \"FLAGS.pruned_params is empty!!! 
Please set it by '--pruned_params' option.\"\n+ pruned_params = FLAGS.pruned_params.strip().split(\",\")\n+ logger.info(\"pruned params: {}\".format(pruned_params))\n+ pruned_ratios = [float(n) for n in FLAGS.pruned_ratios.strip().split(\" \")]\n+ logger.info(\"pruned ratios: {}\".format(pruned_ratios))\n+ assert(len(pruned_params) == len(pruned_ratios)), \"The length of pruned params and pruned ratios should be equal.\"\n+ assert(pruned_ratios > [0] * len(pruned_ratios) and pruned_ratios < [1] * len(pruned_ratios)), \"The elements of pruned ratios should be in range (0, 1).\"\n+\n+\n+ pruner = Pruner()\n+ train_prog = pruner.prune(\n+ train_prog,\n+ fluid.global_scope(),\n+ params=pruned_params,\n+ ratios=pruned_ratios,\n+ place=place,\n+ only_graph=False)[0]\n+\n+ compiled_train_prog = fluid.CompiledProgram(train_prog).with_data_parallel(\n+ loss_name=loss.name,\n+ build_strategy=build_strategy,\n+ exec_strategy=exec_strategy)\n+\n+ if FLAGS.eval:\n+\n+ base_flops = flops(eval_prog)\n+ eval_prog = pruner.prune(\n+ eval_prog,\n+ fluid.global_scope(),\n+ params=pruned_params,\n+ ratios=pruned_ratios,\n+ place=place,\n+ only_graph=True)[0]\n+ pruned_flops = flops(eval_prog)\n+ logger.info(\"FLOPs -{}; total FLOPs: {}; pruned FLOPs: {}\".format(float(base_flops - pruned_flops)/base_flops, base_flops, pruned_flops))\n+ compiled_eval_prog = fluid.compiler.CompiledProgram(eval_prog)\n+\n+\n+\n+ train_reader = create_reader(cfg.TrainReader, (cfg.max_iters - start_iter) *\n+ devices_num, cfg)\n+ train_loader.set_sample_list_generator(train_reader, place)\n+\n+ # whether output bbox is normalized in model output layer\n+ is_bbox_normalized = False\n+ if hasattr(model, 'is_bbox_normalized') and \\\n+ callable(model.is_bbox_normalized):\n+ is_bbox_normalized = model.is_bbox_normalized()\n+\n+ # if map_type not set, use default 11point, only use in VOC eval\n+ map_type = cfg.map_type if 'map_type' in cfg else '11point'\n+\n+ train_stats = TrainingStats(cfg.log_smooth_window, train_keys)\n+ train_loader.start()\n+ start_time = time.time()\n+ end_time = time.time()\n+\n+ cfg_name = os.path.basename(FLAGS.config).split('.')[0]\n+ save_dir = os.path.join(cfg.save_dir, cfg_name)\n+ time_stat = deque(maxlen=cfg.log_smooth_window)\n+ best_box_ap_list = [0.0, 0] #[map, iter]\n+\n+ # use tb-paddle to log data\n+ if FLAGS.use_tb:\n+ from tb_paddle import SummaryWriter\n+ tb_writer = SummaryWriter(FLAGS.tb_log_dir)\n+ tb_loss_step = 0\n+ tb_mAP_step = 0\n+\n+\n+\n+ if FLAGS.eval:\n+ # evaluation\n+ results = eval_run(exe, compiled_eval_prog, eval_loader,\n+ eval_keys, eval_values, eval_cls)\n+ resolution = None\n+ if 'mask' in results[0]:\n+ resolution = model.mask_head.resolution\n+ dataset = cfg['EvalReader']['dataset']\n+ box_ap_stats = eval_results(\n+ results,\n+ cfg.metric,\n+ cfg.num_classes,\n+ resolution,\n+ is_bbox_normalized,\n+ FLAGS.output_eval,\n+ map_type,\n+ dataset=dataset)\n+\n+\n+\n+ for it in range(start_iter, cfg.max_iters):\n+ start_time = end_time\n+ end_time = time.time()\n+ time_stat.append(end_time - start_time)\n+ time_cost = np.mean(time_stat)\n+ eta_sec = (cfg.max_iters - it) * time_cost\n+ eta = str(datetime.timedelta(seconds=int(eta_sec)))\n+ outs = exe.run(compiled_train_prog, fetch_list=train_values)\n+ stats = {k: np.array(v).mean() for k, v in zip(train_keys, outs[:-1])}\n+\n+ # use tb-paddle to log loss\n+ if FLAGS.use_tb:\n+ if it % cfg.log_iter == 0:\n+ for loss_name, loss_value in stats.items():\n+ tb_writer.add_scalar(loss_name, loss_value, tb_loss_step)\n+ tb_loss_step 
+= 1\n+\n+ train_stats.update(stats)\n+ logs = train_stats.log()\n+ if it % cfg.log_iter == 0 and (not FLAGS.dist or trainer_id == 0):\n+ strs = 'iter: {}, lr: {:.6f}, {}, time: {:.3f}, eta: {}'.format(\n+ it, np.mean(outs[-1]), logs, time_cost, eta)\n+ logger.info(strs)\n+\n+ if (it > 0 and it % cfg.snapshot_iter == 0 or it == cfg.max_iters - 1) \\\n+ and (not FLAGS.dist or trainer_id == 0):\n+ save_name = str(it) if it != cfg.max_iters - 1 else \"model_final\"\n+ checkpoint.save(exe, train_prog, os.path.join(save_dir, save_name))\n+\n+ if FLAGS.eval:\n+ # evaluation\n+ results = eval_run(exe, compiled_eval_prog, eval_loader,\n+ eval_keys, eval_values, eval_cls)\n+ resolution = None\n+ if 'mask' in results[0]:\n+ resolution = model.mask_head.resolution\n+ box_ap_stats = eval_results(\n+ results, eval_feed, cfg.metric, cfg.num_classes, resolution,\n+ is_bbox_normalized, FLAGS.output_eval, map_type)\n+\n+ # use tb_paddle to log mAP\n+ if FLAGS.use_tb:\n+ tb_writer.add_scalar(\"mAP\", box_ap_stats[0], tb_mAP_step)\n+ tb_mAP_step += 1\n+\n+ if box_ap_stats[0] > best_box_ap_list[0]:\n+ best_box_ap_list[0] = box_ap_stats[0]\n+ best_box_ap_list[1] = it\n+ checkpoint.save(exe, train_prog,\n+ os.path.join(save_dir, \"best_model\"))\n+ logger.info(\"Best test box ap: {}, in iter: {}\".format(\n+ best_box_ap_list[0], best_box_ap_list[1]))\n+\n+ train_loader.reset()\n+\n+\n+if __name__ == '__main__':\n+ parser = ArgsParser()\n+ parser.add_argument(\n+ \"-r\",\n+ \"--resume_checkpoint\",\n+ default=None,\n+ type=str,\n+ help=\"Checkpoint path for resuming training.\")\n+ parser.add_argument(\n+ \"--fp16\",\n+ action='store_true',\n+ default=False,\n+ help=\"Enable mixed precision training.\")\n+ parser.add_argument(\n+ \"--loss_scale\",\n+ default=8.,\n+ type=float,\n+ help=\"Mixed precision training loss scale.\")\n+ parser.add_argument(\n+ \"--eval\",\n+ action='store_true',\n+ default=False,\n+ help=\"Whether to perform evaluation in train\")\n+ parser.add_argument(\n+ \"--output_eval\",\n+ default=None,\n+ type=str,\n+ help=\"Evaluation directory, default is current directory.\")\n+ parser.add_argument(\n+ \"--use_tb\",\n+ type=bool,\n+ default=False,\n+ help=\"whether to record the data to Tensorboard.\")\n+ parser.add_argument(\n+ '--tb_log_dir',\n+ type=str,\n+ default=\"tb_log_dir/scalar\",\n+ help='Tensorboard logging directory for scalar.')\n+\n+ parser.add_argument(\n+ \"-p\",\n+ \"--pruned_params\",\n+ default=None,\n+ type=str,\n+ help=\"The parameters to be pruned when calculating sensitivities.\")\n+ parser.add_argument(\n+ \"--pruned_ratios\",\n+ default=\"0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9\",\n+ type=str,\n+ help=\"The ratios pruned iteratively for each parameter when calculating sensitivities.\")\n+ parser.add_argument(\n+ \"-P\",\n+ \"--print_params\",\n+ default=False,\n+ action='store_true',\n+ help=\"Whether to only print the parameters' names and shapes.\")\n+ FLAGS = parser.parse_args()\n+ main()\n"
}
] |
Python
|
Apache License 2.0
|
paddlepaddle/paddledetection
|
Add tutorial for pruning. (#152)
|