diff --git a/.github/workflows/pullRequests.yml b/.github/workflows/pullRequests.yml
index adb49a400f8..07bef462152 100644
--- a/.github/workflows/pullRequests.yml
+++ b/.github/workflows/pullRequests.yml
@@ -92,7 +92,9 @@ jobs:
   assignMilestone:
     name: Assign milestone
     needs: constants
-    if: needs.constants.outputs.is-fork-pr != 'true'
+    if: >-
+      needs.constants.outputs.is-fork-pr != 'true' &&
+      github.event.pull_request.milestone == null
     steps:
       - uses: actions/setup-node@v4
         with:
diff --git a/.github/workflows/pullRequestsCommandCypress.yml b/.github/workflows/pullRequestsCommandCypress.yml
index 712e3e661d0..59103fd5f2c 100644
--- a/.github/workflows/pullRequestsCommandCypress.yml
+++ b/.github/workflows/pullRequestsCommandCypress.yml
@@ -3,9 +3,9 @@
 # and run "github-actions-wac build" (or "ghawac build") to regenerate this file.
 # For more information, run "github-actions-wac --help".
 name: Pull Requests Command - Cypress
-"on": issue_comment
+'on': issue_comment
 env:
-  NODE_OPTIONS: "--max_old_space_size=4096"
+  NODE_OPTIONS: '--max_old_space_size=4096'
   AWS_REGION: eu-central-1
 jobs:
   checkComment:
@@ -21,9 +21,9 @@
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           command: cypress
-          reaction: "true"
+          reaction: 'true'
           reaction-type: eyes
-          allow-edits: "false"
+          allow-edits: 'false'
           permission-level: write
       - name: Create comment
         uses: peter-evans/create-or-update-comment@v2
         with:
           issue-number: ${{ github.event.issue.number }}
           body: >-
             Cypress E2E tests have been initiated (for more information, click
             [here](https://github.com/webiny/webiny-js/actions/runs/${{
             github.run_id }})). :sparkles:
     runs-on: ubuntu-latest
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   validateWorkflows:
     name: Validate workflows
@@ -51,7 +51,7 @@
     needs: checkComment
     runs-on: ubuntu-latest
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   baseBranch:
     needs: checkComment
@@ -72,7 +72,7 @@
           baseRefName -q .baseRefName)" >> $GITHUB_OUTPUT
     runs-on: ubuntu-latest
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   constants:
     needs: baseBranch
@@ -97,7 +97,7 @@
           vars.RANDOM_CACHE_KEY_SUFFIX }}" >> $GITHUB_OUTPUT
     runs-on: ubuntu-latest
    env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   build:
     name: Build
@@ -136,7 +136,7 @@
           path: ${{ needs.baseBranch.outputs.base-branch }}/.webiny/cached-packages
           key: ${{ needs.constants.outputs.run-cache-key }}
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   e2e-wby-cms-ddb-constants:
     needs:
@@ -172,7 +172,7 @@
           github.run_id }}_ddb" >> $GITHUB_OUTPUT
     runs-on: ubuntu-latest
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   e2e-wby-cms-ddb-project-setup:
     needs:
@@ -184,7 +184,7 @@
       cypress-config: ${{ steps.save-cypress-config.outputs.cypress-config }}
    environment: next
    env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
       CYPRESS_MAILOSAUR_API_KEY: ${{ secrets.CYPRESS_MAILOSAUR_API_KEY }}
       PULUMI_CONFIG_PASSPHRASE: ${{ secrets.PULUMI_CONFIG_PASSPHRASE }}
@@ -328,7 +328,7 @@
       }}
     environment: next
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
       CYPRESS_MAILOSAUR_API_KEY: ${{
         secrets.CYPRESS_MAILOSAUR_API_KEY }}
       PULUMI_CONFIG_PASSPHRASE: ${{ secrets.PULUMI_CONFIG_PASSPHRASE }}
@@ -404,7 +404,7 @@
           github.run_id }}_ddb-es" >> $GITHUB_OUTPUT
     runs-on: ubuntu-latest
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   e2e-wby-cms-ddb-es-project-setup:
     needs:
@@ -416,7 +416,7 @@
       cypress-config: ${{ steps.save-cypress-config.outputs.cypress-config }}
     environment: next
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
       CYPRESS_MAILOSAUR_API_KEY: ${{ secrets.CYPRESS_MAILOSAUR_API_KEY }}
       PULUMI_CONFIG_PASSPHRASE: ${{ secrets.PULUMI_CONFIG_PASSPHRASE }}
@@ -564,7 +564,7 @@
       }}
     environment: next
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
       CYPRESS_MAILOSAUR_API_KEY: ${{ secrets.CYPRESS_MAILOSAUR_API_KEY }}
       PULUMI_CONFIG_PASSPHRASE: ${{ secrets.PULUMI_CONFIG_PASSPHRASE }}
@@ -644,7 +644,7 @@
           github.run_id }}_ddb-os" >> $GITHUB_OUTPUT
     runs-on: ubuntu-latest
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
   e2e-wby-cms-ddb-os-project-setup:
     needs:
@@ -656,7 +656,7 @@
       cypress-config: ${{ steps.save-cypress-config.outputs.cypress-config }}
     environment: next
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
       CYPRESS_MAILOSAUR_API_KEY: ${{ secrets.CYPRESS_MAILOSAUR_API_KEY }}
       PULUMI_CONFIG_PASSPHRASE: ${{ secrets.PULUMI_CONFIG_PASSPHRASE }}
@@ -804,7 +804,7 @@
       }}
     environment: next
     env:
-      NODE_OPTIONS: "--max_old_space_size=4096"
+      NODE_OPTIONS: '--max_old_space_size=4096'
       YARN_ENABLE_IMMUTABLE_INSTALLS: false
       CYPRESS_MAILOSAUR_API_KEY: ${{ secrets.CYPRESS_MAILOSAUR_API_KEY }}
       PULUMI_CONFIG_PASSPHRASE: ${{ secrets.PULUMI_CONFIG_PASSPHRASE }}
diff --git a/.github/workflows/pullRequestsCommandJest.yml b/.github/workflows/pullRequestsCommandJest.yml
new file mode 100644
index 00000000000..87683a5ce36
--- /dev/null
+++ b/.github/workflows/pullRequestsCommandJest.yml
@@ -0,0 +1,466 @@
+# This file was automatically generated by github-actions-wac.
+# DO NOT MODIFY IT BY HAND. Instead, modify the source *.wac.ts file(s)
+# and run "github-actions-wac build" (or "ghawac build") to regenerate this file.
+# For more information, run "github-actions-wac --help".
+name: Pull Requests Command - Jest
+'on': issue_comment
+env:
+  NODE_OPTIONS: '--max_old_space_size=4096'
+  AWS_REGION: eu-central-1
+jobs:
+  checkComment:
+    name: Check comment for /jest
+    if: ${{ github.event.issue.pull_request }}
+    steps:
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - name: Check for Command
+        id: command
+        uses: xt0rted/slash-command-action@v2
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          command: jest
+          reaction: 'true'
+          reaction-type: eyes
+          allow-edits: 'false'
+          permission-level: write
+      - name: Create comment
+        uses: peter-evans/create-or-update-comment@v2
+        with:
+          issue-number: ${{ github.event.issue.number }}
+          body: >-
+            Jest tests have been initiated (for more information, click
+            [here](https://github.com/webiny/webiny-js/actions/runs/${{
+            github.run_id }})). :sparkles:
+    runs-on: ubuntu-latest
+    env:
+      NODE_OPTIONS: '--max_old_space_size=4096'
+      YARN_ENABLE_IMMUTABLE_INSTALLS: false
+  validateWorkflows:
+    name: Validate workflows
+    steps:
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - uses: actions/checkout@v4
+      - name: Install dependencies
+        run: yarn --immutable
+      - name: Validate
+        run: npx github-actions-wac validate
+    needs: checkComment
+    runs-on: ubuntu-latest
+    env:
+      NODE_OPTIONS: '--max_old_space_size=4096'
+      YARN_ENABLE_IMMUTABLE_INSTALLS: false
+  baseBranch:
+    needs: checkComment
+    name: Get base branch
+    outputs:
+      base-branch: ${{ steps.base-branch.outputs.base-branch }}
+    steps:
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - uses: actions/checkout@v4
+      - name: Get base branch
+        id: base-branch
+        env:
+          GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
+        run: >-
+          echo "base-branch=$(gh pr view ${{ github.event.issue.number }} --json
+          baseRefName -q .baseRefName)" >> $GITHUB_OUTPUT
+    runs-on: ubuntu-latest
+    env:
+      NODE_OPTIONS: '--max_old_space_size=4096'
+      YARN_ENABLE_IMMUTABLE_INSTALLS: false
+  constants:
+    needs: baseBranch
+    name: Create constants
+    outputs:
+      global-cache-key: ${{ steps.global-cache-key.outputs.global-cache-key }}
+      run-cache-key: ${{ steps.run-cache-key.outputs.run-cache-key }}
+    steps:
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - name: Create global cache key
+        id: global-cache-key
+        run: >-
+          echo "global-cache-key=${{ needs.baseBranch.outputs.base-branch }}-${{
+          runner.os }}-$(/bin/date -u "+%m%d")-${{ vars.RANDOM_CACHE_KEY_SUFFIX
+          }}" >> $GITHUB_OUTPUT
+      - name: Create workflow run cache key
+        id: run-cache-key
+        run: >-
+          echo "run-cache-key=${{ github.run_id }}-${{ github.run_attempt }}-${{
+          vars.RANDOM_CACHE_KEY_SUFFIX }}" >> $GITHUB_OUTPUT
+    runs-on: ubuntu-latest
+    env:
+      NODE_OPTIONS: '--max_old_space_size=4096'
+      YARN_ENABLE_IMMUTABLE_INSTALLS: false
+  build:
+    name: Build
+    needs:
+      - baseBranch
+      - constants
+    runs-on: webiny-build-packages
+    steps:
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - uses: actions/checkout@v4
+        with:
+          path: ${{ needs.baseBranch.outputs.base-branch }}
+      - name: Checkout Pull Request
+        working-directory: ${{ needs.baseBranch.outputs.base-branch }}
+        run: gh pr checkout ${{ github.event.issue.number }}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
+      - uses: actions/cache@v4
+        with:
+          path: ${{ needs.baseBranch.outputs.base-branch }}/.yarn/cache
+          key: yarn-${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
+      - uses: actions/cache@v4
+        with:
+          path: ${{ needs.baseBranch.outputs.base-branch }}/.webiny/cached-packages
+          key: ${{ needs.constants.outputs.global-cache-key }}
+      - name: Install dependencies
+        run: yarn --immutable
+        working-directory: ${{ needs.baseBranch.outputs.base-branch }}
+      - name: Build packages
+        run: yarn build:quick
+        working-directory: ${{ needs.baseBranch.outputs.base-branch }}
+      - uses: actions/cache@v4
+        with:
+          path: ${{ needs.baseBranch.outputs.base-branch }}/.webiny/cached-packages
+          key: ${{ needs.constants.outputs.run-cache-key }}
+    env:
+      NODE_OPTIONS: '--max_old_space_size=4096'
+      YARN_ENABLE_IMMUTABLE_INSTALLS: false
+  jestTestsNoStorage:
+    needs:
+      - constants
+      - build
+    name: ${{ matrix.package.cmd }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os:
+          - ubuntu-latest
+        node:
+          - 20
+        package: >-
+          ${{
+          fromJson('[{"cmd":"packages/api","packageName":"api","id":"806497aaa729e8d39f59792bcfb12b26"},{"cmd":"packages/api-admin-settings","packageName":"api-admin-settings","id":"31140e7ea9283c9db32ec5f905ce2a1e"},{"cmd":"packages/api-authentication","packageName":"api-authentication","id":"0eaf9f853f122e4ab215bf49d39f3edc"},{"cmd":"packages/api-authentication-cognito","packageName":"api-authentication-cognito","id":"dfb5e1fcea213538a9730314cb5e7d06"},{"cmd":"packages/api-headless-cms-ddb","packageName":"api-headless-cms-ddb","id":"5333e1fe6c2b8f5bbcb101a446419c3e"},{"cmd":"packages/api-record-locking","packageName":"api-record-locking","id":"9340c019a5369ea1aa55f7ed28b09f48"},{"cmd":"packages/api-wcp","packageName":"api-wcp","id":"77ff8a0a075e8d9f7e25001ea64c6c9e"},{"cmd":"packages/api-websockets","packageName":"api-websockets","id":"fd704b97c31f78a886b342babd344d33"},{"cmd":"packages/app-aco","packageName":"app-aco","id":"dddb66beffe2e54804d5bdedd2b423cb"},{"cmd":"packages/app-admin","packageName":"app-admin","id":"53bbef747a26e831904585bcfdd845f7"},{"cmd":"packages/cwp-template-aws","packageName":"cwp-template-aws","id":"846572f41c9427974a577bb95257d019"},{"cmd":"packages/data-migration","packageName":"data-migration","id":"294257fffed0174f169b2c812e16258e"},{"cmd":"packages/db-dynamodb","packageName":"db-dynamodb","id":"5cb733de265d7bbda981fce60f2a8962"},{"cmd":"packages/form","packageName":"form","id":"5707e699d8a4d3b8ee1954c070a50617"},{"cmd":"packages/handler","packageName":"handler","id":"1dad17bbf61657b4308250e8293cb5dd"},{"cmd":"packages/handler-aws","packageName":"handler-aws","id":"2a5bd44c5f2a4290c43f9021bbc705a5"},{"cmd":"packages/handler-graphql","packageName":"handler-graphql","id":"74884166fb2bf383da482fb78b18b704"},{"cmd":"packages/handler-logs","packageName":"handler-logs","id":"ca9a7e2ed32de50aff66c839f0003352"},{"cmd":"packages/ioc","packageName":"ioc","id":"af22b6d7d245321d64d4b714d03ef3e1"},{"cmd":"packages/lexical-converter","packageName":"lexical-converter","id":"52e3bb3ea633bd27d5bab8be976cd16f"},{"cmd":"packages/plugins","packageName":"plugins","id":"c91537eaa40845d816d0d9f39e66018b"},{"cmd":"packages/pubsub","packageName":"pubsub","id":"fc14c28c51c537a7d9edd33d73ae29e2"},{"cmd":"packages/react-composition","packageName":"react-composition","id":"428b8a3187fe275cb76da6bad0ba3918"},{"cmd":"packages/react-properties","packageName":"react-properties","id":"7578e63dcaa1ac66fed4a8dd936a9285"},{"cmd":"packages/react-rich-text-lexical-renderer","packageName":"react-rich-text-lexical-renderer","id":"452451b34eb7e0134e99b0706e5eb076"},{"cmd":"packages/utils","packageName":"utils","id":"696ceb17e38e4a274d4a149d24513b78"},{"cmd":"packages/validation","packageName":"validation","id":"9c68da33792a1214ae45e040a2830cd7"}]')
+          }}
+    runs-on: ${{ matrix.os }}
+    env:
+      NODE_OPTIONS: '--max_old_space_size=4096'
+      YARN_ENABLE_IMMUTABLE_INSTALLS: false
+      AWS_REGION: eu-central-1
+    steps:
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - uses: actions/checkout@v4
+        with:
+          path: ${{ needs.baseBranch.outputs.base-branch }}
+      - uses: actions/cache@v4
+        with:
+          path: ${{ needs.baseBranch.outputs.base-branch }}/.yarn/cache
+          key: yarn-${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
+      - uses: actions/cache@v4
+        with:
+          path: ${{ needs.baseBranch.outputs.base-branch }}/.webiny/cached-packages
+          key: ${{ needs.constants.outputs.run-cache-key }}
+      - name: Install dependencies
+        run: yarn --immutable
+        working-directory: ${{ needs.baseBranch.outputs.base-branch }}
+      - name: Build packages
+        run: yarn build:quick
+        working-directory: ${{ needs.baseBranch.outputs.base-branch }}
+      - name: Run tests
+        run: yarn test ${{ matrix.package.cmd }}
+        working-directory: ${{ needs.baseBranch.outputs.base-branch }}
+  jestTestsDdb:
+    needs:
+      - constants
+      - build
+    name: ${{ matrix.package.cmd }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os:
+          - ubuntu-latest
+        node:
+          - 20
+        package: >-
+          ${{ fromJson('[{"cmd":"packages/api-aco
+          --storage=ddb","storage":"ddb","packageName":"api-aco","id":"5595b4f3460fb2a019066177bd6489f3"},{"cmd":"packages/api-apw
+          --storage=ddb","storage":"ddb","packageName":"api-apw","id":"04462239e1f3509b08f511de460971ec"},{"cmd":"packages/api-audit-logs
+          --storage=ddb","storage":"ddb","packageName":"api-audit-logs","id":"47680aa68a1a3951f1117c736e150e45"},{"cmd":"packages/api-file-manager
+          --storage=ddb","storage":"ddb","packageName":"api-file-manager","id":"9b6eee1ff7cbf9a3d367818705cc4189"},{"cmd":"packages/api-form-builder
+          --storage=ddb","storage":"ddb","packageName":"api-form-builder","id":"980a9aebb5ec0cab057422364a60493b"},{"cmd":"packages/api-headless-cms
+          --storage=ddb
+          --shard=1/6","storage":"ddb","packageName":"api-headless-cms","id":"70476469f4407a455237133406a37a4b"},{"cmd":"packages/api-headless-cms
+          --storage=ddb
+          --shard=2/6","storage":"ddb","packageName":"api-headless-cms","id":"0eba11dcf36fd00e737a630f40567e85"},{"cmd":"packages/api-headless-cms
+          --storage=ddb
+          --shard=3/6","storage":"ddb","packageName":"api-headless-cms","id":"8c15e662d10ad6272ac557515e39d4cd"},{"cmd":"packages/api-headless-cms
+          --storage=ddb
+          --shard=4/6","storage":"ddb","packageName":"api-headless-cms","id":"3b14c43cd5971ad2945b1f0e87970e20"},{"cmd":"packages/api-headless-cms
+          --storage=ddb
+          --shard=5/6","storage":"ddb","packageName":"api-headless-cms","id":"a71716169299cfee9996f4344c84616f"},{"cmd":"packages/api-headless-cms
+          --storage=ddb
+          --shard=6/6","storage":"ddb","packageName":"api-headless-cms","id":"26f0b825b771340ca981858d86bd1f42"},{"cmd":"packages/api-headless-cms-aco
+          --storage=ddb","storage":"ddb","packageName":"api-headless-cms-aco","id":"718c110b004c59ed7d13cbcc875a6b64"},{"cmd":"packages/api-headless-cms-bulk-actions
+          --storage=ddb","storage":"ddb","packageName":"api-headless-cms-bulk-actions","id":"00c0a57737502f28c304015d2d1ba442"},{"cmd":"packages/api-headless-cms-import-export
+          --storage=ddb","storage":"ddb","packageName":"api-headless-cms-import-export","id":"e9052e7c40171aeb43ce089fdfbbe3c8"},{"cmd":"packages/api-i18n
+          --storage=ddb","storage":"ddb","packageName":"api-i18n","id":"943e15fe21c847b164f9413f8baf97b7"},{"cmd":"packages/api-mailer
+          --storage=ddb","storage":"ddb","packageName":"api-mailer","id":"2cc1dc707a39e72f4e5d9a140677ca39"},{"cmd":"packages/api-page-builder
+          --storage=ddb
+          --shard=1/6","storage":"ddb","packageName":"api-page-builder","id":"b2a30dfaf230076ce7120c55eb581d32"},{"cmd":"packages/api-page-builder
+          --storage=ddb
+          --shard=2/6","storage":"ddb","packageName":"api-page-builder","id":"c58e2f120653e8bd68475c16de4434c5"},{"cmd":"packages/api-page-builder
+          --storage=ddb
+          --shard=3/6","storage":"ddb","packageName":"api-page-builder","id":"808cb2da8e70bf84a24de2ab7ed27c24"},{"cmd":"packages/api-page-builder
+          --storage=ddb
+          --shard=4/6","storage":"ddb","packageName":"api-page-builder","id":"6f95134a56bea87da59d4c7d56846d72"},{"cmd":"packages/api-page-builder
+          --storage=ddb
--shard=5/6","storage":"ddb","packageName":"api-page-builder","id":"918eb8cb9d4046da9d38962b12e8ace6"},{"cmd":"packages/api-page-builder + --storage=ddb + --shard=6/6","storage":"ddb","packageName":"api-page-builder","id":"45bc3d824b38bd2770f1d4ba357387f9"},{"cmd":"packages/api-page-builder-aco + --storage=ddb","storage":"ddb","packageName":"api-page-builder-aco","id":"48281621c024ae9bbd0f79da5f6f4867"},{"cmd":"packages/api-page-builder-import-export + --storage=ddb","storage":"ddb","packageName":"api-page-builder-import-export","id":"8540085b59af85d1fd82b37b9e890704"},{"cmd":"packages/api-prerendering-service + --storage=ddb","storage":"ddb","packageName":"api-prerendering-service","id":"a2831c88465244dc03f188f4a40e4d63"},{"cmd":"packages/api-security + --storage=ddb","storage":"ddb","packageName":"api-security","id":"0a065366763b713fb016c43ce21e77b9"},{"cmd":"packages/api-security-cognito + --storage=ddb","storage":"ddb","packageName":"api-security-cognito","id":"0787967fe56689618106e6c64e784bff"},{"cmd":"packages/api-serverless-cms + --storage=ddb","storage":"ddb","packageName":"api-serverless-cms","id":"b660572a629aa6e9191829fe7bfd33cc"},{"cmd":"packages/api-tenancy + --storage=ddb","storage":"ddb","packageName":"api-tenancy","id":"0c81e56d64e97e6b563965250f04ed34"},{"cmd":"packages/api-tenant-manager + --storage=ddb","storage":"ddb","packageName":"api-tenant-manager","id":"4b93a028b8055553c3443a45b38079e9"},{"cmd":"packages/tasks + --storage=ddb","storage":"ddb","packageName":"tasks","id":"925ba761b5995e8a8b980c0789034b3c"}]') + }} + runs-on: ${{ matrix.os }} + env: + NODE_OPTIONS: '--max_old_space_size=4096' + YARN_ENABLE_IMMUTABLE_INSTALLS: false + AWS_REGION: eu-central-1 + steps: + - uses: actions/setup-node@v4 + with: + node-version: 20 + - uses: actions/checkout@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }} + - uses: actions/cache@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }}/.yarn/cache + key: yarn-${{ runner.os }}-${{ hashFiles('**/yarn.lock') }} + - uses: actions/cache@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }}/.webiny/cached-packages + key: ${{ needs.constants.outputs.run-cache-key }} + - name: Install dependencies + run: yarn --immutable + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + - name: Build packages + run: yarn build:quick + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + - name: Run tests + run: yarn test ${{ matrix.package.cmd }} + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + jestTestsDdbEs: + needs: + - constants + - build + name: ${{ matrix.package.cmd }} + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + node: + - 20 + package: >- + ${{ fromJson('[{"cmd":"packages/api-aco + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-aco","id":"8f23ec33f547aa62236f5c71115688d6"},{"cmd":"packages/api-audit-logs + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-audit-logs","id":"a292444cd9100f78d8fc196274393ea8"},{"cmd":"packages/api-dynamodb-to-elasticsearch + --storage=ddb-es,ddb","storage":["ddb-es"],"packageName":"api-dynamodb-to-elasticsearch","id":"e2c325f0940ba5fb5a891a8cf74fca61"},{"cmd":"packages/api-elasticsearch + --storage=ddb-es,ddb","storage":["ddb-es"],"packageName":"api-elasticsearch","id":"5963079c60b96202bbaf2a802ad14383"},{"cmd":"packages/api-elasticsearch-tasks + 
--storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-elasticsearch-tasks","id":"d81ad1d024a8746cc440e2e548770f8f"},{"cmd":"packages/api-file-manager + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-file-manager","id":"d6f293add4a252b96cbd770ab6e80557"},{"cmd":"packages/api-form-builder + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-form-builder","id":"3753bde0144d808eb15c755b7176386c"},{"cmd":"packages/api-form-builder-so-ddb-es + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-form-builder-so-ddb-es","id":"be1748722ce53a7383696bdc9aecb36e"},{"cmd":"packages/api-headless-cms + --storage=ddb-es,ddb + --shard=1/6","storage":"ddb-es","packageName":"api-headless-cms","id":"c9e8cf197d213d99f54ae218b027db43"},{"cmd":"packages/api-headless-cms + --storage=ddb-es,ddb + --shard=2/6","storage":"ddb-es","packageName":"api-headless-cms","id":"0db69460c7bcc2bd54f21ae32c2436a0"},{"cmd":"packages/api-headless-cms + --storage=ddb-es,ddb + --shard=3/6","storage":"ddb-es","packageName":"api-headless-cms","id":"13763c404c6788aa580d8b9fa8f52239"},{"cmd":"packages/api-headless-cms + --storage=ddb-es,ddb + --shard=4/6","storage":"ddb-es","packageName":"api-headless-cms","id":"795fb79efa47ed2c7b14b1601b03db21"},{"cmd":"packages/api-headless-cms + --storage=ddb-es,ddb + --shard=5/6","storage":"ddb-es","packageName":"api-headless-cms","id":"775a20e72e2f9e3db4c119b08dca9858"},{"cmd":"packages/api-headless-cms + --storage=ddb-es,ddb + --shard=6/6","storage":"ddb-es","packageName":"api-headless-cms","id":"d9e94bb347222577c3a3c8ea3cc41e47"},{"cmd":"packages/api-headless-cms-aco + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-headless-cms-aco","id":"873cd623b92712713e58e7dc6ddbe5d9"},{"cmd":"packages/api-headless-cms-bulk-actions + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-headless-cms-bulk-actions","id":"d57a9e2a64e475f4629a14f4e1130e78"},{"cmd":"packages/api-headless-cms-ddb-es + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-headless-cms-ddb-es","id":"f64e01fd77d4d1c22803e1523560b07c"},{"cmd":"packages/api-headless-cms-es-tasks + --storage=ddb-es,ddb","storage":["ddb-es"],"packageName":"api-headless-cms-es-tasks","id":"f857b5e4a7381a7f10eadef6ec83d9e0"},{"cmd":"packages/api-headless-cms-import-export + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-headless-cms-import-export","id":"fa2cbb7997de447c87e3f1b646008711"},{"cmd":"packages/api-mailer + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-mailer","id":"ccc077215f734fbec817d90fdb04d423"},{"cmd":"packages/api-page-builder + --storage=ddb-es,ddb + --shard=1/6","storage":"ddb-es","packageName":"api-page-builder","id":"a9d5f7851f0b921677df8521ff899f86"},{"cmd":"packages/api-page-builder + --storage=ddb-es,ddb + --shard=2/6","storage":"ddb-es","packageName":"api-page-builder","id":"d6c00270cbcfa826dab79e8c703c9eb5"},{"cmd":"packages/api-page-builder + --storage=ddb-es,ddb + --shard=3/6","storage":"ddb-es","packageName":"api-page-builder","id":"b407ab6f87871e108480b0fa3bc17902"},{"cmd":"packages/api-page-builder + --storage=ddb-es,ddb + --shard=4/6","storage":"ddb-es","packageName":"api-page-builder","id":"9aa4fe8f6e30c49c501003a914b2ca5c"},{"cmd":"packages/api-page-builder + --storage=ddb-es,ddb + --shard=5/6","storage":"ddb-es","packageName":"api-page-builder","id":"a84a7bf736194196387f2959132abfdd"},{"cmd":"packages/api-page-builder + --storage=ddb-es,ddb + 
--shard=6/6","storage":"ddb-es","packageName":"api-page-builder","id":"02927f20dd60108bec8356b6dae55357"},{"cmd":"packages/api-page-builder-aco + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-page-builder-aco","id":"d12985ec4dcdb80af419125d236a73d8"},{"cmd":"packages/api-page-builder-so-ddb-es + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-page-builder-so-ddb-es","id":"911289d4016adf351238298ce5b41ac8"},{"cmd":"packages/api-serverless-cms + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"api-serverless-cms","id":"3d8f52f5b779b9ded3d746716fed019f"},{"cmd":"packages/migrations + --storage=ddb-es,ddb","storage":["ddb-es"],"packageName":"migrations","id":"7262de0ebd8c413fce5cc1428462df1a"},{"cmd":"packages/tasks + --storage=ddb-es,ddb","storage":"ddb-es","packageName":"tasks","id":"0c5cd8395d241e54e3488ffcc1c81c26"}]') + }} + runs-on: ${{ matrix.os }} + env: + NODE_OPTIONS: '--max_old_space_size=4096' + YARN_ENABLE_IMMUTABLE_INSTALLS: false + AWS_REGION: eu-central-1 + AWS_ELASTIC_SEARCH_DOMAIN_NAME: ${{ secrets.AWS_ELASTIC_SEARCH_DOMAIN_NAME }} + ELASTIC_SEARCH_ENDPOINT: ${{ secrets.ELASTIC_SEARCH_ENDPOINT }} + ELASTIC_SEARCH_INDEX_PREFIX: ${{ matrix.package.id }} + steps: + - uses: actions/setup-node@v4 + with: + node-version: 20 + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::726952677045:role/GitHubActionsWebinyJs + aws-region: eu-central-1 + - uses: actions/checkout@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }} + - uses: actions/cache@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }}/.yarn/cache + key: yarn-${{ runner.os }}-${{ hashFiles('**/yarn.lock') }} + - uses: actions/cache@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }}/.webiny/cached-packages + key: ${{ needs.constants.outputs.run-cache-key }} + - name: Install dependencies + run: yarn --immutable + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + - name: Build packages + run: yarn build:quick + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + - name: Run tests + run: yarn test ${{ matrix.package.cmd }} + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + permissions: + id-token: write + jestTestsDdbOs: + needs: + - constants + - build + name: ${{ matrix.package.cmd }} + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + node: + - 20 + package: >- + ${{ fromJson('[{"cmd":"packages/api-aco + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-aco","id":"e4b1b5ebc172f2657485e41c35ad1cd7"},{"cmd":"packages/api-audit-logs + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-audit-logs","id":"b36aac5f0e34dc4583e5422ae589f1ed"},{"cmd":"packages/api-dynamodb-to-elasticsearch + --storage=ddb-os,ddb","storage":["ddb-os"],"packageName":"api-dynamodb-to-elasticsearch","id":"6e0b282c3d135703e52b2c55822d4fb0"},{"cmd":"packages/api-elasticsearch + --storage=ddb-os,ddb","storage":["ddb-os"],"packageName":"api-elasticsearch","id":"b0f477d6b209f654714809b318be888e"},{"cmd":"packages/api-elasticsearch-tasks + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-elasticsearch-tasks","id":"580a9577fdbd4a241034a42e1a47dee5"},{"cmd":"packages/api-file-manager + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-file-manager","id":"346430a79981d3e214c87254a08e31b2"},{"cmd":"packages/api-form-builder + 
--storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-form-builder","id":"d386cddfd3c366ad9955193dcfe74363"},{"cmd":"packages/api-form-builder-so-ddb-es + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-form-builder-so-ddb-es","id":"6086ced9d7b4412cc438b9e1aefbb976"},{"cmd":"packages/api-headless-cms + --storage=ddb-os,ddb + --shard=1/6","storage":"ddb-os","packageName":"api-headless-cms","id":"f0851fe3b18a5f4130ae919506f9d68f"},{"cmd":"packages/api-headless-cms + --storage=ddb-os,ddb + --shard=2/6","storage":"ddb-os","packageName":"api-headless-cms","id":"627bf598869494740bdb3ee340398ed5"},{"cmd":"packages/api-headless-cms + --storage=ddb-os,ddb + --shard=3/6","storage":"ddb-os","packageName":"api-headless-cms","id":"49c59082ed1d7a79b742944965adff82"},{"cmd":"packages/api-headless-cms + --storage=ddb-os,ddb + --shard=4/6","storage":"ddb-os","packageName":"api-headless-cms","id":"37865d8ba2366687e25fa61967fe4db9"},{"cmd":"packages/api-headless-cms + --storage=ddb-os,ddb + --shard=5/6","storage":"ddb-os","packageName":"api-headless-cms","id":"19d0191a992c0a5145674dc0b37d96b6"},{"cmd":"packages/api-headless-cms + --storage=ddb-os,ddb + --shard=6/6","storage":"ddb-os","packageName":"api-headless-cms","id":"2aade1f8261eacc7d93cc25fa3457fac"},{"cmd":"packages/api-headless-cms-aco + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-headless-cms-aco","id":"aa2c8429c2564549a680db23fe963347"},{"cmd":"packages/api-headless-cms-bulk-actions + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-headless-cms-bulk-actions","id":"a798b4705a7eb9858a51d80b386cf30a"},{"cmd":"packages/api-headless-cms-ddb-es + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-headless-cms-ddb-es","id":"23bea783bb40390ae069dfa4985f97d2"},{"cmd":"packages/api-headless-cms-es-tasks + --storage=ddb-os,ddb","storage":["ddb-os"],"packageName":"api-headless-cms-es-tasks","id":"ee446fd78ad6294bbfb3c0689ff2602e"},{"cmd":"packages/api-headless-cms-import-export + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-headless-cms-import-export","id":"6059cf3e78f93525c8ed72ad83b7de1a"},{"cmd":"packages/api-mailer + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-mailer","id":"0ede859b604febdfa78018cdd1067a77"},{"cmd":"packages/api-page-builder + --storage=ddb-os,ddb + --shard=1/6","storage":"ddb-os","packageName":"api-page-builder","id":"691427cc9c5cb297c68cb2f90d7fcb89"},{"cmd":"packages/api-page-builder + --storage=ddb-os,ddb + --shard=2/6","storage":"ddb-os","packageName":"api-page-builder","id":"66b65733ec32b2010df792151240cca1"},{"cmd":"packages/api-page-builder + --storage=ddb-os,ddb + --shard=3/6","storage":"ddb-os","packageName":"api-page-builder","id":"8cdd1f181701f25f8cf9c3fe45b661bd"},{"cmd":"packages/api-page-builder + --storage=ddb-os,ddb + --shard=4/6","storage":"ddb-os","packageName":"api-page-builder","id":"0956377c7a7550c745e9402b51bdca85"},{"cmd":"packages/api-page-builder + --storage=ddb-os,ddb + --shard=5/6","storage":"ddb-os","packageName":"api-page-builder","id":"cc194759ab43627005bc21ee7c833a01"},{"cmd":"packages/api-page-builder + --storage=ddb-os,ddb + --shard=6/6","storage":"ddb-os","packageName":"api-page-builder","id":"b979f8aa837353847942b60e8f4bc057"},{"cmd":"packages/api-page-builder-aco + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-page-builder-aco","id":"a1a7c90d43da1678f254bd4331cf4d55"},{"cmd":"packages/api-page-builder-so-ddb-es + 
--storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-page-builder-so-ddb-es","id":"e0236755edb31fc1a6005eb161941bf8"},{"cmd":"packages/api-serverless-cms + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"api-serverless-cms","id":"28f2386bb4be699710cb574f3401d76b"},{"cmd":"packages/migrations + --storage=ddb-os,ddb","storage":["ddb-os"],"packageName":"migrations","id":"3f8965830bbe44499a4bc97baf27e090"},{"cmd":"packages/tasks + --storage=ddb-os,ddb","storage":"ddb-os","packageName":"tasks","id":"5eadfa5cc14ec4e8ba87ac3dfb112580"}]') + }} + runs-on: ${{ matrix.os }} + env: + NODE_OPTIONS: '--max_old_space_size=4096' + YARN_ENABLE_IMMUTABLE_INSTALLS: false + AWS_REGION: eu-central-1 + AWS_ELASTIC_SEARCH_DOMAIN_NAME: ${{ secrets.AWS_OPEN_SEARCH_DOMAIN_NAME }} + ELASTIC_SEARCH_ENDPOINT: ${{ secrets.OPEN_SEARCH_ENDPOINT }} + ELASTIC_SEARCH_INDEX_PREFIX: ${{ matrix.package.id }} + steps: + - uses: actions/setup-node@v4 + with: + node-version: 20 + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::726952677045:role/GitHubActionsWebinyJs + aws-region: eu-central-1 + - uses: actions/checkout@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }} + - uses: actions/cache@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }}/.yarn/cache + key: yarn-${{ runner.os }}-${{ hashFiles('**/yarn.lock') }} + - uses: actions/cache@v4 + with: + path: ${{ needs.baseBranch.outputs.base-branch }}/.webiny/cached-packages + key: ${{ needs.constants.outputs.run-cache-key }} + - name: Install dependencies + run: yarn --immutable + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + - name: Build packages + run: yarn build:quick + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + - name: Run tests + run: yarn test ${{ matrix.package.cmd }} + working-directory: ${{ needs.baseBranch.outputs.base-branch }} + permissions: + id-token: write diff --git a/.github/workflows/wac/pullRequests.wac.ts b/.github/workflows/wac/pullRequests.wac.ts index 723dcc6e574..60b4a28f061 100644 --- a/.github/workflows/wac/pullRequests.wac.ts +++ b/.github/workflows/wac/pullRequests.wac.ts @@ -210,7 +210,10 @@ export const pullRequests = createWorkflow({ assignMilestone: createJob({ name: "Assign milestone", needs: "constants", - if: "needs.constants.outputs.is-fork-pr != 'true'", + if: [ + "needs.constants.outputs.is-fork-pr != 'true'", + "github.event.pull_request.milestone == null" + ].join(" && "), steps: [ { name: "Print latest Webiny version", diff --git a/.github/workflows/wac/pullRequestsCommandJest.wac.ts b/.github/workflows/wac/pullRequestsCommandJest.wac.ts new file mode 100644 index 00000000000..87f2f5d49c9 --- /dev/null +++ b/.github/workflows/wac/pullRequestsCommandJest.wac.ts @@ -0,0 +1,173 @@ +import { createWorkflow, NormalJob } from "github-actions-wac"; +import { + createGlobalBuildCacheSteps, + createInstallBuildSteps, + createRunBuildCacheSteps, + createYarnCacheSteps, + withCommonParams +} from "./steps"; +import { + AWS_REGION, + BUILD_PACKAGES_RUNNER, + listPackagesWithJestTests, + NODE_OPTIONS, + NODE_VERSION +} from "./utils"; +import { createJob, createValidateWorkflowsJob } from "./jobs"; + +// Will print "next" or "dev". Important for caching (via actions/cache). 
diff --git a/.github/workflows/wac/pullRequestsCommandJest.wac.ts b/.github/workflows/wac/pullRequestsCommandJest.wac.ts
new file mode 100644
index 00000000000..87f2f5d49c9
--- /dev/null
+++ b/.github/workflows/wac/pullRequestsCommandJest.wac.ts
@@ -0,0 +1,173 @@
+import { createWorkflow, NormalJob } from "github-actions-wac";
+import {
+    createGlobalBuildCacheSteps,
+    createInstallBuildSteps,
+    createRunBuildCacheSteps,
+    createYarnCacheSteps,
+    withCommonParams
+} from "./steps";
+import {
+    AWS_REGION,
+    BUILD_PACKAGES_RUNNER,
+    listPackagesWithJestTests,
+    NODE_OPTIONS,
+    NODE_VERSION
+} from "./utils";
+import { createJob, createValidateWorkflowsJob } from "./jobs";
+
+// Will print "next" or "dev". Important for caching (via actions/cache).
+const DIR_WEBINY_JS = "${{ needs.baseBranch.outputs.base-branch }}";
+
+const installBuildSteps = createInstallBuildSteps({ workingDirectory: DIR_WEBINY_JS });
+const yarnCacheSteps = createYarnCacheSteps({ workingDirectory: DIR_WEBINY_JS });
+const globalBuildCacheSteps = createGlobalBuildCacheSteps({ workingDirectory: DIR_WEBINY_JS });
+const runBuildCacheSteps = createRunBuildCacheSteps({ workingDirectory: DIR_WEBINY_JS });
+
+const createCheckoutPrSteps = () =>
+    [
+        {
+            name: "Checkout Pull Request",
+            "working-directory": DIR_WEBINY_JS,
+            run: "gh pr checkout ${{ github.event.issue.number }}",
+            env: { GITHUB_TOKEN: "${{ secrets.GH_TOKEN }}" }
+        }
+    ] as NonNullable<NormalJob["steps"]>;
+
+const createJestTestsJob = (storage: string | null) => {
+    const env: Record<string, string> = { AWS_REGION };
+
+    if (storage) {
+        if (storage === "ddb-es") {
+            env["AWS_ELASTIC_SEARCH_DOMAIN_NAME"] = "${{ secrets.AWS_ELASTIC_SEARCH_DOMAIN_NAME }}";
+            env["ELASTIC_SEARCH_ENDPOINT"] = "${{ secrets.ELASTIC_SEARCH_ENDPOINT }}";
+            env["ELASTIC_SEARCH_INDEX_PREFIX"] = "${{ matrix.package.id }}";
+        } else if (storage === "ddb-os") {
+            // We still use the same environment variables as for "ddb-es" setup, it's
+            // just that the values are read from different secrets.
+            env["AWS_ELASTIC_SEARCH_DOMAIN_NAME"] = "${{ secrets.AWS_OPEN_SEARCH_DOMAIN_NAME }}";
+            env["ELASTIC_SEARCH_ENDPOINT"] = "${{ secrets.OPEN_SEARCH_ENDPOINT }}";
+            env["ELASTIC_SEARCH_INDEX_PREFIX"] = "${{ matrix.package.id }}";
+        }
+    }
+
+    const packages = listPackagesWithJestTests({ storage });
+
+    return createJob({
+        needs: ["constants", "build"],
+        name: "${{ matrix.package.cmd }}",
+        strategy: {
+            "fail-fast": false,
+            matrix: {
+                os: ["ubuntu-latest"],
+                node: [NODE_VERSION],
+                package: "${{ fromJson('" + JSON.stringify(packages) + "') }}"
+            }
+        },
+        "runs-on": "${{ matrix.os }}",
+        env,
+        awsAuth: storage === "ddb-es" || storage === "ddb-os",
+        checkout: { path: DIR_WEBINY_JS },
+        steps: [
+            ...yarnCacheSteps,
+            ...runBuildCacheSteps,
+            ...installBuildSteps,
+            ...withCommonParams(
+                [{ name: "Run tests", run: "yarn test ${{ matrix.package.cmd }}" }],
+                { "working-directory": DIR_WEBINY_JS }
+            )
+        ]
+    });
+};
+
+export const pullRequestsCommandJest = createWorkflow({
+    name: "Pull Requests Command - Jest",
+    on: "issue_comment",
+    env: {
+        NODE_OPTIONS,
+        AWS_REGION
+    },
+    jobs: {
+        checkComment: createJob({
+            name: `Check comment for /jest`,
+            if: "${{ github.event.issue.pull_request }}",
+            checkout: false,
+            steps: [
+                {
+                    name: "Check for Command",
+                    id: "command",
+                    uses: "xt0rted/slash-command-action@v2",
+                    with: {
+                        "repo-token": "${{ secrets.GITHUB_TOKEN }}",
+                        command: "jest",
+                        reaction: "true",
+                        "reaction-type": "eyes",
+                        "allow-edits": "false",
+                        "permission-level": "write"
+                    }
+                },
+                {
+                    name: "Create comment",
+                    uses: "peter-evans/create-or-update-comment@v2",
+                    with: {
+                        "issue-number": "${{ github.event.issue.number }}",
:sparkles:" + } + } + ] + }), + validateWorkflows: createValidateWorkflowsJob({ needs: "checkComment" }), + baseBranch: createJob({ + needs: "checkComment", + name: "Get base branch", + outputs: { + "base-branch": "${{ steps.base-branch.outputs.base-branch }}" + }, + steps: [ + { + name: "Get base branch", + id: "base-branch", + env: { GITHUB_TOKEN: "${{ secrets.GH_TOKEN }}" }, + run: 'echo "base-branch=$(gh pr view ${{ github.event.issue.number }} --json baseRefName -q .baseRefName)" >> $GITHUB_OUTPUT' + } + ] + }), + constants: createJob({ + needs: "baseBranch", + name: "Create constants", + outputs: { + "global-cache-key": "${{ steps.global-cache-key.outputs.global-cache-key }}", + "run-cache-key": "${{ steps.run-cache-key.outputs.run-cache-key }}" + }, + checkout: false, + steps: [ + { + name: "Create global cache key", + id: "global-cache-key", + run: `echo "global-cache-key=\${{ needs.baseBranch.outputs.base-branch }}-\${{ runner.os }}-$(/bin/date -u "+%m%d")-\${{ vars.RANDOM_CACHE_KEY_SUFFIX }}" >> $GITHUB_OUTPUT` + }, + { + name: "Create workflow run cache key", + id: "run-cache-key", + run: 'echo "run-cache-key=${{ github.run_id }}-${{ github.run_attempt }}-${{ vars.RANDOM_CACHE_KEY_SUFFIX }}" >> $GITHUB_OUTPUT' + } + ] + }), + build: createJob({ + name: "Build", + needs: ["baseBranch", "constants"], + checkout: { path: DIR_WEBINY_JS }, + "runs-on": BUILD_PACKAGES_RUNNER, + steps: [ + ...createCheckoutPrSteps(), + ...yarnCacheSteps, + ...globalBuildCacheSteps, + ...installBuildSteps, + ...runBuildCacheSteps + ] + }), + jestTestsNoStorage: createJestTestsJob(null), + jestTestsDdb: createJestTestsJob("ddb"), + jestTestsDdbEs: createJestTestsJob("ddb-es"), + jestTestsDdbOs: createJestTestsJob("ddb-os") + } +}); diff --git a/extensions/theme/src/layouts/pages/Static/HeaderMobile.tsx b/extensions/theme/src/layouts/pages/Static/HeaderMobile.tsx index e31b913cd0d..ad09d180b06 100644 --- a/extensions/theme/src/layouts/pages/Static/HeaderMobile.tsx +++ b/extensions/theme/src/layouts/pages/Static/HeaderMobile.tsx @@ -98,6 +98,7 @@ const HeaderMobileWrapper = styled.div` } > nav { + display: none; -moz-osx-font-smoothing: grayscale; -webkit-font-smoothing: antialiased; animation: slide-out 0.5s forwards; diff --git a/packages/api-elasticsearch-tasks/src/definitions/entry.ts b/packages/api-elasticsearch-tasks/src/definitions/entry.ts index fa3c9bb7afd..e2559d417f4 100644 --- a/packages/api-elasticsearch-tasks/src/definitions/entry.ts +++ b/packages/api-elasticsearch-tasks/src/definitions/entry.ts @@ -1,13 +1,18 @@ -import { Entity, TableDef } from "@webiny/db-dynamodb/toolbox"; +/** + * TODO If adding GSIs to the Elasticsearch table, add them here. 
diff --git a/packages/api-elasticsearch-tasks/src/definitions/entry.ts b/packages/api-elasticsearch-tasks/src/definitions/entry.ts
index fa3c9bb7afd..e2559d417f4 100644
--- a/packages/api-elasticsearch-tasks/src/definitions/entry.ts
+++ b/packages/api-elasticsearch-tasks/src/definitions/entry.ts
@@ -1,13 +1,18 @@
-import { Entity, TableDef } from "@webiny/db-dynamodb/toolbox";
+/**
+ * TODO If adding GSIs to the Elasticsearch table, add them here.
+ */
+import type { TableDef } from "@webiny/db-dynamodb/toolbox";
+import type { IEntity } from "@webiny/db-dynamodb";
+import { createEntity } from "@webiny/db-dynamodb";
 
 interface Params {
     table: TableDef;
     entityName: string;
 }
 
-export const createEntry = (params: Params): Entity => {
+export const createEntry = (params: Params): IEntity => {
     const { table, entityName } = params;
-    return new Entity({
+    return createEntity({
         name: entityName,
         table,
         attributes: {
@@ -24,6 +29,9 @@ export const createEntry = (params: Params): IEntity => {
             },
             data: {
                 type: "map"
+            },
+            TYPE: {
+                type: "string"
             }
         }
     });
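The factory above now returns Webiny's `IEntity` wrapper instead of a raw dynamodb-toolbox `Entity`. A minimal consumer-side sketch, using only calls that appear elsewhere in this patch (`getEntity` in `Manager.ts` and the `.entity` accessor used by `ReindexingTaskRunner`); the `table` and entity name are assumed:

    import { createEntry } from "~/definitions/entry";

    // `table` is assumed to come from createTable(), as in Manager.ts.
    const entry = createEntry({ table, entityName: "myEntity" }); // IEntity
    // Where a lower-level API still needs the underlying dynamodb-toolbox
    // Entity, it stays reachable, e.g. tableWriteBatch.put(entry.entity, item)
    // in the reindexing task below.
    const rawEntity = entry.entity;
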
diff --git a/packages/api-elasticsearch-tasks/src/tasks/Manager.ts b/packages/api-elasticsearch-tasks/src/tasks/Manager.ts
index a62664742a0..2d981674e49 100644
--- a/packages/api-elasticsearch-tasks/src/tasks/Manager.ts
+++ b/packages/api-elasticsearch-tasks/src/tasks/Manager.ts
@@ -1,19 +1,16 @@
 import { DynamoDBDocument, getDocumentClient } from "@webiny/aws-sdk/client-dynamodb";
 import { Client, createElasticsearchClient } from "@webiny/api-elasticsearch";
 import { createTable } from "~/definitions";
-import { Context, IManager } from "~/types";
+import type { Context, IManager } from "~/types";
 import { createEntry } from "~/definitions/entry";
-import { Entity } from "@webiny/db-dynamodb/toolbox";
-import { ITaskResponse } from "@webiny/tasks/response/abstractions";
-import { IIsCloseToTimeoutCallable, ITaskManagerStore } from "@webiny/tasks/runner/abstractions";
-import {
-    batchReadAll,
-    BatchReadItem,
-    batchWriteAll,
-    BatchWriteItem,
-    BatchWriteResult
-} from "@webiny/db-dynamodb";
-import { ITimer } from "@webiny/handler-aws/utils";
+import type { ITaskResponse } from "@webiny/tasks/response/abstractions";
+import type {
+    IIsCloseToTimeoutCallable,
+    ITaskManagerStore
+} from "@webiny/tasks/runner/abstractions";
+import type { BatchReadItem, IEntity } from "@webiny/db-dynamodb";
+import { batchReadAll } from "@webiny/db-dynamodb";
+import type { ITimer } from "@webiny/handler-aws/utils";
 
 export interface ManagerParams {
     context: Context;
@@ -37,7 +34,7 @@ export class Manager implements IManager {
     public readonly store: ITaskManagerStore;
     public readonly timer: ITimer;
 
-    private readonly entities: Record<string, Entity<any>> = {};
+    private readonly entities: Record<string, IEntity> = {};
 
     public constructor(params: ManagerParams) {
         this.context = params.context;
@@ -64,7 +61,7 @@ export class Manager implements IManager {
         this.timer = params.timer;
     }
 
-    public getEntity(name: string): Entity<any> {
+    public getEntity(name: string): IEntity {
         if (this.entities[name]) {
             return this.entities[name];
         }
@@ -75,17 +72,10 @@ export class Manager implements IManager {
         }));
     }
 
-    public async read(items: BatchReadItem[]) {
+    public async read<T>(items: BatchReadItem[]): Promise<T[]> {
         return await batchReadAll({
             table: this.table,
             items
         });
     }
-
-    public async write(items: BatchWriteItem[]): Promise<BatchWriteResult> {
-        return await batchWriteAll({
-            table: this.table,
-            items
-        });
-    }
 }
diff --git a/packages/api-elasticsearch-tasks/src/tasks/reindexing/ReindexingTaskRunner.ts b/packages/api-elasticsearch-tasks/src/tasks/reindexing/ReindexingTaskRunner.ts
index 908d8d911b7..930eb1b8166 100644
--- a/packages/api-elasticsearch-tasks/src/tasks/reindexing/ReindexingTaskRunner.ts
+++ b/packages/api-elasticsearch-tasks/src/tasks/reindexing/ReindexingTaskRunner.ts
@@ -6,7 +6,7 @@
 } from "~/types";
 import { ITaskResponse, ITaskResponseResult } from "@webiny/tasks/response/abstractions";
 import { scan } from "~/helpers/scan";
-import { BatchWriteItem, ScanResponse } from "@webiny/db-dynamodb";
+import { createTableWriteBatch, ScanResponse } from "@webiny/db-dynamodb";
 import { IndexManager } from "~/settings";
 import { IIndexManager } from "~/settings/types";
 
@@ -73,7 +73,10 @@
             return this.response.done("No more items to process.");
         }
 
-        const batch: BatchWriteItem[] = [];
+        const tableWriteBatch = createTableWriteBatch({
+            table: this.manager.table
+        });
+
         for (const item of results.items) {
             /**
              * No index defined? Impossible but let's skip if really happens.
@@ -110,14 +113,13 @@
             /**
              * Reindexing will be triggered by the `putBatch` method.
              */
-            batch.push(
-                entity.putBatch({
-                    ...item,
-                    modified: new Date().toISOString()
-                })
-            );
+            tableWriteBatch.put(entity.entity, {
+                ...item,
+                TYPE: item.TYPE || "unknown",
+                modified: new Date().toISOString()
+            });
         }
-        await this.manager.write(batch);
+        await tableWriteBatch.execute();
         /**
          * We always store the index settings, so we can restore them later.
         * Also, we always want to store what was the last key we processed, just in case something breaks, so we can continue from this point.
diff --git a/packages/api-elasticsearch-tasks/src/types.ts b/packages/api-elasticsearch-tasks/src/types.ts
index b2d5b276551..8446398e1ee 100644
--- a/packages/api-elasticsearch-tasks/src/types.ts
+++ b/packages/api-elasticsearch-tasks/src/types.ts
@@ -1,17 +1,17 @@
-import { ElasticsearchContext } from "@webiny/api-elasticsearch/types";
-import { Entity } from "@webiny/db-dynamodb/toolbox";
-import {
+import type { ElasticsearchContext } from "@webiny/api-elasticsearch/types";
+import type {
     Context as TasksContext,
     IIsCloseToTimeoutCallable,
+    ITaskManagerStore,
+    ITaskResponse,
     ITaskResponseDoneResultOutput
 } from "@webiny/tasks/types";
-import { DynamoDBDocument } from "@webiny/aws-sdk/client-dynamodb";
-import { Client } from "@webiny/api-elasticsearch";
+import type { DynamoDBDocument } from "@webiny/aws-sdk/client-dynamodb";
+import type { Client } from "@webiny/api-elasticsearch";
 import { createTable } from "~/definitions";
-import { ITaskResponse } from "@webiny/tasks/response/abstractions";
-import { ITaskManagerStore } from "@webiny/tasks/runner/abstractions";
-import { BatchWriteItem, BatchWriteResult } from "@webiny/db-dynamodb";
-import { ITimer } from "@webiny/handler-aws";
+import type { BatchReadItem, IEntity } from "@webiny/db-dynamodb";
+import type { ITimer } from "@webiny/handler-aws";
+import type { GenericRecord } from "@webiny/api/types";
 
 export interface Context extends ElasticsearchContext, TasksContext {}
 
@@ -42,17 +42,18 @@ export interface IElasticsearchIndexingTaskValues {
 }
 
 export interface AugmentedError extends Error {
-    data?: Record<string, any>;
+    data?: GenericRecord;
     [key: string]: any;
 }
 
 export interface IDynamoDbElasticsearchRecord {
     PK: string;
     SK: string;
+    TYPE?: string;
     index: string;
     _et?: string;
     entity: string;
-    data: Record<string, any>;
+    data: GenericRecord;
     modified: string;
 }
 
@@ -70,7 +71,7 @@ export interface IManager<
     readonly store: ITaskManagerStore;
     readonly timer: ITimer;
 
-    getEntity: (name: string) => Entity;
+    getEntity: (name: string) => IEntity;
 
-    write: (items: BatchWriteItem[]) => Promise<BatchWriteResult>;
+    read<T>(items: BatchReadItem[]): Promise<T[]>;
 }
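The rest of this patch repeats one refactor: hand-built arrays of `entity.putBatch(...)` / `entity.deleteBatch(...)` items flushed through `batchWriteAll` are replaced by write-batch objects that queue mutations and flush once via `execute()`. A minimal sketch of both variants, limited to the call shapes visible in the hunks (a table-level batch may span multiple entities; an entity-level batch takes optional `put`/`delete` arrays up front); `table`, `entity` and the key values are assumed:

    import { createEntityWriteBatch, createTableWriteBatch } from "@webiny/db-dynamodb";

    // Table-level batch (as in ReindexingTaskRunner): items can target
    // different entities, so each put names the raw entity explicitly.
    const tableBatch = createTableWriteBatch({ table });
    tableBatch.put(entry.entity, { PK: "pk", SK: "sk", modified: new Date().toISOString() });
    await tableBatch.execute();

    // Entity-level batch (as in the storage operations below): one entity,
    // with puts/deletes either passed up front or queued afterwards.
    const entityBatch = createEntityWriteBatch({
        entity,
        put: [{ PK: "pk", SK: "A" }],
        delete: [{ PK: "pk", SK: "B" }]
    });
    entityBatch.put({ PK: "pk", SK: "C" }); // more can be queued later
    await entityBatch.execute();
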
diff --git a/packages/api-file-manager-ddb/src/operations/AliasesStorageOperations.ts b/packages/api-file-manager-ddb/src/operations/AliasesStorageOperations.ts
index 47d2ed6280b..af1f8aba00d 100644
--- a/packages/api-file-manager-ddb/src/operations/AliasesStorageOperations.ts
+++ b/packages/api-file-manager-ddb/src/operations/AliasesStorageOperations.ts
@@ -1,13 +1,12 @@
-import { DynamoDBDocument } from "@webiny/aws-sdk/client-dynamodb";
-import { Entity, Table } from "@webiny/db-dynamodb/toolbox";
-import {
-    FileManagerAliasesStorageOperations,
+import type { DynamoDBDocument } from "@webiny/aws-sdk/client-dynamodb";
+import type { Entity, Table } from "@webiny/db-dynamodb/toolbox";
+import type {
     File,
-    FileAlias
+    FileAlias,
+    FileManagerAliasesStorageOperations
 } from "@webiny/api-file-manager/types";
 import {
-    BatchWriteItem,
-    batchWriteAll,
+    createEntityWriteBatch,
     createStandardEntity,
     createTable,
     DbItem,
@@ -39,52 +38,49 @@ export class AliasesStorageOperations implements FileManagerAliasesStorageOperat
     async deleteAliases(file: File): Promise<void> {
         const aliasItems = await this.getExistingAliases(file);
-        const items: BatchWriteItem[] = [];
 
-        aliasItems.forEach(item => {
-            items.push(
-                this.aliasEntity.deleteBatch({
+        const batchWrite = createEntityWriteBatch({
+            entity: this.aliasEntity,
+            delete: aliasItems.map(item => {
+                return {
                     PK: this.createPartitionKey({
                         id: item.fileId,
                         tenant: item.tenant,
                         locale: item.locale
                     }),
                     SK: `ALIAS#${item.alias}`
-                })
-            );
+                };
+            })
         });
 
-        await batchWriteAll({ table: this.table, items });
+        await batchWrite.execute();
     }
 
     async storeAliases(file: File): Promise<void> {
-        const items: BatchWriteItem[] = [];
         const existingAliases = await this.getExistingAliases(file);
         const newAliases = this.createNewAliasesRecords(file, existingAliases);
 
-        newAliases.forEach(alias => {
-            items.push(this.aliasEntity.putBatch(alias));
+        const batchWrite = createEntityWriteBatch({
+            entity: this.aliasEntity
         });
 
+        for (const alias of newAliases) {
+            batchWrite.put(alias);
+        }
+
         // Delete aliases that are in the DB but are NOT in the file.
         for (const data of existingAliases) {
             if (!file.aliases.some(alias => data.alias === alias)) {
-                items.push(
-                    this.aliasEntity.deleteBatch({
-                        PK: this.createPartitionKey(file),
-                        SK: `ALIAS#${data.alias}`
-                    })
-                );
+                batchWrite.delete({
+                    PK: this.createPartitionKey(file),
+                    SK: `ALIAS#${data.alias}`
+                });
             }
         }
 
-        await batchWriteAll({
-            table: this.table,
-            items
-        });
+        await batchWrite.execute();
     }
 
-    private async getExistingAliases(file: File) {
+    private async getExistingAliases(file: File): Promise<FileAlias[]> {
         const aliases = await queryAll<{ data: FileAlias }>({
             entity: this.aliasEntity,
             partitionKey: this.createPartitionKey(file),
diff --git a/packages/api-form-builder-so-ddb-es/src/definitions/elasticsearch.ts b/packages/api-form-builder-so-ddb-es/src/definitions/elasticsearch.ts
index 84f28c9538c..9236ce07eee 100644
--- a/packages/api-form-builder-so-ddb-es/src/definitions/elasticsearch.ts
+++ b/packages/api-form-builder-so-ddb-es/src/definitions/elasticsearch.ts
@@ -28,7 +28,6 @@ export const createElasticsearchEntity = (params: Params) => {
             TYPE: {
                 type: "string"
             },
-            ...(attributes || {})
         }
     });
diff --git a/packages/api-form-builder-so-ddb-es/src/operations/form/index.ts b/packages/api-form-builder-so-ddb-es/src/operations/form/index.ts
index db1e3cf4c05..5ca078d4910 100644
--- a/packages/api-form-builder-so-ddb-es/src/operations/form/index.ts
+++ b/packages/api-form-builder-so-ddb-es/src/operations/form/index.ts
@@ -17,7 +17,7 @@
 import { Entity, Table } from "@webiny/db-dynamodb/toolbox";
 import { Client } from "@elastic/elasticsearch";
 import { queryAll, QueryAllParams } from "@webiny/db-dynamodb/utils/query";
 import WebinyError from "@webiny/error";
-import { batchWriteAll } from "@webiny/db-dynamodb/utils/batchWrite";
+import { createEntityWriteBatch, getClean, IPutParamsItem, put } from "@webiny/db-dynamodb";
 import { configurations } from "~/configurations";
 import { filterItems } from "@webiny/db-dynamodb/utils/filter";
 import fields from "./fields";
@@ -28,7 +28,6 @@
 import { decodeCursor, encodeCursor } from "@webiny/api-elasticsearch";
 import { PluginsContainer } from "@webiny/plugins";
 import { FormBuilderFormCreateKeyParams, FormBuilderFormStorageOperations } from "~/types";
 import { ElasticsearchSearchResponse } from "@webiny/api-elasticsearch/types";
-import { deleteItem, getClean, put } from "@webiny/db-dynamodb";
 
 export type DbRecord<T> = T & {
     PK: string;
@@ -71,7 +70,7 @@ const getESDataForLatestRevision = (form: FbForm): FbFormElastic => ({
 export const createFormStorageOperations = (
     params: CreateFormStorageOperationsParams
 ): FormBuilderFormStorageOperations => {
-    const { entity, esEntity, table, plugins, elasticsearch } = params;
+    const { entity, esEntity, plugins, elasticsearch } = params;
 
     const formDynamoDbFields = fields();
 
@@ -123,24 +122,24 @@
             SK: createLatestSortKey()
         };
 
-        const items = [
-            entity.putBatch({
-                ...form,
-                TYPE: createFormType(),
-                ...revisionKeys
-            }),
-            entity.putBatch({
-                ...form,
-                TYPE: createFormLatestType(),
-                ...latestKeys
-            })
-        ];
+        const itemsBatch = createEntityWriteBatch({
+            entity,
+            put: [
+                {
+                    ...form,
+                    TYPE: createFormType(),
+                    ...revisionKeys
+                },
+                {
+                    ...form,
+                    TYPE: createFormLatestType(),
+                    ...latestKeys
+                }
+            ]
+        });
 
         try {
-            await batchWriteAll({
-                table,
-                items
-            });
+            await itemsBatch.execute();
         } catch (ex) {
             throw new WebinyError(
                 ex.message || "Could not insert form data into regular table.",
@@ -194,24 +193,24 @@
             SK: createLatestSortKey()
         };
 
-        const items = [
-            entity.putBatch({
-                ...form,
-                ...revisionKeys,
-                TYPE: createFormType()
-            }),
-            entity.putBatch({
-                ...form,
-                ...latestKeys,
-                TYPE: createFormLatestType()
-            })
-        ];
+        const entityBatch = createEntityWriteBatch({
+            entity,
+            put: [
+                {
+                    ...form,
+                    ...revisionKeys,
+                    TYPE: createFormType()
+                },
+                {
+                    ...form,
+                    ...latestKeys,
+                    TYPE: createFormLatestType()
+                }
+            ]
+        });
 
         try {
-            await batchWriteAll({
-                table,
-                items
-            });
+            await entityBatch.execute();
         } catch (ex) {
             throw new WebinyError(
                 ex.message ||
@@ -283,27 +282,26 @@
         });
         const isLatestForm = latestForm ? latestForm.id === form.id : false;
 
-        const items = [
-            entity.putBatch({
-                ...form,
-                TYPE: createFormType(),
-                ...revisionKeys
-            })
-        ];
-        if (isLatestForm) {
-            items.push(
-                entity.putBatch({
+        const entityBatch = createEntityWriteBatch({
+            entity,
+            put: [
+                {
                     ...form,
-                    TYPE: createFormLatestType(),
-                    ...latestKeys
-                })
-            );
+                    TYPE: createFormType(),
+                    ...revisionKeys
+                }
+            ]
+        });
+
+        if (isLatestForm) {
+            entityBatch.put({
+                ...form,
+                TYPE: createFormLatestType(),
+                ...latestKeys
+            });
         }
 
         try {
-            await batchWriteAll({
-                table,
-                items
-            });
+            await entityBatch.execute();
         } catch (ex) {
             throw new WebinyError(
                 ex.message || "Could not update form data in the regular table.",
@@ -547,17 +545,18 @@
             );
         }
 
-        const deleteItems = items.map(item => {
-            return entity.deleteBatch({
-                PK: item.PK,
-                SK: item.SK
-            });
+        const deleteBatch = createEntityWriteBatch({
+            entity,
+            delete: items.map(item => {
+                return {
+                    PK: item.PK,
+                    SK: item.SK
+                };
+            })
         });
+
         try {
-            await batchWriteAll({
-                table,
-                items: deleteItems
-            });
+            await deleteBatch.execute();
         } catch (ex) {
             throw new WebinyError(
                 ex.message || "Could not delete form and it's submissions.",
@@ -569,11 +568,13 @@
             PK: createFormPartitionKey(form),
             SK: createLatestSortKey()
         };
+        const deleteEsBatch = createEntityWriteBatch({
+            entity: esEntity,
+            delete: [latestKeys]
+        });
+
         try {
-            await deleteItem({
-                entity: esEntity,
-                keys: latestKeys
-            });
+            await deleteEsBatch.execute();
         } catch (ex) {
             throw new WebinyError(
                 ex.message || "Could not delete latest form record from Elasticsearch.",
@@ -612,8 +613,12 @@
         const isLatest = latestForm ? latestForm.id === form.id : false;
         const isLatestPublished = latestPublishedForm ? latestPublishedForm.id === form.id : false;
 
-        const items = [entity.deleteBatch(revisionKeys)];
-        let esDataItem = undefined;
+        const entityBatch = createEntityWriteBatch({
+            entity,
+            delete: [revisionKeys]
+        });
+
+        let esDataItem: IPutParamsItem | undefined = undefined;
 
         if (isLatest || isLatestPublished) {
             /**
@@ -630,34 +635,28 @@
                 })
                 .shift();
             if (previouslyPublishedForm) {
-                items.push(
-                    entity.putBatch({
-                        ...previouslyPublishedForm,
-                        PK: createFormPartitionKey(previouslyPublishedForm),
-                        SK: createLatestPublishedSortKey(),
-                        TYPE: createFormLatestPublishedType()
-                    })
-                );
+                entityBatch.put({
+                    ...previouslyPublishedForm,
+                    PK: createFormPartitionKey(previouslyPublishedForm),
+                    SK: createLatestPublishedSortKey(),
+                    TYPE: createFormLatestPublishedType()
+                });
             } else {
-                items.push(
-                    entity.deleteBatch({
-                        PK: createFormPartitionKey(form),
-                        SK: createLatestPublishedSortKey()
-                    })
-                );
+                entityBatch.delete({
+                    PK: createFormPartitionKey(form),
+                    SK: createLatestPublishedSortKey()
+                });
             }
         }
         /**
         * Sort out the latest record.
         */
         if (isLatest && previous) {
-            items.push(
-                entity.putBatch({
-                    ...previous,
-                    ...latestKeys,
-                    TYPE: createFormLatestType()
-                })
-            );
+            entityBatch.put({
+                ...previous,
+                ...latestKeys,
+                TYPE: createFormLatestType()
+            });
 
             const { index } = configurations.es({
                 tenant: previous.tenant,
@@ -675,10 +674,7 @@
         * Now save the batch data.
         */
         try {
-            await batchWriteAll({
-                table,
-                items
-            });
+            await entityBatch.execute();
         } catch (ex) {
             throw new WebinyError(
                 ex.message || "Could not delete form revision from regular table.",
@@ -759,36 +755,35 @@
         /**
         * Update revision and latest published records
         */
-        const items = [
-            entity.putBatch({
-                ...form,
-                ...revisionKeys,
-                TYPE: createFormType()
-            }),
-            entity.putBatch({
-                ...form,
-                ...latestPublishedKeys,
-                TYPE: createFormLatestPublishedType()
-            })
-        ];
+        const entityBatch = createEntityWriteBatch({
+            entity,
+            put: [
+                {
+                    ...form,
+                    ...revisionKeys,
+                    TYPE: createFormType()
+                },
+                {
+                    ...form,
+                    ...latestPublishedKeys,
+                    TYPE: createFormLatestPublishedType()
+                }
+            ]
+        });
+
         /**
         * Update the latest form as well
         */
         if (isLatestForm) {
-            items.push(
-                entity.putBatch({
-                    ...form,
-                    ...latestKeys,
-                    TYPE: createFormLatestType()
-                })
-            );
+            entityBatch.put({
+                ...form,
+                ...latestKeys,
+                TYPE: createFormLatestType()
+            });
         }
 
         try {
-            await batchWriteAll({
-                table,
-                items
-            });
+            await entityBatch.execute();
         } catch (ex) {
             throw new WebinyError(
                 ex.message || "Could not publish form.",
@@ -887,14 +882,18 @@
         const isLatest = latestForm ? latestForm.id === form.id : false;
latestPublishedForm.id === form.id : false; - const items = [ - entity.putBatch({ - ...form, - ...revisionKeys, - TYPE: createFormType() - }) - ]; - let esData: any = undefined; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...form, + ...revisionKeys, + TYPE: createFormType() + } + ] + }); + + let esData: FbFormElastic | undefined = undefined; if (isLatest) { esData = getESDataForLatestRevision(form); } @@ -916,23 +915,18 @@ export const createFormStorageOperations = ( const previouslyPublishedRevision = revisions.shift(); if (previouslyPublishedRevision) { - items.push( - entity.putBatch({ - ...previouslyPublishedRevision, - ...latestPublishedKeys, - TYPE: createFormLatestPublishedType() - }) - ); + entityBatch.put({ + ...previouslyPublishedRevision, + ...latestPublishedKeys, + TYPE: createFormLatestPublishedType() + }); } else { - items.push(entity.deleteBatch(latestPublishedKeys)); + entityBatch.delete(latestPublishedKeys); } } try { - await batchWriteAll({ - table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not unpublish form.", diff --git a/packages/api-form-builder-so-ddb-es/src/operations/submission/index.ts b/packages/api-form-builder-so-ddb-es/src/operations/submission/index.ts index a1e1a00aafa..e93ee28b580 100644 --- a/packages/api-form-builder-so-ddb-es/src/operations/submission/index.ts +++ b/packages/api-form-builder-so-ddb-es/src/operations/submission/index.ts @@ -10,7 +10,7 @@ import { import { Entity, Table } from "@webiny/db-dynamodb/toolbox"; import { Client } from "@elastic/elasticsearch"; import WebinyError from "@webiny/error"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { sortItems } from "@webiny/db-dynamodb/utils/sort"; import { createLimit, decodeCursor, encodeCursor } from "@webiny/api-elasticsearch"; import { diff --git a/packages/api-form-builder-so-ddb/src/operations/form/index.ts b/packages/api-form-builder-so-ddb/src/operations/form/index.ts index 3ade575baa2..8177b0c942a 100644 --- a/packages/api-form-builder-so-ddb/src/operations/form/index.ts +++ b/packages/api-form-builder-so-ddb/src/operations/form/index.ts @@ -1,5 +1,5 @@ import WebinyError from "@webiny/error"; -import { +import type { FbForm, FormBuilderStorageOperationsCreateFormFromParams, FormBuilderStorageOperationsCreateFormParams, @@ -14,14 +14,14 @@ import { FormBuilderStorageOperationsUnpublishFormParams, FormBuilderStorageOperationsUpdateFormParams } from "@webiny/api-form-builder/types"; -import { Entity, Table } from "@webiny/db-dynamodb/toolbox"; +import type { Entity, Table } from "@webiny/db-dynamodb/toolbox"; import { queryAll, QueryAllParams } from "@webiny/db-dynamodb/utils/query"; -import { batchWriteAll } from "@webiny/db-dynamodb/utils/batchWrite"; +import { createEntityWriteBatch } from "@webiny/db-dynamodb"; import { filterItems } from "@webiny/db-dynamodb/utils/filter"; import { sortItems } from "@webiny/db-dynamodb/utils/sort"; import { createIdentifier, parseIdentifier } from "@webiny/utils"; -import { PluginsContainer } from "@webiny/plugins"; -import { +import type { PluginsContainer } from "@webiny/plugins"; +import type { FormBuilderFormCreateGSIPartitionKeyParams, FormBuilderFormCreatePartitionKeyParams, FormBuilderFormStorageOperations @@ -60,7 +60,7 @@ export interface CreateFormStorageOperationsParams { export const createFormStorageOperations = ( params: CreateFormStorageOperationsParams ): 
FormBuilderFormStorageOperations => { - const { entity, table, plugins } = params; + const { entity, plugins } = params; const formDynamoDbFields = plugins.byType( FormDynamoDbFieldPlugin.type @@ -123,28 +123,30 @@ export const createFormStorageOperations = ( return "fb.form.latestPublished"; }; - const createRevisionKeys = (form: FbForm): Keys => { + const createRevisionKeys = (form: Pick): Keys => { return { PK: createFormPartitionKey(form), SK: createRevisionSortKey(form) }; }; - const createLatestKeys = (form: FbForm): Keys => { + const createLatestKeys = (form: Pick): Keys => { return { PK: createFormLatestPartitionKey(form), SK: createFormLatestSortKey(form) }; }; - const createLatestPublishedKeys = (form: FbForm): Keys => { + const createLatestPublishedKeys = ( + form: Pick + ): Keys => { return { PK: createFormLatestPublishedPartitionKey(form), SK: createLatestPublishedSortKey(form) }; }; - const createGSIKeys = (form: FbForm): GsiKeys => { + const createGSIKeys = (form: Pick): GsiKeys => { return { GSI1_PK: createFormGSIPartitionKey(form), GSI1_SK: createGSISortKey(form.version) @@ -160,25 +162,25 @@ export const createFormStorageOperations = ( const latestKeys = createLatestKeys(form); const gsiKeys = createGSIKeys(form); - const items = [ - entity.putBatch({ - ...form, - ...revisionKeys, - ...gsiKeys, - TYPE: createFormType() - }), - entity.putBatch({ - ...form, - ...latestKeys, - TYPE: createFormLatestType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...form, + ...revisionKeys, + ...gsiKeys, + TYPE: createFormType() + }, + { + ...form, + ...latestKeys, + TYPE: createFormLatestType() + } + ] + }); try { - await batchWriteAll({ - table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not insert form data into table.", @@ -202,25 +204,25 @@ export const createFormStorageOperations = ( const latestKeys = createLatestKeys(form); const gsiKeys = createGSIKeys(form); - const items = [ - entity.putBatch({ - ...form, - ...revisionKeys, - ...gsiKeys, - TYPE: createFormType() - }), - entity.putBatch({ - ...form, - ...latestKeys, - TYPE: createFormLatestType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...form, + ...revisionKeys, + ...gsiKeys, + TYPE: createFormType() + }, + { + ...form, + ...latestKeys, + TYPE: createFormLatestType() + } + ] + }); try { - await batchWriteAll({ - table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not create form data in the table, from existing form.", @@ -259,28 +261,27 @@ export const createFormStorageOperations = ( }); const isLatestForm = latestForm ? 
latestForm.id === form.id : false; - const items = [ - entity.putBatch({ - ...form, - ...revisionKeys, - ...gsiKeys, - TYPE: createFormType() - }) - ]; - if (isLatestForm) { - items.push( - entity.putBatch({ + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { ...form, - ...latestKeys, - TYPE: createFormLatestType() - }) - ); + ...revisionKeys, + ...gsiKeys, + TYPE: createFormType() + } + ] + }); + + if (isLatestForm) { + entityBatch.put({ + ...form, + ...latestKeys, + TYPE: createFormLatestType() + }); } try { - await batchWriteAll({ - table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not update form data in the table.", @@ -510,29 +511,28 @@ export const createFormStorageOperations = ( } ); } + let latestPublishedKeys: Keys | undefined; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [createLatestKeys(form)] + }); - let hasLatestPublishedRecord = false; - - const deleteItems = items.map(item => { - if (!hasLatestPublishedRecord && item.published) { - hasLatestPublishedRecord = true; + for (const item of items) { + if (!latestPublishedKeys && item.published) { + latestPublishedKeys = createLatestPublishedKeys(item); } - return entity.deleteBatch({ + entityBatch.delete({ PK: item.PK, SK: item.SK }); - }); - if (hasLatestPublishedRecord) { - deleteItems.push(entity.deleteBatch(createLatestPublishedKeys(items[0]))); } - deleteItems.push(entity.deleteBatch(createLatestKeys(items[0]))); + if (latestPublishedKeys) { + entityBatch.delete(latestPublishedKeys); + } try { - await batchWriteAll({ - table, - items: deleteItems - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not delete form and it's submissions.", @@ -561,7 +561,10 @@ export const createFormStorageOperations = ( const isLatest = latestForm ? latestForm.id === form.id : false; const isLatestPublished = latestPublishedForm ? latestPublishedForm.id === form.id : false; - const items = [entity.deleteBatch(revisionKeys)]; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [revisionKeys] + }); if (isLatest || isLatestPublished) { /** @@ -578,42 +581,36 @@ export const createFormStorageOperations = ( }) .shift(); if (previouslyPublishedForm) { - items.push( - entity.putBatch({ - ...previouslyPublishedForm, - ...createLatestPublishedKeys(previouslyPublishedForm), - GSI1_PK: null, - GSI1_SK: null, - TYPE: createFormLatestPublishedType() - }) - ); + entityBatch.put({ + ...previouslyPublishedForm, + ...createLatestPublishedKeys(previouslyPublishedForm), + GSI1_PK: null, + GSI1_SK: null, + TYPE: createFormLatestPublishedType() + }); } else { - items.push(entity.deleteBatch(createLatestPublishedKeys(form))); + entityBatch.delete(createLatestPublishedKeys(form)); } } /** * Sort out the latest record. */ if (isLatest) { - items.push( - entity.putBatch({ - ...previous, - ...latestKeys, - GSI1_PK: null, - GSI1_SK: null, - TYPE: createFormLatestType() - }) - ); + entityBatch.put({ + ...previous, + ...latestKeys, + GSI1_PK: null, + GSI1_SK: null, + TYPE: createFormLatestType() + }); } } /** * Now save the batch data. 
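 * (In this refactor, "the batch data" means everything accumulated on
 * `entityBatch` above: the revision delete, plus the conditional
 * latest-published put/delete and the conditional latest put. The single
 * `execute()` call below flushes it all in one batch write.)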
*/ try { - await batchWriteAll({ - table, - items - }); + await entityBatch.execute(); + return form; } catch (ex) { throw new WebinyError( @@ -667,37 +664,36 @@ export const createFormStorageOperations = ( /** * Update revision and latest published records */ - const items = [ - entity.putBatch({ - ...form, - ...revisionKeys, - ...gsiKeys, - TYPE: createFormType() - }), - entity.putBatch({ - ...form, - ...latestPublishedKeys, - TYPE: createFormLatestPublishedType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...form, + ...revisionKeys, + ...gsiKeys, + TYPE: createFormType() + }, + { + ...form, + ...latestPublishedKeys, + TYPE: createFormLatestPublishedType() + } + ] + }); + /** * Update the latest form as well */ if (isLatestForm) { - items.push( - entity.putBatch({ - ...form, - ...latestKeys, - TYPE: createFormLatestType() - }) - ); + entityBatch.put({ + ...form, + ...latestKeys, + TYPE: createFormLatestType() + }); } try { - await batchWriteAll({ - table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not publish form.", @@ -754,17 +750,20 @@ export const createFormStorageOperations = ( const isLatest = latestForm ? latestForm.id === form.id : false; const isLatestPublished = latestPublishedForm ? latestPublishedForm.id === form.id : false; - const items = [ - entity.putBatch({ - ...form, - ...revisionKeys, - ...gsiKeys, - TYPE: createFormType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...form, + ...revisionKeys, + ...gsiKeys, + TYPE: createFormType() + } + ] + }); if (isLatest) { - entity.putBatch({ + entityBatch.put({ ...form, ...latestKeys, TYPE: createFormLatestType() @@ -788,23 +787,18 @@ export const createFormStorageOperations = ( const previouslyPublishedRevision = revisions.shift(); if (previouslyPublishedRevision) { - items.push( - entity.putBatch({ - ...previouslyPublishedRevision, - ...latestPublishedKeys, - TYPE: createFormLatestPublishedType() - }) - ); + entityBatch.put({ + ...previouslyPublishedRevision, + ...latestPublishedKeys, + TYPE: createFormLatestPublishedType() + }); } else { - items.push(entity.deleteBatch(latestPublishedKeys)); + entityBatch.delete(latestPublishedKeys); } } try { - await batchWriteAll({ - table, - items - }); + await entityBatch.execute(); return form; } catch (ex) { throw new WebinyError( diff --git a/packages/api-form-builder/__tests__/forms.test.ts b/packages/api-form-builder/__tests__/forms.test.ts index 0b29a520ec1..a2b9fc330b0 100644 --- a/packages/api-form-builder/__tests__/forms.test.ts +++ b/packages/api-form-builder/__tests__/forms.test.ts @@ -40,7 +40,7 @@ describe('Form Builder "Form" Test', () => { } }); - test("should create a form and return it in the list of latest forms", async () => { + it("should create a form and return it in the list of latest forms", async () => { const [create] = await createForm({ data: { name: "contact-us" } }); const { id } = create.data.formBuilder.createForm.data; @@ -70,7 +70,7 @@ describe('Form Builder "Form" Test', () => { expect(data[0].id).toEqual(id); }); - test("should update form and return new data from storage", async () => { + it("should update form and return new data from storage", async () => { const [create] = await createForm({ data: { name: "contact-us" } }); const { id } = create.data.formBuilder.createForm.data; @@ -219,7 +219,7 @@ describe('Form Builder "Form" Test', () => { expect(revisions[0].version).toEqual(2); }); - test("should delete a form 
and all of its revisions", async () => { + it("should delete a form and all of its revisions", async () => { const [create] = await createForm({ data: { name: "contact-us" } }); const { id } = create.data.formBuilder.createForm.data; @@ -246,7 +246,7 @@ describe('Form Builder "Form" Test', () => { expect(list.data.formBuilder.listForms.data.length).toBe(0); }); - test("should publish, add views and unpublish", async () => { + it("should publish, add views and unpublish", async () => { const [create] = await createForm({ data: { name: "contact-us" } }); const { id } = create.data.formBuilder.createForm.data; @@ -306,7 +306,7 @@ describe('Form Builder "Form" Test', () => { expect(latestPublished3.data.formBuilder.getPublishedForm.data.id).toEqual(id); }); - test("should create, list and export submissions to file", async () => { + it("should create, list and export submissions to file", async () => { const [create] = await createForm({ data: { name: "contact-us" } }); const { id } = create.data.formBuilder.createForm.data; diff --git a/packages/api-form-builder/__tests__/graphql/i18n.ts b/packages/api-form-builder/__tests__/graphql/i18n.ts new file mode 100644 index 00000000000..86255c3126e --- /dev/null +++ b/packages/api-form-builder/__tests__/graphql/i18n.ts @@ -0,0 +1,31 @@ +export const CREATE_LOCALE = /* GraphQL */ ` + mutation CreateI18NLocale($data: I18NLocaleInput!) { + i18n { + createI18NLocale(data: $data) { + data { + code + } + error { + message + code + } + } + } + } +`; + +export const DELETE_LOCALE = /* GraphQL */ ` + mutation DeleteI18NLocale($code: String!) { + i18n { + deleteI18NLocale(code: $code) { + data { + code + } + error { + message + code + } + } + } + } +`; diff --git a/packages/api-form-builder/__tests__/settings.test.ts b/packages/api-form-builder/__tests__/settings.test.ts index 764543af83c..9addb5a3903 100644 --- a/packages/api-form-builder/__tests__/settings.test.ts +++ b/packages/api-form-builder/__tests__/settings.test.ts @@ -1,9 +1,17 @@ import useGqlHandler from "./useGqlHandler"; +import { GET_SETTINGS } from "~tests/graphql/formBuilderSettings"; describe("Settings Test", () => { - const { getSettings, updateSettings, install, isInstalled } = useGqlHandler(); - - test(`Should not be able to get & update settings before "install"`, async () => { + const { + getSettings, + updateSettings, + install, + createI18NLocale, + deleteI18NLocale, + isInstalled + } = useGqlHandler(); + + it(`Should not be able to get & update settings before "install"`, async () => { // Should not have any settings without install const [getSettingsResponse] = await getSettings(); @@ -39,7 +47,7 @@ describe("Settings Test", () => { }); }); - test("Should be able to install `Form Builder`", async () => { + it("Should be able to install `Form Builder`", async () => { // "isInstalled" should return false prior "install" const [isInstalledResponse] = await isInstalled(); @@ -77,7 +85,7 @@ describe("Settings Test", () => { }); }); - test(`Should be able to get & update settings after "install"`, async () => { + it(`Should be able to get & update settings after "install"`, async () => { // Let's install the `Form builder` const [installResponse] = await install({ domain: "http://localhost:3001" }); @@ -154,4 +162,73 @@ describe("Settings Test", () => { } }); }); + + it(`Should be able to get & update settings in a new locale`, async () => { + // Let's install the `Form builder` + await install({ domain: "http://localhost:3001" }); +
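// (A hedged aside, not part of this diff: the assertions below talk to the
// API in a specific locale by sending the raw "x-i18n-locale" header, whose
// value is a list of "context:localeCode;" pairs. The helper sketched here is
// hypothetical and only illustrates how such a header value is assembled.)
const buildI18NLocaleHeader = (codes: { default: string; content: string }): string => {
    // Produces, e.g., "default:de-DE;content:de-DE;", the format used below.
    return `default:${codes.default};content:${codes.content};`;
};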
await createI18NLocale({ data: { code: "de-DE" } }); + + const { invoke } = useGqlHandler(); + + // We had to do it via `invoke` directly because this way it's possible to + // set the locale header; it wasn't easily possible via the `getSettings` helper. + const [newLocaleFbSettings] = await invoke({ + body: { query: GET_SETTINGS }, + headers: { "x-i18n-locale": "default:de-DE;content:de-DE;" } + }); + + // Settings should exist in the newly created locale. + expect(newLocaleFbSettings).toEqual({ + data: { + formBuilder: { + getSettings: { + data: { + domain: null, + reCaptcha: { + enabled: null, + secretKey: null, + siteKey: null + } + }, + error: null + } + } + } + }); + }); + + it(`Should be able to create a locale, delete it, and then create it again`, async () => { + // Let's install the `Form builder` + await install({ domain: "http://localhost:3001" }); + + await createI18NLocale({ data: { code: "en-US" } }); + await createI18NLocale({ data: { code: "de-DE" } }); + + const [deleteDeLocaleResponse] = await deleteI18NLocale({ code: "de-DE" }); + expect(deleteDeLocaleResponse).toEqual({ + data: { + i18n: { + deleteI18NLocale: { + data: { code: "de-DE" }, + error: null + } + } + } + }); + + const [createDeLocaleResponse] = await createI18NLocale({ data: { code: "de-DE" } }); + expect(createDeLocaleResponse).toEqual({ + data: { + i18n: { + createI18NLocale: { + data: { + code: "de-DE" + }, + error: null + } + } + } + }); + }); }); diff --git a/packages/api-form-builder/__tests__/useGqlHandler.ts b/packages/api-form-builder/__tests__/useGqlHandler.ts index 0d6a8108469..8dbf4b4f1bf 100644 --- a/packages/api-form-builder/__tests__/useGqlHandler.ts +++ b/packages/api-form-builder/__tests__/useGqlHandler.ts @@ -8,8 +8,12 @@ import i18nContext from "@webiny/api-i18n/graphql/context"; import { mockLocalesPlugins } from "@webiny/api-i18n/graphql/testing"; import { SecurityIdentity, SecurityPermission } from "@webiny/api-security/types"; import { createFormBuilder } from "~/index"; +import { createI18NGraphQL } from "@webiny/api-i18n/graphql"; + // Graphql import { INSTALL as INSTALL_FILE_MANAGER } from "./graphql/fileManagerSettings"; +import { DELETE_LOCALE, CREATE_LOCALE } from "./graphql/i18n"; + import { GET_SETTINGS, INSTALL, @@ -41,11 +45,7 @@ import { PluginCollection } from "@webiny/plugins/types"; import { getStorageOps } from "@webiny/project-utils/testing/environment"; import { FileManagerStorageOperations } from "@webiny/api-file-manager/types"; import { HeadlessCmsStorageOperations } from "@webiny/api-headless-cms/types"; -import { - CmsParametersPlugin, - createHeadlessCmsContext, - createHeadlessCmsGraphQL -} from "@webiny/api-headless-cms"; +import { createHeadlessCmsContext, createHeadlessCmsGraphQL } from "@webiny/api-headless-cms"; import { FormBuilderStorageOperations } from "~/types"; import { APIGatewayEvent, LambdaContext } from "@webiny/handler-aws/types"; import { createPageBuilderContext } from "@webiny/api-page-builder"; @@ -83,14 +83,9 @@ export default (params: UseGqlHandlerParams = {}) => { graphqlHandlerPlugins(), ...createTenancyAndSecurity({ permissions, identity }), i18nContext(), + createI18NGraphQL(), i18nStorage.storageOperations, mockLocalesPlugins(), - new CmsParametersPlugin(async () => { - return { - locale: "en-US", - type: "manage" - }; - }), createHeadlessCmsContext({ storageOperations: cmsStorage.storageOperations }), createHeadlessCmsGraphQL(), createPageBuilderContext({ @@ -228,6 +223,14 @@ export default (params: UseGqlHandlerParams = 
{}) => { }, async exportFormSubmissions(variables: Record<string, any>) { return invoke({ body: { query: EXPORT_FORM_SUBMISSIONS, variables } }); + }, + + // Locales. + async createI18NLocale(variables: Record<string, any>) { + return invoke({ body: { query: CREATE_LOCALE, variables } }); + }, + async deleteI18NLocale(variables: Record<string, any>) { + return invoke({ body: { query: DELETE_LOCALE, variables } }); + } }; }; diff --git a/packages/api-form-builder/src/plugins/crud/index.ts b/packages/api-form-builder/src/plugins/crud/index.ts index a4df2a2e981..c0aaf780b26 100644 --- a/packages/api-form-builder/src/plugins/crud/index.ts +++ b/packages/api-form-builder/src/plugins/crud/index.ts @@ -15,94 +15,120 @@ export interface CreateFormBuilderCrudParams { export default (params: CreateFormBuilderCrudParams) => { const { storageOperations } = params; - return new ContextPlugin(async context => { - const getLocale = () => { - const locale = context.i18n.getContentLocale(); - if (!locale) { - throw new WebinyError( - "Missing locale on context.i18n locale in API Form Builder.", - "LOCALE_ERROR" - ); + return [ + new ContextPlugin(async context => { + const getLocale = () => { + const locale = context.i18n.getContentLocale(); + if (!locale) { + throw new WebinyError( + "Missing locale on context.i18n locale in API Form Builder.", + "LOCALE_ERROR" + ); + } + return locale; + }; + + const getIdentity = () => { + return context.security.getIdentity(); + }; + + const getTenant = () => { + return context.tenancy.getCurrentTenant(); + }; + + if (storageOperations.beforeInit) { + try { + await storageOperations.beforeInit(context); + } catch (ex) { + throw new WebinyError( + ex.message || + "Could not run before init in Form Builder storage operations.", + ex.code || "STORAGE_OPERATIONS_BEFORE_INIT_ERROR", + { + ...ex + } + ); + } } - return locale; - }; - const getIdentity = () => { - return context.security.getIdentity(); - }; + const basePermissionsArgs = { + getIdentity, + fullAccessPermissionName: "fb.*" + }; + + const formsPermissions = new FormsPermissions({ + ...basePermissionsArgs, + getPermissions: () => context.security.getPermissions("fb.form") + }); - const getTenant = () => { - return context.tenancy.getCurrentTenant(); - }; + const settingsPermissions = new SettingsPermissions({ + ...basePermissionsArgs, + getPermissions: () => context.security.getPermissions("fb.settings") + }); - if (storageOperations.beforeInit) { + context.formBuilder = { + storageOperations, + ...createSystemCrud({ + getIdentity, + getTenant, + getLocale, + context + }), + ...createSettingsCrud({ + getTenant, + getLocale, + settingsPermissions, + context + }), + ...createFormsCrud({ + getTenant, + getLocale, + formsPermissions, + context + }), + ...createSubmissionsCrud({ + context, + formsPermissions + }) + }; + + if (!storageOperations.init) { + return; + } try { - await storageOperations.beforeInit(context); + await storageOperations.init(context); } catch (ex) { throw new WebinyError( - ex.message || "Could not run before init in Form Builder storage operations.", - ex.code || "STORAGE_OPERATIONS_BEFORE_INIT_ERROR", + ex.message || "Could not run init in Form Builder storage operations.", + ex.code || "STORAGE_OPERATIONS_INIT_ERROR", { ...ex } ); } - } - - const basePermissionsArgs = { - getIdentity, - fullAccessPermissionName: "fb.*" - }; - - const formsPermissions = new FormsPermissions({ - ...basePermissionsArgs, - getPermissions: () => context.security.getPermissions("fb.form") - }); + }), - const settingsPermissions = new
SettingsPermissions({ - ...basePermissionsArgs, - getPermissions: () => context.security.getPermissions("fb.settings") - }); - - context.formBuilder = { - storageOperations, - ...createSystemCrud({ - getIdentity, - getTenant, - getLocale, - context - }), - ...createSettingsCrud({ - getTenant, - getLocale, - settingsPermissions, - context - }), - ...createFormsCrud({ - getTenant, - getLocale, - formsPermissions, - context - }), - ...createSubmissionsCrud({ - context, - formsPermissions - }) - }; - - if (!storageOperations.init) { - return; - } - try { - await storageOperations.init(context); - } catch (ex) { - throw new WebinyError( - ex.message || "Could not run init in Form Builder storage operations.", - ex.code || "STORAGE_OPERATIONS_INIT_ERROR", - { - ...ex + // Once a new locale is created, we need to create a new settings entry for it. + new ContextPlugin(async context => { + context.i18n.locales.onLocaleAfterCreate.subscribe(async params => { + // We don't want to auto-create the settings entry if Form Builder is not installed. + // This is because the entry will be created by the app's installer. + const fbIsInstalled = Boolean(await context.formBuilder.getSystemVersion()); + if (!fbIsInstalled) { + return; } - ); - } - }); + const { locale } = params; + await context.i18n.withLocale(locale, async () => { + return context.formBuilder.createSettings({}); + }); + }); + + context.i18n.locales.onLocaleAfterDelete.subscribe(async params => { + const { locale } = params; + await context.i18n.withLocale(locale, async () => { + return context.formBuilder.deleteSettings(); + }); + }); + }) + ]; }; diff --git a/packages/api-headless-cms-ddb-es/__tests__/__api__/setupFile.js b/packages/api-headless-cms-ddb-es/__tests__/__api__/setupFile.js index e657c72bbb3..a436a3f75d9 100644 --- a/packages/api-headless-cms-ddb-es/__tests__/__api__/setupFile.js +++ b/packages/api-headless-cms-ddb-es/__tests__/__api__/setupFile.js @@ -66,6 +66,9 @@ module.exports = () => { }); }); + createOrRefreshIndexSubscription.name = + "headlessCmsDdbEs.context.createOrRefreshIndexSubscription"; + return { storageOperations: createStorageOperations({ documentClient, diff --git a/packages/api-headless-cms-ddb-es/__tests__/converters/convertersDisabled.test.ts b/packages/api-headless-cms-ddb-es/__tests__/converters/convertersDisabled.test.ts index b3193273d49..dac86634f7c 100644 --- a/packages/api-headless-cms-ddb-es/__tests__/converters/convertersDisabled.test.ts +++ b/packages/api-headless-cms-ddb-es/__tests__/converters/convertersDisabled.test.ts @@ -7,8 +7,6 @@ import { CmsModel } from "@webiny/api-headless-cms/types"; import { get } from "@webiny/db-dynamodb"; import { createPartitionKey } from "~/operations/entry/keys"; -jest.retryTimes(0); - describe("storage field path converters disabled", () => { const { elasticsearch, entryEntity } = useHandler(); diff --git a/packages/api-headless-cms-ddb-es/__tests__/graphql/dummyLocales.ts b/packages/api-headless-cms-ddb-es/__tests__/graphql/dummyLocales.ts index 9424ee10e21..f6b763e697c 100644 --- a/packages/api-headless-cms-ddb-es/__tests__/graphql/dummyLocales.ts +++ b/packages/api-headless-cms-ddb-es/__tests__/graphql/dummyLocales.ts @@ -2,7 +2,7 @@ import { ContextPlugin } from "@webiny/api"; import { CmsContext } from "@webiny/api-headless-cms/types"; export const createDummyLocales = () => { - return new ContextPlugin(async context => { + const plugin = new ContextPlugin(async context => { const { i18n, security } = context; await security.withoutAuthorization(async () 
=> { @@ -23,4 +23,7 @@ export const createDummyLocales = () => { }); }); }); + + plugin.name = "headlessCmsDdbEs.context.createDummyLocales"; + return plugin; }; diff --git a/packages/api-headless-cms-ddb-es/src/definitions/entryElasticsearch.ts b/packages/api-headless-cms-ddb-es/src/definitions/entryElasticsearch.ts index 97bffededb1..338b1b42af0 100644 --- a/packages/api-headless-cms-ddb-es/src/definitions/entryElasticsearch.ts +++ b/packages/api-headless-cms-ddb-es/src/definitions/entryElasticsearch.ts @@ -28,6 +28,9 @@ export const createEntryElasticsearchEntity = ( data: { type: "map" }, + TYPE: { + type: "string" + }, ...(attributes || {}) } }); diff --git a/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts b/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts index 1e35ed56305..6338066f712 100644 --- a/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts +++ b/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; import { CmsStorageEntry } from "@webiny/api-headless-cms/types"; import { createBatchScheduleFn } from "./createBatchScheduleFn"; diff --git a/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts b/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts index 5e510a2a714..d6a409bd913 100644 --- a/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts +++ b/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; import { CmsStorageEntry } from "@webiny/api-headless-cms/types"; import { createPartitionKey, createPublishedSortKey } from "~/operations/entry/keys"; diff --git a/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getRevisionById.ts b/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getRevisionById.ts index 8374efbcb72..4458e9a34fc 100644 --- a/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getRevisionById.ts +++ b/packages/api-headless-cms-ddb-es/src/operations/entry/dataLoader/getRevisionById.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { CmsStorageEntry } from "@webiny/api-headless-cms/types"; import { cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; import { createPartitionKey, createRevisionSortKey } from "~/operations/entry/keys"; diff --git a/packages/api-headless-cms-ddb-es/src/operations/entry/index.ts b/packages/api-headless-cms-ddb-es/src/operations/entry/index.ts index 574423f8119..f5d098b658e 100644 --- a/packages/api-headless-cms-ddb-es/src/operations/entry/index.ts +++ b/packages/api-headless-cms-ddb-es/src/operations/entry/index.ts @@ -1,17 +1,25 @@ import WebinyError from "@webiny/error"; -import { +import type { CmsEntry, CmsModel, 
CmsStorageEntry, - CONTENT_ENTRY_STATUS, StorageOperationsCmsModel } from "@webiny/api-headless-cms/types"; +import { CONTENT_ENTRY_STATUS } from "@webiny/api-headless-cms/types"; import { extractEntriesFromIndex } from "~/helpers"; import { configurations } from "~/configurations"; -import { Entity } from "@webiny/db-dynamodb/toolbox"; -import { Client } from "@elastic/elasticsearch"; -import { PluginsContainer } from "@webiny/plugins"; -import { batchWriteAll, BatchWriteItem } from "@webiny/db-dynamodb/utils/batchWrite"; +import type { Entity } from "@webiny/db-dynamodb/toolbox"; +import type { Client } from "@elastic/elasticsearch"; +import type { PluginsContainer } from "@webiny/plugins"; +import type { BatchReadItem, QueryAllParams, QueryOneParams } from "@webiny/db-dynamodb"; +import { + batchReadAll, + cleanupItem, + createEntityWriteBatch, + getClean, + queryAll, + queryOne +} from "@webiny/db-dynamodb"; import { DataLoadersHandler } from "./dataLoaders"; import { createLatestSortKey, @@ -19,12 +27,6 @@ import { createPublishedSortKey, createRevisionSortKey } from "./keys"; -import { - queryAll, - QueryAllParams, - queryOne, - QueryOneParams -} from "@webiny/db-dynamodb/utils/query"; import { compress, createLimit, @@ -32,21 +34,17 @@ import { decompress, encodeCursor } from "@webiny/api-elasticsearch"; -import { getClean } from "@webiny/db-dynamodb/utils/get"; import { zeroPad } from "@webiny/utils"; -import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; -import { +import type { ElasticsearchSearchResponse, SearchBody as ElasticsearchSearchBody } from "@webiny/api-elasticsearch/types"; -import { CmsEntryStorageOperations, CmsIndexEntry } from "~/types"; +import type { CmsEntryStorageOperations, CmsIndexEntry } from "~/types"; import { createElasticsearchBody } from "./elasticsearch/body"; import { logIgnoredEsResponseError } from "./elasticsearch/logIgnoredEsResponseError"; import { shouldIgnoreEsResponseError } from "./elasticsearch/shouldIgnoreEsResponseError"; import { createLatestRecordType, createPublishedRecordType, createRecordType } from "./recordType"; import { StorageOperationsCmsModelPlugin } from "@webiny/api-headless-cms"; -import { WriteRequest } from "@webiny/aws-sdk/client-dynamodb"; -import { batchReadAll, BatchReadItem } from "@webiny/db-dynamodb"; import { createTransformer } from "./transformations"; import { convertEntryKeysFromStorage } from "./transformations/convertEntryKeys"; import { @@ -57,6 +55,9 @@ import { } from "@webiny/api-headless-cms/constants"; interface ElasticsearchDbRecord { + PK: string; + SK: string; + TYPE: string; index: string; data: Record; } @@ -164,37 +165,35 @@ export const createEntriesStorageOperations = ( SK: createPublishedSortKey() }; - const items = [ - entity.putBatch({ - ...storageEntry, - locked, - ...revisionKeys, - TYPE: createRecordType() - }), - entity.putBatch({ - ...storageEntry, - locked, - ...latestKeys, - TYPE: createLatestRecordType() - }) - ]; - - if (isPublished) { - items.push( - entity.putBatch({ + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { ...storageEntry, locked, - ...publishedKeys, - TYPE: createPublishedRecordType() - }) - ); + ...revisionKeys, + TYPE: createRecordType() + }, + { + ...storageEntry, + locked, + ...latestKeys, + TYPE: createLatestRecordType() + } + ] + }); + + if (isPublished) { + entityBatch.put({ + ...storageEntry, + locked, + ...publishedKeys, + TYPE: createPublishedRecordType() + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + 
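// (A hedged aside, not part of this diff: every hunk in this file swaps the
// old arrays of `entity.putBatch()` / `entity.deleteBatch()` items, formerly
// flushed via `batchWriteAll({ table, items })`, for the
// `createEntityWriteBatch()` builder imported from "@webiny/db-dynamodb".
// The sketch below only illustrates the shape this diff relies on, i.e.
// seeding via `put`/`delete` params, incremental `.put()`/`.delete()` calls,
// and one final `.execute()`; the real implementation may differ.)
type BatchKeys = { PK: string; SK: string };

interface BatchCapableEntity {
    putBatch(item: Record<string, any>): unknown;
    deleteBatch(keys: BatchKeys): unknown;
    table: { batchWrite(items: unknown[]): Promise<unknown> };
}

interface EntityWriteBatchParams {
    entity: BatchCapableEntity;
    put?: Record<string, any>[];
    delete?: BatchKeys[];
}

const createEntityWriteBatchSketch = (params: EntityWriteBatchParams) => {
    const { entity } = params;
    // Seed the batch with whatever was passed via the `put` / `delete` params.
    const items: unknown[] = [
        ...(params.put || []).map(item => entity.putBatch(item)),
        ...(params.delete || []).map(keys => entity.deleteBatch(keys))
    ];
    return {
        put: (item: Record<string, any>) => {
            items.push(entity.putBatch(item));
        },
        delete: (keys: BatchKeys) => {
            items.push(entity.deleteBatch(keys));
        },
        // The entity already knows its table, which is why these hunks can
        // drop the separate `table` argument the old helper required. A real
        // implementation must also respect DynamoDB's limit of 25 items per
        // batch write, as the old `batchWriteAll` helper did.
        execute: async () => {
            for (let i = 0; i < items.length; i += 25) {
                await entity.table.batchWrite(items.slice(i, i + 25));
            }
        }
    };
};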
await entityBatch.execute(); dataLoaders.clearAll({ model }); @@ -211,29 +210,29 @@ export const createEntriesStorageOperations = ( } const esLatestData = await transformer.getElasticsearchLatestEntryData(); - const esItems: BatchWriteItem[] = [ - esEntity.putBatch({ - ...latestKeys, - index: esIndex, - data: esLatestData - }) - ]; + + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity, + put: [ + { + ...latestKeys, + index: esIndex, + data: esLatestData + } + ] + }); + if (isPublished) { const esPublishedData = await transformer.getElasticsearchPublishedEntryData(); - esItems.push( - esEntity.putBatch({ - ...publishedKeys, - index: esIndex, - data: esPublishedData - }) - ); + elasticsearchEntityBatch.put({ + ...publishedKeys, + index: esIndex, + data: esPublishedData + }); } try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not insert entry data into the Elasticsearch DynamoDB table.", @@ -295,27 +294,28 @@ export const createEntriesStorageOperations = ( const esLatestData = await transformer.getElasticsearchLatestEntryData(); - const items = [ - entity.putBatch({ - ...storageEntry, - TYPE: createRecordType(), - ...revisionKeys - }), - entity.putBatch({ - ...storageEntry, - TYPE: createLatestRecordType(), - ...latestKeys - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...storageEntry, + TYPE: createRecordType(), + ...revisionKeys + }, + { + ...storageEntry, + TYPE: createLatestRecordType(), + ...latestKeys + } + ] + }); if (isPublished) { - items.push( - entity.putBatch({ - ...storageEntry, - TYPE: createPublishedRecordType(), - ...publishedKeys - }) - ); + entityBatch.put({ + ...storageEntry, + TYPE: createPublishedRecordType(), + ...publishedKeys + }); // Unpublish previously published revision (if any). 
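// (A hedged aside, not part of this diff: Webiny's single-table layout keeps,
// per entry partition, one "REV#<version>" record per revision plus "L"
// (latest) and "P" (published) pointer records; the literal "L" / "P" sort
// keys are visible in the delete hunks further down this file. When revision
// 2 of an entry is written as published while revision 1 currently holds the
// "P" pointer, the batch built above ends up holding writes like the
// following; key shapes and TYPE values are simplified for illustration.)
const publishSwapIllustration = [
    // The new revision's own record.
    { SK: "REV#0002", TYPE: "cms.entry", status: "published" },
    // The "P" pointer record, overwritten with the new revision's data.
    { SK: "P", TYPE: "cms.entry.p", status: "published" },
    // The lookup just below fetches revision 1 so its record can be flipped.
    { SK: "REV#0001", TYPE: "cms.entry", status: "unpublished" }
];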
const [publishedRevisionStorageEntry] = await dataLoaders.getPublishedRevisionByEntryId( @@ -326,27 +326,23 @@ export const createEntriesStorageOperations = ( ); if (publishedRevisionStorageEntry) { - items.push( - entity.putBatch({ - ...publishedRevisionStorageEntry, - PK: createPartitionKey({ - id: publishedRevisionStorageEntry.id, - locale: model.locale, - tenant: model.tenant - }), - SK: createRevisionSortKey(publishedRevisionStorageEntry), - TYPE: createRecordType(), - status: CONTENT_ENTRY_STATUS.UNPUBLISHED - }) - ); + entityBatch.put({ + ...publishedRevisionStorageEntry, + PK: createPartitionKey({ + id: publishedRevisionStorageEntry.id, + locale: model.locale, + tenant: model.tenant + }), + SK: createRevisionSortKey(publishedRevisionStorageEntry), + TYPE: createRecordType(), + status: CONTENT_ENTRY_STATUS.UNPUBLISHED + }); } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -366,30 +362,28 @@ export const createEntriesStorageOperations = ( model }); - const esItems: BatchWriteItem[] = [ - esEntity.putBatch({ - ...latestKeys, - index: esIndex, - data: esLatestData - }) - ]; + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity, + put: [ + { + ...latestKeys, + index: esIndex, + data: esLatestData + } + ] + }); if (isPublished) { const esPublishedData = await transformer.getElasticsearchPublishedEntryData(); - esItems.push( - esEntity.putBatch({ - ...publishedKeys, - index: esIndex, - data: esPublishedData - }) - ); + elasticsearchEntityBatch.put({ + ...publishedKeys, + index: esIndex, + data: esPublishedData + }); } try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not update latest entry in the DynamoDB Elasticsearch table.", @@ -461,26 +455,30 @@ export const createEntriesStorageOperations = ( ids: [entry.id] }); - const items = [ - entity.putBatch({ - ...storageEntry, - locked, - ...revisionKeys, - TYPE: createRecordType() - }) - ]; - if (isPublished) { - items.push( - entity.putBatch({ + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { ...storageEntry, locked, - ...publishedKeys, - TYPE: createPublishedRecordType() - }) - ); + ...revisionKeys, + TYPE: createRecordType() + } + ] + }); + + if (isPublished) { + entityBatch.put({ + ...storageEntry, + locked, + ...publishedKeys, + TYPE: createPublishedRecordType() + }); } - const esItems: BatchWriteItem[] = []; + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); const { index: esIndex } = configurations.es({ model @@ -495,26 +493,22 @@ export const createEntriesStorageOperations = ( /** * First we update the regular DynamoDB table. 
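 * (The matching Elasticsearch-table writes are only collected afterwards,
 * in the "And then update the Elasticsearch table" step below, and they are
 * executed as a separate batch; the regular table always leads and the
 * search-side copy follows.)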
*/ - items.push( - entity.putBatch({ - ...storageEntry, - ...latestKeys, - TYPE: createLatestRecordType() - }) - ); + entityBatch.put({ + ...storageEntry, + ...latestKeys, + TYPE: createLatestRecordType() + }); /** * And then update the Elasticsearch table to propagate changes to the Elasticsearch */ const elasticsearchLatestData = await transformer.getElasticsearchLatestEntryData(); - esItems.push( - esEntity.putBatch({ - ...latestKeys, - index: esIndex, - data: elasticsearchLatestData - }) - ); + elasticsearchEntityBatch.put({ + ...latestKeys, + index: esIndex, + data: elasticsearchLatestData + }); } else { /** * If not updating latest revision, we still want to update the latest revision's @@ -536,25 +530,21 @@ export const createEntriesStorageOperations = ( * - one for the actual revision record * - one for the latest record */ - items.push( - entity.putBatch({ - ...updatedLatestStorageEntry, - PK: createPartitionKey({ - id: latestStorageEntry.id, - locale: model.locale, - tenant: model.tenant - }), - SK: createRevisionSortKey(latestStorageEntry), - TYPE: createRecordType() - }) - ); + entityBatch.put({ + ...updatedLatestStorageEntry, + PK: createPartitionKey({ + id: latestStorageEntry.id, + locale: model.locale, + tenant: model.tenant + }), + SK: createRevisionSortKey(latestStorageEntry), + TYPE: createRecordType() + }); - items.push( - entity.putBatch({ - ...updatedLatestStorageEntry, - TYPE: createLatestRecordType() - }) - ); + entityBatch.put({ + ...updatedLatestStorageEntry, + TYPE: createLatestRecordType() + }); /** * Update the Elasticsearch table to propagate changes to the Elasticsearch. @@ -575,13 +565,11 @@ export const createEntriesStorageOperations = ( ...updatedEntryLevelMetaFields }); - esItems.push( - esEntity.putBatch({ - ...latestKeys, - index: esIndex, - data: updatedLatestEntry - }) - ); + elasticsearchEntityBatch.put({ + ...latestKeys, + index: esIndex, + data: updatedLatestEntry + }); } } } @@ -589,19 +577,15 @@ export const createEntriesStorageOperations = ( if (isPublished && publishedStorageEntry?.id === entry.id) { const elasticsearchPublishedData = await transformer.getElasticsearchPublishedEntryData(); - esItems.push( - esEntity.putBatch({ - ...publishedKeys, - index: esIndex, - data: elasticsearchPublishedData - }) - ); + elasticsearchEntityBatch.put({ + ...publishedKeys, + index: esIndex, + data: elasticsearchPublishedData + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -616,15 +600,8 @@ export const createEntriesStorageOperations = ( } ); } - if (esItems.length === 0) { - return initialStorageEntry; - } - try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not update entry DynamoDB Elasticsearch records.", @@ -664,17 +641,19 @@ export const createEntriesStorageOperations = ( */ let latestRecord: CmsEntry | undefined = undefined; let publishedRecord: CmsEntry | undefined = undefined; - const items: BatchWriteItem[] = []; + const entityBatch = createEntityWriteBatch({ + entity + }); + for (const record of records) { - items.push( - entity.putBatch({ - ...record, - location: { - ...record?.location, - folderId - } - }) - ); + entityBatch.put({ + ...record, + location: { + ...record?.location, + folderId + } + }); + /** * We need to get the published and latest records, so we can update the Elasticsearch. 
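 * (Concretely: only the latest and published copies of an entry are mirrored
 * into the Elasticsearch DynamoDB table, so while every revision record gets
 * the new `folderId` here, just those two need their search-side `data`
 * rewritten further down.)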
*/ @@ -685,10 +664,7 @@ export const createEntriesStorageOperations = ( } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model }); @@ -743,27 +719,27 @@ export const createEntriesStorageOperations = ( if (esItems.length === 0) { return; } - const esUpdateItems: BatchWriteItem[] = []; - for (const item of esItems) { - esUpdateItems.push( - esEntity.putBatch({ - ...item, - data: await compress(plugins, { - ...item.data, - location: { - ...item.data?.location, - folderId - } - }) + + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity, + put: await Promise.all( + esItems.map(async item => { + return { + ...item, + data: await compress(plugins, { + ...item.data, + location: { + ...item.data?.location, + folderId + } + }) + }; }) - ); - } + ) + }); try { - await batchWriteAll({ - table: esEntity.table, - items: esUpdateItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not move entry DynamoDB Elasticsearch records.", @@ -820,18 +796,20 @@ export const createEntriesStorageOperations = ( */ let latestRecord: CmsEntry | undefined = undefined; let publishedRecord: CmsEntry | undefined = undefined; - const items: BatchWriteItem[] = []; + + const entityBatch = createEntityWriteBatch({ + entity + }); for (const record of records) { - items.push( - entity.putBatch({ - ...record, - ...updatedEntryMetaFields, - wbyDeleted: storageEntry.wbyDeleted, - location: storageEntry.location, - binOriginalFolderId: storageEntry.binOriginalFolderId - }) - ); + entityBatch.put({ + ...record, + ...updatedEntryMetaFields, + wbyDeleted: storageEntry.wbyDeleted, + location: storageEntry.location, + binOriginalFolderId: storageEntry.binOriginalFolderId + }); + /** * We need to get the published and latest records, so we can update the Elasticsearch. */ @@ -846,10 +824,8 @@ export const createEntriesStorageOperations = ( * We write the records back to the primary DynamoDB table. */ try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -915,30 +891,28 @@ export const createEntriesStorageOperations = ( /** * We update all ES records with data received. */ - const esUpdateItems: BatchWriteItem[] = []; + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); + for (const item of esItems) { - esUpdateItems.push( - esEntity.putBatch({ - ...item, - data: await compress(plugins, { - ...item.data, - ...updatedEntryMetaFields, - wbyDeleted: entry.wbyDeleted, - location: entry.location, - binOriginalFolderId: entry.binOriginalFolderId - }) + elasticsearchEntityBatch.put({ + ...item, + data: await compress(plugins, { + ...item.data, + ...updatedEntryMetaFields, + wbyDeleted: entry.wbyDeleted, + location: entry.location, + binOriginalFolderId: entry.binOriginalFolderId }) - ); + }); } /** * We write the records back to the primary DynamoDB Elasticsearch table. 
*/ try { - await batchWriteAll({ - table: esEntity.table, - items: esUpdateItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || @@ -1000,18 +974,20 @@ export const createEntriesStorageOperations = ( */ let latestRecord: CmsEntry | undefined = undefined; let publishedRecord: CmsEntry | undefined = undefined; - const items: BatchWriteItem[] = []; + + const entityBatch = createEntityWriteBatch({ + entity + }); for (const record of records) { - items.push( - entity.putBatch({ - ...record, - ...updatedEntryMetaFields, - wbyDeleted: storageEntry.wbyDeleted, - location: storageEntry.location, - binOriginalFolderId: storageEntry.binOriginalFolderId - }) - ); + entityBatch.put({ + ...record, + ...updatedEntryMetaFields, + wbyDeleted: storageEntry.wbyDeleted, + location: storageEntry.location, + binOriginalFolderId: storageEntry.binOriginalFolderId + }); + /** * We need to get the published and latest records, so we can update the Elasticsearch. */ @@ -1026,10 +1002,8 @@ export const createEntriesStorageOperations = ( * We write the records back to the primary DynamoDB table. */ try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -1092,30 +1066,27 @@ export const createEntriesStorageOperations = ( /** * We update all ES records with data received. */ - const esUpdateItems: BatchWriteItem[] = []; + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); for (const item of esItems) { - esUpdateItems.push( - esEntity.putBatch({ - ...item, - data: await compress(plugins, { - ...item.data, - ...updatedEntryMetaFields, - wbyDeleted: entry.wbyDeleted, - location: entry.location, - binOriginalFolderId: entry.binOriginalFolderId - }) + elasticsearchEntityBatch.put({ + ...item, + data: await compress(plugins, { + ...item.data, + ...updatedEntryMetaFields, + wbyDeleted: entry.wbyDeleted, + location: entry.location, + binOriginalFolderId: entry.binOriginalFolderId }) - ); + }); } /** * We write the records back to the primary DynamoDB Elasticsearch table. 
*/ try { - await batchWriteAll({ - table: esEntity.table, - items: esUpdateItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not restore entry records from DynamoDB Elasticsearch table.", @@ -1158,25 +1129,29 @@ export const createEntriesStorageOperations = ( } }); - const deleteItems = items.map(item => { - return entity.deleteBatch({ - PK: item.PK, - SK: item.SK - }); + const entityBatch = createEntityWriteBatch({ + entity, + delete: items.map(item => { + return { + PK: item.PK, + SK: item.SK + }; + }) }); - const deleteEsItems = esItems.map(item => { - return esEntity.deleteBatch({ - PK: item.PK, - SK: item.SK - }); + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity, + delete: esItems.map(item => { + return { + PK: item.PK, + SK: item.SK + }; + }) }); try { - await batchWriteAll({ - table: entity.table, - items: deleteItems - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -1192,10 +1167,7 @@ export const createEntriesStorageOperations = ( } try { - await batchWriteAll({ - table: esEntity.table, - items: deleteEsItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not destroy entry records from DynamoDB Elasticsearch table.", @@ -1234,34 +1206,33 @@ export const createEntriesStorageOperations = ( /** * We need to delete all existing records of the given entry revision. */ - const items = [ - /** - * Delete records of given entry revision. - */ - entity.deleteBatch({ - PK: partitionKey, - SK: createRevisionSortKey(entry) - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [ + { + PK: partitionKey, + SK: createRevisionSortKey(entry) + } + ] + }); - const esItems: BatchWriteItem[] = []; + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); /** * If revision we are deleting is the published one as well, we need to delete those records as well. */ if (publishedStorageEntry?.id === entry.id) { - items.push( - entity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }) - ); - esItems.push( - esEntity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }) - ); + entityBatch.delete({ + PK: partitionKey, + SK: createPublishedSortKey() + }); + + elasticsearchEntityBatch.delete({ + PK: partitionKey, + SK: createPublishedSortKey() + }); } if (latestEntry && initialLatestStorageEntry) { @@ -1273,31 +1244,27 @@ export const createEntriesStorageOperations = ( /** * In the end we need to set the new latest entry. */ - items.push( - entity.putBatch({ - ...latestStorageEntry, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestRecordType() - }) - ); + entityBatch.put({ + ...latestStorageEntry, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestRecordType() + }); /** * Also perform an update on the actual revision. This is needed * because of updates on the entry-level meta fields. 
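 * (Entry-level meta fields are the values shared by all revisions of an
 * entry, the ones `pickEntryMetaFields` extracts elsewhere in this diff; when
 * the latest pointer moves to an older revision, that revision's own REV#
 * record has to be rewritten with the refreshed values as well.)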
*/ - items.push( - entity.putBatch({ - ...latestStorageEntry, - PK: createPartitionKey({ - id: initialLatestStorageEntry.id, - locale: model.locale, - tenant: model.tenant - }), - SK: createRevisionSortKey(initialLatestStorageEntry), - TYPE: createRecordType() - }) - ); + entityBatch.put({ + ...latestStorageEntry, + PK: createPartitionKey({ + id: initialLatestStorageEntry.id, + locale: model.locale, + tenant: model.tenant + }), + SK: createRevisionSortKey(initialLatestStorageEntry), + TYPE: createRecordType() + }); const latestTransformer = createTransformer({ plugins, @@ -1307,21 +1274,16 @@ export const createEntriesStorageOperations = ( }); const esLatestData = await latestTransformer.getElasticsearchLatestEntryData(); - esItems.push( - esEntity.putBatch({ - PK: partitionKey, - SK: createLatestSortKey(), - index, - data: esLatestData - }) - ); + elasticsearchEntityBatch.put({ + PK: partitionKey, + SK: createLatestSortKey(), + index, + data: esLatestData + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model @@ -1339,15 +1301,8 @@ export const createEntriesStorageOperations = ( ); } - if (esItems.length === 0) { - return; - } - try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || @@ -1379,82 +1334,74 @@ export const createEntriesStorageOperations = ( /** * Then we need to construct the queries for all the revisions and entries. */ - const items: Record[] = []; - const esItems: Record[] = []; + + const entityBatch = createEntityWriteBatch({ + entity + }); + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); for (const id of entries) { /** * Latest item. */ - items.push( - entity.deleteBatch({ - PK: createPartitionKey({ - id, - locale: model.locale, - tenant: model.tenant - }), - SK: "L" - }) - ); - esItems.push( - esEntity.deleteBatch({ - PK: createPartitionKey({ - id, - locale: model.locale, - tenant: model.tenant - }), - SK: "L" - }) - ); + entityBatch.delete({ + PK: createPartitionKey({ + id, + locale: model.locale, + tenant: model.tenant + }), + SK: "L" + }); + + elasticsearchEntityBatch.delete({ + PK: createPartitionKey({ + id, + locale: model.locale, + tenant: model.tenant + }), + SK: "L" + }); + /** * Published item. 
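 * (This mirrors the latest-item deletes above: the "P" pointer record is
 * removed for every entry ID, from both the regular and the Elasticsearch
 * DynamoDB tables, using the same hard-coded sort keys.)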
*/ - items.push( - entity.deleteBatch({ - PK: createPartitionKey({ - id, - locale: model.locale, - tenant: model.tenant - }), - SK: "P" - }) - ); - esItems.push( - esEntity.deleteBatch({ - PK: createPartitionKey({ - id, - locale: model.locale, - tenant: model.tenant - }), - SK: "P" - }) - ); + entityBatch.delete({ + PK: createPartitionKey({ + id, + locale: model.locale, + tenant: model.tenant + }), + SK: "P" + }); + + elasticsearchEntityBatch.delete({ + PK: createPartitionKey({ + id, + locale: model.locale, + tenant: model.tenant + }), + SK: "P" + }); } /** * Exact revisions of all the entries */ for (const revision of revisions) { - items.push( - entity.deleteBatch({ - PK: createPartitionKey({ - id: revision.id, - locale: model.locale, - tenant: model.tenant - }), - SK: createRevisionSortKey({ - version: revision.version - }) + entityBatch.delete({ + PK: createPartitionKey({ + id: revision.id, + locale: model.locale, + tenant: model.tenant + }), + SK: createRevisionSortKey({ + version: revision.version }) - ); + }); } - await batchWriteAll({ - table: entity.table, - items - }); - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await entityBatch.execute(); + await elasticsearchEntityBatch.execute(); }; const list: CmsEntryStorageOperations["list"] = async (initialModel, params) => { @@ -1641,19 +1588,25 @@ export const createEntriesStorageOperations = ( }); // 1. Update REV# and P records with new data. - const items = [ - entity.putBatch({ - ...storageEntry, - ...revisionKeys, - TYPE: createRecordType() - }), - entity.putBatch({ - ...storageEntry, - ...publishedKeys, - TYPE: createPublishedRecordType() - }) - ]; - const esItems: BatchWriteItem[] = []; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...storageEntry, + ...revisionKeys, + TYPE: createRecordType() + }, + { + ...storageEntry, + ...publishedKeys, + TYPE: createPublishedRecordType() + } + ] + }); + + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); const { index: esIndex } = configurations.es({ model @@ -1666,12 +1619,10 @@ export const createEntriesStorageOperations = ( if (publishingLatestRevision) { // 2.1 If we're publishing the latest revision, we first need to update the L record. - items.push( - entity.putBatch({ - ...storageEntry, - ...latestKeys - }) - ); + entityBatch.put({ + ...storageEntry, + ...latestKeys + }); // 2.2 Additionally, if we have a previously published entry, we need to mark it as unpublished. 
// Note that we need to take re-publishing into account (same published revision being @@ -1680,18 +1631,16 @@ export const createEntriesStorageOperations = ( if (publishedStorageEntry) { const isRepublishing = publishedStorageEntry.id === entry.id; if (!isRepublishing) { - items.push( - /** - * Update currently published entry (unpublish it) - */ - entity.putBatch({ - ...publishedStorageEntry, - status: CONTENT_ENTRY_STATUS.UNPUBLISHED, - TYPE: createRecordType(), - PK: createPartitionKey(publishedStorageEntry), - SK: createRevisionSortKey(publishedStorageEntry) - }) - ); + /** + * Update currently published entry (unpublish it) + */ + entityBatch.put({ + ...publishedStorageEntry, + status: CONTENT_ENTRY_STATUS.UNPUBLISHED, + TYPE: createRecordType(), + PK: createPartitionKey(publishedStorageEntry), + SK: createRevisionSortKey(publishedStorageEntry) + }); } } } else { @@ -1716,24 +1665,20 @@ export const createEntriesStorageOperations = ( status: latestRevisionStatus }; - items.push( - entity.putBatch({ - ...latestStorageEntryFields, - PK: createPartitionKey(latestStorageEntry), - SK: createLatestSortKey(), - TYPE: createLatestRecordType() - }) - ); + entityBatch.put({ + ...latestStorageEntryFields, + PK: createPartitionKey(latestStorageEntry), + SK: createLatestSortKey(), + TYPE: createLatestRecordType() + }); // 2.5 Update REV# record. - items.push( - entity.putBatch({ - ...latestStorageEntryFields, - PK: createPartitionKey(latestStorageEntry), - SK: createRevisionSortKey(latestStorageEntry), - TYPE: createRecordType() - }) - ); + entityBatch.put({ + ...latestStorageEntryFields, + PK: createPartitionKey(latestStorageEntry), + SK: createRevisionSortKey(latestStorageEntry), + TYPE: createRecordType() + }); // 2.6 Additionally, if we have a previously published entry, we need to mark it as unpublished. // Note that we need to take re-publishing into account (same published revision being @@ -1745,15 +1690,13 @@ export const createEntriesStorageOperations = ( publishedRevisionId !== latestStorageEntry.id; if (!isRepublishing && publishedRevisionDifferentFromLatest) { - items.push( - entity.putBatch({ - ...publishedStorageEntry, - PK: createPartitionKey(publishedStorageEntry), - SK: createRevisionSortKey(publishedStorageEntry), - TYPE: createRecordType(), - status: CONTENT_ENTRY_STATUS.UNPUBLISHED - }) - ); + entityBatch.put({ + ...publishedStorageEntry, + PK: createPartitionKey(publishedStorageEntry), + SK: createRevisionSortKey(publishedStorageEntry), + TYPE: createRecordType(), + status: CONTENT_ENTRY_STATUS.UNPUBLISHED + }); } } } @@ -1764,13 +1707,11 @@ export const createEntriesStorageOperations = ( * Update the published revision entry in ES. */ const esPublishedData = await transformer.getElasticsearchPublishedEntryData(); - esItems.push( - esEntity.putBatch({ - ...publishedKeys, - index: esIndex, - data: esPublishedData - }) - ); + elasticsearchEntityBatch.put({ + ...publishedKeys, + index: esIndex, + data: esPublishedData + }); /** * Need to decompress the data from Elasticsearch DynamoDB table. 
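// (A hedged aside, not part of this diff: the truncated comment above, "Need
// to decompress the data from Elasticsearch DynamoDB table", refers to the
// fact that the `data` attribute of these records is stored compressed. The
// hunks in this file call `compress(plugins, ...)` before writing; assuming
// `decompress` from "@webiny/api-elasticsearch" mirrors that signature, the
// round-trip looks roughly like this sketch.)
import { compress, decompress } from "@webiny/api-elasticsearch";
import type { PluginsContainer } from "@webiny/plugins";

const esDataRoundTrip = async (plugins: PluginsContainer, data: Record<string, any>) => {
    // Compress before persisting into the record's `data` map attribute...
    const stored = await compress(plugins, data);
    // ...and decompress after reading it back, so it can be re-transformed
    // and re-saved, as the publish flow around this hunk does.
    return decompress(plugins, stored);
};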
@@ -1797,14 +1738,12 @@ export const createEntriesStorageOperations = ( } }); - esItems.push( - esEntity.putBatch({ - index: esIndex, - PK: createPartitionKey(latestEsEntryDataDecompressed), - SK: createLatestSortKey(), - data: await latestTransformer.getElasticsearchLatestEntryData() - }) - ); + elasticsearchEntityBatch.put({ + index: esIndex, + PK: createPartitionKey(latestEsEntryDataDecompressed), + SK: createLatestSortKey(), + data: await latestTransformer.getElasticsearchLatestEntryData() + }); } else { const updatedEntryLevelMetaFields = pickEntryMetaFields( entry, @@ -1836,13 +1775,11 @@ export const createEntriesStorageOperations = ( status: latestRevisionStatus }); - esItems.push( - esEntity.putBatch({ - ...latestKeys, - index: esIndex, - data: updatedLatestEntry - }) - ); + elasticsearchEntityBatch.put({ + ...latestKeys, + index: esIndex, + data: updatedLatestEntry + }); } } @@ -1850,10 +1787,8 @@ export const createEntriesStorageOperations = ( * Finally, execute regular table batch. */ try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -1873,10 +1808,7 @@ export const createEntriesStorageOperations = ( * And Elasticsearch table batch. */ try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || @@ -1919,25 +1851,34 @@ export const createEntriesStorageOperations = ( tenant: model.tenant }); - const items = [ - entity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }), - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createRevisionSortKey(entry), - TYPE: createRecordType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...storageEntry, + PK: partitionKey, + SK: createRevisionSortKey(entry), + TYPE: createRecordType() + } + ], + delete: [ + { + PK: partitionKey, + SK: createPublishedSortKey() + } + ] + }); + + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity, + delete: [ + { + PK: partitionKey, + SK: createPublishedSortKey() + } + ] + }); - const esItems: BatchWriteItem[] = [ - esEntity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }) - ]; /** * If we are unpublishing the latest revision, let's also update the latest revision entry's status in both DynamoDB tables. */ @@ -1946,34 +1887,28 @@ export const createEntriesStorageOperations = ( model }); - items.push( - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestRecordType() - }) - ); + entityBatch.put({ + ...storageEntry, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestRecordType() + }); const esLatestData = await transformer.getElasticsearchLatestEntryData(); - esItems.push( - esEntity.putBatch({ - PK: partitionKey, - SK: createLatestSortKey(), - index, - data: esLatestData - }) - ); + elasticsearchEntityBatch.put({ + PK: partitionKey, + SK: createLatestSortKey(), + index, + data: esLatestData + }); } /** * Finally, execute regular table batch. */ try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -1991,10 +1926,7 @@ export const createEntriesStorageOperations = ( * And Elasticsearch table batch. 
*/ try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || diff --git a/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts b/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts index b35994d54f1..fc13f4270d4 100644 --- a/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts +++ b/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getLatestRevisionByEntryId.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; import { CmsStorageEntry } from "@webiny/api-headless-cms/types"; import { createBatchScheduleFn } from "./createBatchScheduleFn"; diff --git a/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts b/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts index 062ebae0557..d3b97b70eae 100644 --- a/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts +++ b/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getPublishedRevisionByEntryId.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; import { CmsStorageEntry } from "@webiny/api-headless-cms/types"; import { createPartitionKey, createPublishedSortKey } from "~/operations/entry/keys"; diff --git a/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getRevisionById.ts b/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getRevisionById.ts index 7c020d82574..04b5783ace6 100644 --- a/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getRevisionById.ts +++ b/packages/api-headless-cms-ddb/src/operations/entry/dataLoader/getRevisionById.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { CmsStorageEntry } from "@webiny/api-headless-cms/types"; import { cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; import { createPartitionKey, createRevisionSortKey } from "~/operations/entry/keys"; diff --git a/packages/api-headless-cms-ddb/src/operations/entry/index.ts b/packages/api-headless-cms-ddb/src/operations/entry/index.ts index 9c5f84e5b90..5600f97a037 100644 --- a/packages/api-headless-cms-ddb/src/operations/entry/index.ts +++ b/packages/api-headless-cms-ddb/src/operations/entry/index.ts @@ -1,15 +1,15 @@ import WebinyError from "@webiny/error"; import { DataLoadersHandler } from "./dataLoaders"; -import { +import type { CmsEntry, CmsEntryListWhere, CmsEntryUniqueValue, CmsModel, CmsStorageEntry, - CONTENT_ENTRY_STATUS, StorageOperationsCmsModel } from "@webiny/api-headless-cms/types"; -import { Entity } from "@webiny/db-dynamodb/toolbox"; +import { CONTENT_ENTRY_STATUS } from "@webiny/api-headless-cms/types"; +import type { Entity } from "@webiny/db-dynamodb/toolbox"; import { createGSIPartitionKey, createGSISortKey, @@ -18,24 +18,23 @@ import { createPublishedSortKey, createRevisionSortKey } from 
"~/operations/entry/keys"; -import { batchWriteAll } from "@webiny/db-dynamodb/utils/batchWrite"; import { - DbItem, + cleanupItem, + cleanupItems, + createEntityWriteBatch, queryAll, QueryAllParams, queryOne, QueryOneParams -} from "@webiny/db-dynamodb/utils/query"; -import { cleanupItem, cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; -import { PluginsContainer } from "@webiny/plugins"; +} from "@webiny/db-dynamodb"; +import type { PluginsContainer } from "@webiny/plugins"; import { decodeCursor, encodeCursor } from "@webiny/utils/cursor"; import { zeroPad } from "@webiny/utils/zeroPad"; import { StorageOperationsCmsModelPlugin, StorageTransformPlugin } from "@webiny/api-headless-cms"; -import { FilterItemFromStorage } from "./filtering/types"; +import type { FilterItemFromStorage } from "./filtering/types"; import { createFields } from "~/operations/entry/filtering/createFields"; import { filter, sort } from "~/operations/entry/filtering"; -import { WriteRequest } from "@webiny/aws-sdk/client-dynamodb"; -import { CmsEntryStorageOperations } from "~/types"; +import type { CmsEntryStorageOperations } from "~/types"; import { isDeletedEntryMetaField, isEntryLevelEntryMetaField, @@ -167,49 +166,47 @@ export const createEntriesStorageOperations = ( * - create new main entry item * - create new or update the latest entry item */ - const items = [ - entity.putBatch({ - ...storageEntry, - locked, - PK: partitionKey, - SK: createRevisionSortKey(entry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(storageEntry) - }), - entity.putBatch({ - ...storageEntry, - locked, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(storageEntry) - }) - ]; - - /** - * We need to create published entry if - */ - if (isPublished) { - items.push( - entity.putBatch({ + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { ...storageEntry, locked, PK: partitionKey, - SK: createPublishedSortKey(), + SK: createRevisionSortKey(entry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(storageEntry) + }, + { + ...storageEntry, + locked, + PK: partitionKey, + SK: createLatestSortKey(), TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "P"), + GSI1_PK: createGSIPartitionKey(model, "L"), GSI1_SK: createGSISortKey(storageEntry) - }) - ); + } + ] + }); + + /** + * We need to create published entry if + */ + if (isPublished) { + entityBatch.put({ + ...storageEntry, + locked, + PK: partitionKey, + SK: createPublishedSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "P"), + GSI1_SK: createGSISortKey(storageEntry) + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model }); @@ -253,37 +250,38 @@ export const createEntriesStorageOperations = ( * - update the published entry item to the current one * - unpublish previously published revision (if any) */ - const items = [ - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createRevisionSortKey(storageEntry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(storageEntry) - }), - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(storageEntry) - }) - ]; - - 
const isPublished = entry.status === "published"; - if (isPublished) { - items.push( - entity.putBatch({ + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { ...storageEntry, PK: partitionKey, - SK: createPublishedSortKey(), - TYPE: createPublishedType(), - GSI1_PK: createGSIPartitionKey(model, "P"), + SK: createRevisionSortKey(storageEntry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), GSI1_SK: createGSISortKey(storageEntry) - }) - ); + }, + { + ...storageEntry, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(storageEntry) + } + ] + }); + + const isPublished = entry.status === "published"; + if (isPublished) { + entityBatch.put({ + ...storageEntry, + PK: partitionKey, + SK: createPublishedSortKey(), + TYPE: createPublishedType(), + GSI1_PK: createGSIPartitionKey(model, "P"), + GSI1_SK: createGSISortKey(storageEntry) + }); // Unpublish previously published revision (if any). const [publishedRevisionStorageEntry] = await dataLoaders.getPublishedRevisionByEntryId( @@ -294,25 +292,20 @@ export const createEntriesStorageOperations = ( ); if (publishedRevisionStorageEntry) { - items.push( - entity.putBatch({ - ...publishedRevisionStorageEntry, - PK: partitionKey, - SK: createRevisionSortKey(publishedRevisionStorageEntry), - TYPE: createType(), - status: CONTENT_ENTRY_STATUS.UNPUBLISHED, - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(publishedRevisionStorageEntry) - }) - ); + entityBatch.put({ + ...publishedRevisionStorageEntry, + PK: partitionKey, + SK: createRevisionSortKey(publishedRevisionStorageEntry), + TYPE: createType(), + status: CONTENT_ENTRY_STATUS.UNPUBLISHED, + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(publishedRevisionStorageEntry) + }); } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model }); @@ -346,8 +339,6 @@ export const createEntriesStorageOperations = ( const isPublished = entry.status === "published"; const locked = isPublished ? 
true : entry.locked; - const items = []; - const storageEntry = convertToStorageEntry({ model, storageEntry: initialStorageEntry @@ -357,30 +348,32 @@ export const createEntriesStorageOperations = ( * - update the current entry * - update the latest entry if the current entry is the latest one */ - items.push( - entity.putBatch({ - ...storageEntry, - locked, - PK: partitionKey, - SK: createRevisionSortKey(storageEntry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(storageEntry) - }) - ); - if (isPublished) { - items.push( - entity.putBatch({ + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { ...storageEntry, locked, PK: partitionKey, - SK: createPublishedSortKey(), - TYPE: createPublishedType(), - GSI1_PK: createGSIPartitionKey(model, "P"), + SK: createRevisionSortKey(storageEntry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), GSI1_SK: createGSISortKey(storageEntry) - }) - ); + } + ] + }); + + if (isPublished) { + entityBatch.put({ + ...storageEntry, + locked, + PK: partitionKey, + SK: createPublishedSortKey(), + TYPE: createPublishedType(), + GSI1_PK: createGSIPartitionKey(model, "P"), + GSI1_SK: createGSISortKey(storageEntry) + }); } /** @@ -391,17 +384,15 @@ export const createEntriesStorageOperations = ( if (latestStorageEntry) { const updatingLatestRevision = latestStorageEntry.id === entry.id; if (updatingLatestRevision) { - items.push( - entity.putBatch({ - ...storageEntry, - locked, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(entry) - }) - ); + entityBatch.put({ + ...storageEntry, + locked, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(entry) + }); } else { /** * If not updating latest revision, we still want to update the latest revision's @@ -417,37 +408,30 @@ export const createEntriesStorageOperations = ( * - one for the actual revision record * - one for the latest record */ - items.push( - entity.putBatch({ - ...latestStorageEntry, - ...updatedEntryLevelMetaFields, - PK: partitionKey, - SK: createRevisionSortKey(latestStorageEntry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(latestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntry, + ...updatedEntryLevelMetaFields, + PK: partitionKey, + SK: createRevisionSortKey(latestStorageEntry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(latestStorageEntry) + }); - items.push( - entity.putBatch({ - ...latestStorageEntry, - ...updatedEntryLevelMetaFields, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(latestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntry, + ...updatedEntryLevelMetaFields, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(latestStorageEntry) + }); } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model }); @@ -490,23 +474,24 @@ export const createEntriesStorageOperations = ( /** * Then create the batch writes for the DynamoDB, with the updated folderId. 
*/ - const items = records.map(item => { - return entity.putBatch({ - ...item, - location: { - ...item.location, - folderId - } - }); + const entityBatch = createEntityWriteBatch({ + entity, + put: records.map(item => { + return { + ...item, + location: { + ...item.location, + folderId + } + }; + }) }); + /** * And finally write it... */ try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw WebinyError.from(ex, { message: "Could not move records to a new folder.", @@ -518,7 +503,10 @@ } }; - const moveToBin: CmsEntryStorageOperations["moveToBin"] = async (initialModel, params) => { + const moveToBin: CmsEntryStorageOperations["moveToBin"] = async ( + initialModel, + params + ): Promise<void> => { const { entry, storageEntry: initialStorageEntry } = params; const model = getStorageOperationsModel(initialModel); @@ -537,9 +525,9 @@ } }; - let records: DbItem[] = []; + let records: Awaited<ReturnType<typeof queryAll>> = []; try { - records = await queryAll(queryAllParams); + records = await queryAll(queryAllParams); } catch (ex) { throw new WebinyError( ex.message || "Could not load all records.", @@ -550,6 +538,9 @@ } ); } + if (records.length === 0) { + return; + } const storageEntry = convertToStorageEntry({ model, storageEntry: initialStorageEntry }); @@ -564,23 +555,23 @@ /** * Then create the batch writes for the DynamoDB, with the updated data. */ - const items = records.map(record => { - return entity.putBatch({ - ...record, - ...updatedDeletedMetaFields, - wbyDeleted: storageEntry.wbyDeleted, - location: storageEntry.location, - binOriginalFolderId: storageEntry.binOriginalFolderId - }); + const entityBatch = createEntityWriteBatch({ + entity, + put: records.map(record => { + return { + ...record, + ...updatedDeletedMetaFields, + wbyDeleted: storageEntry.wbyDeleted, + location: storageEntry.location, + binOriginalFolderId: storageEntry.binOriginalFolderId + }; + }) }); /** * And finally write it... */
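One behavioral addition hides in this hunk: the new `if (records.length === 0) { return; }` guard. Besides skipping pointless work, it prevents `execute()` from being called with an empty operation list; DynamoDB's underlying `BatchWriteItem` accepts only 1 to 25 request items per call, so an empty flush is at best a no-op the batch writer must special-case. A condensed view of the resulting flow, with names taken from the diff and the override fields abbreviated:

```ts
// Sketch of the moveToBin flow after this change (abbreviated).
const records = await queryAll(queryAllParams);
if (records.length === 0) {
    // Nothing stored for this entry; bail out before building an empty batch.
    return;
}
const entityBatch = createEntityWriteBatch({
    entity,
    // Rewrite every stored record with the bin-related overrides.
    put: records.map(record => ({
        ...record,
        wbyDeleted: storageEntry.wbyDeleted,
        location: storageEntry.location,
        binOriginalFolderId: storageEntry.binOriginalFolderId
    }))
});
await entityBatch.execute();
```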
try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not move the entry to the bin.", @@ -611,7 +602,7 @@ } }; - let records: DbItem[] = []; + let records: Awaited<ReturnType<typeof queryAll>> = []; try { records = await queryAll(queryAllParams); } catch (ex) { @@ -624,18 +615,19 @@ } ); } - const items = records.map(item => { - return entity.deleteBatch({ - PK: item.PK, - SK: item.SK - }); + + const entityBatch = createEntityWriteBatch({ + entity, + delete: records.map(item => { + return { + PK: item.PK, + SK: item.SK + }; + }) }); try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -655,7 +647,7 @@ const restoreFromBin: CmsEntryStorageOperations["restoreFromBin"] = async ( initialModel, params - ) => { + ): Promise<CmsStorageEntry> => { const { entry, storageEntry: initialStorageEntry } = params; const model = getStorageOperationsModel(initialModel); @@ -674,9 +666,9 @@ } }; - let records: DbItem[] = []; + let records: Awaited<ReturnType<typeof queryAll>> = []; try { - records = await queryAll(queryAllParams); + records = await queryAll(queryAllParams); } catch (ex) { throw new WebinyError( ex.message || "Could not load all records.", @@ -687,6 +679,9 @@ } ); } + if (records.length === 0) { + return initialStorageEntry; + } const storageEntry = convertToStorageEntry({ model, storageEntry: initialStorageEntry }); @@ -701,23 +696,24 @@ isRestoredEntryMetaField ); - const items = records.map(record => { - return entity.putBatch({ - ...record, - ...updatedRestoredMetaFields, - wbyDeleted: storageEntry.wbyDeleted, - location: storageEntry.location, - binOriginalFolderId: storageEntry.binOriginalFolderId - }); + const entityBatch = createEntityWriteBatch({ + entity, + put: records.map(record => { + return { + ...record, + ...updatedRestoredMetaFields, + wbyDeleted: storageEntry.wbyDeleted, + location: storageEntry.location, + binOriginalFolderId: storageEntry.binOriginalFolderId + }; + }) }); + /** * And finally write it... */ try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model @@ -751,12 +747,15 @@ tenant: model.tenant }); - const items = [ - entity.deleteBatch({ - PK: partitionKey, - SK: createRevisionSortKey(entry) - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [ + { + PK: partitionKey, + SK: createRevisionSortKey(entry) + } + ] + }); const publishedStorageEntry = await getPublishedRevisionByEntryId(model, entry); @@ -764,12 +763,10 @@ * If revision we are deleting is the published one as well, we need to delete those records as well. */
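The write set that `deleteRevision` assembles below mixes deletions and puts in a single batch. That is legal at the DynamoDB level, with one caveat worth noting: a `BatchWriteItem` request may combine put and delete operations freely, but it cannot both put and delete the same PK/SK pair in one request. The keys used below are all distinct (the revision's own sort key, the published "P" key, and the latest "L" key), so the batch stays valid. A summary sketch, where `deletingPublishedRevision` stands in for the `publishedStorageEntry && entry.id === publishedStorageEntry.id` check:

```ts
// Condensed deleteRevision write set (names from the diff; the flag is
// invented here purely for readability).
const entityBatch = createEntityWriteBatch({
    entity,
    delete: [{ PK: partitionKey, SK: createRevisionSortKey(entry) }]
});
if (deletingPublishedRevision) {
    entityBatch.delete({ PK: partitionKey, SK: createPublishedSortKey() });
}
if (initialLatestStorageEntry) {
    // Rewrite the "L" record and the latest revision's own record so their
    // entry-level meta fields stay in sync.
    entityBatch.put({ /* latest entry under the "L" sort key */ });
    entityBatch.put({ /* latest entry under its REV# sort key */ });
}
await entityBatch.execute();
```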
if (publishedStorageEntry && entry.id === publishedStorageEntry.id) { - items.push( - entity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }) - ); + entityBatch.delete({ + PK: partitionKey, + SK: createPublishedSortKey() + }); } if (initialLatestStorageEntry) { @@ -777,35 +774,29 @@ storageEntry: initialLatestStorageEntry, model }); - items.push( - entity.putBatch({ - ...latestStorageEntry, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(latestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntry, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(latestStorageEntry) + }); // Do an update on the latest revision. We need to update the latest revision's // entry-level meta fields to match the previous revision's entry-level meta fields. - items.push( - entity.putBatch({ - ...latestStorageEntry, - PK: partitionKey, - SK: createRevisionSortKey(initialLatestStorageEntry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(initialLatestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntry, + PK: partitionKey, + SK: createRevisionSortKey(initialLatestStorageEntry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(initialLatestStorageEntry) + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); + dataLoaders.clearAll({ model }); @@ -834,57 +825,43 @@ /** * Then we need to construct the queries for all the revisions and entries. */ - const items: Record<string, any>[] = []; + + const entityBatch = createEntityWriteBatch({ + entity + }); + for (const id of entries) { - /** - * Latest item. - */ - items.push( - entity.deleteBatch({ - PK: createPartitionKey({ - id, - locale: model.locale, - tenant: model.tenant - }), - SK: "L" - }) - ); - /** - * Published item. - */ - items.push( - entity.deleteBatch({ - PK: createPartitionKey({ - id, - locale: model.locale, - tenant: model.tenant - }), - SK: "P" - }) - ); + const partitionKey = createPartitionKey({ + id, + locale: model.locale, + tenant: model.tenant + }); + entityBatch.delete({ + PK: partitionKey, + SK: "L" + }); + entityBatch.delete({ + PK: partitionKey, + SK: "P" + }); } /** * Exact revisions of all the entries */ for (const revision of revisions) { - items.push( - entity.deleteBatch({ - PK: createPartitionKey({ - id: revision.id, - locale: model.locale, - tenant: model.tenant - }), - SK: createRevisionSortKey({ - version: revision.version - }) + entityBatch.delete({ + PK: createPartitionKey({ + id: revision.id, + locale: model.locale, + tenant: model.tenant + }), + SK: createRevisionSortKey({ + version: revision.version }) - ); + }); } - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); }; const getLatestRevisionByEntryId: CmsEntryStorageOperations["getLatestRevisionByEntryId"] = @@ -1239,24 +1216,27 @@ }); // 1. Update REV# and P records with new data.
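The sort keys used throughout these storage operations define the whole single-table layout: each entry owns one partition, with an "L" record pointing at the latest revision, a "P" record pointing at the published one, and one "REV#"-prefixed record per concrete revision. The key helpers are imported rather than shown in this diff, so the following sketch is an assumption built from the literals visible above (`SK: "L"`, `SK: "P"`, the "REV# and P records" comments) and the `zeroPad` import:

```ts
import { zeroPad } from "@webiny/utils/zeroPad";

// Assumed implementations of the sort-key helpers used in this file.
// Zero-padding keeps revisions lexicographically sorted by version.
const createLatestSortKey = (): string => "L";
const createPublishedSortKey = (): string => "P";
const createRevisionSortKey = ({ version }: { version: number }): string => {
    return `REV#${zeroPad(version)}`; // e.g. version 1 -> "REV#0001" (format assumed)
};
```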
- const items = [ - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createRevisionSortKey(entry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(entry) - }), - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createPublishedSortKey(), - TYPE: createPublishedType(), - GSI1_PK: createGSIPartitionKey(model, "P"), - GSI1_SK: createGSISortKey(entry) - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...storageEntry, + PK: partitionKey, + SK: createRevisionSortKey(entry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(entry) + }, + { + ...storageEntry, + PK: partitionKey, + SK: createPublishedSortKey(), + TYPE: createPublishedType(), + GSI1_PK: createGSIPartitionKey(model, "P"), + GSI1_SK: createGSISortKey(entry) + } + ] + }); // 2. When it comes to the latest record, we need to perform a couple of different // updates, based on whether the entry being published is the latest revision or not. @@ -1265,16 +1245,14 @@ export const createEntriesStorageOperations = ( if (publishingLatestRevision) { // 2.1 If we're publishing the latest revision, we first need to update the L record. - items.push( - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(entry) - }) - ); + entityBatch.put({ + ...storageEntry, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(entry) + }); // 2.2 Additionally, if we have a previously published entry, we need to mark it as unpublished. if (publishedRevisionId && publishedRevisionId !== entry.id) { @@ -1283,17 +1261,15 @@ export const createEntriesStorageOperations = ( model }); - items.push( - entity.putBatch({ - ...publishedStorageEntry, - PK: partitionKey, - SK: createRevisionSortKey(publishedStorageEntry), - TYPE: createType(), - status: CONTENT_ENTRY_STATUS.UNPUBLISHED, - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(publishedStorageEntry) - }) - ); + entityBatch.put({ + ...publishedStorageEntry, + PK: partitionKey, + SK: createRevisionSortKey(publishedStorageEntry), + TYPE: createType(), + status: CONTENT_ENTRY_STATUS.UNPUBLISHED, + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(publishedStorageEntry) + }); } } else { // 2.3 If the published revision is not the latest one, the situation is a bit @@ -1322,28 +1298,24 @@ export const createEntriesStorageOperations = ( status: latestRevisionStatus }; - items.push( - entity.putBatch({ - ...latestStorageEntryFields, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(latestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntryFields, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(latestStorageEntry) + }); // 2.3.2 Update REV# record. 
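The `latestStorageEntryFields` object assembled above merges the latest revision's stored fields with the entry-level meta fields picked off the revision being published, so that fields shared by the whole entry stay consistent across records. The helper itself lives in the headless CMS package and is not part of this diff; a hedged sketch of its apparent semantics:

```ts
// Illustrative only: copy the fields a predicate classifies as entry-level
// meta fields, leaving revision-level fields behind. The real helper
// presumably works from a known list of meta fields rather than every key.
const pickEntryMetaFields = (
    entry: Record<string, any>,
    isPickedField: (fieldName: string) => boolean
): Record<string, any> => {
    const picked: Record<string, any> = {};
    for (const fieldName of Object.keys(entry)) {
        if (isPickedField(fieldName)) {
            picked[fieldName] = entry[fieldName];
        }
    }
    return picked;
};
```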
- items.push( - entity.putBatch({ - ...latestStorageEntryFields, - PK: partitionKey, - SK: createRevisionSortKey(latestStorageEntry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(latestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntryFields, + PK: partitionKey, + SK: createRevisionSortKey(latestStorageEntry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(latestStorageEntry) + }); // 2.3.3 Finally, if we got a published entry, but it wasn't the latest one, we need to take // an extra step and mark it as unpublished. @@ -1355,25 +1327,20 @@ export const createEntriesStorageOperations = ( model }); - items.push( - entity.putBatch({ - ...publishedStorageEntry, - PK: partitionKey, - SK: createRevisionSortKey(publishedStorageEntry), - TYPE: createType(), - status: CONTENT_ENTRY_STATUS.UNPUBLISHED, - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(publishedStorageEntry) - }) - ); + entityBatch.put({ + ...publishedStorageEntry, + PK: partitionKey, + SK: createRevisionSortKey(publishedStorageEntry), + TYPE: createType(), + status: CONTENT_ENTRY_STATUS.UNPUBLISHED, + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(publishedStorageEntry) + }); } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model }); @@ -1411,20 +1378,25 @@ export const createEntriesStorageOperations = ( * - update current entry revision with new data * - update the latest entry status - if entry being unpublished is latest */ - const items = [ - entity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }), - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createRevisionSortKey(entry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(entry) - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [ + { + PK: partitionKey, + SK: createPublishedSortKey() + } + ], + put: [ + { + ...storageEntry, + PK: partitionKey, + SK: createRevisionSortKey(entry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(entry) + } + ] + }); /** * We need the latest entry to see if something needs to be updated alongside the unpublishing one. @@ -1434,16 +1406,14 @@ export const createEntriesStorageOperations = ( if (initialLatestStorageEntry) { const unpublishingLatestRevision = entry.id === initialLatestStorageEntry.id; if (unpublishingLatestRevision) { - items.push( - entity.putBatch({ - ...storageEntry, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(entry) - }) - ); + entityBatch.put({ + ...storageEntry, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(entry) + }); } else { const latestStorageEntry = convertToStorageEntry({ storageEntry: initialLatestStorageEntry, @@ -1458,38 +1428,31 @@ export const createEntriesStorageOperations = ( ); // 1. Update actual revision record. 
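Because a revision is materialized under several sort keys (its own record plus, when applicable, the "L" and "P" records), every status change has to rewrite all copies; that is why the hunks above and below always emit the same payload more than once, varying only the sort key and TYPE. A tiny helper expressing the recurring pattern, purely illustrative since the diff inlines it at each call site:

```ts
interface ISortKeyTarget {
    SK: string;
    TYPE: string;
}

// Write one payload under several sort-key/TYPE pairs so the duplicated
// records cannot drift apart. Illustrative; not part of the diff.
const putUnderSortKeys = (
    batch: { put: (item: Record<string, any>) => void },
    item: Record<string, any>,
    targets: ISortKeyTarget[]
): void => {
    for (const target of targets) {
        batch.put({ ...item, SK: target.SK, TYPE: target.TYPE });
    }
};
```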
- items.push( - entity.putBatch({ - ...latestStorageEntry, - ...updatedEntryLevelMetaFields, - PK: partitionKey, - SK: createRevisionSortKey(latestStorageEntry), - TYPE: createType(), - GSI1_PK: createGSIPartitionKey(model, "A"), - GSI1_SK: createGSISortKey(latestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntry, + ...updatedEntryLevelMetaFields, + PK: partitionKey, + SK: createRevisionSortKey(latestStorageEntry), + TYPE: createType(), + GSI1_PK: createGSIPartitionKey(model, "A"), + GSI1_SK: createGSISortKey(latestStorageEntry) + }); // 2. Update latest record. - items.push( - entity.putBatch({ - ...latestStorageEntry, - ...updatedEntryLevelMetaFields, - PK: partitionKey, - SK: createLatestSortKey(), - TYPE: createLatestType(), - GSI1_PK: createGSIPartitionKey(model, "L"), - GSI1_SK: createGSISortKey(latestStorageEntry) - }) - ); + entityBatch.put({ + ...latestStorageEntry, + ...updatedEntryLevelMetaFields, + PK: partitionKey, + SK: createLatestSortKey(), + TYPE: createLatestType(), + GSI1_PK: createGSIPartitionKey(model, "L"), + GSI1_SK: createGSISortKey(latestStorageEntry) + }); } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); dataLoaders.clearAll({ model }); diff --git a/packages/api-headless-cms/src/modelManager/DefaultCmsModelManager.ts b/packages/api-headless-cms/src/modelManager/DefaultCmsModelManager.ts index 8a7ac3aafc5..c826de613c3 100644 --- a/packages/api-headless-cms/src/modelManager/DefaultCmsModelManager.ts +++ b/packages/api-headless-cms/src/modelManager/DefaultCmsModelManager.ts @@ -1,12 +1,13 @@ -import { - CmsModelManager, - CmsModel, +import type { CmsContext, + CmsDeleteEntryOptions, CmsEntryListParams, + CmsModel, + CmsModelManager, CreateCmsEntryInput, + CreateCmsEntryOptionsInput, UpdateCmsEntryInput, - UpdateCmsEntryOptionsInput, - CreateCmsEntryOptionsInput + UpdateCmsEntryOptionsInput } from "~/types"; import { parseIdentifier } from "@webiny/utils"; @@ -23,13 +24,13 @@ export class DefaultCmsModelManager implements CmsModelManager { return this._context.cms.createEntry(this.model, data, options); } - public async delete(id: string) { + public async delete(id: string, options?: CmsDeleteEntryOptions) { const { version } = parseIdentifier(id); if (version) { return this._context.cms.deleteEntryRevision(this.model, id); } - return this._context.cms.deleteEntry(this.model, id); + return this._context.cms.deleteEntry(this.model, id, options); } public async get(id: string) { diff --git a/packages/api-headless-cms/src/types/types.ts b/packages/api-headless-cms/src/types/types.ts index 7736379b85f..950033eafa9 100644 --- a/packages/api-headless-cms/src/types/types.ts +++ b/packages/api-headless-cms/src/types/types.ts @@ -733,7 +733,7 @@ export interface CmsModelManager { /** * Delete an entry. 
*/ - delete(id: string): Promise<void>; + delete(id: string, options?: CmsDeleteEntryOptions): Promise<void>; } export type ICmsEntryManager = CmsModelManager; diff --git a/packages/api-i18n-ddb/src/definitions/localeEntity.ts b/packages/api-i18n-ddb/src/definitions/localeEntity.ts index 4513054a2d7..bdbed1a9747 100644 --- a/packages/api-i18n-ddb/src/definitions/localeEntity.ts +++ b/packages/api-i18n-ddb/src/definitions/localeEntity.ts @@ -1,15 +1,19 @@ -import { Entity, Table } from "@webiny/db-dynamodb/toolbox"; -import { I18NContext } from "@webiny/api-i18n/types"; -import { getExtraAttributes } from "@webiny/db-dynamodb/utils/attributes"; +import type { Table } from "@webiny/db-dynamodb/toolbox"; +import type { I18NContext } from "@webiny/api-i18n/types"; +import { getExtraAttributesFromPlugins } from "@webiny/db-dynamodb/utils/attributes"; +import type { IEntity } from "@webiny/db-dynamodb"; +import { createEntity } from "@webiny/db-dynamodb"; -export default (params: { +export interface ILocaleEntityParams { context: I18NContext; table: Table; -}): Entity => { +} + +export default (params: ILocaleEntityParams): IEntity => { const { context, table } = params; const entityName = "I18NLocale"; - const attributes = getExtraAttributes(context, entityName); - return new Entity({ + const attributes = getExtraAttributesFromPlugins(context.plugins, entityName); + return createEntity({ name: entityName, table, attributes: { diff --git a/packages/api-i18n-ddb/src/definitions/systemEntity.ts b/packages/api-i18n-ddb/src/definitions/systemEntity.ts index a2e80306a98..44cf24d3800 100644 --- a/packages/api-i18n-ddb/src/definitions/systemEntity.ts +++ b/packages/api-i18n-ddb/src/definitions/systemEntity.ts @@ -1,15 +1,17 @@ -import { Entity, Table } from "@webiny/db-dynamodb/toolbox"; -import { I18NContext } from "@webiny/api-i18n/types"; -import { getExtraAttributes } from "@webiny/db-dynamodb/utils/attributes"; +import type { Table } from "@webiny/db-dynamodb/toolbox"; +import type { I18NContext } from "@webiny/api-i18n/types"; +import { getExtraAttributesFromPlugins } from "@webiny/db-dynamodb/utils/attributes"; +import type { IEntity } from "@webiny/db-dynamodb"; +import { createEntity } from "@webiny/db-dynamodb"; export default (params: { - context: I18NContext; + context: Pick<I18NContext, "plugins">; table: Table; -}): Entity => { +}): IEntity => { const { context, table } = params; const entityName = "I18NSystem"; - const attributes = getExtraAttributes(context, entityName); - return new Entity({ + const attributes = getExtraAttributesFromPlugins(context.plugins, entityName); + return createEntity({ name: entityName, table, attributes: { diff --git a/packages/api-i18n-ddb/src/operations/locales/LocalesStorageOperations.ts b/packages/api-i18n-ddb/src/operations/locales/LocalesStorageOperations.ts index 96abf44272f..18e344863d6 100644 --- a/packages/api-i18n-ddb/src/operations/locales/LocalesStorageOperations.ts +++ b/packages/api-i18n-ddb/src/operations/locales/LocalesStorageOperations.ts @@ -1,4 +1,4 @@ -import { +import type { I18NContext, I18NLocaleData, I18NLocalesStorageOperations, @@ -11,17 +11,13 @@ I18NLocalesStorageOperationsUpdateDefaultParams, I18NLocalesStorageOperationsUpdateParams } from "@webiny/api-i18n/types"; -import { Entity, Table } from "@webiny/db-dynamodb/toolbox"; +import type { Table } from "@webiny/db-dynamodb/toolbox"; import WebinyError from "@webiny/error"; import defineTable from "~/definitions/table"; import defineLocaleEntity from "~/definitions/localeEntity"; -import { queryAll,
QueryAllParams } from "@webiny/db-dynamodb/utils/query"; -import { filterItems } from "@webiny/db-dynamodb/utils/filter"; -import { sortItems } from "@webiny/db-dynamodb/utils/sort"; -import { createListResponse } from "@webiny/db-dynamodb/utils/listResponse"; -import { cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; +import type { IEntity, IEntityQueryAllParams } from "@webiny/db-dynamodb"; +import { createListResponse, filterItems, sortItems } from "@webiny/db-dynamodb"; import { LocaleDynamoDbFieldPlugin } from "~/plugins/LocaleDynamoDbFieldPlugin"; -import { deleteItem, getClean, put } from "@webiny/db-dynamodb"; interface ConstructorParams { context: I18NContext; @@ -32,7 +28,7 @@ const DEFAULT_SORT_KEY = "default"; export class LocalesStorageOperations implements I18NLocalesStorageOperations { private readonly context: I18NContext; private readonly table: Table; - private readonly entity: Entity; + private readonly entity: IEntity; public constructor({ context }: ConstructorParams) { this.context = context; @@ -48,12 +44,9 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { public async getDefault(params: I18NLocalesStorageOperationsGetDefaultParams) { try { - return await getClean({ - entity: this.entity, - keys: { - PK: this.createDefaultPartitionKey(params), - SK: DEFAULT_SORT_KEY - } + return this.entity.getClean({ + PK: this.createDefaultPartitionKey(params), + SK: DEFAULT_SORT_KEY }); } catch (ex) { throw new WebinyError( @@ -65,12 +58,9 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { public async get(params: I18NLocalesStorageOperationsGetParams) { try { - return await getClean({ - entity: this.entity, - keys: { - PK: this.createPartitionKey(params), - SK: params.code - } + return this.entity.getClean({ + PK: this.createPartitionKey(params), + SK: params.code }); } catch (ex) { throw new WebinyError( @@ -89,12 +79,9 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { }; try { - await put({ - entity: this.entity, - item: { - ...locale, - ...keys - } + await this.entity.put({ + ...locale, + ...keys }); return locale; } catch (ex) { @@ -117,12 +104,9 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { SK: this.getSortKey(locale) }; try { - await put({ - entity: this.entity, - item: { - ...locale, - ...keys - } + await this.entity.put({ + ...locale, + ...keys }); return locale; } catch (ex) { @@ -144,23 +128,24 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { /** * Set the locale as the default one. */ - const batch = [ - { - ...locale, - PK: this.createPartitionKey(locale), - SK: this.getSortKey(locale) - }, - { - ...locale, - PK: this.createDefaultPartitionKey(locale), - SK: DEFAULT_SORT_KEY - } - ]; + const entityBatch = this.entity.createEntityWriter(); + + entityBatch.put({ + ...locale, + PK: this.createPartitionKey(locale), + SK: this.getSortKey(locale) + }); + entityBatch.put({ + ...locale, + PK: this.createDefaultPartitionKey(locale), + SK: DEFAULT_SORT_KEY + }); + /** * Set the previous locale not to be default in its data. 
*/ if (previous) { - batch.push({ + entityBatch.put({ ...previous, default: false, PK: this.createPartitionKey(locale), @@ -168,8 +153,10 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { }); } + const batch = entityBatch.items; + try { - await this.table.batchWrite(batch.map(item => this.entity.putBatch(item))); + await entityBatch.execute(); return locale; } catch (ex) { throw new WebinyError( @@ -190,10 +177,7 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { SK: this.getSortKey(locale) }; try { - await deleteItem({ - entity: this.entity, - keys - }); + await this.entity.delete(keys); } catch (ex) { throw new WebinyError( ex.message || "Cannot delete I18N locale.", @@ -220,7 +204,7 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { let results: I18NLocaleData[] = []; try { - results = await queryAll(queryAllParams); + results = await this.entity.queryAll(queryAllParams); } catch (ex) { throw new WebinyError( ex.message || "Cannot list I18N locales.", @@ -256,7 +240,7 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { * Use the common db-dynamodb method to create the required response. */ return createListResponse({ - items: cleanupItems(this.entity, sortedFiles), + items: sortedFiles, after, totalCount, limit @@ -282,7 +266,7 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { private createQueryAllParamsOptions( params: I18NLocalesStorageOperationsListParams - ): QueryAllParams { + ): IEntityQueryAllParams { const { where } = params; const tenant = where.tenant; @@ -295,7 +279,6 @@ export class LocalesStorageOperations implements I18NLocalesStorageOperations { delete where.default; } return { - entity: this.entity, partitionKey, options: {} }; diff --git a/packages/api-i18n-ddb/src/operations/system/SystemStorageOperations.ts b/packages/api-i18n-ddb/src/operations/system/SystemStorageOperations.ts index e333f8eef59..b840cf0c0e3 100644 --- a/packages/api-i18n-ddb/src/operations/system/SystemStorageOperations.ts +++ b/packages/api-i18n-ddb/src/operations/system/SystemStorageOperations.ts @@ -1,15 +1,14 @@ -import { +import type { I18NContext, I18NSystem, I18NSystemStorageOperations, I18NSystemStorageOperationsCreate, I18NSystemStorageOperationsUpdate } from "@webiny/api-i18n/types"; -import { Entity } from "@webiny/db-dynamodb/toolbox"; import WebinyError from "@webiny/error"; import defineSystemEntity from "~/definitions/systemEntity"; import defineTable from "~/definitions/table"; -import { getClean, put } from "@webiny/db-dynamodb"; +import type { IEntity } from "@webiny/db-dynamodb"; interface ConstructorParams { context: I18NContext; @@ -19,7 +18,7 @@ const SORT_KEY = "I18N"; export class SystemStorageOperations implements I18NSystemStorageOperations { private readonly _context: I18NContext; - private readonly _entity: Entity; + private readonly entity: IEntity; private get partitionKey(): string { const tenant = this._context.tenancy.getCurrentTenant(); @@ -35,7 +34,7 @@ export class SystemStorageOperations implements I18NSystemStorageOperations { context }); - this._entity = defineSystemEntity({ + this.entity = defineSystemEntity({ context, table }); @@ -48,10 +47,7 @@ export class SystemStorageOperations implements I18NSystemStorageOperations { }; try { - return await getClean({ - entity: this._entity, - keys - }); + return await this.entity.getClean(keys); } catch (ex) { throw new WebinyError( "Could not load system data 
from the database.", @@ -67,12 +63,9 @@ SK: SORT_KEY }; try { - await put({ - entity: this._entity, - item: { - ...system, - ...keys - } + await this.entity.put({ + ...system, + ...keys }); return system; } catch (ex) { @@ -90,12 +83,9 @@ SK: SORT_KEY }; try { - await put({ - entity: this._entity, - item: { - ...system, - ...keys - } + await this.entity.put({ + ...system, + ...keys }); return system; } catch (ex) { diff --git a/packages/api-i18n/src/graphql/context.ts b/packages/api-i18n/src/graphql/context.ts index 4aaa948073a..9293ea3af4b 100644 --- a/packages/api-i18n/src/graphql/context.ts +++ b/packages/api-i18n/src/graphql/context.ts @@ -239,6 +239,25 @@ return results; }; + const withLocale: I18NContextObject["withLocale"] = async (locale, cb) => { + const initialLocale = getDefaultLocale(); + if (!initialLocale) { + return; + } + + setContentLocale(locale); + setCurrentLocale("default", locale); + + try { + // We have to await the callback, because, in case it's an async function, + // the `finally` block would get executed before the callback finishes. + return await cb(); + } finally { + setContentLocale(initialLocale); + setCurrentLocale("default", initialLocale); + } + }; + context.i18n = { ...context.i18n, getDefaultLocale, @@ -252,7 +271,8 @@ reloadLocales, hasI18NContentPermission: () => hasI18NContentPermission(context), checkI18NContentPermission, - withEachLocale + withEachLocale, + withLocale }; }); }; diff --git a/packages/api-i18n/src/types.ts b/packages/api-i18n/src/types.ts index f0b83d43d04..407d6aea0bb 100644 --- a/packages/api-i18n/src/types.ts +++ b/packages/api-i18n/src/types.ts @@ -44,6 +44,10 @@ locales: I18NLocale[], cb: (locale: I18NLocale) => Promise<T> ) => Promise<T[]>; + withLocale: <T>( + locale: I18NLocale, + cb: () => Promise<T> + ) => Promise<T | undefined>; } export interface SystemInstallParams { diff --git a/packages/api-page-builder-so-ddb-es/src/definitions/pageElasticsearchEntity.ts b/packages/api-page-builder-so-ddb-es/src/definitions/pageElasticsearchEntity.ts index 0d0bf837a21..5d21fc487f1 100644 --- a/packages/api-page-builder-so-ddb-es/src/definitions/pageElasticsearchEntity.ts +++ b/packages/api-page-builder-so-ddb-es/src/definitions/pageElasticsearchEntity.ts @@ -25,6 +25,9 @@ data: { type: "map" }, + TYPE: { + type: "string" + }, ...(attributes || {}) } }); diff --git a/packages/api-page-builder-so-ddb-es/src/operations/blockCategory/dataLoader.ts b/packages/api-page-builder-so-ddb-es/src/operations/blockCategory/dataLoader.ts index 5a158ff5aad..1fd149702d9 100644 --- a/packages/api-page-builder-so-ddb-es/src/operations/blockCategory/dataLoader.ts +++ b/packages/api-page-builder-so-ddb-es/src/operations/blockCategory/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { BlockCategory } from "@webiny/api-page-builder/types"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; import { Entity } from "@webiny/db-dynamodb/toolbox"; diff --git a/packages/api-page-builder-so-ddb-es/src/operations/category/dataLoader.ts
b/packages/api-page-builder-so-ddb-es/src/operations/category/dataLoader.ts index 50a0d6a8358..913ee4fc3b0 100644 --- a/packages/api-page-builder-so-ddb-es/src/operations/category/dataLoader.ts +++ b/packages/api-page-builder-so-ddb-es/src/operations/category/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { Category } from "@webiny/api-page-builder/types"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; import { Entity } from "@webiny/db-dynamodb/toolbox"; diff --git a/packages/api-page-builder-so-ddb-es/src/operations/pageBlock/dataLoader.ts b/packages/api-page-builder-so-ddb-es/src/operations/pageBlock/dataLoader.ts index c9505ba7858..b3614d766f6 100644 --- a/packages/api-page-builder-so-ddb-es/src/operations/pageBlock/dataLoader.ts +++ b/packages/api-page-builder-so-ddb-es/src/operations/pageBlock/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { PageBlock } from "@webiny/api-page-builder/types"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; import { Entity } from "@webiny/db-dynamodb/toolbox"; diff --git a/packages/api-page-builder-so-ddb-es/src/operations/pageTemplate/dataLoader.ts b/packages/api-page-builder-so-ddb-es/src/operations/pageTemplate/dataLoader.ts index 32437cbacc9..248b6ffe129 100644 --- a/packages/api-page-builder-so-ddb-es/src/operations/pageTemplate/dataLoader.ts +++ b/packages/api-page-builder-so-ddb-es/src/operations/pageTemplate/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { PageTemplate } from "@webiny/api-page-builder/types"; import { Entity } from "@webiny/db-dynamodb/toolbox"; import { createPrimaryPK } from "./keys"; diff --git a/packages/api-page-builder-so-ddb-es/src/operations/pages/index.ts b/packages/api-page-builder-so-ddb-es/src/operations/pages/index.ts index 2bde41b247c..b523ec37d03 100644 --- a/packages/api-page-builder-so-ddb-es/src/operations/pages/index.ts +++ b/packages/api-page-builder-so-ddb-es/src/operations/pages/index.ts @@ -1,4 +1,4 @@ -import { +import type { Page, PageStorageOperations, PageStorageOperationsCreateFromParams, @@ -14,12 +14,12 @@ import { PageStorageOperationsUnpublishParams, PageStorageOperationsUpdateParams } from "@webiny/api-page-builder/types"; -import { Entity } from "@webiny/db-dynamodb/toolbox"; +import type { Entity } from "@webiny/db-dynamodb/toolbox"; import omit from "lodash/omit"; import WebinyError from "@webiny/error"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; -import { Client } from "@elastic/elasticsearch"; -import { +import type { Client } from "@elastic/elasticsearch"; +import type { ElasticsearchBoolQueryConfig, ElasticsearchSearchResponse } from "@webiny/api-elasticsearch/types"; @@ -28,11 +28,18 @@ import { createLimit, encodeCursor } from "@webiny/api-elasticsearch"; import { createElasticsearchQueryBody } from "./elasticsearchQueryBody"; import { SearchLatestPagesPlugin } from "~/plugins/definitions/SearchLatestPagesPlugin"; import { SearchPublishedPagesPlugin } from "~/plugins/definitions/SearchPublishedPagesPlugin"; -import { DbItem, queryAll, QueryAllParams, queryOne } from 
"@webiny/db-dynamodb/utils/query"; +import { + createEntityWriteBatch, + getClean, + put, + queryAll, + QueryAllParams, + queryOne, + sortItems +} from "@webiny/db-dynamodb"; import { SearchPagesPlugin } from "~/plugins/definitions/SearchPagesPlugin"; -import { batchWriteAll } from "@webiny/db-dynamodb/utils/batchWrite"; import { getESLatestPageData, getESPublishedPageData } from "./helpers"; -import { PluginsContainer } from "@webiny/plugins"; +import type { PluginsContainer } from "@webiny/plugins"; import { createBasicType, createLatestSortKey, @@ -45,9 +52,7 @@ import { createPublishedType, createSortKey } from "./keys"; -import { sortItems } from "@webiny/db-dynamodb/utils/sort"; import { PageDynamoDbElasticsearchFieldPlugin } from "~/plugins/definitions/PageDynamoDbElasticsearchFieldPlugin"; -import { getClean, put } from "@webiny/db-dynamodb"; import { shouldIgnoreEsResponseError } from "~/operations/pages/shouldIgnoreEsResponseError"; import { logIgnoredEsResponseError } from "~/operations/pages/logIgnoredEsResponseError"; @@ -81,24 +86,26 @@ export const createPageStorageOperations = ( SK: createLatestSortKey() }; - const items = [ - entity.putBatch({ - ...page, - ...versionKeys, - TYPE: createBasicType() - }), - entity.putBatch({ - ...page, - ...latestKeys, - TYPE: createLatestType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + ...versionKeys, + TYPE: createBasicType() + }, + { + ...page, + ...latestKeys, + TYPE: createLatestType() + } + ] + }); + const esData = getESLatestPageData(plugins, page, input); try { - await batchWriteAll({ - table: entity.table, - items: items - }); + await entityBatch.execute(); + await put({ entity: esEntity, item: { @@ -133,26 +140,26 @@ export const createPageStorageOperations = ( SK: createLatestSortKey() }; - const items = [ - entity.putBatch({ - ...page, - TYPE: createBasicType(), - ...versionKeys - }), - entity.putBatch({ - ...page, - TYPE: createLatestType(), - ...latestKeys - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + TYPE: createBasicType(), + ...versionKeys + }, + { + ...page, + TYPE: createLatestType(), + ...latestKeys + } + ] + }); const esData = getESLatestPageData(plugins, page); try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); await put({ entity: esEntity, @@ -195,13 +202,16 @@ export const createPageStorageOperations = ( keys: latestKeys }); - const items = [ - entity.putBatch({ - ...page, - TYPE: createBasicType(), - ...keys - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + TYPE: createBasicType(), + ...keys + } + ] + }); const esData = getESLatestPageData(plugins, page, input); @@ -209,22 +219,17 @@ export const createPageStorageOperations = ( /** * We also update the regular record. */ - items.push( - entity.putBatch({ - ...page, - TYPE: createLatestType(), - ...latestKeys - }) - ); + entityBatch.put({ + ...page, + TYPE: createLatestType(), + ...latestKeys + }); } /** * Unfortunately we cannot push regular and es record in the batch write because they are two separate tables. 
*/ try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); await put({ entity: esEntity, @@ -265,32 +270,35 @@ const partitionKey = createPartitionKey(page); - const items = [ - entity.deleteBatch({ - PK: partitionKey, - SK: createSortKey(page) - }) - ]; - const esItems = []; - if (publishedPage && publishedPage.id === page.id) { - items.push( - entity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }) - ); - items.push( - entity.deleteBatch({ - PK: createPathPartitionKey(page), - SK: createPathSortKey(page) - }) - ); - esItems.push( - esEntity.deleteBatch({ + const entityBatch = createEntityWriteBatch({ + entity, + delete: [ + { PK: partitionKey, - SK: createPublishedSortKey() - }) - ); + SK: createSortKey(page) + } + ] + }); + + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); + + if (publishedPage && publishedPage.id === page.id) { + entityBatch.delete({ + PK: partitionKey, + SK: createPublishedSortKey() + }); + + entityBatch.delete({ + PK: createPathPartitionKey(page), + SK: createPathSortKey(page) + }); + + elasticsearchEntityBatch.delete({ + PK: partitionKey, + SK: createPublishedSortKey() + }); } let previousLatestPage: Page | null = null; if (latestPage && latestPage.id === page.id) { @@ -303,44 +311,34 @@ } }); if (previousLatestRecord) { - items.push( - entity.putBatch({ - ...previousLatestRecord, - TYPE: createLatestType(), - PK: partitionKey, - SK: createLatestSortKey() - }) - ); - esItems.push( - esEntity.putBatch({ - PK: partitionKey, - SK: createLatestSortKey(), - index: configurations.es(page).index, - data: getESLatestPageData(plugins, previousLatestRecord) - }) - ); + entityBatch.put({ + ...previousLatestRecord, + TYPE: createLatestType(), + PK: partitionKey, + SK: createLatestSortKey() + }); + + elasticsearchEntityBatch.put({ + PK: partitionKey, + SK: createLatestSortKey(), + index: configurations.es(page).index, + data: getESLatestPageData(plugins, previousLatestRecord) + }); + previousLatestPage = cleanupItem(entity, previousLatestRecord); } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not batch write all the page records.", ex.code || "BATCH_WRITE_RECORDS_ERROR" ); } - if (esItems.length === 0) { - return [page, previousLatestPage]; - } + try { - await batchWriteAll({ - table: entity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not batch write all the page Elasticsearch records.", @@ -370,7 +368,7 @@ gte: " " } }; - let revisions: DbItem[]; + let revisions: Awaited<ReturnType<typeof queryAll>>; try { revisions = await queryAll(queryAllParams); } catch (ex) { @@ -387,48 +385,45 @@ * We need to go through all possible entries and delete them. * Also, delete the published entry path record.
*/ - const items = []; + + const entityBatch = createEntityWriteBatch({ + entity + }); + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); + let publishedPathEntryDeleted = false; for (const revision of revisions) { if (revision.status === "published" && !publishedPathEntryDeleted) { publishedPathEntryDeleted = true; - items.push( - entity.deleteBatch({ - PK: createPathPartitionKey(page), - SK: revision.path - }) - ); + entityBatch.delete({ + PK: createPathPartitionKey(page), + SK: revision.path + }); } - items.push( - entity.deleteBatch({ - PK: revision.PK, - SK: revision.SK - }) - ); + entityBatch.delete({ + PK: revision.PK, + SK: revision.SK + }); } - const esItems = [ - esEntity.deleteBatch({ - PK: partitionKey, - SK: createLatestSortKey() - }) - ]; + elasticsearchEntityBatch.delete({ + PK: partitionKey, + SK: createLatestSortKey() + }); + /** * Delete published record if it is published. */ if (publishedPathEntryDeleted) { - esItems.push( - esEntity.deleteBatch({ - PK: partitionKey, - SK: createPublishedSortKey() - }) - ); + elasticsearchEntityBatch.delete({ + PK: partitionKey, + SK: createPublishedSortKey() + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not delete all the page records.", @@ -436,10 +431,7 @@ export const createPageStorageOperations = ( ); } try { - await batchWriteAll({ - table: entity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not delete all the page Elasticsearch records.", @@ -457,118 +449,100 @@ export const createPageStorageOperations = ( /** * Update the given revision of the page. */ - const items = [ - entity.putBatch({ - ...page, - TYPE: createBasicType(), - PK: createPartitionKey(page), - SK: createSortKey(page) - }) - ]; - const esItems = []; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + TYPE: createBasicType(), + PK: createPartitionKey(page), + SK: createSortKey(page) + } + ] + }); + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity + }); /** * If we are publishing the latest revision, update the latest revision * status in ES. We also need to update the latest page revision entry in ES. 
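
For orientation while reading these delete/publish hunks: a single page fans out into several single-table records, each addressed through the key helpers imported from `./keys`. The shapes below are purely illustrative; the real formats live in `./keys` and are not part of this diff:

```ts
// Hypothetical key layout for one page, for orientation only.
const fanOut = [
    { PK: "PB#P#<pageId>", SK: "REV#0002", TYPE: "pb.page" },    // one per revision
    { PK: "PB#P#<pageId>", SK: "L", TYPE: "pb.page.l" },         // "latest" pointer
    { PK: "PB#P#<pageId>", SK: "P", TYPE: "pb.page.p" },         // "published" pointer
    { PK: "PB#PATH", SK: "/about-us", TYPE: "pb.page.p.path" }   // published-path lookup
];
```

A delete has to touch every one of these records, which is why the batches above mix revision, published, and path deletes with a conditional re-pointing of the "latest" record.
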
*/ if (latestPage.id === page.id) { - items.push( - entity.putBatch({ - ...page, - TYPE: createLatestType(), - PK: createPartitionKey(page), - SK: createLatestSortKey() - }) - ); + entityBatch.put({ + ...page, + TYPE: createLatestType(), + PK: createPartitionKey(page), + SK: createLatestSortKey() + }); - esItems.push( - esEntity.putBatch({ - PK: createPartitionKey(page), - SK: createLatestSortKey(), - index: configurations.es(page).index, - data: getESLatestPageData(plugins, page) - }) - ); + elasticsearchEntityBatch.put({ + PK: createPartitionKey(page), + SK: createLatestSortKey(), + index: configurations.es(page).index, + data: getESLatestPageData(plugins, page) + }); } /** * If we already have a published revision, and it's not the revision being published: * - set the existing published revision to "unpublished" */ if (publishedPage && publishedPage.id !== page.id) { - items.push( - entity.putBatch({ - ...publishedPage, - status: "unpublished", - PK: createPartitionKey(publishedPage), - SK: createSortKey(publishedPage) - }) - ); + entityBatch.put({ + ...publishedPage, + status: "unpublished", + PK: createPartitionKey(publishedPage), + SK: createSortKey(publishedPage) + }); + /** * Remove old published path if required. */ if (publishedPage.path !== page.path) { - items.push( - entity.deleteBatch({ - PK: createPathPartitionKey(page), - SK: publishedPage.path - }) - ); + entityBatch.delete({ + PK: createPathPartitionKey(page), + SK: publishedPage.path + }); } } - esItems.push( - esEntity.putBatch({ - PK: createPartitionKey(page), - SK: createPublishedSortKey(), - index: configurations.es(page).index, - data: getESPublishedPageData(plugins, page) - }) - ); + elasticsearchEntityBatch.put({ + PK: createPartitionKey(page), + SK: createPublishedSortKey(), + index: configurations.es(page).index, + data: getESPublishedPageData(plugins, page) + }); /** * Update or insert published path. */ - items.push( - entity.putBatch({ - ...page, - TYPE: createPublishedPathType(), - PK: createPathPartitionKey(page), - SK: createPathSortKey(page) - }) - ); + entityBatch.put({ + ...page, + TYPE: createPublishedPathType(), + PK: createPathPartitionKey(page), + SK: createPathSortKey(page) + }); + /** * Update or insert published page. 
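
All of the `esEntity` writes in this publish flow share one record shape: the DynamoDB keys plus an `index` and a `data` attribute. The record never reaches Elasticsearch directly; it lands in the Elasticsearch DynamoDB table, and the stream-based DynamoDB-to-Elasticsearch sync then uses `index` to decide which Elasticsearch index receives `data`. Roughly (values are illustrative):

```ts
// Illustrative shape of an Elasticsearch "pointer" record.
const esPublishedRecord = {
    PK: "PB#P#<pageId>",
    SK: "P",
    index: "root-en-us-page-builder", // configurations.es(page).index
    data: {
        // ...output of getESPublishedPageData(plugins, page)...
    }
};
```
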
*/ - items.push( - entity.putBatch({ - ...page, - TYPE: createPublishedType(), - PK: createPartitionKey(page), - SK: createPublishedSortKey() - }) - ); + entityBatch.put({ + ...page, + TYPE: createPublishedType(), + PK: createPartitionKey(page), + SK: createPublishedSortKey() + }); try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not update all the page records when publishing.", ex.code || "UPDATE_RECORDS_ERROR" ); } - /** - * No point in continuing if there are no items in Elasticsearch data - */ - if (esItems.length === 0) { - return page; - } + try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || @@ -584,74 +558,67 @@ export const createPageStorageOperations = ( page.status = "unpublished"; - const items = [ - entity.deleteBatch({ - PK: createPartitionKey(page), - SK: createPublishedSortKey() - }), - entity.deleteBatch({ - PK: createPathPartitionKey(page), - SK: createPathSortKey(page) - }), - entity.putBatch({ - ...page, - TYPE: createBasicType(), - PK: createPartitionKey(page), - SK: createSortKey(page) - }) - ]; - const esItems = []; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [ + { + PK: createPartitionKey(page), + SK: createPublishedSortKey() + }, + { + PK: createPathPartitionKey(page), + SK: createPathSortKey(page) + } + ], + put: [ + { + ...page, + TYPE: createBasicType(), + PK: createPartitionKey(page), + SK: createSortKey(page) + } + ] + }); + + const elasticsearchEntityBatch = createEntityWriteBatch({ + entity: esEntity, + delete: [ + { + PK: createPartitionKey(page), + SK: createPublishedSortKey() + } + ] + }); /* * If we are unpublishing the latest revision, let's also update the latest revision entry's status in ES. */ if (latestPage.id === page.id) { - items.push( - entity.putBatch({ - ...page, - TYPE: createLatestType(), - PK: createPartitionKey(page), - SK: createLatestSortKey() - }) - ); - esItems.push( - esEntity.putBatch({ - PK: createPartitionKey(page), - SK: createLatestSortKey(), - index: configurations.es(page).index, - data: getESLatestPageData(plugins, page) - }) - ); - } + entityBatch.put({ + ...page, + TYPE: createLatestType(), + PK: createPartitionKey(page), + SK: createLatestSortKey() + }); - esItems.push( - esEntity.deleteBatch({ + elasticsearchEntityBatch.put({ PK: createPartitionKey(page), - SK: createPublishedSortKey() - }) - ); + SK: createLatestSortKey(), + index: configurations.es(page).index, + data: getESLatestPageData(plugins, page) + }); + } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not update all the page records when unpublishing.", ex.code || "UPDATE_RECORDS_ERROR" ); } - /** - * No need to go further if no Elasticsearch items to be applied. 
- */ - if (esItems.length === 0) { - return page; - } + try { - await batchWriteAll({ - table: esEntity.table, - items: esItems - }); + await elasticsearchEntityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || diff --git a/packages/api-page-builder-so-ddb/package.json b/packages/api-page-builder-so-ddb/package.json index ce0353d12e0..9ceb55a9e92 100644 --- a/packages/api-page-builder-so-ddb/package.json +++ b/packages/api-page-builder-so-ddb/package.json @@ -25,8 +25,7 @@ "@webiny/error": "0.0.0", "@webiny/handler-db": "0.0.0", "@webiny/utils": "0.0.0", - "dataloader": "^2.0.0", - "lodash": "^4.17.21" + "dataloader": "^2.0.0" }, "devDependencies": { "@babel/cli": "^7.23.9", diff --git a/packages/api-page-builder-so-ddb/src/operations/blockCategory/dataLoader.ts b/packages/api-page-builder-so-ddb/src/operations/blockCategory/dataLoader.ts index a8112be7e37..cd030e6a912 100644 --- a/packages/api-page-builder-so-ddb/src/operations/blockCategory/dataLoader.ts +++ b/packages/api-page-builder-so-ddb/src/operations/blockCategory/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { BlockCategory } from "@webiny/api-page-builder/types"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; import { Entity } from "@webiny/db-dynamodb/toolbox"; diff --git a/packages/api-page-builder-so-ddb/src/operations/category/dataLoader.ts b/packages/api-page-builder-so-ddb/src/operations/category/dataLoader.ts index 2c9ebc9c70a..661b32359d6 100644 --- a/packages/api-page-builder-so-ddb/src/operations/category/dataLoader.ts +++ b/packages/api-page-builder-so-ddb/src/operations/category/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { Category } from "@webiny/api-page-builder/types"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; import { Entity } from "@webiny/db-dynamodb/toolbox"; diff --git a/packages/api-page-builder-so-ddb/src/operations/pageBlock/dataLoader.ts b/packages/api-page-builder-so-ddb/src/operations/pageBlock/dataLoader.ts index c9505ba7858..b3614d766f6 100644 --- a/packages/api-page-builder-so-ddb/src/operations/pageBlock/dataLoader.ts +++ b/packages/api-page-builder-so-ddb/src/operations/pageBlock/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { PageBlock } from "@webiny/api-page-builder/types"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; import { Entity } from "@webiny/db-dynamodb/toolbox"; diff --git a/packages/api-page-builder-so-ddb/src/operations/pageTemplate/dataLoader.ts b/packages/api-page-builder-so-ddb/src/operations/pageTemplate/dataLoader.ts index 32437cbacc9..248b6ffe129 100644 --- a/packages/api-page-builder-so-ddb/src/operations/pageTemplate/dataLoader.ts +++ b/packages/api-page-builder-so-ddb/src/operations/pageTemplate/dataLoader.ts @@ -1,5 +1,5 @@ import DataLoader from "dataloader"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; +import { batchReadAll } from "@webiny/db-dynamodb"; import { PageTemplate } from "@webiny/api-page-builder/types"; import { Entity } from "@webiny/db-dynamodb/toolbox"; import { createPrimaryPK } from "./keys"; diff --git 
a/packages/api-page-builder-so-ddb/src/operations/pages/index.ts b/packages/api-page-builder-so-ddb/src/operations/pages/index.ts index f2619b1fdb7..1938bace62b 100644 --- a/packages/api-page-builder-so-ddb/src/operations/pages/index.ts +++ b/packages/api-page-builder-so-ddb/src/operations/pages/index.ts @@ -1,6 +1,5 @@ import WebinyError from "@webiny/error"; -import lodashGet from "lodash/get"; -import { +import type { Page, PageStorageOperations, PageStorageOperationsCreateFromParams, @@ -18,21 +17,21 @@ import { PageStorageOperationsUpdateParams } from "@webiny/api-page-builder/types"; import { getClean } from "@webiny/db-dynamodb/utils/get"; -import { Entity } from "@webiny/db-dynamodb/toolbox"; +import type { Entity } from "@webiny/db-dynamodb/toolbox"; import { cleanupItem } from "@webiny/db-dynamodb/utils/cleanup"; +import type { QueryAllParams } from "@webiny/db-dynamodb"; import { - DbItem, + createEntityWriteBatch, + decodeCursor, + encodeCursor, + filterItems, queryAll, - QueryAllParams, queryOne, - queryOneClean -} from "@webiny/db-dynamodb/utils/query"; -import { batchWriteAll } from "@webiny/db-dynamodb/utils/batchWrite"; -import { filterItems } from "@webiny/db-dynamodb/utils/filter"; -import { sortItems } from "@webiny/db-dynamodb/utils/sort"; -import { decodeCursor, encodeCursor } from "@webiny/db-dynamodb/utils/cursor"; + queryOneClean, + sortItems +} from "@webiny/db-dynamodb"; import { PageDynamoDbFieldPlugin } from "~/plugins/definitions/PageDynamoDbFieldPlugin"; -import { PluginsContainer } from "@webiny/plugins"; +import type { PluginsContainer } from "@webiny/plugins"; import { createLatestPartitionKey, createLatestSortKey, @@ -105,25 +104,26 @@ export const createPageStorageOperations = ( * - latest * - revision */ - const items = [ - entity.putBatch({ - ...page, - titleLC, - ...latestKeys, - TYPE: createLatestType() - }), - entity.putBatch({ - ...page, - titleLC, - ...revisionKeys, - TYPE: createRevisionType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + titleLC, + ...latestKeys, + TYPE: createLatestType() + }, + { + ...page, + titleLC, + ...revisionKeys, + TYPE: createRevisionType() + } + ] + }); + try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); return page; } catch (ex) { throw new WebinyError( @@ -154,24 +154,24 @@ export const createPageStorageOperations = ( * - latest * - revision */ - const items = [ - entity.putBatch({ - ...page, - ...latestKeys, - TYPE: createLatestType() - }), - entity.putBatch({ - ...page, - ...revisionKeys, - TYPE: createRevisionType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + ...latestKeys, + TYPE: createLatestType() + }, + { + ...page, + ...revisionKeys, + TYPE: createRevisionType() + } + ] + }); try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); return page; } catch (ex) { throw new WebinyError( @@ -211,33 +211,32 @@ export const createPageStorageOperations = ( * - revision * - latest if this is the latest */ - const items = [ - entity.putBatch({ - ...page, - titleLC, - ...revisionKeys, - TYPE: createRevisionType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + titleLC, + ...revisionKeys, + TYPE: createRevisionType() + } + ] + }); + /** * Latest if it is the one. 
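
Alongside the batch refactor, note the typing change in this file and its ddb-es sibling: the removed `DbItem[]` alias is replaced by deriving the local's type from `queryAll` itself. A sketch of the idiom, assuming `queryAll` is generic over the stored item shape:

```ts
import { queryAll } from "@webiny/db-dynamodb";
import type { QueryAllParams } from "@webiny/db-dynamodb";

// Illustrative record shape; the real code uses its own page types.
interface PageRecord {
    PK: string;
    SK: string;
    status: string;
    path: string;
}

const loadRevisions = async (params: QueryAllParams) => {
    // The annotation tracks queryAll's own signature (TypeScript >= 4.7)
    // instead of a separately maintained DbItem[] alias.
    let revisions: Awaited<ReturnType<typeof queryAll<PageRecord>>> = [];
    revisions = await queryAll<PageRecord>(params);
    return revisions;
};
```
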
*/ if (latestPage && latestPage.id === page.id) { - items.push( - entity.putBatch({ - ...page, - titleLC, - ...latestKeys, - TYPE: createLatestType() - }) - ); + entityBatch.put({ + ...page, + titleLC, + ...latestKeys, + TYPE: createLatestType() + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); return page; } catch (ex) { @@ -280,9 +279,13 @@ export const createPageStorageOperations = ( * We need to update * - latest, if it exists, with previous record */ - const items = [entity.deleteBatch(revisionKeys)]; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [revisionKeys] + }); + if (publishedPage && publishedPage.id === page.id) { - items.push(entity.deleteBatch(publishedKeys)); + entityBatch.delete(publishedKeys); } let previousLatestPage: Page | null = null; if (latestPage && latestPage.id === page.id) { @@ -296,21 +299,17 @@ export const createPageStorageOperations = ( } }); if (previousLatestRecord) { - items.push( - entity.putBatch({ - ...previousLatestRecord, - ...latestKeys, - TYPE: createLatestType() - }) - ); + entityBatch.put({ + ...previousLatestRecord, + ...latestKeys, + TYPE: createLatestType() + }); + previousLatestPage = cleanupItem(entity, previousLatestRecord); } } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not batch write all the page records.", @@ -347,11 +346,14 @@ export const createPageStorageOperations = ( SK: createPublishedSortKey(page) }; - const items = [entity.deleteBatch(latestKeys)]; + const entityBatch = createEntityWriteBatch({ + entity, + delete: [latestKeys] + }); - let revisions: DbItem[]; + let revisions: Awaited>> = []; try { - revisions = await queryAll(queryAllParams); + revisions = await queryAll(queryAllParams); } catch (ex) { throw new WebinyError( ex.message || "Could not query for all revisions of the page.", @@ -369,22 +371,17 @@ export const createPageStorageOperations = ( */ for (const revision of revisions) { if (!deletedPublishedRecord && revision.status === "published") { - items.push(entity.deleteBatch(publishedKeys)); + entityBatch.delete(publishedKeys); deletedPublishedRecord = true; } - items.push( - entity.deleteBatch({ - PK: revision.PK, - SK: revision.SK - }) - ); + entityBatch.delete({ + PK: revision.PK, + SK: revision.SK + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not delete all the page records.", @@ -420,22 +417,23 @@ export const createPageStorageOperations = ( /** * Update the given revision of the page. 
*/ - const items = [ - entity.putBatch({ - ...page, - ...revisionKeys, - TYPE: createRevisionType() - }) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + ...revisionKeys, + TYPE: createRevisionType() + } + ] + }); if (latestPage.id === page.id) { - items.push( - entity.putBatch({ - ...page, - ...latestKeys, - TYPE: createLatestType() - }) - ); + entityBatch.put({ + ...page, + ...latestKeys, + TYPE: createLatestType() + }); } /** * If we already have a published revision, and it's not the revision being published: @@ -446,31 +444,24 @@ export const createPageStorageOperations = ( PK: createRevisionPartitionKey(publishedPage), SK: createRevisionSortKey(publishedPage) }; - items.push( - entity.putBatch({ - ...publishedPage, - status: "unpublished", - ...publishedRevisionKeys, - TYPE: createRevisionType() - }) - ); + entityBatch.put({ + ...publishedPage, + status: "unpublished", + ...publishedRevisionKeys, + TYPE: createRevisionType() + }); } - items.push( - entity.putBatch({ - ...page, - ...publishedKeys, - GSI1_PK: createPathPartitionKey(page), - GSI1_SK: page.path, - TYPE: createPublishedType() - }) - ); + entityBatch.put({ + ...page, + ...publishedKeys, + GSI1_PK: createPathPartitionKey(page), + GSI1_SK: page.path, + TYPE: createPublishedType() + }); try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not update all the page records when publishing.", @@ -504,30 +495,28 @@ export const createPageStorageOperations = ( SK: createPublishedSortKey(page) }; - const items = [ - entity.putBatch({ - ...page, - ...revisionKeys, - TYPE: createRevisionType() - }), - entity.deleteBatch(publishedKeys) - ]; + const entityBatch = createEntityWriteBatch({ + entity, + put: [ + { + ...page, + ...revisionKeys, + TYPE: createRevisionType() + } + ], + delete: [publishedKeys] + }); if (latestPage.id === page.id) { - items.push( - entity.putBatch({ - ...page, - ...latestKeys, - TYPE: createLatestType() - }) - ); + entityBatch.put({ + ...page, + ...latestKeys, + TYPE: createLatestType() + }); } try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not update all the page records when unpublishing.", @@ -819,7 +808,7 @@ export const createPageStorageOperations = ( options }; - let pages: DbItem[] = []; + let pages: Awaited>> = []; try { pages = await queryAll(queryAllParams); } catch (ex) { @@ -833,22 +822,20 @@ export const createPageStorageOperations = ( ); } - const tags = pages.reduce((collection, page) => { - let list: string[] = lodashGet(page, "settings.general.tags") as unknown as string[]; - if (!list || list.length === 0) { - return collection; + const tags = new Set(); + for (const page of pages) { + let tagList = page.settings?.general?.tags; + if (!tagList?.length) { + continue; } else if (where.search) { const re = new RegExp(where.search, "i"); - list = list.filter(t => t.match(re) !== null); + tagList = tagList.filter(tag => !!tag && tag.match(re) !== null); } - - for (const t of list) { - collection[t] = undefined; + for (const tag of tagList) { + tags.add(tag); } - return collection; - }, {} as Record); - - return Object.keys(tags); + } + return Array.from(tags); }; return { diff --git a/packages/api-page-builder/__tests__/translations/translatableCollection/DeleteTranslatableCollectionUseCase.test.ts 
b/packages/api-page-builder/__tests__/translations/translatableCollection/DeleteTranslatableCollectionUseCase.test.ts new file mode 100644 index 00000000000..b269ea28818 --- /dev/null +++ b/packages/api-page-builder/__tests__/translations/translatableCollection/DeleteTranslatableCollectionUseCase.test.ts @@ -0,0 +1,38 @@ +import { useHandler } from "~tests/translations/useHandler"; +import { + DeleteTranslatableCollectionUseCase, + GetTranslatableCollectionUseCase, + SaveTranslatableCollectionUseCase +} from "~/translations"; + +describe("DeleteTranslatableCollectionUseCase", () => { + it("should delete a collection", async () => { + const { handler } = useHandler(); + const context = await handler(); + + // Setup + const saveTranslatableCollection = new SaveTranslatableCollectionUseCase(context); + const newCollection = await saveTranslatableCollection.execute({ + collectionId: "collection:1", + items: [ + { itemId: "element:1", value: "Value 1" }, + { itemId: "element:2", value: "Value 2" } + ] + }); + + const getTranslatableCollection = new GetTranslatableCollectionUseCase(context); + const collection = await getTranslatableCollection.execute(newCollection.getCollectionId()); + + expect(collection).toBeTruthy(); + expect(collection!.getCollectionId()).toEqual(newCollection.getCollectionId()); + + // Test + const deleteTranslatableCollection = new DeleteTranslatableCollectionUseCase(context); + await deleteTranslatableCollection.execute({ + collectionId: "collection:1" + }); + + const checkCollection = await getTranslatableCollection.execute("collection:1"); + expect(checkCollection).toBeUndefined(); + }); +}); diff --git a/packages/api-page-builder/__tests__/translations/translatedCollection/DeleteTranslatedCollectionUseCase.test.ts b/packages/api-page-builder/__tests__/translations/translatedCollection/DeleteTranslatedCollectionUseCase.test.ts new file mode 100644 index 00000000000..d0192645444 --- /dev/null +++ b/packages/api-page-builder/__tests__/translations/translatedCollection/DeleteTranslatedCollectionUseCase.test.ts @@ -0,0 +1,131 @@ +import { useHandler } from "~tests/translations/useHandler"; +import { + SaveTranslatableCollectionUseCase, + SaveTranslatableCollectionParams, + SaveTranslatedCollectionUseCase, + DeleteTranslatedCollectionUseCase, + GetTranslatedCollectionUseCase +} from "~/translations"; +import { PbContext } from "~/graphql/types"; + +const createTranslatableCollection = async ( + context: PbContext, + params: SaveTranslatableCollectionParams +) => { + const saveCollection = new SaveTranslatableCollectionUseCase(context); + await saveCollection.execute(params); +}; + +describe("DeleteTranslatedCollectionUseCase", () => { + it("should delete an entire collection with all translations", async () => { + const { handler } = useHandler(); + const context = await handler(); + + // Setup + await createTranslatableCollection(context, { + collectionId: "collection:1", + items: [ + { itemId: "element:1", value: "Value 1" }, + { itemId: "element:2", value: "Value 2" }, + { itemId: "element:3", value: "Value 3" } + ] + }); + + const saveTranslatedCollection = new SaveTranslatedCollectionUseCase(context); + await saveTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "en", + items: [ + { itemId: "element:1", value: "Translated Value 1 EN" }, + { itemId: "element:2", value: "Translated Value 2 EN" } + ] + }); + + await saveTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "de", + items: [ + { itemId: "element:1", 
value: "Translated Value 1 DE" }, + { itemId: "element:2", value: "Translated Value 2 DE" } + ] + }); + + // Test + const deleteTranslatedCollection = new DeleteTranslatedCollectionUseCase(context); + await deleteTranslatedCollection.execute({ collectionId: "collection:1" }); + + const getTranslatedCollection = new GetTranslatedCollectionUseCase(context); + + await expect( + getTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "en" + }) + ).rejects.toThrow("not found"); + + await expect( + getTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "de" + }) + ).rejects.toThrow("not found"); + }); + + it("should delete a collection for a given language", async () => { + const { handler } = useHandler(); + const context = await handler(); + + // Setup + await createTranslatableCollection(context, { + collectionId: "collection:1", + items: [ + { itemId: "element:1", value: "Value 1" }, + { itemId: "element:2", value: "Value 2" }, + { itemId: "element:3", value: "Value 3" } + ] + }); + + const saveTranslatedCollection = new SaveTranslatedCollectionUseCase(context); + await saveTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "en", + items: [ + { itemId: "element:1", value: "Translated Value 1 EN" }, + { itemId: "element:2", value: "Translated Value 2 EN" } + ] + }); + + await saveTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "de", + items: [ + { itemId: "element:1", value: "Translated Value 1 DE" }, + { itemId: "element:2", value: "Translated Value 2 DE" } + ] + }); + + // Test + const deleteTranslatedCollection = new DeleteTranslatedCollectionUseCase(context); + await deleteTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "en" + }); + + const getTranslatedCollection = new GetTranslatedCollectionUseCase(context); + + await expect( + getTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "en" + }) + ).rejects.toThrow("not found"); + + const deCollection = await getTranslatedCollection.execute({ + collectionId: "collection:1", + languageCode: "de" + }); + + expect(deCollection.getCollectionId()).toBe("collection:1"); + expect(deCollection.getLanguageCode()).toBe("de"); + }); +}); diff --git a/packages/api-page-builder/src/prerendering/prerenderingHandlers.ts b/packages/api-page-builder/src/prerendering/prerenderingHandlers.ts index b53958d60ec..7fd93b47026 100644 --- a/packages/api-page-builder/src/prerendering/prerenderingHandlers.ts +++ b/packages/api-page-builder/src/prerendering/prerenderingHandlers.ts @@ -20,6 +20,7 @@ export const prerenderingHandlers = new ContextPlugin(context => { const render = paths.map(item => ({ ...item, tenant, + groupId: tenant, locale: locale.code })); diff --git a/packages/api-page-builder/src/translations/index.ts b/packages/api-page-builder/src/translations/index.ts index b37419f089b..cecc7873775 100644 --- a/packages/api-page-builder/src/translations/index.ts +++ b/packages/api-page-builder/src/translations/index.ts @@ -3,9 +3,11 @@ export * from "./translatableCollection/useCases/GetTranslatableCollectionUseCas export * from "./translatableCollection/useCases/SaveTranslatableCollectionUseCase"; export * from "./translatableCollection/useCases/GetOrCreateTranslatableCollectionUseCase"; export * from "./translatableCollection/useCases/CloneTranslatableCollectionUseCase"; +export * from "./translatableCollection/useCases/DeleteTranslatableCollectionUseCase"; // TranslatedCollection export * 
from "./translatedCollection/useCases/GetTranslatedCollectionUseCase"; export * from "./translatedCollection/useCases/CloneTranslatedCollectionUseCase"; export * from "./translatedCollection/useCases/SaveTranslatedCollectionUseCase"; export * from "./translatedCollection/useCases/GetOrCreateTranslatedCollectionUseCase"; +export * from "./translatedCollection/useCases/DeleteTranslatedCollectionUseCase"; diff --git a/packages/api-page-builder/src/translations/translatableCollection/graphql/resolvers.ts b/packages/api-page-builder/src/translations/translatableCollection/graphql/resolvers.ts index 4d0268f1fec..6eb878e6acf 100644 --- a/packages/api-page-builder/src/translations/translatableCollection/graphql/resolvers.ts +++ b/packages/api-page-builder/src/translations/translatableCollection/graphql/resolvers.ts @@ -5,12 +5,17 @@ import { SaveTranslatableCollectionUseCase } from "~/translations/translatableCo import type { GqlTranslatableItemDTO } from "~/translations/translatableCollection/graphql/GqlTranslatableItemDTO"; import { GetTranslatableCollectionByIdRepository } from "~/translations/translatableCollection/repository/GetTranslatableCollectionByIdRepository"; import { GqlTranslatableCollectionMapper } from "~/translations/translatableCollection/graphql/GqlTranslatableCollectionMapper"; +import { DeleteTranslatableCollectionUseCase } from "~/translations"; interface UpdateTranslatableCollectionParams { collectionId: string; items: GqlTranslatableItemDTO[]; } +interface DeleteTranslatableCollectionParams { + collectionId: string; +} + export const translatableCollectionResolvers: Resolvers = { TranslationsQuery: { getTranslatableCollection: async (_, args, context) => { @@ -39,6 +44,18 @@ export const translatableCollectionResolvers: Resolvers = { } catch (err) { return new ErrorResponse(err); } + }, + deleteTranslatableCollection: async (_, args, context) => { + const { collectionId } = args as DeleteTranslatableCollectionParams; + + try { + const useCase = new DeleteTranslatableCollectionUseCase(context); + await useCase.execute({ collectionId }); + + return new Response(true); + } catch (err) { + return new ErrorResponse(err); + } } } }; diff --git a/packages/api-page-builder/src/translations/translatableCollection/graphql/schema.ts b/packages/api-page-builder/src/translations/translatableCollection/graphql/schema.ts index debbe433dd4..ff3c271c7a4 100644 --- a/packages/api-page-builder/src/translations/translatableCollection/graphql/schema.ts +++ b/packages/api-page-builder/src/translations/translatableCollection/graphql/schema.ts @@ -28,7 +28,7 @@ export const translatableCollectionSchema = /* GraphQL*/ ` data: TranslatableCollection error: PbError } - + extend type TranslationsQuery { """Get the source collection with all the items that need to be translated.""" getTranslatableCollection(collectionId: ID!): TranslatableCollectionResponse @@ -39,5 +39,7 @@ export const translatableCollectionSchema = /* GraphQL*/ ` collectionId: ID! items: [TranslatableItemInput!]! 
): SaveTranslatableCollectionResponse + + deleteTranslatableCollection(collectionId: ID!): BooleanResponse } `; diff --git a/packages/api-page-builder/src/translations/translatableCollection/repository/DeleteTranslatableCollectionRepository.ts b/packages/api-page-builder/src/translations/translatableCollection/repository/DeleteTranslatableCollectionRepository.ts new file mode 100644 index 00000000000..87ff8cfdaf3 --- /dev/null +++ b/packages/api-page-builder/src/translations/translatableCollection/repository/DeleteTranslatableCollectionRepository.ts @@ -0,0 +1,32 @@ +import { PbContext } from "~/types"; +import { GetModel } from "~/translations/GetModel"; +import { TranslatableCollectionDTO } from "./mappers/TranslatableCollectionDTO"; + +export class DeleteTranslatableCollectionRepository { + private readonly context: PbContext; + + constructor(context: PbContext) { + this.context = context; + } + + async execute(collectionId: string): Promise { + const model = await GetModel.byModelId(this.context, "translatableCollection"); + + // `cms.getEntry` throws an error if an entry is not found. + try { + const existingEntry = await this.context.cms.getEntry( + model, + { + where: { collectionId, latest: true } + } + ); + + await this.context.cms.deleteEntry(model, existingEntry.entryId, { permanently: true }); + } catch { + // If a record doesn't exist, then there's nothing to delete, and we can exit. + console.log( + `[DeleteTranslatableCollectionRepository]: Collection doesn't exist: ${collectionId}` + ); + } + } +} diff --git a/packages/api-page-builder/src/translations/translatableCollection/repository/GetTranslatableCollectionByIdRepository.ts b/packages/api-page-builder/src/translations/translatableCollection/repository/GetTranslatableCollectionByIdRepository.ts index b743baf3787..291ba8304de 100644 --- a/packages/api-page-builder/src/translations/translatableCollection/repository/GetTranslatableCollectionByIdRepository.ts +++ b/packages/api-page-builder/src/translations/translatableCollection/repository/GetTranslatableCollectionByIdRepository.ts @@ -15,17 +15,23 @@ export class GetTranslatableCollectionByIdRepository { async execute(collectionId: string): Promise { const model = await GetModel.byModelId(this.context, "translatableCollection"); - const existingEntry = await this.context.cms.getEntry(model, { - where: { collectionId, latest: true } - }); + try { + const existingEntry = await this.context.cms.getEntry( + model, + { + where: { collectionId, latest: true } + } + ); - if (!existingEntry) { + return TranslatableCollectionMapper.fromDTO( + existingEntry.values, + existingEntry.entryId + ); + } catch { throw new WebinyError({ message: `TranslatableCollection "${collectionId}" not found!`, code: "NOT_FOUND" }); } - - return TranslatableCollectionMapper.fromDTO(existingEntry.values, existingEntry.entryId); } } diff --git a/packages/api-page-builder/src/translations/translatableCollection/useCases/DeleteTranslatableCollectionUseCase.ts b/packages/api-page-builder/src/translations/translatableCollection/useCases/DeleteTranslatableCollectionUseCase.ts new file mode 100644 index 00000000000..b5c12fe05a0 --- /dev/null +++ b/packages/api-page-builder/src/translations/translatableCollection/useCases/DeleteTranslatableCollectionUseCase.ts @@ -0,0 +1,20 @@ +import { PbContext } from "~/graphql/types"; +import { DeleteTranslatableCollectionRepository } from "~/translations/translatableCollection/repository/DeleteTranslatableCollectionRepository"; + +export interface 
DeleteTranslatableCollectionParams { + collectionId: string; +} + +export class DeleteTranslatableCollectionUseCase { + private readonly context: PbContext; + + constructor(context: PbContext) { + this.context = context; + } + + async execute(params: DeleteTranslatableCollectionParams): Promise { + const deleteRepository = new DeleteTranslatableCollectionRepository(this.context); + + await deleteRepository.execute(params.collectionId); + } +} diff --git a/packages/api-page-builder/src/translations/translatedCollection/graphql/resolvers.ts b/packages/api-page-builder/src/translations/translatedCollection/graphql/resolvers.ts index 0717d5c989a..bb56d13fe5e 100644 --- a/packages/api-page-builder/src/translations/translatedCollection/graphql/resolvers.ts +++ b/packages/api-page-builder/src/translations/translatedCollection/graphql/resolvers.ts @@ -4,13 +4,21 @@ import type { PbContext } from "~/graphql/types"; import { GqlTranslatedCollectionMapper } from "~/translations/translatedCollection/graphql/mappers/GqlTranslatedCollectionMapper"; import { SaveTranslatedCollectionUseCase } from "~/translations/translatedCollection/useCases/SaveTranslatedCollectionUseCase"; import { GetOrCreateTranslatedCollectionUseCase } from "~/translations/translatedCollection/useCases/GetOrCreateTranslatedCollectionUseCase"; -import { GetTranslatableCollectionUseCase } from "~/translations"; +import { + DeleteTranslatedCollectionUseCase, + GetTranslatableCollectionUseCase +} from "~/translations"; interface GetTranslatedCollectionParams { collectionId: string; languageCode: string; } +interface DeleteTranslatedCollectionParams { + collectionId: string; + languageCode?: string; +} + interface UpdateTranslatedCollectionParams { collectionId: string; languageCode: string; @@ -76,6 +84,18 @@ export const translatedCollectionResolvers: Resolvers = { } catch (err) { return new ErrorResponse(err); } + }, + deleteTranslatedCollection: async (_, args, context) => { + const { collectionId, languageCode } = args as DeleteTranslatedCollectionParams; + + try { + const useCase = new DeleteTranslatedCollectionUseCase(context); + await useCase.execute({ collectionId, languageCode }); + + return new Response(true); + } catch (err) { + return new ErrorResponse(err); + } } } }; diff --git a/packages/api-page-builder/src/translations/translatedCollection/graphql/schema.ts b/packages/api-page-builder/src/translations/translatedCollection/graphql/schema.ts index ca4cdc5a48d..d52189ed203 100644 --- a/packages/api-page-builder/src/translations/translatedCollection/graphql/schema.ts +++ b/packages/api-page-builder/src/translations/translatedCollection/graphql/schema.ts @@ -41,5 +41,7 @@ export const translatedCollectionSchema = /* GraphQL*/ ` languageCode: String! items: [TranslatedItemInput!]! 
): SaveTranslatedCollectionResponse + + deleteTranslatedCollection(collectionId: ID!, languageCode: String): BooleanResponse } `; diff --git a/packages/api-page-builder/src/translations/translatedCollection/repository/DeleteTranslatedCollectionRepository.ts b/packages/api-page-builder/src/translations/translatedCollection/repository/DeleteTranslatedCollectionRepository.ts new file mode 100644 index 00000000000..18f15b16120 --- /dev/null +++ b/packages/api-page-builder/src/translations/translatedCollection/repository/DeleteTranslatedCollectionRepository.ts @@ -0,0 +1,40 @@ +import { PbContext } from "~/types"; +import { GetModel } from "~/translations/GetModel"; +import { TranslatedCollectionDTO } from "~/translations/translatedCollection/repository/mappers/TranslatedCollectionDTO"; + +export interface DeleteTranslatedCollectionParams { + collectionId: string; + languageCode?: string; +} + +export class DeleteTranslatedCollectionRepository { + private readonly context: PbContext; + + constructor(context: PbContext) { + this.context = context; + } + + async execute(params: DeleteTranslatedCollectionParams): Promise { + const model = await GetModel.byModelId(this.context, "translatedCollection"); + + const filter: DeleteTranslatedCollectionParams = { + collectionId: params.collectionId + }; + + if (params.languageCode) { + filter.languageCode = params.languageCode; + } + + const [entries] = await this.context.cms.listEntries(model, { + where: { latest: true, ...filter } + }); + + await Promise.all( + entries.map(entry => { + return this.context.cms.deleteEntry(model, entry.entryId, { + permanently: true + }); + }) + ); + } +} diff --git a/packages/api-page-builder/src/translations/translatedCollection/repository/GetTranslatedCollectionRepository.ts b/packages/api-page-builder/src/translations/translatedCollection/repository/GetTranslatedCollectionRepository.ts index 692271ee71a..43fcca7acde 100644 --- a/packages/api-page-builder/src/translations/translatedCollection/repository/GetTranslatedCollectionRepository.ts +++ b/packages/api-page-builder/src/translations/translatedCollection/repository/GetTranslatedCollectionRepository.ts @@ -20,21 +20,21 @@ export class GetTranslatedCollectionRepository { async execute(params: GetTranslatedCollectionParams): Promise { const model = await GetModel.byModelId(this.context, "translatedCollection"); - const existingEntry = await this.context.cms.getEntry(model, { - where: { - collectionId: params.collectionId, - languageCode: params.languageCode, - latest: true - } - }); + try { + const existingEntry = await this.context.cms.getEntry(model, { + where: { + collectionId: params.collectionId, + languageCode: params.languageCode, + latest: true + } + }); - if (!existingEntry) { + return TranslatedCollectionMapper.fromDTO(existingEntry.values, existingEntry.entryId); + } catch { throw new WebinyError({ message: `TranslatedCollection "${params.collectionId}" for language "${params.languageCode}" was not found!`, code: "NOT_FOUND" }); } - - return TranslatedCollectionMapper.fromDTO(existingEntry.values, existingEntry.entryId); } } diff --git a/packages/api-page-builder/src/translations/translatedCollection/useCases/DeleteTranslatedCollectionUseCase.ts b/packages/api-page-builder/src/translations/translatedCollection/useCases/DeleteTranslatedCollectionUseCase.ts new file mode 100644 index 00000000000..3257a89d16a --- /dev/null +++ b/packages/api-page-builder/src/translations/translatedCollection/useCases/DeleteTranslatedCollectionUseCase.ts @@ -0,0 +1,21 @@ 
+import { PbContext } from "~/graphql/types"; +import { DeleteTranslatedCollectionRepository } from "~/translations/translatedCollection/repository/DeleteTranslatedCollectionRepository"; + +export interface DeleteTranslatedCollectionParams { + collectionId: string; + languageCode?: string; +} + +export class DeleteTranslatedCollectionUseCase { + private readonly context: PbContext; + + constructor(context: PbContext) { + this.context = context; + } + + async execute(params: DeleteTranslatedCollectionParams): Promise { + const deleteRepository = new DeleteTranslatedCollectionRepository(this.context); + + await deleteRepository.execute(params); + } +} diff --git a/packages/api-prerendering-service-aws/src/render/subscriber.ts b/packages/api-prerendering-service-aws/src/render/subscriber.ts index 2182ee27ffc..aff36e40693 100644 --- a/packages/api-prerendering-service-aws/src/render/subscriber.ts +++ b/packages/api-prerendering-service-aws/src/render/subscriber.ts @@ -114,7 +114,7 @@ export default (params: HandlerConfig) => { * the database. This way we are sure that we don't store obsolete infrastructure information. */ toRender.push({ - groupId: render.tenant, + groupId: render.groupId ?? render.tenant, body: render }); } diff --git a/packages/api-prerendering-service-so-ddb/src/operations/queueJob.ts b/packages/api-prerendering-service-so-ddb/src/operations/queueJob.ts index 3499661f229..f8371e1c7e5 100644 --- a/packages/api-prerendering-service-so-ddb/src/operations/queueJob.ts +++ b/packages/api-prerendering-service-so-ddb/src/operations/queueJob.ts @@ -1,14 +1,14 @@ import WebinyError from "@webiny/error"; -import { +import type { PrerenderingServiceQueueJobStorageOperations, PrerenderingServiceStorageOperationsCreateQueueJobParams, PrerenderingServiceStorageOperationsDeleteQueueJobsParams, QueueJob } from "@webiny/api-prerendering-service/types"; -import { Entity } from "@webiny/db-dynamodb/toolbox"; -import { batchWriteAll } from "@webiny/db-dynamodb/utils/batchWrite"; -import { queryAllClean, QueryAllParams } from "@webiny/db-dynamodb/utils/query"; -import { put } from "@webiny/db-dynamodb"; +import type { Entity } from "@webiny/db-dynamodb/toolbox"; +import { createEntityWriteBatch, put } from "@webiny/db-dynamodb"; +import type { QueryAllParams } from "@webiny/db-dynamodb/utils/query"; +import { queryAllClean } from "@webiny/db-dynamodb/utils/query"; export interface CreateQueueJobStorageOperationsParams { entity: Entity; @@ -89,18 +89,18 @@ export const createQueueJobStorageOperations = ( ) => { const { queueJobs } = params; - const items = queueJobs.map(job => { - return entity.deleteBatch({ - PK: createQueueJobPartitionKey(), - SK: createQueueJobSortKey(job.id) - }); + const entityBatch = createEntityWriteBatch({ + entity, + delete: queueJobs.map(job => { + return { + PK: createQueueJobPartitionKey(), + SK: createQueueJobSortKey(job.id) + }; + }) }); try { - await batchWriteAll({ - table: entity.table, - items - }); + await entityBatch.execute(); return queueJobs; } catch (ex) { throw new WebinyError( diff --git a/packages/api-prerendering-service-so-ddb/src/operations/render.ts b/packages/api-prerendering-service-so-ddb/src/operations/render.ts index 07565470dc1..b9fb75a89b9 100644 --- a/packages/api-prerendering-service-so-ddb/src/operations/render.ts +++ b/packages/api-prerendering-service-so-ddb/src/operations/render.ts @@ -1,5 +1,5 @@ import WebinyError from "@webiny/error"; -import { +import type { PrerenderingServiceRenderStorageOperations, 
PrerenderingServiceStorageOperationsCreateRenderParams, PrerenderingServiceStorageOperationsCreateTagPathLinksParams, @@ -12,18 +12,24 @@ import { Tag, TagPathLink } from "@webiny/api-prerendering-service/types"; -import { Entity, EntityQueryOptions } from "@webiny/db-dynamodb/toolbox"; -import { get } from "@webiny/db-dynamodb/utils/get"; -import { queryAll, queryAllClean, QueryAllParams } from "@webiny/db-dynamodb/utils/query"; -import { batchReadAll } from "@webiny/db-dynamodb/utils/batchRead"; -import { batchWriteAll } from "@webiny/db-dynamodb/utils/batchWrite"; -import { cleanupItem, cleanupItems } from "@webiny/db-dynamodb/utils/cleanup"; -import { DataContainer } from "~/types"; -import { deleteItem, put } from "@webiny/db-dynamodb"; +import type { Entity, EntityQueryOptions } from "@webiny/db-dynamodb/toolbox"; +import { + batchReadAll, + cleanupItem, + cleanupItems, + createEntityWriteBatch, + deleteItem, + get, + put, + queryAll, + queryAllClean +} from "@webiny/db-dynamodb"; +import type { QueryAllParams } from "@webiny/db-dynamodb"; +import type { DataContainer } from "~/types"; export interface CreateRenderStorageOperationsParams { - entity: Entity; - tagPathLinkEntity: Entity; + entity: Entity; + tagPathLinkEntity: Entity; } export interface CreateTagPathLinkPartitionKeyParams { @@ -276,29 +282,29 @@ export const createRenderStorageOperations = ( ) => { const { tagPathLinks } = params; - const items = tagPathLinks.map(item => { - return tagPathLinkEntity.putBatch({ - data: item, - TYPE: createTagPathLinkType(), - PK: createTagPathLinkPartitionKey({ - tenant: item.tenant, - tag: item, - path: item.path - }), - SK: createTagPathLinkSortKey({ - tag: item, - path: item.path - }), - GSI1_PK: createTagPathLinkGSI1PartitionKey({ tag: item, tenant: item.tenant }), - GSI1_SK: createTagPathLinkGSI1SortKey({ tag: item, path: item.path }) - }); + const tagPathLinksBatch = createEntityWriteBatch({ + entity: tagPathLinkEntity, + put: tagPathLinks.map(item => { + return { + data: item, + TYPE: createTagPathLinkType(), + PK: createTagPathLinkPartitionKey({ + tenant: item.tenant, + tag: item, + path: item.path + }), + SK: createTagPathLinkSortKey({ + tag: item, + path: item.path + }), + GSI1_PK: createTagPathLinkGSI1PartitionKey({ tag: item, tenant: item.tenant }), + GSI1_SK: createTagPathLinkGSI1SortKey({ tag: item, path: item.path }) + }; + }) }); try { - await batchWriteAll({ - table: tagPathLinkEntity.table, - items - }); + await tagPathLinksBatch.execute(); return tagPathLinks; } catch (ex) { throw new WebinyError( @@ -315,25 +321,26 @@ export const createRenderStorageOperations = ( params: PrerenderingServiceStorageOperationsDeleteTagPathLinksParams ): Promise => { const { tenant, tags, path } = params; - const items = tags.map(tag => { - return tagPathLinkEntity.deleteBatch({ - PK: createTagPathLinkPartitionKey({ - tag, - tenant, - path - }), - SK: createTagPathLinkSortKey({ - tag, - path - }) - }); + + const tagPathLinksBatch = createEntityWriteBatch({ + entity: tagPathLinkEntity, + delete: tags.map(tag => { + return { + PK: createTagPathLinkPartitionKey({ + tag, + tenant, + path + }), + SK: createTagPathLinkSortKey({ + tag, + path + }) + }; + }) }); try { - await batchWriteAll({ - table: tagPathLinkEntity.table, - items - }); + await tagPathLinksBatch.execute(); } catch (ex) { throw new WebinyError( ex.message || "Could not delete tagPathLink records.", diff --git a/packages/api-prerendering-service/__tests__/render/extractPeLoaderDataFromHtml.test.ts 
b/packages/api-prerendering-service/__tests__/render/extractPeLoaderDataFromHtml.test.ts new file mode 100644 index 00000000000..ae5cdb8191d --- /dev/null +++ b/packages/api-prerendering-service/__tests__/render/extractPeLoaderDataFromHtml.test.ts @@ -0,0 +1,17 @@ +import extractPeLoaderDataFromHtml from "../../src/render/extractPeLoaderDataFromHtml"; + +describe("extractPeLoaderDataFromHtml Tests", () => { + it("must detect pe-loader-data-cache tags in given HTML", async () => { + const results = extractPeLoaderDataFromHtml(TEST_STRING); + + // The value is not decompressed, so it's still a string. + expect(results).toEqual([ + { + key: "GfT8AoRsYT-1238102521", + value: "pe_NobwRAJgpgzgxgJwJYAcAuSD2A7MAuMAFQAsoACAMQEMAbOHMgRjIHcqYyrsyoAPFKNghUARjXI0qAV2xxiZGAE8YaKAFsyKZADcqqmorLRtUGpgEROQsmq5SAZlThopCKJZGGAyiidQAGkauSNgA5mQATAAMUQBsALTRUQCcAHRkAPLcEQAcZF5Q6OoiUAiRMTkANJS09NzMJXBUauRopGT2SAgqmjp6porxxqbm7mQ0SACOUkgQ8fZSpuPSsvImxEhw4mRomGSheyG7ZJgIIkhonAiYMpZt5ACiVAhtqWCVYLP4YACsUMkQKIQZJ/YSxZIgn72ADsKQgjCgIneYGwzSg32odAYjGRLCQAGtUO4kFRvsQ0GgUDA8AB6GmCVJ4wkWEmpU6hGlMpA0zF1AD6OIAvpVwNB4Mh0FhcAReQxkmQkBwqDsWJh4ioqKFyNc4PioJcxUhQtgxlxLLZsA4nC43B5vL44AEOqcdu03BNRNszQoqPZWgguDAUKdLph7D79BNVErrPcyAARAMHbhBvyIX2XI57U7nNBvD5fAh/AFAkHuKjg5LQgDMVBycIRcGRqJaGNqctxBKJEBJZIpVNp9OwjK7LKobIQHK5PPb2D5yTAwtFsEQqAwOG+AHULvI46IkFHDMcJvZM9hjjmLicTGUfgAWGz65BwHY4DgACkYjGSlRiUXGIgAJTxPEyq2DAHBQNMSC6OI56vpwZA1tCZAAFb6hMJplGYVDQJYeJtJo7AwIIWrdNUiBQCw1Q0FIoShJq5DegspggbK3AABJQFQ2iGE03AnpcainK0xBcCqmyieQviKDhlhhq65AmrwlxbJgJE9KMAbrqiNBkOsmziNUcbxqYaDKgAkgAamQXE8Yo1R6CcJrxG0XR3O09AqPmnwQN8xaAsCoIVhC0J3hAIgNu4zZom2WKcdxvGdsyxKkgQ5KUtSdIMlyY4TlOXYzvFfJ2UlS6QCuEo6d8Xjmd0GwoFYlheFIAhlKVhgAEqYLq+pkG4WiwIIlzKgsNAGP1UBSDAnr+oGwYvHoUoKMoqgaIaxpjMcJEINokmcONZBPC88iXpcJruEqHAsKYensIpZAALKYAw3qPc8MDpCQioqmqGpavpUAbFs5AAMMAEUAEMAAGQwAKWQ9BqMGJHyeGcYtW1tmJYYOp6pc74iC9KilIBTX5HVMANWQ75U6gZMQzD8OsAeelQCY55SLQE0DZIjo1PFZA/gLdTY/Z5OJpqOA+YWvz/IFySxOWlbVjkESAgCCLoh8LbogQtUfQ1yXdr26X9llQ4jilPbjuynKFT4fj+HyBv1agi4ALpAA==" + } + ]); + }); +}); + +const TEST_STRING = `...
+  • Starship
+
+    Starship and Super Heavy Rocket represent a fully reusable transportation system designed to service all Earth orbit needs as well as the Moon and Mars. This two-stage vehicle — composed of the Super Heavy rocket (booster) and Starship (ship) — will eventually replace Falcon 9, Falcon Heavy and Dragon.
+
  • `; diff --git a/packages/api-prerendering-service/__tests__/render/handlers/render/linkPreloading.test.ts b/packages/api-prerendering-service/__tests__/render/handlers/render/linkPreloading.test.ts index 9ce663a69fe..88c106c5071 100644 --- a/packages/api-prerendering-service/__tests__/render/handlers/render/linkPreloading.test.ts +++ b/packages/api-prerendering-service/__tests__/render/handlers/render/linkPreloading.test.ts @@ -15,7 +15,9 @@ describe(`"renderUrl" Function Test`, () => { renderUrlFunction: async () => { return { content: BASE_HTML, - meta: {} + meta: { + interceptedRequests: [] + } }; } }); @@ -58,7 +60,9 @@ describe(`"renderUrl" Function Test`, () => { renderUrlFunction: async () => { return { content: BASE_HTML, - meta: {} + meta: { + interceptedRequests: [] + } }; } }); diff --git a/packages/api-prerendering-service/__tests__/render/handlers/render/renderUrl.test.ts b/packages/api-prerendering-service/__tests__/render/handlers/render/renderUrl.test.ts index 9ce663a69fe..88c106c5071 100644 --- a/packages/api-prerendering-service/__tests__/render/handlers/render/renderUrl.test.ts +++ b/packages/api-prerendering-service/__tests__/render/handlers/render/renderUrl.test.ts @@ -15,7 +15,9 @@ describe(`"renderUrl" Function Test`, () => { renderUrlFunction: async () => { return { content: BASE_HTML, - meta: {} + meta: { + interceptedRequests: [] + } }; } }); @@ -58,7 +60,9 @@ describe(`"renderUrl" Function Test`, () => { renderUrlFunction: async () => { return { content: BASE_HTML, - meta: {} + meta: { + interceptedRequests: [] + } }; } }); diff --git a/packages/api-prerendering-service/src/render/defaultRenderUrlFunction.ts b/packages/api-prerendering-service/src/render/defaultRenderUrlFunction.ts new file mode 100644 index 00000000000..6bbddfa5b9b --- /dev/null +++ b/packages/api-prerendering-service/src/render/defaultRenderUrlFunction.ts @@ -0,0 +1,157 @@ +import chromium from "@sparticuz/chromium"; +import puppeteer, { Browser, Page } from "puppeteer-core"; +import extractPeLoaderDataFromHtml from "./extractPeLoaderDataFromHtml"; +import { RenderResult, RenderUrlCallableParams } from "./types"; +import { TagPathLink } from "~/types"; + +const windowSet = (page: Page, name: string, value: string | boolean) => { + page.evaluateOnNewDocument(` + Object.defineProperty(window, '${name}', { + get() { + return '${value}' + } + })`); +}; + +export interface File { + type: string; + body: any; + name: string; + meta: { + tags?: TagPathLink[]; + [key: string]: any; + }; +} + +export const defaultRenderUrlFunction = async ( + url: string, + params: RenderUrlCallableParams +): Promise => { + let browser!: Browser; + + try { + browser = await puppeteer.launch({ + args: chromium.args, + defaultViewport: chromium.defaultViewport, + executablePath: await chromium.executablePath(), + headless: chromium.headless, + ignoreHTTPSErrors: true + }); + + const browserPage = await browser.newPage(); + + // Can be used to add additional logic - e.g. skip a GraphQL query to be made when in pre-rendering process. 
+ windowSet(browserPage, "__PS_RENDER__", true); + + const tenant = params.args.tenant; + if (tenant) { + console.log("Setting tenant (__PS_RENDER_TENANT__) to window object...."); + windowSet(browserPage, "__PS_RENDER_TENANT__", tenant); + } + + const locale = params.args.locale; + if (locale) { + console.log("Setting locale (__PS_RENDER_LOCALE__) to window object...."); + windowSet(browserPage, "__PS_RENDER_LOCALE__", locale); + } + + const renderResult: RenderResult = { + content: "", + meta: { + interceptedRequests: [], + apolloState: {}, + cachedData: { + apolloGraphQl: [], + peLoaders: [] + } + } + }; + + // Don't load these resources during prerender. + const skipResources = ["image"]; + await browserPage.setRequestInterception(true); + + browserPage.on("request", request => { + const issuedRequest = { + type: request.resourceType(), + url: request.url(), + aborted: false + }; + + if (skipResources.includes(issuedRequest.type)) { + issuedRequest.aborted = true; + request.abort(); + } else { + request.continue(); + } + + renderResult.meta.interceptedRequests.push(issuedRequest); + }); + + // TODO: should be a plugin. + browserPage.on("response", async response => { + const request = response.request(); + const url = request.url(); + if (url.includes("/graphql") && request.method() === "POST") { + const responses = (await response.json()) as Record; + const postData = JSON.parse(request.postData() as string); + const operations = Array.isArray(postData) ? postData : [postData]; + + for (let i = 0; i < operations.length; i++) { + const { query, variables } = operations[i]; + + // For now, we're doing a basic @ps(cache: true) match to determine if the + // cache was set true. In the future, if we start introducing additional + // parameters here, we should probably make this parsing smarter. + const mustCache = query.match(/@ps\((cache: true)\)/); + + if (mustCache) { + const data = Array.isArray(responses) ? responses[i].data : responses.data; + renderResult.meta.cachedData.apolloGraphQl.push({ + query, + variables, + data + }); + } + } + return; + } + }); + + // Load URL and wait for all network requests to settle. + await browserPage.goto(url, { waitUntil: "networkidle0" }); + + renderResult.content = await browserPage.content(); + + renderResult.meta.apolloState = await browserPage.evaluate(() => { + // @ts-expect-error + return window.getApolloState(); + }); + + renderResult.meta.cachedData.peLoaders = extractPeLoaderDataFromHtml(renderResult.content); + + return renderResult; + } finally { + if (browser) { + // We need to close all open pages first, to prevent browser from hanging when closed. + const pages = await browser.pages(); + for (const page of pages) { + await page.close(); + } + + // This is fixing an issue where the `await browser.close()` would hang indefinitely. + // The "inspiration" for this fix came from the following issue: + // https://github.com/Sparticuz/chromium/issues/85 + console.log("Killing browser process..."); + const childProcess = browser.process(); + if (childProcess) { + childProcess.kill(9); + } + + console.log("Browser process killed."); + } + } + + // There's no catch block here because errors are already being handled + // in the entrypoint function, located in `./index.ts` file. 
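
Two details of this render function are easy to miss. First, `windowSet` defines each injected global through a getter that returns a string, so `windowSet(browserPage, "__PS_RENDER__", true)` surfaces as the string `"true"` inside the page. Second, the `@ps(cache: true)` check is a plain regex over the outgoing query text, so the directive must appear verbatim in a query for its response to be cached. A sketch of the consuming side (the helper name is illustrative):

```ts
// Illustrative client-side counterpart to windowSet().
declare global {
    interface Window {
        __PS_RENDER__?: string;
        __PS_RENDER_TENANT__?: string;
        __PS_RENDER_LOCALE__?: string;
    }
}

export const isPrerendering = (): boolean => {
    // The getter injected by windowSet() returns the string "true",
    // not a boolean, so compare against the string form.
    return typeof window !== "undefined" && window.__PS_RENDER__ === "true";
};
```
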
+}; diff --git a/packages/api-prerendering-service/src/render/extractPeLoaderDataFromHtml.ts b/packages/api-prerendering-service/src/render/extractPeLoaderDataFromHtml.ts new file mode 100644 index 00000000000..cff831707b6 --- /dev/null +++ b/packages/api-prerendering-service/src/render/extractPeLoaderDataFromHtml.ts @@ -0,0 +1,65 @@ +import { PeLoaderCacheEntry } from "./types"; + +const parsePeLoaderDataCacheTag = (content: string): PeLoaderCacheEntry | null => { + const regex = + /<pe-loader-data-cache data-key="(.*?)" data-value="(.*?)"><\/pe-loader-data-cache>/gm; + let m; + + while ((m = regex.exec(content)) !== null) { + // This is necessary to avoid infinite loops with zero-width matches + if (m.index === regex.lastIndex) { + regex.lastIndex++; + } + + const [, key, value] = m; + + return { key, value }; + } + + return null; +}; + +export default (content: string): PeLoaderCacheEntry[] => { + if (!content) { + return []; + } + + const cachedData: PeLoaderCacheEntry[] = []; + const regex = /<pe-loader-data-cache data-key="(.*?)" data-value="(.*?)"><\/pe-loader-data-cache>/gm; + let m; + + while ((m = regex.exec(content)) !== null) { + // This is necessary to avoid infinite loops with zero-width matches + if (m.index === regex.lastIndex) { + regex.lastIndex++; + } + + const [matchedTag] = m; + + if (!matchedTag) { + continue; + } + + const parsedTag = parsePeLoaderDataCacheTag(matchedTag); + if (!parsedTag) { + continue; + } + + cachedData.push(parsedTag); + } + + if (cachedData.length > 0) { + const uniqueMap: Record<string, PeLoaderCacheEntry> = cachedData.reduce( + (collection, peLoaderDataCache) => { + collection[`${peLoaderDataCache.key || ""}${peLoaderDataCache.value || ""}`] = + peLoaderDataCache; + + return collection; + }, + {} as Record<string, PeLoaderCacheEntry> + ); + + return Object.values(uniqueMap); + } + return cachedData; +};
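
With the tag format above, the extractor can be exercised in isolation; duplicate tags collapse into a single entry via the `uniqueMap` step. The key and value below are made up:

```ts
import extractPeLoaderDataFromHtml from "./extractPeLoaderDataFromHtml";

const html = `
<main>...rendered page...</main>
<pe-loader-data-cache data-key="loader-1" data-value="pe_eyJkYXRhIjp7fX0="></pe-loader-data-cache>
<pe-loader-data-cache data-key="loader-1" data-value="pe_eyJkYXRhIjp7fX0="></pe-loader-data-cache>
`;

// Logs one entry: [{ key: "loader-1", value: "pe_eyJkYXRhIjp7fX0=" }]
console.log(extractPeLoaderDataFromHtml(html));
```
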
diff --git a/packages/api-prerendering-service/src/render/index.ts b/packages/api-prerendering-service/src/render/index.ts index a7de5ffbfcc..8981d291a77 100644 --- a/packages/api-prerendering-service/src/render/index.ts +++ b/packages/api-prerendering-service/src/render/index.ts @@ -56,7 +56,8 @@ export default (params: RenderParams) => { const settings = await storageOperations.getSettings(); for (const args of handlerArgs) { - const { tenant, path, locale } = args; + const { tenant, path, locale, groupId } = args; + console.log("Rendering item", args); const bucketRoot = isMultiTenant ? tenant : ""; @@ -118,6 +119,7 @@ tenant, path, locale, + groupId: groupId ?? tenant, tags: args.tags, files: files.map(item => omit(item, ["body"])) }; diff --git a/packages/api-prerendering-service/src/render/preloadCss.ts b/packages/api-prerendering-service/src/render/preloadCss.ts new file mode 100644 index 00000000000..805967db6c7 --- /dev/null +++ b/packages/api-prerendering-service/src/render/preloadCss.ts @@ -0,0 +1,7 @@ +import { RenderResult } from "./types"; + +export const preloadCss = (render: RenderResult): void => { + const regex = / diff --git a/packages/api-prerendering-service/src/render/preloadFonts.ts b/packages/api-prerendering-service/src/render/preloadFonts.ts new file mode 100644 --- /dev/null +++ b/packages/api-prerendering-service/src/render/preloadFonts.ts @@ -0,0 +1,16 @@ +import { RenderResult } from "./types"; + +export const preloadFonts = (render: RenderResult): void => { + const fontsRequests = render.meta.interceptedRequests.filter( + req => req.type === "font" && req.url + ); + + const preloadLinks: string = Array.from(fontsRequests) + .map(req => { + return `<link rel="preload" href="${req.url}" as="font" crossorigin="anonymous" />`; + }) + .join("\n"); + + // Inject the preload tags into the <head> section + render.content = render.content.replace("</head>", `${preloadLinks}</head>`); +}; diff --git a/packages/api-prerendering-service/src/render/preloadJs.ts b/packages/api-prerendering-service/src/render/preloadJs.ts new file mode 100644 index 00000000000..63fae20d827 --- /dev/null +++ b/packages/api-prerendering-service/src/render/preloadJs.ts @@ -0,0 +1,7 @@ +import { RenderResult } from "~/render/types"; + +export const preloadJs = (render: RenderResult): void => { + const regex = /