name: Publish Docs

# Declare default permissions as read only.
permissions: read-all

on:
  push:
    branches:
      - main

concurrency:
  # Note: github.head_ref is empty on push events, so each push to main gets
  # its own group and runs to completion.
  group: publish-docs-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  check-changes:
    uses: ./.github/workflows/changed-packages.yml
    with:
      check-mergeable-state: true

  deploy-docs:
    needs: check-changes
    name: Deploy docs (if needed)
    # Only deploy when the reusable workflow reports changes to the website package.
    if: ${{ contains(fromJSON(needs.check-changes.outputs.changes), 'website') }}
    runs-on: ubuntu-latest
    permissions:
      # Needed to push the built site to the gh-pages branch.
      contents: write
    steps:
      - name: Check out repository
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
      - name: Set up Node.js
        uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0
        with:
          cache: npm
          node-version: lts/*
      - name: Install dependencies
        working-directory: ./website
        run: npm ci
      - name: Build website
        working-directory: ./website
        env:
          # Raise the V8 heap limit; the docs build can exceed the default.
          NODE_OPTIONS: --max-old-space-size=6144
        run: npm run build
      - name: Deploy to GitHub Pages
        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./website/build
          user_name: release-please[bot]
          user_email: 55107282+release-please[bot]@users.noreply.github.com
      - name: Fail if the Algolia crawler is blocked
        env:
          CRAWLER_USER_ID: ${{ secrets.ALGOLIA_CRAWLER_USER_ID }}
          CRAWLER_API_KEY: ${{ secrets.ALGOLIA_CRAWLER_API_KEY }}
          CRAWLER_ID: ${{ secrets.ALGOLIA_CRAWLER_ID }}
        run: |
          # A blocked crawler cannot be reindexed via the API; fail fast so the
          # block can be cleared manually in the Algolia dashboard.
          RESPONSE=$(curl -H "Content-Type: application/json" -X GET \
            --user "$CRAWLER_USER_ID:$CRAWLER_API_KEY" \
            "https://crawler.algolia.com/api/1/crawlers/$CRAWLER_ID" \
            | grep '"blocked":true' || true)
          if [ -n "$RESPONSE" ]; then
            echo "Crawler is blocked. Please go to https://crawler.algolia.com/ to unblock it."
            exit 1
          fi
      - name: Trigger Algolia reindexing
        env:
          CRAWLER_USER_ID: ${{ secrets.ALGOLIA_CRAWLER_USER_ID }}
          CRAWLER_API_KEY: ${{ secrets.ALGOLIA_CRAWLER_API_KEY }}
          CRAWLER_ID: ${{ secrets.ALGOLIA_CRAWLER_ID }}
        run: |
          # Kick off a fresh crawl so the search index reflects the newly
          # deployed docs.
          curl -H "Content-Type: application/json" -X POST \
            --user "$CRAWLER_USER_ID:$CRAWLER_API_KEY" \
            "https://crawler.algolia.com/api/1/crawlers/$CRAWLER_ID/reindex"