From 86a1378e1e87c6c20a6819eccf9124581ec2ce6e Mon Sep 17 00:00:00 2001
From: Matthieu Dumont
Date: Tue, 8 Sep 2020 18:20:50 +0200
Subject: [PATCH] feat: new version

---
 .env.example                     |  2 +
 .gitignore                       |  9 +++++
 README.md                        | 63 +++++++++++++++++++-----------
 dist/index.js                    | 29 --------------
 netlify.toml                     |  7 +++-
 package.json                     | 28 ++++++++-----
 scripts/dev.sh                   | 21 ++++++++++
 scripts/generate_netlify_toml.sh | 31 +++++++++++++++
 src/index.ts                     | 67 ++++++++++++++++++++------------
 9 files changed, 170 insertions(+), 87 deletions(-)
 create mode 100644 .env.example
 delete mode 100644 dist/index.js
 create mode 100755 scripts/dev.sh
 create mode 100755 scripts/generate_netlify_toml.sh

diff --git a/.env.example b/.env.example
new file mode 100644
index 000000000..ed10049cc
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,2 @@
+ALGOLIA_API_KEY=
+ALGOLIA_BASE_URL=https://crawler.algolia.com
diff --git a/.gitignore b/.gitignore
index ba47475e3..fcb9ccabe 100644
--- a/.gitignore
+++ b/.gitignore
@@ -105,5 +105,14 @@ typings/
 # Local Netlify folder
 .netlify

+# Built files
+dist/
+
+# VSCode files
 .vscode
+
+# npm package lock
 package-lock.json
+
+# Local env vars
+.env
diff --git a/README.md b/README.md
index 4c95cd496..bb98c2d9c 100644
--- a/README.md
+++ b/README.md
@@ -1,39 +1,56 @@
-# crawler-netlify-plugin
+> ⚠️ **This project is an alpha, not usable without dedicated access.**

-!> This is an alpha and not usable without an access to crawler.algolia.com
+# crawler-netlify-plugin

-## Install on your own repo
+This plugin links your Netlify site with Algolia's Crawler.
+It triggers a crawl on each successful build.

-- Add the plugin in your netlify.toml
+## Architecture

-```yaml
-# netlify.toml
+- [`src/`](./src/): plugin sources
+- [`public/`](./public/): test website

-[[plugins]]
-package = "@algolia/crawler-netlify-plugin"
-```
+## Environment variables

-- Add those env in your Netlify's Environment variables - `CRAWLER_API_KEY` `CRAWLER_ID` `CRAWLER_USER_ID`
+- `ALGOLIA_API_KEY` [Optional in dev]: API key used to authenticate the call to the crawler.
+- `ALGOLIA_BASE_URL` [Optional]: defaults to `https://crawler.algolia.com/`.

-- Done
+For a local run, set them in `.env`: run `cp .env.example .env` and adjust the values to your needs.

-## Install
+## Scripts

-```bash
-yarn
+- `yarn dev`: run the dev environment
+- `yarn release`: build & publish the library

-yarn build
+## Development

-yarn netlify build --dry
-```
+### Prerequisites

-## Publish to npm
+**Only accessible to Algolia employees.**

-> yes commit `/dist` because Netlify does not support Typescript
+1. Access to the Algolia team on Netlify.
+2. Access to the test website in this org: https://app.netlify.com/sites/crawler-netlify-plugin/
+3. Clone the repo and link it to the test website on Netlify:
+   ```sh
+   git clone git@github.com:algolia/crawler-netlify-plugin.git
+   cd crawler-netlify-plugin
+   yarn
+   yarn netlify link
+   # Accept linking it with the current git remote; it detects the correct site automatically
+   ```
+4. Set up `.env` by copying the example file:
+   ```sh
+   cp .env.example .env
+   ```
+   Make sure the values in this file match your setup.

-```bash
-yarn build
+### Running the dev env

-yarn publish
-```
+```sh
+yarn dev
+```
+
+This script generates a temporary `netlify.toml` that references the plugin built locally at `dist/index.js`.
+It then builds the site locally, running the local version of the plugin.
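+
+For reference, the dev variant of `netlify.toml` written by [`scripts/generate_netlify_toml.sh`](./scripts/generate_netlify_toml.sh) loads the plugin from the local build output instead of the published npm package:
+
+```toml
+[[plugins]]
+package = "./dist/index.js"
+```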
+
+To change the crawler target from the prod one to a locally running one, change `ALGOLIA_BASE_URL` in your `.env` to target your local instance.
diff --git a/dist/index.js b/dist/index.js
deleted file mode 100644
index eaad699a4..000000000
--- a/dist/index.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.onEnd = exports.onPostBuild = void 0;
-const node_fetch_1 = __importDefault(require("node-fetch"));
-async function onPostBuild() {
-    console.log('Algolia Crawler Netlify plugin started');
-    const crawlerID = process.env.CRAWLER_ID;
-    const crawlerUserID = process.env.CRAWLER_USER_ID;
-    const crawlerApiKey = process.env.CRAWLER_API_KEY;
-    if (!crawlerID || !crawlerUserID || !crawlerApiKey) {
-        throw new Error('Missing required Crawler credentials');
-    }
-    const results = await node_fetch_1.default(`https://crawler.algolia.com/api/1/crawlers/${crawlerID}/reindex`, {
-        headers: {
-            Authorization: `Basic ${Buffer.from(`${crawlerUserID}:${crawlerApiKey}`).toString('base64')}`,
-            'Content-Type': 'application/json',
-        },
-        method: 'POST',
-    });
-    console.log(results);
-}
-exports.onPostBuild = onPostBuild;
-function onEnd(params) {
-    console.log(JSON.stringify(params));
-}
-exports.onEnd = onEnd;
diff --git a/netlify.toml b/netlify.toml
index d60da92f3..1122b27de 100644
--- a/netlify.toml
+++ b/netlify.toml
@@ -1,3 +1,8 @@
+# This file is generated by scripts/generate_netlify_toml.sh
+# DO NOT MODIFY, MODIFY THE GENERATING SCRIPT
+
+
 [[plugins]]
-package = "@algolia/crawler-netlify-plugin"
\ No newline at end of file
+package = "@algolia/crawler-netlify-plugin"
+
diff --git a/package.json b/package.json
index 26d355c99..c6f27b123 100644
--- a/package.json
+++ b/package.json
@@ -1,11 +1,22 @@
 {
   "name": "@algolia/crawler-netlify-plugin",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "main": "dist/index.js",
   "repository": "https://github.com/algolia/crawler-netlify-plugin.git",
-  "author": "Samuel Bodin ",
+  "author": "Algolia Team ",
+  "license": "MIT",
   "private": false,
+  "scripts": {
+    "build": "tsc -b",
+    "dev": "NODE_ENV=development ./scripts/dev.sh",
+    "lint": "eslint --ext=jsx,ts,tsx,js .",
+    "release": "yarn build && yarn publish"
+  },
+  "dependencies": {
+    "node-fetch": "2.6.0"
+  },
   "devDependencies": {
+    "@types/node": "14.0.27",
     "@typescript-eslint/eslint-plugin": "3.5.0",
     "@typescript-eslint/parser": "3.5.0",
     "eslint": "7.7.0",
@@ -24,12 +35,9 @@
     "prettier": "2.1.1",
     "typescript": "4.0.2"
   },
-  "scripts": {
-    "lint": "eslint --ext=jsx,ts,tsx,js .",
-    "build": "tsc -b"
-  },
-  "dependencies": {
-    "@types/node": "14.0.27",
-    "node-fetch": "2.6.0"
-  }
+  "files": [
+    "README.md",
+    "manifest.yml",
+    "dist/"
+  ]
 }
diff --git a/scripts/dev.sh b/scripts/dev.sh
new file mode 100755
index 000000000..4c03bbef6
--- /dev/null
+++ b/scripts/dev.sh
@@ -0,0 +1,21 @@
+#! /bin/bash
+
+set -e
+
+cd "$(dirname "${BASH_SOURCE[0]}")"
+cd ..
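+
+# Load the local env vars: `set -a` auto-exports every variable sourced
+# from .env, so `yarn netlify build` (and the plugin it runs) inherits
+# ALGOLIA_API_KEY and ALGOLIA_BASE_URL.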
+[ -f .env ] || { echo 'Missing .env' >&2; exit 1; }
+set -a
+source .env
+set +a
+
+restore_netlify_toml() {
+  NODE_ENV=production ./scripts/generate_netlify_toml.sh
+}
+
+# Swap in the dev netlify.toml; restore the prod one when the script exits
+NODE_ENV=development ./scripts/generate_netlify_toml.sh
+trap restore_netlify_toml EXIT
+
+yarn build
+yarn netlify build
diff --git a/scripts/generate_netlify_toml.sh b/scripts/generate_netlify_toml.sh
new file mode 100755
index 000000000..02fa94d65
--- /dev/null
+++ b/scripts/generate_netlify_toml.sh
@@ -0,0 +1,31 @@
+#! /bin/bash
+
+set -e
+cd "$(dirname "${BASH_SOURCE[0]}")"
+
+target="../netlify.toml"
+
+# Common content
+common='
+# This file is generated by scripts/generate_netlify_toml.sh
+# DO NOT MODIFY, MODIFY THE GENERATING SCRIPT
+'
+
+# Dev only
+dev_only='
+[[plugins]]
+package = "./dist/index.js"
+'
+
+# Prod only
+prod_only='
+[[plugins]]
+package = "@algolia/crawler-netlify-plugin"
+'
+
+echo "$common" > "$target"
+if [ "$NODE_ENV" = "development" ]; then
+  echo "$dev_only" >> "$target"
+else
+  echo "$prod_only" >> "$target"
+fi
diff --git a/src/index.ts b/src/index.ts
index a211d0b68..3604d64ad 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,32 +1,51 @@
 import fetch from 'node-fetch';

-export async function onPostBuild() {
-  console.log('Algolia Crawler Netlify plugin started');
+process.env.NODE_ENV ??= 'production';

-  const crawlerID = process.env.CRAWLER_ID;
-  const crawlerUserID = process.env.CRAWLER_USER_ID;
-  const crawlerApiKey = process.env.CRAWLER_API_KEY;
+interface BuildParams {
+  constants: {
+    SITE_ID: string;
+  };
+}

-  if (!crawlerID || !crawlerUserID || !crawlerApiKey) {
-    throw new Error('Missing required Crawler credentials');
+function throwExceptInDev(message: string) {
+  if (process.env.NODE_ENV === 'development') {
+    console.warn(`WARN: ${message}`);
+  } else {
+    throw new Error(message);
   }
-
-  const results = await fetch(
-    `https://crawler.algolia.com/api/1/crawlers/${crawlerID}/reindex`,
-    {
-      headers: {
-        Authorization: `Basic ${Buffer.from(
-          `${crawlerUserID}:${crawlerApiKey}`
-        ).toString('base64')}`,
-        'Content-Type': 'application/json',
-      },
-      method: 'POST',
-    }
-  );
-
-  console.log(results);
 }

-export function onEnd(params: any) {
-  console.log(JSON.stringify(params));
+export async function onSuccess(params: BuildParams) {
+  console.log('Algolia Netlify plugin started');
+
+  // Debug output (note: this logs the full environment, secrets included)
+  console.log(JSON.stringify(params, null, 2));
+  console.log(JSON.stringify(process.env, null, 2));
+
+  const siteId = params.constants.SITE_ID;
+  const branch = process.env.BRANCH || 'master';
+  const algoliaBaseUrl =
+    process.env.ALGOLIA_BASE_URL || 'https://crawler.algolia.com';
+  const algoliaApiKey = process.env.ALGOLIA_API_KEY;
+
+  if (!siteId) throw new Error('Missing SITE_ID');
+  if (!branch) throw new Error('Missing BRANCH');
+  if (!algoliaApiKey) throwExceptInDev('Missing ALGOLIA_API_KEY');
+
+  const endpoint = `${algoliaBaseUrl}/api/1/netlify/crawl`;
+  const creds = `${siteId}:${algoliaApiKey || 'unused'}`;
+  const response = await fetch(endpoint, {
+    method: 'POST',
+    headers: {
+      Authorization: `Basic ${Buffer.from(creds).toString('base64')}`,
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({ branch }),
+  });
+
+  console.log({
+    status: response.status,
+    text: await response.text(),
+  });
 }
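
---

For reference, the crawl trigger that `onSuccess` performs above boils down to this curl call (a sketch; `SITE_ID` comes from Netlify's build constants, the other values from the environment variables documented in the README):

```sh
curl -X POST "${ALGOLIA_BASE_URL:-https://crawler.algolia.com}/api/1/netlify/crawl" \
  -u "$SITE_ID:$ALGOLIA_API_KEY" \
  -H 'Content-Type: application/json' \
  -d "{\"branch\": \"${BRANCH:-master}\"}"
```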