Feature/issue 948 web api standardization (#1045)
* web standard APIs adoption

* update graph lifecycle to work with URLs

* web standards refactoring for Resource plugins resolve lifecycle

* refactoring develop command and serve resource lifecycle for standard plugins

* refactor standard plugin intercept lifecycle

* clean up

* restore app templating to standard HTML resource plugin

* WIP refactoring plugins for web standardization

* handle develop command middleware support for handling binary or text response types

* restore nested relative routes resolution and general standard plugin refactoring

* group greenwood plugins

* adapt intercept lifecycles for web standards signature

* adapt pre-render build lifecycle and refactor page serving and intercepting

* refactor build and optimize lifecycles

* refactor bundle and optimize lifecycles

* restore cleanupResources

* restore copy lifecycle and all of build command

* restore serve command and full website prerendering

* restore specs and fixing missed upgrades

* all CLI specs passing

* all core specs and plugins working

* getting package plugin specs passing

* getting package plugin specs passing

* getting package plugin specs passing

* getting package plugin specs passing

* all plugin specs passing

* update spec for content type

* exp CSS import specs passing

* exp JSON import specs passing

* clean up console logs

* resolve max listeners warning

* restore E-Tag middleware for development

* merged response bundling working

* restore optimized graphql behavior

* update docs

* fix specs

* clean up TODOs

* docs revisions

* windows compat with fs.promises and URL

* develop command working

* convert to fs.promises

* build and serve refactored to fs.promises and URL

* all specs passing except for APIs

* all specs passing yay

* temp disable linting

* commands and almost all specs passing, phew

* refactor fs.access and fix lint

* restore linting to github actions

* resource and utils refactoring

* TODOs cleanup and tracking
thescientist13 committed Apr 9, 2023
1 parent bed73d3 commit 6fac45d
Showing 86 changed files with 1,797 additions and 2,019 deletions.
9 changes: 4 additions & 5 deletions greenwood.config.js
@@ -6,10 +6,9 @@ import { greenwoodPluginPolyfills } from '@greenwood/plugin-polyfills';
import { greenwoodPluginPostCss } from '@greenwood/plugin-postcss';
import { greenwoodPluginRendererPuppeteer } from '@greenwood/plugin-renderer-puppeteer';
import rollupPluginAnalyzer from 'rollup-plugin-analyzer';
import { fileURLToPath, URL } from 'url';

export default {
workspace: fileURLToPath(new URL('./www', import.meta.url)),
workspace: new URL('./www/', import.meta.url),
optimization: 'inline',
staticRouter: true,
interpolateFrontmatter: true,
@@ -19,6 +18,8 @@ export default {
greenwoodPluginPostCss(),
greenwoodPluginImportJson(),
greenwoodPluginImportCss(),
greenwoodPluginIncludeHTML(),
greenwoodPluginRendererPuppeteer(),
{
type: 'rollup',
name: 'rollup-plugin-analyzer',
@@ -32,9 +33,7 @@ export default {
})
];
}
},
greenwoodPluginIncludeHTML(),
greenwoodPluginRendererPuppeteer()
}
],
markdown: {
plugins: [
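This config change captures the broader theme of the PR: directory options like `workspace` are now passed as WHATWG URL instances instead of `fileURLToPath` strings. A minimal sketch of a config under the new convention (only the workspace option is shown; note the trailing slash so relative resolution treats it as a directory):

```js
// greenwood.config.js — minimal sketch; only the workspace option is shown
export default {
  // a file:// URL pointing at <project root>/www/, replacing the old
  // fileURLToPath(new URL('./www', import.meta.url)) string form
  workspace: new URL('./www/', import.meta.url)
};
```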
13 changes: 8 additions & 5 deletions packages/cli/src/commands/build.js
@@ -1,6 +1,7 @@
import { bundleCompilation } from '../lifecycles/bundle.js';
import { checkResourceExists } from '../lib/resource-utils.js';
import { copyAssets } from '../lifecycles/copy.js';
import fs from 'fs';
import fs from 'fs/promises';
import { preRenderCompilationWorker, preRenderCompilationCustom, staticRenderCompilation } from '../lifecycles/prerender.js';
import { ServerInterface } from '../lib/server-interface.js';

@@ -11,12 +12,14 @@ const runProductionBuild = async (compilation) => {
try {
const { prerender } = compilation.config;
const outputDir = compilation.context.outputDir;
const prerenderPlugin = (compilation.config.plugins.filter(plugin => plugin.type === 'renderer') || []).length === 1
? compilation.config.plugins.filter(plugin => plugin.type === 'renderer')[0].provider(compilation)
const prerenderPlugin = compilation.config.plugins.find(plugin => plugin.type === 'renderer')
? compilation.config.plugins.find(plugin => plugin.type === 'renderer').provider(compilation)
: {};

if (!fs.existsSync(outputDir)) {
fs.mkdirSync(outputDir);
if (!await checkResourceExists(outputDir)) {
await fs.mkdir(outputDir, {
recursive: true
});
}

if (prerender || prerenderPlugin.prerender) {
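The build command now uses `checkResourceExists` from `lib/resource-utils.js` together with `fs/promises` in place of the synchronous `fs.existsSync`/`fs.mkdirSync` calls. That helper is not part of this excerpt, but a minimal sketch of what an async existence check over a file URL could look like is below; the real implementation may differ.

```js
import fs from 'fs/promises';

// hypothetical stand-in for checkResourceExists from lib/resource-utils.js:
// probe the URL with fs.access and report whether it exists
async function checkResourceExists(url) {
  try {
    await fs.access(url);
    return true;
  } catch {
    return false;
  }
}

// usage mirroring the build command above
// if (!await checkResourceExists(outputDir)) {
//   await fs.mkdir(outputDir, { recursive: true });
// }
```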
20 changes: 9 additions & 11 deletions packages/cli/src/commands/eject.js
@@ -1,21 +1,19 @@
import fs from 'fs';
import path from 'path';
import { fileURLToPath, URL } from 'url';
import fs from 'fs/promises';

const ejectConfiguration = async (compilation) => {
return new Promise(async (resolve, reject) => {
try {
const configFilePath = fileURLToPath(new URL('../config', import.meta.url));
const configFiles = fs.readdirSync(configFilePath);
const configFileDirUrl = new URL('../config/', import.meta.url);
const configFiles = await fs.readdir(configFileDirUrl);

configFiles.forEach((configFile) => {
const from = path.join(configFilePath, configFile);
const to = `${compilation.context.projectDirectory}/${configFile}`;
for (const file of configFiles) {
const from = new URL(`./${file}`, configFileDirUrl);
const to = new URL(`./${file}`, compilation.context.projectDirectory);

fs.copyFileSync(from, to);
await fs.copyFile(from, to);

console.log(`Ejected ${configFile} successfully.`);
});
console.log(`Ejected ${file} successfully.`);
}

console.debug('all configuration files ejected.');

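The eject refactor replaces `path.join` with WHATWG URL resolution, which is why the base is built as `new URL('../config/', import.meta.url)` with a trailing slash. A short illustration of the resolution behavior being relied on (the paths here are made up):

```js
// with a trailing slash the base is treated as a directory,
// so relative specifiers resolve inside it
new URL('./rollup.config.js', 'file:///project/src/config/').href;
// -> file:///project/src/config/rollup.config.js

// without the trailing slash the last segment is dropped during resolution
new URL('./rollup.config.js', 'file:///project/src/config').href;
// -> file:///project/src/rollup.config.js
```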
80 changes: 40 additions & 40 deletions packages/cli/src/config/rollup.config.js
@@ -1,5 +1,5 @@
import fs from 'fs';
import path from 'path';
import fs from 'fs/promises';
import { checkResourceExists, normalizePathnameForWindows, resolveForRelativeUrl } from '../lib/resource-utils.js';

function greenwoodResourceLoader (compilation) {
const resourcePlugins = compilation.config.plugins.filter((plugin) => {
@@ -10,43 +10,42 @@ function greenwoodResourceLoader (compilation) {

return {
name: 'greenwood-resource-loader',
resolveId(id) {
const { userWorkspace } = compilation.context;
async resolveId(id) {
const normalizedId = id.replace(/\?type=(.*)/, '');
const { projectDirectory, userWorkspace } = compilation.context;

if ((id.indexOf('./') === 0 || id.indexOf('/') === 0) && fs.existsSync(path.join(userWorkspace, id))) {
return path.join(userWorkspace, id.replace(/\?type=(.*)/, ''));
}
if (id.startsWith('.') || id.startsWith('/')) {
const prefix = id.startsWith('/') ? '.' : '';
const contextUrl = id.indexOf('/node_modules/') >= 0 ? projectDirectory : userWorkspace;
const userWorkspaceUrl = await resolveForRelativeUrl(new URL(`${prefix}${normalizedId}`, contextUrl), contextUrl);

return null;
if (await checkResourceExists(userWorkspaceUrl)) {
return normalizePathnameForWindows(userWorkspaceUrl);
}
}
},
async load(id) {
const importAsIdAsUrl = id.replace(/\?type=(.*)/, '');
const extension = path.extname(importAsIdAsUrl);
const pathname = id.indexOf('?') >= 0 ? id.slice(0, id.indexOf('?')) : id;
const extension = pathname.split('.').pop();

if (extension !== '.js') {
const originalUrl = `${id}?type=${extension.replace('.', '')}`;
let contents;
if (extension !== '' && extension !== 'js') {
const url = new URL(`file://${pathname}?type=${extension}`);
const request = new Request(url.href);
let response = new Response('');

for (const plugin of resourcePlugins) {
const headers = {
request: {
originalUrl
},
response: {
'content-type': plugin.contentType
}
};

contents = await plugin.shouldServe(importAsIdAsUrl)
? (await plugin.serve(importAsIdAsUrl)).body
: contents;
if (plugin.shouldServe && await plugin.shouldServe(url, request)) {
response = await plugin.serve(url, request);
}
}

if (await plugin.shouldIntercept(importAsIdAsUrl, contents, headers)) {
contents = (await plugin.intercept(importAsIdAsUrl, contents, headers)).body;
for (const plugin of resourcePlugins) {
if (plugin.shouldIntercept && await plugin.shouldIntercept(url, request, response.clone())) {
response = await plugin.intercept(url, request, response.clone());
}
}

return contents;
return await response.text();
}
}
};
@@ -55,15 +54,14 @@ function greenwoodResourceLoader (compilation) {
function greenwoodSyncPageResourceBundlesPlugin(compilation) {
return {
name: 'greenwood-sync-page-resource-bundles-plugin',
writeBundle(outputOptions, bundles) {
async writeBundle(outputOptions, bundles) {
const { outputDir } = compilation.context;

for (const resource of compilation.resources.values()) {
const resourceKey = resource.sourcePathURL.pathname;
const resourceKey = normalizePathnameForWindows(resource.sourcePathURL);

for (const bundle in bundles) {
let facadeModuleId = (bundles[bundle].facadeModuleId || '').replace(/\\/g, '/');

/*
* this is an odd issue related to symlinking in our Greenwood monorepo when building the website
* and managing packages that we create as "virtual" modules, like for the mpa router
@@ -82,25 +80,27 @@ function greenwoodSyncPageResourceBundlesPlugin(compilation) {
* pathToMatch (before): /node_modules/@greenwood/cli/src/lib/router.js
* pathToMatch (after): /cli/src/lib/router.js
*/
if (facadeModuleId && resourceKey.indexOf('/node_modules/@greenwood/cli') > 0 && facadeModuleId.indexOf('/packages/cli') > 0 && fs.existsSync(facadeModuleId)) {
facadeModuleId = facadeModuleId.replace('/packages/cli', '/node_modules/@greenwood/cli');
if (resourceKey?.indexOf('/node_modules/@greenwood/cli') > 0 && facadeModuleId?.indexOf('/packages/cli') > 0) {
if (await checkResourceExists(new URL(`file://${facadeModuleId}`))) {
facadeModuleId = facadeModuleId.replace('/packages/cli', '/node_modules/@greenwood/cli');
}
}

if (resourceKey === facadeModuleId) {
const { fileName } = bundles[bundle];
const { rawAttributes, contents } = resource;
const noop = rawAttributes && rawAttributes.indexOf('data-gwd-opt="none"') >= 0 || compilation.config.optimization === 'none';
const outputPath = path.join(outputDir, fileName);
const outputPath = new URL(`./${fileName}`, outputDir);

compilation.resources.set(resourceKey, {
...compilation.resources.get(resourceKey),
compilation.resources.set(resource.sourcePathURL.pathname, {
...compilation.resources.get(resource.sourcePathURL.pathname),
optimizedFileName: fileName,
optimizedFileContents: fs.readFileSync(outputPath, 'utf-8'),
optimizedFileContents: await fs.readFile(outputPath, 'utf-8'),
contents: contents.replace(/\.\//g, '/')
});

if (noop) {
fs.writeFileSync(outputPath, contents);
await fs.writeFile(outputPath, contents);
}
}
}
@@ -113,7 +113,7 @@ const getRollupConfig = async (compilation) => {
const { outputDir } = compilation.context;
const input = [...compilation.resources.values()]
.filter(resource => resource.type === 'script')
.map(resource => resource.sourcePathURL.pathname);
.map(resource => normalizePathnameForWindows(resource.sourcePathURL));
const customRollupPlugins = compilation.config.plugins.filter(plugin => {
return plugin.type === 'rollup';
}).map(plugin => {
Expand All @@ -124,7 +124,7 @@ const getRollupConfig = async (compilation) => {
preserveEntrySignatures: 'strict', // https://github.com/ProjectEvergreen/greenwood/pull/990
input,
output: {
dir: outputDir,
dir: normalizePathnameForWindows(outputDir),
entryFileNames: '[name].[hash].js',
chunkFileNames: '[name].[hash].js',
sourcemap: true
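The rewritten loader above invokes resource plugins through the standardized signatures introduced by this PR: `shouldServe`/`serve` receive a URL and a Request, `shouldIntercept`/`intercept` additionally receive a clone of the current Response, and `serve`/`intercept` return a Response. A minimal sketch of a plugin instance written against that shape (the file type and transform are illustrative, not taken from this changeset):

```js
import fs from 'fs/promises';

// illustrative resource plugin instance using the URL / Request / Response lifecycle
class ExampleTextResource {
  constructor(compilation, options = {}) {
    this.compilation = compilation;
    this.options = options;
    this.extensions = ['txt'];
    this.contentType = 'text/plain';
  }

  async shouldServe(url) {
    return url.protocol === 'file:' && url.pathname.split('.').pop() === 'txt';
  }

  async serve(url) {
    const body = await fs.readFile(url, 'utf-8');

    return new Response(body, {
      headers: new Headers({ 'Content-Type': this.contentType })
    });
  }

  async shouldIntercept(url, request, response) {
    return (response.headers.get('Content-Type') || '').startsWith(this.contentType);
  }

  async intercept(url, request, response) {
    const body = await response.text();

    // trivial transform just to show a new Response being returned
    return new Response(body.trim(), { headers: response.headers });
  }
}
```

Plugins that do not define a given lifecycle are simply skipped, which is why the loader guards each call with `plugin.shouldServe && ...` and `plugin.shouldIntercept && ...`.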
4 changes: 2 additions & 2 deletions packages/cli/src/index.js
@@ -6,11 +6,11 @@
process.setMaxListeners(0);

import { generateCompilation } from './lifecycles/compile.js';
import fs from 'fs';
import fs from 'fs/promises';
import program from 'commander';
import { URL } from 'url';

const greenwoodPackageJson = JSON.parse(await fs.promises.readFile(new URL('../package.json', import.meta.url), 'utf-8'));
const greenwoodPackageJson = JSON.parse(await fs.readFile(new URL('../package.json', import.meta.url), 'utf-8'));
let cmdOption = {};
let command = '';

24 changes: 20 additions & 4 deletions packages/cli/src/lib/node-modules-utils.js
@@ -1,6 +1,8 @@
// TODO convert this to use / return URLs
// https://github.com/ProjectEvergreen/greenwood/issues/953
import { createRequire } from 'module'; // https://stackoverflow.com/a/62499498/417806
import fs from 'fs';
import path from 'path';
import { checkResourceExists } from '../lib/resource-utils.js';
import fs from 'fs/promises';

// defer to NodeJS to find where on disk a package is located using import.meta.resolve
// and return the root absolute location
@@ -35,14 +37,14 @@ async function getNodeModulesLocationForPackage(packageName) {
const nodeModulesPackageRoot = `${locations[location]}/${packageName}`;
const packageJsonLocation = `${nodeModulesPackageRoot}/package.json`;

if (fs.existsSync(packageJsonLocation)) {
if (await checkResourceExists(new URL(`file://${packageJsonLocation}`))) {
nodeModulesUrl = nodeModulesPackageRoot;
}
}

if (!nodeModulesUrl) {
console.debug(`Unable to look up ${packageName} using NodeJS require.resolve. Falling back to process.cwd()`);
nodeModulesUrl = path.join(process.cwd(), 'node_modules', packageName); // force / for consistency and path matching);
nodeModulesUrl = new URL(`./node_modules/${packageName}`, `file://${process.cwd()}`).pathname;
}
}

@@ -62,7 +64,21 @@ function getPackageNameFromUrl(url) {
return packageName;
}

async function getPackageJson({ userWorkspace, projectDirectory }) {
const monorepoPackageJsonUrl = new URL('./package.json', userWorkspace);
const topLevelPackageJsonUrl = new URL('./package.json', projectDirectory);
const hasMonorepoPackageJson = await checkResourceExists(monorepoPackageJsonUrl);
const hasTopLevelPackageJson = await checkResourceExists(topLevelPackageJsonUrl);

return hasMonorepoPackageJson // handle monorepos first
? JSON.parse(await fs.readFile(monorepoPackageJsonUrl, 'utf-8'))
: hasTopLevelPackageJson
? JSON.parse(await fs.readFile(topLevelPackageJsonUrl, 'utf-8'))
: {};
}

export {
getNodeModulesLocationForPackage,
getPackageJson,
getPackageNameFromUrl
};
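The new `getPackageJson` helper prefers a workspace-level package.json (the monorepo case) before falling back to the project root, returning an empty object if neither exists. A hedged usage sketch, assuming `userWorkspace` and `projectDirectory` are directory URLs as they are elsewhere in this changeset:

```js
import { getPackageJson } from './node-modules-utils.js';

// hypothetical context shaped like compilation.context in the CLI
const context = {
  userWorkspace: new URL('./www/', import.meta.url),
  projectDirectory: new URL('./', import.meta.url)
};

const { name = '(no package.json found)', dependencies = {} } = await getPackageJson(context);

console.log(name, Object.keys(dependencies));
```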