From c1a1aa4ae2e0ba5d7c4ba023686c29d592c60c87 Mon Sep 17 00:00:00 2001 From: Tim Neutkens Date: Sat, 4 Dec 2021 22:59:32 +0100 Subject: [PATCH] Add test for TailwindCSS JIT mode reloading (#32130) Test for https://github.com/tailwindlabs/tailwindcss/discussions/6265 Issue started with https://github.com/vercel/next.js/pull/31798 ## Bug - [ ] Related issues linked using `fixes #number` - [ ] Integration tests added - [ ] Errors have helpful link attached, see `contributing.md` ## Feature - [ ] Implements an existing feature request or RFC. Make sure the feature request has been accepted for implementation before opening a PR. - [ ] Related issues linked using `fixes #number` - [ ] Integration tests added - [ ] Documentation added - [ ] Telemetry added. In case of a feature if it's used or not. - [ ] Errors have helpful link attached, see `contributing.md` ## Documentation / Examples - [ ] Make sure the linting passes by running `yarn lint` --- packages/next/compiled/webpack/bundle5.js | 15962 ++++++++-------- packages/next/package.json | 2 +- test/development/basic/tailwind-jit.test.ts | 65 + .../basic/tailwind-jit/pages/_app.js | 7 + .../basic/tailwind-jit/pages/index.js | 63 + .../basic/tailwind-jit/postcss.config.js | 7 + .../basic/tailwind-jit/tailwind.config.js | 12 + yarn.lock | 27 +- 8 files changed, 8076 insertions(+), 8069 deletions(-) create mode 100644 test/development/basic/tailwind-jit.test.ts create mode 100644 test/development/basic/tailwind-jit/pages/_app.js create mode 100644 test/development/basic/tailwind-jit/pages/index.js create mode 100644 test/development/basic/tailwind-jit/postcss.config.js create mode 100644 test/development/basic/tailwind-jit/tailwind.config.js diff --git a/packages/next/compiled/webpack/bundle5.js b/packages/next/compiled/webpack/bundle5.js index c88b374d6fa61..be607621676f6 100644 --- a/packages/next/compiled/webpack/bundle5.js +++ b/packages/next/compiled/webpack/bundle5.js @@ -38,7 +38,7 @@ module.exports = JSON.parse("{\"application/1d-interleaved-parityfec\":{\"source /***/ (function(module) { "use strict"; -module.exports = {"i8":"5.64.4"}; +module.exports = {"i8":"5.64.3"}; /***/ }), @@ -22629,7 +22629,7 @@ var __classPrivateFieldSet; /***/ }), -/***/ 76595: +/***/ 46806: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -22639,788 +22639,802 @@ var __classPrivateFieldSet; */ -const EventEmitter = __webpack_require__(28614).EventEmitter; -const fs = __webpack_require__(82161); -const path = __webpack_require__(85622); -const watchEventSource = __webpack_require__(72511); +const RuntimeGlobals = __webpack_require__(49404); +const WebpackError = __webpack_require__(68422); +const ConstDependency = __webpack_require__(60864); +const BasicEvaluatedExpression = __webpack_require__(2412); +const { + toConstantDependency, + evaluateToString +} = __webpack_require__(28723); +const ChunkNameRuntimeModule = __webpack_require__(43818); +const GetFullHashRuntimeModule = __webpack_require__(28454); -const EXISTANCE_ONLY_TIME_ENTRY = Object.freeze({}); +/** @typedef {import("./Compiler")} Compiler */ +/** @typedef {import("./javascript/JavascriptParser")} JavascriptParser */ -let FS_ACCURACY = 1000; +/* eslint-disable camelcase */ +const REPLACEMENTS = { + __webpack_require__: { + expr: RuntimeGlobals.require, + req: [RuntimeGlobals.require], + type: "function", + assign: false + }, + __webpack_public_path__: { + expr: RuntimeGlobals.publicPath, + req: [RuntimeGlobals.publicPath], + type: "string", + assign: true + 
}, + __webpack_base_uri__: { + expr: RuntimeGlobals.baseURI, + req: [RuntimeGlobals.baseURI], + type: "string", + assign: true + }, + __webpack_modules__: { + expr: RuntimeGlobals.moduleFactories, + req: [RuntimeGlobals.moduleFactories], + type: "object", + assign: false + }, + __webpack_chunk_load__: { + expr: RuntimeGlobals.ensureChunk, + req: [RuntimeGlobals.ensureChunk], + type: "function", + assign: true + }, + __non_webpack_require__: { + expr: "require", + req: null, + type: undefined, // type is not known, depends on environment + assign: true + }, + __webpack_nonce__: { + expr: RuntimeGlobals.scriptNonce, + req: [RuntimeGlobals.scriptNonce], + type: "string", + assign: true + }, + __webpack_hash__: { + expr: `${RuntimeGlobals.getFullHash}()`, + req: [RuntimeGlobals.getFullHash], + type: "string", + assign: false + }, + __webpack_chunkname__: { + expr: RuntimeGlobals.chunkName, + req: [RuntimeGlobals.chunkName], + type: "string", + assign: false + }, + __webpack_get_script_filename__: { + expr: RuntimeGlobals.getChunkScriptFilename, + req: [RuntimeGlobals.getChunkScriptFilename], + type: "function", + assign: true + }, + __webpack_runtime_id__: { + expr: RuntimeGlobals.runtimeId, + req: [RuntimeGlobals.runtimeId], + assign: false + }, + "require.onError": { + expr: RuntimeGlobals.uncaughtErrorHandler, + req: [RuntimeGlobals.uncaughtErrorHandler], + type: undefined, // type is not known, could be function or undefined + assign: true // is never a pattern + }, + __system_context__: { + expr: RuntimeGlobals.systemContext, + req: [RuntimeGlobals.systemContext], + type: "object", + assign: false + }, + __webpack_share_scopes__: { + expr: RuntimeGlobals.shareScopeMap, + req: [RuntimeGlobals.shareScopeMap], + type: "object", + assign: false + }, + __webpack_init_sharing__: { + expr: RuntimeGlobals.initializeSharing, + req: [RuntimeGlobals.initializeSharing], + type: "function", + assign: true + } +}; +/* eslint-enable camelcase */ -const IS_OSX = __webpack_require__(12087).platform() === "darwin"; -const WATCHPACK_POLLING = process.env.WATCHPACK_POLLING; -const FORCE_POLLING = - `${+WATCHPACK_POLLING}` === WATCHPACK_POLLING - ? 
+WATCHPACK_POLLING - : !!WATCHPACK_POLLING && WATCHPACK_POLLING !== "false"; +class APIPlugin { + /** + * Apply the plugin + * @param {Compiler} compiler the compiler instance + * @returns {void} + */ + apply(compiler) { + compiler.hooks.compilation.tap( + "APIPlugin", + (compilation, { normalModuleFactory }) => { + compilation.dependencyTemplates.set( + ConstDependency, + new ConstDependency.Template() + ); -function withoutCase(str) { - return str.toLowerCase(); + compilation.hooks.runtimeRequirementInTree + .for(RuntimeGlobals.chunkName) + .tap("APIPlugin", chunk => { + compilation.addRuntimeModule( + chunk, + new ChunkNameRuntimeModule(chunk.name) + ); + return true; + }); + + compilation.hooks.runtimeRequirementInTree + .for(RuntimeGlobals.getFullHash) + .tap("APIPlugin", (chunk, set) => { + compilation.addRuntimeModule(chunk, new GetFullHashRuntimeModule()); + return true; + }); + + /** + * @param {JavascriptParser} parser the parser + */ + const handler = parser => { + Object.keys(REPLACEMENTS).forEach(key => { + const info = REPLACEMENTS[key]; + parser.hooks.expression + .for(key) + .tap( + "APIPlugin", + toConstantDependency(parser, info.expr, info.req) + ); + if (info.assign === false) { + parser.hooks.assign.for(key).tap("APIPlugin", expr => { + const err = new WebpackError(`${key} must not be assigned`); + err.loc = expr.loc; + throw err; + }); + } + if (info.type) { + parser.hooks.evaluateTypeof + .for(key) + .tap("APIPlugin", evaluateToString(info.type)); + } + }); + + parser.hooks.expression + .for("__webpack_layer__") + .tap("APIPlugin", expr => { + const dep = new ConstDependency( + JSON.stringify(parser.state.module.layer), + expr.range + ); + dep.loc = expr.loc; + parser.state.module.addPresentationalDependency(dep); + return true; + }); + parser.hooks.evaluateIdentifier + .for("__webpack_layer__") + .tap("APIPlugin", expr => + (parser.state.module.layer === null + ? new BasicEvaluatedExpression().setNull() + : new BasicEvaluatedExpression().setString( + parser.state.module.layer + ) + ).setRange(expr.range) + ); + parser.hooks.evaluateTypeof + .for("__webpack_layer__") + .tap("APIPlugin", expr => + new BasicEvaluatedExpression() + .setString( + parser.state.module.layer === null ? "object" : "string" + ) + .setRange(expr.range) + ); + }; + + normalModuleFactory.hooks.parser + .for("javascript/auto") + .tap("APIPlugin", handler); + normalModuleFactory.hooks.parser + .for("javascript/dynamic") + .tap("APIPlugin", handler); + normalModuleFactory.hooks.parser + .for("javascript/esm") + .tap("APIPlugin", handler); + } + ); + } } -function needCalls(times, callback) { - return function() { - if (--times === 0) { - return callback(); - } - }; +module.exports = APIPlugin; + + +/***/ }), + +/***/ 570: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Ivan Kopeykin @vankop +*/ + + + +const WebpackError = __webpack_require__(68422); +const CURRENT_METHOD_REGEXP = /at ([a-zA-Z0-9_.]*)/; + +/** + * @param {string=} method method name + * @returns {string} message + */ +function createMessage(method) { + return `Abstract method${method ? " " + method : ""}. 
Must be overridden.`; } -class Watcher extends EventEmitter { - constructor(directoryWatcher, filePath, startTime) { - super(); - this.directoryWatcher = directoryWatcher; - this.path = filePath; - this.startTime = startTime && +startTime; - } +/** + * @constructor + */ +function Message() { + /** @type {string} */ + this.stack = undefined; + Error.captureStackTrace(this); + /** @type {RegExpMatchArray} */ + const match = this.stack.split("\n")[3].match(CURRENT_METHOD_REGEXP); - checkStartTime(mtime, initial) { - const startTime = this.startTime; - if (typeof startTime !== "number") return !initial; - return startTime <= mtime; - } + this.message = match && match[1] ? createMessage(match[1]) : createMessage(); +} - close() { - this.emit("closed"); +/** + * Error for abstract method + * @example + * class FooClass { + * abstractMethod() { + * throw new AbstractMethodError(); // error message: Abstract method FooClass.abstractMethod. Must be overridden. + * } + * } + * + */ +class AbstractMethodError extends WebpackError { + constructor() { + super(new Message().message); + this.name = "AbstractMethodError"; } } -class DirectoryWatcher extends EventEmitter { - constructor(watcherManager, directoryPath, options) { +module.exports = AbstractMethodError; + + +/***/ }), + +/***/ 11315: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + + +const DependenciesBlock = __webpack_require__(21484); +const makeSerializable = __webpack_require__(26522); + +/** @typedef {import("./ChunkGraph")} ChunkGraph */ +/** @typedef {import("./ChunkGroup")} ChunkGroup */ +/** @typedef {import("./ChunkGroup").ChunkGroupOptions} ChunkGroupOptions */ +/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ +/** @typedef {import("./Dependency").UpdateHashContext} UpdateHashContext */ +/** @typedef {import("./Entrypoint").EntryOptions} EntryOptions */ +/** @typedef {import("./Module")} Module */ +/** @typedef {import("./util/Hash")} Hash */ + +class AsyncDependenciesBlock extends DependenciesBlock { + /** + * @param {ChunkGroupOptions & { entryOptions?: EntryOptions }} groupOptions options for the group + * @param {DependencyLocation=} loc the line of code + * @param {string=} request the request + */ + constructor(groupOptions, loc, request) { super(); - if (FORCE_POLLING) { - options.poll = FORCE_POLLING; + if (typeof groupOptions === "string") { + groupOptions = { name: groupOptions }; + } else if (!groupOptions) { + groupOptions = { name: undefined }; } - this.watcherManager = watcherManager; - this.options = options; - this.path = directoryPath; - // safeTime is the point in time after which reading is safe to be unchanged - // timestamp is a value that should be compared with another timestamp (mtime) - /** @type {Map} */ - this.filesWithoutCase = new Map(); - this.directories = new Map(); - this.lastWatchEvent = 0; - this.initialScan = true; - this.ignored = options.ignored || (() => false); - this.nestedWatching = false; - this.polledWatching = - typeof options.poll === "number" - ? options.poll - : options.poll - ? 
5007 - : false; - this.timeout = undefined; - this.initialScanRemoved = new Set(); - this.initialScanFinished = undefined; - /** @type {Map>} */ - this.watchers = new Map(); - this.parentWatcher = null; - this.refs = 0; - this._activeEvents = new Map(); - this.closed = false; - this.scanning = false; - this.scanAgain = false; - this.scanAgainInitial = false; + this.groupOptions = groupOptions; + this.loc = loc; + this.request = request; + this._stringifiedGroupOptions = undefined; + } - this.createWatcher(); - this.doScan(true); + /** + * @returns {string} The name of the chunk + */ + get chunkName() { + return this.groupOptions.name; } - createWatcher() { - try { - if (this.polledWatching) { - this.watcher = { - close: () => { - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = undefined; - } - } - }; - } else { - if (IS_OSX) { - this.watchInParentDirectory(); - } - this.watcher = watchEventSource.watch(this.path); - this.watcher.on("change", this.onWatchEvent.bind(this)); - this.watcher.on("error", this.onWatcherError.bind(this)); - } - } catch (err) { - this.onWatcherError(err); + /** + * @param {string} value The new chunk name + * @returns {void} + */ + set chunkName(value) { + if (this.groupOptions.name !== value) { + this.groupOptions.name = value; + this._stringifiedGroupOptions = undefined; } } - forEachWatcher(path, fn) { - const watchers = this.watchers.get(withoutCase(path)); - if (watchers !== undefined) { - for (const w of watchers) { - fn(w); - } + /** + * @param {Hash} hash the hash used to track dependencies + * @param {UpdateHashContext} context context + * @returns {void} + */ + updateHash(hash, context) { + const { chunkGraph } = context; + if (this._stringifiedGroupOptions === undefined) { + this._stringifiedGroupOptions = JSON.stringify(this.groupOptions); } + const chunkGroup = chunkGraph.getBlockChunkGroup(this); + hash.update( + `${this._stringifiedGroupOptions}${chunkGroup ? 
chunkGroup.id : ""}` + ); + super.updateHash(hash, context); } - setMissing(itemPath, initial, type) { - if (this.initialScan) { - this.initialScanRemoved.add(itemPath); - } + serialize(context) { + const { write } = context; + write(this.groupOptions); + write(this.loc); + write(this.request); + super.serialize(context); + } - const oldDirectory = this.directories.get(itemPath); - if (oldDirectory) { - if (this.nestedWatching) oldDirectory.close(); - this.directories.delete(itemPath); + deserialize(context) { + const { read } = context; + this.groupOptions = read(); + this.loc = read(); + this.request = read(); + super.deserialize(context); + } +} - this.forEachWatcher(itemPath, w => w.emit("remove", type)); - if (!initial) { - this.forEachWatcher(this.path, w => - w.emit("change", itemPath, null, type, initial) - ); - } - } +makeSerializable(AsyncDependenciesBlock, "webpack/lib/AsyncDependenciesBlock"); - const oldFile = this.files.get(itemPath); - if (oldFile) { - this.files.delete(itemPath); - const key = withoutCase(itemPath); - const count = this.filesWithoutCase.get(key) - 1; - if (count <= 0) { - this.filesWithoutCase.delete(key); - this.forEachWatcher(itemPath, w => w.emit("remove", type)); - } else { - this.filesWithoutCase.set(key, count); - } +Object.defineProperty(AsyncDependenciesBlock.prototype, "module", { + get() { + throw new Error( + "module property was removed from AsyncDependenciesBlock (it's not needed)" + ); + }, + set() { + throw new Error( + "module property was removed from AsyncDependenciesBlock (it's not needed)" + ); + } +}); - if (!initial) { - this.forEachWatcher(this.path, w => - w.emit("change", itemPath, null, type, initial) - ); - } - } +module.exports = AsyncDependenciesBlock; + + +/***/ }), + +/***/ 69073: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Sean Larkin @thelarkinn +*/ + + + +const WebpackError = __webpack_require__(68422); + +/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ +/** @typedef {import("./Module")} Module */ + +class AsyncDependencyToInitialChunkError extends WebpackError { + /** + * Creates an instance of AsyncDependencyToInitialChunkError. + * @param {string} chunkName Name of Chunk + * @param {Module} module module tied to dependency + * @param {DependencyLocation} loc location of dependency + */ + constructor(chunkName, module, loc) { + super( + `It's not allowed to load an initial chunk on demand. The chunk name "${chunkName}" is already used by an entrypoint.` + ); + + this.name = "AsyncDependencyToInitialChunkError"; + this.module = module; + this.loc = loc; } +} - setFileTime(filePath, mtime, initial, ignoreWhenEqual, type) { - const now = Date.now(); +module.exports = AsyncDependencyToInitialChunkError; - if (this.ignored(filePath)) return; - const old = this.files.get(filePath); +/***/ }), - let safeTime, accuracy; - if (initial) { - safeTime = Math.min(now, mtime) + FS_ACCURACY; - accuracy = FS_ACCURACY; - } else { - safeTime = now; - accuracy = 0; +/***/ 51714: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - if (old && old.timestamp === mtime && mtime + FS_ACCURACY < now - 1000) { - // We are sure that mtime is untouched - // This can be caused by some file attribute change - // e. g. 
when access time has been changed - // but the file content is untouched - return; - } - } +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ - if (ignoreWhenEqual && old && old.timestamp === mtime) return; - this.files.set(filePath, { - safeTime, - accuracy, - timestamp: mtime - }); - if (!old) { - const key = withoutCase(filePath); - const count = this.filesWithoutCase.get(key); - this.filesWithoutCase.set(key, (count || 0) + 1); - if (count !== undefined) { - // There is already a file with case-insensitive-equal name - // On a case-insensitive filesystem we may miss the renaming - // when only casing is changed. - // To be sure that our information is correct - // we trigger a rescan here - this.doScan(false); +const asyncLib = __webpack_require__(36386); +const NormalModule = __webpack_require__(11026); +const PrefetchDependency = __webpack_require__(39986); + +/** @typedef {import("./Compiler")} Compiler */ + +class AutomaticPrefetchPlugin { + /** + * Apply the plugin + * @param {Compiler} compiler the compiler instance + * @returns {void} + */ + apply(compiler) { + compiler.hooks.compilation.tap( + "AutomaticPrefetchPlugin", + (compilation, { normalModuleFactory }) => { + compilation.dependencyFactories.set( + PrefetchDependency, + normalModuleFactory + ); } + ); + let lastModules = null; + compiler.hooks.afterCompile.tap("AutomaticPrefetchPlugin", compilation => { + lastModules = []; - this.forEachWatcher(filePath, w => { - if (!initial || w.checkStartTime(safeTime, initial)) { - w.emit("change", mtime, type); + for (const m of compilation.modules) { + if (m instanceof NormalModule) { + lastModules.push({ + context: m.context, + request: m.request + }); } - }); - } else if (!initial) { - this.forEachWatcher(filePath, w => w.emit("change", mtime, type)); - } - this.forEachWatcher(this.path, w => { - if (!initial || w.checkStartTime(safeTime, initial)) { - w.emit("change", filePath, safeTime, type, initial); } }); - } - - setDirectory(directoryPath, birthtime, initial, type) { - if (this.ignored(directoryPath)) return; - if (directoryPath === this.path) { - if (!initial) { - this.forEachWatcher(this.path, w => - w.emit("change", directoryPath, birthtime, type, initial) + compiler.hooks.make.tapAsync( + "AutomaticPrefetchPlugin", + (compilation, callback) => { + if (!lastModules) return callback(); + asyncLib.forEach( + lastModules, + (m, callback) => { + compilation.addModuleChain( + m.context || compiler.context, + new PrefetchDependency(`!!${m.request}`), + callback + ); + }, + err => { + lastModules = null; + callback(err); + } ); } - } else { - const old = this.directories.get(directoryPath); - if (!old) { - const now = Date.now(); + ); + } +} +module.exports = AutomaticPrefetchPlugin; - if (this.nestedWatching) { - this.createNestedWatcher(directoryPath); - } else { - this.directories.set(directoryPath, true); - } - let safeTime; - if (initial) { - safeTime = Math.min(now, birthtime) + FS_ACCURACY; - } else { - safeTime = now; - } +/***/ }), - this.forEachWatcher(directoryPath, w => { - if (!initial || w.checkStartTime(safeTime, false)) { - w.emit("change", birthtime, type); - } - }); - this.forEachWatcher(this.path, w => { - if (!initial || w.checkStartTime(safeTime, initial)) { - w.emit("change", directoryPath, safeTime, type, initial); - } - }); - } - } - } +/***/ 45480: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - createNestedWatcher(directoryPath) { - const watcher = 
this.watcherManager.watchDirectory(directoryPath, 1); - watcher.on("change", (filePath, mtime, type, initial) => { - this.forEachWatcher(this.path, w => { - if (!initial || w.checkStartTime(mtime, initial)) { - w.emit("change", filePath, mtime, type, initial); - } - }); - }); - this.directories.set(directoryPath, watcher); +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + + +const { ConcatSource } = __webpack_require__(96192); +const Compilation = __webpack_require__(59622); +const ModuleFilenameHelpers = __webpack_require__(80295); +const Template = __webpack_require__(92066); +const createSchemaValidation = __webpack_require__(77695); + +/** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginArgument} BannerPluginArgument */ +/** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginOptions} BannerPluginOptions */ +/** @typedef {import("./Compiler")} Compiler */ + +const validate = createSchemaValidation( + __webpack_require__(79456), + () => __webpack_require__(33946), + { + name: "Banner Plugin", + baseDataPath: "options" } +); - setNestedWatching(flag) { - if (this.nestedWatching !== !!flag) { - this.nestedWatching = !!flag; - if (this.nestedWatching) { - for (const directory of this.directories.keys()) { - this.createNestedWatcher(directory); - } - } else { - for (const [directory, watcher] of this.directories) { - watcher.close(); - this.directories.set(directory, true); - } - } - } +const wrapComment = str => { + if (!str.includes("\n")) { + return Template.toComment(str); } + return `/*!\n * ${str + .replace(/\*\//g, "* /") + .split("\n") + .join("\n * ") + .replace(/\s+\n/g, "\n") + .trimRight()}\n */`; +}; - watch(filePath, startTime) { - const key = withoutCase(filePath); - let watchers = this.watchers.get(key); - if (watchers === undefined) { - watchers = new Set(); - this.watchers.set(key, watchers); +class BannerPlugin { + /** + * @param {BannerPluginArgument} options options object + */ + constructor(options) { + if (typeof options === "string" || typeof options === "function") { + options = { + banner: options + }; } - this.refs++; - const watcher = new Watcher(this, filePath, startTime); - watcher.on("closed", () => { - if (--this.refs <= 0) { - this.close(); - return; - } - watchers.delete(watcher); - if (watchers.size === 0) { - this.watchers.delete(key); - if (this.path === filePath) this.setNestedWatching(false); - } - }); - watchers.add(watcher); - let safeTime; - if (filePath === this.path) { - this.setNestedWatching(true); - safeTime = this.lastWatchEvent; - for (const entry of this.files.values()) { - fixupEntryAccuracy(entry); - safeTime = Math.max(safeTime, entry.safeTime); - } + + validate(options); + + this.options = options; + + const bannerOption = options.banner; + if (typeof bannerOption === "function") { + const getBanner = bannerOption; + this.banner = this.options.raw + ? 
getBanner + : data => wrapComment(getBanner(data)); } else { - const entry = this.files.get(filePath); - if (entry) { - fixupEntryAccuracy(entry); - safeTime = entry.safeTime; - } else { - safeTime = 0; - } - } - if (safeTime) { - if (safeTime >= startTime) { - process.nextTick(() => { - if (this.closed) return; - if (filePath === this.path) { - watcher.emit( - "change", - filePath, - safeTime, - "watch (outdated on attach)", - true - ); - } else { - watcher.emit( - "change", - safeTime, - "watch (outdated on attach)", - true - ); - } - }); - } - } else if (this.initialScan) { - if (this.initialScanRemoved.has(filePath)) { - process.nextTick(() => { - if (this.closed) return; - watcher.emit("remove"); - }); - } - } else if ( - !this.directories.has(filePath) && - watcher.checkStartTime(this.initialScanFinished, false) - ) { - process.nextTick(() => { - if (this.closed) return; - watcher.emit("initial-missing", "watch (missing on attach)"); - }); + const banner = this.options.raw + ? bannerOption + : wrapComment(bannerOption); + this.banner = () => banner; } - return watcher; } - onWatchEvent(eventType, filename) { - if (this.closed) return; - if (!filename) { - // In some cases no filename is provided - // This seem to happen on windows - // So some event happened but we don't know which file is affected - // We have to do a full scan of the directory - this.doScan(false); - return; - } + /** + * Apply the plugin + * @param {Compiler} compiler the compiler instance + * @returns {void} + */ + apply(compiler) { + const options = this.options; + const banner = this.banner; + const matchObject = ModuleFilenameHelpers.matchObject.bind( + undefined, + options + ); - const filePath = path.join(this.path, filename); - if (this.ignored(filePath)) return; + compiler.hooks.compilation.tap("BannerPlugin", compilation => { + compilation.hooks.processAssets.tap( + { + name: "BannerPlugin", + stage: Compilation.PROCESS_ASSETS_STAGE_ADDITIONS + }, + () => { + for (const chunk of compilation.chunks) { + if (options.entryOnly && !chunk.canBeInitial()) { + continue; + } - if (this._activeEvents.get(filename) === undefined) { - this._activeEvents.set(filename, false); - const checkStats = () => { - if (this.closed) return; - this._activeEvents.set(filename, false); - fs.lstat(filePath, (err, stats) => { - if (this.closed) return; - if (this._activeEvents.get(filename) === true) { - process.nextTick(checkStats); - return; - } - this._activeEvents.delete(filename); - // ENOENT happens when the file/directory doesn't exist - // EPERM happens when the containing directory doesn't exist - if (err) { - if ( - err.code !== "ENOENT" && - err.code !== "EPERM" && - err.code !== "EBUSY" - ) { - this.onStatsError(err); - } else { - if (filename === path.basename(this.path)) { - // This may indicate that the directory itself was removed - if (!fs.existsSync(this.path)) { - this.onDirectoryRemoved("stat failed"); - } + for (const file of chunk.files) { + if (!matchObject(file)) { + continue; } + + const data = { + chunk, + filename: file + }; + + const comment = compilation.getPath(banner, data); + + compilation.updateAsset( + file, + old => new ConcatSource(comment, "\n", old) + ); } } - this.lastWatchEvent = Date.now(); - if (!stats) { - this.setMissing(filePath, false, eventType); - } else if (stats.isDirectory()) { - this.setDirectory( - filePath, - +stats.birthtime || 1, - false, - eventType - ); - } else if (stats.isFile() || stats.isSymbolicLink()) { - if (stats.mtime) { - ensureFsAccuracy(stats.mtime); - } - 
this.setFileTime( - filePath, - +stats.mtime || +stats.ctime || 1, - false, - false, - eventType - ); - } - }); - }; - process.nextTick(checkStats); - } else { - this._activeEvents.set(filename, true); - } + } + ); + }); } +} - onWatcherError(err) { - if (this.closed) return; - if (err) { - if (err.code !== "EPERM" && err.code !== "ENOENT") { - console.error("Watchpack Error (watcher): " + err); - } - this.onDirectoryRemoved("watch error"); - } - } +module.exports = BannerPlugin; - onStatsError(err) { - if (err) { - console.error("Watchpack Error (stats): " + err); - } - } - onScanError(err) { - if (err) { - console.error("Watchpack Error (initial scan): " + err); - } - this.onScanFinished(); - } +/***/ }), - onScanFinished() { - if (this.polledWatching) { - this.timeout = setTimeout(() => { - if (this.closed) return; - this.doScan(false); - }, this.polledWatching); - } - } +/***/ 99453: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - onDirectoryRemoved(reason) { - if (this.watcher) { - this.watcher.close(); - this.watcher = null; - } - this.watchInParentDirectory(); - const type = `directory-removed (${reason})`; - for (const directory of this.directories.keys()) { - this.setMissing(directory, null, type); - } - for (const file of this.files.keys()) { - this.setMissing(file, null, type); - } - } +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ - watchInParentDirectory() { - if (!this.parentWatcher) { - const parentDir = path.dirname(this.path); - // avoid watching in the root directory - // removing directories in the root directory is not supported - if (path.dirname(parentDir) === parentDir) return; - this.parentWatcher = this.watcherManager.watchFile(this.path, 1); - this.parentWatcher.on("change", (mtime, type) => { - if (this.closed) return; - // On non-osx platforms we don't need this watcher to detect - // directory removal, as an EPERM error indicates that - if ((!IS_OSX || this.polledWatching) && this.parentWatcher) { - this.parentWatcher.close(); - this.parentWatcher = null; - } - // Try to create the watcher when parent directory is found - if (!this.watcher) { - this.createWatcher(); - this.doScan(false); +const { AsyncParallelHook, AsyncSeriesBailHook, SyncHook } = __webpack_require__(34718); +const { + makeWebpackError, + makeWebpackErrorCallback +} = __webpack_require__(89935); - // directory was created so we emit an event - this.forEachWatcher(this.path, w => - w.emit("change", this.path, mtime, type, false) - ); - } - }); - this.parentWatcher.on("remove", () => { - this.onDirectoryRemoved("parent directory removed"); - }); - } - } +/** @typedef {import("./WebpackError")} WebpackError */ - doScan(initial) { - if (this.scanning) { - if (this.scanAgain) { - if (!initial) this.scanAgainInitial = false; - } else { - this.scanAgain = true; - this.scanAgainInitial = initial; - } - return; - } - this.scanning = true; - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = undefined; - } - process.nextTick(() => { - if (this.closed) return; - fs.readdir(this.path, (err, items) => { - if (this.closed) return; - if (err) { - if (err.code === "ENOENT" || err.code === "EPERM") { - this.onDirectoryRemoved("scan readdir failed"); - } else { - this.onScanError(err); - } - this.initialScan = false; - this.initialScanFinished = Date.now(); - if (initial) { - for (const watchers of this.watchers.values()) { - for (const watcher of watchers) { - if 
(watcher.checkStartTime(this.initialScanFinished, false)) { - watcher.emit( - "initial-missing", - "scan (parent directory missing in initial scan)" - ); - } - } - } - } - if (this.scanAgain) { - this.scanAgain = false; - this.doScan(this.scanAgainInitial); - } else { - this.scanning = false; - } - return; - } - const itemPaths = new Set( - items.map(item => path.join(this.path, item.normalize("NFC"))) - ); - for (const file of this.files.keys()) { - if (!itemPaths.has(file)) { - this.setMissing(file, initial, "scan (missing)"); - } - } - for (const directory of this.directories.keys()) { - if (!itemPaths.has(directory)) { - this.setMissing(directory, initial, "scan (missing)"); - } - } - if (this.scanAgain) { - // Early repeat of scan - this.scanAgain = false; - this.doScan(initial); - return; - } - const itemFinished = needCalls(itemPaths.size + 1, () => { - if (this.closed) return; - this.initialScan = false; - this.initialScanRemoved = null; - this.initialScanFinished = Date.now(); - if (initial) { - const missingWatchers = new Map(this.watchers); - missingWatchers.delete(withoutCase(this.path)); - for (const item of itemPaths) { - missingWatchers.delete(withoutCase(item)); - } - for (const watchers of missingWatchers.values()) { - for (const watcher of watchers) { - if (watcher.checkStartTime(this.initialScanFinished, false)) { - watcher.emit( - "initial-missing", - "scan (missing in initial scan)" - ); - } - } - } - } - if (this.scanAgain) { - this.scanAgain = false; - this.doScan(this.scanAgainInitial); - } else { - this.scanning = false; - this.onScanFinished(); - } - }); - for (const itemPath of itemPaths) { - fs.lstat(itemPath, (err2, stats) => { - if (this.closed) return; - if (err2) { - if ( - err2.code === "ENOENT" || - err2.code === "EPERM" || - err2.code === "EACCES" || - err2.code === "EBUSY" - ) { - this.setMissing(itemPath, initial, "scan (" + err2.code + ")"); - } else { - this.onScanError(err2); - } - itemFinished(); - return; - } - if (stats.isFile() || stats.isSymbolicLink()) { - if (stats.mtime) { - ensureFsAccuracy(stats.mtime); - } - this.setFileTime( - itemPath, - +stats.mtime || +stats.ctime || 1, - initial, - true, - "scan (file)" - ); - } else if (stats.isDirectory()) { - if (!initial || !this.directories.has(itemPath)) - this.setDirectory( - itemPath, - +stats.birthtime || 1, - initial, - "scan (dir)" - ); - } - itemFinished(); - }); - } - itemFinished(); - }); - }); - } +/** + * @typedef {Object} Etag + * @property {function(): string} toString + */ - getTimes() { - const obj = Object.create(null); - let safeTime = this.lastWatchEvent; - for (const [file, entry] of this.files) { - fixupEntryAccuracy(entry); - safeTime = Math.max(safeTime, entry.safeTime); - obj[file] = Math.max(entry.safeTime, entry.timestamp); - } - if (this.nestedWatching) { - for (const w of this.directories.values()) { - const times = w.directoryWatcher.getTimes(); - for (const file of Object.keys(times)) { - const time = times[file]; - safeTime = Math.max(safeTime, time); - obj[file] = time; - } - } - obj[this.path] = safeTime; +/** + * @template T + * @callback CallbackCache + * @param {WebpackError=} err + * @param {T=} result + * @returns {void} + */ + +/** + * @callback GotHandler + * @param {any} result + * @param {function(Error=): void} callback + * @returns {void} + */ + +const needCalls = (times, callback) => { + return err => { + if (--times === 0) { + return callback(err); } - if (!this.initialScan) { - for (const watchers of this.watchers.values()) { - for (const watcher of 
watchers) { - const path = watcher.path; - if (!Object.prototype.hasOwnProperty.call(obj, path)) { - obj[path] = null; - } - } - } + if (err && times > 0) { + times = 0; + return callback(err); } - return obj; + }; +}; + +class Cache { + constructor() { + this.hooks = { + /** @type {AsyncSeriesBailHook<[string, Etag | null, GotHandler[]], any>} */ + get: new AsyncSeriesBailHook(["identifier", "etag", "gotHandlers"]), + /** @type {AsyncParallelHook<[string, Etag | null, any]>} */ + store: new AsyncParallelHook(["identifier", "etag", "data"]), + /** @type {AsyncParallelHook<[Iterable]>} */ + storeBuildDependencies: new AsyncParallelHook(["dependencies"]), + /** @type {SyncHook<[]>} */ + beginIdle: new SyncHook([]), + /** @type {AsyncParallelHook<[]>} */ + endIdle: new AsyncParallelHook([]), + /** @type {AsyncParallelHook<[]>} */ + shutdown: new AsyncParallelHook([]) + }; } - collectTimeInfoEntries(fileTimestamps, directoryTimestamps) { - let safeTime = this.lastWatchEvent; - for (const [file, entry] of this.files) { - fixupEntryAccuracy(entry); - safeTime = Math.max(safeTime, entry.safeTime); - fileTimestamps.set(file, entry); - } - if (this.nestedWatching) { - for (const w of this.directories.values()) { - safeTime = Math.max( - safeTime, - w.directoryWatcher.collectTimeInfoEntries( - fileTimestamps, - directoryTimestamps, - safeTime - ) - ); + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @param {CallbackCache} callback signals when the value is retrieved + * @returns {void} + */ + get(identifier, etag, callback) { + const gotHandlers = []; + this.hooks.get.callAsync(identifier, etag, gotHandlers, (err, result) => { + if (err) { + callback(makeWebpackError(err, "Cache.hooks.get")); + return; } - fileTimestamps.set(this.path, EXISTANCE_ONLY_TIME_ENTRY); - directoryTimestamps.set(this.path, { - safeTime - }); - } else { - for (const dir of this.directories.keys()) { - // No additional info about this directory - directoryTimestamps.set(dir, EXISTANCE_ONLY_TIME_ENTRY); + if (result === null) { + result = undefined; } - fileTimestamps.set(this.path, EXISTANCE_ONLY_TIME_ENTRY); - directoryTimestamps.set(this.path, EXISTANCE_ONLY_TIME_ENTRY); - } - if (!this.initialScan) { - for (const watchers of this.watchers.values()) { - for (const watcher of watchers) { - const path = watcher.path; - if (!fileTimestamps.has(path)) { - fileTimestamps.set(path, null); - } + if (gotHandlers.length > 1) { + const innerCallback = needCalls(gotHandlers.length, () => + callback(null, result) + ); + for (const gotHandler of gotHandlers) { + gotHandler(result, innerCallback); } + } else if (gotHandlers.length === 1) { + gotHandlers[0](result, () => callback(null, result)); + } else { + callback(null, result); } - } - return safeTime; + }); } - close() { - this.closed = true; - this.initialScan = false; - if (this.watcher) { - this.watcher.close(); - this.watcher = null; - } - if (this.nestedWatching) { - for (const w of this.directories.values()) { - w.close(); - } - this.directories.clear(); - } - if (this.parentWatcher) { - this.parentWatcher.close(); - this.parentWatcher = null; - } - this.emit("closed"); + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @param {T} data the value to store + * @param {CallbackCache} callback signals when the value is stored + * @returns {void} + */ + store(identifier, etag, data, callback) { + this.hooks.store.callAsync( + identifier, + etag, + 
data, + makeWebpackErrorCallback(callback, "Cache.hooks.store") + ); } -} -module.exports = DirectoryWatcher; -module.exports.EXISTANCE_ONLY_TIME_ENTRY = EXISTANCE_ONLY_TIME_ENTRY; + /** + * After this method has succeeded the cache can only be restored when build dependencies are + * @param {Iterable} dependencies list of all build dependencies + * @param {CallbackCache} callback signals when the dependencies are stored + * @returns {void} + */ + storeBuildDependencies(dependencies, callback) { + this.hooks.storeBuildDependencies.callAsync( + dependencies, + makeWebpackErrorCallback(callback, "Cache.hooks.storeBuildDependencies") + ); + } -function fixupEntryAccuracy(entry) { - if (entry.accuracy > FS_ACCURACY) { - entry.safeTime = entry.safeTime - entry.accuracy + FS_ACCURACY; - entry.accuracy = FS_ACCURACY; + /** + * @returns {void} + */ + beginIdle() { + this.hooks.beginIdle.call(); } -} -function ensureFsAccuracy(mtime) { - if (!mtime) return; - if (FS_ACCURACY > 1 && mtime % 1 !== 0) FS_ACCURACY = 1; - else if (FS_ACCURACY > 10 && mtime % 10 !== 0) FS_ACCURACY = 10; - else if (FS_ACCURACY > 100 && mtime % 100 !== 0) FS_ACCURACY = 100; + /** + * @param {CallbackCache} callback signals when the call finishes + * @returns {void} + */ + endIdle(callback) { + this.hooks.endIdle.callAsync( + makeWebpackErrorCallback(callback, "Cache.hooks.endIdle") + ); + } + + /** + * @param {CallbackCache} callback signals when the call finishes + * @returns {void} + */ + shutdown(callback) { + this.hooks.shutdown.callAsync( + makeWebpackErrorCallback(callback, "Cache.hooks.shutdown") + ); + } } +Cache.STAGE_MEMORY = -10; +Cache.STAGE_DEFAULT = 0; +Cache.STAGE_DISK = 10; +Cache.STAGE_NETWORK = 20; + +module.exports = Cache; + /***/ }), -/***/ 30417: +/***/ 67014: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -23430,172 +23444,357 @@ function ensureFsAccuracy(mtime) { */ -const fs = __webpack_require__(35747); -const path = __webpack_require__(85622); -// macOS, Linux, and Windows all rely on these errors -const EXPECTED_ERRORS = new Set(["EINVAL", "ENOENT"]); +const asyncLib = __webpack_require__(36386); +const getLazyHashedEtag = __webpack_require__(12339); +const mergeEtags = __webpack_require__(53885); -// On Windows there is also this error in some cases -if (process.platform === "win32") EXPECTED_ERRORS.add("UNKNOWN"); +/** @typedef {import("./Cache")} Cache */ +/** @typedef {import("./Cache").Etag} Etag */ +/** @typedef {import("./WebpackError")} WebpackError */ +/** @typedef {import("./cache/getLazyHashedEtag").HashableObject} HashableObject */ +/** @typedef {typeof import("./util/Hash")} HashConstructor */ -class LinkResolver { - constructor() { - this.cache = new Map(); +/** + * @template T + * @callback CallbackCache + * @param {WebpackError=} err + * @param {T=} result + * @returns {void} + */ + +/** + * @template T + * @callback CallbackNormalErrorCache + * @param {Error=} err + * @param {T=} result + * @returns {void} + */ + +class MultiItemCache { + /** + * @param {ItemCacheFacade[]} items item caches + */ + constructor(items) { + this._items = items; + if (items.length === 1) return /** @type {any} */ (items[0]); } /** - * @param {string} file path to file or directory - * @returns {string[]} array of file and all symlinks contributed in the resolving process (first item is the resolved file) + * @template T + * @param {CallbackCache} callback signals when the value is retrieved + * @returns {void} */ - resolve(file) { - const cacheEntry = 
this.cache.get(file); - if (cacheEntry !== undefined) { - return cacheEntry; - } - const parent = path.dirname(file); - if (parent === file) { - // At root of filesystem there can't be a link - const result = Object.freeze([file]); - this.cache.set(file, result); - return result; - } - // resolve the parent directory to find links there and get the real path - const parentResolved = this.resolve(parent); - let realFile = file; + get(callback) { + const next = i => { + this._items[i].get((err, result) => { + if (err) return callback(err); + if (result !== undefined) return callback(null, result); + if (++i >= this._items.length) return callback(); + next(i); + }); + }; + next(0); + } - // is the parent directory really somewhere else? - if (parentResolved[0] !== parent) { - // get the real location of file - const basename = path.basename(file); - realFile = path.resolve(parentResolved[0], basename); - } - // try to read the link content - try { - const linkContent = fs.readlinkSync(realFile); + /** + * @template T + * @returns {Promise} promise with the data + */ + getPromise() { + const next = i => { + return this._items[i].getPromise().then(result => { + if (result !== undefined) return result; + if (++i < this._items.length) return next(i); + }); + }; + return next(0); + } - // resolve the link content relative to the parent directory - const resolvedLink = path.resolve(parentResolved[0], linkContent); + /** + * @template T + * @param {T} data the value to store + * @param {CallbackCache} callback signals when the value is stored + * @returns {void} + */ + store(data, callback) { + asyncLib.each( + this._items, + (item, callback) => item.store(data, callback), + callback + ); + } - // recursive resolve the link content for more links in the structure - const linkResolved = this.resolve(resolvedLink); + /** + * @template T + * @param {T} data the value to store + * @returns {Promise} promise signals when the value is stored + */ + storePromise(data) { + return Promise.all(this._items.map(item => item.storePromise(data))).then( + () => {} + ); + } +} - // merge parent and link resolve results - let result; - if (linkResolved.length > 1 && parentResolved.length > 1) { - // when both contain links we need to duplicate them with a Set - const resultSet = new Set(linkResolved); - // add the link - resultSet.add(realFile); - // add all symlinks of the parent - for (let i = 1; i < parentResolved.length; i++) { - resultSet.add(parentResolved[i]); +class ItemCacheFacade { + /** + * @param {Cache} cache the root cache + * @param {string} name the child cache item name + * @param {Etag | null} etag the etag + */ + constructor(cache, name, etag) { + this._cache = cache; + this._name = name; + this._etag = etag; + } + + /** + * @template T + * @param {CallbackCache} callback signals when the value is retrieved + * @returns {void} + */ + get(callback) { + this._cache.get(this._name, this._etag, callback); + } + + /** + * @template T + * @returns {Promise} promise with the data + */ + getPromise() { + return new Promise((resolve, reject) => { + this._cache.get(this._name, this._etag, (err, data) => { + if (err) { + reject(err); + } else { + resolve(data); } - result = Object.freeze(Array.from(resultSet)); - } else if (parentResolved.length > 1) { - // we have links in the parent but not for the link content location - result = parentResolved.slice(); - result[0] = linkResolved[0]; - // add the link - result.push(realFile); - Object.freeze(result); - } else if (linkResolved.length > 1) { - // we can 
return the link content location result - result = linkResolved.slice(); - // add the link - result.push(realFile); - Object.freeze(result); - } else { - // neither link content location nor parent have links - // this link is the only link here - result = Object.freeze([ - // the resolve real location - linkResolved[0], - // add the link - realFile - ]); - } - this.cache.set(file, result); - return result; - } catch (e) { - if (!EXPECTED_ERRORS.has(e.code)) { - throw e; - } - // no link - const result = parentResolved.slice(); - result[0] = realFile; - Object.freeze(result); - this.cache.set(file, result); - return result; - } + }); + }); + } + + /** + * @template T + * @param {T} data the value to store + * @param {CallbackCache} callback signals when the value is stored + * @returns {void} + */ + store(data, callback) { + this._cache.store(this._name, this._etag, data, callback); + } + + /** + * @template T + * @param {T} data the value to store + * @returns {Promise} promise signals when the value is stored + */ + storePromise(data) { + return new Promise((resolve, reject) => { + this._cache.store(this._name, this._etag, data, err => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + } + + /** + * @template T + * @param {function(CallbackNormalErrorCache): void} computer function to compute the value if not cached + * @param {CallbackNormalErrorCache} callback signals when the value is retrieved + * @returns {void} + */ + provide(computer, callback) { + this.get((err, cacheEntry) => { + if (err) return callback(err); + if (cacheEntry !== undefined) return cacheEntry; + computer((err, result) => { + if (err) return callback(err); + this.store(result, err => { + if (err) return callback(err); + callback(null, result); + }); + }); + }); + } + + /** + * @template T + * @param {function(): Promise | T} computer function to compute the value if not cached + * @returns {Promise} promise with the data + */ + async providePromise(computer) { + const cacheEntry = await this.getPromise(); + if (cacheEntry !== undefined) return cacheEntry; + const result = await computer(); + await this.storePromise(result); + return result; } } -module.exports = LinkResolver; +class CacheFacade { + /** + * @param {Cache} cache the root cache + * @param {string} name the child cache name + * @param {string | HashConstructor} hashFunction the hash function to use + */ + constructor(cache, name, hashFunction) { + this._cache = cache; + this._name = name; + this._hashFunction = hashFunction; + } -/***/ }), + /** + * @param {string} name the child cache name# + * @returns {CacheFacade} child cache + */ + getChildCache(name) { + return new CacheFacade( + this._cache, + `${this._name}|${name}`, + this._hashFunction + ); + } -/***/ 56001: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + /** + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @returns {ItemCacheFacade} item cache + */ + getItemCache(identifier, etag) { + return new ItemCacheFacade( + this._cache, + `${this._name}|${identifier}`, + etag + ); + } -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ + /** + * @param {HashableObject} obj an hashable object + * @returns {Etag} an etag that is lazy hashed + */ + getLazyHashedEtag(obj) { + return getLazyHashedEtag(obj, this._hashFunction); + } + /** + * @param {Etag} a an etag + * @param {Etag} b another etag + * @returns {Etag} an etag that represents both 
+ */ + mergeEtags(a, b) { + return mergeEtags(a, b); + } -const path = __webpack_require__(85622); -const DirectoryWatcher = __webpack_require__(76595); + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @param {CallbackCache} callback signals when the value is retrieved + * @returns {void} + */ + get(identifier, etag, callback) { + this._cache.get(`${this._name}|${identifier}`, etag, callback); + } -class WatcherManager { - constructor(options) { - this.options = options; - this.directoryWatchers = new Map(); + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @returns {Promise} promise with the data + */ + getPromise(identifier, etag) { + return new Promise((resolve, reject) => { + this._cache.get(`${this._name}|${identifier}`, etag, (err, data) => { + if (err) { + reject(err); + } else { + resolve(data); + } + }); + }); } - getDirectoryWatcher(directory) { - const watcher = this.directoryWatchers.get(directory); - if (watcher === undefined) { - const newWatcher = new DirectoryWatcher(this, directory, this.options); - this.directoryWatchers.set(directory, newWatcher); - newWatcher.on("closed", () => { - this.directoryWatchers.delete(directory); + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @param {T} data the value to store + * @param {CallbackCache} callback signals when the value is stored + * @returns {void} + */ + store(identifier, etag, data, callback) { + this._cache.store(`${this._name}|${identifier}`, etag, data, callback); + } + + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @param {T} data the value to store + * @returns {Promise} promise signals when the value is stored + */ + storePromise(identifier, etag, data) { + return new Promise((resolve, reject) => { + this._cache.store(`${this._name}|${identifier}`, etag, data, err => { + if (err) { + reject(err); + } else { + resolve(); + } }); - return newWatcher; - } - return watcher; + }); } - watchFile(p, startTime) { - const directory = path.dirname(p); - if (directory === p) return null; - return this.getDirectoryWatcher(directory).watch(p, startTime); + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @param {function(CallbackNormalErrorCache): void} computer function to compute the value if not cached + * @param {CallbackNormalErrorCache} callback signals when the value is retrieved + * @returns {void} + */ + provide(identifier, etag, computer, callback) { + this.get(identifier, etag, (err, cacheEntry) => { + if (err) return callback(err); + if (cacheEntry !== undefined) return cacheEntry; + computer((err, result) => { + if (err) return callback(err); + this.store(identifier, etag, result, err => { + if (err) return callback(err); + callback(null, result); + }); + }); + }); } - watchDirectory(directory, startTime) { - return this.getDirectoryWatcher(directory).watch(directory, startTime); + /** + * @template T + * @param {string} identifier the cache identifier + * @param {Etag | null} etag the etag + * @param {function(): Promise | T} computer function to compute the value if not cached + * @returns {Promise} promise with the data + */ + async providePromise(identifier, etag, computer) { + const cacheEntry = await this.getPromise(identifier, etag); + if (cacheEntry !== undefined) return 
cacheEntry; + const result = await computer(); + await this.storePromise(identifier, etag, result); + return result; } } -const watcherManagers = new WeakMap(); -/** - * @param {object} options options - * @returns {WatcherManager} the watcher manager - */ -module.exports = options => { - const watcherManager = watcherManagers.get(options); - if (watcherManager !== undefined) return watcherManager; - const newWatcherManager = new WatcherManager(options); - watcherManagers.set(options, newWatcherManager); - return newWatcherManager; -}; -module.exports.WatcherManager = WatcherManager; +module.exports = CacheFacade; +module.exports.ItemCacheFacade = ItemCacheFacade; +module.exports.MultiItemCache = MultiItemCache; /***/ }), -/***/ 93030: +/***/ 42444: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -23605,144 +23804,77 @@ module.exports.WatcherManager = WatcherManager; */ -const path = __webpack_require__(85622); + +const WebpackError = __webpack_require__(68422); + +/** @typedef {import("./Module")} Module */ +/** @typedef {import("./ModuleGraph")} ModuleGraph */ /** - * @template T - * @typedef {Object} TreeNode - * @property {string} filePath - * @property {TreeNode} parent - * @property {TreeNode[]} children - * @property {number} entries - * @property {boolean} active - * @property {T[] | T | undefined} value + * @param {Module[]} modules the modules to be sorted + * @returns {Module[]} sorted version of original modules */ +const sortModules = modules => { + return modules.sort((a, b) => { + const aIdent = a.identifier(); + const bIdent = b.identifier(); + /* istanbul ignore next */ + if (aIdent < bIdent) return -1; + /* istanbul ignore next */ + if (aIdent > bIdent) return 1; + /* istanbul ignore next */ + return 0; + }); +}; /** - * @template T - * @param {Map>} the new plan + * @param {Module[]} modules each module from throw + * @param {ModuleGraph} moduleGraph the module graph + * @returns {string} each message from provided modules */ -module.exports = (plan, limit) => { - const treeMap = new Map(); - // Convert to tree - for (const [filePath, value] of plan) { - treeMap.set(filePath, { - filePath, - parent: undefined, - children: undefined, - entries: 1, - active: true, - value - }); - } - let currentCount = treeMap.size; - // Create parents and calculate sum of entries - for (const node of treeMap.values()) { - const parentPath = path.dirname(node.filePath); - if (parentPath !== node.filePath) { - let parent = treeMap.get(parentPath); - if (parent === undefined) { - parent = { - filePath: parentPath, - parent: undefined, - children: [node], - entries: node.entries, - active: false, - value: undefined - }; - treeMap.set(parentPath, parent); - node.parent = parent; - } else { - node.parent = parent; - if (parent.children === undefined) { - parent.children = [node]; - } else { - parent.children.push(node); - } - do { - parent.entries += node.entries; - parent = parent.parent; - } while (parent); - } - } - } - // Reduce until limit reached - while (currentCount > limit) { - // Select node that helps reaching the limit most effectively without overmerging - const overLimit = currentCount - limit; - let bestNode = undefined; - let bestCost = Infinity; - for (const node of treeMap.values()) { - if (node.entries <= 1 || !node.children || !node.parent) continue; - if (node.children.length === 0) continue; - if (node.children.length === 1 && !node.value) continue; - // Try to select the node with has just a bit more entries than we need to reduce 
- // When just a bit more is over 30% over the limit, - // also consider just a bit less entries then we need to reduce - const cost = - node.entries - 1 >= overLimit - ? node.entries - 1 - overLimit - : overLimit - node.entries + 1 + limit * 0.3; - if (cost < bestCost) { - bestNode = node; - bestCost = cost; - } - } - if (!bestNode) break; - // Merge all children - const reduction = bestNode.entries - 1; - bestNode.active = true; - bestNode.entries = 1; - currentCount -= reduction; - let parent = bestNode.parent; - while (parent) { - parent.entries -= reduction; - parent = parent.parent; - } - const queue = new Set(bestNode.children); - for (const node of queue) { - node.active = false; - node.entries = 0; - if (node.children) { - for (const child of node.children) queue.add(child); - } - } - } - // Write down new plan - const newPlan = new Map(); - for (const rootNode of treeMap.values()) { - if (!rootNode.active) continue; - const map = new Map(); - const queue = new Set([rootNode]); - for (const node of queue) { - if (node.active && node !== rootNode) continue; - if (node.value) { - if (Array.isArray(node.value)) { - for (const item of node.value) { - map.set(item, node.filePath); - } - } else { - map.set(node.value, node.filePath); - } - } - if (node.children) { - for (const child of node.children) { - queue.add(child); - } +const createModulesListMessage = (modules, moduleGraph) => { + return modules + .map(m => { + let message = `* ${m.identifier()}`; + const validReasons = Array.from( + moduleGraph.getIncomingConnectionsByOriginModule(m).keys() + ).filter(x => x); + + if (validReasons.length > 0) { + message += `\n Used by ${validReasons.length} module(s), i. e.`; + message += `\n ${validReasons[0].identifier()}`; } - } - newPlan.set(rootNode.filePath, map); - } - return newPlan; + return message; + }) + .join("\n"); }; +class CaseSensitiveModulesWarning extends WebpackError { + /** + * Creates an instance of CaseSensitiveModulesWarning. + * @param {Iterable} modules modules that were detected + * @param {ModuleGraph} moduleGraph the module graph + */ + constructor(modules, moduleGraph) { + const sortedModules = sortModules(Array.from(modules)); + const modulesList = createModulesListMessage(sortedModules, moduleGraph); + super(`There are multiple modules with names that only differ in casing. +This can lead to unexpected behavior when compiling on a filesystem with other case-semantic. +Use equal casing. Compare these module identifiers: +${modulesList}`); + + this.name = "CaseSensitiveModulesWarning"; + this.module = sortedModules[0]; + } +} + +module.exports = CaseSensitiveModulesWarning; + /***/ }), -/***/ 72511: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +/***/ 65574: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; /* @@ -23751,733 +23883,823 @@ module.exports = (plan, limit) => { */ -const fs = __webpack_require__(35747); -const path = __webpack_require__(85622); -const { EventEmitter } = __webpack_require__(28614); -const reducePlan = __webpack_require__(93030); - -const IS_OSX = __webpack_require__(12087).platform() === "darwin"; -const IS_WIN = __webpack_require__(12087).platform() === "win32"; -const SUPPORTS_RECURSIVE_WATCHING = IS_OSX || IS_WIN; -const watcherLimit = - +process.env.WATCHPACK_WATCHER_LIMIT || (IS_OSX ? 
2000 : 10000); +const ChunkGraph = __webpack_require__(97860); +const Entrypoint = __webpack_require__(86695); +const { intersect } = __webpack_require__(34715); +const SortableSet = __webpack_require__(67563); +const StringXor = __webpack_require__(23877); +const { + compareModulesByIdentifier, + compareChunkGroupsByIndex, + compareModulesById +} = __webpack_require__(26296); +const { createArrayToSetDeprecationSet } = __webpack_require__(2594); +const { mergeRuntime } = __webpack_require__(19655); -const recursiveWatcherLogging = !!process.env - .WATCHPACK_RECURSIVE_WATCHER_LOGGING; +/** @typedef {import("webpack-sources").Source} Source */ +/** @typedef {import("./ChunkGraph").ChunkFilterPredicate} ChunkFilterPredicate */ +/** @typedef {import("./ChunkGraph").ChunkSizeOptions} ChunkSizeOptions */ +/** @typedef {import("./ChunkGraph").ModuleFilterPredicate} ModuleFilterPredicate */ +/** @typedef {import("./ChunkGroup")} ChunkGroup */ +/** @typedef {import("./Compilation")} Compilation */ +/** @typedef {import("./Compilation").AssetInfo} AssetInfo */ +/** @typedef {import("./Compilation").PathData} PathData */ +/** @typedef {import("./Entrypoint").EntryOptions} EntryOptions */ +/** @typedef {import("./Module")} Module */ +/** @typedef {import("./ModuleGraph")} ModuleGraph */ +/** @typedef {import("./util/Hash")} Hash */ +/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */ -let isBatch = false; -let watcherCount = 0; +const ChunkFilesSet = createArrayToSetDeprecationSet("chunk.files"); -/** @type {Map} */ -const pendingWatchers = new Map(); +/** + * @typedef {Object} WithId an object who has an id property * + * @property {string | number} id the id of the object + */ -/** @type {Map} */ -const recursiveWatchers = new Map(); +/** + * @deprecated + * @typedef {Object} ChunkMaps + * @property {Record} hash + * @property {Record>} contentHash + * @property {Record} name + */ -/** @type {Map} */ -const directWatchers = new Map(); +/** + * @deprecated + * @typedef {Object} ChunkModuleMaps + * @property {Record} id + * @property {Record} hash + */ -/** @type {Map} */ -const underlyingWatcher = new Map(); +let debugId = 1000; -class DirectWatcher { - constructor(filePath) { - this.filePath = filePath; - this.watchers = new Set(); - this.watcher = undefined; - try { - const watcher = fs.watch(filePath); - this.watcher = watcher; - watcher.on("change", (type, filename) => { - for (const w of this.watchers) { - w.emit("change", type, filename); - } - }); - watcher.on("error", error => { - for (const w of this.watchers) { - w.emit("error", error); - } - }); - } catch (err) { - process.nextTick(() => { - for (const w of this.watchers) { - w.emit("error", err); - } - }); +/** + * A Chunk is a unit of encapsulation for Modules. + * Chunks are "rendered" into bundles that get emitted when the build completes. 
+ */ +class Chunk { + /** + * @param {string=} name of chunk being created, is optional (for subclasses) + * @param {boolean} backCompat enable backward-compatibility + */ + constructor(name, backCompat = true) { + /** @type {number | string | null} */ + this.id = null; + /** @type {(number|string)[] | null} */ + this.ids = null; + /** @type {number} */ + this.debugId = debugId++; + /** @type {string} */ + this.name = name; + /** @type {SortableSet} */ + this.idNameHints = new SortableSet(); + /** @type {boolean} */ + this.preventIntegration = false; + /** @type {(string | function(PathData, AssetInfo=): string)?} */ + this.filenameTemplate = undefined; + /** @private @type {SortableSet} */ + this._groups = new SortableSet(undefined, compareChunkGroupsByIndex); + /** @type {RuntimeSpec} */ + this.runtime = undefined; + /** @type {Set} */ + this.files = backCompat ? new ChunkFilesSet() : new Set(); + /** @type {Set} */ + this.auxiliaryFiles = new Set(); + /** @type {boolean} */ + this.rendered = false; + /** @type {string=} */ + this.hash = undefined; + /** @type {Record} */ + this.contentHash = Object.create(null); + /** @type {string=} */ + this.renderedHash = undefined; + /** @type {string=} */ + this.chunkReason = undefined; + /** @type {boolean} */ + this.extraAsync = false; + } + + // TODO remove in webpack 6 + // BACKWARD-COMPAT START + get entryModule() { + const entryModules = Array.from( + ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.entryModule", + "DEP_WEBPACK_CHUNK_ENTRY_MODULE" + ).getChunkEntryModulesIterable(this) + ); + if (entryModules.length === 0) { + return undefined; + } else if (entryModules.length === 1) { + return entryModules[0]; + } else { + throw new Error( + "Module.entryModule: Multiple entry modules are not supported by the deprecated API (Use the new ChunkGroup API)" + ); } - watcherCount++; } - add(watcher) { - underlyingWatcher.set(watcher, this); - this.watchers.add(watcher); + /** + * @returns {boolean} true, if the chunk contains an entry module + */ + hasEntryModule() { + return ( + ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.hasEntryModule", + "DEP_WEBPACK_CHUNK_HAS_ENTRY_MODULE" + ).getNumberOfEntryModules(this) > 0 + ); } - remove(watcher) { - this.watchers.delete(watcher); - if (this.watchers.size === 0) { - directWatchers.delete(this.filePath); - watcherCount--; - if (this.watcher) this.watcher.close(); - } + /** + * @param {Module} module the module + * @returns {boolean} true, if the chunk could be added + */ + addModule(module) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.addModule", + "DEP_WEBPACK_CHUNK_ADD_MODULE" + ); + if (chunkGraph.isModuleInChunk(module, this)) return false; + chunkGraph.connectChunkAndModule(this, module); + return true; } - getWatchers() { - return this.watchers; + /** + * @param {Module} module the module + * @returns {void} + */ + removeModule(module) { + ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.removeModule", + "DEP_WEBPACK_CHUNK_REMOVE_MODULE" + ).disconnectChunkAndModule(this, module); } -} -class RecursiveWatcher { - constructor(rootPath) { - this.rootPath = rootPath; - /** @type {Map} */ - this.mapWatcherToPath = new Map(); - /** @type {Map>} */ - this.mapPathToWatchers = new Map(); - this.watcher = undefined; - try { - const watcher = fs.watch(rootPath, { - recursive: true - }); - this.watcher = watcher; - watcher.on("change", (type, filename) => { - if (!filename) { - if (recursiveWatcherLogging) { - process.stderr.write( - `[watchpack] dispatch ${type} event 
in recursive watcher (${ - this.rootPath - }) to all watchers\n` - ); - } - for (const w of this.mapWatcherToPath.keys()) { - w.emit("change", type); - } - } else { - const dir = path.dirname(filename); - const watchers = this.mapPathToWatchers.get(dir); - if (recursiveWatcherLogging) { - process.stderr.write( - `[watchpack] dispatch ${type} event in recursive watcher (${ - this.rootPath - }) for '${filename}' to ${ - watchers ? watchers.size : 0 - } watchers\n` - ); - } - if (watchers === undefined) return; - for (const w of watchers) { - w.emit("change", type, path.basename(filename)); - } - } - }); - watcher.on("error", error => { - for (const w of this.mapWatcherToPath.keys()) { - w.emit("error", error); - } - }); - } catch (err) { - process.nextTick(() => { - for (const w of this.mapWatcherToPath.keys()) { - w.emit("error", err); - } - }); - } - watcherCount++; - if (recursiveWatcherLogging) { - process.stderr.write( - `[watchpack] created recursive watcher at ${rootPath}\n` - ); - } + /** + * @returns {number} the number of module which are contained in this chunk + */ + getNumberOfModules() { + return ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.getNumberOfModules", + "DEP_WEBPACK_CHUNK_GET_NUMBER_OF_MODULES" + ).getNumberOfChunkModules(this); } - add(filePath, watcher) { - underlyingWatcher.set(watcher, this); - const subpath = filePath.slice(this.rootPath.length + 1) || "."; - this.mapWatcherToPath.set(watcher, subpath); - const set = this.mapPathToWatchers.get(subpath); - if (set === undefined) { - const newSet = new Set(); - newSet.add(watcher); - this.mapPathToWatchers.set(subpath, newSet); - } else { - set.add(watcher); - } + get modulesIterable() { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.modulesIterable", + "DEP_WEBPACK_CHUNK_MODULES_ITERABLE" + ); + return chunkGraph.getOrderedChunkModulesIterable( + this, + compareModulesByIdentifier + ); } - remove(watcher) { - const subpath = this.mapWatcherToPath.get(watcher); - if (!subpath) return; - this.mapWatcherToPath.delete(watcher); - const set = this.mapPathToWatchers.get(subpath); - set.delete(watcher); - if (set.size === 0) { - this.mapPathToWatchers.delete(subpath); - } - if (this.mapWatcherToPath.size === 0) { - recursiveWatchers.delete(this.rootPath); - watcherCount--; - if (this.watcher) this.watcher.close(); - if (recursiveWatcherLogging) { - process.stderr.write( - `[watchpack] closed recursive watcher at ${this.rootPath}\n` - ); - } - } + /** + * @param {Chunk} otherChunk the chunk to compare with + * @returns {-1|0|1} the comparison result + */ + compareTo(otherChunk) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.compareTo", + "DEP_WEBPACK_CHUNK_COMPARE_TO" + ); + return chunkGraph.compareChunks(this, otherChunk); } - getWatchers() { - return this.mapWatcherToPath; + /** + * @param {Module} module the module + * @returns {boolean} true, if the chunk contains the module + */ + containsModule(module) { + return ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.containsModule", + "DEP_WEBPACK_CHUNK_CONTAINS_MODULE" + ).isModuleInChunk(module, this); } -} -class Watcher extends EventEmitter { - close() { - if (pendingWatchers.has(this)) { - pendingWatchers.delete(this); - return; - } - const watcher = underlyingWatcher.get(this); - watcher.remove(this); - underlyingWatcher.delete(this); + /** + * @returns {Module[]} the modules for this chunk + */ + getModules() { + return ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.getModules", + 
"DEP_WEBPACK_CHUNK_GET_MODULES" + ).getChunkModules(this); } -} -const createDirectWatcher = filePath => { - const existing = directWatchers.get(filePath); - if (existing !== undefined) return existing; - const w = new DirectWatcher(filePath); - directWatchers.set(filePath, w); - return w; -}; + /** + * @returns {void} + */ + remove() { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.remove", + "DEP_WEBPACK_CHUNK_REMOVE" + ); + chunkGraph.disconnectChunk(this); + this.disconnectFromGroups(); + } -const createRecursiveWatcher = rootPath => { - const existing = recursiveWatchers.get(rootPath); - if (existing !== undefined) return existing; - const w = new RecursiveWatcher(rootPath); - recursiveWatchers.set(rootPath, w); - return w; -}; + /** + * @param {Module} module the module + * @param {Chunk} otherChunk the target chunk + * @returns {void} + */ + moveModule(module, otherChunk) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.moveModule", + "DEP_WEBPACK_CHUNK_MOVE_MODULE" + ); + chunkGraph.disconnectChunkAndModule(this, module); + chunkGraph.connectChunkAndModule(otherChunk, module); + } -const execute = () => { - /** @type {Map} */ - const map = new Map(); - const addWatcher = (watcher, filePath) => { - const entry = map.get(filePath); - if (entry === undefined) { - map.set(filePath, watcher); - } else if (Array.isArray(entry)) { - entry.push(watcher); + /** + * @param {Chunk} otherChunk the other chunk + * @returns {boolean} true, if the specified chunk has been integrated + */ + integrate(otherChunk) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.integrate", + "DEP_WEBPACK_CHUNK_INTEGRATE" + ); + if (chunkGraph.canChunksBeIntegrated(this, otherChunk)) { + chunkGraph.integrateChunks(this, otherChunk); + return true; } else { - map.set(filePath, [entry, watcher]); + return false; } - }; - for (const [watcher, filePath] of pendingWatchers) { - addWatcher(watcher, filePath); } - pendingWatchers.clear(); - // Fast case when we are not reaching the limit - if (!SUPPORTS_RECURSIVE_WATCHING || watcherLimit - watcherCount >= map.size) { - // Create watchers for all entries in the map - for (const [filePath, entry] of map) { - const w = createDirectWatcher(filePath); - if (Array.isArray(entry)) { - for (const item of entry) w.add(item); - } else { - w.add(entry); - } - } - return; + /** + * @param {Chunk} otherChunk the other chunk + * @returns {boolean} true, if chunks could be integrated + */ + canBeIntegrated(otherChunk) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.canBeIntegrated", + "DEP_WEBPACK_CHUNK_CAN_BE_INTEGRATED" + ); + return chunkGraph.canChunksBeIntegrated(this, otherChunk); } - // Reconsider existing watchers to improving watch plan - for (const watcher of recursiveWatchers.values()) { - for (const [w, subpath] of watcher.getWatchers()) { - addWatcher(w, path.join(watcher.rootPath, subpath)); - } + /** + * @returns {boolean} true, if this chunk contains no module + */ + isEmpty() { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.isEmpty", + "DEP_WEBPACK_CHUNK_IS_EMPTY" + ); + return chunkGraph.getNumberOfChunkModules(this) === 0; } - for (const watcher of directWatchers.values()) { - for (const w of watcher.getWatchers()) { - addWatcher(w, watcher.filePath); - } + + /** + * @returns {number} total size of all modules in this chunk + */ + modulesSize() { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.modulesSize", + 
"DEP_WEBPACK_CHUNK_MODULES_SIZE" + ); + return chunkGraph.getChunkModulesSize(this); } - // Merge map entries to keep watcher limit - // Create a 10% buffer to be able to enter fast case more often - const plan = reducePlan(map, watcherLimit * 0.9); + /** + * @param {ChunkSizeOptions} options options object + * @returns {number} total size of this chunk + */ + size(options = {}) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.size", + "DEP_WEBPACK_CHUNK_SIZE" + ); + return chunkGraph.getChunkSize(this, options); + } - // Update watchers for all entries in the map - for (const [filePath, entry] of plan) { - if (entry.size === 1) { - for (const [watcher, filePath] of entry) { - const w = createDirectWatcher(filePath); - const old = underlyingWatcher.get(watcher); - if (old === w) continue; - w.add(watcher); - if (old !== undefined) old.remove(watcher); - } - } else { - const filePaths = new Set(entry.values()); - if (filePaths.size > 1) { - const w = createRecursiveWatcher(filePath); - for (const [watcher, watcherPath] of entry) { - const old = underlyingWatcher.get(watcher); - if (old === w) continue; - w.add(watcherPath, watcher); - if (old !== undefined) old.remove(watcher); - } - } else { - for (const filePath of filePaths) { - const w = createDirectWatcher(filePath); - for (const watcher of entry.keys()) { - const old = underlyingWatcher.get(watcher); - if (old === w) continue; - w.add(watcher); - if (old !== undefined) old.remove(watcher); + /** + * @param {Chunk} otherChunk the other chunk + * @param {ChunkSizeOptions} options options object + * @returns {number} total size of the chunk or false if the chunk can't be integrated + */ + integratedSize(otherChunk, options) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.integratedSize", + "DEP_WEBPACK_CHUNK_INTEGRATED_SIZE" + ); + return chunkGraph.getIntegratedChunksSize(this, otherChunk, options); + } + + /** + * @param {ModuleFilterPredicate} filterFn function used to filter modules + * @returns {ChunkModuleMaps} module map information + */ + getChunkModuleMaps(filterFn) { + const chunkGraph = ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.getChunkModuleMaps", + "DEP_WEBPACK_CHUNK_GET_CHUNK_MODULE_MAPS" + ); + /** @type {Record} */ + const chunkModuleIdMap = Object.create(null); + /** @type {Record} */ + const chunkModuleHashMap = Object.create(null); + + for (const asyncChunk of this.getAllAsyncChunks()) { + /** @type {(string|number)[]} */ + let array; + for (const module of chunkGraph.getOrderedChunkModulesIterable( + asyncChunk, + compareModulesById(chunkGraph) + )) { + if (filterFn(module)) { + if (array === undefined) { + array = []; + chunkModuleIdMap[asyncChunk.id] = array; } + const moduleId = chunkGraph.getModuleId(module); + array.push(moduleId); + chunkModuleHashMap[moduleId] = chunkGraph.getRenderedModuleHash( + module, + undefined + ); } } } + + return { + id: chunkModuleIdMap, + hash: chunkModuleHashMap + }; } -}; -exports.watch = filePath => { - const watcher = new Watcher(); - // Find an existing watcher - const directWatcher = directWatchers.get(filePath); - if (directWatcher !== undefined) { - directWatcher.add(watcher); - return watcher; + /** + * @param {ModuleFilterPredicate} filterFn predicate function used to filter modules + * @param {ChunkFilterPredicate=} filterChunkFn predicate function used to filter chunks + * @returns {boolean} return true if module exists in graph + */ + hasModuleInGraph(filterFn, filterChunkFn) { + const chunkGraph = 
ChunkGraph.getChunkGraphForChunk( + this, + "Chunk.hasModuleInGraph", + "DEP_WEBPACK_CHUNK_HAS_MODULE_IN_GRAPH" + ); + return chunkGraph.hasModuleInGraph(this, filterFn, filterChunkFn); } - let current = filePath; - for (;;) { - const recursiveWatcher = recursiveWatchers.get(current); - if (recursiveWatcher !== undefined) { - recursiveWatcher.add(filePath, watcher); - return watcher; + + /** + * @deprecated + * @param {boolean} realHash whether the full hash or the rendered hash is to be used + * @returns {ChunkMaps} the chunk map information + */ + getChunkMaps(realHash) { + /** @type {Record} */ + const chunkHashMap = Object.create(null); + /** @type {Record>} */ + const chunkContentHashMap = Object.create(null); + /** @type {Record} */ + const chunkNameMap = Object.create(null); + + for (const chunk of this.getAllAsyncChunks()) { + chunkHashMap[chunk.id] = realHash ? chunk.hash : chunk.renderedHash; + for (const key of Object.keys(chunk.contentHash)) { + if (!chunkContentHashMap[key]) { + chunkContentHashMap[key] = Object.create(null); + } + chunkContentHashMap[key][chunk.id] = chunk.contentHash[key]; + } + if (chunk.name) { + chunkNameMap[chunk.id] = chunk.name; + } } - const parent = path.dirname(current); - if (parent === current) break; - current = parent; + + return { + hash: chunkHashMap, + contentHash: chunkContentHashMap, + name: chunkNameMap + }; } - // Queue up watcher for creation - pendingWatchers.set(watcher, filePath); - if (!isBatch) execute(); - return watcher; -}; + // BACKWARD-COMPAT END -exports.batch = fn => { - isBatch = true; - try { - fn(); - } finally { - isBatch = false; - execute(); + /** + * @returns {boolean} whether or not the Chunk will have a runtime + */ + hasRuntime() { + for (const chunkGroup of this._groups) { + if ( + chunkGroup instanceof Entrypoint && + chunkGroup.getRuntimeChunk() === this + ) { + return true; + } + } + return false; } -}; -exports.getNumberOfWatchers = () => { - return watcherCount; -}; + /** + * @returns {boolean} whether or not this chunk can be an initial chunk + */ + canBeInitial() { + for (const chunkGroup of this._groups) { + if (chunkGroup.isInitial()) return true; + } + return false; + } + /** + * @returns {boolean} whether this chunk can only be an initial chunk + */ + isOnlyInitial() { + if (this._groups.size <= 0) return false; + for (const chunkGroup of this._groups) { + if (!chunkGroup.isInitial()) return false; + } + return true; + } -/***/ }), + /** + * @returns {EntryOptions | undefined} the entry options for this chunk + */ + getEntryOptions() { + for (const chunkGroup of this._groups) { + if (chunkGroup instanceof Entrypoint) { + return chunkGroup.options; + } + } + return undefined; + } -/***/ 29149: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + /** + * @param {ChunkGroup} chunkGroup the chunkGroup the chunk is being added + * @returns {void} + */ + addGroup(chunkGroup) { + this._groups.add(chunkGroup); + } -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ + /** + * @param {ChunkGroup} chunkGroup the chunkGroup the chunk is being removed from + * @returns {void} + */ + removeGroup(chunkGroup) { + this._groups.delete(chunkGroup); + } + /** + * @param {ChunkGroup} chunkGroup the chunkGroup to check + * @returns {boolean} returns true if chunk has chunkGroup reference and exists in chunkGroup + */ + isInGroup(chunkGroup) { + return this._groups.has(chunkGroup); + } -const getWatcherManager = 
__webpack_require__(56001); -const LinkResolver = __webpack_require__(30417); -const EventEmitter = __webpack_require__(28614).EventEmitter; -const globToRegExp = __webpack_require__(22797); -const watchEventSource = __webpack_require__(72511); + /** + * @returns {number} the amount of groups that the said chunk is in + */ + getNumberOfGroups() { + return this._groups.size; + } -const EMPTY_ARRAY = []; -const EMPTY_OPTIONS = {}; + /** + * @returns {Iterable} the chunkGroups that the said chunk is referenced in + */ + get groupsIterable() { + this._groups.sort(); + return this._groups; + } -function addWatchersToSet(watchers, set) { - for (const ww of watchers) { - const w = ww.watcher; - if (!set.has(w.directoryWatcher)) { - set.add(w.directoryWatcher); + /** + * @returns {void} + */ + disconnectFromGroups() { + for (const chunkGroup of this._groups) { + chunkGroup.removeChunk(this); } } -} -const stringToRegexp = ignored => { - const source = globToRegExp(ignored, { globstar: true, extended: true }) - .source; - const matchingStart = source.slice(0, source.length - 1) + "(?:$|\\/)"; - return matchingStart; -}; + /** + * @param {Chunk} newChunk the new chunk that will be split out of + * @returns {void} + */ + split(newChunk) { + for (const chunkGroup of this._groups) { + chunkGroup.insertChunk(newChunk, this); + newChunk.addGroup(chunkGroup); + } + for (const idHint of this.idNameHints) { + newChunk.idNameHints.add(idHint); + } + newChunk.runtime = mergeRuntime(newChunk.runtime, this.runtime); + } -const ignoredToFunction = ignored => { - if (Array.isArray(ignored)) { - const regexp = new RegExp(ignored.map(i => stringToRegexp(i)).join("|")); - return x => regexp.test(x.replace(/\\/g, "/")); - } else if (typeof ignored === "string") { - const regexp = new RegExp(stringToRegexp(ignored)); - return x => regexp.test(x.replace(/\\/g, "/")); - } else if (ignored instanceof RegExp) { - return x => ignored.test(x.replace(/\\/g, "/")); - } else if (ignored instanceof Function) { - return ignored; - } else if (ignored) { - throw new Error(`Invalid option for 'ignored': ${ignored}`); - } else { - return () => false; + /** + * @param {Hash} hash hash (will be modified) + * @param {ChunkGraph} chunkGraph the chunk graph + * @returns {void} + */ + updateHash(hash, chunkGraph) { + hash.update( + `${this.id} ${this.ids ? 
this.ids.join() : ""} ${this.name || ""} ` + ); + const xor = new StringXor(); + for (const m of chunkGraph.getChunkModulesIterable(this)) { + xor.add(chunkGraph.getModuleHash(m, this.runtime)); + } + xor.updateHash(hash); + const entryModules = + chunkGraph.getChunkEntryModulesWithChunkGroupIterable(this); + for (const [m, chunkGroup] of entryModules) { + hash.update(`entry${chunkGraph.getModuleId(m)}${chunkGroup.id}`); + } } -}; -const normalizeOptions = options => { - return { - followSymlinks: !!options.followSymlinks, - ignored: ignoredToFunction(options.ignored), - poll: options.poll - }; -}; + /** + * @returns {Set} a set of all the async chunks + */ + getAllAsyncChunks() { + const queue = new Set(); + const chunks = new Set(); -const normalizeCache = new WeakMap(); -const cachedNormalizeOptions = options => { - const cacheEntry = normalizeCache.get(options); - if (cacheEntry !== undefined) return cacheEntry; - const normalized = normalizeOptions(options); - normalizeCache.set(options, normalized); - return normalized; -}; + const initialChunks = intersect( + Array.from(this.groupsIterable, g => new Set(g.chunks)) + ); -class WatchpackFileWatcher { - constructor(watchpack, watcher, files) { - if (!watcher) throw new Error(); - this.files = Array.isArray(files) ? files : [files]; - this.watcher = watcher; - watcher.on("initial-missing", type => { - for (const file of this.files) { - if (!watchpack._missing.has(file)) - watchpack._onRemove(file, file, type); + const initialQueue = new Set(this.groupsIterable); + + for (const chunkGroup of initialQueue) { + for (const child of chunkGroup.childrenIterable) { + if (child instanceof Entrypoint) { + initialQueue.add(child); + } else { + queue.add(child); + } } - }); - watcher.on("change", (mtime, type) => { - for (const file of this.files) { - watchpack._onChange(file, mtime, file, type); + } + + for (const chunkGroup of queue) { + for (const chunk of chunkGroup.chunks) { + if (!initialChunks.has(chunk)) { + chunks.add(chunk); + } } - }); - watcher.on("remove", type => { - for (const file of this.files) { - watchpack._onRemove(file, file, type); + for (const child of chunkGroup.childrenIterable) { + queue.add(child); } - }); + } + + return chunks; } - update(files) { - if (!Array.isArray(files)) { - if (this.files.length !== 1) { - this.files = [files]; - } else if (this.files[0] !== files) { - this.files[0] = files; + /** + * @returns {Set} a set of all the initial chunks (including itself) + */ + getAllInitialChunks() { + const chunks = new Set(); + const queue = new Set(this.groupsIterable); + for (const group of queue) { + if (group.isInitial()) { + for (const c of group.chunks) chunks.add(c); + for (const g of group.childrenIterable) queue.add(g); } - } else { - this.files = files; } + return chunks; } - close() { - this.watcher.close(); - } -} + /** + * @returns {Set} a set of all the referenced chunks (including itself) + */ + getAllReferencedChunks() { + const queue = new Set(this.groupsIterable); + const chunks = new Set(); -class WatchpackDirectoryWatcher { - constructor(watchpack, watcher, directories) { - if (!watcher) throw new Error(); - this.directories = Array.isArray(directories) ? 
directories : [directories]; - this.watcher = watcher; - watcher.on("initial-missing", type => { - for (const item of this.directories) { - watchpack._onRemove(item, item, type); - } - }); - watcher.on("change", (file, mtime, type) => { - for (const item of this.directories) { - watchpack._onChange(item, mtime, file, type); + for (const chunkGroup of queue) { + for (const chunk of chunkGroup.chunks) { + chunks.add(chunk); } - }); - watcher.on("remove", type => { - for (const item of this.directories) { - watchpack._onRemove(item, item, type); + for (const child of chunkGroup.childrenIterable) { + queue.add(child); } - }); + } + + return chunks; } - update(directories) { - if (!Array.isArray(directories)) { - if (this.directories.length !== 1) { - this.directories = [directories]; - } else if (this.directories[0] !== directories) { - this.directories[0] = directories; + /** + * @returns {Set} a set of all the referenced entrypoints + */ + getAllReferencedAsyncEntrypoints() { + const queue = new Set(this.groupsIterable); + const entrypoints = new Set(); + + for (const chunkGroup of queue) { + for (const entrypoint of chunkGroup.asyncEntrypointsIterable) { + entrypoints.add(entrypoint); + } + for (const child of chunkGroup.childrenIterable) { + queue.add(child); } - } else { - this.directories = directories; } - } - close() { - this.watcher.close(); + return entrypoints; } -} -class Watchpack extends EventEmitter { - constructor(options) { - super(); - if (!options) options = EMPTY_OPTIONS; - this.options = options; - this.aggregateTimeout = - typeof options.aggregateTimeout === "number" - ? options.aggregateTimeout - : 200; - this.watcherOptions = cachedNormalizeOptions(options); - this.watcherManager = getWatcherManager(this.watcherOptions); - this.fileWatchers = new Map(); - this.directoryWatchers = new Map(); - this._missing = new Set(); - this.startTime = undefined; - this.paused = false; - this.aggregatedChanges = new Set(); - this.aggregatedRemovals = new Set(); - this.aggregateTimer = undefined; - this._onTimeout = this._onTimeout.bind(this); - } + /** + * @returns {boolean} true, if the chunk references async chunks + */ + hasAsyncChunks() { + const queue = new Set(); - watch(arg1, arg2, arg3) { - let files, directories, missing, startTime; - if (!arg2) { - ({ - files = EMPTY_ARRAY, - directories = EMPTY_ARRAY, - missing = EMPTY_ARRAY, - startTime - } = arg1); - } else { - files = arg1; - directories = arg2; - missing = EMPTY_ARRAY; - startTime = arg3; - } - this.paused = false; - const fileWatchers = this.fileWatchers; - const directoryWatchers = this.directoryWatchers; - const ignored = this.watcherOptions.ignored; - const filter = path => !ignored(path); - const addToMap = (map, key, item) => { - const list = map.get(key); - if (list === undefined) { - map.set(key, item); - } else if (Array.isArray(list)) { - list.push(item); - } else { - map.set(key, [list, item]); + const initialChunks = intersect( + Array.from(this.groupsIterable, g => new Set(g.chunks)) + ); + + for (const chunkGroup of this.groupsIterable) { + for (const child of chunkGroup.childrenIterable) { + queue.add(child); } - }; - const fileWatchersNeeded = new Map(); - const directoryWatchersNeeded = new Map(); - const missingFiles = new Set(); - if (this.watcherOptions.followSymlinks) { - const resolver = new LinkResolver(); - for (const file of files) { - if (filter(file)) { - for (const innerFile of resolver.resolve(file)) { - if (file === innerFile || filter(innerFile)) { - addToMap(fileWatchersNeeded, innerFile, 
file); - } - } + } + + for (const chunkGroup of queue) { + for (const chunk of chunkGroup.chunks) { + if (!initialChunks.has(chunk)) { + return true; } } - for (const file of missing) { - if (filter(file)) { - for (const innerFile of resolver.resolve(file)) { - if (file === innerFile || filter(innerFile)) { - missingFiles.add(file); - addToMap(fileWatchersNeeded, innerFile, file); - } - } - } + for (const child of chunkGroup.childrenIterable) { + queue.add(child); } - for (const dir of directories) { - if (filter(dir)) { - let first = true; - for (const innerItem of resolver.resolve(dir)) { - if (filter(innerItem)) { - addToMap( - first ? directoryWatchersNeeded : fileWatchersNeeded, - innerItem, - dir - ); + } + + return false; + } + + /** + * @param {ChunkGraph} chunkGraph the chunk graph + * @param {ChunkFilterPredicate=} filterFn function used to filter chunks + * @returns {Record} a record object of names to lists of child ids(?) + */ + getChildIdsByOrders(chunkGraph, filterFn) { + /** @type {Map} */ + const lists = new Map(); + for (const group of this.groupsIterable) { + if (group.chunks[group.chunks.length - 1] === this) { + for (const childGroup of group.childrenIterable) { + for (const key of Object.keys(childGroup.options)) { + if (key.endsWith("Order")) { + const name = key.substr(0, key.length - "Order".length); + let list = lists.get(name); + if (list === undefined) { + list = []; + lists.set(name, list); + } + list.push({ + order: childGroup.options[key], + group: childGroup + }); } - first = false; } } } - } else { - for (const file of files) { - if (filter(file)) { - addToMap(fileWatchersNeeded, file, file); - } - } - for (const file of missing) { - if (filter(file)) { - missingFiles.add(file); - addToMap(fileWatchersNeeded, file, file); - } - } - for (const dir of directories) { - if (filter(dir)) { - addToMap(directoryWatchersNeeded, dir, dir); - } - } - } - // Close unneeded old watchers - // and update existing watchers - for (const [key, w] of fileWatchers) { - const needed = fileWatchersNeeded.get(key); - if (needed === undefined) { - w.close(); - fileWatchers.delete(key); - } else { - w.update(needed); - fileWatchersNeeded.delete(key); - } - } - for (const [key, w] of directoryWatchers) { - const needed = directoryWatchersNeeded.get(key); - if (needed === undefined) { - w.close(); - directoryWatchers.delete(key); - } else { - w.update(needed); - directoryWatchersNeeded.delete(key); - } } - // Create new watchers and install handlers on these watchers - watchEventSource.batch(() => { - for (const [key, files] of fileWatchersNeeded) { - const watcher = this.watcherManager.watchFile(key, startTime); - if (watcher) { - fileWatchers.set(key, new WatchpackFileWatcher(this, watcher, files)); + /** @type {Record} */ + const result = Object.create(null); + for (const [name, list] of lists) { + list.sort((a, b) => { + const cmp = b.order - a.order; + if (cmp !== 0) return cmp; + return a.group.compareTo(chunkGraph, b.group); + }); + /** @type {Set} */ + const chunkIdSet = new Set(); + for (const item of list) { + for (const chunk of item.group.chunks) { + if (filterFn && !filterFn(chunk, chunkGraph)) continue; + chunkIdSet.add(chunk.id); } } - for (const [key, directories] of directoryWatchersNeeded) { - const watcher = this.watcherManager.watchDirectory(key, startTime); - if (watcher) { - directoryWatchers.set( - key, - new WatchpackDirectoryWatcher(this, watcher, directories) - ); - } + if (chunkIdSet.size > 0) { + result[name] = Array.from(chunkIdSet); } - }); - 
this._missing = missingFiles; - this.startTime = startTime; - } - - close() { - this.paused = true; - if (this.aggregateTimer) clearTimeout(this.aggregateTimer); - for (const w of this.fileWatchers.values()) w.close(); - for (const w of this.directoryWatchers.values()) w.close(); - this.fileWatchers.clear(); - this.directoryWatchers.clear(); + } + return result; } - pause() { - this.paused = true; - if (this.aggregateTimer) clearTimeout(this.aggregateTimer); - } - - getTimes() { - const directoryWatchers = new Set(); - addWatchersToSet(this.fileWatchers.values(), directoryWatchers); - addWatchersToSet(this.directoryWatchers.values(), directoryWatchers); - const obj = Object.create(null); - for (const w of directoryWatchers) { - const times = w.getTimes(); - for (const file of Object.keys(times)) obj[file] = times[file]; + /** + * @param {ChunkGraph} chunkGraph the chunk graph + * @param {string} type option name + * @returns {{ onChunks: Chunk[], chunks: Set }[]} referenced chunks for a specific type + */ + getChildrenOfTypeInOrder(chunkGraph, type) { + const list = []; + for (const group of this.groupsIterable) { + for (const childGroup of group.childrenIterable) { + const order = childGroup.options[type]; + if (order === undefined) continue; + list.push({ + order, + group, + childGroup + }); + } } - return obj; - } - - getTimeInfoEntries() { - const map = new Map(); - this.collectTimeInfoEntries(map, map); - return map; - } - - collectTimeInfoEntries(fileTimestamps, directoryTimestamps) { - const allWatchers = new Set(); - addWatchersToSet(this.fileWatchers.values(), allWatchers); - addWatchersToSet(this.directoryWatchers.values(), allWatchers); - const safeTime = { value: 0 }; - for (const w of allWatchers) { - w.collectTimeInfoEntries(fileTimestamps, directoryTimestamps, safeTime); + if (list.length === 0) return undefined; + list.sort((a, b) => { + const cmp = b.order - a.order; + if (cmp !== 0) return cmp; + return a.group.compareTo(chunkGraph, b.group); + }); + const result = []; + let lastEntry; + for (const { group, childGroup } of list) { + if (lastEntry && lastEntry.onChunks === group.chunks) { + for (const chunk of childGroup.chunks) { + lastEntry.chunks.add(chunk); + } + } else { + result.push( + (lastEntry = { + onChunks: group.chunks, + chunks: new Set(childGroup.chunks) + }) + ); + } } + return result; } - getAggregated() { - if (this.aggregateTimer) { - clearTimeout(this.aggregateTimer); - this.aggregateTimer = undefined; - } - const changes = this.aggregatedChanges; - const removals = this.aggregatedRemovals; - this.aggregatedChanges = new Set(); - this.aggregatedRemovals = new Set(); - return { changes, removals }; - } + /** + * @param {ChunkGraph} chunkGraph the chunk graph + * @param {boolean=} includeDirectChildren include direct children (by default only children of async children are included) + * @param {ChunkFilterPredicate=} filterFn function used to filter chunks + * @returns {Record>} a record object of names to lists of child ids(?) 
by chunk id + */ + getChildIdsByOrdersMap(chunkGraph, includeDirectChildren, filterFn) { + /** @type {Record>} */ + const chunkMaps = Object.create(null); - _onChange(item, mtime, file, type) { - file = file || item; - if (!this.paused) { - this.emit("change", file, mtime, type); - if (this.aggregateTimer) clearTimeout(this.aggregateTimer); - this.aggregateTimer = setTimeout(this._onTimeout, this.aggregateTimeout); + /** + * @param {Chunk} chunk a chunk + * @returns {void} + */ + const addChildIdsByOrdersToMap = chunk => { + const data = chunk.getChildIdsByOrders(chunkGraph, filterFn); + for (const key of Object.keys(data)) { + let chunkMap = chunkMaps[key]; + if (chunkMap === undefined) { + chunkMaps[key] = chunkMap = Object.create(null); + } + chunkMap[chunk.id] = data[key]; + } + }; + + if (includeDirectChildren) { + /** @type {Set} */ + const chunks = new Set(); + for (const chunkGroup of this.groupsIterable) { + for (const chunk of chunkGroup.chunks) { + chunks.add(chunk); + } + } + for (const chunk of chunks) { + addChildIdsByOrdersToMap(chunk); + } } - this.aggregatedRemovals.delete(item); - this.aggregatedChanges.add(item); - } - _onRemove(item, file, type) { - file = file || item; - if (!this.paused) { - this.emit("remove", file, type); - if (this.aggregateTimer) clearTimeout(this.aggregateTimer); - this.aggregateTimer = setTimeout(this._onTimeout, this.aggregateTimeout); + for (const chunk of this.getAllAsyncChunks()) { + addChildIdsByOrdersToMap(chunk); } - this.aggregatedChanges.delete(item); - this.aggregatedRemovals.add(item); - } - _onTimeout() { - this.aggregateTimer = undefined; - const changes = this.aggregatedChanges; - const removals = this.aggregatedRemovals; - this.aggregatedChanges = new Set(); - this.aggregatedRemovals = new Set(); - this.emit("aggregated", changes, removals); + return chunkMaps; } } -module.exports = Watchpack; +module.exports = Chunk; /***/ }), -/***/ 46806: +/***/ 97860: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -24488,1949 +24710,2236 @@ module.exports = Watchpack; -const RuntimeGlobals = __webpack_require__(49404); -const WebpackError = __webpack_require__(68422); -const ConstDependency = __webpack_require__(60864); -const BasicEvaluatedExpression = __webpack_require__(2412); +const util = __webpack_require__(31669); +const Entrypoint = __webpack_require__(86695); +const ModuleGraphConnection = __webpack_require__(94144); +const { first } = __webpack_require__(34715); +const SortableSet = __webpack_require__(67563); const { - toConstantDependency, - evaluateToString -} = __webpack_require__(28723); -const ChunkNameRuntimeModule = __webpack_require__(43818); -const GetFullHashRuntimeModule = __webpack_require__(28454); - -/** @typedef {import("./Compiler")} Compiler */ -/** @typedef {import("./javascript/JavascriptParser")} JavascriptParser */ + compareModulesById, + compareIterables, + compareModulesByIdentifier, + concatComparators, + compareSelect, + compareIds +} = __webpack_require__(26296); +const createHash = __webpack_require__(24123); +const findGraphRoots = __webpack_require__(38670); +const { + RuntimeSpecMap, + RuntimeSpecSet, + runtimeToString, + mergeRuntime, + forEachRuntime +} = __webpack_require__(19655); -/* eslint-disable camelcase */ -const REPLACEMENTS = { - __webpack_require__: { - expr: RuntimeGlobals.require, - req: [RuntimeGlobals.require], - type: "function", - assign: false - }, - __webpack_public_path__: { - expr: RuntimeGlobals.publicPath, - req: 
[RuntimeGlobals.publicPath], - type: "string", - assign: true - }, - __webpack_base_uri__: { - expr: RuntimeGlobals.baseURI, - req: [RuntimeGlobals.baseURI], - type: "string", - assign: true - }, - __webpack_modules__: { - expr: RuntimeGlobals.moduleFactories, - req: [RuntimeGlobals.moduleFactories], - type: "object", - assign: false - }, - __webpack_chunk_load__: { - expr: RuntimeGlobals.ensureChunk, - req: [RuntimeGlobals.ensureChunk], - type: "function", - assign: true - }, - __non_webpack_require__: { - expr: "require", - req: null, - type: undefined, // type is not known, depends on environment - assign: true - }, - __webpack_nonce__: { - expr: RuntimeGlobals.scriptNonce, - req: [RuntimeGlobals.scriptNonce], - type: "string", - assign: true - }, - __webpack_hash__: { - expr: `${RuntimeGlobals.getFullHash}()`, - req: [RuntimeGlobals.getFullHash], - type: "string", - assign: false - }, - __webpack_chunkname__: { - expr: RuntimeGlobals.chunkName, - req: [RuntimeGlobals.chunkName], - type: "string", - assign: false - }, - __webpack_get_script_filename__: { - expr: RuntimeGlobals.getChunkScriptFilename, - req: [RuntimeGlobals.getChunkScriptFilename], - type: "function", - assign: true - }, - __webpack_runtime_id__: { - expr: RuntimeGlobals.runtimeId, - req: [RuntimeGlobals.runtimeId], - assign: false - }, - "require.onError": { - expr: RuntimeGlobals.uncaughtErrorHandler, - req: [RuntimeGlobals.uncaughtErrorHandler], - type: undefined, // type is not known, could be function or undefined - assign: true // is never a pattern - }, - __system_context__: { - expr: RuntimeGlobals.systemContext, - req: [RuntimeGlobals.systemContext], - type: "object", - assign: false - }, - __webpack_share_scopes__: { - expr: RuntimeGlobals.shareScopeMap, - req: [RuntimeGlobals.shareScopeMap], - type: "object", - assign: false - }, - __webpack_init_sharing__: { - expr: RuntimeGlobals.initializeSharing, - req: [RuntimeGlobals.initializeSharing], - type: "function", - assign: true - } -}; -/* eslint-enable camelcase */ +/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */ +/** @typedef {import("./Chunk")} Chunk */ +/** @typedef {import("./ChunkGroup")} ChunkGroup */ +/** @typedef {import("./Module")} Module */ +/** @typedef {import("./ModuleGraph")} ModuleGraph */ +/** @typedef {import("./RuntimeModule")} RuntimeModule */ +/** @typedef {typeof import("./util/Hash")} Hash */ +/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */ -class APIPlugin { - /** - * Apply the plugin - * @param {Compiler} compiler the compiler instance - * @returns {void} - */ - apply(compiler) { - compiler.hooks.compilation.tap( - "APIPlugin", - (compilation, { normalModuleFactory }) => { - compilation.dependencyTemplates.set( - ConstDependency, - new ConstDependency.Template() - ); +/** @type {ReadonlySet} */ +const EMPTY_SET = new Set(); - compilation.hooks.runtimeRequirementInTree - .for(RuntimeGlobals.chunkName) - .tap("APIPlugin", chunk => { - compilation.addRuntimeModule( - chunk, - new ChunkNameRuntimeModule(chunk.name) - ); - return true; - }); +const ZERO_BIG_INT = BigInt(0); - compilation.hooks.runtimeRequirementInTree - .for(RuntimeGlobals.getFullHash) - .tap("APIPlugin", (chunk, set) => { - compilation.addRuntimeModule(chunk, new GetFullHashRuntimeModule()); - return true; - }); +const compareModuleIterables = compareIterables(compareModulesByIdentifier); - /** - * @param {JavascriptParser} parser the parser - */ - const handler = parser => { - Object.keys(REPLACEMENTS).forEach(key => { - 
const info = REPLACEMENTS[key]; - parser.hooks.expression - .for(key) - .tap( - "APIPlugin", - toConstantDependency(parser, info.expr, info.req) - ); - if (info.assign === false) { - parser.hooks.assign.for(key).tap("APIPlugin", expr => { - const err = new WebpackError(`${key} must not be assigned`); - err.loc = expr.loc; - throw err; - }); - } - if (info.type) { - parser.hooks.evaluateTypeof - .for(key) - .tap("APIPlugin", evaluateToString(info.type)); - } - }); +/** @typedef {(c: Chunk, chunkGraph: ChunkGraph) => boolean} ChunkFilterPredicate */ +/** @typedef {(m: Module) => boolean} ModuleFilterPredicate */ - parser.hooks.expression - .for("__webpack_layer__") - .tap("APIPlugin", expr => { - const dep = new ConstDependency( - JSON.stringify(parser.state.module.layer), - expr.range - ); - dep.loc = expr.loc; - parser.state.module.addPresentationalDependency(dep); - return true; - }); - parser.hooks.evaluateIdentifier - .for("__webpack_layer__") - .tap("APIPlugin", expr => - (parser.state.module.layer === null - ? new BasicEvaluatedExpression().setNull() - : new BasicEvaluatedExpression().setString( - parser.state.module.layer - ) - ).setRange(expr.range) - ); - parser.hooks.evaluateTypeof - .for("__webpack_layer__") - .tap("APIPlugin", expr => - new BasicEvaluatedExpression() - .setString( - parser.state.module.layer === null ? "object" : "string" - ) - .setRange(expr.range) - ); - }; +/** + * @typedef {Object} ChunkSizeOptions + * @property {number=} chunkOverhead constant overhead for a chunk + * @property {number=} entryChunkMultiplicator multiplicator for initial chunks + */ - normalModuleFactory.hooks.parser - .for("javascript/auto") - .tap("APIPlugin", handler); - normalModuleFactory.hooks.parser - .for("javascript/dynamic") - .tap("APIPlugin", handler); - normalModuleFactory.hooks.parser - .for("javascript/esm") - .tap("APIPlugin", handler); - } - ); +class ModuleHashInfo { + constructor(hash, renderedHash) { + this.hash = hash; + this.renderedHash = renderedHash; } } -module.exports = APIPlugin; - - -/***/ }), - -/***/ 570: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { +/** @template T @typedef {(set: SortableSet) => T[]} SetToArrayFunction */ -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Ivan Kopeykin @vankop -*/ +/** + * @template T + * @param {SortableSet} set the set + * @returns {T[]} set as array + */ +const getArray = set => { + return Array.from(set); +}; +/** + * @param {SortableSet} chunks the chunks + * @returns {RuntimeSpecSet} runtimes + */ +const getModuleRuntimes = chunks => { + const runtimes = new RuntimeSpecSet(); + for (const chunk of chunks) { + runtimes.add(chunk.runtime); + } + return runtimes; +}; +/** + * @param {SortableSet} set the set + * @returns {Map>} modules by source type + */ +const modulesBySourceType = set => { + /** @type {Map>} */ + const map = new Map(); + for (const module of set) { + for (const sourceType of module.getSourceTypes()) { + let innerSet = map.get(sourceType); + if (innerSet === undefined) { + innerSet = new SortableSet(); + map.set(sourceType, innerSet); + } + innerSet.add(module); + } + } + for (const [key, innerSet] of map) { + // When all modules have the source type, we reuse the original SortableSet + // to benefit from the shared cache (especially for sorting) + if (innerSet.size === set.size) { + map.set(key, set); + } + } + return map; +}; -const WebpackError = __webpack_require__(68422); -const CURRENT_METHOD_REGEXP = /at ([a-zA-Z0-9_.]*)/; +/** 
@type {WeakMap} */ +const createOrderedArrayFunctionMap = new WeakMap(); /** - * @param {string=} method method name - * @returns {string} message + * @template T + * @param {function(T, T): -1|0|1} comparator comparator function + * @returns {SetToArrayFunction} set as ordered array */ -function createMessage(method) { - return `Abstract method${method ? " " + method : ""}. Must be overridden.`; -} +const createOrderedArrayFunction = comparator => { + /** @type {SetToArrayFunction} */ + let fn = createOrderedArrayFunctionMap.get(comparator); + if (fn !== undefined) return fn; + fn = set => { + set.sortWith(comparator); + return Array.from(set); + }; + createOrderedArrayFunctionMap.set(comparator, fn); + return fn; +}; /** - * @constructor + * @param {Iterable} modules the modules to get the count/size of + * @returns {number} the size of the modules */ -function Message() { - /** @type {string} */ - this.stack = undefined; - Error.captureStackTrace(this); - /** @type {RegExpMatchArray} */ - const match = this.stack.split("\n")[3].match(CURRENT_METHOD_REGEXP); +const getModulesSize = modules => { + let size = 0; + for (const module of modules) { + for (const type of module.getSourceTypes()) { + size += module.size(type); + } + } + return size; +}; - this.message = match && match[1] ? createMessage(match[1]) : createMessage(); -} +/** + * @param {Iterable} modules the sortable Set to get the size of + * @returns {Record} the sizes of the modules + */ +const getModulesSizes = modules => { + let sizes = Object.create(null); + for (const module of modules) { + for (const type of module.getSourceTypes()) { + sizes[type] = (sizes[type] || 0) + module.size(type); + } + } + return sizes; +}; /** - * Error for abstract method - * @example - * class FooClass { - * abstractMethod() { - * throw new AbstractMethodError(); // error message: Abstract method FooClass.abstractMethod. Must be overridden. 
- * } - * } - * + * @param {Chunk} a chunk + * @param {Chunk} b chunk + * @returns {boolean} true, if a is always a parent of b */ -class AbstractMethodError extends WebpackError { +const isAvailableChunk = (a, b) => { + const queue = new Set(b.groupsIterable); + for (const chunkGroup of queue) { + if (a.isInGroup(chunkGroup)) continue; + if (chunkGroup.isInitial()) return false; + for (const parent of chunkGroup.parentsIterable) { + queue.add(parent); + } + } + return true; +}; + +class ChunkGraphModule { constructor() { - super(new Message().message); - this.name = "AbstractMethodError"; + /** @type {SortableSet} */ + this.chunks = new SortableSet(); + /** @type {Set | undefined} */ + this.entryInChunks = undefined; + /** @type {Set | undefined} */ + this.runtimeInChunks = undefined; + /** @type {RuntimeSpecMap} */ + this.hashes = undefined; + /** @type {string | number} */ + this.id = null; + /** @type {RuntimeSpecMap> | undefined} */ + this.runtimeRequirements = undefined; + /** @type {RuntimeSpecMap} */ + this.graphHashes = undefined; + /** @type {RuntimeSpecMap} */ + this.graphHashesWithConnections = undefined; } } -module.exports = AbstractMethodError; - - -/***/ }), +class ChunkGraphChunk { + constructor() { + /** @type {SortableSet} */ + this.modules = new SortableSet(); + /** @type {Map} */ + this.entryModules = new Map(); + /** @type {SortableSet} */ + this.runtimeModules = new SortableSet(); + /** @type {Set | undefined} */ + this.fullHashModules = undefined; + /** @type {Set | undefined} */ + this.dependentHashModules = undefined; + /** @type {Set | undefined} */ + this.runtimeRequirements = undefined; + /** @type {Set} */ + this.runtimeRequirementsInTree = new Set(); + } +} -/***/ 11315: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { +class ChunkGraph { + /** + * @param {ModuleGraph} moduleGraph the module graph + * @param {string | Hash} hashFunction the hash function to use + */ + constructor(moduleGraph, hashFunction = "md4") { + /** @private @type {WeakMap} */ + this._modules = new WeakMap(); + /** @private @type {WeakMap} */ + this._chunks = new WeakMap(); + /** @private @type {WeakMap} */ + this._blockChunkGroups = new WeakMap(); + /** @private @type {Map} */ + this._runtimeIds = new Map(); + /** @type {ModuleGraph} */ + this.moduleGraph = moduleGraph; -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ + this._hashFunction = hashFunction; + this._getGraphRoots = this._getGraphRoots.bind(this); + } + /** + * @private + * @param {Module} module the module + * @returns {ChunkGraphModule} internal module + */ + _getChunkGraphModule(module) { + let cgm = this._modules.get(module); + if (cgm === undefined) { + cgm = new ChunkGraphModule(); + this._modules.set(module, cgm); + } + return cgm; + } -const DependenciesBlock = __webpack_require__(21484); -const makeSerializable = __webpack_require__(26522); + /** + * @private + * @param {Chunk} chunk the chunk + * @returns {ChunkGraphChunk} internal chunk + */ + _getChunkGraphChunk(chunk) { + let cgc = this._chunks.get(chunk); + if (cgc === undefined) { + cgc = new ChunkGraphChunk(); + this._chunks.set(chunk, cgc); + } + return cgc; + } -/** @typedef {import("./ChunkGraph")} ChunkGraph */ -/** @typedef {import("./ChunkGroup")} ChunkGroup */ -/** @typedef {import("./ChunkGroup").ChunkGroupOptions} ChunkGroupOptions */ -/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ -/** @typedef 
{import("./Dependency").UpdateHashContext} UpdateHashContext */ -/** @typedef {import("./Entrypoint").EntryOptions} EntryOptions */ -/** @typedef {import("./Module")} Module */ -/** @typedef {import("./util/Hash")} Hash */ + /** + * @param {SortableSet} set the sortable Set to get the roots of + * @returns {Module[]} the graph roots + */ + _getGraphRoots(set) { + const { moduleGraph } = this; + return Array.from( + findGraphRoots(set, module => { + /** @type {Set} */ + const set = new Set(); + const addDependencies = module => { + for (const connection of moduleGraph.getOutgoingConnections(module)) { + if (!connection.module) continue; + const activeState = connection.getActiveState(undefined); + if (activeState === false) continue; + if (activeState === ModuleGraphConnection.TRANSITIVE_ONLY) { + addDependencies(connection.module); + continue; + } + set.add(connection.module); + } + }; + addDependencies(module); + return set; + }) + ).sort(compareModulesByIdentifier); + } -class AsyncDependenciesBlock extends DependenciesBlock { /** - * @param {ChunkGroupOptions & { entryOptions?: EntryOptions }} groupOptions options for the group - * @param {DependencyLocation=} loc the line of code - * @param {string=} request the request + * @param {Chunk} chunk the new chunk + * @param {Module} module the module + * @returns {void} */ - constructor(groupOptions, loc, request) { - super(); - if (typeof groupOptions === "string") { - groupOptions = { name: groupOptions }; - } else if (!groupOptions) { - groupOptions = { name: undefined }; - } - this.groupOptions = groupOptions; - this.loc = loc; - this.request = request; - this._stringifiedGroupOptions = undefined; + connectChunkAndModule(chunk, module) { + const cgm = this._getChunkGraphModule(module); + const cgc = this._getChunkGraphChunk(chunk); + cgm.chunks.add(chunk); + cgc.modules.add(module); } /** - * @returns {string} The name of the chunk + * @param {Chunk} chunk the chunk + * @param {Module} module the module + * @returns {void} */ - get chunkName() { - return this.groupOptions.name; + disconnectChunkAndModule(chunk, module) { + const cgm = this._getChunkGraphModule(module); + const cgc = this._getChunkGraphChunk(chunk); + cgc.modules.delete(module); + cgm.chunks.delete(chunk); } /** - * @param {string} value The new chunk name + * @param {Chunk} chunk the chunk which will be disconnected * @returns {void} */ - set chunkName(value) { - if (this.groupOptions.name !== value) { - this.groupOptions.name = value; - this._stringifiedGroupOptions = undefined; + disconnectChunk(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + for (const module of cgc.modules) { + const cgm = this._getChunkGraphModule(module); + cgm.chunks.delete(chunk); } + cgc.modules.clear(); + chunk.disconnectFromGroups(); + ChunkGraph.clearChunkGraphForChunk(chunk); } /** - * @param {Hash} hash the hash used to track dependencies - * @param {UpdateHashContext} context context + * @param {Chunk} chunk the chunk + * @param {Iterable} modules the modules * @returns {void} */ - updateHash(hash, context) { - const { chunkGraph } = context; - if (this._stringifiedGroupOptions === undefined) { - this._stringifiedGroupOptions = JSON.stringify(this.groupOptions); + attachModules(chunk, modules) { + const cgc = this._getChunkGraphChunk(chunk); + for (const module of modules) { + cgc.modules.add(module); } - const chunkGroup = chunkGraph.getBlockChunkGroup(this); - hash.update( - `${this._stringifiedGroupOptions}${chunkGroup ? 
chunkGroup.id : ""}` - ); - super.updateHash(hash, context); } - serialize(context) { - const { write } = context; - write(this.groupOptions); - write(this.loc); - write(this.request); - super.serialize(context); + /** + * @param {Chunk} chunk the chunk + * @param {Iterable} modules the runtime modules + * @returns {void} + */ + attachRuntimeModules(chunk, modules) { + const cgc = this._getChunkGraphChunk(chunk); + for (const module of modules) { + cgc.runtimeModules.add(module); + } } - deserialize(context) { - const { read } = context; - this.groupOptions = read(); - this.loc = read(); - this.request = read(); - super.deserialize(context); + /** + * @param {Chunk} chunk the chunk + * @param {Iterable} modules the modules that require a full hash + * @returns {void} + */ + attachFullHashModules(chunk, modules) { + const cgc = this._getChunkGraphChunk(chunk); + if (cgc.fullHashModules === undefined) cgc.fullHashModules = new Set(); + for (const module of modules) { + cgc.fullHashModules.add(module); + } } -} - -makeSerializable(AsyncDependenciesBlock, "webpack/lib/AsyncDependenciesBlock"); -Object.defineProperty(AsyncDependenciesBlock.prototype, "module", { - get() { - throw new Error( - "module property was removed from AsyncDependenciesBlock (it's not needed)" - ); - }, - set() { - throw new Error( - "module property was removed from AsyncDependenciesBlock (it's not needed)" - ); + /** + * @param {Chunk} chunk the chunk + * @param {Iterable} modules the modules that require a full hash + * @returns {void} + */ + attachDependentHashModules(chunk, modules) { + const cgc = this._getChunkGraphChunk(chunk); + if (cgc.dependentHashModules === undefined) + cgc.dependentHashModules = new Set(); + for (const module of modules) { + cgc.dependentHashModules.add(module); + } } -}); - -module.exports = AsyncDependenciesBlock; + /** + * @param {Module} oldModule the replaced module + * @param {Module} newModule the replacing module + * @returns {void} + */ + replaceModule(oldModule, newModule) { + const oldCgm = this._getChunkGraphModule(oldModule); + const newCgm = this._getChunkGraphModule(newModule); -/***/ }), + for (const chunk of oldCgm.chunks) { + const cgc = this._getChunkGraphChunk(chunk); + cgc.modules.delete(oldModule); + cgc.modules.add(newModule); + newCgm.chunks.add(chunk); + } + oldCgm.chunks.clear(); -/***/ 69073: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + if (oldCgm.entryInChunks !== undefined) { + if (newCgm.entryInChunks === undefined) { + newCgm.entryInChunks = new Set(); + } + for (const chunk of oldCgm.entryInChunks) { + const cgc = this._getChunkGraphChunk(chunk); + const old = cgc.entryModules.get(oldModule); + /** @type {Map} */ + const newEntryModules = new Map(); + for (const [m, cg] of cgc.entryModules) { + if (m === oldModule) { + newEntryModules.set(newModule, old); + } else { + newEntryModules.set(m, cg); + } + } + cgc.entryModules = newEntryModules; + newCgm.entryInChunks.add(chunk); + } + oldCgm.entryInChunks = undefined; + } -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Sean Larkin @thelarkinn -*/ + if (oldCgm.runtimeInChunks !== undefined) { + if (newCgm.runtimeInChunks === undefined) { + newCgm.runtimeInChunks = new Set(); + } + for (const chunk of oldCgm.runtimeInChunks) { + const cgc = this._getChunkGraphChunk(chunk); + cgc.runtimeModules.delete(/** @type {RuntimeModule} */ (oldModule)); + cgc.runtimeModules.add(/** @type {RuntimeModule} */ (newModule)); + 
newCgm.runtimeInChunks.add(chunk); + if ( + cgc.fullHashModules !== undefined && + cgc.fullHashModules.has(/** @type {RuntimeModule} */ (oldModule)) + ) { + cgc.fullHashModules.delete(/** @type {RuntimeModule} */ (oldModule)); + cgc.fullHashModules.add(/** @type {RuntimeModule} */ (newModule)); + } + if ( + cgc.dependentHashModules !== undefined && + cgc.dependentHashModules.has(/** @type {RuntimeModule} */ (oldModule)) + ) { + cgc.dependentHashModules.delete( + /** @type {RuntimeModule} */ (oldModule) + ); + cgc.dependentHashModules.add( + /** @type {RuntimeModule} */ (newModule) + ); + } + } + oldCgm.runtimeInChunks = undefined; + } + } + /** + * @param {Module} module the checked module + * @param {Chunk} chunk the checked chunk + * @returns {boolean} true, if the chunk contains the module + */ + isModuleInChunk(module, chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.modules.has(module); + } + /** + * @param {Module} module the checked module + * @param {ChunkGroup} chunkGroup the checked chunk group + * @returns {boolean} true, if the chunk contains the module + */ + isModuleInChunkGroup(module, chunkGroup) { + for (const chunk of chunkGroup.chunks) { + if (this.isModuleInChunk(module, chunk)) return true; + } + return false; + } -const WebpackError = __webpack_require__(68422); + /** + * @param {Module} module the checked module + * @returns {boolean} true, if the module is entry of any chunk + */ + isEntryModule(module) { + const cgm = this._getChunkGraphModule(module); + return cgm.entryInChunks !== undefined; + } -/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ -/** @typedef {import("./Module")} Module */ + /** + * @param {Module} module the module + * @returns {Iterable} iterable of chunks (do not modify) + */ + getModuleChunksIterable(module) { + const cgm = this._getChunkGraphModule(module); + return cgm.chunks; + } -class AsyncDependencyToInitialChunkError extends WebpackError { /** - * Creates an instance of AsyncDependencyToInitialChunkError. - * @param {string} chunkName Name of Chunk - * @param {Module} module module tied to dependency - * @param {DependencyLocation} loc location of dependency + * @param {Module} module the module + * @param {function(Chunk, Chunk): -1|0|1} sortFn sort function + * @returns {Iterable} iterable of chunks (do not modify) */ - constructor(chunkName, module, loc) { - super( - `It's not allowed to load an initial chunk on demand. 
The chunk name "${chunkName}" is already used by an entrypoint.` - ); + getOrderedModuleChunksIterable(module, sortFn) { + const cgm = this._getChunkGraphModule(module); + cgm.chunks.sortWith(sortFn); + return cgm.chunks; + } - this.name = "AsyncDependencyToInitialChunkError"; - this.module = module; - this.loc = loc; + /** + * @param {Module} module the module + * @returns {Chunk[]} array of chunks (cached, do not modify) + */ + getModuleChunks(module) { + const cgm = this._getChunkGraphModule(module); + return cgm.chunks.getFromCache(getArray); } -} -module.exports = AsyncDependencyToInitialChunkError; + /** + * @param {Module} module the module + * @returns {number} the number of chunk which contain the module + */ + getNumberOfModuleChunks(module) { + const cgm = this._getChunkGraphModule(module); + return cgm.chunks.size; + } + /** + * @param {Module} module the module + * @returns {RuntimeSpecSet} runtimes + */ + getModuleRuntimes(module) { + const cgm = this._getChunkGraphModule(module); + return cgm.chunks.getFromUnorderedCache(getModuleRuntimes); + } -/***/ }), + /** + * @param {Chunk} chunk the chunk + * @returns {number} the number of modules which are contained in this chunk + */ + getNumberOfChunkModules(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.modules.size; + } -/***/ 51714: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + /** + * @param {Chunk} chunk the chunk + * @returns {number} the number of full hash modules which are contained in this chunk + */ + getNumberOfChunkFullHashModules(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.fullHashModules === undefined ? 0 : cgc.fullHashModules.size; + } -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ + /** + * @param {Chunk} chunk the chunk + * @returns {Iterable} return the modules for this chunk + */ + getChunkModulesIterable(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.modules; + } + /** + * @param {Chunk} chunk the chunk + * @param {string} sourceType source type + * @returns {Iterable | undefined} return the modules for this chunk + */ + getChunkModulesIterableBySourceType(chunk, sourceType) { + const cgc = this._getChunkGraphChunk(chunk); + const modulesWithSourceType = cgc.modules + .getFromUnorderedCache(modulesBySourceType) + .get(sourceType); + return modulesWithSourceType; + } + /** + * @param {Chunk} chunk the chunk + * @param {function(Module, Module): -1|0|1} comparator comparator function + * @returns {Iterable} return the modules for this chunk + */ + getOrderedChunkModulesIterable(chunk, comparator) { + const cgc = this._getChunkGraphChunk(chunk); + cgc.modules.sortWith(comparator); + return cgc.modules; + } -const asyncLib = __webpack_require__(36386); -const NormalModule = __webpack_require__(11026); -const PrefetchDependency = __webpack_require__(39986); + /** + * @param {Chunk} chunk the chunk + * @param {string} sourceType source type + * @param {function(Module, Module): -1|0|1} comparator comparator function + * @returns {Iterable | undefined} return the modules for this chunk + */ + getOrderedChunkModulesIterableBySourceType(chunk, sourceType, comparator) { + const cgc = this._getChunkGraphChunk(chunk); + const modulesWithSourceType = cgc.modules + .getFromUnorderedCache(modulesBySourceType) + .get(sourceType); + if (modulesWithSourceType === undefined) return undefined; + modulesWithSourceType.sortWith(comparator); + return 
modulesWithSourceType; + } -/** @typedef {import("./Compiler")} Compiler */ + /** + * @param {Chunk} chunk the chunk + * @returns {Module[]} return the modules for this chunk (cached, do not modify) + */ + getChunkModules(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.modules.getFromUnorderedCache(getArray); + } -class AutomaticPrefetchPlugin { /** - * Apply the plugin - * @param {Compiler} compiler the compiler instance - * @returns {void} + * @param {Chunk} chunk the chunk + * @param {function(Module, Module): -1|0|1} comparator comparator function + * @returns {Module[]} return the modules for this chunk (cached, do not modify) */ - apply(compiler) { - compiler.hooks.compilation.tap( - "AutomaticPrefetchPlugin", - (compilation, { normalModuleFactory }) => { - compilation.dependencyFactories.set( - PrefetchDependency, - normalModuleFactory - ); - } - ); - let lastModules = null; - compiler.hooks.afterCompile.tap("AutomaticPrefetchPlugin", compilation => { - lastModules = []; - - for (const m of compilation.modules) { - if (m instanceof NormalModule) { - lastModules.push({ - context: m.context, - request: m.request - }); - } - } - }); - compiler.hooks.make.tapAsync( - "AutomaticPrefetchPlugin", - (compilation, callback) => { - if (!lastModules) return callback(); - asyncLib.forEach( - lastModules, - (m, callback) => { - compilation.addModuleChain( - m.context || compiler.context, - new PrefetchDependency(`!!${m.request}`), - callback - ); - }, - err => { - lastModules = null; - callback(err); - } - ); - } - ); + getOrderedChunkModules(chunk, comparator) { + const cgc = this._getChunkGraphChunk(chunk); + const arrayFunction = createOrderedArrayFunction(comparator); + return cgc.modules.getFromUnorderedCache(arrayFunction); } -} -module.exports = AutomaticPrefetchPlugin; - - -/***/ }), - -/***/ 45480: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const { ConcatSource } = __webpack_require__(96192); -const Compilation = __webpack_require__(59622); -const ModuleFilenameHelpers = __webpack_require__(80295); -const Template = __webpack_require__(92066); -const createSchemaValidation = __webpack_require__(77695); -/** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginArgument} BannerPluginArgument */ -/** @typedef {import("../declarations/plugins/BannerPlugin").BannerPluginOptions} BannerPluginOptions */ -/** @typedef {import("./Compiler")} Compiler */ + /** + * @param {Chunk} chunk the chunk + * @param {ModuleFilterPredicate} filterFn function used to filter modules + * @param {boolean} includeAllChunks all chunks or only async chunks + * @returns {Record} chunk to module ids object + */ + getChunkModuleIdMap(chunk, filterFn, includeAllChunks = false) { + /** @type {Record} */ + const chunkModuleIdMap = Object.create(null); -const validate = createSchemaValidation( - __webpack_require__(79456), - () => __webpack_require__(33946), - { - name: "Banner Plugin", - baseDataPath: "options" - } -); + for (const asyncChunk of includeAllChunks + ? 
chunk.getAllReferencedChunks() + : chunk.getAllAsyncChunks()) { + /** @type {(string|number)[]} */ + let array; + for (const module of this.getOrderedChunkModulesIterable( + asyncChunk, + compareModulesById(this) + )) { + if (filterFn(module)) { + if (array === undefined) { + array = []; + chunkModuleIdMap[asyncChunk.id] = array; + } + const moduleId = this.getModuleId(module); + array.push(moduleId); + } + } + } -const wrapComment = str => { - if (!str.includes("\n")) { - return Template.toComment(str); + return chunkModuleIdMap; } - return `/*!\n * ${str - .replace(/\*\//g, "* /") - .split("\n") - .join("\n * ") - .replace(/\s+\n/g, "\n") - .trimRight()}\n */`; -}; -class BannerPlugin { /** - * @param {BannerPluginArgument} options options object + * @param {Chunk} chunk the chunk + * @param {ModuleFilterPredicate} filterFn function used to filter modules + * @param {number} hashLength length of the hash + * @param {boolean} includeAllChunks all chunks or only async chunks + * @returns {Record>} chunk to module id to module hash object */ - constructor(options) { - if (typeof options === "string" || typeof options === "function") { - options = { - banner: options - }; - } + getChunkModuleRenderedHashMap( + chunk, + filterFn, + hashLength = 0, + includeAllChunks = false + ) { + /** @type {Record>} */ + const chunkModuleHashMap = Object.create(null); - validate(options); + for (const asyncChunk of includeAllChunks + ? chunk.getAllReferencedChunks() + : chunk.getAllAsyncChunks()) { + /** @type {Record} */ + let idToHashMap; + for (const module of this.getOrderedChunkModulesIterable( + asyncChunk, + compareModulesById(this) + )) { + if (filterFn(module)) { + if (idToHashMap === undefined) { + idToHashMap = Object.create(null); + chunkModuleHashMap[asyncChunk.id] = idToHashMap; + } + const moduleId = this.getModuleId(module); + const hash = this.getRenderedModuleHash(module, asyncChunk.runtime); + idToHashMap[moduleId] = hashLength ? hash.slice(0, hashLength) : hash; + } + } + } - this.options = options; + return chunkModuleHashMap; + } - const bannerOption = options.banner; - if (typeof bannerOption === "function") { - const getBanner = bannerOption; - this.banner = this.options.raw - ? getBanner - : data => wrapComment(getBanner(data)); - } else { - const banner = this.options.raw - ? 
bannerOption - : wrapComment(bannerOption); - this.banner = () => banner; + /** + * @param {Chunk} chunk the chunk + * @param {ChunkFilterPredicate} filterFn function used to filter chunks + * @returns {Record} chunk map + */ + getChunkConditionMap(chunk, filterFn) { + const map = Object.create(null); + for (const c of chunk.getAllReferencedChunks()) { + map[c.id] = filterFn(c, this); } + return map; } /** - * Apply the plugin - * @param {Compiler} compiler the compiler instance - * @returns {void} + * @param {Chunk} chunk the chunk + * @param {ModuleFilterPredicate} filterFn predicate function used to filter modules + * @param {ChunkFilterPredicate=} filterChunkFn predicate function used to filter chunks + * @returns {boolean} return true if module exists in graph */ - apply(compiler) { - const options = this.options; - const banner = this.banner; - const matchObject = ModuleFilenameHelpers.matchObject.bind( - undefined, - options - ); - - compiler.hooks.compilation.tap("BannerPlugin", compilation => { - compilation.hooks.processAssets.tap( - { - name: "BannerPlugin", - stage: Compilation.PROCESS_ASSETS_STAGE_ADDITIONS - }, - () => { - for (const chunk of compilation.chunks) { - if (options.entryOnly && !chunk.canBeInitial()) { - continue; - } + hasModuleInGraph(chunk, filterFn, filterChunkFn) { + const queue = new Set(chunk.groupsIterable); + const chunksProcessed = new Set(); - for (const file of chunk.files) { - if (!matchObject(file)) { - continue; + for (const chunkGroup of queue) { + for (const innerChunk of chunkGroup.chunks) { + if (!chunksProcessed.has(innerChunk)) { + chunksProcessed.add(innerChunk); + if (!filterChunkFn || filterChunkFn(innerChunk, this)) { + for (const module of this.getChunkModulesIterable(innerChunk)) { + if (filterFn(module)) { + return true; } - - const data = { - chunk, - filename: file - }; - - const comment = compilation.getPath(banner, data); - - compilation.updateAsset( - file, - old => new ConcatSource(comment, "\n", old) - ); } } } - ); - }); + } + for (const child of chunkGroup.childrenIterable) { + queue.add(child); + } + } + return false; } -} -module.exports = BannerPlugin; + /** + * @param {Chunk} chunkA first chunk + * @param {Chunk} chunkB second chunk + * @returns {-1|0|1} this is a comparator function like sort and returns -1, 0, or 1 based on sort order + */ + compareChunks(chunkA, chunkB) { + const cgcA = this._getChunkGraphChunk(chunkA); + const cgcB = this._getChunkGraphChunk(chunkB); + if (cgcA.modules.size > cgcB.modules.size) return -1; + if (cgcA.modules.size < cgcB.modules.size) return 1; + cgcA.modules.sortWith(compareModulesByIdentifier); + cgcB.modules.sortWith(compareModulesByIdentifier); + return compareModuleIterables(cgcA.modules, cgcB.modules); + } + /** + * @param {Chunk} chunk the chunk + * @returns {number} total size of all modules in the chunk + */ + getChunkModulesSize(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.modules.getFromUnorderedCache(getModulesSize); + } -/***/ }), + /** + * @param {Chunk} chunk the chunk + * @returns {Record} total sizes of all modules in the chunk by source type + */ + getChunkModulesSizes(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.modules.getFromUnorderedCache(getModulesSizes); + } -/***/ 99453: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + /** + * @param {Chunk} chunk the chunk + * @returns {Module[]} root modules of the chunks (ordered by identifier) + */ + getChunkRootModules(chunk) { + const cgc = 
this._getChunkGraphChunk(chunk); + return cgc.modules.getFromUnorderedCache(this._getGraphRoots); + } -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ + /** + * @param {Chunk} chunk the chunk + * @param {ChunkSizeOptions} options options object + * @returns {number} total size of the chunk + */ + getChunkSize(chunk, options = {}) { + const cgc = this._getChunkGraphChunk(chunk); + const modulesSize = cgc.modules.getFromUnorderedCache(getModulesSize); + const chunkOverhead = + typeof options.chunkOverhead === "number" ? options.chunkOverhead : 10000; + const entryChunkMultiplicator = + typeof options.entryChunkMultiplicator === "number" + ? options.entryChunkMultiplicator + : 10; + return ( + chunkOverhead + + modulesSize * (chunk.canBeInitial() ? entryChunkMultiplicator : 1) + ); + } + /** + * @param {Chunk} chunkA chunk + * @param {Chunk} chunkB chunk + * @param {ChunkSizeOptions} options options object + * @returns {number} total size of the chunk or false if chunks can't be integrated + */ + getIntegratedChunksSize(chunkA, chunkB, options = {}) { + const cgcA = this._getChunkGraphChunk(chunkA); + const cgcB = this._getChunkGraphChunk(chunkB); + const allModules = new Set(cgcA.modules); + for (const m of cgcB.modules) allModules.add(m); + let modulesSize = getModulesSize(allModules); + const chunkOverhead = + typeof options.chunkOverhead === "number" ? options.chunkOverhead : 10000; + const entryChunkMultiplicator = + typeof options.entryChunkMultiplicator === "number" + ? options.entryChunkMultiplicator + : 10; + return ( + chunkOverhead + + modulesSize * + (chunkA.canBeInitial() || chunkB.canBeInitial() + ? entryChunkMultiplicator + : 1) + ); + } + /** + * @param {Chunk} chunkA chunk + * @param {Chunk} chunkB chunk + * @returns {boolean} true, if chunks could be integrated + */ + canChunksBeIntegrated(chunkA, chunkB) { + if (chunkA.preventIntegration || chunkB.preventIntegration) { + return false; + } -const { AsyncParallelHook, AsyncSeriesBailHook, SyncHook } = __webpack_require__(34718); -const { - makeWebpackError, - makeWebpackErrorCallback -} = __webpack_require__(89935); + const hasRuntimeA = chunkA.hasRuntime(); + const hasRuntimeB = chunkB.hasRuntime(); -/** @typedef {import("./WebpackError")} WebpackError */ + if (hasRuntimeA !== hasRuntimeB) { + if (hasRuntimeA) { + return isAvailableChunk(chunkA, chunkB); + } else if (hasRuntimeB) { + return isAvailableChunk(chunkB, chunkA); + } else { + return false; + } + } -/** - * @typedef {Object} Etag - * @property {function(): string} toString - */ + if ( + this.getNumberOfEntryModules(chunkA) > 0 || + this.getNumberOfEntryModules(chunkB) > 0 + ) { + return false; + } -/** - * @template T - * @callback CallbackCache - * @param {WebpackError=} err - * @param {T=} result - * @returns {void} - */ + return true; + } -/** - * @callback GotHandler - * @param {any} result - * @param {function(Error=): void} callback - * @returns {void} - */ + /** + * @param {Chunk} chunkA the target chunk + * @param {Chunk} chunkB the chunk to integrate + * @returns {void} + */ + integrateChunks(chunkA, chunkB) { + // Decide for one name (deterministic) + if (chunkA.name && chunkB.name) { + if ( + this.getNumberOfEntryModules(chunkA) > 0 === + this.getNumberOfEntryModules(chunkB) > 0 + ) { + // When both chunks have entry modules or none have one, use + // shortest name + if (chunkA.name.length !== chunkB.name.length) { + chunkA.name = + chunkA.name.length < chunkB.name.length ? 
chunkA.name : chunkB.name; + } else { + chunkA.name = chunkA.name < chunkB.name ? chunkA.name : chunkB.name; + } + } else if (this.getNumberOfEntryModules(chunkB) > 0) { + // Pick the name of the chunk with the entry module + chunkA.name = chunkB.name; + } + } else if (chunkB.name) { + chunkA.name = chunkB.name; + } -const needCalls = (times, callback) => { - return err => { - if (--times === 0) { - return callback(err); + // Merge id name hints + for (const hint of chunkB.idNameHints) { + chunkA.idNameHints.add(hint); } - if (err && times > 0) { - times = 0; - return callback(err); + + // Merge runtime + chunkA.runtime = mergeRuntime(chunkA.runtime, chunkB.runtime); + + // getChunkModules is used here to create a clone, because disconnectChunkAndModule modifies + for (const module of this.getChunkModules(chunkB)) { + this.disconnectChunkAndModule(chunkB, module); + this.connectChunkAndModule(chunkA, module); } - }; -}; -class Cache { - constructor() { - this.hooks = { - /** @type {AsyncSeriesBailHook<[string, Etag | null, GotHandler[]], any>} */ - get: new AsyncSeriesBailHook(["identifier", "etag", "gotHandlers"]), - /** @type {AsyncParallelHook<[string, Etag | null, any]>} */ - store: new AsyncParallelHook(["identifier", "etag", "data"]), - /** @type {AsyncParallelHook<[Iterable]>} */ - storeBuildDependencies: new AsyncParallelHook(["dependencies"]), - /** @type {SyncHook<[]>} */ - beginIdle: new SyncHook([]), - /** @type {AsyncParallelHook<[]>} */ - endIdle: new AsyncParallelHook([]), - /** @type {AsyncParallelHook<[]>} */ - shutdown: new AsyncParallelHook([]) - }; + for (const [module, chunkGroup] of Array.from( + this.getChunkEntryModulesWithChunkGroupIterable(chunkB) + )) { + this.disconnectChunkAndEntryModule(chunkB, module); + this.connectChunkAndEntryModule(chunkA, module, chunkGroup); + } + + for (const chunkGroup of chunkB.groupsIterable) { + chunkGroup.replaceChunk(chunkB, chunkA); + chunkA.addGroup(chunkGroup); + chunkB.removeGroup(chunkGroup); + } + ChunkGraph.clearChunkGraphForChunk(chunkB); } /** - * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @param {CallbackCache} callback signals when the value is retrieved + * @param {Chunk} chunk the chunk to upgrade * @returns {void} */ - get(identifier, etag, callback) { - const gotHandlers = []; - this.hooks.get.callAsync(identifier, etag, gotHandlers, (err, result) => { - if (err) { - callback(makeWebpackError(err, "Cache.hooks.get")); - return; - } - if (result === null) { - result = undefined; - } - if (gotHandlers.length > 1) { - const innerCallback = needCalls(gotHandlers.length, () => - callback(null, result) - ); - for (const gotHandler of gotHandlers) { - gotHandler(result, innerCallback); - } - } else if (gotHandlers.length === 1) { - gotHandlers[0](result, () => callback(null, result)); - } else { - callback(null, result); + upgradeDependentToFullHashModules(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + if (cgc.dependentHashModules === undefined) return; + if (cgc.fullHashModules === undefined) { + cgc.fullHashModules = cgc.dependentHashModules; + } else { + for (const m of cgc.dependentHashModules) { + cgc.fullHashModules.add(m); } - }); + cgc.dependentHashModules = undefined; + } } /** - * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @param {T} data the value to store - * @param {CallbackCache} callback signals when the value is stored - * @returns {void} + * @param {Module} module the 
checked module + * @param {Chunk} chunk the checked chunk + * @returns {boolean} true, if the chunk contains the module as entry */ - store(identifier, etag, data, callback) { - this.hooks.store.callAsync( - identifier, - etag, - data, - makeWebpackErrorCallback(callback, "Cache.hooks.store") - ); + isEntryModuleInChunk(module, chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.entryModules.has(module); } /** - * After this method has succeeded the cache can only be restored when build dependencies are - * @param {Iterable} dependencies list of all build dependencies - * @param {CallbackCache} callback signals when the dependencies are stored + * @param {Chunk} chunk the new chunk + * @param {Module} module the entry module + * @param {Entrypoint=} entrypoint the chunk group which must be loaded before the module is executed * @returns {void} */ - storeBuildDependencies(dependencies, callback) { - this.hooks.storeBuildDependencies.callAsync( - dependencies, - makeWebpackErrorCallback(callback, "Cache.hooks.storeBuildDependencies") - ); + connectChunkAndEntryModule(chunk, module, entrypoint) { + const cgm = this._getChunkGraphModule(module); + const cgc = this._getChunkGraphChunk(chunk); + if (cgm.entryInChunks === undefined) { + cgm.entryInChunks = new Set(); + } + cgm.entryInChunks.add(chunk); + cgc.entryModules.set(module, entrypoint); } /** + * @param {Chunk} chunk the new chunk + * @param {RuntimeModule} module the runtime module * @returns {void} */ - beginIdle() { - this.hooks.beginIdle.call(); + connectChunkAndRuntimeModule(chunk, module) { + const cgm = this._getChunkGraphModule(module); + const cgc = this._getChunkGraphChunk(chunk); + if (cgm.runtimeInChunks === undefined) { + cgm.runtimeInChunks = new Set(); + } + cgm.runtimeInChunks.add(chunk); + cgc.runtimeModules.add(module); } /** - * @param {CallbackCache} callback signals when the call finishes + * @param {Chunk} chunk the new chunk + * @param {RuntimeModule} module the module that require a full hash * @returns {void} */ - endIdle(callback) { - this.hooks.endIdle.callAsync( - makeWebpackErrorCallback(callback, "Cache.hooks.endIdle") - ); + addFullHashModuleToChunk(chunk, module) { + const cgc = this._getChunkGraphChunk(chunk); + if (cgc.fullHashModules === undefined) cgc.fullHashModules = new Set(); + cgc.fullHashModules.add(module); } /** - * @param {CallbackCache} callback signals when the call finishes + * @param {Chunk} chunk the new chunk + * @param {RuntimeModule} module the module that require a full hash * @returns {void} */ - shutdown(callback) { - this.hooks.shutdown.callAsync( - makeWebpackErrorCallback(callback, "Cache.hooks.shutdown") - ); + addDependentHashModuleToChunk(chunk, module) { + const cgc = this._getChunkGraphChunk(chunk); + if (cgc.dependentHashModules === undefined) + cgc.dependentHashModules = new Set(); + cgc.dependentHashModules.add(module); } -} -Cache.STAGE_MEMORY = -10; -Cache.STAGE_DEFAULT = 0; -Cache.STAGE_DISK = 10; -Cache.STAGE_NETWORK = 20; - -module.exports = Cache; - - -/***/ }), - -/***/ 67014: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const asyncLib = __webpack_require__(36386); -const getLazyHashedEtag = __webpack_require__(12339); -const mergeEtags = __webpack_require__(53885); - -/** @typedef {import("./Cache")} Cache */ -/** @typedef {import("./Cache").Etag} Etag */ -/** @typedef 
{import("./WebpackError")} WebpackError */ -/** @typedef {import("./cache/getLazyHashedEtag").HashableObject} HashableObject */ -/** @typedef {typeof import("./util/Hash")} HashConstructor */ - -/** - * @template T - * @callback CallbackCache - * @param {WebpackError=} err - * @param {T=} result - * @returns {void} - */ - -/** - * @template T - * @callback CallbackNormalErrorCache - * @param {Error=} err - * @param {T=} result - * @returns {void} - */ - -class MultiItemCache { /** - * @param {ItemCacheFacade[]} items item caches + * @param {Chunk} chunk the new chunk + * @param {Module} module the entry module + * @returns {void} */ - constructor(items) { - this._items = items; - if (items.length === 1) return /** @type {any} */ (items[0]); + disconnectChunkAndEntryModule(chunk, module) { + const cgm = this._getChunkGraphModule(module); + const cgc = this._getChunkGraphChunk(chunk); + cgm.entryInChunks.delete(chunk); + if (cgm.entryInChunks.size === 0) { + cgm.entryInChunks = undefined; + } + cgc.entryModules.delete(module); } /** - * @template T - * @param {CallbackCache} callback signals when the value is retrieved + * @param {Chunk} chunk the new chunk + * @param {RuntimeModule} module the runtime module * @returns {void} */ - get(callback) { - const next = i => { - this._items[i].get((err, result) => { - if (err) return callback(err); - if (result !== undefined) return callback(null, result); - if (++i >= this._items.length) return callback(); - next(i); - }); - }; - next(0); + disconnectChunkAndRuntimeModule(chunk, module) { + const cgm = this._getChunkGraphModule(module); + const cgc = this._getChunkGraphChunk(chunk); + cgm.runtimeInChunks.delete(chunk); + if (cgm.runtimeInChunks.size === 0) { + cgm.runtimeInChunks = undefined; + } + cgc.runtimeModules.delete(module); } /** - * @template T - * @returns {Promise} promise with the data + * @param {Module} module the entry module, it will no longer be entry + * @returns {void} */ - getPromise() { - const next = i => { - return this._items[i].getPromise().then(result => { - if (result !== undefined) return result; - if (++i < this._items.length) return next(i); - }); - }; - return next(0); + disconnectEntryModule(module) { + const cgm = this._getChunkGraphModule(module); + for (const chunk of cgm.entryInChunks) { + const cgc = this._getChunkGraphChunk(chunk); + cgc.entryModules.delete(module); + } + cgm.entryInChunks = undefined; } /** - * @template T - * @param {T} data the value to store - * @param {CallbackCache} callback signals when the value is stored + * @param {Chunk} chunk the chunk, for which all entries will be removed * @returns {void} */ - store(data, callback) { - asyncLib.each( - this._items, - (item, callback) => item.store(data, callback), - callback - ); + disconnectEntries(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + for (const module of cgc.entryModules.keys()) { + const cgm = this._getChunkGraphModule(module); + cgm.entryInChunks.delete(chunk); + if (cgm.entryInChunks.size === 0) { + cgm.entryInChunks = undefined; + } + } + cgc.entryModules.clear(); } /** - * @template T - * @param {T} data the value to store - * @returns {Promise} promise signals when the value is stored + * @param {Chunk} chunk the chunk + * @returns {number} the amount of entry modules in chunk */ - storePromise(data) { - return Promise.all(this._items.map(item => item.storePromise(data))).then( - () => {} - ); + getNumberOfEntryModules(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.entryModules.size; } -} -class 
ItemCacheFacade { /** - * @param {Cache} cache the root cache - * @param {string} name the child cache item name - * @param {Etag | null} etag the etag + * @param {Chunk} chunk the chunk + * @returns {number} the amount of entry modules in chunk */ - constructor(cache, name, etag) { - this._cache = cache; - this._name = name; - this._etag = etag; + getNumberOfRuntimeModules(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.runtimeModules.size; } /** - * @template T - * @param {CallbackCache} callback signals when the value is retrieved - * @returns {void} + * @param {Chunk} chunk the chunk + * @returns {Iterable} iterable of modules (do not modify) */ - get(callback) { - this._cache.get(this._name, this._etag, callback); + getChunkEntryModulesIterable(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.entryModules.keys(); } /** - * @template T - * @returns {Promise} promise with the data + * @param {Chunk} chunk the chunk + * @returns {Iterable} iterable of chunks */ - getPromise() { - return new Promise((resolve, reject) => { - this._cache.get(this._name, this._etag, (err, data) => { - if (err) { - reject(err); - } else { - resolve(data); + getChunkEntryDependentChunksIterable(chunk) { + /** @type {Set} */ + const set = new Set(); + for (const chunkGroup of chunk.groupsIterable) { + if (chunkGroup instanceof Entrypoint) { + const entrypointChunk = chunkGroup.getEntrypointChunk(); + const cgc = this._getChunkGraphChunk(entrypointChunk); + for (const chunkGroup of cgc.entryModules.values()) { + for (const c of chunkGroup.chunks) { + if (c !== chunk && c !== entrypointChunk && !c.hasRuntime()) { + set.add(c); + } + } } - }); - }); + } + } + + return set; } /** - * @template T - * @param {T} data the value to store - * @param {CallbackCache} callback signals when the value is stored - * @returns {void} + * @param {Chunk} chunk the chunk + * @returns {boolean} true, when it has dependent chunks */ - store(data, callback) { - this._cache.store(this._name, this._etag, data, callback); + hasChunkEntryDependentChunks(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + for (const chunkGroup of cgc.entryModules.values()) { + for (const c of chunkGroup.chunks) { + if (c !== chunk) { + return true; + } + } + } + return false; } /** - * @template T - * @param {T} data the value to store - * @returns {Promise} promise signals when the value is stored + * @param {Chunk} chunk the chunk + * @returns {Iterable} iterable of modules (do not modify) */ - storePromise(data) { - return new Promise((resolve, reject) => { - this._cache.store(this._name, this._etag, data, err => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - }); + getChunkRuntimeModulesIterable(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.runtimeModules; } /** - * @template T - * @param {function(CallbackNormalErrorCache): void} computer function to compute the value if not cached - * @param {CallbackNormalErrorCache} callback signals when the value is retrieved - * @returns {void} + * @param {Chunk} chunk the chunk + * @returns {RuntimeModule[]} array of modules in order of execution */ - provide(computer, callback) { - this.get((err, cacheEntry) => { - if (err) return callback(err); - if (cacheEntry !== undefined) return cacheEntry; - computer((err, result) => { - if (err) return callback(err); - this.store(result, err => { - if (err) return callback(err); - callback(null, result); - }); - }); - }); + getChunkRuntimeModulesInOrder(chunk) { + const cgc = 
this._getChunkGraphChunk(chunk); + const array = Array.from(cgc.runtimeModules); + array.sort( + concatComparators( + compareSelect( + /** + * @param {RuntimeModule} r runtime module + * @returns {number=} stage + */ + r => r.stage, + compareIds + ), + compareModulesByIdentifier + ) + ); + return array; } /** - * @template T - * @param {function(): Promise | T} computer function to compute the value if not cached - * @returns {Promise} promise with the data + * @param {Chunk} chunk the chunk + * @returns {Iterable | undefined} iterable of modules (do not modify) */ - async providePromise(computer) { - const cacheEntry = await this.getPromise(); - if (cacheEntry !== undefined) return cacheEntry; - const result = await computer(); - await this.storePromise(result); - return result; + getChunkFullHashModulesIterable(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.fullHashModules; } -} -class CacheFacade { /** - * @param {Cache} cache the root cache - * @param {string} name the child cache name - * @param {string | HashConstructor} hashFunction the hash function to use + * @param {Chunk} chunk the chunk + * @returns {ReadonlySet | undefined} set of modules (do not modify) */ - constructor(cache, name, hashFunction) { - this._cache = cache; - this._name = name; - this._hashFunction = hashFunction; + getChunkFullHashModulesSet(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.fullHashModules; } /** - * @param {string} name the child cache name# - * @returns {CacheFacade} child cache + * @param {Chunk} chunk the chunk + * @returns {Iterable | undefined} iterable of modules (do not modify) */ - getChildCache(name) { - return new CacheFacade( - this._cache, - `${this._name}|${name}`, - this._hashFunction - ); + getChunkDependentHashModulesIterable(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.dependentHashModules; } + /** @typedef {[Module, Entrypoint | undefined]} EntryModuleWithChunkGroup */ + /** - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @returns {ItemCacheFacade} item cache + * @param {Chunk} chunk the chunk + * @returns {Iterable} iterable of modules (do not modify) */ - getItemCache(identifier, etag) { - return new ItemCacheFacade( - this._cache, - `${this._name}|${identifier}`, - etag - ); + getChunkEntryModulesWithChunkGroupIterable(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.entryModules; } /** - * @param {HashableObject} obj an hashable object - * @returns {Etag} an etag that is lazy hashed + * @param {AsyncDependenciesBlock} depBlock the async block + * @returns {ChunkGroup} the chunk group */ - getLazyHashedEtag(obj) { - return getLazyHashedEtag(obj, this._hashFunction); + getBlockChunkGroup(depBlock) { + return this._blockChunkGroups.get(depBlock); } /** - * @param {Etag} a an etag - * @param {Etag} b another etag - * @returns {Etag} an etag that represents both + * @param {AsyncDependenciesBlock} depBlock the async block + * @param {ChunkGroup} chunkGroup the chunk group + * @returns {void} */ - mergeEtags(a, b) { - return mergeEtags(a, b); + connectBlockAndChunkGroup(depBlock, chunkGroup) { + this._blockChunkGroups.set(depBlock, chunkGroup); + chunkGroup.addBlock(depBlock); } /** - * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @param {CallbackCache} callback signals when the value is retrieved + * @param {ChunkGroup} chunkGroup the chunk group * @returns {void} */ - get(identifier, 
etag, callback) { - this._cache.get(`${this._name}|${identifier}`, etag, callback); + disconnectChunkGroup(chunkGroup) { + for (const block of chunkGroup.blocksIterable) { + this._blockChunkGroups.delete(block); + } + // TODO refactor by moving blocks list into ChunkGraph + chunkGroup._blocks.clear(); } /** - * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @returns {Promise} promise with the data + * @param {Module} module the module + * @returns {string | number} the id of the module */ - getPromise(identifier, etag) { - return new Promise((resolve, reject) => { - this._cache.get(`${this._name}|${identifier}`, etag, (err, data) => { - if (err) { - reject(err); - } else { - resolve(data); - } - }); - }); + getModuleId(module) { + const cgm = this._getChunkGraphModule(module); + return cgm.id; } /** - * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @param {T} data the value to store - * @param {CallbackCache} callback signals when the value is stored + * @param {Module} module the module + * @param {string | number} id the id of the module * @returns {void} */ - store(identifier, etag, data, callback) { - this._cache.store(`${this._name}|${identifier}`, etag, data, callback); + setModuleId(module, id) { + const cgm = this._getChunkGraphModule(module); + cgm.id = id; } /** - * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @param {T} data the value to store - * @returns {Promise} promise signals when the value is stored + * @param {string} runtime runtime + * @returns {string | number} the id of the runtime */ - storePromise(identifier, etag, data) { - return new Promise((resolve, reject) => { - this._cache.store(`${this._name}|${identifier}`, etag, data, err => { - if (err) { - reject(err); - } else { - resolve(); - } - }); - }); + getRuntimeId(runtime) { + return this._runtimeIds.get(runtime); } /** - * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @param {function(CallbackNormalErrorCache): void} computer function to compute the value if not cached - * @param {CallbackNormalErrorCache} callback signals when the value is retrieved + * @param {string} runtime runtime + * @param {string | number} id the id of the runtime * @returns {void} */ - provide(identifier, etag, computer, callback) { - this.get(identifier, etag, (err, cacheEntry) => { - if (err) return callback(err); - if (cacheEntry !== undefined) return cacheEntry; - computer((err, result) => { - if (err) return callback(err); - this.store(identifier, etag, result, err => { - if (err) return callback(err); - callback(null, result); - }); - }); - }); + setRuntimeId(runtime, id) { + this._runtimeIds.set(runtime, id); } /** * @template T - * @param {string} identifier the cache identifier - * @param {Etag | null} etag the etag - * @param {function(): Promise | T} computer function to compute the value if not cached - * @returns {Promise} promise with the data + * @param {Module} module the module + * @param {RuntimeSpecMap} hashes hashes data + * @param {RuntimeSpec} runtime the runtime + * @returns {T} hash */ - async providePromise(identifier, etag, computer) { - const cacheEntry = await this.getPromise(identifier, etag); - if (cacheEntry !== undefined) return cacheEntry; - const result = await computer(); - await this.storePromise(identifier, etag, result); - return result; - } -} - 
-module.exports = CacheFacade; -module.exports.ItemCacheFacade = ItemCacheFacade; -module.exports.MultiItemCache = MultiItemCache; - - -/***/ }), - -/***/ 42444: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const WebpackError = __webpack_require__(68422); - -/** @typedef {import("./Module")} Module */ -/** @typedef {import("./ModuleGraph")} ModuleGraph */ - -/** - * @param {Module[]} modules the modules to be sorted - * @returns {Module[]} sorted version of original modules - */ -const sortModules = modules => { - return modules.sort((a, b) => { - const aIdent = a.identifier(); - const bIdent = b.identifier(); - /* istanbul ignore next */ - if (aIdent < bIdent) return -1; - /* istanbul ignore next */ - if (aIdent > bIdent) return 1; - /* istanbul ignore next */ - return 0; - }); -}; - -/** - * @param {Module[]} modules each module from throw - * @param {ModuleGraph} moduleGraph the module graph - * @returns {string} each message from provided modules - */ -const createModulesListMessage = (modules, moduleGraph) => { - return modules - .map(m => { - let message = `* ${m.identifier()}`; - const validReasons = Array.from( - moduleGraph.getIncomingConnectionsByOriginModule(m).keys() - ).filter(x => x); - - if (validReasons.length > 0) { - message += `\n Used by ${validReasons.length} module(s), i. e.`; - message += `\n ${validReasons[0].identifier()}`; + _getModuleHashInfo(module, hashes, runtime) { + if (!hashes) { + throw new Error( + `Module ${module.identifier()} has no hash info for runtime ${runtimeToString( + runtime + )} (hashes not set at all)` + ); + } else if (runtime === undefined) { + const hashInfoItems = new Set(hashes.values()); + if (hashInfoItems.size !== 1) { + throw new Error( + `No unique hash info entry for unspecified runtime for ${module.identifier()} (existing runtimes: ${Array.from( + hashes.keys(), + r => runtimeToString(r) + ).join(", ")}). +Caller might not support runtime-dependent code generation (opt-out via optimization.usedExports: "global").` + ); } - return message; - }) - .join("\n"); -}; + return first(hashInfoItems); + } else { + const hashInfo = hashes.get(runtime); + if (!hashInfo) { + throw new Error( + `Module ${module.identifier()} has no hash info for runtime ${runtimeToString( + runtime + )} (available runtimes ${Array.from( + hashes.keys(), + runtimeToString + ).join(", ")})` + ); + } + return hashInfo; + } + } -class CaseSensitiveModulesWarning extends WebpackError { /** - * Creates an instance of CaseSensitiveModulesWarning. - * @param {Iterable} modules modules that were detected - * @param {ModuleGraph} moduleGraph the module graph + * @param {Module} module the module + * @param {RuntimeSpec} runtime the runtime + * @returns {boolean} true, if the module has hashes for this runtime */ - constructor(modules, moduleGraph) { - const sortedModules = sortModules(Array.from(modules)); - const modulesList = createModulesListMessage(sortedModules, moduleGraph); - super(`There are multiple modules with names that only differ in casing. -This can lead to unexpected behavior when compiling on a filesystem with other case-semantic. -Use equal casing. 
Compare these module identifiers: -${modulesList}`); - - this.name = "CaseSensitiveModulesWarning"; - this.module = sortedModules[0]; + hasModuleHashes(module, runtime) { + const cgm = this._getChunkGraphModule(module); + const hashes = cgm.hashes; + return hashes && hashes.has(runtime); } -} - -module.exports = CaseSensitiveModulesWarning; - - -/***/ }), - -/***/ 65574: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const ChunkGraph = __webpack_require__(97860); -const Entrypoint = __webpack_require__(86695); -const { intersect } = __webpack_require__(34715); -const SortableSet = __webpack_require__(67563); -const StringXor = __webpack_require__(23877); -const { - compareModulesByIdentifier, - compareChunkGroupsByIndex, - compareModulesById -} = __webpack_require__(26296); -const { createArrayToSetDeprecationSet } = __webpack_require__(2594); -const { mergeRuntime } = __webpack_require__(19655); - -/** @typedef {import("webpack-sources").Source} Source */ -/** @typedef {import("./ChunkGraph").ChunkFilterPredicate} ChunkFilterPredicate */ -/** @typedef {import("./ChunkGraph").ChunkSizeOptions} ChunkSizeOptions */ -/** @typedef {import("./ChunkGraph").ModuleFilterPredicate} ModuleFilterPredicate */ -/** @typedef {import("./ChunkGroup")} ChunkGroup */ -/** @typedef {import("./Compilation")} Compilation */ -/** @typedef {import("./Compilation").AssetInfo} AssetInfo */ -/** @typedef {import("./Compilation").PathData} PathData */ -/** @typedef {import("./Entrypoint").EntryOptions} EntryOptions */ -/** @typedef {import("./Module")} Module */ -/** @typedef {import("./ModuleGraph")} ModuleGraph */ -/** @typedef {import("./util/Hash")} Hash */ -/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */ - -const ChunkFilesSet = createArrayToSetDeprecationSet("chunk.files"); - -/** - * @typedef {Object} WithId an object who has an id property * - * @property {string | number} id the id of the object - */ - -/** - * @deprecated - * @typedef {Object} ChunkMaps - * @property {Record} hash - * @property {Record>} contentHash - * @property {Record} name - */ - -/** - * @deprecated - * @typedef {Object} ChunkModuleMaps - * @property {Record} id - * @property {Record} hash - */ - -let debugId = 1000; -/** - * A Chunk is a unit of encapsulation for Modules. - * Chunks are "rendered" into bundles that get emitted when the build completes. - */ -class Chunk { /** - * @param {string=} name of chunk being created, is optional (for subclasses) - * @param {boolean} backCompat enable backward-compatibility + * @param {Module} module the module + * @param {RuntimeSpec} runtime the runtime + * @returns {string} hash */ - constructor(name, backCompat = true) { - /** @type {number | string | null} */ - this.id = null; - /** @type {(number|string)[] | null} */ - this.ids = null; - /** @type {number} */ - this.debugId = debugId++; - /** @type {string} */ - this.name = name; - /** @type {SortableSet} */ - this.idNameHints = new SortableSet(); - /** @type {boolean} */ - this.preventIntegration = false; - /** @type {(string | function(PathData, AssetInfo=): string)?} */ - this.filenameTemplate = undefined; - /** @private @type {SortableSet} */ - this._groups = new SortableSet(undefined, compareChunkGroupsByIndex); - /** @type {RuntimeSpec} */ - this.runtime = undefined; - /** @type {Set} */ - this.files = backCompat ? 
new ChunkFilesSet() : new Set(); - /** @type {Set} */ - this.auxiliaryFiles = new Set(); - /** @type {boolean} */ - this.rendered = false; - /** @type {string=} */ - this.hash = undefined; - /** @type {Record} */ - this.contentHash = Object.create(null); - /** @type {string=} */ - this.renderedHash = undefined; - /** @type {string=} */ - this.chunkReason = undefined; - /** @type {boolean} */ - this.extraAsync = false; - } - - // TODO remove in webpack 6 - // BACKWARD-COMPAT START - get entryModule() { - const entryModules = Array.from( - ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.entryModule", - "DEP_WEBPACK_CHUNK_ENTRY_MODULE" - ).getChunkEntryModulesIterable(this) - ); - if (entryModules.length === 0) { - return undefined; - } else if (entryModules.length === 1) { - return entryModules[0]; - } else { - throw new Error( - "Module.entryModule: Multiple entry modules are not supported by the deprecated API (Use the new ChunkGroup API)" - ); - } + getModuleHash(module, runtime) { + const cgm = this._getChunkGraphModule(module); + const hashes = cgm.hashes; + return this._getModuleHashInfo(module, hashes, runtime).hash; } /** - * @returns {boolean} true, if the chunk contains an entry module + * @param {Module} module the module + * @param {RuntimeSpec} runtime the runtime + * @returns {string} hash */ - hasEntryModule() { - return ( - ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.hasEntryModule", - "DEP_WEBPACK_CHUNK_HAS_ENTRY_MODULE" - ).getNumberOfEntryModules(this) > 0 - ); + getRenderedModuleHash(module, runtime) { + const cgm = this._getChunkGraphModule(module); + const hashes = cgm.hashes; + return this._getModuleHashInfo(module, hashes, runtime).renderedHash; } /** * @param {Module} module the module - * @returns {boolean} true, if the chunk could be added + * @param {RuntimeSpec} runtime the runtime + * @param {string} hash the full hash + * @param {string} renderedHash the shortened hash for rendering + * @returns {void} */ - addModule(module) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.addModule", - "DEP_WEBPACK_CHUNK_ADD_MODULE" - ); - if (chunkGraph.isModuleInChunk(module, this)) return false; - chunkGraph.connectChunkAndModule(this, module); - return true; + setModuleHashes(module, runtime, hash, renderedHash) { + const cgm = this._getChunkGraphModule(module); + if (cgm.hashes === undefined) { + cgm.hashes = new RuntimeSpecMap(); + } + cgm.hashes.set(runtime, new ModuleHashInfo(hash, renderedHash)); } /** * @param {Module} module the module + * @param {RuntimeSpec} runtime the runtime + * @param {Set} items runtime requirements to be added (ownership of this Set is given to ChunkGraph when transferOwnership not false) + * @param {boolean} transferOwnership true: transfer ownership of the items object, false: items is immutable and shared and won't be modified * @returns {void} */ - removeModule(module) { - ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.removeModule", - "DEP_WEBPACK_CHUNK_REMOVE_MODULE" - ).disconnectChunkAndModule(this, module); + addModuleRuntimeRequirements( + module, + runtime, + items, + transferOwnership = true + ) { + const cgm = this._getChunkGraphModule(module); + const runtimeRequirementsMap = cgm.runtimeRequirements; + if (runtimeRequirementsMap === undefined) { + const map = new RuntimeSpecMap(); + // TODO avoid cloning item and track ownership instead + map.set(runtime, transferOwnership ? 
items : new Set(items)); + cgm.runtimeRequirements = map; + return; + } + runtimeRequirementsMap.update(runtime, runtimeRequirements => { + if (runtimeRequirements === undefined) { + return transferOwnership ? items : new Set(items); + } else if (!transferOwnership || runtimeRequirements.size >= items.size) { + for (const item of items) runtimeRequirements.add(item); + return runtimeRequirements; + } else { + for (const item of runtimeRequirements) items.add(item); + return items; + } + }); } /** - * @returns {number} the number of module which are contained in this chunk + * @param {Chunk} chunk the chunk + * @param {Set} items runtime requirements to be added (ownership of this Set is given to ChunkGraph) + * @returns {void} */ - getNumberOfModules() { - return ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.getNumberOfModules", - "DEP_WEBPACK_CHUNK_GET_NUMBER_OF_MODULES" - ).getNumberOfChunkModules(this); - } - - get modulesIterable() { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.modulesIterable", - "DEP_WEBPACK_CHUNK_MODULES_ITERABLE" - ); - return chunkGraph.getOrderedChunkModulesIterable( - this, - compareModulesByIdentifier - ); + addChunkRuntimeRequirements(chunk, items) { + const cgc = this._getChunkGraphChunk(chunk); + const runtimeRequirements = cgc.runtimeRequirements; + if (runtimeRequirements === undefined) { + cgc.runtimeRequirements = items; + } else if (runtimeRequirements.size >= items.size) { + for (const item of items) runtimeRequirements.add(item); + } else { + for (const item of runtimeRequirements) items.add(item); + cgc.runtimeRequirements = items; + } } /** - * @param {Chunk} otherChunk the chunk to compare with - * @returns {-1|0|1} the comparison result + * @param {Chunk} chunk the chunk + * @param {Iterable} items runtime requirements to be added + * @returns {void} */ - compareTo(otherChunk) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.compareTo", - "DEP_WEBPACK_CHUNK_COMPARE_TO" - ); - return chunkGraph.compareChunks(this, otherChunk); + addTreeRuntimeRequirements(chunk, items) { + const cgc = this._getChunkGraphChunk(chunk); + const runtimeRequirements = cgc.runtimeRequirementsInTree; + for (const item of items) runtimeRequirements.add(item); } /** * @param {Module} module the module - * @returns {boolean} true, if the chunk contains the module + * @param {RuntimeSpec} runtime the runtime + * @returns {ReadonlySet} runtime requirements */ - containsModule(module) { - return ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.containsModule", - "DEP_WEBPACK_CHUNK_CONTAINS_MODULE" - ).isModuleInChunk(module, this); + getModuleRuntimeRequirements(module, runtime) { + const cgm = this._getChunkGraphModule(module); + const runtimeRequirements = + cgm.runtimeRequirements && cgm.runtimeRequirements.get(runtime); + return runtimeRequirements === undefined ? EMPTY_SET : runtimeRequirements; } /** - * @returns {Module[]} the modules for this chunk + * @param {Chunk} chunk the chunk + * @returns {ReadonlySet} runtime requirements */ - getModules() { - return ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.getModules", - "DEP_WEBPACK_CHUNK_GET_MODULES" - ).getChunkModules(this); + getChunkRuntimeRequirements(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + const runtimeRequirements = cgc.runtimeRequirements; + return runtimeRequirements === undefined ? 
EMPTY_SET : runtimeRequirements; } /** - * @returns {void} + * @param {Module} module the module + * @param {RuntimeSpec} runtime the runtime + * @param {boolean} withConnections include connections + * @returns {string} hash */ - remove() { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.remove", - "DEP_WEBPACK_CHUNK_REMOVE" - ); - chunkGraph.disconnectChunk(this); - this.disconnectFromGroups(); + getModuleGraphHash(module, runtime, withConnections = true) { + const cgm = this._getChunkGraphModule(module); + return withConnections + ? this._getModuleGraphHashWithConnections(cgm, module, runtime) + : this._getModuleGraphHashBigInt(cgm, module, runtime).toString(16); } /** * @param {Module} module the module - * @param {Chunk} otherChunk the target chunk - * @returns {void} + * @param {RuntimeSpec} runtime the runtime + * @param {boolean} withConnections include connections + * @returns {bigint} hash */ - moveModule(module, otherChunk) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.moveModule", - "DEP_WEBPACK_CHUNK_MOVE_MODULE" - ); - chunkGraph.disconnectChunkAndModule(this, module); - chunkGraph.connectChunkAndModule(otherChunk, module); + getModuleGraphHashBigInt(module, runtime, withConnections = true) { + const cgm = this._getChunkGraphModule(module); + return withConnections + ? BigInt( + `0x${this._getModuleGraphHashWithConnections(cgm, module, runtime)}` + ) + : this._getModuleGraphHashBigInt(cgm, module, runtime); } /** - * @param {Chunk} otherChunk the other chunk - * @returns {boolean} true, if the specified chunk has been integrated + * @param {ChunkGraphModule} cgm the ChunkGraphModule + * @param {Module} module the module + * @param {RuntimeSpec} runtime the runtime + * @returns {bigint} hash as big int */ - integrate(otherChunk) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.integrate", - "DEP_WEBPACK_CHUNK_INTEGRATE" - ); - if (chunkGraph.canChunksBeIntegrated(this, otherChunk)) { - chunkGraph.integrateChunks(this, otherChunk); - return true; - } else { - return false; + _getModuleGraphHashBigInt(cgm, module, runtime) { + if (cgm.graphHashes === undefined) { + cgm.graphHashes = new RuntimeSpecMap(); } + const graphHash = cgm.graphHashes.provide(runtime, () => { + const hash = createHash(this._hashFunction); + hash.update(`${cgm.id}${this.moduleGraph.isAsync(module)}`); + this.moduleGraph.getExportsInfo(module).updateHash(hash, runtime); + return BigInt(`0x${/** @type {string} */ (hash.digest("hex"))}`); + }); + return graphHash; } /** - * @param {Chunk} otherChunk the other chunk - * @returns {boolean} true, if chunks could be integrated + * @param {ChunkGraphModule} cgm the ChunkGraphModule + * @param {Module} module the module + * @param {RuntimeSpec} runtime the runtime + * @returns {string} hash */ - canBeIntegrated(otherChunk) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.canBeIntegrated", - "DEP_WEBPACK_CHUNK_CAN_BE_INTEGRATED" - ); - return chunkGraph.canChunksBeIntegrated(this, otherChunk); + _getModuleGraphHashWithConnections(cgm, module, runtime) { + if (cgm.graphHashesWithConnections === undefined) { + cgm.graphHashesWithConnections = new RuntimeSpecMap(); + } + const activeStateToString = state => { + if (state === false) return "F"; + if (state === true) return "T"; + if (state === ModuleGraphConnection.TRANSITIVE_ONLY) return "O"; + throw new Error("Not implemented active state"); + }; + const strict = module.buildMeta && 
module.buildMeta.strictHarmonyModule; + return cgm.graphHashesWithConnections.provide(runtime, () => { + const graphHash = this._getModuleGraphHashBigInt( + cgm, + module, + runtime + ).toString(16); + const connections = this.moduleGraph.getOutgoingConnections(module); + /** @type {Set} */ + const activeNamespaceModules = new Set(); + /** @type {Map>} */ + const connectedModules = new Map(); + const processConnection = (connection, stateInfo) => { + const module = connection.module; + stateInfo += module.getExportsType(this.moduleGraph, strict); + // cspell:word Tnamespace + if (stateInfo === "Tnamespace") activeNamespaceModules.add(module); + else { + const oldModule = connectedModules.get(stateInfo); + if (oldModule === undefined) { + connectedModules.set(stateInfo, module); + } else if (oldModule instanceof Set) { + oldModule.add(module); + } else if (oldModule !== module) { + connectedModules.set(stateInfo, new Set([oldModule, module])); + } + } + }; + if (runtime === undefined || typeof runtime === "string") { + for (const connection of connections) { + const state = connection.getActiveState(runtime); + if (state === false) continue; + processConnection(connection, state === true ? "T" : "O"); + } + } else { + // cspell:word Tnamespace + for (const connection of connections) { + const states = new Set(); + let stateInfo = ""; + forEachRuntime( + runtime, + runtime => { + const state = connection.getActiveState(runtime); + states.add(state); + stateInfo += activeStateToString(state) + runtime; + }, + true + ); + if (states.size === 1) { + const state = first(states); + if (state === false) continue; + stateInfo = activeStateToString(state); + } + processConnection(connection, stateInfo); + } + } + // cspell:word Tnamespace + if (activeNamespaceModules.size === 0 && connectedModules.size === 0) + return graphHash; + const connectedModulesInOrder = + connectedModules.size > 1 + ? Array.from(connectedModules).sort(([a], [b]) => (a < b ? 
-1 : 1)) + : connectedModules; + const hash = createHash(this._hashFunction); + const addModuleToHash = module => { + hash.update( + this._getModuleGraphHashBigInt( + this._getChunkGraphModule(module), + module, + runtime + ).toString(16) + ); + }; + const addModulesToHash = modules => { + let xor = ZERO_BIG_INT; + for (const m of modules) { + xor = + xor ^ + this._getModuleGraphHashBigInt( + this._getChunkGraphModule(m), + m, + runtime + ); + } + hash.update(xor.toString(16)); + }; + if (activeNamespaceModules.size === 1) + addModuleToHash(activeNamespaceModules.values().next().value); + else if (activeNamespaceModules.size > 1) + addModulesToHash(activeNamespaceModules); + for (const [stateInfo, modules] of connectedModulesInOrder) { + hash.update(stateInfo); + if (modules instanceof Set) { + addModulesToHash(modules); + } else { + addModuleToHash(modules); + } + } + hash.update(graphHash); + return /** @type {string} */ (hash.digest("hex")); + }); } /** - * @returns {boolean} true, if this chunk contains no module + * @param {Chunk} chunk the chunk + * @returns {ReadonlySet} runtime requirements */ - isEmpty() { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.isEmpty", - "DEP_WEBPACK_CHUNK_IS_EMPTY" - ); - return chunkGraph.getNumberOfChunkModules(this) === 0; + getTreeRuntimeRequirements(chunk) { + const cgc = this._getChunkGraphChunk(chunk); + return cgc.runtimeRequirementsInTree; } + // TODO remove in webpack 6 /** - * @returns {number} total size of all modules in this chunk + * @param {Module} module the module + * @param {string} deprecateMessage message for the deprecation message + * @param {string} deprecationCode code for the deprecation + * @returns {ChunkGraph} the chunk graph */ - modulesSize() { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.modulesSize", - "DEP_WEBPACK_CHUNK_MODULES_SIZE" + static getChunkGraphForModule(module, deprecateMessage, deprecationCode) { + const fn = deprecateGetChunkGraphForModuleMap.get(deprecateMessage); + if (fn) return fn(module); + const newFn = util.deprecate( + /** + * @param {Module} module the module + * @returns {ChunkGraph} the chunk graph + */ + module => { + const chunkGraph = chunkGraphForModuleMap.get(module); + if (!chunkGraph) + throw new Error( + deprecateMessage + + ": There was no ChunkGraph assigned to the Module for backward-compat (Use the new API)" + ); + return chunkGraph; + }, + deprecateMessage + ": Use new ChunkGraph API", + deprecationCode ); - return chunkGraph.getChunkModulesSize(this); + deprecateGetChunkGraphForModuleMap.set(deprecateMessage, newFn); + return newFn(module); } + // TODO remove in webpack 6 /** - * @param {ChunkSizeOptions} options options object - * @returns {number} total size of this chunk + * @param {Module} module the module + * @param {ChunkGraph} chunkGraph the chunk graph + * @returns {void} */ - size(options = {}) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.size", - "DEP_WEBPACK_CHUNK_SIZE" - ); - return chunkGraph.getChunkSize(this, options); + static setChunkGraphForModule(module, chunkGraph) { + chunkGraphForModuleMap.set(module, chunkGraph); } + // TODO remove in webpack 6 /** - * @param {Chunk} otherChunk the other chunk - * @param {ChunkSizeOptions} options options object - * @returns {number} total size of the chunk or false if the chunk can't be integrated + * @param {Module} module the module + * @returns {void} */ - integratedSize(otherChunk, options) { - const chunkGraph = 
ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.integratedSize", - "DEP_WEBPACK_CHUNK_INTEGRATED_SIZE" - ); - return chunkGraph.getIntegratedChunksSize(this, otherChunk, options); + static clearChunkGraphForModule(module) { + chunkGraphForModuleMap.delete(module); } + // TODO remove in webpack 6 /** - * @param {ModuleFilterPredicate} filterFn function used to filter modules - * @returns {ChunkModuleMaps} module map information + * @param {Chunk} chunk the chunk + * @param {string} deprecateMessage message for the deprecation message + * @param {string} deprecationCode code for the deprecation + * @returns {ChunkGraph} the chunk graph */ - getChunkModuleMaps(filterFn) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.getChunkModuleMaps", - "DEP_WEBPACK_CHUNK_GET_CHUNK_MODULE_MAPS" - ); - /** @type {Record} */ - const chunkModuleIdMap = Object.create(null); - /** @type {Record} */ - const chunkModuleHashMap = Object.create(null); - - for (const asyncChunk of this.getAllAsyncChunks()) { - /** @type {(string|number)[]} */ - let array; - for (const module of chunkGraph.getOrderedChunkModulesIterable( - asyncChunk, - compareModulesById(chunkGraph) - )) { - if (filterFn(module)) { - if (array === undefined) { - array = []; - chunkModuleIdMap[asyncChunk.id] = array; - } - const moduleId = chunkGraph.getModuleId(module); - array.push(moduleId); - chunkModuleHashMap[moduleId] = chunkGraph.getRenderedModuleHash( - module, - undefined + static getChunkGraphForChunk(chunk, deprecateMessage, deprecationCode) { + const fn = deprecateGetChunkGraphForChunkMap.get(deprecateMessage); + if (fn) return fn(chunk); + const newFn = util.deprecate( + /** + * @param {Chunk} chunk the chunk + * @returns {ChunkGraph} the chunk graph + */ + chunk => { + const chunkGraph = chunkGraphForChunkMap.get(chunk); + if (!chunkGraph) + throw new Error( + deprecateMessage + + "There was no ChunkGraph assigned to the Chunk for backward-compat (Use the new API)" ); - } - } - } - - return { - id: chunkModuleIdMap, - hash: chunkModuleHashMap - }; + return chunkGraph; + }, + deprecateMessage + ": Use new ChunkGraph API", + deprecationCode + ); + deprecateGetChunkGraphForChunkMap.set(deprecateMessage, newFn); + return newFn(chunk); } + // TODO remove in webpack 6 /** - * @param {ModuleFilterPredicate} filterFn predicate function used to filter modules - * @param {ChunkFilterPredicate=} filterChunkFn predicate function used to filter chunks - * @returns {boolean} return true if module exists in graph + * @param {Chunk} chunk the chunk + * @param {ChunkGraph} chunkGraph the chunk graph + * @returns {void} */ - hasModuleInGraph(filterFn, filterChunkFn) { - const chunkGraph = ChunkGraph.getChunkGraphForChunk( - this, - "Chunk.hasModuleInGraph", - "DEP_WEBPACK_CHUNK_HAS_MODULE_IN_GRAPH" - ); - return chunkGraph.hasModuleInGraph(this, filterFn, filterChunkFn); + static setChunkGraphForChunk(chunk, chunkGraph) { + chunkGraphForChunkMap.set(chunk, chunkGraph); } + // TODO remove in webpack 6 /** - * @deprecated - * @param {boolean} realHash whether the full hash or the rendered hash is to be used - * @returns {ChunkMaps} the chunk map information + * @param {Chunk} chunk the chunk + * @returns {void} */ - getChunkMaps(realHash) { - /** @type {Record} */ - const chunkHashMap = Object.create(null); - /** @type {Record>} */ - const chunkContentHashMap = Object.create(null); - /** @type {Record} */ - const chunkNameMap = Object.create(null); + static clearChunkGraphForChunk(chunk) { + 
chunkGraphForChunkMap.delete(chunk); + } +} - for (const chunk of this.getAllAsyncChunks()) { - chunkHashMap[chunk.id] = realHash ? chunk.hash : chunk.renderedHash; - for (const key of Object.keys(chunk.contentHash)) { - if (!chunkContentHashMap[key]) { - chunkContentHashMap[key] = Object.create(null); - } - chunkContentHashMap[key][chunk.id] = chunk.contentHash[key]; - } - if (chunk.name) { - chunkNameMap[chunk.id] = chunk.name; - } - } +// TODO remove in webpack 6 +/** @type {WeakMap} */ +const chunkGraphForModuleMap = new WeakMap(); - return { - hash: chunkHashMap, - contentHash: chunkContentHashMap, - name: chunkNameMap - }; - } - // BACKWARD-COMPAT END +// TODO remove in webpack 6 +/** @type {WeakMap} */ +const chunkGraphForChunkMap = new WeakMap(); - /** - * @returns {boolean} whether or not the Chunk will have a runtime - */ - hasRuntime() { - for (const chunkGroup of this._groups) { - if ( - chunkGroup instanceof Entrypoint && - chunkGroup.getRuntimeChunk() === this - ) { - return true; - } - } - return false; - } +// TODO remove in webpack 6 +/** @type {Map ChunkGraph>} */ +const deprecateGetChunkGraphForModuleMap = new Map(); + +// TODO remove in webpack 6 +/** @type {Map ChunkGraph>} */ +const deprecateGetChunkGraphForChunkMap = new Map(); + +module.exports = ChunkGraph; + + +/***/ }), + +/***/ 96975: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + + +const util = __webpack_require__(31669); +const SortableSet = __webpack_require__(67563); +const { + compareLocations, + compareChunks, + compareIterables +} = __webpack_require__(26296); + +/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */ +/** @typedef {import("./Chunk")} Chunk */ +/** @typedef {import("./ChunkGraph")} ChunkGraph */ +/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ +/** @typedef {import("./Entrypoint")} Entrypoint */ +/** @typedef {import("./Module")} Module */ +/** @typedef {import("./ModuleGraph")} ModuleGraph */ + +/** @typedef {{id: number}} HasId */ +/** @typedef {{module: Module, loc: DependencyLocation, request: string}} OriginRecord */ +/** + * @typedef {Object} RawChunkGroupOptions + * @property {number=} preloadOrder + * @property {number=} prefetchOrder + */ + +/** @typedef {RawChunkGroupOptions & { name?: string }} ChunkGroupOptions */ + +let debugId = 5000; + +/** + * @template T + * @param {SortableSet} set set to convert to array. + * @returns {T[]} the array format of existing set + */ +const getArray = set => Array.from(set); + +/** + * A convenience method used to sort chunks based on their id's + * @param {ChunkGroup} a first sorting comparator + * @param {ChunkGroup} b second sorting comparator + * @returns {1|0|-1} a sorting index to determine order + */ +const sortById = (a, b) => { + if (a.id < b.id) return -1; + if (b.id < a.id) return 1; + return 0; +}; + +/** + * @param {OriginRecord} a the first comparator in sort + * @param {OriginRecord} b the second comparator in sort + * @returns {1|-1|0} returns sorting order as index + */ +const sortOrigin = (a, b) => { + const aIdent = a.module ? a.module.identifier() : ""; + const bIdent = b.module ? 
b.module.identifier() : ""; + if (aIdent < bIdent) return -1; + if (aIdent > bIdent) return 1; + return compareLocations(a.loc, b.loc); +}; + +class ChunkGroup { /** - * @returns {boolean} whether or not this chunk can be an initial chunk + * Creates an instance of ChunkGroup. + * @param {string|ChunkGroupOptions=} options chunk group options passed to chunkGroup */ - canBeInitial() { - for (const chunkGroup of this._groups) { - if (chunkGroup.isInitial()) return true; + constructor(options) { + if (typeof options === "string") { + options = { name: options }; + } else if (!options) { + options = { name: undefined }; } - return false; + /** @type {number} */ + this.groupDebugId = debugId++; + this.options = options; + /** @type {SortableSet} */ + this._children = new SortableSet(undefined, sortById); + /** @type {SortableSet} */ + this._parents = new SortableSet(undefined, sortById); + /** @type {SortableSet} */ + this._asyncEntrypoints = new SortableSet(undefined, sortById); + this._blocks = new SortableSet(); + /** @type {Chunk[]} */ + this.chunks = []; + /** @type {OriginRecord[]} */ + this.origins = []; + /** Indices in top-down order */ + /** @private @type {Map} */ + this._modulePreOrderIndices = new Map(); + /** Indices in bottom-up order */ + /** @private @type {Map} */ + this._modulePostOrderIndices = new Map(); + /** @type {number} */ + this.index = undefined; } /** - * @returns {boolean} whether this chunk can only be an initial chunk + * when a new chunk is added to a chunkGroup, addingOptions will occur. + * @param {ChunkGroupOptions} options the chunkGroup options passed to addOptions + * @returns {void} */ - isOnlyInitial() { - if (this._groups.size <= 0) return false; - for (const chunkGroup of this._groups) { - if (!chunkGroup.isInitial()) return false; + addOptions(options) { + for (const key of Object.keys(options)) { + if (this.options[key] === undefined) { + this.options[key] = options[key]; + } else if (this.options[key] !== options[key]) { + if (key.endsWith("Order")) { + this.options[key] = Math.max(this.options[key], options[key]); + } else { + throw new Error( + `ChunkGroup.addOptions: No option merge strategy for ${key}` + ); + } + } } - return true; } /** - * @returns {EntryOptions | undefined} the entry options for this chunk + * returns the name of current ChunkGroup + * @returns {string|undefined} returns the ChunkGroup name */ - getEntryOptions() { - for (const chunkGroup of this._groups) { - if (chunkGroup instanceof Entrypoint) { - return chunkGroup.options; - } - } - return undefined; + get name() { + return this.options.name; } /** - * @param {ChunkGroup} chunkGroup the chunkGroup the chunk is being added + * sets a new name for current ChunkGroup + * @param {string} value the new name for ChunkGroup * @returns {void} */ - addGroup(chunkGroup) { - this._groups.add(chunkGroup); + set name(value) { + this.options.name = value; } + /* istanbul ignore next */ /** - * @param {ChunkGroup} chunkGroup the chunkGroup the chunk is being removed from - * @returns {void} + * get a uniqueId for ChunkGroup, made up of its member Chunk debugId's + * @returns {string} a unique concatenation of chunk debugId's */ - removeGroup(chunkGroup) { - this._groups.delete(chunkGroup); + get debugId() { + return Array.from(this.chunks, x => x.debugId).join("+"); } /** - * @param {ChunkGroup} chunkGroup the chunkGroup to check - * @returns {boolean} returns true if chunk has chunkGroup reference and exists in chunkGroup + * get a unique id for ChunkGroup, made up of its member Chunk 
id's + * @returns {string} a unique concatenation of chunk ids */ - isInGroup(chunkGroup) { - return this._groups.has(chunkGroup); + get id() { + return Array.from(this.chunks, x => x.id).join("+"); } /** - * @returns {number} the amount of groups that the said chunk is in + * Performs an unshift of a specific chunk + * @param {Chunk} chunk chunk being unshifted + * @returns {boolean} returns true if attempted chunk shift is accepted */ - getNumberOfGroups() { - return this._groups.size; + unshiftChunk(chunk) { + const oldIdx = this.chunks.indexOf(chunk); + if (oldIdx > 0) { + this.chunks.splice(oldIdx, 1); + this.chunks.unshift(chunk); + } else if (oldIdx < 0) { + this.chunks.unshift(chunk); + return true; + } + return false; } /** - * @returns {Iterable} the chunkGroups that the said chunk is referenced in + * inserts a chunk before another existing chunk in group + * @param {Chunk} chunk Chunk being inserted + * @param {Chunk} before Placeholder/target chunk marking new chunk insertion point + * @returns {boolean} return true if insertion was successful */ - get groupsIterable() { - this._groups.sort(); - return this._groups; + insertChunk(chunk, before) { + const oldIdx = this.chunks.indexOf(chunk); + const idx = this.chunks.indexOf(before); + if (idx < 0) { + throw new Error("before chunk not found"); + } + if (oldIdx >= 0 && oldIdx > idx) { + this.chunks.splice(oldIdx, 1); + this.chunks.splice(idx, 0, chunk); + } else if (oldIdx < 0) { + this.chunks.splice(idx, 0, chunk); + return true; + } + return false; } /** - * @returns {void} + * add a chunk into ChunkGroup. Is pushed on or prepended + * @param {Chunk} chunk chunk being pushed into ChunkGroupS + * @returns {boolean} returns true if chunk addition was successful. */ - disconnectFromGroups() { - for (const chunkGroup of this._groups) { - chunkGroup.removeChunk(this); + pushChunk(chunk) { + const oldIdx = this.chunks.indexOf(chunk); + if (oldIdx >= 0) { + return false; } + this.chunks.push(chunk); + return true; } /** - * @param {Chunk} newChunk the new chunk that will be split out of - * @returns {void} + * @param {Chunk} oldChunk chunk to be replaced + * @param {Chunk} newChunk New chunk that will be replaced with + * @returns {boolean} returns true if the replacement was successful */ - split(newChunk) { - for (const chunkGroup of this._groups) { - chunkGroup.insertChunk(newChunk, this); - newChunk.addGroup(chunkGroup); + replaceChunk(oldChunk, newChunk) { + const oldIdx = this.chunks.indexOf(oldChunk); + if (oldIdx < 0) return false; + const newIdx = this.chunks.indexOf(newChunk); + if (newIdx < 0) { + this.chunks[oldIdx] = newChunk; + return true; } - for (const idHint of this.idNameHints) { - newChunk.idNameHints.add(idHint); + if (newIdx < oldIdx) { + this.chunks.splice(oldIdx, 1); + return true; + } else if (newIdx !== oldIdx) { + this.chunks[oldIdx] = newChunk; + this.chunks.splice(newIdx, 1); + return true; } - newChunk.runtime = mergeRuntime(newChunk.runtime, this.runtime); } /** - * @param {Hash} hash hash (will be modified) - * @param {ChunkGraph} chunkGraph the chunk graph - * @returns {void} + * @param {Chunk} chunk chunk to remove + * @returns {boolean} returns true if chunk was removed */ - updateHash(hash, chunkGraph) { - hash.update( - `${this.id} ${this.ids ? 
this.ids.join() : ""} ${this.name || ""} ` - ); - const xor = new StringXor(); - for (const m of chunkGraph.getChunkModulesIterable(this)) { - xor.add(chunkGraph.getModuleHash(m, this.runtime)); - } - xor.updateHash(hash); - const entryModules = - chunkGraph.getChunkEntryModulesWithChunkGroupIterable(this); - for (const [m, chunkGroup] of entryModules) { - hash.update(`entry${chunkGraph.getModuleId(m)}${chunkGroup.id}`); + removeChunk(chunk) { + const idx = this.chunks.indexOf(chunk); + if (idx >= 0) { + this.chunks.splice(idx, 1); + return true; } + return false; } /** - * @returns {Set} a set of all the async chunks + * @returns {boolean} true, when this chunk group will be loaded on initial page load */ - getAllAsyncChunks() { - const queue = new Set(); - const chunks = new Set(); + isInitial() { + return false; + } - const initialChunks = intersect( - Array.from(this.groupsIterable, g => new Set(g.chunks)) - ); + /** + * @param {ChunkGroup} group chunk group to add + * @returns {boolean} returns true if chunk group was added + */ + addChild(group) { + const size = this._children.size; + this._children.add(group); + return size !== this._children.size; + } - const initialQueue = new Set(this.groupsIterable); + /** + * @returns {ChunkGroup[]} returns the children of this group + */ + getChildren() { + return this._children.getFromCache(getArray); + } - for (const chunkGroup of initialQueue) { - for (const child of chunkGroup.childrenIterable) { - if (child instanceof Entrypoint) { - initialQueue.add(child); - } else { - queue.add(child); - } - } + getNumberOfChildren() { + return this._children.size; + } + + get childrenIterable() { + return this._children; + } + + /** + * @param {ChunkGroup} group the chunk group to remove + * @returns {boolean} returns true if the chunk group was removed + */ + removeChild(group) { + if (!this._children.has(group)) { + return false; } - for (const chunkGroup of queue) { - for (const chunk of chunkGroup.chunks) { - if (!initialChunks.has(chunk)) { - chunks.add(chunk); - } - } - for (const child of chunkGroup.childrenIterable) { - queue.add(child); - } + this._children.delete(group); + group.removeParent(this); + return true; + } + + /** + * @param {ChunkGroup} parentChunk the parent group to be added into + * @returns {boolean} returns true if this chunk group was added to the parent group + */ + addParent(parentChunk) { + if (!this._parents.has(parentChunk)) { + this._parents.add(parentChunk); + return true; } + return false; + } - return chunks; + /** + * @returns {ChunkGroup[]} returns the parents of this group + */ + getParents() { + return this._parents.getFromCache(getArray); + } + + getNumberOfParents() { + return this._parents.size; } /** - * @returns {Set} a set of all the initial chunks (including itself) + * @param {ChunkGroup} parent the parent group + * @returns {boolean} returns true if the parent group contains this group */ - getAllInitialChunks() { - const chunks = new Set(); - const queue = new Set(this.groupsIterable); - for (const group of queue) { - if (group.isInitial()) { - for (const c of group.chunks) chunks.add(c); - for (const g of group.childrenIterable) queue.add(g); - } + hasParent(parent) { + return this._parents.has(parent); + } + + get parentsIterable() { + return this._parents; + } + + /** + * @param {ChunkGroup} chunkGroup the parent group + * @returns {boolean} returns true if this group has been removed from the parent + */ + removeParent(chunkGroup) { + if (this._parents.delete(chunkGroup)) { + 
chunkGroup.removeChild(this); + return true; } - return chunks; + return false; } /** - * @returns {Set} a set of all the referenced chunks (including itself) + * @param {Entrypoint} entrypoint entrypoint to add + * @returns {boolean} returns true if entrypoint was added */ - getAllReferencedChunks() { - const queue = new Set(this.groupsIterable); - const chunks = new Set(); + addAsyncEntrypoint(entrypoint) { + const size = this._asyncEntrypoints.size; + this._asyncEntrypoints.add(entrypoint); + return size !== this._asyncEntrypoints.size; + } - for (const chunkGroup of queue) { - for (const chunk of chunkGroup.chunks) { - chunks.add(chunk); - } - for (const child of chunkGroup.childrenIterable) { - queue.add(child); - } + get asyncEntrypointsIterable() { + return this._asyncEntrypoints; + } + + /** + * @returns {Array} an array containing the blocks + */ + getBlocks() { + return this._blocks.getFromCache(getArray); + } + + getNumberOfBlocks() { + return this._blocks.size; + } + + hasBlock(block) { + return this._blocks.has(block); + } + + /** + * @returns {Iterable} blocks + */ + get blocksIterable() { + return this._blocks; + } + + /** + * @param {AsyncDependenciesBlock} block a block + * @returns {boolean} false, if block was already added + */ + addBlock(block) { + if (!this._blocks.has(block)) { + this._blocks.add(block); + return true; } + return false; + } - return chunks; + /** + * @param {Module} module origin module + * @param {DependencyLocation} loc location of the reference in the origin module + * @param {string} request request name of the reference + * @returns {void} + */ + addOrigin(module, loc, request) { + this.origins.push({ + module, + loc, + request + }); } /** - * @returns {Set} a set of all the referenced entrypoints + * @returns {string[]} the files contained this chunk group */ - getAllReferencedAsyncEntrypoints() { - const queue = new Set(this.groupsIterable); - const entrypoints = new Set(); + getFiles() { + const files = new Set(); - for (const chunkGroup of queue) { - for (const entrypoint of chunkGroup.asyncEntrypointsIterable) { - entrypoints.add(entrypoint); - } - for (const child of chunkGroup.childrenIterable) { - queue.add(child); + for (const chunk of this.chunks) { + for (const file of chunk.files) { + files.add(file); } } - return entrypoints; + return Array.from(files); } /** - * @returns {boolean} true, if the chunk references async chunks + * @returns {void} */ - hasAsyncChunks() { - const queue = new Set(); - - const initialChunks = intersect( - Array.from(this.groupsIterable, g => new Set(g.chunks)) - ); + remove() { + // cleanup parents + for (const parentChunkGroup of this._parents) { + // remove this chunk from its parents + parentChunkGroup._children.delete(this); - for (const chunkGroup of this.groupsIterable) { - for (const child of chunkGroup.childrenIterable) { - queue.add(child); + // cleanup "sub chunks" + for (const chunkGroup of this._children) { + /** + * remove this chunk as "intermediary" and connect + * it "sub chunks" and parents directly + */ + // add parent to each "sub chunk" + chunkGroup.addParent(parentChunkGroup); + // add "sub chunk" to parent + parentChunkGroup.addChild(chunkGroup); } } - for (const chunkGroup of queue) { - for (const chunk of chunkGroup.chunks) { - if (!initialChunks.has(chunk)) { - return true; - } - } - for (const child of chunkGroup.childrenIterable) { - queue.add(child); - } + /** + * we need to iterate again over the children + * to remove this from the child's parents. 
+ * This can not be done in the above loop + * as it is not guaranteed that `this._parents` contains anything. + */ + for (const chunkGroup of this._children) { + // remove this as parent of every "sub chunk" + chunkGroup._parents.delete(this); } - return false; + // remove chunks + for (const chunk of this.chunks) { + chunk.removeGroup(this); + } + } + + sortItems() { + this.origins.sort(sortOrigin); } /** + * Sorting predicate which allows current ChunkGroup to be compared against another. + * Sorting values are based off of number of chunks in ChunkGroup. + * * @param {ChunkGraph} chunkGraph the chunk graph - * @param {ChunkFilterPredicate=} filterFn function used to filter chunks - * @returns {Record} a record object of names to lists of child ids(?) + * @param {ChunkGroup} otherGroup the chunkGroup to compare this against + * @returns {-1|0|1} sort position for comparison */ - getChildIdsByOrders(chunkGraph, filterFn) { + compareTo(chunkGraph, otherGroup) { + if (this.chunks.length > otherGroup.chunks.length) return -1; + if (this.chunks.length < otherGroup.chunks.length) return 1; + return compareIterables(compareChunks(chunkGraph))( + this.chunks, + otherGroup.chunks + ); + } + + /** + * @param {ModuleGraph} moduleGraph the module graph + * @param {ChunkGraph} chunkGraph the chunk graph + * @returns {Record} mapping from children type to ordered list of ChunkGroups + */ + getChildrenByOrders(moduleGraph, chunkGraph) { /** @type {Map} */ const lists = new Map(); - for (const group of this.groupsIterable) { - if (group.chunks[group.chunks.length - 1] === this) { - for (const childGroup of group.childrenIterable) { - for (const key of Object.keys(childGroup.options)) { - if (key.endsWith("Order")) { - const name = key.substr(0, key.length - "Order".length); - let list = lists.get(name); - if (list === undefined) { - list = []; - lists.set(name, list); - } - list.push({ - order: childGroup.options[key], - group: childGroup - }); - } + for (const childGroup of this._children) { + for (const key of Object.keys(childGroup.options)) { + if (key.endsWith("Order")) { + const name = key.substr(0, key.length - "Order".length); + let list = lists.get(name); + if (list === undefined) { + lists.set(name, (list = [])); } + list.push({ + order: childGroup.options[key], + group: childGroup + }); } } } - /** @type {Record} */ + /** @type {Record} */ const result = Object.create(null); for (const [name, list] of lists) { list.sort((a, b) => { @@ -26438,116 +26947,87 @@ class Chunk { if (cmp !== 0) return cmp; return a.group.compareTo(chunkGraph, b.group); }); - /** @type {Set} */ - const chunkIdSet = new Set(); - for (const item of list) { - for (const chunk of item.group.chunks) { - if (filterFn && !filterFn(chunk, chunkGraph)) continue; - chunkIdSet.add(chunk.id); - } - } - if (chunkIdSet.size > 0) { - result[name] = Array.from(chunkIdSet); - } + result[name] = list.map(i => i.group); } return result; } /** - * @param {ChunkGraph} chunkGraph the chunk graph - * @param {string} type option name - * @returns {{ onChunks: Chunk[], chunks: Set }[]} referenced chunks for a specific type + * Sets the top-down index of a module in this ChunkGroup + * @param {Module} module module for which the index should be set + * @param {number} index the index of the module + * @returns {void} */ - getChildrenOfTypeInOrder(chunkGraph, type) { - const list = []; - for (const group of this.groupsIterable) { - for (const childGroup of group.childrenIterable) { - const order = childGroup.options[type]; - if (order === 
undefined) continue; - list.push({ - order, - group, - childGroup - }); - } - } - if (list.length === 0) return undefined; - list.sort((a, b) => { - const cmp = b.order - a.order; - if (cmp !== 0) return cmp; - return a.group.compareTo(chunkGraph, b.group); - }); - const result = []; - let lastEntry; - for (const { group, childGroup } of list) { - if (lastEntry && lastEntry.onChunks === group.chunks) { - for (const chunk of childGroup.chunks) { - lastEntry.chunks.add(chunk); - } - } else { - result.push( - (lastEntry = { - onChunks: group.chunks, - chunks: new Set(childGroup.chunks) - }) - ); - } - } - return result; + setModulePreOrderIndex(module, index) { + this._modulePreOrderIndices.set(module, index); } /** - * @param {ChunkGraph} chunkGraph the chunk graph - * @param {boolean=} includeDirectChildren include direct children (by default only children of async children are included) - * @param {ChunkFilterPredicate=} filterFn function used to filter chunks - * @returns {Record>} a record object of names to lists of child ids(?) by chunk id + * Gets the top-down index of a module in this ChunkGroup + * @param {Module} module the module + * @returns {number} index */ - getChildIdsByOrdersMap(chunkGraph, includeDirectChildren, filterFn) { - /** @type {Record>} */ - const chunkMaps = Object.create(null); + getModulePreOrderIndex(module) { + return this._modulePreOrderIndices.get(module); + } - /** - * @param {Chunk} chunk a chunk - * @returns {void} - */ - const addChildIdsByOrdersToMap = chunk => { - const data = chunk.getChildIdsByOrders(chunkGraph, filterFn); - for (const key of Object.keys(data)) { - let chunkMap = chunkMaps[key]; - if (chunkMap === undefined) { - chunkMaps[key] = chunkMap = Object.create(null); - } - chunkMap[chunk.id] = data[key]; - } - }; + /** + * Sets the bottom-up index of a module in this ChunkGroup + * @param {Module} module module for which the index should be set + * @param {number} index the index of the module + * @returns {void} + */ + setModulePostOrderIndex(module, index) { + this._modulePostOrderIndices.set(module, index); + } - if (includeDirectChildren) { - /** @type {Set} */ - const chunks = new Set(); - for (const chunkGroup of this.groupsIterable) { - for (const chunk of chunkGroup.chunks) { - chunks.add(chunk); - } - } - for (const chunk of chunks) { - addChildIdsByOrdersToMap(chunk); + /** + * Gets the bottom-up index of a module in this ChunkGroup + * @param {Module} module the module + * @returns {number} index + */ + getModulePostOrderIndex(module) { + return this._modulePostOrderIndices.get(module); + } + + /* istanbul ignore next */ + checkConstraints() { + const chunk = this; + for (const child of chunk._children) { + if (!child._parents.has(chunk)) { + throw new Error( + `checkConstraints: child missing parent ${chunk.debugId} -> ${child.debugId}` + ); } } - - for (const chunk of this.getAllAsyncChunks()) { - addChildIdsByOrdersToMap(chunk); + for (const parentChunk of chunk._parents) { + if (!parentChunk._children.has(chunk)) { + throw new Error( + `checkConstraints: parent missing child ${parentChunk.debugId} <- ${chunk.debugId}` + ); + } } - - return chunkMaps; } } -module.exports = Chunk; +ChunkGroup.prototype.getModuleIndex = util.deprecate( + ChunkGroup.prototype.getModulePreOrderIndex, + "ChunkGroup.getModuleIndex was renamed to getModulePreOrderIndex", + "DEP_WEBPACK_CHUNK_GROUP_GET_MODULE_INDEX" +); + +ChunkGroup.prototype.getModuleIndex2 = util.deprecate( + ChunkGroup.prototype.getModulePostOrderIndex, + 
"ChunkGroup.getModuleIndex2 was renamed to getModulePostOrderIndex", + "DEP_WEBPACK_CHUNK_GROUP_GET_MODULE_INDEX_2" +); + +module.exports = ChunkGroup; /***/ }), -/***/ 97860: +/***/ 80187: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -26558,3153 +27038,825 @@ module.exports = Chunk; -const util = __webpack_require__(31669); -const Entrypoint = __webpack_require__(86695); -const ModuleGraphConnection = __webpack_require__(94144); -const { first } = __webpack_require__(34715); -const SortableSet = __webpack_require__(67563); -const { - compareModulesById, - compareIterables, - compareModulesByIdentifier, - concatComparators, - compareSelect, - compareIds -} = __webpack_require__(26296); -const createHash = __webpack_require__(24123); -const findGraphRoots = __webpack_require__(38670); -const { - RuntimeSpecMap, - RuntimeSpecSet, - runtimeToString, - mergeRuntime, - forEachRuntime -} = __webpack_require__(19655); +const WebpackError = __webpack_require__(68422); -/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */ /** @typedef {import("./Chunk")} Chunk */ -/** @typedef {import("./ChunkGroup")} ChunkGroup */ -/** @typedef {import("./Module")} Module */ -/** @typedef {import("./ModuleGraph")} ModuleGraph */ -/** @typedef {import("./RuntimeModule")} RuntimeModule */ -/** @typedef {typeof import("./util/Hash")} Hash */ -/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */ -/** @type {ReadonlySet} */ -const EMPTY_SET = new Set(); +class ChunkRenderError extends WebpackError { + /** + * Create a new ChunkRenderError + * @param {Chunk} chunk A chunk + * @param {string} file Related file + * @param {Error} error Original error + */ + constructor(chunk, file, error) { + super(); -const ZERO_BIG_INT = BigInt(0); + this.name = "ChunkRenderError"; + this.error = error; + this.message = error.message; + this.details = error.stack; + this.file = file; + this.chunk = chunk; + } +} -const compareModuleIterables = compareIterables(compareModulesByIdentifier); +module.exports = ChunkRenderError; -/** @typedef {(c: Chunk, chunkGraph: ChunkGraph) => boolean} ChunkFilterPredicate */ -/** @typedef {(m: Module) => boolean} ModuleFilterPredicate */ -/** - * @typedef {Object} ChunkSizeOptions - * @property {number=} chunkOverhead constant overhead for a chunk - * @property {number=} entryChunkMultiplicator multiplicator for initial chunks - */ +/***/ }), -class ModuleHashInfo { - constructor(hash, renderedHash) { - this.hash = hash; - this.renderedHash = renderedHash; +/***/ 50527: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + + +const util = __webpack_require__(31669); +const memoize = __webpack_require__(84297); + +/** @typedef {import("../declarations/WebpackOptions").Output} OutputOptions */ +/** @typedef {import("./Compilation")} Compilation */ + +const getJavascriptModulesPlugin = memoize(() => + __webpack_require__(76767) +); + +// TODO webpack 6 remove this class +class ChunkTemplate { + /** + * @param {OutputOptions} outputOptions output options + * @param {Compilation} compilation the compilation + */ + constructor(outputOptions, compilation) { + this._outputOptions = outputOptions || {}; + this.hooks = Object.freeze({ + renderManifest: { + tap: util.deprecate( + (options, fn) => { + compilation.hooks.renderManifest.tap( + options, + (entries, options) => { + if 
(options.chunk.hasRuntime()) return entries; + return fn(entries, options); + } + ); + }, + "ChunkTemplate.hooks.renderManifest is deprecated (use Compilation.hooks.renderManifest instead)", + "DEP_WEBPACK_CHUNK_TEMPLATE_RENDER_MANIFEST" + ) + }, + modules: { + tap: util.deprecate( + (options, fn) => { + getJavascriptModulesPlugin() + .getCompilationHooks(compilation) + .renderChunk.tap(options, (source, renderContext) => + fn( + source, + compilation.moduleTemplates.javascript, + renderContext + ) + ); + }, + "ChunkTemplate.hooks.modules is deprecated (use JavascriptModulesPlugin.getCompilationHooks().renderChunk instead)", + "DEP_WEBPACK_CHUNK_TEMPLATE_MODULES" + ) + }, + render: { + tap: util.deprecate( + (options, fn) => { + getJavascriptModulesPlugin() + .getCompilationHooks(compilation) + .renderChunk.tap(options, (source, renderContext) => + fn( + source, + compilation.moduleTemplates.javascript, + renderContext + ) + ); + }, + "ChunkTemplate.hooks.render is deprecated (use JavascriptModulesPlugin.getCompilationHooks().renderChunk instead)", + "DEP_WEBPACK_CHUNK_TEMPLATE_RENDER" + ) + }, + renderWithEntry: { + tap: util.deprecate( + (options, fn) => { + getJavascriptModulesPlugin() + .getCompilationHooks(compilation) + .render.tap(options, (source, renderContext) => { + if ( + renderContext.chunkGraph.getNumberOfEntryModules( + renderContext.chunk + ) === 0 || + renderContext.chunk.hasRuntime() + ) { + return source; + } + return fn(source, renderContext.chunk); + }); + }, + "ChunkTemplate.hooks.renderWithEntry is deprecated (use JavascriptModulesPlugin.getCompilationHooks().render instead)", + "DEP_WEBPACK_CHUNK_TEMPLATE_RENDER_WITH_ENTRY" + ) + }, + hash: { + tap: util.deprecate( + (options, fn) => { + compilation.hooks.fullHash.tap(options, fn); + }, + "ChunkTemplate.hooks.hash is deprecated (use Compilation.hooks.fullHash instead)", + "DEP_WEBPACK_CHUNK_TEMPLATE_HASH" + ) + }, + hashForChunk: { + tap: util.deprecate( + (options, fn) => { + getJavascriptModulesPlugin() + .getCompilationHooks(compilation) + .chunkHash.tap(options, (chunk, hash, context) => { + if (chunk.hasRuntime()) return; + fn(hash, chunk, context); + }); + }, + "ChunkTemplate.hooks.hashForChunk is deprecated (use JavascriptModulesPlugin.getCompilationHooks().chunkHash instead)", + "DEP_WEBPACK_CHUNK_TEMPLATE_HASH_FOR_CHUNK" + ) + } + }); } } -/** @template T @typedef {(set: SortableSet) => T[]} SetToArrayFunction */ +Object.defineProperty(ChunkTemplate.prototype, "outputOptions", { + get: util.deprecate( + /** + * @this {ChunkTemplate} + * @returns {OutputOptions} output options + */ + function () { + return this._outputOptions; + }, + "ChunkTemplate.outputOptions is deprecated (use Compilation.outputOptions instead)", + "DEP_WEBPACK_CHUNK_TEMPLATE_OUTPUT_OPTIONS" + ) +}); -/** - * @template T - * @param {SortableSet} set the set - * @returns {T[]} set as array - */ -const getArray = set => { - return Array.from(set); -}; +module.exports = ChunkTemplate; + + +/***/ }), + +/***/ 49129: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Sergey Melyukov @smelukov +*/ + + + +const asyncLib = __webpack_require__(36386); +const { SyncBailHook } = __webpack_require__(34718); +const Compilation = __webpack_require__(59622); +const createSchemaValidation = __webpack_require__(77695); +const { join } = __webpack_require__(93204); +const processAsyncTree = __webpack_require__(76815); + +/** @typedef 
{import("../declarations/WebpackOptions").CleanOptions} CleanOptions */ +/** @typedef {import("./Compiler")} Compiler */ +/** @typedef {import("./logging/Logger").Logger} Logger */ +/** @typedef {import("./util/fs").OutputFileSystem} OutputFileSystem */ +/** @typedef {import("./util/fs").StatsCallback} StatsCallback */ + +/** @typedef {(function(string):boolean)|RegExp} IgnoreItem */ +/** @typedef {function(IgnoreItem): void} AddToIgnoreCallback */ /** - * @param {SortableSet} chunks the chunks - * @returns {RuntimeSpecSet} runtimes + * @typedef {Object} CleanPluginCompilationHooks + * @property {SyncBailHook<[string], boolean>} keep when returning true the file/directory will be kept during cleaning, returning false will clean it and ignore the following plugins and config */ -const getModuleRuntimes = chunks => { - const runtimes = new RuntimeSpecSet(); - for (const chunk of chunks) { - runtimes.add(chunk.runtime); + +const validate = createSchemaValidation( + undefined, + () => { + const { definitions } = __webpack_require__(1863); + return { + definitions, + oneOf: [{ $ref: "#/definitions/CleanOptions" }] + }; + }, + { + name: "Clean Plugin", + baseDataPath: "options" } - return runtimes; -}; +); /** - * @param {SortableSet} set the set - * @returns {Map>} modules by source type + * @param {OutputFileSystem} fs filesystem + * @param {string} outputPath output path + * @param {Set} currentAssets filename of the current assets (must not start with .. or ., must only use / as path separator) + * @param {function(Error=, Set=): void} callback returns the filenames of the assets that shouldn't be there + * @returns {void} */ -const modulesBySourceType = set => { - /** @type {Map>} */ - const map = new Map(); - for (const module of set) { - for (const sourceType of module.getSourceTypes()) { - let innerSet = map.get(sourceType); - if (innerSet === undefined) { - innerSet = new SortableSet(); - map.set(sourceType, innerSet); - } - innerSet.add(module); - } +const getDiffToFs = (fs, outputPath, currentAssets, callback) => { + const directories = new Set(); + // get directories of assets + for (const asset of currentAssets) { + directories.add(asset.replace(/(^|\/)[^/]*$/, "")); } - for (const [key, innerSet] of map) { - // When all modules have the source type, we reuse the original SortableSet - // to benefit from the shared cache (especially for sorting) - if (innerSet.size === set.size) { - map.set(key, set); - } + // and all parent directories + for (const directory of directories) { + directories.add(directory.replace(/(^|\/)[^/]*$/, "")); } - return map; -}; - -/** @type {WeakMap} */ -const createOrderedArrayFunctionMap = new WeakMap(); + const diff = new Set(); + asyncLib.forEachLimit( + directories, + 10, + (directory, callback) => { + fs.readdir(join(fs, outputPath, directory), (err, entries) => { + if (err) { + if (err.code === "ENOENT") return callback(); + if (err.code === "ENOTDIR") { + diff.add(directory); + return callback(); + } + return callback(err); + } + for (const entry of entries) { + const file = /** @type {string} */ (entry); + const filename = directory ? 
`${directory}/${file}` : file; + if (!directories.has(filename) && !currentAssets.has(filename)) { + diff.add(filename); + } + } + callback(); + }); + }, + err => { + if (err) return callback(err); -/** - * @template T - * @param {function(T, T): -1|0|1} comparator comparator function - * @returns {SetToArrayFunction} set as ordered array - */ -const createOrderedArrayFunction = comparator => { - /** @type {SetToArrayFunction} */ - let fn = createOrderedArrayFunctionMap.get(comparator); - if (fn !== undefined) return fn; - fn = set => { - set.sortWith(comparator); - return Array.from(set); - }; - createOrderedArrayFunctionMap.set(comparator, fn); - return fn; + callback(null, diff); + } + ); }; /** - * @param {Iterable} modules the modules to get the count/size of - * @returns {number} the size of the modules + * @param {Set} currentAssets assets list + * @param {Set} oldAssets old assets list + * @returns {Set} diff */ -const getModulesSize = modules => { - let size = 0; - for (const module of modules) { - for (const type of module.getSourceTypes()) { - size += module.size(type); - } +const getDiffToOldAssets = (currentAssets, oldAssets) => { + const diff = new Set(); + for (const asset of oldAssets) { + if (!currentAssets.has(asset)) diff.add(asset); } - return size; + return diff; }; /** - * @param {Iterable} modules the sortable Set to get the size of - * @returns {Record} the sizes of the modules + * @param {OutputFileSystem} fs filesystem + * @param {string} filename path to file + * @param {StatsCallback} callback callback for provided filename + * @returns {void} */ -const getModulesSizes = modules => { - let sizes = Object.create(null); - for (const module of modules) { - for (const type of module.getSourceTypes()) { - sizes[type] = (sizes[type] || 0) + module.size(type); - } +const doStat = (fs, filename, callback) => { + if ("lstat" in fs) { + fs.lstat(filename, callback); + } else { + fs.stat(filename, callback); } - return sizes; }; /** - * @param {Chunk} a chunk - * @param {Chunk} b chunk - * @returns {boolean} true, if a is always a parent of b + * @param {OutputFileSystem} fs filesystem + * @param {string} outputPath output path + * @param {boolean} dry only log instead of fs modification + * @param {Logger} logger logger + * @param {Set} diff filenames of the assets that shouldn't be there + * @param {function(string): boolean} isKept check if the entry is ignored + * @param {function(Error=): void} callback callback + * @returns {void} */ -const isAvailableChunk = (a, b) => { - const queue = new Set(b.groupsIterable); - for (const chunkGroup of queue) { - if (a.isInGroup(chunkGroup)) continue; - if (chunkGroup.isInitial()) return false; - for (const parent of chunkGroup.parentsIterable) { - queue.add(parent); +const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => { + const log = msg => { + if (dry) { + logger.info(msg); + } else { + logger.log(msg); } - } - return true; + }; + /** @typedef {{ type: "check" | "unlink" | "rmdir", filename: string, parent: { remaining: number, job: Job } | undefined }} Job */ + /** @type {Job[]} */ + const jobs = Array.from(diff, filename => ({ + type: "check", + filename, + parent: undefined + })); + processAsyncTree( + jobs, + 10, + ({ type, filename, parent }, push, callback) => { + const handleError = err => { + if (err.code === "ENOENT") { + log(`${filename} was removed during cleaning by something else`); + handleParent(); + return callback(); + } + return callback(err); + }; + const handleParent = () => { + if 
(parent && --parent.remaining === 0) push(parent.job); + }; + const path = join(fs, outputPath, filename); + switch (type) { + case "check": + if (isKept(filename)) { + // do not decrement parent entry as we don't want to delete the parent + log(`${filename} will be kept`); + return process.nextTick(callback); + } + doStat(fs, path, (err, stats) => { + if (err) return handleError(err); + if (!stats.isDirectory()) { + push({ + type: "unlink", + filename, + parent + }); + return callback(); + } + fs.readdir(path, (err, entries) => { + if (err) return handleError(err); + /** @type {Job} */ + const deleteJob = { + type: "rmdir", + filename, + parent + }; + if (entries.length === 0) { + push(deleteJob); + } else { + const parentToken = { + remaining: entries.length, + job: deleteJob + }; + for (const entry of entries) { + const file = /** @type {string} */ (entry); + if (file.startsWith(".")) { + log( + `${filename} will be kept (dot-files will never be removed)` + ); + continue; + } + push({ + type: "check", + filename: `${filename}/${file}`, + parent: parentToken + }); + } + } + return callback(); + }); + }); + break; + case "rmdir": + log(`${filename} will be removed`); + if (dry) { + handleParent(); + return process.nextTick(callback); + } + if (!fs.rmdir) { + logger.warn( + `${filename} can't be removed because output file system doesn't support removing directories (rmdir)` + ); + return process.nextTick(callback); + } + fs.rmdir(path, err => { + if (err) return handleError(err); + handleParent(); + callback(); + }); + break; + case "unlink": + log(`${filename} will be removed`); + if (dry) { + handleParent(); + return process.nextTick(callback); + } + if (!fs.unlink) { + logger.warn( + `${filename} can't be removed because output file system doesn't support removing files (rmdir)` + ); + return process.nextTick(callback); + } + fs.unlink(path, err => { + if (err) return handleError(err); + handleParent(); + callback(); + }); + break; + } + }, + callback + ); }; -class ChunkGraphModule { - constructor() { - /** @type {SortableSet} */ - this.chunks = new SortableSet(); - /** @type {Set | undefined} */ - this.entryInChunks = undefined; - /** @type {Set | undefined} */ - this.runtimeInChunks = undefined; - /** @type {RuntimeSpecMap} */ - this.hashes = undefined; - /** @type {string | number} */ - this.id = null; - /** @type {RuntimeSpecMap> | undefined} */ - this.runtimeRequirements = undefined; - /** @type {RuntimeSpecMap} */ - this.graphHashes = undefined; - /** @type {RuntimeSpecMap} */ - this.graphHashesWithConnections = undefined; - } -} - -class ChunkGraphChunk { - constructor() { - /** @type {SortableSet} */ - this.modules = new SortableSet(); - /** @type {Map} */ - this.entryModules = new Map(); - /** @type {SortableSet} */ - this.runtimeModules = new SortableSet(); - /** @type {Set | undefined} */ - this.fullHashModules = undefined; - /** @type {Set | undefined} */ - this.dependentHashModules = undefined; - /** @type {Set | undefined} */ - this.runtimeRequirements = undefined; - /** @type {Set} */ - this.runtimeRequirementsInTree = new Set(); - } -} - -class ChunkGraph { - /** - * @param {ModuleGraph} moduleGraph the module graph - * @param {string | Hash} hashFunction the hash function to use - */ - constructor(moduleGraph, hashFunction = "md4") { - /** @private @type {WeakMap} */ - this._modules = new WeakMap(); - /** @private @type {WeakMap} */ - this._chunks = new WeakMap(); - /** @private @type {WeakMap} */ - this._blockChunkGroups = new WeakMap(); - /** @private @type {Map} */ 
- this._runtimeIds = new Map(); - /** @type {ModuleGraph} */ - this.moduleGraph = moduleGraph; - - this._hashFunction = hashFunction; - - this._getGraphRoots = this._getGraphRoots.bind(this); - } +/** @type {WeakMap} */ +const compilationHooksMap = new WeakMap(); +class CleanPlugin { /** - * @private - * @param {Module} module the module - * @returns {ChunkGraphModule} internal module + * @param {Compilation} compilation the compilation + * @returns {CleanPluginCompilationHooks} the attached hooks */ - _getChunkGraphModule(module) { - let cgm = this._modules.get(module); - if (cgm === undefined) { - cgm = new ChunkGraphModule(); - this._modules.set(module, cgm); + static getCompilationHooks(compilation) { + if (!(compilation instanceof Compilation)) { + throw new TypeError( + "The 'compilation' argument must be an instance of Compilation" + ); } - return cgm; - } - - /** - * @private - * @param {Chunk} chunk the chunk - * @returns {ChunkGraphChunk} internal chunk - */ - _getChunkGraphChunk(chunk) { - let cgc = this._chunks.get(chunk); - if (cgc === undefined) { - cgc = new ChunkGraphChunk(); - this._chunks.set(chunk, cgc); + let hooks = compilationHooksMap.get(compilation); + if (hooks === undefined) { + hooks = { + /** @type {SyncBailHook<[string], boolean>} */ + keep: new SyncBailHook(["ignore"]) + }; + compilationHooksMap.set(compilation, hooks); } - return cgc; - } - - /** - * @param {SortableSet} set the sortable Set to get the roots of - * @returns {Module[]} the graph roots - */ - _getGraphRoots(set) { - const { moduleGraph } = this; - return Array.from( - findGraphRoots(set, module => { - /** @type {Set} */ - const set = new Set(); - const addDependencies = module => { - for (const connection of moduleGraph.getOutgoingConnections(module)) { - if (!connection.module) continue; - const activeState = connection.getActiveState(undefined); - if (activeState === false) continue; - if (activeState === ModuleGraphConnection.TRANSITIVE_ONLY) { - addDependencies(connection.module); - continue; - } - set.add(connection.module); - } - }; - addDependencies(module); - return set; - }) - ).sort(compareModulesByIdentifier); + return hooks; } - /** - * @param {Chunk} chunk the new chunk - * @param {Module} module the module - * @returns {void} - */ - connectChunkAndModule(chunk, module) { - const cgm = this._getChunkGraphModule(module); - const cgc = this._getChunkGraphChunk(chunk); - cgm.chunks.add(chunk); - cgc.modules.add(module); + /** @param {CleanOptions} options options */ + constructor(options = {}) { + validate(options); + this.options = { dry: false, ...options }; } /** - * @param {Chunk} chunk the chunk - * @param {Module} module the module + * Apply the plugin + * @param {Compiler} compiler the compiler instance * @returns {void} */ - disconnectChunkAndModule(chunk, module) { - const cgm = this._getChunkGraphModule(module); - const cgc = this._getChunkGraphChunk(chunk); - cgc.modules.delete(module); - cgm.chunks.delete(chunk); - } + apply(compiler) { + const { dry, keep } = this.options; - /** - * @param {Chunk} chunk the chunk which will be disconnected - * @returns {void} - */ - disconnectChunk(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - for (const module of cgc.modules) { - const cgm = this._getChunkGraphModule(module); - cgm.chunks.delete(chunk); - } - cgc.modules.clear(); - chunk.disconnectFromGroups(); - ChunkGraph.clearChunkGraphForChunk(chunk); - } + const keepFn = + typeof keep === "function" + ? keep + : typeof keep === "string" + ? 
path => path.startsWith(keep) + : typeof keep === "object" && keep.test + ? path => keep.test(path) + : () => false; - /** - * @param {Chunk} chunk the chunk - * @param {Iterable} modules the modules - * @returns {void} - */ - attachModules(chunk, modules) { - const cgc = this._getChunkGraphChunk(chunk); - for (const module of modules) { - cgc.modules.add(module); - } - } + // We assume that no external modification happens while the compiler is active + // So we can store the old assets and only diff to them to avoid fs access on + // incremental builds + let oldAssets; - /** - * @param {Chunk} chunk the chunk - * @param {Iterable} modules the runtime modules - * @returns {void} - */ - attachRuntimeModules(chunk, modules) { - const cgc = this._getChunkGraphChunk(chunk); - for (const module of modules) { - cgc.runtimeModules.add(module); - } - } + compiler.hooks.emit.tapAsync( + { + name: "CleanPlugin", + stage: 100 + }, + (compilation, callback) => { + const hooks = CleanPlugin.getCompilationHooks(compilation); + const logger = compilation.getLogger("webpack.CleanPlugin"); + const fs = compiler.outputFileSystem; - /** - * @param {Chunk} chunk the chunk - * @param {Iterable} modules the modules that require a full hash - * @returns {void} - */ - attachFullHashModules(chunk, modules) { - const cgc = this._getChunkGraphChunk(chunk); - if (cgc.fullHashModules === undefined) cgc.fullHashModules = new Set(); - for (const module of modules) { - cgc.fullHashModules.add(module); - } - } + if (!fs.readdir) { + return callback( + new Error( + "CleanPlugin: Output filesystem doesn't support listing directories (readdir)" + ) + ); + } - /** - * @param {Chunk} chunk the chunk - * @param {Iterable} modules the modules that require a full hash - * @returns {void} - */ - attachDependentHashModules(chunk, modules) { - const cgc = this._getChunkGraphChunk(chunk); - if (cgc.dependentHashModules === undefined) - cgc.dependentHashModules = new Set(); - for (const module of modules) { - cgc.dependentHashModules.add(module); - } - } + const currentAssets = new Set(); + for (const asset of Object.keys(compilation.assets)) { + if (/^[A-Za-z]:\\|^\/|^\\\\/.test(asset)) continue; + let normalizedAsset; + let newNormalizedAsset = asset.replace(/\\/g, "/"); + do { + normalizedAsset = newNormalizedAsset; + newNormalizedAsset = normalizedAsset.replace( + /(^|\/)(?!\.\.)[^/]+\/\.\.\//g, + "$1" + ); + } while (newNormalizedAsset !== normalizedAsset); + if (normalizedAsset.startsWith("../")) continue; + currentAssets.add(normalizedAsset); + } - /** - * @param {Module} oldModule the replaced module - * @param {Module} newModule the replacing module - * @returns {void} - */ - replaceModule(oldModule, newModule) { - const oldCgm = this._getChunkGraphModule(oldModule); - const newCgm = this._getChunkGraphModule(newModule); + const outputPath = compilation.getPath(compiler.outputPath, {}); - for (const chunk of oldCgm.chunks) { - const cgc = this._getChunkGraphChunk(chunk); - cgc.modules.delete(oldModule); - cgc.modules.add(newModule); - newCgm.chunks.add(chunk); - } - oldCgm.chunks.clear(); + const isKept = path => { + const result = hooks.keep.call(path); + if (result !== undefined) return result; + return keepFn(path); + }; - if (oldCgm.entryInChunks !== undefined) { - if (newCgm.entryInChunks === undefined) { - newCgm.entryInChunks = new Set(); - } - for (const chunk of oldCgm.entryInChunks) { - const cgc = this._getChunkGraphChunk(chunk); - const old = cgc.entryModules.get(oldModule); - /** @type {Map} */ - const 
newEntryModules = new Map(); - for (const [m, cg] of cgc.entryModules) { - if (m === oldModule) { - newEntryModules.set(newModule, old); - } else { - newEntryModules.set(m, cg); + const diffCallback = (err, diff) => { + if (err) { + oldAssets = undefined; + return callback(err); } - } - cgc.entryModules = newEntryModules; - newCgm.entryInChunks.add(chunk); - } - oldCgm.entryInChunks = undefined; - } + applyDiff(fs, outputPath, dry, logger, diff, isKept, err => { + if (err) { + oldAssets = undefined; + } else { + oldAssets = currentAssets; + } + callback(err); + }); + }; - if (oldCgm.runtimeInChunks !== undefined) { - if (newCgm.runtimeInChunks === undefined) { - newCgm.runtimeInChunks = new Set(); - } - for (const chunk of oldCgm.runtimeInChunks) { - const cgc = this._getChunkGraphChunk(chunk); - cgc.runtimeModules.delete(/** @type {RuntimeModule} */ (oldModule)); - cgc.runtimeModules.add(/** @type {RuntimeModule} */ (newModule)); - newCgm.runtimeInChunks.add(chunk); - if ( - cgc.fullHashModules !== undefined && - cgc.fullHashModules.has(/** @type {RuntimeModule} */ (oldModule)) - ) { - cgc.fullHashModules.delete(/** @type {RuntimeModule} */ (oldModule)); - cgc.fullHashModules.add(/** @type {RuntimeModule} */ (newModule)); - } - if ( - cgc.dependentHashModules !== undefined && - cgc.dependentHashModules.has(/** @type {RuntimeModule} */ (oldModule)) - ) { - cgc.dependentHashModules.delete( - /** @type {RuntimeModule} */ (oldModule) - ); - cgc.dependentHashModules.add( - /** @type {RuntimeModule} */ (newModule) - ); + if (oldAssets) { + diffCallback(null, getDiffToOldAssets(currentAssets, oldAssets)); + } else { + getDiffToFs(fs, outputPath, currentAssets, diffCallback); } } - oldCgm.runtimeInChunks = undefined; - } - } - - /** - * @param {Module} module the checked module - * @param {Chunk} chunk the checked chunk - * @returns {boolean} true, if the chunk contains the module - */ - isModuleInChunk(module, chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.modules.has(module); - } - - /** - * @param {Module} module the checked module - * @param {ChunkGroup} chunkGroup the checked chunk group - * @returns {boolean} true, if the chunk contains the module - */ - isModuleInChunkGroup(module, chunkGroup) { - for (const chunk of chunkGroup.chunks) { - if (this.isModuleInChunk(module, chunk)) return true; - } - return false; + ); } +} - /** - * @param {Module} module the checked module - * @returns {boolean} true, if the module is entry of any chunk - */ - isEntryModule(module) { - const cgm = this._getChunkGraphModule(module); - return cgm.entryInChunks !== undefined; - } +module.exports = CleanPlugin; - /** - * @param {Module} module the module - * @returns {Iterable} iterable of chunks (do not modify) - */ - getModuleChunksIterable(module) { - const cgm = this._getChunkGraphModule(module); - return cgm.chunks; - } - /** - * @param {Module} module the module - * @param {function(Chunk, Chunk): -1|0|1} sortFn sort function - * @returns {Iterable} iterable of chunks (do not modify) - */ - getOrderedModuleChunksIterable(module, sortFn) { - const cgm = this._getChunkGraphModule(module); - cgm.chunks.sortWith(sortFn); - return cgm.chunks; - } +/***/ }), - /** - * @param {Module} module the module - * @returns {Chunk[]} array of chunks (cached, do not modify) - */ - getModuleChunks(module) { - const cgm = this._getChunkGraphModule(module); - return cgm.chunks.getFromCache(getArray); - } +/***/ 43604: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - /** - * 
@param {Module} module the module - * @returns {number} the number of chunk which contain the module - */ - getNumberOfModuleChunks(module) { - const cgm = this._getChunkGraphModule(module); - return cgm.chunks.size; - } +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ - /** - * @param {Module} module the module - * @returns {RuntimeSpecSet} runtimes - */ - getModuleRuntimes(module) { - const cgm = this._getChunkGraphModule(module); - return cgm.chunks.getFromUnorderedCache(getModuleRuntimes); - } - /** - * @param {Chunk} chunk the chunk - * @returns {number} the number of modules which are contained in this chunk - */ - getNumberOfChunkModules(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.modules.size; - } - /** - * @param {Chunk} chunk the chunk - * @returns {number} the number of full hash modules which are contained in this chunk - */ - getNumberOfChunkFullHashModules(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.fullHashModules === undefined ? 0 : cgc.fullHashModules.size; - } +const WebpackError = __webpack_require__(68422); - /** - * @param {Chunk} chunk the chunk - * @returns {Iterable} return the modules for this chunk - */ - getChunkModulesIterable(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.modules; - } +/** @typedef {import("./Module")} Module */ +class CodeGenerationError extends WebpackError { /** - * @param {Chunk} chunk the chunk - * @param {string} sourceType source type - * @returns {Iterable | undefined} return the modules for this chunk + * Create a new CodeGenerationError + * @param {Module} module related module + * @param {Error} error Original error */ - getChunkModulesIterableBySourceType(chunk, sourceType) { - const cgc = this._getChunkGraphChunk(chunk); - const modulesWithSourceType = cgc.modules - .getFromUnorderedCache(modulesBySourceType) - .get(sourceType); - return modulesWithSourceType; - } + constructor(module, error) { + super(); - /** - * @param {Chunk} chunk the chunk - * @param {function(Module, Module): -1|0|1} comparator comparator function - * @returns {Iterable} return the modules for this chunk - */ - getOrderedChunkModulesIterable(chunk, comparator) { - const cgc = this._getChunkGraphChunk(chunk); - cgc.modules.sortWith(comparator); - return cgc.modules; + this.name = "CodeGenerationError"; + this.error = error; + this.message = error.message; + this.details = error.stack; + this.module = module; } +} - /** - * @param {Chunk} chunk the chunk - * @param {string} sourceType source type - * @param {function(Module, Module): -1|0|1} comparator comparator function - * @returns {Iterable | undefined} return the modules for this chunk - */ - getOrderedChunkModulesIterableBySourceType(chunk, sourceType, comparator) { - const cgc = this._getChunkGraphChunk(chunk); - const modulesWithSourceType = cgc.modules - .getFromUnorderedCache(modulesBySourceType) - .get(sourceType); - if (modulesWithSourceType === undefined) return undefined; - modulesWithSourceType.sortWith(comparator); - return modulesWithSourceType; - } +module.exports = CodeGenerationError; - /** - * @param {Chunk} chunk the chunk - * @returns {Module[]} return the modules for this chunk (cached, do not modify) - */ - getChunkModules(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.modules.getFromUnorderedCache(getArray); - } - /** - * @param {Chunk} chunk the chunk - * @param {function(Module, Module): -1|0|1} comparator comparator 
function - * @returns {Module[]} return the modules for this chunk (cached, do not modify) - */ - getOrderedChunkModules(chunk, comparator) { - const cgc = this._getChunkGraphChunk(chunk); - const arrayFunction = createOrderedArrayFunction(comparator); - return cgc.modules.getFromUnorderedCache(arrayFunction); - } +/***/ }), - /** - * @param {Chunk} chunk the chunk - * @param {ModuleFilterPredicate} filterFn function used to filter modules - * @param {boolean} includeAllChunks all chunks or only async chunks - * @returns {Record} chunk to module ids object - */ - getChunkModuleIdMap(chunk, filterFn, includeAllChunks = false) { - /** @type {Record} */ - const chunkModuleIdMap = Object.create(null); +/***/ 74841: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - for (const asyncChunk of includeAllChunks - ? chunk.getAllReferencedChunks() - : chunk.getAllAsyncChunks()) { - /** @type {(string|number)[]} */ - let array; - for (const module of this.getOrderedChunkModulesIterable( - asyncChunk, - compareModulesById(this) - )) { - if (filterFn(module)) { - if (array === undefined) { - array = []; - chunkModuleIdMap[asyncChunk.id] = array; - } - const moduleId = this.getModuleId(module); - array.push(moduleId); - } - } - } +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ - return chunkModuleIdMap; - } - /** - * @param {Chunk} chunk the chunk - * @param {ModuleFilterPredicate} filterFn function used to filter modules - * @param {number} hashLength length of the hash - * @param {boolean} includeAllChunks all chunks or only async chunks - * @returns {Record>} chunk to module id to module hash object - */ - getChunkModuleRenderedHashMap( - chunk, - filterFn, - hashLength = 0, - includeAllChunks = false - ) { - /** @type {Record>} */ - const chunkModuleHashMap = Object.create(null); - for (const asyncChunk of includeAllChunks - ? chunk.getAllReferencedChunks() - : chunk.getAllAsyncChunks()) { - /** @type {Record} */ - let idToHashMap; - for (const module of this.getOrderedChunkModulesIterable( - asyncChunk, - compareModulesById(this) - )) { - if (filterFn(module)) { - if (idToHashMap === undefined) { - idToHashMap = Object.create(null); - chunkModuleHashMap[asyncChunk.id] = idToHashMap; - } - const moduleId = this.getModuleId(module); - const hash = this.getRenderedModuleHash(module, asyncChunk.runtime); - idToHashMap[moduleId] = hashLength ? 
hash.slice(0, hashLength) : hash; - } - } - } +const { provide } = __webpack_require__(30498); +const { first } = __webpack_require__(34715); +const createHash = __webpack_require__(24123); +const { runtimeToString, RuntimeSpecMap } = __webpack_require__(19655); - return chunkModuleHashMap; - } +/** @typedef {import("webpack-sources").Source} Source */ +/** @typedef {import("./Module")} Module */ +/** @typedef {import("./Module").CodeGenerationResult} CodeGenerationResult */ +/** @typedef {typeof import("./util/Hash")} Hash */ +/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */ +class CodeGenerationResults { /** - * @param {Chunk} chunk the chunk - * @param {ChunkFilterPredicate} filterFn function used to filter chunks - * @returns {Record} chunk map + * @param {string | Hash} hashFunction the hash function to use */ - getChunkConditionMap(chunk, filterFn) { - const map = Object.create(null); - for (const c of chunk.getAllReferencedChunks()) { - map[c.id] = filterFn(c, this); - } - return map; + constructor(hashFunction = "md4") { + /** @type {Map>} */ + this.map = new Map(); + this._hashFunction = hashFunction; } /** - * @param {Chunk} chunk the chunk - * @param {ModuleFilterPredicate} filterFn predicate function used to filter modules - * @param {ChunkFilterPredicate=} filterChunkFn predicate function used to filter chunks - * @returns {boolean} return true if module exists in graph + * @param {Module} module the module + * @param {RuntimeSpec} runtime runtime(s) + * @returns {CodeGenerationResult} the CodeGenerationResult */ - hasModuleInGraph(chunk, filterFn, filterChunkFn) { - const queue = new Set(chunk.groupsIterable); - const chunksProcessed = new Set(); - - for (const chunkGroup of queue) { - for (const innerChunk of chunkGroup.chunks) { - if (!chunksProcessed.has(innerChunk)) { - chunksProcessed.add(innerChunk); - if (!filterChunkFn || filterChunkFn(innerChunk, this)) { - for (const module of this.getChunkModulesIterable(innerChunk)) { - if (filterFn(module)) { - return true; - } - } - } + get(module, runtime) { + const entry = this.map.get(module); + if (entry === undefined) { + throw new Error( + `No code generation entry for ${module.identifier()} (existing entries: ${Array.from( + this.map.keys(), + m => m.identifier() + ).join(", ")})` + ); + } + if (runtime === undefined) { + if (entry.size > 1) { + const results = new Set(entry.values()); + if (results.size !== 1) { + throw new Error( + `No unique code generation entry for unspecified runtime for ${module.identifier()} (existing runtimes: ${Array.from( + entry.keys(), + r => runtimeToString(r) + ).join(", ")}). 
+Caller might not support runtime-dependent code generation (opt-out via optimization.usedExports: "global").` + ); } + return first(results); } - for (const child of chunkGroup.childrenIterable) { - queue.add(child); - } + return entry.values().next().value; } - return false; + const result = entry.get(runtime); + if (result === undefined) { + throw new Error( + `No code generation entry for runtime ${runtimeToString( + runtime + )} for ${module.identifier()} (existing runtimes: ${Array.from( + entry.keys(), + r => runtimeToString(r) + ).join(", ")})` + ); + } + return result; } /** - * @param {Chunk} chunkA first chunk - * @param {Chunk} chunkB second chunk - * @returns {-1|0|1} this is a comparator function like sort and returns -1, 0, or 1 based on sort order + * @param {Module} module the module + * @param {RuntimeSpec} runtime runtime(s) + * @returns {boolean} true, when we have data for this */ - compareChunks(chunkA, chunkB) { - const cgcA = this._getChunkGraphChunk(chunkA); - const cgcB = this._getChunkGraphChunk(chunkB); - if (cgcA.modules.size > cgcB.modules.size) return -1; - if (cgcA.modules.size < cgcB.modules.size) return 1; - cgcA.modules.sortWith(compareModulesByIdentifier); - cgcB.modules.sortWith(compareModulesByIdentifier); - return compareModuleIterables(cgcA.modules, cgcB.modules); + has(module, runtime) { + const entry = this.map.get(module); + if (entry === undefined) { + return false; + } + if (runtime !== undefined) { + return entry.has(runtime); + } else if (entry.size > 1) { + const results = new Set(entry.values()); + return results.size === 1; + } else { + return entry.size === 1; + } } /** - * @param {Chunk} chunk the chunk - * @returns {number} total size of all modules in the chunk + * @param {Module} module the module + * @param {RuntimeSpec} runtime runtime(s) + * @param {string} sourceType the source type + * @returns {Source} a source */ - getChunkModulesSize(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.modules.getFromUnorderedCache(getModulesSize); + getSource(module, runtime, sourceType) { + return this.get(module, runtime).sources.get(sourceType); } /** - * @param {Chunk} chunk the chunk - * @returns {Record} total sizes of all modules in the chunk by source type + * @param {Module} module the module + * @param {RuntimeSpec} runtime runtime(s) + * @returns {ReadonlySet} runtime requirements */ - getChunkModulesSizes(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.modules.getFromUnorderedCache(getModulesSizes); + getRuntimeRequirements(module, runtime) { + return this.get(module, runtime).runtimeRequirements; } /** - * @param {Chunk} chunk the chunk - * @returns {Module[]} root modules of the chunks (ordered by identifier) + * @param {Module} module the module + * @param {RuntimeSpec} runtime runtime(s) + * @param {string} key data key + * @returns {any} data generated by code generation */ - getChunkRootModules(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.modules.getFromUnorderedCache(this._getGraphRoots); + getData(module, runtime, key) { + const data = this.get(module, runtime).data; + return data === undefined ? 
undefined : data.get(key); } /** - * @param {Chunk} chunk the chunk - * @param {ChunkSizeOptions} options options object - * @returns {number} total size of the chunk + * @param {Module} module the module + * @param {RuntimeSpec} runtime runtime(s) + * @returns {any} hash of the code generation */ - getChunkSize(chunk, options = {}) { - const cgc = this._getChunkGraphChunk(chunk); - const modulesSize = cgc.modules.getFromUnorderedCache(getModulesSize); - const chunkOverhead = - typeof options.chunkOverhead === "number" ? options.chunkOverhead : 10000; - const entryChunkMultiplicator = - typeof options.entryChunkMultiplicator === "number" - ? options.entryChunkMultiplicator - : 10; - return ( - chunkOverhead + - modulesSize * (chunk.canBeInitial() ? entryChunkMultiplicator : 1) - ); + getHash(module, runtime) { + const info = this.get(module, runtime); + if (info.hash !== undefined) return info.hash; + const hash = createHash(this._hashFunction); + for (const [type, source] of info.sources) { + hash.update(type); + source.updateHash(hash); + } + if (info.runtimeRequirements) { + for (const rr of info.runtimeRequirements) hash.update(rr); + } + return (info.hash = /** @type {string} */ (hash.digest("hex"))); } /** - * @param {Chunk} chunkA chunk - * @param {Chunk} chunkB chunk - * @param {ChunkSizeOptions} options options object - * @returns {number} total size of the chunk or false if chunks can't be integrated + * @param {Module} module the module + * @param {RuntimeSpec} runtime runtime(s) + * @param {CodeGenerationResult} result result from module + * @returns {void} */ - getIntegratedChunksSize(chunkA, chunkB, options = {}) { - const cgcA = this._getChunkGraphChunk(chunkA); - const cgcB = this._getChunkGraphChunk(chunkB); - const allModules = new Set(cgcA.modules); - for (const m of cgcB.modules) allModules.add(m); - let modulesSize = getModulesSize(allModules); - const chunkOverhead = - typeof options.chunkOverhead === "number" ? options.chunkOverhead : 10000; - const entryChunkMultiplicator = - typeof options.entryChunkMultiplicator === "number" - ? options.entryChunkMultiplicator - : 10; - return ( - chunkOverhead + - modulesSize * - (chunkA.canBeInitial() || chunkB.canBeInitial() - ? 
entryChunkMultiplicator - : 1) - ); + add(module, runtime, result) { + const map = provide(this.map, module, () => new RuntimeSpecMap()); + map.set(runtime, result); } +} - /** - * @param {Chunk} chunkA chunk - * @param {Chunk} chunkB chunk - * @returns {boolean} true, if chunks could be integrated - */ - canChunksBeIntegrated(chunkA, chunkB) { - if (chunkA.preventIntegration || chunkB.preventIntegration) { - return false; - } +module.exports = CodeGenerationResults; - const hasRuntimeA = chunkA.hasRuntime(); - const hasRuntimeB = chunkB.hasRuntime(); - if (hasRuntimeA !== hasRuntimeB) { - if (hasRuntimeA) { - return isAvailableChunk(chunkA, chunkB); - } else if (hasRuntimeB) { - return isAvailableChunk(chunkB, chunkA); - } else { - return false; - } - } +/***/ }), - if ( - this.getNumberOfEntryModules(chunkA) > 0 || - this.getNumberOfEntryModules(chunkB) > 0 - ) { - return false; - } +/***/ 28151: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ - return true; - } + +const WebpackError = __webpack_require__(68422); +const makeSerializable = __webpack_require__(26522); + +/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ + +class CommentCompilationWarning extends WebpackError { /** - * @param {Chunk} chunkA the target chunk - * @param {Chunk} chunkB the chunk to integrate - * @returns {void} + * + * @param {string} message warning message + * @param {DependencyLocation} loc affected lines of code */ - integrateChunks(chunkA, chunkB) { - // Decide for one name (deterministic) - if (chunkA.name && chunkB.name) { - if ( - this.getNumberOfEntryModules(chunkA) > 0 === - this.getNumberOfEntryModules(chunkB) > 0 - ) { - // When both chunks have entry modules or none have one, use - // shortest name - if (chunkA.name.length !== chunkB.name.length) { - chunkA.name = - chunkA.name.length < chunkB.name.length ? chunkA.name : chunkB.name; - } else { - chunkA.name = chunkA.name < chunkB.name ? 
chunkA.name : chunkB.name; - } - } else if (this.getNumberOfEntryModules(chunkB) > 0) { - // Pick the name of the chunk with the entry module - chunkA.name = chunkB.name; - } - } else if (chunkB.name) { - chunkA.name = chunkB.name; - } + constructor(message, loc) { + super(message); - // Merge id name hints - for (const hint of chunkB.idNameHints) { - chunkA.idNameHints.add(hint); - } + this.name = "CommentCompilationWarning"; - // Merge runtime - chunkA.runtime = mergeRuntime(chunkA.runtime, chunkB.runtime); + this.loc = loc; + } +} - // getChunkModules is used here to create a clone, because disconnectChunkAndModule modifies - for (const module of this.getChunkModules(chunkB)) { - this.disconnectChunkAndModule(chunkB, module); - this.connectChunkAndModule(chunkA, module); - } +makeSerializable( + CommentCompilationWarning, + "webpack/lib/CommentCompilationWarning" +); - for (const [module, chunkGroup] of Array.from( - this.getChunkEntryModulesWithChunkGroupIterable(chunkB) - )) { - this.disconnectChunkAndEntryModule(chunkB, module); - this.connectChunkAndEntryModule(chunkA, module, chunkGroup); - } +module.exports = CommentCompilationWarning; - for (const chunkGroup of chunkB.groupsIterable) { - chunkGroup.replaceChunk(chunkB, chunkA); - chunkA.addGroup(chunkGroup); - chunkB.removeGroup(chunkGroup); - } - ChunkGraph.clearChunkGraphForChunk(chunkB); - } - /** - * @param {Chunk} chunk the chunk to upgrade - * @returns {void} - */ - upgradeDependentToFullHashModules(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - if (cgc.dependentHashModules === undefined) return; - if (cgc.fullHashModules === undefined) { - cgc.fullHashModules = cgc.dependentHashModules; - } else { - for (const m of cgc.dependentHashModules) { - cgc.fullHashModules.add(m); - } - cgc.dependentHashModules = undefined; - } - } +/***/ }), - /** - * @param {Module} module the checked module - * @param {Chunk} chunk the checked chunk - * @returns {boolean} true, if the chunk contains the module as entry - */ - isEntryModuleInChunk(module, chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.entryModules.has(module); - } +/***/ 27847: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - /** - * @param {Chunk} chunk the new chunk - * @param {Module} module the entry module - * @param {Entrypoint=} entrypoint the chunk group which must be loaded before the module is executed - * @returns {void} - */ - connectChunkAndEntryModule(chunk, module, entrypoint) { - const cgm = this._getChunkGraphModule(module); - const cgc = this._getChunkGraphChunk(chunk); - if (cgm.entryInChunks === undefined) { - cgm.entryInChunks = new Set(); - } - cgm.entryInChunks.add(chunk); - cgc.entryModules.set(module, entrypoint); - } +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ - /** - * @param {Chunk} chunk the new chunk - * @param {RuntimeModule} module the runtime module - * @returns {void} - */ - connectChunkAndRuntimeModule(chunk, module) { - const cgm = this._getChunkGraphModule(module); - const cgc = this._getChunkGraphChunk(chunk); - if (cgm.runtimeInChunks === undefined) { - cgm.runtimeInChunks = new Set(); - } - cgm.runtimeInChunks.add(chunk); - cgc.runtimeModules.add(module); - } - /** - * @param {Chunk} chunk the new chunk - * @param {RuntimeModule} module the module that require a full hash - * @returns {void} - */ - addFullHashModuleToChunk(chunk, module) { - const cgc = this._getChunkGraphChunk(chunk); - if 
(cgc.fullHashModules === undefined) cgc.fullHashModules = new Set(); - cgc.fullHashModules.add(module); - } - /** - * @param {Chunk} chunk the new chunk - * @param {RuntimeModule} module the module that require a full hash - * @returns {void} - */ - addDependentHashModuleToChunk(chunk, module) { - const cgc = this._getChunkGraphChunk(chunk); - if (cgc.dependentHashModules === undefined) - cgc.dependentHashModules = new Set(); - cgc.dependentHashModules.add(module); - } +const ConstDependency = __webpack_require__(60864); - /** - * @param {Chunk} chunk the new chunk - * @param {Module} module the entry module - * @returns {void} - */ - disconnectChunkAndEntryModule(chunk, module) { - const cgm = this._getChunkGraphModule(module); - const cgc = this._getChunkGraphChunk(chunk); - cgm.entryInChunks.delete(chunk); - if (cgm.entryInChunks.size === 0) { - cgm.entryInChunks = undefined; - } - cgc.entryModules.delete(module); - } +/** @typedef {import("./Compiler")} Compiler */ +/** @typedef {import("./javascript/JavascriptParser")} JavascriptParser */ - /** - * @param {Chunk} chunk the new chunk - * @param {RuntimeModule} module the runtime module - * @returns {void} - */ - disconnectChunkAndRuntimeModule(chunk, module) { - const cgm = this._getChunkGraphModule(module); - const cgc = this._getChunkGraphChunk(chunk); - cgm.runtimeInChunks.delete(chunk); - if (cgm.runtimeInChunks.size === 0) { - cgm.runtimeInChunks = undefined; - } - cgc.runtimeModules.delete(module); - } +const nestedWebpackRequireTag = Symbol("nested __webpack_require__"); +class CompatibilityPlugin { /** - * @param {Module} module the entry module, it will no longer be entry - * @returns {void} - */ - disconnectEntryModule(module) { - const cgm = this._getChunkGraphModule(module); - for (const chunk of cgm.entryInChunks) { - const cgc = this._getChunkGraphChunk(chunk); - cgc.entryModules.delete(module); - } - cgm.entryInChunks = undefined; - } - - /** - * @param {Chunk} chunk the chunk, for which all entries will be removed - * @returns {void} - */ - disconnectEntries(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - for (const module of cgc.entryModules.keys()) { - const cgm = this._getChunkGraphModule(module); - cgm.entryInChunks.delete(chunk); - if (cgm.entryInChunks.size === 0) { - cgm.entryInChunks = undefined; - } - } - cgc.entryModules.clear(); - } - - /** - * @param {Chunk} chunk the chunk - * @returns {number} the amount of entry modules in chunk - */ - getNumberOfEntryModules(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.entryModules.size; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {number} the amount of entry modules in chunk - */ - getNumberOfRuntimeModules(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.runtimeModules.size; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {Iterable} iterable of modules (do not modify) - */ - getChunkEntryModulesIterable(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.entryModules.keys(); - } - - /** - * @param {Chunk} chunk the chunk - * @returns {Iterable} iterable of chunks - */ - getChunkEntryDependentChunksIterable(chunk) { - /** @type {Set} */ - const set = new Set(); - for (const chunkGroup of chunk.groupsIterable) { - if (chunkGroup instanceof Entrypoint) { - const entrypointChunk = chunkGroup.getEntrypointChunk(); - const cgc = this._getChunkGraphChunk(entrypointChunk); - for (const chunkGroup of cgc.entryModules.values()) { - for (const c of chunkGroup.chunks) { - if (c !== 
chunk && c !== entrypointChunk && !c.hasRuntime()) { - set.add(c); - } - } - } - } - } - - return set; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {boolean} true, when it has dependent chunks - */ - hasChunkEntryDependentChunks(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - for (const chunkGroup of cgc.entryModules.values()) { - for (const c of chunkGroup.chunks) { - if (c !== chunk) { - return true; - } - } - } - return false; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {Iterable} iterable of modules (do not modify) - */ - getChunkRuntimeModulesIterable(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.runtimeModules; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {RuntimeModule[]} array of modules in order of execution - */ - getChunkRuntimeModulesInOrder(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - const array = Array.from(cgc.runtimeModules); - array.sort( - concatComparators( - compareSelect( - /** - * @param {RuntimeModule} r runtime module - * @returns {number=} stage - */ - r => r.stage, - compareIds - ), - compareModulesByIdentifier - ) - ); - return array; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {Iterable | undefined} iterable of modules (do not modify) - */ - getChunkFullHashModulesIterable(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.fullHashModules; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {ReadonlySet | undefined} set of modules (do not modify) - */ - getChunkFullHashModulesSet(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.fullHashModules; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {Iterable | undefined} iterable of modules (do not modify) - */ - getChunkDependentHashModulesIterable(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.dependentHashModules; - } - - /** @typedef {[Module, Entrypoint | undefined]} EntryModuleWithChunkGroup */ - - /** - * @param {Chunk} chunk the chunk - * @returns {Iterable} iterable of modules (do not modify) - */ - getChunkEntryModulesWithChunkGroupIterable(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.entryModules; - } - - /** - * @param {AsyncDependenciesBlock} depBlock the async block - * @returns {ChunkGroup} the chunk group - */ - getBlockChunkGroup(depBlock) { - return this._blockChunkGroups.get(depBlock); - } - - /** - * @param {AsyncDependenciesBlock} depBlock the async block - * @param {ChunkGroup} chunkGroup the chunk group - * @returns {void} - */ - connectBlockAndChunkGroup(depBlock, chunkGroup) { - this._blockChunkGroups.set(depBlock, chunkGroup); - chunkGroup.addBlock(depBlock); - } - - /** - * @param {ChunkGroup} chunkGroup the chunk group - * @returns {void} - */ - disconnectChunkGroup(chunkGroup) { - for (const block of chunkGroup.blocksIterable) { - this._blockChunkGroups.delete(block); - } - // TODO refactor by moving blocks list into ChunkGraph - chunkGroup._blocks.clear(); - } - - /** - * @param {Module} module the module - * @returns {string | number} the id of the module - */ - getModuleId(module) { - const cgm = this._getChunkGraphModule(module); - return cgm.id; - } - - /** - * @param {Module} module the module - * @param {string | number} id the id of the module - * @returns {void} - */ - setModuleId(module, id) { - const cgm = this._getChunkGraphModule(module); - cgm.id = id; - } - - /** - * @param {string} runtime runtime - * @returns {string | number} the id of the runtime - */ - 
getRuntimeId(runtime) { - return this._runtimeIds.get(runtime); - } - - /** - * @param {string} runtime runtime - * @param {string | number} id the id of the runtime - * @returns {void} - */ - setRuntimeId(runtime, id) { - this._runtimeIds.set(runtime, id); - } - - /** - * @template T - * @param {Module} module the module - * @param {RuntimeSpecMap} hashes hashes data - * @param {RuntimeSpec} runtime the runtime - * @returns {T} hash - */ - _getModuleHashInfo(module, hashes, runtime) { - if (!hashes) { - throw new Error( - `Module ${module.identifier()} has no hash info for runtime ${runtimeToString( - runtime - )} (hashes not set at all)` - ); - } else if (runtime === undefined) { - const hashInfoItems = new Set(hashes.values()); - if (hashInfoItems.size !== 1) { - throw new Error( - `No unique hash info entry for unspecified runtime for ${module.identifier()} (existing runtimes: ${Array.from( - hashes.keys(), - r => runtimeToString(r) - ).join(", ")}). -Caller might not support runtime-dependent code generation (opt-out via optimization.usedExports: "global").` - ); - } - return first(hashInfoItems); - } else { - const hashInfo = hashes.get(runtime); - if (!hashInfo) { - throw new Error( - `Module ${module.identifier()} has no hash info for runtime ${runtimeToString( - runtime - )} (available runtimes ${Array.from( - hashes.keys(), - runtimeToString - ).join(", ")})` - ); - } - return hashInfo; - } - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @returns {boolean} true, if the module has hashes for this runtime - */ - hasModuleHashes(module, runtime) { - const cgm = this._getChunkGraphModule(module); - const hashes = cgm.hashes; - return hashes && hashes.has(runtime); - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @returns {string} hash - */ - getModuleHash(module, runtime) { - const cgm = this._getChunkGraphModule(module); - const hashes = cgm.hashes; - return this._getModuleHashInfo(module, hashes, runtime).hash; - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @returns {string} hash - */ - getRenderedModuleHash(module, runtime) { - const cgm = this._getChunkGraphModule(module); - const hashes = cgm.hashes; - return this._getModuleHashInfo(module, hashes, runtime).renderedHash; - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @param {string} hash the full hash - * @param {string} renderedHash the shortened hash for rendering - * @returns {void} - */ - setModuleHashes(module, runtime, hash, renderedHash) { - const cgm = this._getChunkGraphModule(module); - if (cgm.hashes === undefined) { - cgm.hashes = new RuntimeSpecMap(); - } - cgm.hashes.set(runtime, new ModuleHashInfo(hash, renderedHash)); - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @param {Set} items runtime requirements to be added (ownership of this Set is given to ChunkGraph when transferOwnership not false) - * @param {boolean} transferOwnership true: transfer ownership of the items object, false: items is immutable and shared and won't be modified - * @returns {void} - */ - addModuleRuntimeRequirements( - module, - runtime, - items, - transferOwnership = true - ) { - const cgm = this._getChunkGraphModule(module); - const runtimeRequirementsMap = cgm.runtimeRequirements; - if (runtimeRequirementsMap === undefined) { - const map = new RuntimeSpecMap(); - // TODO avoid cloning 
item and track ownership instead - map.set(runtime, transferOwnership ? items : new Set(items)); - cgm.runtimeRequirements = map; - return; - } - runtimeRequirementsMap.update(runtime, runtimeRequirements => { - if (runtimeRequirements === undefined) { - return transferOwnership ? items : new Set(items); - } else if (!transferOwnership || runtimeRequirements.size >= items.size) { - for (const item of items) runtimeRequirements.add(item); - return runtimeRequirements; - } else { - for (const item of runtimeRequirements) items.add(item); - return items; - } - }); - } - - /** - * @param {Chunk} chunk the chunk - * @param {Set} items runtime requirements to be added (ownership of this Set is given to ChunkGraph) - * @returns {void} - */ - addChunkRuntimeRequirements(chunk, items) { - const cgc = this._getChunkGraphChunk(chunk); - const runtimeRequirements = cgc.runtimeRequirements; - if (runtimeRequirements === undefined) { - cgc.runtimeRequirements = items; - } else if (runtimeRequirements.size >= items.size) { - for (const item of items) runtimeRequirements.add(item); - } else { - for (const item of runtimeRequirements) items.add(item); - cgc.runtimeRequirements = items; - } - } - - /** - * @param {Chunk} chunk the chunk - * @param {Iterable} items runtime requirements to be added - * @returns {void} - */ - addTreeRuntimeRequirements(chunk, items) { - const cgc = this._getChunkGraphChunk(chunk); - const runtimeRequirements = cgc.runtimeRequirementsInTree; - for (const item of items) runtimeRequirements.add(item); - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @returns {ReadonlySet} runtime requirements - */ - getModuleRuntimeRequirements(module, runtime) { - const cgm = this._getChunkGraphModule(module); - const runtimeRequirements = - cgm.runtimeRequirements && cgm.runtimeRequirements.get(runtime); - return runtimeRequirements === undefined ? EMPTY_SET : runtimeRequirements; - } - - /** - * @param {Chunk} chunk the chunk - * @returns {ReadonlySet} runtime requirements - */ - getChunkRuntimeRequirements(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - const runtimeRequirements = cgc.runtimeRequirements; - return runtimeRequirements === undefined ? EMPTY_SET : runtimeRequirements; - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @param {boolean} withConnections include connections - * @returns {string} hash - */ - getModuleGraphHash(module, runtime, withConnections = true) { - const cgm = this._getChunkGraphModule(module); - return withConnections - ? this._getModuleGraphHashWithConnections(cgm, module, runtime) - : this._getModuleGraphHashBigInt(cgm, module, runtime).toString(16); - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @param {boolean} withConnections include connections - * @returns {bigint} hash - */ - getModuleGraphHashBigInt(module, runtime, withConnections = true) { - const cgm = this._getChunkGraphModule(module); - return withConnections - ? 
BigInt( - `0x${this._getModuleGraphHashWithConnections(cgm, module, runtime)}` - ) - : this._getModuleGraphHashBigInt(cgm, module, runtime); - } - - /** - * @param {ChunkGraphModule} cgm the ChunkGraphModule - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @returns {bigint} hash as big int - */ - _getModuleGraphHashBigInt(cgm, module, runtime) { - if (cgm.graphHashes === undefined) { - cgm.graphHashes = new RuntimeSpecMap(); - } - const graphHash = cgm.graphHashes.provide(runtime, () => { - const hash = createHash(this._hashFunction); - hash.update(`${cgm.id}${this.moduleGraph.isAsync(module)}`); - this.moduleGraph.getExportsInfo(module).updateHash(hash, runtime); - return BigInt(`0x${/** @type {string} */ (hash.digest("hex"))}`); - }); - return graphHash; - } - - /** - * @param {ChunkGraphModule} cgm the ChunkGraphModule - * @param {Module} module the module - * @param {RuntimeSpec} runtime the runtime - * @returns {string} hash - */ - _getModuleGraphHashWithConnections(cgm, module, runtime) { - if (cgm.graphHashesWithConnections === undefined) { - cgm.graphHashesWithConnections = new RuntimeSpecMap(); - } - const activeStateToString = state => { - if (state === false) return "F"; - if (state === true) return "T"; - if (state === ModuleGraphConnection.TRANSITIVE_ONLY) return "O"; - throw new Error("Not implemented active state"); - }; - const strict = module.buildMeta && module.buildMeta.strictHarmonyModule; - return cgm.graphHashesWithConnections.provide(runtime, () => { - const graphHash = this._getModuleGraphHashBigInt( - cgm, - module, - runtime - ).toString(16); - const connections = this.moduleGraph.getOutgoingConnections(module); - /** @type {Set} */ - const activeNamespaceModules = new Set(); - /** @type {Map>} */ - const connectedModules = new Map(); - const processConnection = (connection, stateInfo) => { - const module = connection.module; - stateInfo += module.getExportsType(this.moduleGraph, strict); - // cspell:word Tnamespace - if (stateInfo === "Tnamespace") activeNamespaceModules.add(module); - else { - const oldModule = connectedModules.get(stateInfo); - if (oldModule === undefined) { - connectedModules.set(stateInfo, module); - } else if (oldModule instanceof Set) { - oldModule.add(module); - } else if (oldModule !== module) { - connectedModules.set(stateInfo, new Set([oldModule, module])); - } - } - }; - if (runtime === undefined || typeof runtime === "string") { - for (const connection of connections) { - const state = connection.getActiveState(runtime); - if (state === false) continue; - processConnection(connection, state === true ? "T" : "O"); - } - } else { - // cspell:word Tnamespace - for (const connection of connections) { - const states = new Set(); - let stateInfo = ""; - forEachRuntime( - runtime, - runtime => { - const state = connection.getActiveState(runtime); - states.add(state); - stateInfo += activeStateToString(state) + runtime; - }, - true - ); - if (states.size === 1) { - const state = first(states); - if (state === false) continue; - stateInfo = activeStateToString(state); - } - processConnection(connection, stateInfo); - } - } - // cspell:word Tnamespace - if (activeNamespaceModules.size === 0 && connectedModules.size === 0) - return graphHash; - const connectedModulesInOrder = - connectedModules.size > 1 - ? Array.from(connectedModules).sort(([a], [b]) => (a < b ? 
-1 : 1)) - : connectedModules; - const hash = createHash(this._hashFunction); - const addModuleToHash = module => { - hash.update( - this._getModuleGraphHashBigInt( - this._getChunkGraphModule(module), - module, - runtime - ).toString(16) - ); - }; - const addModulesToHash = modules => { - let xor = ZERO_BIG_INT; - for (const m of modules) { - xor = - xor ^ - this._getModuleGraphHashBigInt( - this._getChunkGraphModule(m), - m, - runtime - ); - } - hash.update(xor.toString(16)); - }; - if (activeNamespaceModules.size === 1) - addModuleToHash(activeNamespaceModules.values().next().value); - else if (activeNamespaceModules.size > 1) - addModulesToHash(activeNamespaceModules); - for (const [stateInfo, modules] of connectedModulesInOrder) { - hash.update(stateInfo); - if (modules instanceof Set) { - addModulesToHash(modules); - } else { - addModuleToHash(modules); - } - } - hash.update(graphHash); - return /** @type {string} */ (hash.digest("hex")); - }); - } - - /** - * @param {Chunk} chunk the chunk - * @returns {ReadonlySet} runtime requirements - */ - getTreeRuntimeRequirements(chunk) { - const cgc = this._getChunkGraphChunk(chunk); - return cgc.runtimeRequirementsInTree; - } - - // TODO remove in webpack 6 - /** - * @param {Module} module the module - * @param {string} deprecateMessage message for the deprecation message - * @param {string} deprecationCode code for the deprecation - * @returns {ChunkGraph} the chunk graph - */ - static getChunkGraphForModule(module, deprecateMessage, deprecationCode) { - const fn = deprecateGetChunkGraphForModuleMap.get(deprecateMessage); - if (fn) return fn(module); - const newFn = util.deprecate( - /** - * @param {Module} module the module - * @returns {ChunkGraph} the chunk graph - */ - module => { - const chunkGraph = chunkGraphForModuleMap.get(module); - if (!chunkGraph) - throw new Error( - deprecateMessage + - ": There was no ChunkGraph assigned to the Module for backward-compat (Use the new API)" - ); - return chunkGraph; - }, - deprecateMessage + ": Use new ChunkGraph API", - deprecationCode - ); - deprecateGetChunkGraphForModuleMap.set(deprecateMessage, newFn); - return newFn(module); - } - - // TODO remove in webpack 6 - /** - * @param {Module} module the module - * @param {ChunkGraph} chunkGraph the chunk graph - * @returns {void} - */ - static setChunkGraphForModule(module, chunkGraph) { - chunkGraphForModuleMap.set(module, chunkGraph); - } - - // TODO remove in webpack 6 - /** - * @param {Module} module the module - * @returns {void} - */ - static clearChunkGraphForModule(module) { - chunkGraphForModuleMap.delete(module); - } - - // TODO remove in webpack 6 - /** - * @param {Chunk} chunk the chunk - * @param {string} deprecateMessage message for the deprecation message - * @param {string} deprecationCode code for the deprecation - * @returns {ChunkGraph} the chunk graph - */ - static getChunkGraphForChunk(chunk, deprecateMessage, deprecationCode) { - const fn = deprecateGetChunkGraphForChunkMap.get(deprecateMessage); - if (fn) return fn(chunk); - const newFn = util.deprecate( - /** - * @param {Chunk} chunk the chunk - * @returns {ChunkGraph} the chunk graph - */ - chunk => { - const chunkGraph = chunkGraphForChunkMap.get(chunk); - if (!chunkGraph) - throw new Error( - deprecateMessage + - "There was no ChunkGraph assigned to the Chunk for backward-compat (Use the new API)" - ); - return chunkGraph; - }, - deprecateMessage + ": Use new ChunkGraph API", - deprecationCode - ); - deprecateGetChunkGraphForChunkMap.set(deprecateMessage, newFn); - 
return newFn(chunk); - } - - // TODO remove in webpack 6 - /** - * @param {Chunk} chunk the chunk - * @param {ChunkGraph} chunkGraph the chunk graph - * @returns {void} - */ - static setChunkGraphForChunk(chunk, chunkGraph) { - chunkGraphForChunkMap.set(chunk, chunkGraph); - } - - // TODO remove in webpack 6 - /** - * @param {Chunk} chunk the chunk - * @returns {void} - */ - static clearChunkGraphForChunk(chunk) { - chunkGraphForChunkMap.delete(chunk); - } -} - -// TODO remove in webpack 6 -/** @type {WeakMap} */ -const chunkGraphForModuleMap = new WeakMap(); - -// TODO remove in webpack 6 -/** @type {WeakMap} */ -const chunkGraphForChunkMap = new WeakMap(); - -// TODO remove in webpack 6 -/** @type {Map ChunkGraph>} */ -const deprecateGetChunkGraphForModuleMap = new Map(); - -// TODO remove in webpack 6 -/** @type {Map ChunkGraph>} */ -const deprecateGetChunkGraphForChunkMap = new Map(); - -module.exports = ChunkGraph; - - -/***/ }), - -/***/ 96975: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const util = __webpack_require__(31669); -const SortableSet = __webpack_require__(67563); -const { - compareLocations, - compareChunks, - compareIterables -} = __webpack_require__(26296); - -/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */ -/** @typedef {import("./Chunk")} Chunk */ -/** @typedef {import("./ChunkGraph")} ChunkGraph */ -/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ -/** @typedef {import("./Entrypoint")} Entrypoint */ -/** @typedef {import("./Module")} Module */ -/** @typedef {import("./ModuleGraph")} ModuleGraph */ - -/** @typedef {{id: number}} HasId */ -/** @typedef {{module: Module, loc: DependencyLocation, request: string}} OriginRecord */ - -/** - * @typedef {Object} RawChunkGroupOptions - * @property {number=} preloadOrder - * @property {number=} prefetchOrder - */ - -/** @typedef {RawChunkGroupOptions & { name?: string }} ChunkGroupOptions */ - -let debugId = 5000; - -/** - * @template T - * @param {SortableSet} set set to convert to array. - * @returns {T[]} the array format of existing set - */ -const getArray = set => Array.from(set); - -/** - * A convenience method used to sort chunks based on their id's - * @param {ChunkGroup} a first sorting comparator - * @param {ChunkGroup} b second sorting comparator - * @returns {1|0|-1} a sorting index to determine order - */ -const sortById = (a, b) => { - if (a.id < b.id) return -1; - if (b.id < a.id) return 1; - return 0; -}; - -/** - * @param {OriginRecord} a the first comparator in sort - * @param {OriginRecord} b the second comparator in sort - * @returns {1|-1|0} returns sorting order as index - */ -const sortOrigin = (a, b) => { - const aIdent = a.module ? a.module.identifier() : ""; - const bIdent = b.module ? b.module.identifier() : ""; - if (aIdent < bIdent) return -1; - if (aIdent > bIdent) return 1; - return compareLocations(a.loc, b.loc); -}; - -class ChunkGroup { - /** - * Creates an instance of ChunkGroup. 
- * @param {string|ChunkGroupOptions=} options chunk group options passed to chunkGroup - */ - constructor(options) { - if (typeof options === "string") { - options = { name: options }; - } else if (!options) { - options = { name: undefined }; - } - /** @type {number} */ - this.groupDebugId = debugId++; - this.options = options; - /** @type {SortableSet} */ - this._children = new SortableSet(undefined, sortById); - /** @type {SortableSet} */ - this._parents = new SortableSet(undefined, sortById); - /** @type {SortableSet} */ - this._asyncEntrypoints = new SortableSet(undefined, sortById); - this._blocks = new SortableSet(); - /** @type {Chunk[]} */ - this.chunks = []; - /** @type {OriginRecord[]} */ - this.origins = []; - /** Indices in top-down order */ - /** @private @type {Map} */ - this._modulePreOrderIndices = new Map(); - /** Indices in bottom-up order */ - /** @private @type {Map} */ - this._modulePostOrderIndices = new Map(); - /** @type {number} */ - this.index = undefined; - } - - /** - * when a new chunk is added to a chunkGroup, addingOptions will occur. - * @param {ChunkGroupOptions} options the chunkGroup options passed to addOptions - * @returns {void} - */ - addOptions(options) { - for (const key of Object.keys(options)) { - if (this.options[key] === undefined) { - this.options[key] = options[key]; - } else if (this.options[key] !== options[key]) { - if (key.endsWith("Order")) { - this.options[key] = Math.max(this.options[key], options[key]); - } else { - throw new Error( - `ChunkGroup.addOptions: No option merge strategy for ${key}` - ); - } - } - } - } - - /** - * returns the name of current ChunkGroup - * @returns {string|undefined} returns the ChunkGroup name - */ - get name() { - return this.options.name; - } - - /** - * sets a new name for current ChunkGroup - * @param {string} value the new name for ChunkGroup - * @returns {void} - */ - set name(value) { - this.options.name = value; - } - - /* istanbul ignore next */ - /** - * get a uniqueId for ChunkGroup, made up of its member Chunk debugId's - * @returns {string} a unique concatenation of chunk debugId's - */ - get debugId() { - return Array.from(this.chunks, x => x.debugId).join("+"); - } - - /** - * get a unique id for ChunkGroup, made up of its member Chunk id's - * @returns {string} a unique concatenation of chunk ids - */ - get id() { - return Array.from(this.chunks, x => x.id).join("+"); - } - - /** - * Performs an unshift of a specific chunk - * @param {Chunk} chunk chunk being unshifted - * @returns {boolean} returns true if attempted chunk shift is accepted - */ - unshiftChunk(chunk) { - const oldIdx = this.chunks.indexOf(chunk); - if (oldIdx > 0) { - this.chunks.splice(oldIdx, 1); - this.chunks.unshift(chunk); - } else if (oldIdx < 0) { - this.chunks.unshift(chunk); - return true; - } - return false; - } - - /** - * inserts a chunk before another existing chunk in group - * @param {Chunk} chunk Chunk being inserted - * @param {Chunk} before Placeholder/target chunk marking new chunk insertion point - * @returns {boolean} return true if insertion was successful - */ - insertChunk(chunk, before) { - const oldIdx = this.chunks.indexOf(chunk); - const idx = this.chunks.indexOf(before); - if (idx < 0) { - throw new Error("before chunk not found"); - } - if (oldIdx >= 0 && oldIdx > idx) { - this.chunks.splice(oldIdx, 1); - this.chunks.splice(idx, 0, chunk); - } else if (oldIdx < 0) { - this.chunks.splice(idx, 0, chunk); - return true; - } - return false; - } - - /** - * add a chunk into ChunkGroup. 
Is pushed on or prepended - * @param {Chunk} chunk chunk being pushed into ChunkGroupS - * @returns {boolean} returns true if chunk addition was successful. - */ - pushChunk(chunk) { - const oldIdx = this.chunks.indexOf(chunk); - if (oldIdx >= 0) { - return false; - } - this.chunks.push(chunk); - return true; - } - - /** - * @param {Chunk} oldChunk chunk to be replaced - * @param {Chunk} newChunk New chunk that will be replaced with - * @returns {boolean} returns true if the replacement was successful - */ - replaceChunk(oldChunk, newChunk) { - const oldIdx = this.chunks.indexOf(oldChunk); - if (oldIdx < 0) return false; - const newIdx = this.chunks.indexOf(newChunk); - if (newIdx < 0) { - this.chunks[oldIdx] = newChunk; - return true; - } - if (newIdx < oldIdx) { - this.chunks.splice(oldIdx, 1); - return true; - } else if (newIdx !== oldIdx) { - this.chunks[oldIdx] = newChunk; - this.chunks.splice(newIdx, 1); - return true; - } - } - - /** - * @param {Chunk} chunk chunk to remove - * @returns {boolean} returns true if chunk was removed - */ - removeChunk(chunk) { - const idx = this.chunks.indexOf(chunk); - if (idx >= 0) { - this.chunks.splice(idx, 1); - return true; - } - return false; - } - - /** - * @returns {boolean} true, when this chunk group will be loaded on initial page load - */ - isInitial() { - return false; - } - - /** - * @param {ChunkGroup} group chunk group to add - * @returns {boolean} returns true if chunk group was added - */ - addChild(group) { - const size = this._children.size; - this._children.add(group); - return size !== this._children.size; - } - - /** - * @returns {ChunkGroup[]} returns the children of this group - */ - getChildren() { - return this._children.getFromCache(getArray); - } - - getNumberOfChildren() { - return this._children.size; - } - - get childrenIterable() { - return this._children; - } - - /** - * @param {ChunkGroup} group the chunk group to remove - * @returns {boolean} returns true if the chunk group was removed - */ - removeChild(group) { - if (!this._children.has(group)) { - return false; - } - - this._children.delete(group); - group.removeParent(this); - return true; - } - - /** - * @param {ChunkGroup} parentChunk the parent group to be added into - * @returns {boolean} returns true if this chunk group was added to the parent group - */ - addParent(parentChunk) { - if (!this._parents.has(parentChunk)) { - this._parents.add(parentChunk); - return true; - } - return false; - } - - /** - * @returns {ChunkGroup[]} returns the parents of this group - */ - getParents() { - return this._parents.getFromCache(getArray); - } - - getNumberOfParents() { - return this._parents.size; - } - - /** - * @param {ChunkGroup} parent the parent group - * @returns {boolean} returns true if the parent group contains this group - */ - hasParent(parent) { - return this._parents.has(parent); - } - - get parentsIterable() { - return this._parents; - } - - /** - * @param {ChunkGroup} chunkGroup the parent group - * @returns {boolean} returns true if this group has been removed from the parent - */ - removeParent(chunkGroup) { - if (this._parents.delete(chunkGroup)) { - chunkGroup.removeChild(this); - return true; - } - return false; - } - - /** - * @param {Entrypoint} entrypoint entrypoint to add - * @returns {boolean} returns true if entrypoint was added - */ - addAsyncEntrypoint(entrypoint) { - const size = this._asyncEntrypoints.size; - this._asyncEntrypoints.add(entrypoint); - return size !== this._asyncEntrypoints.size; - } - - get asyncEntrypointsIterable() { - 
return this._asyncEntrypoints; - } - - /** - * @returns {Array} an array containing the blocks - */ - getBlocks() { - return this._blocks.getFromCache(getArray); - } - - getNumberOfBlocks() { - return this._blocks.size; - } - - hasBlock(block) { - return this._blocks.has(block); - } - - /** - * @returns {Iterable} blocks - */ - get blocksIterable() { - return this._blocks; - } - - /** - * @param {AsyncDependenciesBlock} block a block - * @returns {boolean} false, if block was already added - */ - addBlock(block) { - if (!this._blocks.has(block)) { - this._blocks.add(block); - return true; - } - return false; - } - - /** - * @param {Module} module origin module - * @param {DependencyLocation} loc location of the reference in the origin module - * @param {string} request request name of the reference - * @returns {void} - */ - addOrigin(module, loc, request) { - this.origins.push({ - module, - loc, - request - }); - } - - /** - * @returns {string[]} the files contained this chunk group - */ - getFiles() { - const files = new Set(); - - for (const chunk of this.chunks) { - for (const file of chunk.files) { - files.add(file); - } - } - - return Array.from(files); - } - - /** - * @returns {void} - */ - remove() { - // cleanup parents - for (const parentChunkGroup of this._parents) { - // remove this chunk from its parents - parentChunkGroup._children.delete(this); - - // cleanup "sub chunks" - for (const chunkGroup of this._children) { - /** - * remove this chunk as "intermediary" and connect - * it "sub chunks" and parents directly - */ - // add parent to each "sub chunk" - chunkGroup.addParent(parentChunkGroup); - // add "sub chunk" to parent - parentChunkGroup.addChild(chunkGroup); - } - } - - /** - * we need to iterate again over the children - * to remove this from the child's parents. - * This can not be done in the above loop - * as it is not guaranteed that `this._parents` contains anything. - */ - for (const chunkGroup of this._children) { - // remove this as parent of every "sub chunk" - chunkGroup._parents.delete(this); - } - - // remove chunks - for (const chunk of this.chunks) { - chunk.removeGroup(this); - } - } - - sortItems() { - this.origins.sort(sortOrigin); - } - - /** - * Sorting predicate which allows current ChunkGroup to be compared against another. - * Sorting values are based off of number of chunks in ChunkGroup. 
- * - * @param {ChunkGraph} chunkGraph the chunk graph - * @param {ChunkGroup} otherGroup the chunkGroup to compare this against - * @returns {-1|0|1} sort position for comparison - */ - compareTo(chunkGraph, otherGroup) { - if (this.chunks.length > otherGroup.chunks.length) return -1; - if (this.chunks.length < otherGroup.chunks.length) return 1; - return compareIterables(compareChunks(chunkGraph))( - this.chunks, - otherGroup.chunks - ); - } - - /** - * @param {ModuleGraph} moduleGraph the module graph - * @param {ChunkGraph} chunkGraph the chunk graph - * @returns {Record} mapping from children type to ordered list of ChunkGroups - */ - getChildrenByOrders(moduleGraph, chunkGraph) { - /** @type {Map} */ - const lists = new Map(); - for (const childGroup of this._children) { - for (const key of Object.keys(childGroup.options)) { - if (key.endsWith("Order")) { - const name = key.substr(0, key.length - "Order".length); - let list = lists.get(name); - if (list === undefined) { - lists.set(name, (list = [])); - } - list.push({ - order: childGroup.options[key], - group: childGroup - }); - } - } - } - /** @type {Record} */ - const result = Object.create(null); - for (const [name, list] of lists) { - list.sort((a, b) => { - const cmp = b.order - a.order; - if (cmp !== 0) return cmp; - return a.group.compareTo(chunkGraph, b.group); - }); - result[name] = list.map(i => i.group); - } - return result; - } - - /** - * Sets the top-down index of a module in this ChunkGroup - * @param {Module} module module for which the index should be set - * @param {number} index the index of the module - * @returns {void} - */ - setModulePreOrderIndex(module, index) { - this._modulePreOrderIndices.set(module, index); - } - - /** - * Gets the top-down index of a module in this ChunkGroup - * @param {Module} module the module - * @returns {number} index - */ - getModulePreOrderIndex(module) { - return this._modulePreOrderIndices.get(module); - } - - /** - * Sets the bottom-up index of a module in this ChunkGroup - * @param {Module} module module for which the index should be set - * @param {number} index the index of the module - * @returns {void} - */ - setModulePostOrderIndex(module, index) { - this._modulePostOrderIndices.set(module, index); - } - - /** - * Gets the bottom-up index of a module in this ChunkGroup - * @param {Module} module the module - * @returns {number} index - */ - getModulePostOrderIndex(module) { - return this._modulePostOrderIndices.get(module); - } - - /* istanbul ignore next */ - checkConstraints() { - const chunk = this; - for (const child of chunk._children) { - if (!child._parents.has(chunk)) { - throw new Error( - `checkConstraints: child missing parent ${chunk.debugId} -> ${child.debugId}` - ); - } - } - for (const parentChunk of chunk._parents) { - if (!parentChunk._children.has(chunk)) { - throw new Error( - `checkConstraints: parent missing child ${parentChunk.debugId} <- ${chunk.debugId}` - ); - } - } - } -} - -ChunkGroup.prototype.getModuleIndex = util.deprecate( - ChunkGroup.prototype.getModulePreOrderIndex, - "ChunkGroup.getModuleIndex was renamed to getModulePreOrderIndex", - "DEP_WEBPACK_CHUNK_GROUP_GET_MODULE_INDEX" -); - -ChunkGroup.prototype.getModuleIndex2 = util.deprecate( - ChunkGroup.prototype.getModulePostOrderIndex, - "ChunkGroup.getModuleIndex2 was renamed to getModulePostOrderIndex", - "DEP_WEBPACK_CHUNK_GROUP_GET_MODULE_INDEX_2" -); - -module.exports = ChunkGroup; - - -/***/ }), - -/***/ 80187: -/***/ (function(module, __unused_webpack_exports, 
__webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const WebpackError = __webpack_require__(68422); - -/** @typedef {import("./Chunk")} Chunk */ - -class ChunkRenderError extends WebpackError { - /** - * Create a new ChunkRenderError - * @param {Chunk} chunk A chunk - * @param {string} file Related file - * @param {Error} error Original error - */ - constructor(chunk, file, error) { - super(); - - this.name = "ChunkRenderError"; - this.error = error; - this.message = error.message; - this.details = error.stack; - this.file = file; - this.chunk = chunk; - } -} - -module.exports = ChunkRenderError; - - -/***/ }), - -/***/ 50527: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const util = __webpack_require__(31669); -const memoize = __webpack_require__(84297); - -/** @typedef {import("../declarations/WebpackOptions").Output} OutputOptions */ -/** @typedef {import("./Compilation")} Compilation */ - -const getJavascriptModulesPlugin = memoize(() => - __webpack_require__(76767) -); - -// TODO webpack 6 remove this class -class ChunkTemplate { - /** - * @param {OutputOptions} outputOptions output options - * @param {Compilation} compilation the compilation - */ - constructor(outputOptions, compilation) { - this._outputOptions = outputOptions || {}; - this.hooks = Object.freeze({ - renderManifest: { - tap: util.deprecate( - (options, fn) => { - compilation.hooks.renderManifest.tap( - options, - (entries, options) => { - if (options.chunk.hasRuntime()) return entries; - return fn(entries, options); - } - ); - }, - "ChunkTemplate.hooks.renderManifest is deprecated (use Compilation.hooks.renderManifest instead)", - "DEP_WEBPACK_CHUNK_TEMPLATE_RENDER_MANIFEST" - ) - }, - modules: { - tap: util.deprecate( - (options, fn) => { - getJavascriptModulesPlugin() - .getCompilationHooks(compilation) - .renderChunk.tap(options, (source, renderContext) => - fn( - source, - compilation.moduleTemplates.javascript, - renderContext - ) - ); - }, - "ChunkTemplate.hooks.modules is deprecated (use JavascriptModulesPlugin.getCompilationHooks().renderChunk instead)", - "DEP_WEBPACK_CHUNK_TEMPLATE_MODULES" - ) - }, - render: { - tap: util.deprecate( - (options, fn) => { - getJavascriptModulesPlugin() - .getCompilationHooks(compilation) - .renderChunk.tap(options, (source, renderContext) => - fn( - source, - compilation.moduleTemplates.javascript, - renderContext - ) - ); - }, - "ChunkTemplate.hooks.render is deprecated (use JavascriptModulesPlugin.getCompilationHooks().renderChunk instead)", - "DEP_WEBPACK_CHUNK_TEMPLATE_RENDER" - ) - }, - renderWithEntry: { - tap: util.deprecate( - (options, fn) => { - getJavascriptModulesPlugin() - .getCompilationHooks(compilation) - .render.tap(options, (source, renderContext) => { - if ( - renderContext.chunkGraph.getNumberOfEntryModules( - renderContext.chunk - ) === 0 || - renderContext.chunk.hasRuntime() - ) { - return source; - } - return fn(source, renderContext.chunk); - }); - }, - "ChunkTemplate.hooks.renderWithEntry is deprecated (use JavascriptModulesPlugin.getCompilationHooks().render instead)", - "DEP_WEBPACK_CHUNK_TEMPLATE_RENDER_WITH_ENTRY" - ) - }, - hash: { - tap: util.deprecate( - (options, fn) => { - compilation.hooks.fullHash.tap(options, fn); - }, - "ChunkTemplate.hooks.hash is deprecated (use 
Compilation.hooks.fullHash instead)", - "DEP_WEBPACK_CHUNK_TEMPLATE_HASH" - ) - }, - hashForChunk: { - tap: util.deprecate( - (options, fn) => { - getJavascriptModulesPlugin() - .getCompilationHooks(compilation) - .chunkHash.tap(options, (chunk, hash, context) => { - if (chunk.hasRuntime()) return; - fn(hash, chunk, context); - }); - }, - "ChunkTemplate.hooks.hashForChunk is deprecated (use JavascriptModulesPlugin.getCompilationHooks().chunkHash instead)", - "DEP_WEBPACK_CHUNK_TEMPLATE_HASH_FOR_CHUNK" - ) - } - }); - } -} - -Object.defineProperty(ChunkTemplate.prototype, "outputOptions", { - get: util.deprecate( - /** - * @this {ChunkTemplate} - * @returns {OutputOptions} output options - */ - function () { - return this._outputOptions; - }, - "ChunkTemplate.outputOptions is deprecated (use Compilation.outputOptions instead)", - "DEP_WEBPACK_CHUNK_TEMPLATE_OUTPUT_OPTIONS" - ) -}); - -module.exports = ChunkTemplate; - - -/***/ }), - -/***/ 49129: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Sergey Melyukov @smelukov -*/ - - - -const asyncLib = __webpack_require__(36386); -const { SyncBailHook } = __webpack_require__(34718); -const Compilation = __webpack_require__(59622); -const createSchemaValidation = __webpack_require__(77695); -const { join } = __webpack_require__(93204); -const processAsyncTree = __webpack_require__(76815); - -/** @typedef {import("../declarations/WebpackOptions").CleanOptions} CleanOptions */ -/** @typedef {import("./Compiler")} Compiler */ -/** @typedef {import("./logging/Logger").Logger} Logger */ -/** @typedef {import("./util/fs").OutputFileSystem} OutputFileSystem */ -/** @typedef {import("./util/fs").StatsCallback} StatsCallback */ - -/** @typedef {(function(string):boolean)|RegExp} IgnoreItem */ -/** @typedef {function(IgnoreItem): void} AddToIgnoreCallback */ - -/** - * @typedef {Object} CleanPluginCompilationHooks - * @property {SyncBailHook<[string], boolean>} keep when returning true the file/directory will be kept during cleaning, returning false will clean it and ignore the following plugins and config - */ - -const validate = createSchemaValidation( - undefined, - () => { - const { definitions } = __webpack_require__(1863); - return { - definitions, - oneOf: [{ $ref: "#/definitions/CleanOptions" }] - }; - }, - { - name: "Clean Plugin", - baseDataPath: "options" - } -); - -/** - * @param {OutputFileSystem} fs filesystem - * @param {string} outputPath output path - * @param {Set} currentAssets filename of the current assets (must not start with .. 
or ., must only use / as path separator) - * @param {function(Error=, Set=): void} callback returns the filenames of the assets that shouldn't be there - * @returns {void} - */ -const getDiffToFs = (fs, outputPath, currentAssets, callback) => { - const directories = new Set(); - // get directories of assets - for (const asset of currentAssets) { - directories.add(asset.replace(/(^|\/)[^/]*$/, "")); - } - // and all parent directories - for (const directory of directories) { - directories.add(directory.replace(/(^|\/)[^/]*$/, "")); - } - const diff = new Set(); - asyncLib.forEachLimit( - directories, - 10, - (directory, callback) => { - fs.readdir(join(fs, outputPath, directory), (err, entries) => { - if (err) { - if (err.code === "ENOENT") return callback(); - if (err.code === "ENOTDIR") { - diff.add(directory); - return callback(); - } - return callback(err); - } - for (const entry of entries) { - const file = /** @type {string} */ (entry); - const filename = directory ? `${directory}/${file}` : file; - if (!directories.has(filename) && !currentAssets.has(filename)) { - diff.add(filename); - } - } - callback(); - }); - }, - err => { - if (err) return callback(err); - - callback(null, diff); - } - ); -}; - -/** - * @param {Set} currentAssets assets list - * @param {Set} oldAssets old assets list - * @returns {Set} diff - */ -const getDiffToOldAssets = (currentAssets, oldAssets) => { - const diff = new Set(); - for (const asset of oldAssets) { - if (!currentAssets.has(asset)) diff.add(asset); - } - return diff; -}; - -/** - * @param {OutputFileSystem} fs filesystem - * @param {string} filename path to file - * @param {StatsCallback} callback callback for provided filename - * @returns {void} - */ -const doStat = (fs, filename, callback) => { - if ("lstat" in fs) { - fs.lstat(filename, callback); - } else { - fs.stat(filename, callback); - } -}; - -/** - * @param {OutputFileSystem} fs filesystem - * @param {string} outputPath output path - * @param {boolean} dry only log instead of fs modification - * @param {Logger} logger logger - * @param {Set} diff filenames of the assets that shouldn't be there - * @param {function(string): boolean} isKept check if the entry is ignored - * @param {function(Error=): void} callback callback - * @returns {void} - */ -const applyDiff = (fs, outputPath, dry, logger, diff, isKept, callback) => { - const log = msg => { - if (dry) { - logger.info(msg); - } else { - logger.log(msg); - } - }; - /** @typedef {{ type: "check" | "unlink" | "rmdir", filename: string, parent: { remaining: number, job: Job } | undefined }} Job */ - /** @type {Job[]} */ - const jobs = Array.from(diff, filename => ({ - type: "check", - filename, - parent: undefined - })); - processAsyncTree( - jobs, - 10, - ({ type, filename, parent }, push, callback) => { - const handleError = err => { - if (err.code === "ENOENT") { - log(`${filename} was removed during cleaning by something else`); - handleParent(); - return callback(); - } - return callback(err); - }; - const handleParent = () => { - if (parent && --parent.remaining === 0) push(parent.job); - }; - const path = join(fs, outputPath, filename); - switch (type) { - case "check": - if (isKept(filename)) { - // do not decrement parent entry as we don't want to delete the parent - log(`${filename} will be kept`); - return process.nextTick(callback); - } - doStat(fs, path, (err, stats) => { - if (err) return handleError(err); - if (!stats.isDirectory()) { - push({ - type: "unlink", - filename, - parent - }); - return callback(); - } - 
fs.readdir(path, (err, entries) => { - if (err) return handleError(err); - /** @type {Job} */ - const deleteJob = { - type: "rmdir", - filename, - parent - }; - if (entries.length === 0) { - push(deleteJob); - } else { - const parentToken = { - remaining: entries.length, - job: deleteJob - }; - for (const entry of entries) { - const file = /** @type {string} */ (entry); - if (file.startsWith(".")) { - log( - `${filename} will be kept (dot-files will never be removed)` - ); - continue; - } - push({ - type: "check", - filename: `${filename}/${file}`, - parent: parentToken - }); - } - } - return callback(); - }); - }); - break; - case "rmdir": - log(`${filename} will be removed`); - if (dry) { - handleParent(); - return process.nextTick(callback); - } - if (!fs.rmdir) { - logger.warn( - `${filename} can't be removed because output file system doesn't support removing directories (rmdir)` - ); - return process.nextTick(callback); - } - fs.rmdir(path, err => { - if (err) return handleError(err); - handleParent(); - callback(); - }); - break; - case "unlink": - log(`${filename} will be removed`); - if (dry) { - handleParent(); - return process.nextTick(callback); - } - if (!fs.unlink) { - logger.warn( - `${filename} can't be removed because output file system doesn't support removing files (rmdir)` - ); - return process.nextTick(callback); - } - fs.unlink(path, err => { - if (err) return handleError(err); - handleParent(); - callback(); - }); - break; - } - }, - callback - ); -}; - -/** @type {WeakMap} */ -const compilationHooksMap = new WeakMap(); - -class CleanPlugin { - /** - * @param {Compilation} compilation the compilation - * @returns {CleanPluginCompilationHooks} the attached hooks - */ - static getCompilationHooks(compilation) { - if (!(compilation instanceof Compilation)) { - throw new TypeError( - "The 'compilation' argument must be an instance of Compilation" - ); - } - let hooks = compilationHooksMap.get(compilation); - if (hooks === undefined) { - hooks = { - /** @type {SyncBailHook<[string], boolean>} */ - keep: new SyncBailHook(["ignore"]) - }; - compilationHooksMap.set(compilation, hooks); - } - return hooks; - } - - /** @param {CleanOptions} options options */ - constructor(options = {}) { - validate(options); - this.options = { dry: false, ...options }; - } - - /** - * Apply the plugin - * @param {Compiler} compiler the compiler instance - * @returns {void} - */ - apply(compiler) { - const { dry, keep } = this.options; - - const keepFn = - typeof keep === "function" - ? keep - : typeof keep === "string" - ? path => path.startsWith(keep) - : typeof keep === "object" && keep.test - ? 
path => keep.test(path) - : () => false; - - // We assume that no external modification happens while the compiler is active - // So we can store the old assets and only diff to them to avoid fs access on - // incremental builds - let oldAssets; - - compiler.hooks.emit.tapAsync( - { - name: "CleanPlugin", - stage: 100 - }, - (compilation, callback) => { - const hooks = CleanPlugin.getCompilationHooks(compilation); - const logger = compilation.getLogger("webpack.CleanPlugin"); - const fs = compiler.outputFileSystem; - - if (!fs.readdir) { - return callback( - new Error( - "CleanPlugin: Output filesystem doesn't support listing directories (readdir)" - ) - ); - } - - const currentAssets = new Set(); - for (const asset of Object.keys(compilation.assets)) { - if (/^[A-Za-z]:\\|^\/|^\\\\/.test(asset)) continue; - let normalizedAsset; - let newNormalizedAsset = asset.replace(/\\/g, "/"); - do { - normalizedAsset = newNormalizedAsset; - newNormalizedAsset = normalizedAsset.replace( - /(^|\/)(?!\.\.)[^/]+\/\.\.\//g, - "$1" - ); - } while (newNormalizedAsset !== normalizedAsset); - if (normalizedAsset.startsWith("../")) continue; - currentAssets.add(normalizedAsset); - } - - const outputPath = compilation.getPath(compiler.outputPath, {}); - - const isKept = path => { - const result = hooks.keep.call(path); - if (result !== undefined) return result; - return keepFn(path); - }; - - const diffCallback = (err, diff) => { - if (err) { - oldAssets = undefined; - return callback(err); - } - applyDiff(fs, outputPath, dry, logger, diff, isKept, err => { - if (err) { - oldAssets = undefined; - } else { - oldAssets = currentAssets; - } - callback(err); - }); - }; - - if (oldAssets) { - diffCallback(null, getDiffToOldAssets(currentAssets, oldAssets)); - } else { - getDiffToFs(fs, outputPath, currentAssets, diffCallback); - } - } - ); - } -} - -module.exports = CleanPlugin; - - -/***/ }), - -/***/ 43604: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const WebpackError = __webpack_require__(68422); - -/** @typedef {import("./Module")} Module */ - -class CodeGenerationError extends WebpackError { - /** - * Create a new CodeGenerationError - * @param {Module} module related module - * @param {Error} error Original error - */ - constructor(module, error) { - super(); - - this.name = "CodeGenerationError"; - this.error = error; - this.message = error.message; - this.details = error.stack; - this.module = module; - } -} - -module.exports = CodeGenerationError; - - -/***/ }), - -/***/ 74841: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const { provide } = __webpack_require__(30498); -const { first } = __webpack_require__(34715); -const createHash = __webpack_require__(24123); -const { runtimeToString, RuntimeSpecMap } = __webpack_require__(19655); - -/** @typedef {import("webpack-sources").Source} Source */ -/** @typedef {import("./Module")} Module */ -/** @typedef {import("./Module").CodeGenerationResult} CodeGenerationResult */ -/** @typedef {typeof import("./util/Hash")} Hash */ -/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */ - -class CodeGenerationResults { - /** - * @param {string | Hash} hashFunction the hash function to use - */ - constructor(hashFunction = "md4") { - /** @type {Map>} 
*/ - this.map = new Map(); - this._hashFunction = hashFunction; - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime runtime(s) - * @returns {CodeGenerationResult} the CodeGenerationResult - */ - get(module, runtime) { - const entry = this.map.get(module); - if (entry === undefined) { - throw new Error( - `No code generation entry for ${module.identifier()} (existing entries: ${Array.from( - this.map.keys(), - m => m.identifier() - ).join(", ")})` - ); - } - if (runtime === undefined) { - if (entry.size > 1) { - const results = new Set(entry.values()); - if (results.size !== 1) { - throw new Error( - `No unique code generation entry for unspecified runtime for ${module.identifier()} (existing runtimes: ${Array.from( - entry.keys(), - r => runtimeToString(r) - ).join(", ")}). -Caller might not support runtime-dependent code generation (opt-out via optimization.usedExports: "global").` - ); - } - return first(results); - } - return entry.values().next().value; - } - const result = entry.get(runtime); - if (result === undefined) { - throw new Error( - `No code generation entry for runtime ${runtimeToString( - runtime - )} for ${module.identifier()} (existing runtimes: ${Array.from( - entry.keys(), - r => runtimeToString(r) - ).join(", ")})` - ); - } - return result; - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime runtime(s) - * @returns {boolean} true, when we have data for this - */ - has(module, runtime) { - const entry = this.map.get(module); - if (entry === undefined) { - return false; - } - if (runtime !== undefined) { - return entry.has(runtime); - } else if (entry.size > 1) { - const results = new Set(entry.values()); - return results.size === 1; - } else { - return entry.size === 1; - } - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime runtime(s) - * @param {string} sourceType the source type - * @returns {Source} a source - */ - getSource(module, runtime, sourceType) { - return this.get(module, runtime).sources.get(sourceType); - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime runtime(s) - * @returns {ReadonlySet} runtime requirements - */ - getRuntimeRequirements(module, runtime) { - return this.get(module, runtime).runtimeRequirements; - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime runtime(s) - * @param {string} key data key - * @returns {any} data generated by code generation - */ - getData(module, runtime, key) { - const data = this.get(module, runtime).data; - return data === undefined ? 
undefined : data.get(key); - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime runtime(s) - * @returns {any} hash of the code generation - */ - getHash(module, runtime) { - const info = this.get(module, runtime); - if (info.hash !== undefined) return info.hash; - const hash = createHash(this._hashFunction); - for (const [type, source] of info.sources) { - hash.update(type); - source.updateHash(hash); - } - if (info.runtimeRequirements) { - for (const rr of info.runtimeRequirements) hash.update(rr); - } - return (info.hash = /** @type {string} */ (hash.digest("hex"))); - } - - /** - * @param {Module} module the module - * @param {RuntimeSpec} runtime runtime(s) - * @param {CodeGenerationResult} result result from module - * @returns {void} - */ - add(module, runtime, result) { - const map = provide(this.map, module, () => new RuntimeSpecMap()); - map.set(runtime, result); - } -} - -module.exports = CodeGenerationResults; - - -/***/ }), - -/***/ 28151: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const WebpackError = __webpack_require__(68422); -const makeSerializable = __webpack_require__(26522); - -/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */ - -class CommentCompilationWarning extends WebpackError { - /** - * - * @param {string} message warning message - * @param {DependencyLocation} loc affected lines of code - */ - constructor(message, loc) { - super(message); - - this.name = "CommentCompilationWarning"; - - this.loc = loc; - } -} - -makeSerializable( - CommentCompilationWarning, - "webpack/lib/CommentCompilationWarning" -); - -module.exports = CommentCompilationWarning; - - -/***/ }), - -/***/ 27847: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const ConstDependency = __webpack_require__(60864); - -/** @typedef {import("./Compiler")} Compiler */ -/** @typedef {import("./javascript/JavascriptParser")} JavascriptParser */ - -const nestedWebpackRequireTag = Symbol("nested __webpack_require__"); - -class CompatibilityPlugin { - /** - * Apply the plugin - * @param {Compiler} compiler the compiler instance + * Apply the plugin + * @param {Compiler} compiler the compiler instance * @returns {void} */ apply(compiler) { @@ -63809,20 +61961,7 @@ class IgnoringWatchFileSystem { fileTimestamps.set(path, IGNORE_TIME_ENTRY); } return fileTimestamps; - }, - getInfo: - watcher.getInfo && - (() => { - const info = watcher.getInfo(); - const { fileTimeInfoEntries, contextTimeInfoEntries } = info; - for (const path of ignoredFiles) { - fileTimeInfoEntries.set(path, IGNORE_TIME_ENTRY); - } - for (const path of ignoredDirs) { - contextTimeInfoEntries.set(path, IGNORE_TIME_ENTRY); - } - return info; - }) + } }; } } @@ -63971,44 +62110,30 @@ class Watching { this.lastWatcherStartTime = Date.now(); } this.compiler.fsStartTime = Date.now(); - if ( - changedFiles && - removedFiles && - fileTimeInfoEntries && - contextTimeInfoEntries - ) { - this._mergeWithCollected(changedFiles, removedFiles); - this.compiler.fileTimestamps = fileTimeInfoEntries; - this.compiler.contextTimestamps = contextTimeInfoEntries; - } else if (this.pausedWatcher) { - if (this.pausedWatcher.getInfo) { - const { - changes, - removals, - fileTimeInfoEntries, - 
contextTimeInfoEntries - } = this.pausedWatcher.getInfo(); - this._mergeWithCollected(changes, removals); - this.compiler.fileTimestamps = fileTimeInfoEntries; - this.compiler.contextTimestamps = contextTimeInfoEntries; - } else { - this._mergeWithCollected( + this._mergeWithCollected( + changedFiles || + (this.pausedWatcher && this.pausedWatcher.getAggregatedChanges && - this.pausedWatcher.getAggregatedChanges(), + this.pausedWatcher.getAggregatedChanges()), + (this.compiler.removedFiles = + removedFiles || + (this.pausedWatcher && this.pausedWatcher.getAggregatedRemovals && - this.pausedWatcher.getAggregatedRemovals() - ); - this.compiler.fileTimestamps = - this.pausedWatcher.getFileTimeInfoEntries(); - this.compiler.contextTimestamps = - this.pausedWatcher.getContextTimeInfoEntries(); - } - } + this.pausedWatcher.getAggregatedRemovals())) + ); + this.compiler.modifiedFiles = this._collectedChangedFiles; this._collectedChangedFiles = undefined; this.compiler.removedFiles = this._collectedRemovedFiles; this._collectedRemovedFiles = undefined; + this.compiler.fileTimestamps = + fileTimeInfoEntries || + (this.pausedWatcher && this.pausedWatcher.getFileTimeInfoEntries()); + this.compiler.contextTimestamps = + contextTimeInfoEntries || + (this.pausedWatcher && this.pausedWatcher.getContextTimeInfoEntries()); + const run = () => { if (this.compiler.idle) { return this.compiler.cache.endIdle(err => { @@ -71231,7 +69356,7 @@ const applySnapshotDefaults = (snapshot, { production, futureDefaults }) => { if (process.versions.pnp === "3") { const match = /^(.+?)[\\/]cache[\\/]watchpack-npm-[^\\/]+\.zip[\\/]node_modules[\\/]/.exec( - /*require.resolve*/(29149) + /*require.resolve*/(98342) ); if (match) { return [path.resolve(match[1], "unplugged")]; @@ -71239,7 +69364,7 @@ const applySnapshotDefaults = (snapshot, { production, futureDefaults }) => { } else { const match = /^(.+?[\\/]node_modules[\\/])/.exec( // eslint-disable-next-line node/no-extraneous-require - /*require.resolve*/(29149) + /*require.resolve*/(98342) ); if (match) { return [match[1]]; @@ -71251,7 +69376,7 @@ const applySnapshotDefaults = (snapshot, { production, futureDefaults }) => { if (process.versions.pnp === "1") { const match = /^(.+?[\\/]v4)[\\/]npm-watchpack-[^\\/]+-[\da-f]{40}[\\/]node_modules[\\/]/.exec( - /*require.resolve*/(29149) + /*require.resolve*/(98342) ); if (match) { return [match[1]]; @@ -71259,7 +69384,7 @@ const applySnapshotDefaults = (snapshot, { production, futureDefaults }) => { } else if (process.versions.pnp === "3") { const match = /^(.+?)[\\/]watchpack-npm-[^\\/]+\.zip[\\/]node_modules[\\/]/.exec( - /*require.resolve*/(29149) + /*require.resolve*/(98342) ); if (match) { return [match[1]]; @@ -73034,7 +71159,6 @@ const AsyncDependenciesBlock = __webpack_require__(11315); const Module = __webpack_require__(85887); const RuntimeGlobals = __webpack_require__(49404); const Template = __webpack_require__(92066); -const StaticExportsDependency = __webpack_require__(67967); const makeSerializable = __webpack_require__(26522); const ContainerExposedDependency = __webpack_require__(50); @@ -73129,7 +71253,6 @@ class ContainerEntryModule extends Module { strict: true, topLevelDeclarations: new Set(["moduleMap", "get", "init"]) }; - this.buildMeta.exportsType = "namespace"; this.clearDependenciesAndBlocks(); @@ -73153,7 +71276,6 @@ class ContainerEntryModule extends Module { } this.addBlock(block); } - this.addDependency(new StaticExportsDependency(["get", "init"], false)); callback(); } @@ -88888,16 +87010,18 @@ 
module.exports = ExportWebpackRequireRuntimeModule; -const { ConcatSource } = __webpack_require__(96192); +const { ConcatSource, RawSource } = __webpack_require__(96192); const { RuntimeGlobals } = __webpack_require__(48169); const HotUpdateChunk = __webpack_require__(39222); const Template = __webpack_require__(92066); -const { getAllChunks } = __webpack_require__(8272); const { getCompilationHooks, getChunkFilenameTemplate } = __webpack_require__(76767); -const { updateHashForEntryStartup } = __webpack_require__(30951); +const { + generateEntryStartup, + updateHashForEntryStartup +} = __webpack_require__(30951); /** @typedef {import("../Compiler")} Compiler */ @@ -88965,90 +87089,63 @@ class ModuleChunkFormatPlugin { } ) .split("/"); + const runtimeOutputName = compilation + .getPath( + getChunkFilenameTemplate( + runtimeChunk, + compilation.outputOptions + ), + { + chunk: runtimeChunk, + contentHashType: "javascript" + } + ) + .split("/"); // remove filename, we only need the directory - currentOutputName.pop(); + const outputFilename = currentOutputName.pop(); - const getRelativePath = chunk => { - const baseOutputName = currentOutputName.slice(); - const chunkOutputName = compilation - .getPath( - getChunkFilenameTemplate( - chunk, - compilation.outputOptions - ), - { - chunk: chunk, - contentHashType: "javascript" - } - ) - .split("/"); + // remove common parts + while ( + currentOutputName.length > 0 && + runtimeOutputName.length > 0 && + currentOutputName[0] === runtimeOutputName[0] + ) { + currentOutputName.shift(); + runtimeOutputName.shift(); + } - // remove common parts - while ( - baseOutputName.length > 0 && - chunkOutputName.length > 0 && - baseOutputName[0] === chunkOutputName[0] - ) { - baseOutputName.shift(); - chunkOutputName.shift(); - } - // create final path - return ( - (baseOutputName.length > 0 - ? "../".repeat(baseOutputName.length) - : "./") + chunkOutputName.join("/") - ); - }; + // create final path + const runtimePath = + (currentOutputName.length > 0 + ? 
"../".repeat(currentOutputName.length) + : "./") + runtimeOutputName.join("/"); const entrySource = new ConcatSource(); entrySource.add(source); entrySource.add(";\n\n// load runtime\n"); entrySource.add( `import __webpack_require__ from ${JSON.stringify( - getRelativePath(runtimeChunk) + runtimePath )};\n` ); - - const startupSource = new ConcatSource(); - startupSource.add( - `var __webpack_exec__ = ${runtimeTemplate.returningFunction( - `__webpack_require__(${RuntimeGlobals.entryModuleId} = moduleId)`, - "moduleId" - )}\n` + entrySource.add( + `import * as __webpack_self_exports__ from ${JSON.stringify( + "./" + outputFilename + )};\n` + ); + entrySource.add( + `${RuntimeGlobals.externalInstallChunk}(__webpack_self_exports__);\n` + ); + const startupSource = new RawSource( + generateEntryStartup( + chunkGraph, + runtimeTemplate, + entries, + chunk, + false + ) ); - - const loadedChunks = new Set(); - let index = 0; - for (let i = 0; i < entries.length; i++) { - const [module, entrypoint] = entries[i]; - const final = i + 1 === entries.length; - const moduleId = chunkGraph.getModuleId(module); - const chunks = getAllChunks( - entrypoint, - runtimeChunk, - undefined - ); - for (const chunk of chunks) { - if (loadedChunks.has(chunk)) continue; - loadedChunks.add(chunk); - startupSource.add( - `import * as __webpack_chunk_${index}__ from ${JSON.stringify( - getRelativePath(chunk) - )};\n` - ); - startupSource.add( - `${RuntimeGlobals.externalInstallChunk}(__webpack_chunk_${index}__);\n` - ); - index++; - } - startupSource.add( - `${ - final ? "var __webpack_exports__ = " : "" - }__webpack_exec__(${JSON.stringify(moduleId)});\n` - ); - } - entrySource.add( hooks.renderStartup.call( startupSource, @@ -92612,47 +90709,6 @@ BasicEvaluatedExpression.isValidRegExpFlags = flags => { module.exports = BasicEvaluatedExpression; -/***/ }), - -/***/ 8272: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ - - - -const Entrypoint = __webpack_require__(86695); - -/** @typedef {import("../Chunk")} Chunk */ - -/** - * @param {Entrypoint} entrypoint a chunk group - * @param {Chunk} excludedChunk1 current chunk which is excluded - * @param {Chunk} excludedChunk2 runtime chunk which is excluded - * @returns {Set} chunks - */ -const getAllChunks = (entrypoint, excludedChunk1, excludedChunk2) => { - const queue = new Set([entrypoint]); - const chunks = new Set(); - for (const entrypoint of queue) { - for (const chunk of entrypoint.chunks) { - if (chunk === excludedChunk1) continue; - if (chunk === excludedChunk2) continue; - chunks.add(chunk); - } - for (const parent of entrypoint.parentsIterable) { - if (parent instanceof Entrypoint) queue.add(parent); - } - } - return chunks; -}; -exports.getAllChunks = getAllChunks; - - /***/ }), /***/ 57634: @@ -95741,15 +93797,14 @@ class JavascriptParser extends Parser { const node = /** @type {TaggedTemplateExpressionNode} */ (_node); const tag = this.evaluateExpression(node.tag); - if (tag.isIdentifier() && tag.identifier === "String.raw") { - const { quasis, parts } = getSimplifiedTemplateResult( - "raw", - node.quasi - ); - return new BasicEvaluatedExpression() - .setTemplateString(quasis, parts, "raw") - .setRange(node.range); - } + if (tag.isIdentifier() && tag.identifier !== "String.raw") return; + const { quasis, parts } = getSimplifiedTemplateResult( + "raw", + node.quasi + ); + return new BasicEvaluatedExpression() + 
.setTemplateString(quasis, parts, "raw") + .setRange(node.range); }); this.hooks.evaluateCallExpressionMember @@ -98449,10 +96504,10 @@ exports.approve = () => true; +const Entrypoint = __webpack_require__(86695); const RuntimeGlobals = __webpack_require__(49404); const Template = __webpack_require__(92066); const { isSubset } = __webpack_require__(34715); -const { getAllChunks } = __webpack_require__(8272); const { chunkHasJs } = __webpack_require__(76767); /** @typedef {import("../util/Hash")} Hash */ @@ -98463,6 +96518,30 @@ const { chunkHasJs } = __webpack_require__(76767); /** @typedef {import("../RuntimeTemplate")} RuntimeTemplate */ /** @typedef {(string|number)[]} EntryItem */ +// TODO move to this file to ../javascript/ChunkHelpers.js + +/** + * @param {Entrypoint} entrypoint a chunk group + * @param {Chunk} excludedChunk1 current chunk which is excluded + * @param {Chunk} excludedChunk2 runtime chunk which is excluded + * @returns {Set} chunks + */ +const getAllChunks = (entrypoint, excludedChunk1, excludedChunk2) => { + const queue = new Set([entrypoint]); + const chunks = new Set(); + for (const entrypoint of queue) { + for (const chunk of entrypoint.chunks) { + if (chunk === excludedChunk1) continue; + if (chunk === excludedChunk2) continue; + chunks.add(chunk); + } + for (const parent of entrypoint.parentsIterable) { + if (parent instanceof Entrypoint) queue.add(parent); + } + } + return chunks; +}; + const EXPORT_PREFIX = "var __webpack_exports__ = "; /** @@ -101774,8 +99853,7 @@ module.exports = NodeTemplatePlugin; -const util = __webpack_require__(31669); -const Watchpack = __webpack_require__(29149); +const Watchpack = __webpack_require__(98342); /** @typedef {import("../../declarations/WebpackOptions").WatchOptions} WatchOptions */ /** @typedef {import("../FileSystemInfo").FileSystemInfoEntry} FileSystemInfoEntry */ @@ -101838,22 +99916,7 @@ class NodeWatchFileSystem { if (callbackUndelayed) { this.watcher.once("change", callbackUndelayed); } - - const fetchTimeInfo = () => { - const fileTimeInfoEntries = new Map(); - const contextTimeInfoEntries = new Map(); - if (this.watcher) { - this.watcher.collectTimeInfoEntries( - fileTimeInfoEntries, - contextTimeInfoEntries - ); - } - return { fileTimeInfoEntries, contextTimeInfoEntries }; - }; this.watcher.once("aggregated", (changes, removals) => { - // pause emitting events (avoids clearing aggregated changes and removals on timeout) - this.watcher.pause(); - if (this.inputFileSystem && this.inputFileSystem.purge) { const fs = this.inputFileSystem; for (const item of changes) { @@ -101863,14 +99926,8 @@ class NodeWatchFileSystem { fs.purge(item); } } - const { fileTimeInfoEntries, contextTimeInfoEntries } = fetchTimeInfo(); - callback( - null, - fileTimeInfoEntries, - contextTimeInfoEntries, - changes, - removals - ); + const times = this.watcher.getTimeInfoEntries(); + callback(null, times, times, changes, removals); }); this.watcher.watch({ files, directories, missing, startTime }); @@ -101890,71 +99947,39 @@ class NodeWatchFileSystem { this.watcher.pause(); } }, - getAggregatedRemovals: util.deprecate( - () => { - const items = this.watcher && this.watcher.aggregatedRemovals; - if (items && this.inputFileSystem && this.inputFileSystem.purge) { - const fs = this.inputFileSystem; - for (const item of items) { - fs.purge(item); - } - } - return items; - }, - "Watcher.getAggregatedRemovals is deprecated in favor of Watcher.getInfo since that's more performant.", - "DEP_WEBPACK_WATCHER_GET_AGGREGATED_REMOVALS" - ), - 
getAggregatedChanges: util.deprecate( - () => { - const items = this.watcher && this.watcher.aggregatedChanges; - if (items && this.inputFileSystem && this.inputFileSystem.purge) { - const fs = this.inputFileSystem; - for (const item of items) { - fs.purge(item); - } - } - return items; - }, - "Watcher.getAggregatedChanges is deprecated in favor of Watcher.getInfo since that's more performant.", - "DEP_WEBPACK_WATCHER_GET_AGGREGATED_CHANGES" - ), - getFileTimeInfoEntries: util.deprecate( - () => { - return fetchTimeInfo().fileTimeInfoEntries; - }, - "Watcher.getFileTimeInfoEntries is deprecated in favor of Watcher.getInfo since that's more performant.", - "DEP_WEBPACK_WATCHER_FILE_TIME_INFO_ENTRIES" - ), - getContextTimeInfoEntries: util.deprecate( - () => { - return fetchTimeInfo().contextTimeInfoEntries; - }, - "Watcher.getContextTimeInfoEntries is deprecated in favor of Watcher.getInfo since that's more performant.", - "DEP_WEBPACK_WATCHER_CONTEXT_TIME_INFO_ENTRIES" - ), - getInfo: () => { - const removals = this.watcher && this.watcher.aggregatedRemovals; - const changes = this.watcher && this.watcher.aggregatedChanges; - if (this.inputFileSystem && this.inputFileSystem.purge) { + getAggregatedRemovals: () => { + const items = this.watcher && this.watcher.aggregatedRemovals; + if (items && this.inputFileSystem && this.inputFileSystem.purge) { const fs = this.inputFileSystem; - if (removals) { - for (const item of removals) { - fs.purge(item); - } + for (const item of items) { + fs.purge(item); } - if (changes) { - for (const item of changes) { - fs.purge(item); - } + } + return items; + }, + getAggregatedChanges: () => { + const items = this.watcher && this.watcher.aggregatedChanges; + if (items && this.inputFileSystem && this.inputFileSystem.purge) { + const fs = this.inputFileSystem; + for (const item of items) { + fs.purge(item); } } - const { fileTimeInfoEntries, contextTimeInfoEntries } = fetchTimeInfo(); - return { - changes, - removals, - fileTimeInfoEntries, - contextTimeInfoEntries - }; + return items; + }, + getFileTimeInfoEntries: () => { + if (this.watcher) { + return this.watcher.getTimeInfoEntries(); + } else { + return new Map(); + } + }, + getContextTimeInfoEntries: () => { + if (this.watcher) { + return this.watcher.getTimeInfoEntries(); + } else { + return new Map(); + } } }; } @@ -118439,9 +116464,9 @@ class ConsumeSharedRuntimeModule extends RuntimeModule { ] )};`, `var getInvalidSingletonVersionMessage = ${runtimeTemplate.basicFunction( - "scope, key, version, requiredVersion", + "key, version, requiredVersion", [ - `return "Unsatisfied version " + version + " from " + (version && scope[key][version].from) + " of shared singleton module " + key + " (required " + rangeToString(requiredVersion) + ")"` + `return "Unsatisfied version " + version + " of shared singleton module " + key + " (required " + rangeToString(requiredVersion) + ")"` ] )};`, `var getSingletonVersion = ${runtimeTemplate.basicFunction( @@ -118449,7 +116474,7 @@ class ConsumeSharedRuntimeModule extends RuntimeModule { [ "var version = findSingletonVersionKey(scope, key);", "if (!satisfy(requiredVersion, version)) " + - 'typeof console !== "undefined" && console.warn && console.warn(getInvalidSingletonVersionMessage(scope, key, version, requiredVersion));', + 'typeof console !== "undefined" && console.warn && console.warn(getInvalidSingletonVersionMessage(key, version, requiredVersion));', "return get(scope[key][version]);" ] )};`, @@ -118458,7 +116483,7 @@ class ConsumeSharedRuntimeModule extends 
RuntimeModule { [ "var version = findSingletonVersionKey(scope, key);", "if (!satisfy(requiredVersion, version)) " + - "throw new Error(getInvalidSingletonVersionMessage(scope, key, version, requiredVersion));", + "throw new Error(getInvalidSingletonVersionMessage(key, version, requiredVersion));", "return get(scope[key][version]);" ] )};`, @@ -129347,15 +127372,6 @@ const path = __webpack_require__(85622); /** @typedef {function((NodeJS.ErrnoException | Error | null)=, any=): void} ReadJsonCallback */ /** @typedef {function((NodeJS.ErrnoException | Error | null)=, IStats|string=): void} LstatReadlinkAbsoluteCallback */ -/** - * @typedef {Object} WatcherInfo - * @property {Set} changes get current aggregated changes that have not yet send to callback - * @property {Set} removals get current aggregated removals that have not yet send to callback - * @property {Map} fileTimeInfoEntries get info about files - * @property {Map} contextTimeInfoEntries get info about directories - */ - -// TODO webpack 6 deprecate missing getInfo /** * @typedef {Object} Watcher * @property {function(): void} close closes the watcher and all underlying file watchers @@ -129364,7 +127380,6 @@ const path = __webpack_require__(85622); * @property {function(): Set=} getAggregatedRemovals get current aggregated removals that have not yet send to callback * @property {function(): Map} getFileTimeInfoEntries get info about files * @property {function(): Map} getContextTimeInfoEntries get info about directories - * @property {function(): WatcherInfo=} getInfo get info about timestamps and changes */ /** @@ -142173,776 +140188,1768 @@ const join = (rootPath, request) => { }; exports.join = join; -const joinCache = new Map(); +const joinCache = new Map(); + +/** + * @param {string} rootPath the root path + * @param {string | undefined} request the request path + * @returns {string} the joined path + */ +const cachedJoin = (rootPath, request) => { + let cacheEntry; + let cache = joinCache.get(rootPath); + if (cache === undefined) { + joinCache.set(rootPath, (cache = new Map())); + } else { + cacheEntry = cache.get(request); + if (cacheEntry !== undefined) return cacheEntry; + } + cacheEntry = join(rootPath, request); + cache.set(request, cacheEntry); + return cacheEntry; +}; +exports.cachedJoin = cachedJoin; + +const checkExportsFieldTarget = relativePath => { + let lastNonSlashIndex = 2; + let slashIndex = relativePath.indexOf("/", 2); + let cd = 0; + + while (slashIndex !== -1) { + const folder = relativePath.slice(lastNonSlashIndex, slashIndex); + + switch (folder) { + case "..": { + cd--; + if (cd < 0) + return new Error( + `Trying to access out of package scope. 
Requesting ${relativePath}` + ); + break; + } + default: + cd++; + break; + } + + lastNonSlashIndex = slashIndex + 1; + slashIndex = relativePath.indexOf("/", lastNonSlashIndex); + } +}; +exports.checkExportsFieldTarget = checkExportsFieldTarget; + + +/***/ }), + +/***/ 83798: +/***/ (function(module) { + +"use strict"; + + +class LoadingLoaderError extends Error { + constructor(message) { + super(message); + this.name = "LoaderRunnerError"; + Error.captureStackTrace(this, this.constructor); + } +} + +module.exports = LoadingLoaderError; + + +/***/ }), + +/***/ 67578: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ +var fs = __webpack_require__(35747); +var readFile = fs.readFile.bind(fs); +var loadLoader = __webpack_require__(63395); + +function utf8BufferToString(buf) { + var str = buf.toString("utf-8"); + if(str.charCodeAt(0) === 0xFEFF) { + return str.substr(1); + } else { + return str; + } +} + +const PATH_QUERY_FRAGMENT_REGEXP = /^((?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/; + +/** + * @param {string} str the path with query and fragment + * @returns {{ path: string, query: string, fragment: string }} parsed parts + */ +function parsePathQueryFragment(str) { + var match = PATH_QUERY_FRAGMENT_REGEXP.exec(str); + return { + path: match[1].replace(/\0(.)/g, "$1"), + query: match[2] ? match[2].replace(/\0(.)/g, "$1") : "", + fragment: match[3] || "" + }; +} + +function dirname(path) { + if(path === "/") return "/"; + var i = path.lastIndexOf("/"); + var j = path.lastIndexOf("\\"); + var i2 = path.indexOf("/"); + var j2 = path.indexOf("\\"); + var idx = i > j ? i : j; + var idx2 = i > j ? i2 : j2; + if(idx < 0) return path; + if(idx === idx2) return path.substr(0, idx + 1); + return path.substr(0, idx); +} + +function createLoaderObject(loader) { + var obj = { + path: null, + query: null, + fragment: null, + options: null, + ident: null, + normal: null, + pitch: null, + raw: null, + data: null, + pitchExecuted: false, + normalExecuted: false + }; + Object.defineProperty(obj, "request", { + enumerable: true, + get: function() { + return obj.path.replace(/#/g, "\0#") + obj.query.replace(/#/g, "\0#") + obj.fragment; + }, + set: function(value) { + if(typeof value === "string") { + var splittedRequest = parsePathQueryFragment(value); + obj.path = splittedRequest.path; + obj.query = splittedRequest.query; + obj.fragment = splittedRequest.fragment; + obj.options = undefined; + obj.ident = undefined; + } else { + if(!value.loader) + throw new Error("request should be a string or object with loader and options (" + JSON.stringify(value) + ")"); + obj.path = value.loader; + obj.fragment = value.fragment || ""; + obj.type = value.type; + obj.options = value.options; + obj.ident = value.ident; + if(obj.options === null) + obj.query = ""; + else if(obj.options === undefined) + obj.query = ""; + else if(typeof obj.options === "string") + obj.query = "?" + obj.options; + else if(obj.ident) + obj.query = "??" + obj.ident; + else if(typeof obj.options === "object" && obj.options.ident) + obj.query = "??" + obj.options.ident; + else + obj.query = "?" 
+ JSON.stringify(obj.options); + } + } + }); + obj.request = loader; + if(Object.preventExtensions) { + Object.preventExtensions(obj); + } + return obj; +} + +function runSyncOrAsync(fn, context, args, callback) { + var isSync = true; + var isDone = false; + var isError = false; // internal error + var reportedError = false; + context.async = function async() { + if(isDone) { + if(reportedError) return; // ignore + throw new Error("async(): The callback was already called."); + } + isSync = false; + return innerCallback; + }; + var innerCallback = context.callback = function() { + if(isDone) { + if(reportedError) return; // ignore + throw new Error("callback(): The callback was already called."); + } + isDone = true; + isSync = false; + try { + callback.apply(null, arguments); + } catch(e) { + isError = true; + throw e; + } + }; + try { + var result = (function LOADER_EXECUTION() { + return fn.apply(context, args); + }()); + if(isSync) { + isDone = true; + if(result === undefined) + return callback(); + if(result && typeof result === "object" && typeof result.then === "function") { + return result.then(function(r) { + callback(null, r); + }, callback); + } + return callback(null, result); + } + } catch(e) { + if(isError) throw e; + if(isDone) { + // loader is already "done", so we cannot use the callback function + // for better debugging we print the error on the console + if(typeof e === "object" && e.stack) console.error(e.stack); + else console.error(e); + return; + } + isDone = true; + reportedError = true; + callback(e); + } + +} + +function convertArgs(args, raw) { + if(!raw && Buffer.isBuffer(args[0])) + args[0] = utf8BufferToString(args[0]); + else if(raw && typeof args[0] === "string") + args[0] = Buffer.from(args[0], "utf-8"); +} + +function iteratePitchingLoaders(options, loaderContext, callback) { + // abort after last loader + if(loaderContext.loaderIndex >= loaderContext.loaders.length) + return processResource(options, loaderContext, callback); + + var currentLoaderObject = loaderContext.loaders[loaderContext.loaderIndex]; + + // iterate + if(currentLoaderObject.pitchExecuted) { + loaderContext.loaderIndex++; + return iteratePitchingLoaders(options, loaderContext, callback); + } + + // load loader module + loadLoader(currentLoaderObject, function(err) { + if(err) { + loaderContext.cacheable(false); + return callback(err); + } + var fn = currentLoaderObject.pitch; + currentLoaderObject.pitchExecuted = true; + if(!fn) return iteratePitchingLoaders(options, loaderContext, callback); + + runSyncOrAsync( + fn, + loaderContext, [loaderContext.remainingRequest, loaderContext.previousRequest, currentLoaderObject.data = {}], + function(err) { + if(err) return callback(err); + var args = Array.prototype.slice.call(arguments, 1); + // Determine whether to continue the pitching process based on + // argument values (as opposed to argument presence) in order + // to support synchronous and asynchronous usages. 
+ var hasArg = args.some(function(value) { + return value !== undefined; + }); + if(hasArg) { + loaderContext.loaderIndex--; + iterateNormalLoaders(options, loaderContext, args, callback); + } else { + iteratePitchingLoaders(options, loaderContext, callback); + } + } + ); + }); +} + +function processResource(options, loaderContext, callback) { + // set loader index to last loader + loaderContext.loaderIndex = loaderContext.loaders.length - 1; + + var resourcePath = loaderContext.resourcePath; + if(resourcePath) { + options.processResource(loaderContext, resourcePath, function(err, buffer) { + if(err) return callback(err); + options.resourceBuffer = buffer; + iterateNormalLoaders(options, loaderContext, [buffer], callback); + }); + } else { + iterateNormalLoaders(options, loaderContext, [null], callback); + } +} + +function iterateNormalLoaders(options, loaderContext, args, callback) { + if(loaderContext.loaderIndex < 0) + return callback(null, args); + + var currentLoaderObject = loaderContext.loaders[loaderContext.loaderIndex]; + + // iterate + if(currentLoaderObject.normalExecuted) { + loaderContext.loaderIndex--; + return iterateNormalLoaders(options, loaderContext, args, callback); + } + + var fn = currentLoaderObject.normal; + currentLoaderObject.normalExecuted = true; + if(!fn) { + return iterateNormalLoaders(options, loaderContext, args, callback); + } + + convertArgs(args, currentLoaderObject.raw); + + runSyncOrAsync(fn, loaderContext, args, function(err) { + if(err) return callback(err); + + var args = Array.prototype.slice.call(arguments, 1); + iterateNormalLoaders(options, loaderContext, args, callback); + }); +} + +exports.getContext = function getContext(resource) { + var path = parsePathQueryFragment(resource).path; + return dirname(path); +}; + +exports.runLoaders = function runLoaders(options, callback) { + // read options + var resource = options.resource || ""; + var loaders = options.loaders || []; + var loaderContext = options.context || {}; + var processResource = options.processResource || ((readResource, context, resource, callback) => { + context.addDependency(resource); + readResource(resource, callback); + }).bind(null, options.readResource || readFile); + + // + var splittedResource = resource && parsePathQueryFragment(resource); + var resourcePath = splittedResource ? splittedResource.path : undefined; + var resourceQuery = splittedResource ? splittedResource.query : undefined; + var resourceFragment = splittedResource ? splittedResource.fragment : undefined; + var contextDirectory = resourcePath ? 
dirname(resourcePath) : null; + + // execution state + var requestCacheable = true; + var fileDependencies = []; + var contextDependencies = []; + var missingDependencies = []; + + // prepare loader objects + loaders = loaders.map(createLoaderObject); + + loaderContext.context = contextDirectory; + loaderContext.loaderIndex = 0; + loaderContext.loaders = loaders; + loaderContext.resourcePath = resourcePath; + loaderContext.resourceQuery = resourceQuery; + loaderContext.resourceFragment = resourceFragment; + loaderContext.async = null; + loaderContext.callback = null; + loaderContext.cacheable = function cacheable(flag) { + if(flag === false) { + requestCacheable = false; + } + }; + loaderContext.dependency = loaderContext.addDependency = function addDependency(file) { + fileDependencies.push(file); + }; + loaderContext.addContextDependency = function addContextDependency(context) { + contextDependencies.push(context); + }; + loaderContext.addMissingDependency = function addMissingDependency(context) { + missingDependencies.push(context); + }; + loaderContext.getDependencies = function getDependencies() { + return fileDependencies.slice(); + }; + loaderContext.getContextDependencies = function getContextDependencies() { + return contextDependencies.slice(); + }; + loaderContext.getMissingDependencies = function getMissingDependencies() { + return missingDependencies.slice(); + }; + loaderContext.clearDependencies = function clearDependencies() { + fileDependencies.length = 0; + contextDependencies.length = 0; + missingDependencies.length = 0; + requestCacheable = true; + }; + Object.defineProperty(loaderContext, "resource", { + enumerable: true, + get: function() { + if(loaderContext.resourcePath === undefined) + return undefined; + return loaderContext.resourcePath.replace(/#/g, "\0#") + loaderContext.resourceQuery.replace(/#/g, "\0#") + loaderContext.resourceFragment; + }, + set: function(value) { + var splittedResource = value && parsePathQueryFragment(value); + loaderContext.resourcePath = splittedResource ? splittedResource.path : undefined; + loaderContext.resourceQuery = splittedResource ? splittedResource.query : undefined; + loaderContext.resourceFragment = splittedResource ? 
splittedResource.fragment : undefined; + } + }); + Object.defineProperty(loaderContext, "request", { + enumerable: true, + get: function() { + return loaderContext.loaders.map(function(o) { + return o.request; + }).concat(loaderContext.resource || "").join("!"); + } + }); + Object.defineProperty(loaderContext, "remainingRequest", { + enumerable: true, + get: function() { + if(loaderContext.loaderIndex >= loaderContext.loaders.length - 1 && !loaderContext.resource) + return ""; + return loaderContext.loaders.slice(loaderContext.loaderIndex + 1).map(function(o) { + return o.request; + }).concat(loaderContext.resource || "").join("!"); + } + }); + Object.defineProperty(loaderContext, "currentRequest", { + enumerable: true, + get: function() { + return loaderContext.loaders.slice(loaderContext.loaderIndex).map(function(o) { + return o.request; + }).concat(loaderContext.resource || "").join("!"); + } + }); + Object.defineProperty(loaderContext, "previousRequest", { + enumerable: true, + get: function() { + return loaderContext.loaders.slice(0, loaderContext.loaderIndex).map(function(o) { + return o.request; + }).join("!"); + } + }); + Object.defineProperty(loaderContext, "query", { + enumerable: true, + get: function() { + var entry = loaderContext.loaders[loaderContext.loaderIndex]; + return entry.options && typeof entry.options === "object" ? entry.options : entry.query; + } + }); + Object.defineProperty(loaderContext, "data", { + enumerable: true, + get: function() { + return loaderContext.loaders[loaderContext.loaderIndex].data; + } + }); + + // finish loader context + if(Object.preventExtensions) { + Object.preventExtensions(loaderContext); + } + + var processOptions = { + resourceBuffer: null, + processResource: processResource + }; + iteratePitchingLoaders(processOptions, loaderContext, function(err, result) { + if(err) { + return callback(err, { + cacheable: requestCacheable, + fileDependencies: fileDependencies, + contextDependencies: contextDependencies, + missingDependencies: missingDependencies + }); + } + callback(null, { + result: result, + resourceBuffer: processOptions.resourceBuffer, + cacheable: requestCacheable, + fileDependencies: fileDependencies, + contextDependencies: contextDependencies, + missingDependencies: missingDependencies + }); + }); +}; + + +/***/ }), + +/***/ 63395: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +var LoaderLoadingError = __webpack_require__(83798); +var url; + +module.exports = function loadLoader(loader, callback) { + if(loader.type === "module") { + try { + if(url === undefined) url = __webpack_require__(78835); + var loaderUrl = url.pathToFileURL(loader.path); + var modulePromise = eval("import(" + JSON.stringify(loaderUrl.toString()) + ")"); + modulePromise.then(function(module) { + handleResult(loader, module, callback); + }, callback); + return; + } catch(e) { + callback(e); + } + } else { + try { + var module = require(loader.path); + } catch(e) { + // it is possible for node to choke on a require if the FD descriptor + // limit has been reached. give it a chance to recover. 
+ if(e instanceof Error && e.code === "EMFILE") { + var retry = loadLoader.bind(null, loader, callback); + if(typeof setImmediate === "function") { + // node >= 0.9.0 + return setImmediate(retry); + } else { + // node < 0.9.0 + return process.nextTick(retry); + } + } + return callback(e); + } + return handleResult(loader, module, callback); + } +}; + +function handleResult(loader, module, callback) { + if(typeof module !== "function" && typeof module !== "object") { + return callback(new LoaderLoadingError( + "Module '" + loader.path + "' is not a loader (export function or es6 module)" + )); + } + loader.normal = typeof module === "function" ? module : module.default; + loader.pitch = module.pitch; + loader.raw = module.raw; + if(typeof loader.normal !== "function" && typeof loader.pitch !== "function") { + return callback(new LoaderLoadingError( + "Module '" + loader.path + "' is not a loader (must have normal or pitch function)" + )); + } + callback(); +} + + +/***/ }), + +/***/ 11060: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * MIT Licensed + */ + +/** + * Module exports. + */ + +module.exports = __webpack_require__(53243) + + +/***/ }), + +/***/ 19338: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + + + +/** + * Module dependencies. + * @private + */ + +var db = __webpack_require__(11060) +var extname = __webpack_require__(85622).extname + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? + if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. 
+ * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} + + +/***/ }), + +/***/ 28339: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + +const Hook = __webpack_require__(36591); +const HookCodeFactory = __webpack_require__(85572); + +class AsyncParallelBailHookCodeFactory extends HookCodeFactory { + content({ onError, onResult, onDone }) { + let code = ""; + code += `var _results = new Array(${this.options.taps.length});\n`; + code += "var _checkDone = function() {\n"; + code += "for(var i = 0; i < _results.length; i++) {\n"; + code += "var item = _results[i];\n"; + code += "if(item === undefined) return false;\n"; + code += "if(item.result !== undefined) {\n"; + code += onResult("item.result"); + code += "return true;\n"; + code += "}\n"; + code += "if(item.error) {\n"; + code += onError("item.error"); + code += "return true;\n"; + code += "}\n"; + code += "}\n"; + code += "return false;\n"; + code += "}\n"; + code += this.callTapsParallel({ + onError: (i, err, done, doneBreak) => { + let code = ""; + code += `if(${i} < _results.length && ((_results.length = ${i + + 1}), (_results[${i}] = { error: ${err} }), _checkDone())) {\n`; + code += doneBreak(true); + code += "} else {\n"; + code += done(); + code += "}\n"; + return code; + }, + onResult: (i, result, done, doneBreak) => { + let code = ""; + code += `if(${i} < _results.length && (${result} !== undefined && (_results.length = ${i + + 1}), (_results[${i}] = { result: ${result} }), _checkDone())) {\n`; + code += doneBreak(true); + code += "} else {\n"; + code += done(); + code += "}\n"; + return code; + }, + onTap: (i, run, done, doneBreak) => { + let code = ""; + if (i > 0) { + code += `if(${i} >= _results.length) {\n`; + code += done(); + code += "} 
else {\n"; + } + code += run(); + if (i > 0) code += "}\n"; + return code; + }, + onDone + }); + return code; + } +} + +const factory = new AsyncParallelBailHookCodeFactory(); + +const COMPILE = function(options) { + factory.setup(this, options); + return factory.create(options); +}; + +function AsyncParallelBailHook(args = [], name = undefined) { + const hook = new Hook(args, name); + hook.constructor = AsyncParallelBailHook; + hook.compile = COMPILE; + hook._call = undefined; + hook.call = undefined; + return hook; +} + +AsyncParallelBailHook.prototype = null; + +module.exports = AsyncParallelBailHook; + + +/***/ }), + +/***/ 82890: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + +const Hook = __webpack_require__(36591); +const HookCodeFactory = __webpack_require__(85572); + +class AsyncParallelHookCodeFactory extends HookCodeFactory { + content({ onError, onDone }) { + return this.callTapsParallel({ + onError: (i, err, done, doneBreak) => onError(err) + doneBreak(true), + onDone + }); + } +} + +const factory = new AsyncParallelHookCodeFactory(); + +const COMPILE = function(options) { + factory.setup(this, options); + return factory.create(options); +}; + +function AsyncParallelHook(args = [], name = undefined) { + const hook = new Hook(args, name); + hook.constructor = AsyncParallelHook; + hook.compile = COMPILE; + hook._call = undefined; + hook.call = undefined; + return hook; +} + +AsyncParallelHook.prototype = null; + +module.exports = AsyncParallelHook; + + +/***/ }), + +/***/ 97681: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { -/** - * @param {string} rootPath the root path - * @param {string | undefined} request the request path - * @returns {string} the joined path - */ -const cachedJoin = (rootPath, request) => { - let cacheEntry; - let cache = joinCache.get(rootPath); - if (cache === undefined) { - joinCache.set(rootPath, (cache = new Map())); - } else { - cacheEntry = cache.get(request); - if (cacheEntry !== undefined) return cacheEntry; +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + +const Hook = __webpack_require__(36591); +const HookCodeFactory = __webpack_require__(85572); + +class AsyncSeriesBailHookCodeFactory extends HookCodeFactory { + content({ onError, onResult, resultReturns, onDone }) { + return this.callTapsSeries({ + onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true), + onResult: (i, result, next) => + `if(${result} !== undefined) {\n${onResult( + result + )}\n} else {\n${next()}}\n`, + resultReturns, + onDone + }); } - cacheEntry = join(rootPath, request); - cache.set(request, cacheEntry); - return cacheEntry; +} + +const factory = new AsyncSeriesBailHookCodeFactory(); + +const COMPILE = function(options) { + factory.setup(this, options); + return factory.create(options); }; -exports.cachedJoin = cachedJoin; -const checkExportsFieldTarget = relativePath => { - let lastNonSlashIndex = 2; - let slashIndex = relativePath.indexOf("/", 2); - let cd = 0; +function AsyncSeriesBailHook(args = [], name = undefined) { + const hook = new Hook(args, name); + hook.constructor = AsyncSeriesBailHook; + hook.compile = COMPILE; + hook._call = undefined; + hook.call = undefined; + return hook; +} - while (slashIndex !== -1) { - const folder = relativePath.slice(lastNonSlashIndex, slashIndex); 
+AsyncSeriesBailHook.prototype = null; - switch (folder) { - case "..": { - cd--; - if (cd < 0) - return new Error( - `Trying to access out of package scope. Requesting ${relativePath}` - ); - break; - } - default: - cd++; - break; - } +module.exports = AsyncSeriesBailHook; - lastNonSlashIndex = slashIndex + 1; - slashIndex = relativePath.indexOf("/", lastNonSlashIndex); + +/***/ }), + +/***/ 45146: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + +const Hook = __webpack_require__(36591); +const HookCodeFactory = __webpack_require__(85572); + +class AsyncSeriesHookCodeFactory extends HookCodeFactory { + content({ onError, onDone }) { + return this.callTapsSeries({ + onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true), + onDone + }); } +} + +const factory = new AsyncSeriesHookCodeFactory(); + +const COMPILE = function(options) { + factory.setup(this, options); + return factory.create(options); }; -exports.checkExportsFieldTarget = checkExportsFieldTarget; + +function AsyncSeriesHook(args = [], name = undefined) { + const hook = new Hook(args, name); + hook.constructor = AsyncSeriesHook; + hook.compile = COMPILE; + hook._call = undefined; + hook.call = undefined; + return hook; +} + +AsyncSeriesHook.prototype = null; + +module.exports = AsyncSeriesHook; /***/ }), -/***/ 83798: -/***/ (function(module) { +/***/ 5463: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ -class LoadingLoaderError extends Error { - constructor(message) { - super(message); - this.name = "LoaderRunnerError"; - Error.captureStackTrace(this, this.constructor); +const Hook = __webpack_require__(36591); +const HookCodeFactory = __webpack_require__(85572); + +class AsyncSeriesLoopHookCodeFactory extends HookCodeFactory { + content({ onError, onDone }) { + return this.callTapsLooping({ + onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true), + onDone + }); } } -module.exports = LoadingLoaderError; +const factory = new AsyncSeriesLoopHookCodeFactory(); + +const COMPILE = function(options) { + factory.setup(this, options); + return factory.create(options); +}; + +function AsyncSeriesLoopHook(args = [], name = undefined) { + const hook = new Hook(args, name); + hook.constructor = AsyncSeriesLoopHook; + hook.compile = COMPILE; + hook._call = undefined; + hook.call = undefined; + return hook; +} + +AsyncSeriesLoopHook.prototype = null; + +module.exports = AsyncSeriesLoopHook; /***/ }), -/***/ 67578: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { +/***/ 73448: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { +"use strict"; /* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ -var fs = __webpack_require__(35747); -var readFile = fs.readFile.bind(fs); -var loadLoader = __webpack_require__(63395); -function utf8BufferToString(buf) { - var str = buf.toString("utf-8"); - if(str.charCodeAt(0) === 0xFEFF) { - return str.substr(1); - } else { - return str; + +const Hook = __webpack_require__(36591); +const HookCodeFactory = __webpack_require__(85572); + +class AsyncSeriesWaterfallHookCodeFactory extends HookCodeFactory { + content({ onError, onResult, onDone }) { + return this.callTapsSeries({ + onError: (i, err, next, 
doneBreak) => onError(err) + doneBreak(true), + onResult: (i, result, next) => { + let code = ""; + code += `if(${result} !== undefined) {\n`; + code += `${this._args[0]} = ${result};\n`; + code += `}\n`; + code += next(); + return code; + }, + onDone: () => onResult(this._args[0]) + }); } } -const PATH_QUERY_FRAGMENT_REGEXP = /^((?:\0.|[^?#\0])*)(\?(?:\0.|[^#\0])*)?(#.*)?$/; +const factory = new AsyncSeriesWaterfallHookCodeFactory(); -/** - * @param {string} str the path with query and fragment - * @returns {{ path: string, query: string, fragment: string }} parsed parts - */ -function parsePathQueryFragment(str) { - var match = PATH_QUERY_FRAGMENT_REGEXP.exec(str); - return { - path: match[1].replace(/\0(.)/g, "$1"), - query: match[2] ? match[2].replace(/\0(.)/g, "$1") : "", - fragment: match[3] || "" - }; -} +const COMPILE = function(options) { + factory.setup(this, options); + return factory.create(options); +}; -function dirname(path) { - if(path === "/") return "/"; - var i = path.lastIndexOf("/"); - var j = path.lastIndexOf("\\"); - var i2 = path.indexOf("/"); - var j2 = path.indexOf("\\"); - var idx = i > j ? i : j; - var idx2 = i > j ? i2 : j2; - if(idx < 0) return path; - if(idx === idx2) return path.substr(0, idx + 1); - return path.substr(0, idx); +function AsyncSeriesWaterfallHook(args = [], name = undefined) { + if (args.length < 1) + throw new Error("Waterfall hooks must have at least one argument"); + const hook = new Hook(args, name); + hook.constructor = AsyncSeriesWaterfallHook; + hook.compile = COMPILE; + hook._call = undefined; + hook.call = undefined; + return hook; } -function createLoaderObject(loader) { - var obj = { - path: null, - query: null, - fragment: null, - options: null, - ident: null, - normal: null, - pitch: null, - raw: null, - data: null, - pitchExecuted: false, - normalExecuted: false - }; - Object.defineProperty(obj, "request", { - enumerable: true, - get: function() { - return obj.path.replace(/#/g, "\0#") + obj.query.replace(/#/g, "\0#") + obj.fragment; - }, - set: function(value) { - if(typeof value === "string") { - var splittedRequest = parsePathQueryFragment(value); - obj.path = splittedRequest.path; - obj.query = splittedRequest.query; - obj.fragment = splittedRequest.fragment; - obj.options = undefined; - obj.ident = undefined; - } else { - if(!value.loader) - throw new Error("request should be a string or object with loader and options (" + JSON.stringify(value) + ")"); - obj.path = value.loader; - obj.fragment = value.fragment || ""; - obj.type = value.type; - obj.options = value.options; - obj.ident = value.ident; - if(obj.options === null) - obj.query = ""; - else if(obj.options === undefined) - obj.query = ""; - else if(typeof obj.options === "string") - obj.query = "?" + obj.options; - else if(obj.ident) - obj.query = "??" + obj.ident; - else if(typeof obj.options === "object" && obj.options.ident) - obj.query = "??" + obj.options.ident; - else - obj.query = "?" 
+ JSON.stringify(obj.options); - } - } - }); - obj.request = loader; - if(Object.preventExtensions) { - Object.preventExtensions(obj); +AsyncSeriesWaterfallHook.prototype = null; + +module.exports = AsyncSeriesWaterfallHook; + + +/***/ }), + +/***/ 36591: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + +const util = __webpack_require__(31669); + +const deprecateContext = util.deprecate(() => {}, +"Hook.context is deprecated and will be removed"); + +const CALL_DELEGATE = function(...args) { + this.call = this._createCall("sync"); + return this.call(...args); +}; +const CALL_ASYNC_DELEGATE = function(...args) { + this.callAsync = this._createCall("async"); + return this.callAsync(...args); +}; +const PROMISE_DELEGATE = function(...args) { + this.promise = this._createCall("promise"); + return this.promise(...args); +}; + +class Hook { + constructor(args = [], name = undefined) { + this._args = args; + this.name = name; + this.taps = []; + this.interceptors = []; + this._call = CALL_DELEGATE; + this.call = CALL_DELEGATE; + this._callAsync = CALL_ASYNC_DELEGATE; + this.callAsync = CALL_ASYNC_DELEGATE; + this._promise = PROMISE_DELEGATE; + this.promise = PROMISE_DELEGATE; + this._x = undefined; + + this.compile = this.compile; + this.tap = this.tap; + this.tapAsync = this.tapAsync; + this.tapPromise = this.tapPromise; } - return obj; -} -function runSyncOrAsync(fn, context, args, callback) { - var isSync = true; - var isDone = false; - var isError = false; // internal error - var reportedError = false; - context.async = function async() { - if(isDone) { - if(reportedError) return; // ignore - throw new Error("async(): The callback was already called."); + compile(options) { + throw new Error("Abstract: should be overridden"); + } + + _createCall(type) { + return this.compile({ + taps: this.taps, + interceptors: this.interceptors, + args: this._args, + type: type + }); + } + + _tap(type, options, fn) { + if (typeof options === "string") { + options = { + name: options.trim() + }; + } else if (typeof options !== "object" || options === null) { + throw new Error("Invalid tap options"); } - isSync = false; - return innerCallback; - }; - var innerCallback = context.callback = function() { - if(isDone) { - if(reportedError) return; // ignore - throw new Error("callback(): The callback was already called."); + if (typeof options.name !== "string" || options.name === "") { + throw new Error("Missing name for tap"); } - isDone = true; - isSync = false; - try { - callback.apply(null, arguments); - } catch(e) { - isError = true; - throw e; + if (typeof options.context !== "undefined") { + deprecateContext(); } - }; - try { - var result = (function LOADER_EXECUTION() { - return fn.apply(context, args); - }()); - if(isSync) { - isDone = true; - if(result === undefined) - return callback(); - if(result && typeof result === "object" && typeof result.then === "function") { - return result.then(function(r) { - callback(null, r); - }, callback); + options = Object.assign({ type, fn }, options); + options = this._runRegisterInterceptors(options); + this._insert(options); + } + + tap(options, fn) { + this._tap("sync", options, fn); + } + + tapAsync(options, fn) { + this._tap("async", options, fn); + } + + tapPromise(options, fn) { + this._tap("promise", options, fn); + } + + _runRegisterInterceptors(options) { + for (const interceptor of this.interceptors) { + if 
(interceptor.register) { + const newOptions = interceptor.register(options); + if (newOptions !== undefined) { + options = newOptions; + } } - return callback(null, result); - } - } catch(e) { - if(isError) throw e; - if(isDone) { - // loader is already "done", so we cannot use the callback function - // for better debugging we print the error on the console - if(typeof e === "object" && e.stack) console.error(e.stack); - else console.error(e); - return; } - isDone = true; - reportedError = true; - callback(e); + return options; } -} + withOptions(options) { + const mergeOptions = opt => + Object.assign({}, options, typeof opt === "string" ? { name: opt } : opt); -function convertArgs(args, raw) { - if(!raw && Buffer.isBuffer(args[0])) - args[0] = utf8BufferToString(args[0]); - else if(raw && typeof args[0] === "string") - args[0] = Buffer.from(args[0], "utf-8"); -} + return { + name: this.name, + tap: (opt, fn) => this.tap(mergeOptions(opt), fn), + tapAsync: (opt, fn) => this.tapAsync(mergeOptions(opt), fn), + tapPromise: (opt, fn) => this.tapPromise(mergeOptions(opt), fn), + intercept: interceptor => this.intercept(interceptor), + isUsed: () => this.isUsed(), + withOptions: opt => this.withOptions(mergeOptions(opt)) + }; + } -function iteratePitchingLoaders(options, loaderContext, callback) { - // abort after last loader - if(loaderContext.loaderIndex >= loaderContext.loaders.length) - return processResource(options, loaderContext, callback); + isUsed() { + return this.taps.length > 0 || this.interceptors.length > 0; + } - var currentLoaderObject = loaderContext.loaders[loaderContext.loaderIndex]; + intercept(interceptor) { + this._resetCompilation(); + this.interceptors.push(Object.assign({}, interceptor)); + if (interceptor.register) { + for (let i = 0; i < this.taps.length; i++) { + this.taps[i] = interceptor.register(this.taps[i]); + } + } + } - // iterate - if(currentLoaderObject.pitchExecuted) { - loaderContext.loaderIndex++; - return iteratePitchingLoaders(options, loaderContext, callback); + _resetCompilation() { + this.call = this._call; + this.callAsync = this._callAsync; + this.promise = this._promise; } - // load loader module - loadLoader(currentLoaderObject, function(err) { - if(err) { - loaderContext.cacheable(false); - return callback(err); + _insert(item) { + this._resetCompilation(); + let before; + if (typeof item.before === "string") { + before = new Set([item.before]); + } else if (Array.isArray(item.before)) { + before = new Set(item.before); } - var fn = currentLoaderObject.pitch; - currentLoaderObject.pitchExecuted = true; - if(!fn) return iteratePitchingLoaders(options, loaderContext, callback); - - runSyncOrAsync( - fn, - loaderContext, [loaderContext.remainingRequest, loaderContext.previousRequest, currentLoaderObject.data = {}], - function(err) { - if(err) return callback(err); - var args = Array.prototype.slice.call(arguments, 1); - // Determine whether to continue the pitching process based on - // argument values (as opposed to argument presence) in order - // to support synchronous and asynchronous usages. 
- var hasArg = args.some(function(value) { - return value !== undefined; - }); - if(hasArg) { - loaderContext.loaderIndex--; - iterateNormalLoaders(options, loaderContext, args, callback); - } else { - iteratePitchingLoaders(options, loaderContext, callback); + let stage = 0; + if (typeof item.stage === "number") { + stage = item.stage; + } + let i = this.taps.length; + while (i > 0) { + i--; + const x = this.taps[i]; + this.taps[i + 1] = x; + const xStage = x.stage || 0; + if (before) { + if (before.has(x.name)) { + before.delete(x.name); + continue; + } + if (before.size > 0) { + continue; } } - ); - }); -} - -function processResource(options, loaderContext, callback) { - // set loader index to last loader - loaderContext.loaderIndex = loaderContext.loaders.length - 1; - - var resourcePath = loaderContext.resourcePath; - if(resourcePath) { - options.processResource(loaderContext, resourcePath, function(err, buffer) { - if(err) return callback(err); - options.resourceBuffer = buffer; - iterateNormalLoaders(options, loaderContext, [buffer], callback); - }); - } else { - iterateNormalLoaders(options, loaderContext, [null], callback); + if (xStage > stage) { + continue; + } + i++; + break; + } + this.taps[i] = item; } } -function iterateNormalLoaders(options, loaderContext, args, callback) { - if(loaderContext.loaderIndex < 0) - return callback(null, args); +Object.setPrototypeOf(Hook.prototype, null); - var currentLoaderObject = loaderContext.loaders[loaderContext.loaderIndex]; +module.exports = Hook; - // iterate - if(currentLoaderObject.normalExecuted) { - loaderContext.loaderIndex--; - return iterateNormalLoaders(options, loaderContext, args, callback); - } - var fn = currentLoaderObject.normal; - currentLoaderObject.normalExecuted = true; - if(!fn) { - return iterateNormalLoaders(options, loaderContext, args, callback); - } +/***/ }), - convertArgs(args, currentLoaderObject.raw); +/***/ 85572: +/***/ (function(module) { - runSyncOrAsync(fn, loaderContext, args, function(err) { - if(err) return callback(err); +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ - var args = Array.prototype.slice.call(arguments, 1); - iterateNormalLoaders(options, loaderContext, args, callback); - }); -} -exports.getContext = function getContext(resource) { - var path = parsePathQueryFragment(resource).path; - return dirname(path); -}; +class HookCodeFactory { + constructor(config) { + this.config = config; + this.options = undefined; + this._args = undefined; + } -exports.runLoaders = function runLoaders(options, callback) { - // read options - var resource = options.resource || ""; - var loaders = options.loaders || []; - var loaderContext = options.context || {}; - var processResource = options.processResource || ((readResource, context, resource, callback) => { - context.addDependency(resource); - readResource(resource, callback); - }).bind(null, options.readResource || readFile); + create(options) { + this.init(options); + let fn; + switch (this.options.type) { + case "sync": + fn = new Function( + this.args(), + '"use strict";\n' + + this.header() + + this.contentWithInterceptors({ + onError: err => `throw ${err};\n`, + onResult: result => `return ${result};\n`, + resultReturns: true, + onDone: () => "", + rethrowIfPossible: true + }) + ); + break; + case "async": + fn = new Function( + this.args({ + after: "_callback" + }), + '"use strict";\n' + + this.header() + + this.contentWithInterceptors({ + onError: err => 
`_callback(${err});\n`, + onResult: result => `_callback(null, ${result});\n`, + onDone: () => "_callback();\n" + }) + ); + break; + case "promise": + let errorHelperUsed = false; + const content = this.contentWithInterceptors({ + onError: err => { + errorHelperUsed = true; + return `_error(${err});\n`; + }, + onResult: result => `_resolve(${result});\n`, + onDone: () => "_resolve();\n" + }); + let code = ""; + code += '"use strict";\n'; + code += this.header(); + code += "return new Promise((function(_resolve, _reject) {\n"; + if (errorHelperUsed) { + code += "var _sync = true;\n"; + code += "function _error(_err) {\n"; + code += "if(_sync)\n"; + code += + "_resolve(Promise.resolve().then((function() { throw _err; })));\n"; + code += "else\n"; + code += "_reject(_err);\n"; + code += "};\n"; + } + code += content; + if (errorHelperUsed) { + code += "_sync = false;\n"; + } + code += "}));\n"; + fn = new Function(this.args(), code); + break; + } + this.deinit(); + return fn; + } - // - var splittedResource = resource && parsePathQueryFragment(resource); - var resourcePath = splittedResource ? splittedResource.path : undefined; - var resourceQuery = splittedResource ? splittedResource.query : undefined; - var resourceFragment = splittedResource ? splittedResource.fragment : undefined; - var contextDirectory = resourcePath ? dirname(resourcePath) : null; + setup(instance, options) { + instance._x = options.taps.map(t => t.fn); + } - // execution state - var requestCacheable = true; - var fileDependencies = []; - var contextDependencies = []; - var missingDependencies = []; + /** + * @param {{ type: "sync" | "promise" | "async", taps: Array, interceptors: Array }} options + */ + init(options) { + this.options = options; + this._args = options.args.slice(); + } - // prepare loader objects - loaders = loaders.map(createLoaderObject); + deinit() { + this.options = undefined; + this._args = undefined; + } - loaderContext.context = contextDirectory; - loaderContext.loaderIndex = 0; - loaderContext.loaders = loaders; - loaderContext.resourcePath = resourcePath; - loaderContext.resourceQuery = resourceQuery; - loaderContext.resourceFragment = resourceFragment; - loaderContext.async = null; - loaderContext.callback = null; - loaderContext.cacheable = function cacheable(flag) { - if(flag === false) { - requestCacheable = false; + contentWithInterceptors(options) { + if (this.options.interceptors.length > 0) { + const onError = options.onError; + const onResult = options.onResult; + const onDone = options.onDone; + let code = ""; + for (let i = 0; i < this.options.interceptors.length; i++) { + const interceptor = this.options.interceptors[i]; + if (interceptor.call) { + code += `${this.getInterceptor(i)}.call(${this.args({ + before: interceptor.context ? 
"_context" : undefined + })});\n`; + } + } + code += this.content( + Object.assign(options, { + onError: + onError && + (err => { + let code = ""; + for (let i = 0; i < this.options.interceptors.length; i++) { + const interceptor = this.options.interceptors[i]; + if (interceptor.error) { + code += `${this.getInterceptor(i)}.error(${err});\n`; + } + } + code += onError(err); + return code; + }), + onResult: + onResult && + (result => { + let code = ""; + for (let i = 0; i < this.options.interceptors.length; i++) { + const interceptor = this.options.interceptors[i]; + if (interceptor.result) { + code += `${this.getInterceptor(i)}.result(${result});\n`; + } + } + code += onResult(result); + return code; + }), + onDone: + onDone && + (() => { + let code = ""; + for (let i = 0; i < this.options.interceptors.length; i++) { + const interceptor = this.options.interceptors[i]; + if (interceptor.done) { + code += `${this.getInterceptor(i)}.done();\n`; + } + } + code += onDone(); + return code; + }) + }) + ); + return code; + } else { + return this.content(options); } - }; - loaderContext.dependency = loaderContext.addDependency = function addDependency(file) { - fileDependencies.push(file); - }; - loaderContext.addContextDependency = function addContextDependency(context) { - contextDependencies.push(context); - }; - loaderContext.addMissingDependency = function addMissingDependency(context) { - missingDependencies.push(context); - }; - loaderContext.getDependencies = function getDependencies() { - return fileDependencies.slice(); - }; - loaderContext.getContextDependencies = function getContextDependencies() { - return contextDependencies.slice(); - }; - loaderContext.getMissingDependencies = function getMissingDependencies() { - return missingDependencies.slice(); - }; - loaderContext.clearDependencies = function clearDependencies() { - fileDependencies.length = 0; - contextDependencies.length = 0; - missingDependencies.length = 0; - requestCacheable = true; - }; - Object.defineProperty(loaderContext, "resource", { - enumerable: true, - get: function() { - if(loaderContext.resourcePath === undefined) - return undefined; - return loaderContext.resourcePath.replace(/#/g, "\0#") + loaderContext.resourceQuery.replace(/#/g, "\0#") + loaderContext.resourceFragment; - }, - set: function(value) { - var splittedResource = value && parsePathQueryFragment(value); - loaderContext.resourcePath = splittedResource ? splittedResource.path : undefined; - loaderContext.resourceQuery = splittedResource ? splittedResource.query : undefined; - loaderContext.resourceFragment = splittedResource ? 
splittedResource.fragment : undefined; + } + + header() { + let code = ""; + if (this.needContext()) { + code += "var _context = {};\n"; + } else { + code += "var _context;\n"; } - }); - Object.defineProperty(loaderContext, "request", { - enumerable: true, - get: function() { - return loaderContext.loaders.map(function(o) { - return o.request; - }).concat(loaderContext.resource || "").join("!"); + code += "var _x = this._x;\n"; + if (this.options.interceptors.length > 0) { + code += "var _taps = this.taps;\n"; + code += "var _interceptors = this.interceptors;\n"; } - }); - Object.defineProperty(loaderContext, "remainingRequest", { - enumerable: true, - get: function() { - if(loaderContext.loaderIndex >= loaderContext.loaders.length - 1 && !loaderContext.resource) - return ""; - return loaderContext.loaders.slice(loaderContext.loaderIndex + 1).map(function(o) { - return o.request; - }).concat(loaderContext.resource || "").join("!"); + return code; + } + + needContext() { + for (const tap of this.options.taps) if (tap.context) return true; + return false; + } + + callTap(tapIndex, { onError, onResult, onDone, rethrowIfPossible }) { + let code = ""; + let hasTapCached = false; + for (let i = 0; i < this.options.interceptors.length; i++) { + const interceptor = this.options.interceptors[i]; + if (interceptor.tap) { + if (!hasTapCached) { + code += `var _tap${tapIndex} = ${this.getTap(tapIndex)};\n`; + hasTapCached = true; + } + code += `${this.getInterceptor(i)}.tap(${ + interceptor.context ? "_context, " : "" + }_tap${tapIndex});\n`; + } } - }); - Object.defineProperty(loaderContext, "currentRequest", { - enumerable: true, - get: function() { - return loaderContext.loaders.slice(loaderContext.loaderIndex).map(function(o) { - return o.request; - }).concat(loaderContext.resource || "").join("!"); + code += `var _fn${tapIndex} = ${this.getTapFn(tapIndex)};\n`; + const tap = this.options.taps[tapIndex]; + switch (tap.type) { + case "sync": + if (!rethrowIfPossible) { + code += `var _hasError${tapIndex} = false;\n`; + code += "try {\n"; + } + if (onResult) { + code += `var _result${tapIndex} = _fn${tapIndex}(${this.args({ + before: tap.context ? "_context" : undefined + })});\n`; + } else { + code += `_fn${tapIndex}(${this.args({ + before: tap.context ? "_context" : undefined + })});\n`; + } + if (!rethrowIfPossible) { + code += "} catch(_err) {\n"; + code += `_hasError${tapIndex} = true;\n`; + code += onError("_err"); + code += "}\n"; + code += `if(!_hasError${tapIndex}) {\n`; + } + if (onResult) { + code += onResult(`_result${tapIndex}`); + } + if (onDone) { + code += onDone(); + } + if (!rethrowIfPossible) { + code += "}\n"; + } + break; + case "async": + let cbCode = ""; + if (onResult) + cbCode += `(function(_err${tapIndex}, _result${tapIndex}) {\n`; + else cbCode += `(function(_err${tapIndex}) {\n`; + cbCode += `if(_err${tapIndex}) {\n`; + cbCode += onError(`_err${tapIndex}`); + cbCode += "} else {\n"; + if (onResult) { + cbCode += onResult(`_result${tapIndex}`); + } + if (onDone) { + cbCode += onDone(); + } + cbCode += "}\n"; + cbCode += "})"; + code += `_fn${tapIndex}(${this.args({ + before: tap.context ? "_context" : undefined, + after: cbCode + })});\n`; + break; + case "promise": + code += `var _hasResult${tapIndex} = false;\n`; + code += `var _promise${tapIndex} = _fn${tapIndex}(${this.args({ + before: tap.context ? 
"_context" : undefined + })});\n`; + code += `if (!_promise${tapIndex} || !_promise${tapIndex}.then)\n`; + code += ` throw new Error('Tap function (tapPromise) did not return promise (returned ' + _promise${tapIndex} + ')');\n`; + code += `_promise${tapIndex}.then((function(_result${tapIndex}) {\n`; + code += `_hasResult${tapIndex} = true;\n`; + if (onResult) { + code += onResult(`_result${tapIndex}`); + } + if (onDone) { + code += onDone(); + } + code += `}), function(_err${tapIndex}) {\n`; + code += `if(_hasResult${tapIndex}) throw _err${tapIndex};\n`; + code += onError(`_err${tapIndex}`); + code += "});\n"; + break; } - }); - Object.defineProperty(loaderContext, "previousRequest", { - enumerable: true, - get: function() { - return loaderContext.loaders.slice(0, loaderContext.loaderIndex).map(function(o) { - return o.request; - }).join("!"); + return code; + } + + callTapsSeries({ + onError, + onResult, + resultReturns, + onDone, + doneReturns, + rethrowIfPossible + }) { + if (this.options.taps.length === 0) return onDone(); + const firstAsync = this.options.taps.findIndex(t => t.type !== "sync"); + const somethingReturns = resultReturns || doneReturns; + let code = ""; + let current = onDone; + let unrollCounter = 0; + for (let j = this.options.taps.length - 1; j >= 0; j--) { + const i = j; + const unroll = + current !== onDone && + (this.options.taps[i].type !== "sync" || unrollCounter++ > 20); + if (unroll) { + unrollCounter = 0; + code += `function _next${i}() {\n`; + code += current(); + code += `}\n`; + current = () => `${somethingReturns ? "return " : ""}_next${i}();\n`; + } + const done = current; + const doneBreak = skipDone => { + if (skipDone) return ""; + return onDone(); + }; + const content = this.callTap(i, { + onError: error => onError(i, error, done, doneBreak), + onResult: + onResult && + (result => { + return onResult(i, result, done, doneBreak); + }), + onDone: !onResult && done, + rethrowIfPossible: + rethrowIfPossible && (firstAsync < 0 || i < firstAsync) + }); + current = () => content; } - }); - Object.defineProperty(loaderContext, "query", { - enumerable: true, - get: function() { - var entry = loaderContext.loaders[loaderContext.loaderIndex]; - return entry.options && typeof entry.options === "object" ? entry.options : entry.query; + code += current(); + return code; + } + + callTapsLooping({ onError, onDone, rethrowIfPossible }) { + if (this.options.taps.length === 0) return onDone(); + const syncOnly = this.options.taps.every(t => t.type === "sync"); + let code = ""; + if (!syncOnly) { + code += "var _looper = (function() {\n"; + code += "var _loopAsync = false;\n"; + } + code += "var _loop;\n"; + code += "do {\n"; + code += "_loop = false;\n"; + for (let i = 0; i < this.options.interceptors.length; i++) { + const interceptor = this.options.interceptors[i]; + if (interceptor.loop) { + code += `${this.getInterceptor(i)}.loop(${this.args({ + before: interceptor.context ? 
"_context" : undefined + })});\n`; + } } - }); - Object.defineProperty(loaderContext, "data", { - enumerable: true, - get: function() { - return loaderContext.loaders[loaderContext.loaderIndex].data; + code += this.callTapsSeries({ + onError, + onResult: (i, result, next, doneBreak) => { + let code = ""; + code += `if(${result} !== undefined) {\n`; + code += "_loop = true;\n"; + if (!syncOnly) code += "if(_loopAsync) _looper();\n"; + code += doneBreak(true); + code += `} else {\n`; + code += next(); + code += `}\n`; + return code; + }, + onDone: + onDone && + (() => { + let code = ""; + code += "if(!_loop) {\n"; + code += onDone(); + code += "}\n"; + return code; + }), + rethrowIfPossible: rethrowIfPossible && syncOnly + }); + code += "} while(_loop);\n"; + if (!syncOnly) { + code += "_loopAsync = true;\n"; + code += "});\n"; + code += "_looper();\n"; } - }); - - // finish loader context - if(Object.preventExtensions) { - Object.preventExtensions(loaderContext); + return code; } - var processOptions = { - resourceBuffer: null, - processResource: processResource - }; - iteratePitchingLoaders(processOptions, loaderContext, function(err, result) { - if(err) { - return callback(err, { - cacheable: requestCacheable, - fileDependencies: fileDependencies, - contextDependencies: contextDependencies, - missingDependencies: missingDependencies + callTapsParallel({ + onError, + onResult, + onDone, + rethrowIfPossible, + onTap = (i, run) => run() + }) { + if (this.options.taps.length <= 1) { + return this.callTapsSeries({ + onError, + onResult, + onDone, + rethrowIfPossible }); } - callback(null, { - result: result, - resourceBuffer: processOptions.resourceBuffer, - cacheable: requestCacheable, - fileDependencies: fileDependencies, - contextDependencies: contextDependencies, - missingDependencies: missingDependencies - }); - }); -}; - - -/***/ }), - -/***/ 63395: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -var LoaderLoadingError = __webpack_require__(83798); -var url; - -module.exports = function loadLoader(loader, callback) { - if(loader.type === "module") { - try { - if(url === undefined) url = __webpack_require__(78835); - var loaderUrl = url.pathToFileURL(loader.path); - var modulePromise = eval("import(" + JSON.stringify(loaderUrl.toString()) + ")"); - modulePromise.then(function(module) { - handleResult(loader, module, callback); - }, callback); - return; - } catch(e) { - callback(e); + let code = ""; + code += "do {\n"; + code += `var _counter = ${this.options.taps.length};\n`; + if (onDone) { + code += "var _done = (function() {\n"; + code += onDone(); + code += "});\n"; } - } else { - try { - var module = require(loader.path); - } catch(e) { - // it is possible for node to choke on a require if the FD descriptor - // limit has been reached. give it a chance to recover. 
- if(e instanceof Error && e.code === "EMFILE") { - var retry = loadLoader.bind(null, loader, callback); - if(typeof setImmediate === "function") { - // node >= 0.9.0 - return setImmediate(retry); - } else { - // node < 0.9.0 - return process.nextTick(retry); - } - } - return callback(e); + for (let i = 0; i < this.options.taps.length; i++) { + const done = () => { + if (onDone) return "if(--_counter === 0) _done();\n"; + else return "--_counter;"; + }; + const doneBreak = skipDone => { + if (skipDone || !onDone) return "_counter = 0;\n"; + else return "_counter = 0;\n_done();\n"; + }; + code += "if(_counter <= 0) break;\n"; + code += onTap( + i, + () => + this.callTap(i, { + onError: error => { + let code = ""; + code += "if(_counter > 0) {\n"; + code += onError(i, error, done, doneBreak); + code += "}\n"; + return code; + }, + onResult: + onResult && + (result => { + let code = ""; + code += "if(_counter > 0) {\n"; + code += onResult(i, result, done, doneBreak); + code += "}\n"; + return code; + }), + onDone: + !onResult && + (() => { + return done(); + }), + rethrowIfPossible + }), + done, + doneBreak + ); } - return handleResult(loader, module, callback); + code += "} while(false);\n"; + return code; } -}; -function handleResult(loader, module, callback) { - if(typeof module !== "function" && typeof module !== "object") { - return callback(new LoaderLoadingError( - "Module '" + loader.path + "' is not a loader (export function or es6 module)" - )); - } - loader.normal = typeof module === "function" ? module : module.default; - loader.pitch = module.pitch; - loader.raw = module.raw; - if(typeof loader.normal !== "function" && typeof loader.pitch !== "function") { - return callback(new LoaderLoadingError( - "Module '" + loader.path + "' is not a loader (must have normal or pitch function)" - )); + args({ before, after } = {}) { + let allArgs = this._args; + if (before) allArgs = [before].concat(allArgs); + if (after) allArgs = allArgs.concat(after); + if (allArgs.length === 0) { + return ""; + } else { + return allArgs.join(", "); + } } - callback(); -} - - -/***/ }), - -/***/ 11060: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { - -/*! - * mime-db - * Copyright(c) 2014 Jonathan Ong - * MIT Licensed - */ - -/** - * Module exports. - */ - -module.exports = __webpack_require__(53243) - - -/***/ }), - -/***/ 19338: -/***/ (function(__unused_webpack_module, exports, __webpack_require__) { - -"use strict"; -/*! - * mime-types - * Copyright(c) 2014 Jonathan Ong - * Copyright(c) 2015 Douglas Christopher Wilson - * MIT Licensed - */ - - - -/** - * Module dependencies. - * @private - */ - -var db = __webpack_require__(11060) -var extname = __webpack_require__(85622).extname - -/** - * Module variables. - * @private - */ - -var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ -var TEXT_TYPE_REGEXP = /^text\//i - -/** - * Module exports. - * @public - */ - -exports.charset = charset -exports.charsets = { lookup: charset } -exports.contentType = contentType -exports.extension = extension -exports.extensions = Object.create(null) -exports.lookup = lookup -exports.types = Object.create(null) - -// Populate the extensions/types maps -populateMaps(exports.extensions, exports.types) - -/** - * Get the default charset for a MIME type. 
- * - * @param {string} type - * @return {boolean|string} - */ - -function charset (type) { - if (!type || typeof type !== 'string') { - return false - } - - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - var mime = match && db[match[1].toLowerCase()] - - if (mime && mime.charset) { - return mime.charset - } - - // default text/* to utf-8 - if (match && TEXT_TYPE_REGEXP.test(match[1])) { - return 'UTF-8' - } - - return false -} - -/** - * Create a full Content-Type header given a MIME type or extension. - * - * @param {string} str - * @return {boolean|string} - */ - -function contentType (str) { - // TODO: should this even be in this module? - if (!str || typeof str !== 'string') { - return false - } - - var mime = str.indexOf('/') === -1 - ? exports.lookup(str) - : str - - if (!mime) { - return false - } - - // TODO: use content-type or other module - if (mime.indexOf('charset') === -1) { - var charset = exports.charset(mime) - if (charset) mime += '; charset=' + charset.toLowerCase() - } - - return mime -} - -/** - * Get the default extension for a MIME type. - * - * @param {string} type - * @return {boolean|string} - */ -function extension (type) { - if (!type || typeof type !== 'string') { - return false - } - - // TODO: use media-typer - var match = EXTRACT_TYPE_REGEXP.exec(type) - - // get extensions - var exts = match && exports.extensions[match[1].toLowerCase()] - - if (!exts || !exts.length) { - return false - } - - return exts[0] -} - -/** - * Lookup the MIME type for a file path/extension. - * - * @param {string} path - * @return {boolean|string} - */ - -function lookup (path) { - if (!path || typeof path !== 'string') { - return false - } - - // get the extension ("ext" or ".ext" or full path) - var extension = extname('x.' + path) - .toLowerCase() - .substr(1) + getTapFn(idx) { + return `_x[${idx}]`; + } - if (!extension) { - return false - } + getTap(idx) { + return `_taps[${idx}]`; + } - return exports.types[extension] || false + getInterceptor(idx) { + return `_interceptors[${idx}]`; + } } -/** - * Populate the extensions and types maps. 
- * @private - */ - -function populateMaps (extensions, types) { - // source preference (least -> most) - var preference = ['nginx', 'apache', undefined, 'iana'] - - Object.keys(db).forEach(function forEachMimeType (type) { - var mime = db[type] - var exts = mime.extensions - - if (!exts || !exts.length) { - return - } - - // mime -> extensions - extensions[type] = exts - - // extension -> mime - for (var i = 0; i < exts.length; i++) { - var extension = exts[i] - - if (types[extension]) { - var from = preference.indexOf(db[types[extension]].source) - var to = preference.indexOf(mime.source) - - if (types[extension] !== 'application/octet-stream' && - (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { - // skip the remapping - continue - } - } - - // set the extension -> mime - types[extension] = type - } - }) -} +module.exports = HookCodeFactory; /***/ }), -/***/ 28339: +/***/ 81647: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -142952,90 +141959,66 @@ function populateMaps (extensions, types) { */ -const Hook = __webpack_require__(36591); -const HookCodeFactory = __webpack_require__(85572); +const util = __webpack_require__(31669); -class AsyncParallelBailHookCodeFactory extends HookCodeFactory { - content({ onError, onResult, onDone }) { - let code = ""; - code += `var _results = new Array(${this.options.taps.length});\n`; - code += "var _checkDone = function() {\n"; - code += "for(var i = 0; i < _results.length; i++) {\n"; - code += "var item = _results[i];\n"; - code += "if(item === undefined) return false;\n"; - code += "if(item.result !== undefined) {\n"; - code += onResult("item.result"); - code += "return true;\n"; - code += "}\n"; - code += "if(item.error) {\n"; - code += onError("item.error"); - code += "return true;\n"; - code += "}\n"; - code += "}\n"; - code += "return false;\n"; - code += "}\n"; - code += this.callTapsParallel({ - onError: (i, err, done, doneBreak) => { - let code = ""; - code += `if(${i} < _results.length && ((_results.length = ${i + - 1}), (_results[${i}] = { error: ${err} }), _checkDone())) {\n`; - code += doneBreak(true); - code += "} else {\n"; - code += done(); - code += "}\n"; - return code; - }, - onResult: (i, result, done, doneBreak) => { - let code = ""; - code += `if(${i} < _results.length && (${result} !== undefined && (_results.length = ${i + - 1}), (_results[${i}] = { result: ${result} }), _checkDone())) {\n`; - code += doneBreak(true); - code += "} else {\n"; - code += done(); - code += "}\n"; - return code; - }, - onTap: (i, run, done, doneBreak) => { - let code = ""; - if (i > 0) { - code += `if(${i} >= _results.length) {\n`; - code += done(); - code += "} else {\n"; - } - code += run(); - if (i > 0) code += "}\n"; - return code; - }, - onDone - }); - return code; +const defaultFactory = (key, hook) => hook; + +class HookMap { + constructor(factory, name = undefined) { + this._map = new Map(); + this.name = name; + this._factory = factory; + this._interceptors = []; } -} -const factory = new AsyncParallelBailHookCodeFactory(); + get(key) { + return this._map.get(key); + } -const COMPILE = function(options) { - factory.setup(this, options); - return factory.create(options); -}; + for(key) { + const hook = this.get(key); + if (hook !== undefined) { + return hook; + } + let newHook = this._factory(key); + const interceptors = this._interceptors; + for (let i = 0; i < interceptors.length; i++) { + newHook = interceptors[i].factory(key, newHook); + } + 
this._map.set(key, newHook); + return newHook; + } -function AsyncParallelBailHook(args = [], name = undefined) { - const hook = new Hook(args, name); - hook.constructor = AsyncParallelBailHook; - hook.compile = COMPILE; - hook._call = undefined; - hook.call = undefined; - return hook; + intercept(interceptor) { + this._interceptors.push( + Object.assign( + { + factory: defaultFactory + }, + interceptor + ) + ); + } } -AsyncParallelBailHook.prototype = null; +HookMap.prototype.tap = util.deprecate(function(key, options, fn) { + return this.for(key).tap(options, fn); +}, "HookMap#tap(key,…) is deprecated. Use HookMap#for(key).tap(…) instead."); -module.exports = AsyncParallelBailHook; +HookMap.prototype.tapAsync = util.deprecate(function(key, options, fn) { + return this.for(key).tapAsync(options, fn); +}, "HookMap#tapAsync(key,…) is deprecated. Use HookMap#for(key).tapAsync(…) instead."); + +HookMap.prototype.tapPromise = util.deprecate(function(key, options, fn) { + return this.for(key).tapPromise(options, fn); +}, "HookMap#tapPromise(key,…) is deprecated. Use HookMap#for(key).tapPromise(…) instead."); + +module.exports = HookMap; /***/ }), -/***/ 82890: +/***/ 63708: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -143046,41 +142029,58 @@ module.exports = AsyncParallelBailHook; const Hook = __webpack_require__(36591); -const HookCodeFactory = __webpack_require__(85572); -class AsyncParallelHookCodeFactory extends HookCodeFactory { - content({ onError, onDone }) { - return this.callTapsParallel({ - onError: (i, err, done, doneBreak) => onError(err) + doneBreak(true), - onDone - }); +class MultiHook { + constructor(hooks, name = undefined) { + this.hooks = hooks; + this.name = name; } -} -const factory = new AsyncParallelHookCodeFactory(); + tap(options, fn) { + for (const hook of this.hooks) { + hook.tap(options, fn); + } + } -const COMPILE = function(options) { - factory.setup(this, options); - return factory.create(options); -}; + tapAsync(options, fn) { + for (const hook of this.hooks) { + hook.tapAsync(options, fn); + } + } -function AsyncParallelHook(args = [], name = undefined) { - const hook = new Hook(args, name); - hook.constructor = AsyncParallelHook; - hook.compile = COMPILE; - hook._call = undefined; - hook.call = undefined; - return hook; -} + tapPromise(options, fn) { + for (const hook of this.hooks) { + hook.tapPromise(options, fn); + } + } -AsyncParallelHook.prototype = null; + isUsed() { + for (const hook of this.hooks) { + if (hook.isUsed()) return true; + } + return false; + } -module.exports = AsyncParallelHook; + intercept(interceptor) { + for (const hook of this.hooks) { + hook.intercept(interceptor); + } + } + + withOptions(options) { + return new MultiHook( + this.hooks.map(h => h.withOptions(options)), + this.name + ); + } +} + +module.exports = MultiHook; /***/ }), -/***/ 97681: +/***/ 98034: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -143093,44 +142093,53 @@ module.exports = AsyncParallelHook; const Hook = __webpack_require__(36591); const HookCodeFactory = __webpack_require__(85572); -class AsyncSeriesBailHookCodeFactory extends HookCodeFactory { - content({ onError, onResult, resultReturns, onDone }) { +class SyncBailHookCodeFactory extends HookCodeFactory { + content({ onError, onResult, resultReturns, onDone, rethrowIfPossible }) { return this.callTapsSeries({ - onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true), + onError: (i, err) => onError(err), 
onResult: (i, result, next) => `if(${result} !== undefined) {\n${onResult( result - )}\n} else {\n${next()}}\n`, + )};\n} else {\n${next()}}\n`, resultReturns, - onDone + onDone, + rethrowIfPossible }); } } -const factory = new AsyncSeriesBailHookCodeFactory(); +const factory = new SyncBailHookCodeFactory(); + +const TAP_ASYNC = () => { + throw new Error("tapAsync is not supported on a SyncBailHook"); +}; + +const TAP_PROMISE = () => { + throw new Error("tapPromise is not supported on a SyncBailHook"); +}; const COMPILE = function(options) { factory.setup(this, options); return factory.create(options); }; -function AsyncSeriesBailHook(args = [], name = undefined) { +function SyncBailHook(args = [], name = undefined) { const hook = new Hook(args, name); - hook.constructor = AsyncSeriesBailHook; + hook.constructor = SyncBailHook; + hook.tapAsync = TAP_ASYNC; + hook.tapPromise = TAP_PROMISE; hook.compile = COMPILE; - hook._call = undefined; - hook.call = undefined; return hook; } -AsyncSeriesBailHook.prototype = null; +SyncBailHook.prototype = null; -module.exports = AsyncSeriesBailHook; +module.exports = SyncBailHook; /***/ }), -/***/ 45146: +/***/ 35079: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -143143,39 +142152,48 @@ module.exports = AsyncSeriesBailHook; const Hook = __webpack_require__(36591); const HookCodeFactory = __webpack_require__(85572); -class AsyncSeriesHookCodeFactory extends HookCodeFactory { - content({ onError, onDone }) { +class SyncHookCodeFactory extends HookCodeFactory { + content({ onError, onDone, rethrowIfPossible }) { return this.callTapsSeries({ - onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true), - onDone + onError: (i, err) => onError(err), + onDone, + rethrowIfPossible }); } } -const factory = new AsyncSeriesHookCodeFactory(); +const factory = new SyncHookCodeFactory(); + +const TAP_ASYNC = () => { + throw new Error("tapAsync is not supported on a SyncHook"); +}; + +const TAP_PROMISE = () => { + throw new Error("tapPromise is not supported on a SyncHook"); +}; const COMPILE = function(options) { factory.setup(this, options); return factory.create(options); }; -function AsyncSeriesHook(args = [], name = undefined) { +function SyncHook(args = [], name = undefined) { const hook = new Hook(args, name); - hook.constructor = AsyncSeriesHook; + hook.constructor = SyncHook; + hook.tapAsync = TAP_ASYNC; + hook.tapPromise = TAP_PROMISE; hook.compile = COMPILE; - hook._call = undefined; - hook.call = undefined; return hook; } -AsyncSeriesHook.prototype = null; +SyncHook.prototype = null; -module.exports = AsyncSeriesHook; +module.exports = SyncHook; /***/ }), -/***/ 5463: +/***/ 66953: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -143188,39 +142206,48 @@ module.exports = AsyncSeriesHook; const Hook = __webpack_require__(36591); const HookCodeFactory = __webpack_require__(85572); -class AsyncSeriesLoopHookCodeFactory extends HookCodeFactory { - content({ onError, onDone }) { +class SyncLoopHookCodeFactory extends HookCodeFactory { + content({ onError, onDone, rethrowIfPossible }) { return this.callTapsLooping({ - onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true), - onDone + onError: (i, err) => onError(err), + onDone, + rethrowIfPossible }); } } -const factory = new AsyncSeriesLoopHookCodeFactory(); +const factory = new SyncLoopHookCodeFactory(); + +const TAP_ASYNC = () => { + throw new Error("tapAsync is not supported on a SyncLoopHook"); +}; + 
+const TAP_PROMISE = () => { + throw new Error("tapPromise is not supported on a SyncLoopHook"); +}; const COMPILE = function(options) { factory.setup(this, options); return factory.create(options); }; -function AsyncSeriesLoopHook(args = [], name = undefined) { +function SyncLoopHook(args = [], name = undefined) { const hook = new Hook(args, name); - hook.constructor = AsyncSeriesLoopHook; + hook.constructor = SyncLoopHook; + hook.tapAsync = TAP_ASYNC; + hook.tapPromise = TAP_PROMISE; hook.compile = COMPILE; - hook._call = undefined; - hook.call = undefined; return hook; } -AsyncSeriesLoopHook.prototype = null; +SyncLoopHook.prototype = null; -module.exports = AsyncSeriesLoopHook; +module.exports = SyncLoopHook; /***/ }), -/***/ 73448: +/***/ 92878: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -143233,10 +142260,10 @@ module.exports = AsyncSeriesLoopHook; const Hook = __webpack_require__(36591); const HookCodeFactory = __webpack_require__(85572); -class AsyncSeriesWaterfallHookCodeFactory extends HookCodeFactory { - content({ onError, onResult, onDone }) { +class SyncWaterfallHookCodeFactory extends HookCodeFactory { + content({ onError, onResult, resultReturns, rethrowIfPossible }) { return this.callTapsSeries({ - onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true), + onError: (i, err) => onError(err), onResult: (i, result, next) => { let code = ""; code += `if(${result} !== undefined) {\n`; @@ -143245,37 +142272,74 @@ class AsyncSeriesWaterfallHookCodeFactory extends HookCodeFactory { code += next(); return code; }, - onDone: () => onResult(this._args[0]) + onDone: () => onResult(this._args[0]), + doneReturns: resultReturns, + rethrowIfPossible }); } } -const factory = new AsyncSeriesWaterfallHookCodeFactory(); +const factory = new SyncWaterfallHookCodeFactory(); + +const TAP_ASYNC = () => { + throw new Error("tapAsync is not supported on a SyncWaterfallHook"); +}; + +const TAP_PROMISE = () => { + throw new Error("tapPromise is not supported on a SyncWaterfallHook"); +}; const COMPILE = function(options) { factory.setup(this, options); return factory.create(options); }; -function AsyncSeriesWaterfallHook(args = [], name = undefined) { +function SyncWaterfallHook(args = [], name = undefined) { if (args.length < 1) throw new Error("Waterfall hooks must have at least one argument"); const hook = new Hook(args, name); - hook.constructor = AsyncSeriesWaterfallHook; + hook.constructor = SyncWaterfallHook; + hook.tapAsync = TAP_ASYNC; + hook.tapPromise = TAP_PROMISE; hook.compile = COMPILE; - hook._call = undefined; - hook.call = undefined; return hook; } -AsyncSeriesWaterfallHook.prototype = null; +SyncWaterfallHook.prototype = null; -module.exports = AsyncSeriesWaterfallHook; +module.exports = SyncWaterfallHook; /***/ }), -/***/ 36591: +/***/ 34718: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + +exports.__esModule = true; +exports.SyncHook = __webpack_require__(35079); +exports.SyncBailHook = __webpack_require__(98034); +exports.SyncWaterfallHook = __webpack_require__(92878); +exports.SyncLoopHook = __webpack_require__(66953); +exports.AsyncParallelHook = __webpack_require__(82890); +exports.AsyncParallelBailHook = __webpack_require__(28339); +exports.AsyncSeriesHook = __webpack_require__(45146); +exports.AsyncSeriesBailHook = __webpack_require__(97681); 
+exports.AsyncSeriesLoopHook = __webpack_require__(5463); +exports.AsyncSeriesWaterfallHook = __webpack_require__(73448); +exports.HookMap = __webpack_require__(81647); +exports.MultiHook = __webpack_require__(63708); + + +/***/ }), + +/***/ 39144: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -143285,656 +142349,917 @@ module.exports = AsyncSeriesWaterfallHook; */ -const util = __webpack_require__(31669); +const EventEmitter = __webpack_require__(28614).EventEmitter; +const fs = __webpack_require__(82161); +const path = __webpack_require__(85622); -const deprecateContext = util.deprecate(() => {}, -"Hook.context is deprecated and will be removed"); +const watchEventSource = __webpack_require__(10547); -const CALL_DELEGATE = function(...args) { - this.call = this._createCall("sync"); - return this.call(...args); -}; -const CALL_ASYNC_DELEGATE = function(...args) { - this.callAsync = this._createCall("async"); - return this.callAsync(...args); -}; -const PROMISE_DELEGATE = function(...args) { - this.promise = this._createCall("promise"); - return this.promise(...args); -}; +const EXISTANCE_ONLY_TIME_ENTRY = Object.freeze({}); -class Hook { - constructor(args = [], name = undefined) { - this._args = args; - this.name = name; - this.taps = []; - this.interceptors = []; - this._call = CALL_DELEGATE; - this.call = CALL_DELEGATE; - this._callAsync = CALL_ASYNC_DELEGATE; - this.callAsync = CALL_ASYNC_DELEGATE; - this._promise = PROMISE_DELEGATE; - this.promise = PROMISE_DELEGATE; - this._x = undefined; +let FS_ACCURACY = 1000; - this.compile = this.compile; - this.tap = this.tap; - this.tapAsync = this.tapAsync; - this.tapPromise = this.tapPromise; +const IS_OSX = __webpack_require__(12087).platform() === "darwin"; +const WATCHPACK_POLLING = process.env.WATCHPACK_POLLING; +const FORCE_POLLING = + `${+WATCHPACK_POLLING}` === WATCHPACK_POLLING + ? 
+WATCHPACK_POLLING + : !!WATCHPACK_POLLING && WATCHPACK_POLLING !== "false"; + +function withoutCase(str) { + return str.toLowerCase(); +} + +function needCalls(times, callback) { + return function() { + if (--times === 0) { + return callback(); + } + }; +} + +class Watcher extends EventEmitter { + constructor(directoryWatcher, filePath, startTime) { + super(); + this.directoryWatcher = directoryWatcher; + this.path = filePath; + this.startTime = startTime && +startTime; + this._cachedTimeInfoEntries = undefined; } - compile(options) { - throw new Error("Abstract: should be overridden"); + checkStartTime(mtime, initial) { + const startTime = this.startTime; + if (typeof startTime !== "number") return !initial; + return startTime <= mtime; } - _createCall(type) { - return this.compile({ - taps: this.taps, - interceptors: this.interceptors, - args: this._args, - type: type - }); + close() { + this.emit("closed"); } +} - _tap(type, options, fn) { - if (typeof options === "string") { - options = { - name: options.trim() - }; - } else if (typeof options !== "object" || options === null) { - throw new Error("Invalid tap options"); +class DirectoryWatcher extends EventEmitter { + constructor(watcherManager, directoryPath, options) { + super(); + if (FORCE_POLLING) { + options.poll = FORCE_POLLING; } - if (typeof options.name !== "string" || options.name === "") { - throw new Error("Missing name for tap"); + this.watcherManager = watcherManager; + this.options = options; + this.path = directoryPath; + // safeTime is the point in time after which reading is safe to be unchanged + // timestamp is a value that should be compared with another timestamp (mtime) + /** @type {Map} */ + this.filesWithoutCase = new Map(); + this.directories = new Map(); + this.lastWatchEvent = 0; + this.initialScan = true; + this.ignored = options.ignored; + this.nestedWatching = false; + this.polledWatching = + typeof options.poll === "number" + ? options.poll + : options.poll + ? 
5007 + : false; + this.timeout = undefined; + this.initialScanRemoved = new Set(); + this.initialScanFinished = undefined; + /** @type {Map>} */ + this.watchers = new Map(); + this.parentWatcher = null; + this.refs = 0; + this._activeEvents = new Map(); + this.closed = false; + this.scanning = false; + this.scanAgain = false; + this.scanAgainInitial = false; + + this.createWatcher(); + this.doScan(true); + } + + checkIgnore(path) { + if (!this.ignored) return false; + path = path.replace(/\\/g, "/"); + return this.ignored.test(path); + } + + createWatcher() { + try { + if (this.polledWatching) { + this.watcher = { + close: () => { + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = undefined; + } + } + }; + } else { + if (IS_OSX) { + this.watchInParentDirectory(); + } + this.watcher = watchEventSource.watch(this.path); + this.watcher.on("change", this.onWatchEvent.bind(this)); + this.watcher.on("error", this.onWatcherError.bind(this)); + } + } catch (err) { + this.onWatcherError(err); } - if (typeof options.context !== "undefined") { - deprecateContext(); + } + + forEachWatcher(path, fn) { + const watchers = this.watchers.get(withoutCase(path)); + if (watchers !== undefined) { + for (const w of watchers) { + fn(w); + } } - options = Object.assign({ type, fn }, options); - options = this._runRegisterInterceptors(options); - this._insert(options); } - tap(options, fn) { - this._tap("sync", options, fn); + setMissing(itemPath, initial, type) { + this._cachedTimeInfoEntries = undefined; + + if (this.initialScan) { + this.initialScanRemoved.add(itemPath); + } + + const oldDirectory = this.directories.get(itemPath); + if (oldDirectory) { + if (this.nestedWatching) oldDirectory.close(); + this.directories.delete(itemPath); + + this.forEachWatcher(itemPath, w => w.emit("remove", type)); + if (!initial) { + this.forEachWatcher(this.path, w => + w.emit("change", itemPath, null, type, initial) + ); + } + } + + const oldFile = this.files.get(itemPath); + if (oldFile) { + this.files.delete(itemPath); + const key = withoutCase(itemPath); + const count = this.filesWithoutCase.get(key) - 1; + if (count <= 0) { + this.filesWithoutCase.delete(key); + this.forEachWatcher(itemPath, w => w.emit("remove", type)); + } else { + this.filesWithoutCase.set(key, count); + } + + if (!initial) { + this.forEachWatcher(this.path, w => + w.emit("change", itemPath, null, type, initial) + ); + } + } } - tapAsync(options, fn) { - this._tap("async", options, fn); + setFileTime(filePath, mtime, initial, ignoreWhenEqual, type) { + const now = Date.now(); + + if (this.checkIgnore(filePath)) return; + + const old = this.files.get(filePath); + + let safeTime, accuracy; + if (initial) { + safeTime = Math.min(now, mtime) + FS_ACCURACY; + accuracy = FS_ACCURACY; + } else { + safeTime = now; + accuracy = 0; + + if (old && old.timestamp === mtime && mtime + FS_ACCURACY < now - 1000) { + // We are sure that mtime is untouched + // This can be caused by some file attribute change + // e. g. 
when access time has been changed + // but the file content is untouched + return; + } + } + + if (ignoreWhenEqual && old && old.timestamp === mtime) return; + + this.files.set(filePath, { + safeTime, + accuracy, + timestamp: mtime + }); + this._cachedTimeInfoEntries = undefined; + + if (!old) { + const key = withoutCase(filePath); + const count = this.filesWithoutCase.get(key); + this.filesWithoutCase.set(key, (count || 0) + 1); + if (count !== undefined) { + // There is already a file with case-insensitive-equal name + // On a case-insensitive filesystem we may miss the renaming + // when only casing is changed. + // To be sure that our information is correct + // we trigger a rescan here + this.doScan(false); + } + + this.forEachWatcher(filePath, w => { + if (!initial || w.checkStartTime(safeTime, initial)) { + w.emit("change", mtime, type); + } + }); + } else if (!initial) { + this.forEachWatcher(filePath, w => w.emit("change", mtime, type)); + } + this.forEachWatcher(this.path, w => { + if (!initial || w.checkStartTime(safeTime, initial)) { + w.emit("change", filePath, safeTime, type, initial); + } + }); } - tapPromise(options, fn) { - this._tap("promise", options, fn); - } + setDirectory(directoryPath, birthtime, initial, type) { + if (this.checkIgnore(directoryPath)) return; + if (directoryPath === this.path) { + if (!initial) { + this.forEachWatcher(this.path, w => + w.emit("change", directoryPath, birthtime, type, initial) + ); + } + } else { + const old = this.directories.get(directoryPath); + if (!old) { + const now = Date.now(); + + this._cachedTimeInfoEntries = undefined; + if (this.nestedWatching) { + this.createNestedWatcher(directoryPath); + } else { + this.directories.set(directoryPath, true); + } - _runRegisterInterceptors(options) { - for (const interceptor of this.interceptors) { - if (interceptor.register) { - const newOptions = interceptor.register(options); - if (newOptions !== undefined) { - options = newOptions; + let safeTime; + if (initial) { + safeTime = Math.min(now, birthtime) + FS_ACCURACY; + } else { + safeTime = now; } + + this.forEachWatcher(directoryPath, w => { + if (!initial || w.checkStartTime(safeTime, false)) { + w.emit("change", birthtime, type); + } + }); + this.forEachWatcher(this.path, w => { + if (!initial || w.checkStartTime(safeTime, initial)) { + w.emit("change", directoryPath, safeTime, type, initial); + } + }); } } - return options; - } - - withOptions(options) { - const mergeOptions = opt => - Object.assign({}, options, typeof opt === "string" ? 
{ name: opt } : opt); - - return { - name: this.name, - tap: (opt, fn) => this.tap(mergeOptions(opt), fn), - tapAsync: (opt, fn) => this.tapAsync(mergeOptions(opt), fn), - tapPromise: (opt, fn) => this.tapPromise(mergeOptions(opt), fn), - intercept: interceptor => this.intercept(interceptor), - isUsed: () => this.isUsed(), - withOptions: opt => this.withOptions(mergeOptions(opt)) - }; } - isUsed() { - return this.taps.length > 0 || this.interceptors.length > 0; + createNestedWatcher(directoryPath) { + const watcher = this.watcherManager.watchDirectory(directoryPath, 1); + watcher.on("change", (filePath, mtime, type, initial) => { + this._cachedTimeInfoEntries = undefined; + this.forEachWatcher(this.path, w => { + if (!initial || w.checkStartTime(mtime, initial)) { + w.emit("change", filePath, mtime, type, initial); + } + }); + }); + this.directories.set(directoryPath, watcher); } - intercept(interceptor) { - this._resetCompilation(); - this.interceptors.push(Object.assign({}, interceptor)); - if (interceptor.register) { - for (let i = 0; i < this.taps.length; i++) { - this.taps[i] = interceptor.register(this.taps[i]); + setNestedWatching(flag) { + if (this.nestedWatching !== !!flag) { + this.nestedWatching = !!flag; + this._cachedTimeInfoEntries = undefined; + if (this.nestedWatching) { + for (const directory of this.directories.keys()) { + this.createNestedWatcher(directory); + } + } else { + for (const [directory, watcher] of this.directories) { + watcher.close(); + this.directories.set(directory, true); + } } } } - _resetCompilation() { - this.call = this._call; - this.callAsync = this._callAsync; - this.promise = this._promise; - } - - _insert(item) { - this._resetCompilation(); - let before; - if (typeof item.before === "string") { - before = new Set([item.before]); - } else if (Array.isArray(item.before)) { - before = new Set(item.before); + watch(filePath, startTime) { + const key = withoutCase(filePath); + let watchers = this.watchers.get(key); + if (watchers === undefined) { + watchers = new Set(); + this.watchers.set(key, watchers); } - let stage = 0; - if (typeof item.stage === "number") { - stage = item.stage; + this.refs++; + const watcher = new Watcher(this, filePath, startTime); + watcher.on("closed", () => { + if (--this.refs <= 0) { + this.close(); + return; + } + watchers.delete(watcher); + if (watchers.size === 0) { + this.watchers.delete(key); + if (this.path === filePath) this.setNestedWatching(false); + } + }); + watchers.add(watcher); + let safeTime; + if (filePath === this.path) { + this.setNestedWatching(true); + safeTime = this.lastWatchEvent; + for (const entry of this.files.values()) { + fixupEntryAccuracy(entry); + safeTime = Math.max(safeTime, entry.safeTime); + } + } else { + const entry = this.files.get(filePath); + if (entry) { + fixupEntryAccuracy(entry); + safeTime = entry.safeTime; + } else { + safeTime = 0; + } } - let i = this.taps.length; - while (i > 0) { - i--; - const x = this.taps[i]; - this.taps[i + 1] = x; - const xStage = x.stage || 0; - if (before) { - if (before.has(x.name)) { - before.delete(x.name); - continue; - } - if (before.size > 0) { - continue; - } + if (safeTime) { + if (safeTime >= startTime) { + process.nextTick(() => { + if (this.closed) return; + if (filePath === this.path) { + watcher.emit( + "change", + filePath, + safeTime, + "watch (outdated on attach)", + true + ); + } else { + watcher.emit( + "change", + safeTime, + "watch (outdated on attach)", + true + ); + } + }); } - if (xStage > stage) { - continue; + } else if 
(this.initialScan) { + if (this.initialScanRemoved.has(filePath)) { + process.nextTick(() => { + if (this.closed) return; + watcher.emit("remove"); + }); } - i++; - break; + } else if ( + !this.directories.has(filePath) && + watcher.checkStartTime(this.initialScanFinished, false) + ) { + process.nextTick(() => { + if (this.closed) return; + watcher.emit("initial-missing", "watch (missing on attach)"); + }); } - this.taps[i] = item; + return watcher; } -} - -Object.setPrototypeOf(Hook.prototype, null); - -module.exports = Hook; - - -/***/ }), - -/***/ 85572: -/***/ (function(module) { - -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ + onWatchEvent(eventType, filename) { + if (this.closed) return; + if (!filename) { + // In some cases no filename is provided + // This seem to happen on windows + // So some event happened but we don't know which file is affected + // We have to do a full scan of the directory + this.doScan(false); + return; + } -class HookCodeFactory { - constructor(config) { - this.config = config; - this.options = undefined; - this._args = undefined; - } + const filePath = path.join(this.path, filename); + if (this.checkIgnore(filePath)) return; - create(options) { - this.init(options); - let fn; - switch (this.options.type) { - case "sync": - fn = new Function( - this.args(), - '"use strict";\n' + - this.header() + - this.contentWithInterceptors({ - onError: err => `throw ${err};\n`, - onResult: result => `return ${result};\n`, - resultReturns: true, - onDone: () => "", - rethrowIfPossible: true - }) - ); - break; - case "async": - fn = new Function( - this.args({ - after: "_callback" - }), - '"use strict";\n' + - this.header() + - this.contentWithInterceptors({ - onError: err => `_callback(${err});\n`, - onResult: result => `_callback(null, ${result});\n`, - onDone: () => "_callback();\n" - }) - ); - break; - case "promise": - let errorHelperUsed = false; - const content = this.contentWithInterceptors({ - onError: err => { - errorHelperUsed = true; - return `_error(${err});\n`; - }, - onResult: result => `_resolve(${result});\n`, - onDone: () => "_resolve();\n" + if (this._activeEvents.get(filename) === undefined) { + this._activeEvents.set(filename, false); + const checkStats = () => { + if (this.closed) return; + this._activeEvents.set(filename, false); + fs.lstat(filePath, (err, stats) => { + if (this.closed) return; + if (this._activeEvents.get(filename) === true) { + process.nextTick(checkStats); + return; + } + this._activeEvents.delete(filename); + // ENOENT happens when the file/directory doesn't exist + // EPERM happens when the containing directory doesn't exist + if (err) { + if ( + err.code !== "ENOENT" && + err.code !== "EPERM" && + err.code !== "EBUSY" + ) { + this.onStatsError(err); + } else { + if (filename === path.basename(this.path)) { + // This may indicate that the directory itself was removed + if (!fs.existsSync(this.path)) { + this.onDirectoryRemoved("stat failed"); + } + } + } + } + this.lastWatchEvent = Date.now(); + this._cachedTimeInfoEntries = undefined; + if (!stats) { + this.setMissing(filePath, false, eventType); + } else if (stats.isDirectory()) { + this.setDirectory( + filePath, + +stats.birthtime || 1, + false, + eventType + ); + } else if (stats.isFile() || stats.isSymbolicLink()) { + if (stats.mtime) { + ensureFsAccuracy(stats.mtime); + } + this.setFileTime( + filePath, + +stats.mtime || +stats.ctime || 1, + false, + false, + eventType + ); + } }); - let code = ""; 
- code += '"use strict";\n'; - code += this.header(); - code += "return new Promise((function(_resolve, _reject) {\n"; - if (errorHelperUsed) { - code += "var _sync = true;\n"; - code += "function _error(_err) {\n"; - code += "if(_sync)\n"; - code += - "_resolve(Promise.resolve().then((function() { throw _err; })));\n"; - code += "else\n"; - code += "_reject(_err);\n"; - code += "};\n"; - } - code += content; - if (errorHelperUsed) { - code += "_sync = false;\n"; - } - code += "}));\n"; - fn = new Function(this.args(), code); - break; + }; + process.nextTick(checkStats); + } else { + this._activeEvents.set(filename, true); } - this.deinit(); - return fn; } - setup(instance, options) { - instance._x = options.taps.map(t => t.fn); + onWatcherError(err) { + if (this.closed) return; + if (err) { + if (err.code !== "EPERM" && err.code !== "ENOENT") { + console.error("Watchpack Error (watcher): " + err); + } + this.onDirectoryRemoved("watch error"); + } } - /** - * @param {{ type: "sync" | "promise" | "async", taps: Array, interceptors: Array }} options - */ - init(options) { - this.options = options; - this._args = options.args.slice(); + onStatsError(err) { + if (err) { + console.error("Watchpack Error (stats): " + err); + } } - deinit() { - this.options = undefined; - this._args = undefined; + onScanError(err) { + if (err) { + console.error("Watchpack Error (initial scan): " + err); + } + this.onScanFinished(); } - contentWithInterceptors(options) { - if (this.options.interceptors.length > 0) { - const onError = options.onError; - const onResult = options.onResult; - const onDone = options.onDone; - let code = ""; - for (let i = 0; i < this.options.interceptors.length; i++) { - const interceptor = this.options.interceptors[i]; - if (interceptor.call) { - code += `${this.getInterceptor(i)}.call(${this.args({ - before: interceptor.context ? 
"_context" : undefined - })});\n`; - } - } - code += this.content( - Object.assign(options, { - onError: - onError && - (err => { - let code = ""; - for (let i = 0; i < this.options.interceptors.length; i++) { - const interceptor = this.options.interceptors[i]; - if (interceptor.error) { - code += `${this.getInterceptor(i)}.error(${err});\n`; - } - } - code += onError(err); - return code; - }), - onResult: - onResult && - (result => { - let code = ""; - for (let i = 0; i < this.options.interceptors.length; i++) { - const interceptor = this.options.interceptors[i]; - if (interceptor.result) { - code += `${this.getInterceptor(i)}.result(${result});\n`; - } - } - code += onResult(result); - return code; - }), - onDone: - onDone && - (() => { - let code = ""; - for (let i = 0; i < this.options.interceptors.length; i++) { - const interceptor = this.options.interceptors[i]; - if (interceptor.done) { - code += `${this.getInterceptor(i)}.done();\n`; - } - } - code += onDone(); - return code; - }) - }) - ); - return code; - } else { - return this.content(options); + onScanFinished() { + if (this.polledWatching) { + this.timeout = setTimeout(() => { + if (this.closed) return; + this.doScan(false); + }, this.polledWatching); } } - header() { - let code = ""; - if (this.needContext()) { - code += "var _context = {};\n"; - } else { - code += "var _context;\n"; + onDirectoryRemoved(reason) { + if (this.watcher) { + this.watcher.close(); + this.watcher = null; } - code += "var _x = this._x;\n"; - if (this.options.interceptors.length > 0) { - code += "var _taps = this.taps;\n"; - code += "var _interceptors = this.interceptors;\n"; + this.watchInParentDirectory(); + const type = `directory-removed (${reason})`; + for (const directory of this.directories.keys()) { + this.setMissing(directory, null, type); + } + for (const file of this.files.keys()) { + this.setMissing(file, null, type); } - return code; } - needContext() { - for (const tap of this.options.taps) if (tap.context) return true; - return false; - } + watchInParentDirectory() { + if (!this.parentWatcher) { + const parentDir = path.dirname(this.path); + // avoid watching in the root directory + // removing directories in the root directory is not supported + if (path.dirname(parentDir) === parentDir) return; - callTap(tapIndex, { onError, onResult, onDone, rethrowIfPossible }) { - let code = ""; - let hasTapCached = false; - for (let i = 0; i < this.options.interceptors.length; i++) { - const interceptor = this.options.interceptors[i]; - if (interceptor.tap) { - if (!hasTapCached) { - code += `var _tap${tapIndex} = ${this.getTap(tapIndex)};\n`; - hasTapCached = true; + this.parentWatcher = this.watcherManager.watchFile(this.path, 1); + this.parentWatcher.on("change", (mtime, type) => { + if (this.closed) return; + + // On non-osx platforms we don't need this watcher to detect + // directory removal, as an EPERM error indicates that + if ((!IS_OSX || this.polledWatching) && this.parentWatcher) { + this.parentWatcher.close(); + this.parentWatcher = null; } - code += `${this.getInterceptor(i)}.tap(${ - interceptor.context ? 
"_context, " : "" - }_tap${tapIndex});\n`; + // Try to create the watcher when parent directory is found + if (!this.watcher) { + this.createWatcher(); + this.doScan(false); + + // directory was created so we emit an event + this.forEachWatcher(this.path, w => + w.emit("change", this.path, mtime, type, false) + ); + } + }); + this.parentWatcher.on("remove", () => { + this.onDirectoryRemoved("parent directory removed"); + }); + } + } + + doScan(initial) { + if (this.scanning) { + if (this.scanAgain) { + if (!initial) this.scanAgainInitial = false; + } else { + this.scanAgain = true; + this.scanAgainInitial = initial; } + return; } - code += `var _fn${tapIndex} = ${this.getTapFn(tapIndex)};\n`; - const tap = this.options.taps[tapIndex]; - switch (tap.type) { - case "sync": - if (!rethrowIfPossible) { - code += `var _hasError${tapIndex} = false;\n`; - code += "try {\n"; - } - if (onResult) { - code += `var _result${tapIndex} = _fn${tapIndex}(${this.args({ - before: tap.context ? "_context" : undefined - })});\n`; - } else { - code += `_fn${tapIndex}(${this.args({ - before: tap.context ? "_context" : undefined - })});\n`; - } - if (!rethrowIfPossible) { - code += "} catch(_err) {\n"; - code += `_hasError${tapIndex} = true;\n`; - code += onError("_err"); - code += "}\n"; - code += `if(!_hasError${tapIndex}) {\n`; - } - if (onResult) { - code += onResult(`_result${tapIndex}`); + this.scanning = true; + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = undefined; + } + process.nextTick(() => { + if (this.closed) return; + fs.readdir(this.path, (err, items) => { + if (this.closed) return; + if (err) { + if (err.code === "ENOENT" || err.code === "EPERM") { + this.onDirectoryRemoved("scan readdir failed"); + } else { + this.onScanError(err); + } + this.initialScan = false; + this.initialScanFinished = Date.now(); + if (initial) { + for (const watchers of this.watchers.values()) { + for (const watcher of watchers) { + if (watcher.checkStartTime(this.initialScanFinished, false)) { + watcher.emit( + "initial-missing", + "scan (parent directory missing in initial scan)" + ); + } + } + } + } + if (this.scanAgain) { + this.scanAgain = false; + this.doScan(this.scanAgainInitial); + } else { + this.scanning = false; + } + return; } - if (onDone) { - code += onDone(); + const itemPaths = new Set( + items.map(item => path.join(this.path, item.normalize("NFC"))) + ); + for (const file of this.files.keys()) { + if (!itemPaths.has(file)) { + this.setMissing(file, initial, "scan (missing)"); + } } - if (!rethrowIfPossible) { - code += "}\n"; + for (const directory of this.directories.keys()) { + if (!itemPaths.has(directory)) { + this.setMissing(directory, initial, "scan (missing)"); + } } - break; - case "async": - let cbCode = ""; - if (onResult) - cbCode += `(function(_err${tapIndex}, _result${tapIndex}) {\n`; - else cbCode += `(function(_err${tapIndex}) {\n`; - cbCode += `if(_err${tapIndex}) {\n`; - cbCode += onError(`_err${tapIndex}`); - cbCode += "} else {\n"; - if (onResult) { - cbCode += onResult(`_result${tapIndex}`); + if (this.scanAgain) { + // Early repeat of scan + this.scanAgain = false; + this.doScan(initial); + return; } - if (onDone) { - cbCode += onDone(); + const itemFinished = needCalls(itemPaths.size + 1, () => { + if (this.closed) return; + this.initialScan = false; + this.initialScanRemoved = null; + this.initialScanFinished = Date.now(); + if (initial) { + const missingWatchers = new Map(this.watchers); + missingWatchers.delete(withoutCase(this.path)); + for (const item of 
itemPaths) { + missingWatchers.delete(withoutCase(item)); + } + for (const watchers of missingWatchers.values()) { + for (const watcher of watchers) { + if (watcher.checkStartTime(this.initialScanFinished, false)) { + watcher.emit( + "initial-missing", + "scan (missing in initial scan)" + ); + } + } + } + } + if (this.scanAgain) { + this.scanAgain = false; + this.doScan(this.scanAgainInitial); + } else { + this.scanning = false; + this.onScanFinished(); + } + }); + for (const itemPath of itemPaths) { + fs.lstat(itemPath, (err2, stats) => { + if (this.closed) return; + if (err2) { + if ( + err2.code === "ENOENT" || + err2.code === "EPERM" || + err2.code === "EBUSY" + ) { + this.setMissing(itemPath, initial, "scan (" + err2.code + ")"); + } else { + this.onScanError(err2); + } + itemFinished(); + return; + } + if (stats.isFile() || stats.isSymbolicLink()) { + if (stats.mtime) { + ensureFsAccuracy(stats.mtime); + } + this.setFileTime( + itemPath, + +stats.mtime || +stats.ctime || 1, + initial, + true, + "scan (file)" + ); + } else if (stats.isDirectory()) { + if (!initial || !this.directories.has(itemPath)) + this.setDirectory( + itemPath, + +stats.birthtime || 1, + initial, + "scan (dir)" + ); + } + itemFinished(); + }); } - cbCode += "}\n"; - cbCode += "})"; - code += `_fn${tapIndex}(${this.args({ - before: tap.context ? "_context" : undefined, - after: cbCode - })});\n`; - break; - case "promise": - code += `var _hasResult${tapIndex} = false;\n`; - code += `var _promise${tapIndex} = _fn${tapIndex}(${this.args({ - before: tap.context ? "_context" : undefined - })});\n`; - code += `if (!_promise${tapIndex} || !_promise${tapIndex}.then)\n`; - code += ` throw new Error('Tap function (tapPromise) did not return promise (returned ' + _promise${tapIndex} + ')');\n`; - code += `_promise${tapIndex}.then((function(_result${tapIndex}) {\n`; - code += `_hasResult${tapIndex} = true;\n`; - if (onResult) { - code += onResult(`_result${tapIndex}`); + itemFinished(); + }); + }); + } + + getTimes() { + const obj = Object.create(null); + let safeTime = this.lastWatchEvent; + for (const [file, entry] of this.files) { + fixupEntryAccuracy(entry); + safeTime = Math.max(safeTime, entry.safeTime); + obj[file] = Math.max(entry.safeTime, entry.timestamp); + } + if (this.nestedWatching) { + for (const w of this.directories.values()) { + const times = w.directoryWatcher.getTimes(); + for (const file of Object.keys(times)) { + const time = times[file]; + safeTime = Math.max(safeTime, time); + obj[file] = time; } - if (onDone) { - code += onDone(); + } + obj[this.path] = safeTime; + } + if (!this.initialScan) { + for (const watchers of this.watchers.values()) { + for (const watcher of watchers) { + const path = watcher.path; + if (!Object.prototype.hasOwnProperty.call(obj, path)) { + obj[path] = null; + } } - code += `}), function(_err${tapIndex}) {\n`; - code += `if(_hasResult${tapIndex}) throw _err${tapIndex};\n`; - code += onError(`_err${tapIndex}`); - code += "});\n"; - break; + } } - return code; + return obj; } - callTapsSeries({ - onError, - onResult, - resultReturns, - onDone, - doneReturns, - rethrowIfPossible - }) { - if (this.options.taps.length === 0) return onDone(); - const firstAsync = this.options.taps.findIndex(t => t.type !== "sync"); - const somethingReturns = resultReturns || doneReturns; - let code = ""; - let current = onDone; - let unrollCounter = 0; - for (let j = this.options.taps.length - 1; j >= 0; j--) { - const i = j; - const unroll = - current !== onDone && - (this.options.taps[i].type !== 
"sync" || unrollCounter++ > 20); - if (unroll) { - unrollCounter = 0; - code += `function _next${i}() {\n`; - code += current(); - code += `}\n`; - current = () => `${somethingReturns ? "return " : ""}_next${i}();\n`; + getTimeInfoEntries() { + if (this._cachedTimeInfoEntries !== undefined) + return this._cachedTimeInfoEntries; + const map = new Map(); + let safeTime = this.lastWatchEvent; + for (const [file, entry] of this.files) { + fixupEntryAccuracy(entry); + safeTime = Math.max(safeTime, entry.safeTime); + map.set(file, entry); + } + if (this.nestedWatching) { + for (const w of this.directories.values()) { + const timeInfoEntries = w.directoryWatcher.getTimeInfoEntries(); + for (const [file, entry] of timeInfoEntries) { + if (entry) { + safeTime = Math.max(safeTime, entry.safeTime); + } + map.set(file, entry); + } } - const done = current; - const doneBreak = skipDone => { - if (skipDone) return ""; - return onDone(); - }; - const content = this.callTap(i, { - onError: error => onError(i, error, done, doneBreak), - onResult: - onResult && - (result => { - return onResult(i, result, done, doneBreak); - }), - onDone: !onResult && done, - rethrowIfPossible: - rethrowIfPossible && (firstAsync < 0 || i < firstAsync) + map.set(this.path, { + safeTime }); - current = () => content; + } else { + for (const dir of this.directories.keys()) { + // No additional info about this directory + map.set(dir, EXISTANCE_ONLY_TIME_ENTRY); + } + map.set(this.path, EXISTANCE_ONLY_TIME_ENTRY); } - code += current(); - return code; + if (!this.initialScan) { + for (const watchers of this.watchers.values()) { + for (const watcher of watchers) { + const path = watcher.path; + if (!map.has(path)) { + map.set(path, null); + } + } + } + this._cachedTimeInfoEntries = map; + } + return map; } - callTapsLooping({ onError, onDone, rethrowIfPossible }) { - if (this.options.taps.length === 0) return onDone(); - const syncOnly = this.options.taps.every(t => t.type === "sync"); - let code = ""; - if (!syncOnly) { - code += "var _looper = (function() {\n"; - code += "var _loopAsync = false;\n"; + close() { + this.closed = true; + this.initialScan = false; + if (this.watcher) { + this.watcher.close(); + this.watcher = null; } - code += "var _loop;\n"; - code += "do {\n"; - code += "_loop = false;\n"; - for (let i = 0; i < this.options.interceptors.length; i++) { - const interceptor = this.options.interceptors[i]; - if (interceptor.loop) { - code += `${this.getInterceptor(i)}.loop(${this.args({ - before: interceptor.context ? 
"_context" : undefined - })});\n`; + if (this.nestedWatching) { + for (const w of this.directories.values()) { + w.close(); } + this.directories.clear(); } - code += this.callTapsSeries({ - onError, - onResult: (i, result, next, doneBreak) => { - let code = ""; - code += `if(${result} !== undefined) {\n`; - code += "_loop = true;\n"; - if (!syncOnly) code += "if(_loopAsync) _looper();\n"; - code += doneBreak(true); - code += `} else {\n`; - code += next(); - code += `}\n`; - return code; - }, - onDone: - onDone && - (() => { - let code = ""; - code += "if(!_loop) {\n"; - code += onDone(); - code += "}\n"; - return code; - }), - rethrowIfPossible: rethrowIfPossible && syncOnly - }); - code += "} while(_loop);\n"; - if (!syncOnly) { - code += "_loopAsync = true;\n"; - code += "});\n"; - code += "_looper();\n"; + if (this.parentWatcher) { + this.parentWatcher.close(); + this.parentWatcher = null; } - return code; + this.emit("closed"); } +} - callTapsParallel({ - onError, - onResult, - onDone, - rethrowIfPossible, - onTap = (i, run) => run() - }) { - if (this.options.taps.length <= 1) { - return this.callTapsSeries({ - onError, - onResult, - onDone, - rethrowIfPossible - }); +module.exports = DirectoryWatcher; +module.exports.EXISTANCE_ONLY_TIME_ENTRY = EXISTANCE_ONLY_TIME_ENTRY; + +function fixupEntryAccuracy(entry) { + if (entry.accuracy > FS_ACCURACY) { + entry.safeTime = entry.safeTime - entry.accuracy + FS_ACCURACY; + entry.accuracy = FS_ACCURACY; + } +} + +function ensureFsAccuracy(mtime) { + if (!mtime) return; + if (FS_ACCURACY > 1 && mtime % 1 !== 0) FS_ACCURACY = 1; + else if (FS_ACCURACY > 10 && mtime % 10 !== 0) FS_ACCURACY = 10; + else if (FS_ACCURACY > 100 && mtime % 100 !== 0) FS_ACCURACY = 100; +} + + +/***/ }), + +/***/ 56083: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +"use strict"; +/* + MIT License http://www.opensource.org/licenses/mit-license.php + Author Tobias Koppers @sokra +*/ + + +const fs = __webpack_require__(35747); +const path = __webpack_require__(85622); + +// macOS, Linux, and Windows all rely on these errors +const EXPECTED_ERRORS = new Set(["EINVAL", "ENOENT"]); + +// On Windows there is also this error in some cases +if (process.platform === "win32") EXPECTED_ERRORS.add("UNKNOWN"); + +class LinkResolver { + constructor() { + this.cache = new Map(); + } + + /** + * @param {string} file path to file or directory + * @returns {string[]} array of file and all symlinks contributed in the resolving process (first item is the resolved file) + */ + resolve(file) { + const cacheEntry = this.cache.get(file); + if (cacheEntry !== undefined) { + return cacheEntry; } - let code = ""; - code += "do {\n"; - code += `var _counter = ${this.options.taps.length};\n`; - if (onDone) { - code += "var _done = (function() {\n"; - code += onDone(); - code += "});\n"; + const parent = path.dirname(file); + if (parent === file) { + // At root of filesystem there can't be a link + const result = Object.freeze([file]); + this.cache.set(file, result); + return result; } - for (let i = 0; i < this.options.taps.length; i++) { - const done = () => { - if (onDone) return "if(--_counter === 0) _done();\n"; - else return "--_counter;"; - }; - const doneBreak = skipDone => { - if (skipDone || !onDone) return "_counter = 0;\n"; - else return "_counter = 0;\n_done();\n"; - }; - code += "if(_counter <= 0) break;\n"; - code += onTap( - i, - () => - this.callTap(i, { - onError: error => { - let code = ""; - code += "if(_counter > 0) {\n"; - code += onError(i, 
error, done, doneBreak); - code += "}\n"; - return code; - }, - onResult: - onResult && - (result => { - let code = ""; - code += "if(_counter > 0) {\n"; - code += onResult(i, result, done, doneBreak); - code += "}\n"; - return code; - }), - onDone: - !onResult && - (() => { - return done(); - }), - rethrowIfPossible - }), - done, - doneBreak - ); + // resolve the parent directory to find links there and get the real path + const parentResolved = this.resolve(parent); + let realFile = file; + + // is the parent directory really somewhere else? + if (parentResolved[0] !== parent) { + // get the real location of file + const basename = path.basename(file); + realFile = path.resolve(parentResolved[0], basename); } - code += "} while(false);\n"; - return code; - } + // try to read the link content + try { + const linkContent = fs.readlinkSync(realFile); - args({ before, after } = {}) { - let allArgs = this._args; - if (before) allArgs = [before].concat(allArgs); - if (after) allArgs = allArgs.concat(after); - if (allArgs.length === 0) { - return ""; - } else { - return allArgs.join(", "); + // resolve the link content relative to the parent directory + const resolvedLink = path.resolve(parentResolved[0], linkContent); + + // recursive resolve the link content for more links in the structure + const linkResolved = this.resolve(resolvedLink); + + // merge parent and link resolve results + let result; + if (linkResolved.length > 1 && parentResolved.length > 1) { + // when both contain links we need to duplicate them with a Set + const resultSet = new Set(linkResolved); + // add the link + resultSet.add(realFile); + // add all symlinks of the parent + for (let i = 1; i < parentResolved.length; i++) { + resultSet.add(parentResolved[i]); + } + result = Object.freeze(Array.from(resultSet)); + } else if (parentResolved.length > 1) { + // we have links in the parent but not for the link content location + result = parentResolved.slice(); + result[0] = linkResolved[0]; + // add the link + result.push(realFile); + Object.freeze(result); + } else if (linkResolved.length > 1) { + // we can return the link content location result + result = linkResolved.slice(); + // add the link + result.push(realFile); + Object.freeze(result); + } else { + // neither link content location nor parent have links + // this link is the only link here + result = Object.freeze([ + // the resolve real location + linkResolved[0], + // add the link + realFile + ]); + } + this.cache.set(file, result); + return result; + } catch (e) { + if (!EXPECTED_ERRORS.has(e.code)) { + throw e; + } + // no link + const result = parentResolved.slice(); + result[0] = realFile; + Object.freeze(result); + this.cache.set(file, result); + return result; } } - - getTapFn(idx) { - return `_x[${idx}]`; - } - - getTap(idx) { - return `_taps[${idx}]`; - } - - getInterceptor(idx) { - return `_interceptors[${idx}]`; - } } - -module.exports = HookCodeFactory; +module.exports = LinkResolver; /***/ }), -/***/ 81647: +/***/ 29539: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -143944,66 +143269,57 @@ module.exports = HookCodeFactory; */ -const util = __webpack_require__(31669); - -const defaultFactory = (key, hook) => hook; +const path = __webpack_require__(85622); +const DirectoryWatcher = __webpack_require__(39144); -class HookMap { - constructor(factory, name = undefined) { - this._map = new Map(); - this.name = name; - this._factory = factory; - this._interceptors = []; +class WatcherManager { + 
constructor(options) { + this.options = options; + this.directoryWatchers = new Map(); } - get(key) { - return this._map.get(key); + getDirectoryWatcher(directory) { + const watcher = this.directoryWatchers.get(directory); + if (watcher === undefined) { + const newWatcher = new DirectoryWatcher(this, directory, this.options); + this.directoryWatchers.set(directory, newWatcher); + newWatcher.on("closed", () => { + this.directoryWatchers.delete(directory); + }); + return newWatcher; + } + return watcher; } - for(key) { - const hook = this.get(key); - if (hook !== undefined) { - return hook; - } - let newHook = this._factory(key); - const interceptors = this._interceptors; - for (let i = 0; i < interceptors.length; i++) { - newHook = interceptors[i].factory(key, newHook); - } - this._map.set(key, newHook); - return newHook; + watchFile(p, startTime) { + const directory = path.dirname(p); + if (directory === p) return null; + return this.getDirectoryWatcher(directory).watch(p, startTime); } - intercept(interceptor) { - this._interceptors.push( - Object.assign( - { - factory: defaultFactory - }, - interceptor - ) - ); + watchDirectory(directory, startTime) { + return this.getDirectoryWatcher(directory).watch(directory, startTime); } } -HookMap.prototype.tap = util.deprecate(function(key, options, fn) { - return this.for(key).tap(options, fn); -}, "HookMap#tap(key,…) is deprecated. Use HookMap#for(key).tap(…) instead."); - -HookMap.prototype.tapAsync = util.deprecate(function(key, options, fn) { - return this.for(key).tapAsync(options, fn); -}, "HookMap#tapAsync(key,…) is deprecated. Use HookMap#for(key).tapAsync(…) instead."); - -HookMap.prototype.tapPromise = util.deprecate(function(key, options, fn) { - return this.for(key).tapPromise(options, fn); -}, "HookMap#tapPromise(key,…) is deprecated. 
Use HookMap#for(key).tapPromise(…) instead."); - -module.exports = HookMap; +const watcherManagers = new WeakMap(); +/** + * @param {object} options options + * @returns {WatcherManager} the watcher manager + */ +module.exports = options => { + const watcherManager = watcherManagers.get(options); + if (watcherManager !== undefined) return watcherManager; + const newWatcherManager = new WatcherManager(options); + watcherManagers.set(options, newWatcherManager); + return newWatcherManager; +}; +module.exports.WatcherManager = WatcherManager; /***/ }), -/***/ 63708: +/***/ 89946: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -144013,60 +143329,144 @@ module.exports = HookMap; */ -const Hook = __webpack_require__(36591); +const path = __webpack_require__(85622); -class MultiHook { - constructor(hooks, name = undefined) { - this.hooks = hooks; - this.name = name; - } +/** + * @template T + * @typedef {Object} TreeNode + * @property {string} filePath + * @property {TreeNode} parent + * @property {TreeNode[]} children + * @property {number} entries + * @property {boolean} active + * @property {T[] | T | undefined} value + */ - tap(options, fn) { - for (const hook of this.hooks) { - hook.tap(options, fn); - } +/** + * @template T + * @param {Map>} the new plan + */ +module.exports = (plan, limit) => { + const treeMap = new Map(); + // Convert to tree + for (const [filePath, value] of plan) { + treeMap.set(filePath, { + filePath, + parent: undefined, + children: undefined, + entries: 1, + active: true, + value + }); } - - tapAsync(options, fn) { - for (const hook of this.hooks) { - hook.tapAsync(options, fn); + let currentCount = treeMap.size; + // Create parents and calculate sum of entries + for (const node of treeMap.values()) { + const parentPath = path.dirname(node.filePath); + if (parentPath !== node.filePath) { + let parent = treeMap.get(parentPath); + if (parent === undefined) { + parent = { + filePath: parentPath, + parent: undefined, + children: [node], + entries: node.entries, + active: false, + value: undefined + }; + treeMap.set(parentPath, parent); + node.parent = parent; + } else { + node.parent = parent; + if (parent.children === undefined) { + parent.children = [node]; + } else { + parent.children.push(node); + } + do { + parent.entries += node.entries; + parent = parent.parent; + } while (parent); + } } } - - tapPromise(options, fn) { - for (const hook of this.hooks) { - hook.tapPromise(options, fn); + // Reduce until limit reached + while (currentCount > limit) { + // Select node that helps reaching the limit most effectively without overmerging + const overLimit = currentCount - limit; + let bestNode = undefined; + let bestCost = Infinity; + for (const node of treeMap.values()) { + if (node.entries <= 1 || !node.children || !node.parent) continue; + if (node.children.length === 0) continue; + if (node.children.length === 1 && !node.value) continue; + // Try to select the node with has just a bit more entries than we need to reduce + // When just a bit more is over 30% over the limit, + // also consider just a bit less entries then we need to reduce + const cost = + node.entries - 1 >= overLimit + ? 
node.entries - 1 - overLimit + : overLimit - node.entries + 1 + limit * 0.3; + if (cost < bestCost) { + bestNode = node; + bestCost = cost; + } } - } - - isUsed() { - for (const hook of this.hooks) { - if (hook.isUsed()) return true; + if (!bestNode) break; + // Merge all children + const reduction = bestNode.entries - 1; + bestNode.active = true; + bestNode.entries = 1; + currentCount -= reduction; + let parent = bestNode.parent; + while (parent) { + parent.entries -= reduction; + parent = parent.parent; } - return false; - } - - intercept(interceptor) { - for (const hook of this.hooks) { - hook.intercept(interceptor); + const queue = new Set(bestNode.children); + for (const node of queue) { + node.active = false; + node.entries = 0; + if (node.children) { + for (const child of node.children) queue.add(child); + } } } - - withOptions(options) { - return new MultiHook( - this.hooks.map(h => h.withOptions(options)), - this.name - ); + // Write down new plan + const newPlan = new Map(); + for (const rootNode of treeMap.values()) { + if (!rootNode.active) continue; + const map = new Map(); + const queue = new Set([rootNode]); + for (const node of queue) { + if (node.active && node !== rootNode) continue; + if (node.value) { + if (Array.isArray(node.value)) { + for (const item of node.value) { + map.set(item, node.filePath); + } + } else { + map.set(node.value, node.filePath); + } + } + if (node.children) { + for (const child of node.children) { + queue.add(child); + } + } + } + newPlan.set(rootNode.filePath, map); } -} - -module.exports = MultiHook; + return newPlan; +}; /***/ }), -/***/ 98034: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { +/***/ 10547: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; /* @@ -144075,164 +143475,340 @@ module.exports = MultiHook; */ -const Hook = __webpack_require__(36591); -const HookCodeFactory = __webpack_require__(85572); - -class SyncBailHookCodeFactory extends HookCodeFactory { - content({ onError, onResult, resultReturns, onDone, rethrowIfPossible }) { - return this.callTapsSeries({ - onError: (i, err) => onError(err), - onResult: (i, result, next) => - `if(${result} !== undefined) {\n${onResult( - result - )};\n} else {\n${next()}}\n`, - resultReturns, - onDone, - rethrowIfPossible - }); - } -} - -const factory = new SyncBailHookCodeFactory(); - -const TAP_ASYNC = () => { - throw new Error("tapAsync is not supported on a SyncBailHook"); -}; +const fs = __webpack_require__(35747); +const path = __webpack_require__(85622); +const { EventEmitter } = __webpack_require__(28614); +const reducePlan = __webpack_require__(89946); -const TAP_PROMISE = () => { - throw new Error("tapPromise is not supported on a SyncBailHook"); -}; +const IS_OSX = __webpack_require__(12087).platform() === "darwin"; +const IS_WIN = __webpack_require__(12087).platform() === "win32"; +const SUPPORTS_RECURSIVE_WATCHING = IS_OSX || IS_WIN; -const COMPILE = function(options) { - factory.setup(this, options); - return factory.create(options); -}; +const watcherLimit = + +process.env.WATCHPACK_WATCHER_LIMIT || (IS_OSX ? 
2000 : 10000); -function SyncBailHook(args = [], name = undefined) { - const hook = new Hook(args, name); - hook.constructor = SyncBailHook; - hook.tapAsync = TAP_ASYNC; - hook.tapPromise = TAP_PROMISE; - hook.compile = COMPILE; - return hook; -} +const recursiveWatcherLogging = !!process.env + .WATCHPACK_RECURSIVE_WATCHER_LOGGING; -SyncBailHook.prototype = null; +let isBatch = false; +let watcherCount = 0; -module.exports = SyncBailHook; +/** @type {Map} */ +const pendingWatchers = new Map(); +/** @type {Map} */ +const recursiveWatchers = new Map(); -/***/ }), +/** @type {Map} */ +const directWatchers = new Map(); -/***/ 35079: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { +/** @type {Map} */ +const underlyingWatcher = new Map(); -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ +class DirectWatcher { + constructor(filePath) { + this.filePath = filePath; + this.watchers = new Set(); + this.watcher = undefined; + try { + const watcher = fs.watch(filePath); + this.watcher = watcher; + watcher.on("change", (type, filename) => { + for (const w of this.watchers) { + w.emit("change", type, filename); + } + }); + watcher.on("error", error => { + for (const w of this.watchers) { + w.emit("error", error); + } + }); + } catch (err) { + process.nextTick(() => { + for (const w of this.watchers) { + w.emit("error", err); + } + }); + } + watcherCount++; + } + add(watcher) { + underlyingWatcher.set(watcher, this); + this.watchers.add(watcher); + } -const Hook = __webpack_require__(36591); -const HookCodeFactory = __webpack_require__(85572); + remove(watcher) { + this.watchers.delete(watcher); + if (this.watchers.size === 0) { + directWatchers.delete(this.filePath); + watcherCount--; + if (this.watcher) this.watcher.close(); + } + } -class SyncHookCodeFactory extends HookCodeFactory { - content({ onError, onDone, rethrowIfPossible }) { - return this.callTapsSeries({ - onError: (i, err) => onError(err), - onDone, - rethrowIfPossible - }); + getWatchers() { + return this.watchers; } } -const factory = new SyncHookCodeFactory(); - -const TAP_ASYNC = () => { - throw new Error("tapAsync is not supported on a SyncHook"); -}; +class RecursiveWatcher { + constructor(rootPath) { + this.rootPath = rootPath; + /** @type {Map} */ + this.mapWatcherToPath = new Map(); + /** @type {Map>} */ + this.mapPathToWatchers = new Map(); + this.watcher = undefined; + try { + const watcher = fs.watch(rootPath, { + recursive: true + }); + this.watcher = watcher; + watcher.on("change", (type, filename) => { + if (!filename) { + if (recursiveWatcherLogging) { + process.stderr.write( + `[watchpack] dispatch ${type} event in recursive watcher (${ + this.rootPath + }) to all watchers\n` + ); + } + for (const w of this.mapWatcherToPath.keys()) { + w.emit("change", type); + } + } else { + const dir = path.dirname(filename); + const watchers = this.mapPathToWatchers.get(dir); + if (recursiveWatcherLogging) { + process.stderr.write( + `[watchpack] dispatch ${type} event in recursive watcher (${ + this.rootPath + }) for '${filename}' to ${ + watchers ? 
watchers.size : 0 + } watchers\n` + ); + } + if (watchers === undefined) return; + for (const w of watchers) { + w.emit("change", type, path.basename(filename)); + } + } + }); + watcher.on("error", error => { + for (const w of this.mapWatcherToPath.keys()) { + w.emit("error", error); + } + }); + } catch (err) { + process.nextTick(() => { + for (const w of this.mapWatcherToPath.keys()) { + w.emit("error", err); + } + }); + } + watcherCount++; + if (recursiveWatcherLogging) { + process.stderr.write( + `[watchpack] created recursive watcher at ${rootPath}\n` + ); + } + } -const TAP_PROMISE = () => { - throw new Error("tapPromise is not supported on a SyncHook"); -}; + add(filePath, watcher) { + underlyingWatcher.set(watcher, this); + const subpath = filePath.slice(this.rootPath.length + 1) || "."; + this.mapWatcherToPath.set(watcher, subpath); + const set = this.mapPathToWatchers.get(subpath); + if (set === undefined) { + const newSet = new Set(); + newSet.add(watcher); + this.mapPathToWatchers.set(subpath, newSet); + } else { + set.add(watcher); + } + } -const COMPILE = function(options) { - factory.setup(this, options); - return factory.create(options); -}; + remove(watcher) { + const subpath = this.mapWatcherToPath.get(watcher); + if (!subpath) return; + this.mapWatcherToPath.delete(watcher); + const set = this.mapPathToWatchers.get(subpath); + set.delete(watcher); + if (set.size === 0) { + this.mapPathToWatchers.delete(subpath); + } + if (this.mapWatcherToPath.size === 0) { + recursiveWatchers.delete(this.rootPath); + watcherCount--; + if (this.watcher) this.watcher.close(); + if (recursiveWatcherLogging) { + process.stderr.write( + `[watchpack] closed recursive watcher at ${this.rootPath}\n` + ); + } + } + } -function SyncHook(args = [], name = undefined) { - const hook = new Hook(args, name); - hook.constructor = SyncHook; - hook.tapAsync = TAP_ASYNC; - hook.tapPromise = TAP_PROMISE; - hook.compile = COMPILE; - return hook; + getWatchers() { + return this.mapWatcherToPath; + } } -SyncHook.prototype = null; - -module.exports = SyncHook; - - -/***/ }), +class Watcher extends EventEmitter { + close() { + if (pendingWatchers.has(this)) { + pendingWatchers.delete(this); + return; + } + const watcher = underlyingWatcher.get(this); + watcher.remove(this); + underlyingWatcher.delete(this); + } +} -/***/ 66953: -/***/ (function(module, __unused_webpack_exports, __webpack_require__) { +const createDirectWatcher = filePath => { + const existing = directWatchers.get(filePath); + if (existing !== undefined) return existing; + const w = new DirectWatcher(filePath); + directWatchers.set(filePath, w); + return w; +}; -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ +const createRecursiveWatcher = rootPath => { + const existing = recursiveWatchers.get(rootPath); + if (existing !== undefined) return existing; + const w = new RecursiveWatcher(rootPath); + recursiveWatchers.set(rootPath, w); + return w; +}; +const execute = () => { + /** @type {Map} */ + const map = new Map(); + const addWatcher = (watcher, filePath) => { + const entry = map.get(filePath); + if (entry === undefined) { + map.set(filePath, watcher); + } else if (Array.isArray(entry)) { + entry.push(watcher); + } else { + map.set(filePath, [entry, watcher]); + } + }; + for (const [watcher, filePath] of pendingWatchers) { + addWatcher(watcher, filePath); + } + pendingWatchers.clear(); -const Hook = __webpack_require__(36591); -const HookCodeFactory = 
__webpack_require__(85572); + // Fast case when we are not reaching the limit + if (!SUPPORTS_RECURSIVE_WATCHING || watcherLimit - watcherCount >= map.size) { + // Create watchers for all entries in the map + for (const [filePath, entry] of map) { + const w = createDirectWatcher(filePath); + if (Array.isArray(entry)) { + for (const item of entry) w.add(item); + } else { + w.add(entry); + } + } + return; + } -class SyncLoopHookCodeFactory extends HookCodeFactory { - content({ onError, onDone, rethrowIfPossible }) { - return this.callTapsLooping({ - onError: (i, err) => onError(err), - onDone, - rethrowIfPossible - }); + // Reconsider existing watchers to improving watch plan + for (const watcher of recursiveWatchers.values()) { + for (const [w, subpath] of watcher.getWatchers()) { + addWatcher(w, path.join(watcher.rootPath, subpath)); + } + } + for (const watcher of directWatchers.values()) { + for (const w of watcher.getWatchers()) { + addWatcher(w, watcher.filePath); + } } -} -const factory = new SyncLoopHookCodeFactory(); + // Merge map entries to keep watcher limit + // Create a 10% buffer to be able to enter fast case more often + const plan = reducePlan(map, watcherLimit * 0.9); -const TAP_ASYNC = () => { - throw new Error("tapAsync is not supported on a SyncLoopHook"); + // Update watchers for all entries in the map + for (const [filePath, entry] of plan) { + if (entry.size === 1) { + for (const [watcher, filePath] of entry) { + const w = createDirectWatcher(filePath); + const old = underlyingWatcher.get(watcher); + if (old === w) continue; + w.add(watcher); + if (old !== undefined) old.remove(watcher); + } + } else { + const filePaths = new Set(entry.values()); + if (filePaths.size > 1) { + const w = createRecursiveWatcher(filePath); + for (const [watcher, watcherPath] of entry) { + const old = underlyingWatcher.get(watcher); + if (old === w) continue; + w.add(watcherPath, watcher); + if (old !== undefined) old.remove(watcher); + } + } else { + for (const filePath of filePaths) { + const w = createDirectWatcher(filePath); + for (const watcher of entry.keys()) { + const old = underlyingWatcher.get(watcher); + if (old === w) continue; + w.add(watcher); + if (old !== undefined) old.remove(watcher); + } + } + } + } + } }; -const TAP_PROMISE = () => { - throw new Error("tapPromise is not supported on a SyncLoopHook"); +exports.watch = filePath => { + const watcher = new Watcher(); + // Find an existing watcher + const directWatcher = directWatchers.get(filePath); + if (directWatcher !== undefined) { + directWatcher.add(watcher); + return watcher; + } + let current = filePath; + for (;;) { + const recursiveWatcher = recursiveWatchers.get(current); + if (recursiveWatcher !== undefined) { + recursiveWatcher.add(filePath, watcher); + return watcher; + } + const parent = path.dirname(current); + if (parent === current) break; + current = parent; + } + // Queue up watcher for creation + pendingWatchers.set(watcher, filePath); + if (!isBatch) execute(); + return watcher; }; -const COMPILE = function(options) { - factory.setup(this, options); - return factory.create(options); +exports.batch = fn => { + isBatch = true; + try { + fn(); + } finally { + isBatch = false; + execute(); + } }; -function SyncLoopHook(args = [], name = undefined) { - const hook = new Hook(args, name); - hook.constructor = SyncLoopHook; - hook.tapAsync = TAP_ASYNC; - hook.tapPromise = TAP_PROMISE; - hook.compile = COMPILE; - return hook; -} - -SyncLoopHook.prototype = null; - -module.exports = SyncLoopHook; 
+exports.getNumberOfWatchers = () => { + return watcherCount; +}; /***/ }), -/***/ 92878: +/***/ 98342: /***/ (function(module, __unused_webpack_exports, __webpack_require__) { "use strict"; @@ -144242,84 +143818,348 @@ module.exports = SyncLoopHook; */ -const Hook = __webpack_require__(36591); -const HookCodeFactory = __webpack_require__(85572); +const getWatcherManager = __webpack_require__(29539); +const LinkResolver = __webpack_require__(56083); +const EventEmitter = __webpack_require__(28614).EventEmitter; +const globToRegExp = __webpack_require__(22797); +const watchEventSource = __webpack_require__(10547); -class SyncWaterfallHookCodeFactory extends HookCodeFactory { - content({ onError, onResult, resultReturns, rethrowIfPossible }) { - return this.callTapsSeries({ - onError: (i, err) => onError(err), - onResult: (i, result, next) => { - let code = ""; - code += `if(${result} !== undefined) {\n`; - code += `${this._args[0]} = ${result};\n`; - code += `}\n`; - code += next(); - return code; - }, - onDone: () => onResult(this._args[0]), - doneReturns: resultReturns, - rethrowIfPossible - }); +let EXISTANCE_ONLY_TIME_ENTRY; // lazy required + +const EMPTY_ARRAY = []; +const EMPTY_OPTIONS = {}; + +function addWatchersToSet(watchers, set) { + for (const w of watchers) { + if (w !== true && !set.has(w.directoryWatcher)) { + set.add(w.directoryWatcher); + addWatchersToSet(w.directoryWatcher.directories.values(), set); + } } } -const factory = new SyncWaterfallHookCodeFactory(); +const stringToRegexp = ignored => { + const source = globToRegExp(ignored, { globstar: true, extended: true }) + .source; + const matchingStart = source.slice(0, source.length - 1) + "(?:$|\\/)"; + return matchingStart; +}; -const TAP_ASYNC = () => { - throw new Error("tapAsync is not supported on a SyncWaterfallHook"); +const ignoredToRegexp = ignored => { + if (Array.isArray(ignored)) { + return new RegExp(ignored.map(i => stringToRegexp(i)).join("|")); + } else if (typeof ignored === "string") { + return new RegExp(stringToRegexp(ignored)); + } else if (ignored instanceof RegExp) { + return ignored; + } else if (ignored) { + throw new Error(`Invalid option for 'ignored': ${ignored}`); + } else { + return undefined; + } }; -const TAP_PROMISE = () => { - throw new Error("tapPromise is not supported on a SyncWaterfallHook"); +const normalizeOptions = options => { + return { + followSymlinks: !!options.followSymlinks, + ignored: ignoredToRegexp(options.ignored), + poll: options.poll + }; }; -const COMPILE = function(options) { - factory.setup(this, options); - return factory.create(options); +const normalizeCache = new WeakMap(); +const cachedNormalizeOptions = options => { + const cacheEntry = normalizeCache.get(options); + if (cacheEntry !== undefined) return cacheEntry; + const normalized = normalizeOptions(options); + normalizeCache.set(options, normalized); + return normalized; }; -function SyncWaterfallHook(args = [], name = undefined) { - if (args.length < 1) - throw new Error("Waterfall hooks must have at least one argument"); - const hook = new Hook(args, name); - hook.constructor = SyncWaterfallHook; - hook.tapAsync = TAP_ASYNC; - hook.tapPromise = TAP_PROMISE; - hook.compile = COMPILE; - return hook; -} +class Watchpack extends EventEmitter { + constructor(options) { + super(); + if (!options) options = EMPTY_OPTIONS; + this.options = options; + this.aggregateTimeout = + typeof options.aggregateTimeout === "number" + ? 
options.aggregateTimeout + : 200; + this.watcherOptions = cachedNormalizeOptions(options); + this.watcherManager = getWatcherManager(this.watcherOptions); + this.fileWatchers = new Map(); + this.directoryWatchers = new Map(); + this.startTime = undefined; + this.paused = false; + this.aggregatedChanges = new Set(); + this.aggregatedRemovals = new Set(); + this.aggregateTimer = undefined; + this._onTimeout = this._onTimeout.bind(this); + } -SyncWaterfallHook.prototype = null; + watch(arg1, arg2, arg3) { + let files, directories, missing, startTime; + if (!arg2) { + ({ + files = EMPTY_ARRAY, + directories = EMPTY_ARRAY, + missing = EMPTY_ARRAY, + startTime + } = arg1); + } else { + files = arg1; + directories = arg2; + missing = EMPTY_ARRAY; + startTime = arg3; + } + this.paused = false; + const oldFileWatchers = this.fileWatchers; + const oldDirectoryWatchers = this.directoryWatchers; + const ignored = this.watcherOptions.ignored; + const filter = ignored + ? path => !ignored.test(path.replace(/\\/g, "/")) + : () => true; + const addToMap = (map, key, item) => { + const list = map.get(key); + if (list === undefined) { + map.set(key, [item]); + } else { + list.push(item); + } + }; + const fileWatchersNeeded = new Map(); + const directoryWatchersNeeded = new Map(); + const missingFiles = new Set(); + if (this.watcherOptions.followSymlinks) { + const resolver = new LinkResolver(); + for (const file of files) { + if (filter(file)) { + for (const innerFile of resolver.resolve(file)) { + if (file === innerFile || filter(innerFile)) { + addToMap(fileWatchersNeeded, innerFile, file); + } + } + } + } + for (const file of missing) { + if (filter(file)) { + for (const innerFile of resolver.resolve(file)) { + if (file === innerFile || filter(innerFile)) { + missingFiles.add(file); + addToMap(fileWatchersNeeded, innerFile, file); + } + } + } + } + for (const dir of directories) { + if (filter(dir)) { + let first = true; + for (const innerItem of resolver.resolve(dir)) { + if (filter(innerItem)) { + addToMap( + first ? 
directoryWatchersNeeded : fileWatchersNeeded, + innerItem, + dir + ); + } + first = false; + } + } + } + } else { + for (const file of files) { + if (filter(file)) { + addToMap(fileWatchersNeeded, file, file); + } + } + for (const file of missing) { + if (filter(file)) { + missingFiles.add(file); + addToMap(fileWatchersNeeded, file, file); + } + } + for (const dir of directories) { + if (filter(dir)) { + addToMap(directoryWatchersNeeded, dir, dir); + } + } + } + const newFileWatchers = new Map(); + const newDirectoryWatchers = new Map(); + const setupFileWatcher = (watcher, key, files) => { + watcher.on("initial-missing", type => { + for (const file of files) { + if (!missingFiles.has(file)) this._onRemove(file, file, type); + } + }); + watcher.on("change", (mtime, type) => { + for (const file of files) { + this._onChange(file, mtime, file, type); + } + }); + watcher.on("remove", type => { + for (const file of files) { + this._onRemove(file, file, type); + } + }); + newFileWatchers.set(key, watcher); + }; + const setupDirectoryWatcher = (watcher, key, directories) => { + watcher.on("initial-missing", type => { + for (const item of directories) { + this._onRemove(item, item, type); + } + }); + watcher.on("change", (file, mtime, type) => { + for (const item of directories) { + this._onChange(item, mtime, file, type); + } + }); + watcher.on("remove", type => { + for (const item of directories) { + this._onRemove(item, item, type); + } + }); + newDirectoryWatchers.set(key, watcher); + }; + // Close unneeded old watchers + const fileWatchersToClose = []; + const directoryWatchersToClose = []; + for (const [key, w] of oldFileWatchers) { + if (!fileWatchersNeeded.has(key)) { + w.close(); + } else { + fileWatchersToClose.push(w); + } + } + for (const [key, w] of oldDirectoryWatchers) { + if (!directoryWatchersNeeded.has(key)) { + w.close(); + } else { + directoryWatchersToClose.push(w); + } + } + // Create new watchers and install handlers on these watchers + watchEventSource.batch(() => { + for (const [key, files] of fileWatchersNeeded) { + const watcher = this.watcherManager.watchFile(key, startTime); + if (watcher) { + setupFileWatcher(watcher, key, files); + } + } + for (const [key, directories] of directoryWatchersNeeded) { + const watcher = this.watcherManager.watchDirectory(key, startTime); + if (watcher) { + setupDirectoryWatcher(watcher, key, directories); + } + } + }); + // Close old watchers + for (const w of fileWatchersToClose) w.close(); + for (const w of directoryWatchersToClose) w.close(); + // Store watchers + this.fileWatchers = newFileWatchers; + this.directoryWatchers = newDirectoryWatchers; + this.startTime = startTime; + } -module.exports = SyncWaterfallHook; + close() { + this.paused = true; + if (this.aggregateTimer) clearTimeout(this.aggregateTimer); + for (const w of this.fileWatchers.values()) w.close(); + for (const w of this.directoryWatchers.values()) w.close(); + this.fileWatchers.clear(); + this.directoryWatchers.clear(); + } + pause() { + this.paused = true; + if (this.aggregateTimer) clearTimeout(this.aggregateTimer); + } -/***/ }), + getTimes() { + const directoryWatchers = new Set(); + addWatchersToSet(this.fileWatchers.values(), directoryWatchers); + addWatchersToSet(this.directoryWatchers.values(), directoryWatchers); + const obj = Object.create(null); + for (const w of directoryWatchers) { + const times = w.getTimes(); + for (const file of Object.keys(times)) obj[file] = times[file]; + } + return obj; + } -/***/ 34718: -/***/ 
(function(__unused_webpack_module, exports, __webpack_require__) { + getTimeInfoEntries() { + if (EXISTANCE_ONLY_TIME_ENTRY === undefined) { + EXISTANCE_ONLY_TIME_ENTRY = __webpack_require__(39144).EXISTANCE_ONLY_TIME_ENTRY; + } + const directoryWatchers = new Set(); + addWatchersToSet(this.fileWatchers.values(), directoryWatchers); + addWatchersToSet(this.directoryWatchers.values(), directoryWatchers); + const map = new Map(); + for (const w of directoryWatchers) { + const times = w.getTimeInfoEntries(); + for (const [path, entry] of times) { + if (map.has(path)) { + if (entry === EXISTANCE_ONLY_TIME_ENTRY) continue; + const value = map.get(path); + if (value === entry) continue; + if (value !== EXISTANCE_ONLY_TIME_ENTRY) { + map.set(path, Object.assign({}, value, entry)); + continue; + } + } + map.set(path, entry); + } + } + return map; + } -"use strict"; -/* - MIT License http://www.opensource.org/licenses/mit-license.php - Author Tobias Koppers @sokra -*/ + getAggregated() { + if (this.aggregateTimer) { + clearTimeout(this.aggregateTimer); + this.aggregateTimer = undefined; + } + const changes = this.aggregatedChanges; + const removals = this.aggregatedRemovals; + this.aggregatedChanges = new Set(); + this.aggregatedRemovals = new Set(); + return { changes, removals }; + } + _onChange(item, mtime, file, type) { + file = file || item; + if (!this.paused) { + this.emit("change", file, mtime, type); + if (this.aggregateTimer) clearTimeout(this.aggregateTimer); + this.aggregateTimer = setTimeout(this._onTimeout, this.aggregateTimeout); + } + this.aggregatedRemovals.delete(item); + this.aggregatedChanges.add(item); + } -exports.__esModule = true; -exports.SyncHook = __webpack_require__(35079); -exports.SyncBailHook = __webpack_require__(98034); -exports.SyncWaterfallHook = __webpack_require__(92878); -exports.SyncLoopHook = __webpack_require__(66953); -exports.AsyncParallelHook = __webpack_require__(82890); -exports.AsyncParallelBailHook = __webpack_require__(28339); -exports.AsyncSeriesHook = __webpack_require__(45146); -exports.AsyncSeriesBailHook = __webpack_require__(97681); -exports.AsyncSeriesLoopHook = __webpack_require__(5463); -exports.AsyncSeriesWaterfallHook = __webpack_require__(73448); -exports.HookMap = __webpack_require__(81647); -exports.MultiHook = __webpack_require__(63708); + _onRemove(item, file, type) { + file = file || item; + if (!this.paused) { + this.emit("remove", file, type); + if (this.aggregateTimer) clearTimeout(this.aggregateTimer); + this.aggregateTimer = setTimeout(this._onTimeout, this.aggregateTimeout); + } + this.aggregatedChanges.delete(item); + this.aggregatedRemovals.add(item); + } + + _onTimeout() { + this.aggregateTimer = undefined; + const changes = this.aggregatedChanges; + const removals = this.aggregatedRemovals; + this.aggregatedChanges = new Set(); + this.aggregatedRemovals = new Set(); + this.emit("aggregated", changes, removals); + } +} + +module.exports = Watchpack; /***/ }), diff --git a/packages/next/package.json b/packages/next/package.json index ef6af606b2c9f..29534ff36b9ae 100644 --- a/packages/next/package.json +++ b/packages/next/package.json @@ -268,7 +268,7 @@ "webpack-sources1": "npm:webpack-sources@1.4.3", "webpack-sources3": "npm:webpack-sources@3.2.2", "webpack4": "npm:webpack@4.44.1", - "webpack5": "npm:webpack@5.64.4", + "webpack5": "npm:webpack@5.64.3", "ws": "8.2.3" }, "resolutions": { diff --git a/test/development/basic/tailwind-jit.test.ts b/test/development/basic/tailwind-jit.test.ts new file mode 100644 index 
0000000000000..6d880c2a989a5
--- /dev/null
+++ b/test/development/basic/tailwind-jit.test.ts
@@ -0,0 +1,65 @@
+import { join } from 'path'
+import webdriver from 'next-webdriver'
+import { createNext, FileRef } from 'e2e-utils'
+import { NextInstance } from 'test/lib/next-modes/base'
+import { check } from 'next-test-utils'
+
+describe('TailwindCSS JIT', () => {
+  let next: NextInstance
+
+  beforeAll(async () => {
+    next = await createNext({
+      files: {
+        'postcss.config.js': new FileRef(
+          join(__dirname, 'tailwind-jit/postcss.config.js')
+        ),
+        'tailwind.config.js': new FileRef(
+          join(__dirname, 'tailwind-jit/tailwind.config.js')
+        ),
+        pages: new FileRef(join(__dirname, 'tailwind-jit/pages')),
+      },
+      dependencies: {
+        tailwindcss: '2.2.19',
+        postcss: '8.3.5',
+      },
+    })
+  })
+  afterAll(() => next.destroy())
+
+  it('works with JIT enabled', async () => {
+    let browser
+    try {
+      browser = await webdriver(next.appPort, '/')
+      const text = await browser.elementByCss('.text-6xl').text()
+      expect(text).toMatch(/Welcome to/)
+      const cssBlue = await browser
+        .elementByCss('#test-link')
+        .getComputedCss('color')
+      expect(cssBlue).toBe('rgb(37, 99, 235)')
+
+      const aboutPagePath = join('pages', 'index.js')
+
+      const originalContent = await next.readFile(aboutPagePath)
+      const editedContent = originalContent.replace(
+        'text-blue-600',
+        'text-red-600'
+      )
+
+      // change the content
+      try {
+        await next.patchFile(aboutPagePath, editedContent)
+        await check(
+          () => browser.elementByCss('#test-link').getComputedCss('color'),
+          /rgb\(220, 38, 38\)/
+        )
+      } finally {
+        // add the original content
+        await next.patchFile(aboutPagePath, originalContent)
+      }
+    } finally {
+      if (browser) {
+        await browser.close()
+      }
+    }
+  })
+})
diff --git a/test/development/basic/tailwind-jit/pages/_app.js b/test/development/basic/tailwind-jit/pages/_app.js
new file mode 100644
index 0000000000000..10401209b3e0a
--- /dev/null
+++ b/test/development/basic/tailwind-jit/pages/_app.js
@@ -0,0 +1,7 @@
+import 'tailwindcss/tailwind.css'
+
+function MyApp({ Component, pageProps }) {
+  return <Component {...pageProps} />
+}
+
+export default MyApp
diff --git a/test/development/basic/tailwind-jit/pages/index.js b/test/development/basic/tailwind-jit/pages/index.js
new file mode 100644
index 0000000000000..a3db606301626
--- /dev/null
+++ b/test/development/basic/tailwind-jit/pages/index.js
@@ -0,0 +1,63 @@
+export default function Home() {
+  return (
+
+  )
+}
diff --git a/test/development/basic/tailwind-jit/postcss.config.js b/test/development/basic/tailwind-jit/postcss.config.js
new file mode 100644
index 0000000000000..5331562012af4
--- /dev/null
+++ b/test/development/basic/tailwind-jit/postcss.config.js
@@ -0,0 +1,7 @@
+// If you want to use other PostCSS plugins, see the following:
+// https://tailwindcss.com/docs/using-with-preprocessors
+module.exports = {
+  plugins: {
+    tailwindcss: {},
+  },
+}
diff --git a/test/development/basic/tailwind-jit/tailwind.config.js b/test/development/basic/tailwind-jit/tailwind.config.js
new file mode 100644
index 0000000000000..211048ff7755e
--- /dev/null
+++ b/test/development/basic/tailwind-jit/tailwind.config.js
@@ -0,0 +1,12 @@
+module.exports = {
+  mode: 'jit',
+  purge: ['./pages/**/*.{js,ts,jsx,tsx}', './components/**/*.{js,ts,jsx,tsx}'],
+  darkMode: false, // or 'media' or 'class'
+  theme: {
+    extend: {},
+  },
+  variants: {
+    extend: {},
+  },
+  plugins: [],
+}
diff --git a/yarn.lock b/yarn.lock
index 3a849acddbaff..6b9f70a29baf4
--- a/yarn.lock
+++ b/yarn.lock
@@ -19884,7 +19884,7 @@ watchpack-chokidar2@^2.0.0:
  dependencies:
chokidar "^2.1.8" -watchpack@2.3.0, watchpack@^2.3.0: +watchpack@2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.3.0.tgz#a41bca3da6afaff31e92a433f4c856a0c25ea0c4" integrity sha512-MnN0Q1OsvB/GGHETrFeZPQaOelWh/7O+EiFlj8sM9GPjtQkis7k01aAxrg/18kTfoIVcLL+haEVFlXDaSRwKRw== @@ -19903,6 +19903,14 @@ watchpack@^1.7.4: chokidar "^3.4.1" watchpack-chokidar2 "^2.0.0" +watchpack@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.2.0.tgz#47d78f5415fe550ecd740f99fe2882323a58b1ce" + integrity sha512-up4YAn/XHgZHIxFBVCdlMiWDj6WaLKpwVeGQk2I5thdYxF/KmF0aaz6TfJZ/hfl1h/XlcDr7k1KH7ThDagpFaA== + dependencies: + glob-to-regexp "^0.4.1" + graceful-fs "^4.1.2" + wcwidth@^1.0.0, wcwidth@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" @@ -19981,11 +19989,16 @@ webpack-bundle-analyzer@4.3.0: source-list-map "^2.0.0" source-map "~0.6.1" -"webpack-sources3@npm:webpack-sources@3.2.2", webpack-sources@^3.2.2: +"webpack-sources3@npm:webpack-sources@3.2.2": version "3.2.2" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.2.tgz#d88e3741833efec57c4c789b6010db9977545260" integrity sha512-cp5qdmHnu5T8wRg2G3vZZHoJPN14aqQ89SyQ11NpGH5zEMDCclt49rzo+MaRazk7/UeILhAI+/sEtcM+7Fr0nw== +webpack-sources@^3.2.2: + version "3.2.2" + resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.2.tgz#d88e3741833efec57c4c789b6010db9977545260" + integrity sha512-cp5qdmHnu5T8wRg2G3vZZHoJPN14aqQ89SyQ11NpGH5zEMDCclt49rzo+MaRazk7/UeILhAI+/sEtcM+7Fr0nw== + "webpack4@npm:webpack@4.44.1": version "4.44.1" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.44.1.tgz#17e69fff9f321b8f117d1fda714edfc0b939cc21" @@ -20015,10 +20028,10 @@ webpack-bundle-analyzer@4.3.0: watchpack "^1.7.4" webpack-sources "^1.4.1" -"webpack5@npm:webpack@5.64.4": - version "5.64.4" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.64.4.tgz#e1454b6a13009f57cc2c78e08416cd674622937b" - integrity sha512-LWhqfKjCLoYJLKJY8wk2C3h77i8VyHowG3qYNZiIqD6D0ZS40439S/KVuc/PY48jp2yQmy0mhMknq8cys4jFMw== +"webpack5@npm:webpack@5.64.3": + version "5.64.3" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.64.3.tgz#f4792cc3f8528db2c18375fa2cd269f69e0bf69f" + integrity sha512-XF6/IL9Bw2PPQioiR1UYA8Bs4tX3QXJtSelezKECdLFeSFzWoe44zqTzPW5N+xI3fACaRl2/G3sNA4WYHD7Iww== dependencies: "@types/eslint-scope" "^3.7.0" "@types/estree" "^0.0.50" @@ -20042,7 +20055,7 @@ webpack-bundle-analyzer@4.3.0: schema-utils "^3.1.0" tapable "^2.1.1" terser-webpack-plugin "^5.1.3" - watchpack "^2.3.0" + watchpack "^2.2.0" webpack-sources "^3.2.2" "webpack@link:./node_modules/webpack5":