diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 377caec98ce0..412a40107b42 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -18,7 +18,7 @@ jobs:
prebuild:
name: Pre-Build checks
runs-on: ubuntu-latest
- timeout-minutes: 5
+ timeout-minutes: 15
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
steps:
diff --git a/ci/build/build-release.sh b/ci/build/build-release.sh
index ce8a3eaf43bc..fe788d7c886d 100755
--- a/ci/build/build-release.sh
+++ b/ci/build/build-release.sh
@@ -82,10 +82,12 @@ bundle_vscode() {
rsync "$VSCODE_SRC_PATH/resources/linux/code.png" "$VSCODE_OUT_PATH/resources/linux/code.png"
rsync "$VSCODE_SRC_PATH/resources/web/callback.html" "$VSCODE_OUT_PATH/resources/web/callback.html"
- # Adds the commit and date to product.json
+ # Add the commit and date to product.json and enable telemetry. This just
+ # makes telemetry available; it can still be disabled by flag or setting.
jq --slurp '.[0] * .[1]' "$VSCODE_SRC_PATH/product.json" <(
cat << EOF
{
+ "enableTelemetry": true,
"commit": "$(git rev-parse HEAD)",
"date": $(jq -n 'now | todate')
}
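
For reference, `jq --slurp '.[0] * .[1]'` reads both inputs into a single array and recursively merges the second object into the first, so the keys from the heredoc (including the new `enableTelemetry`) extend or override `product.json`. A minimal, standalone sketch of the same pattern, using made-up file names, that can be run from inside any Git checkout to sanity-check the merge behavior:

```bash
#!/usr/bin/env bash
# Sketch only: reproduces the jq merge pattern above with hypothetical inputs.
set -euo pipefail

# Stand-in for VS Code's product.json.
cat > /tmp/base.json << 'EOF'
{ "nameShort": "Code - OSS", "quality": "oss" }
EOF

# Merge an overrides object on top of it; keys from the second document win
# on conflict, and nested objects are merged recursively by the `*` operator.
jq --slurp '.[0] * .[1]' /tmp/base.json <(
  cat << EOF
{
  "enableTelemetry": true,
  "commit": "$(git rev-parse HEAD)",
  "date": $(jq -n 'now | todate')
}
EOF
)
```

Note that `jq -n 'now | todate'` already emits a quoted ISO 8601 string, which is why the heredoc does not add quotes around that substitution for the `date` field.
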
diff --git a/ci/build/build-standalone-release.sh b/ci/build/build-standalone-release.sh
index c91debe1ee8a..f12f240d0087 100755
--- a/ci/build/build-standalone-release.sh
+++ b/ci/build/build-standalone-release.sh
@@ -27,6 +27,12 @@ main() {
cd "$RELEASE_PATH"
yarn --production --frozen-lockfile
+
+ # HACK: the version of TypeScript that VS Code 1.57 uses in extensions/
+ # leaves a few stray symlinks. Clean them up so nfpm does not fail.
+ # Remove this line when it's no longer needed.
+
+ rm -fr "$RELEASE_PATH/lib/vscode/extensions/node_modules/.bin"
}
main "$@"
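
If a future VS Code or TypeScript bump changes this behavior, a quick way to check whether the `rm` above is still needed is to look for dangling symlinks in the release tree before nfpm runs. A small sketch, assuming GNU find and the same `RELEASE_PATH` used by the script above:

```bash
# List dangling symlinks under the bundled extensions; nfpm fails on these.
# With GNU find, -xtype l matches symlinks whose target does not exist.
find "$RELEASE_PATH/lib/vscode/extensions" -xtype l -print
```

If this prints nothing once the workaround is removed, the `rm -fr` line can likely be dropped.
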
diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md
index d4bf6f07a10b..f49f07daa038 100644
--- a/docs/CONTRIBUTING.md
+++ b/docs/CONTRIBUTING.md
@@ -198,16 +198,62 @@ a Git subtree to fork and modify VS Code. This code lives under
Some noteworthy changes in our version of VS Code include:
-- Adding our build file, which includes our code and VS Code's web code
-- Allowing multiple extension directories (both user and built-in)
-- Modifying the loader, WebSocket, webview, service worker, and asset requests to
- use the URL of the page as a base (and TLS, if necessary for the WebSocket)
-- Sending client-side telemetry through the server
-- Allowing modification of the display language
-- Making it possible for us to load code on the client
-- Making it possible to install extensions of any kind
-- Fixing issue with getting disconnected when your machine sleeps or hibernates
-- Adding connection type to web socket query parameters
+- Adding our build file, [`lib/vscode/coder.js`](../lib/vscode/coder.js), which includes build steps specific to code-server
+- Node.js version detection changes in [`build/lib/node.ts`](../lib/vscode/build/lib/node.ts) and [`build/lib/util.ts`](../lib/vscode/build/lib/util.ts)
+- Allowing extra extension directories
+ - Added extra arguments to [`src/vs/platform/environment/common/argv.ts`](../lib/vscode/src/vs/platform/environment/common/argv.ts) and to [`src/vs/platform/environment/node/argv.ts`](../lib/vscode/src/vs/platform/environment/node/argv.ts)
+ - Added extra environment state to [`src/vs/platform/environment/common/environment.ts`](../lib/vscode/src/vs/platform/environment/common/environment.ts)
+ - Added extra getters to [`src/vs/platform/environment/common/environmentService.ts`](../lib/vscode/src/vs/platform/environment/common/environmentService.ts)
+ - Added extra scanning paths to [`src/vs/platform/extensionManagement/node/extensionsScanner.ts`](../lib/vscode/src/vs/platform/extensionManagement/node/extensionsScanner.ts)
+- Additions/removals from [`package.json`](../lib/vscode/package.json):
+ - Removing `electron`, `keytar` and `native-keymap` to avoid pulling in desktop dependencies during build on Linux
+ - Removing `gulp-azure-storage` and `gulp-tar` (unused in our build process; they may pull in outdated dependencies)
+ - Adding `proxy-agent`, `proxy-from-env` (for proxying) and `rimraf` (used during build/install steps)
+- Adding our branding/custom URLs/version:
+ - [`product.json`](../lib/vscode/product.json)
+ - [`src/vs/base/common/product.ts`](../lib/vscode/src/vs/base/common/product.ts)
+ - [`src/vs/workbench/browser/parts/dialogs/dialogHandler.ts`](../lib/vscode/src/vs/workbench/browser/parts/dialogs/dialogHandler.ts)
+ - [`src/vs/workbench/contrib/welcome/page/browser/vs_code_welcome_page.ts`](../lib/vscode/src/vs/workbench/contrib/welcome/page/browser/vs_code_welcome_page.ts)
+ - [`src/vs/workbench/contrib/welcome/page/browser/welcomePage.ts`](../lib/vscode/src/vs/workbench/contrib/welcome/page/browser/welcomePage.ts)
+- Removing azure/macOS signing related dependencies from [`build/package.json`](../lib/vscode/build/package.json)
+- Modifying `.gitignore` to allow us to add files to `src/vs/server`, and modifying `.eslintignore` to skip linting on the shared files below (we use different formatter settings than VS Code).
+- Sharing some files with our codebase via symlinks:
+ - [`src/vs/base/common/ipc.d.ts`](../lib/vscode/src/vs/base/common/ipc.d.ts) points to [`typings/ipc.d.ts`](../typings/ipc.d.ts)
+ - [`src/vs/base/common/util.ts`](../lib/vscode/src/vs/base/common/util.ts) points to [`src/common/util.ts`](../src/common/util.ts)
+ - [`src/vs/base/node/proxy_agent.ts`](../lib/vscode/src/vs/base/node/proxy_agent.ts) points to [`src/node/proxy_agent.ts`](../src/node/proxy_agent.ts)
+- Allowing socket changes by adding `setSocket` in [`src/vs/base/parts/ipc/common/ipc.net.ts`](../lib/vscode/src/vs/base/parts/ipc/common/ipc.net.ts)
+ - We use this for connection persistence in our server-side code.
+- Added our server-side Node.js code to `src/vs/server`.
+ - This code includes the logic to spawn the various services (extension host, terminal, etc.) and some glue
+- Added [`src/vs/workbench/browser/client.ts`](../lib/vscode/src/vs/workbench/browser/client.ts) to hold some server customizations.
+ - Includes the functionality for the Log Out command and menu item
+ - Also imports and calls `initialize` from the main web file, [`src/vs/workbench/browser/web.main.ts`](../lib/vscode/src/vs/workbench/browser/web.main.ts)
+- Added a (hopefully temporary) hotfix to [`src/vs/workbench/common/resources.ts`](../lib/vscode/src/vs/workbench/common/resources.ts) to get context menu actions working for the Git integration.
+- Added connection type to WebSocket query parameters in [`src/vs/platform/remote/common/remoteAgentConnection.ts`](../lib/vscode/src/vs/platform/remote/common/remoteAgentConnection.ts)
+- Added `CODE_SERVER*` variables to the sanitization list in [`src/vs/base/common/processes.ts`](../lib/vscode/src/vs/base/common/processes.ts)
+- Fix localization support:
+ - Added file [`src/vs/workbench/services/localizations/browser/localizationsService.ts`](../lib/vscode/src/vs/workbench/services/localizations/browser/localizationsService.ts).
+ - Modified file [`src/vs/base/common/platform.ts`](../lib/vscode/src/vs/base/common/platform.ts)
+ - Modified file [`src/vs/base/node/languagePacks.js`](../lib/vscode/src/vs/base/node/languagePacks.js)
+- Added code to allow the server to inject settings into [`src/vs/platform/product/common/product.ts`](../lib/vscode/src/vs/platform/product/common/product.ts)
+- Extension fixes:
+ - Avoid disabling extensions by extensionKind in [`src/vs/workbench/services/extensionManagement/browser/extensionEnablementService.ts`](../lib/vscode/src/vs/workbench/services/extensionManagement/browser/extensionEnablementService.ts) (Needed for vscode-icons)
+ - Remove broken symlinks in [`extensions/postinstall.js`](../lib/vscode/extensions/postinstall.js)
+ - Add tip about extension gallery in [`src/vs/workbench/contrib/extensions/browser/extensionsViewlet.ts`](../lib/vscode/src/vs/workbench/contrib/extensions/browser/extensionsViewlet.ts)
+ - Use our own server for GitHub authentication in [`extensions/github-authentication/src/githubServer.ts`](../lib/vscode/extensions/github-authentication/src/githubServer.ts)
+ - Persist settings on the server in [`src/vs/workbench/services/environment/browser/environmentService.ts`](../lib/vscode/src/vs/workbench/services/environment/browser/environmentService.ts)
+ - Add extension install fallback in [`src/vs/workbench/services/extensionManagement/common/extensionManagementService.ts`](../lib/vscode/src/vs/workbench/services/extensionManagement/common/extensionManagementService.ts)
+ - Add the proxy-agent monkeypatch and keep the extension host running indefinitely in [`src/vs/workbench/services/extensions/node/extensionHostProcessSetup.ts`](../lib/vscode/src/vs/workbench/services/extensions/node/extensionHostProcessSetup.ts)
+ - Patch build system to avoid removing extension dependencies for `yarn global add` users in [`build/lib/extensions.ts`](../lib/vscode/build/lib/extensions.ts)
+ - Allow all extensions to use proposed APIs in [`src/vs/workbench/services/environment/browser/environmentService.ts`](../lib/vscode/src/vs/workbench/services/environment/browser/environmentService.ts)
+ - Make storage writes async to allow extensions to wait for them to complete in [`src/vs/platform/storage/common/storage.ts`](../lib/vscode/src/vs/platform/storage/common/storage.ts)
+- Specify webview path in [`src/vs/code/browser/workbench/workbench.ts`](../lib/vscode/src/vs/code/browser/workbench/workbench.ts)
+- URL readability improvements for folder/workspace in [`src/vs/code/browser/workbench/workbench.ts`](../lib/vscode/src/vs/code/browser/workbench/workbench.ts)
+- Socket/Authority-related fixes (for remote proxying etc.):
+ - [`src/vs/code/browser/workbench/workbench.ts`](../lib/vscode/src/vs/code/browser/workbench/workbench.ts)
+ - [`src/vs/platform/remote/browser/browserSocketFactory.ts`](../lib/vscode/src/vs/platform/remote/browser/browserSocketFactory.ts)
+ - [`src/vs/base/common/network.ts`](../lib/vscode/src/vs/base/common/network.ts)
+- Added code to write out IPC path in [`src/vs/workbench/api/node/extHostCLIServer.ts`](../lib/vscode/src/vs/workbench/api/node/extHostCLIServer.ts)
As the web portion of VS Code matures, we'll be able to shrink and possibly
eliminate our modifications. In the meantime, upgrading the VS Code version requires
diff --git a/lib/vscode/.devcontainer/README.md b/lib/vscode/.devcontainer/README.md
index 8262d411570b..827166823d73 100644
--- a/lib/vscode/.devcontainer/README.md
+++ b/lib/vscode/.devcontainer/README.md
@@ -1,14 +1,14 @@
# Code - OSS Development Container
-This repository includes configuration for a development container for working with Code - OSS in an isolated local container or using [GitHub Codespaces](https://github.com/features/codespaces).
+This repository includes configuration for a development container for working with Code - OSS in a local container or using [GitHub Codespaces](https://github.com/features/codespaces).
-> **Tip:** The default VNC password is `vscode`. The VNC server runs on port `5901` with a web client at `6080`. For better performance, we recommend using a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Applications like the macOS Screen Sharing app will not perform as well.
+> **Tip:** The default VNC password is `vscode`. The VNC server runs on port `5901` and a web client is available on port `6080`.
## Quick start - local
1. Install Docker Desktop or Docker for Linux on your local machine. (See [docs](https://aka.ms/vscode-remote/containers/getting-started) for additional details.)
-2. **Important**: Docker needs at least **4 Cores and 6 GB of RAM (8 GB recommended)** to run full build. If you on macOS, or using the old Hyper-V engine for Windows, update these values for Docker Desktop by right-clicking on the Docker status bar item, going to **Preferences/Settings > Resources > Advanced**.
+2. **Important**: Docker needs at least **4 Cores and 6 GB of RAM (8 GB recommended)** to run a full build. If you are on macOS, or are using the old Hyper-V engine for Windows, update these values for Docker Desktop by right-clicking on the Docker status bar item and going to **Preferences/Settings > Resources > Advanced**.
> **Note:** The [Resource Monitor](https://marketplace.visualstudio.com/items?itemName=mutantdino.resourcemonitor) extension is included in the container so you can keep an eye on CPU/Memory in the status bar.
@@ -16,53 +16,56 @@ This repository includes configuration for a development container for working w
![Image of Remote - Containers extension](https://microsoft.github.io/vscode-remote-release/images/remote-containers-extn.png)
- > Note that the Remote - Containers extension requires the Visual Studio Code distribution of Code - OSS. See the [FAQ](https://aka.ms/vscode-remote/faq/license) for details.
+ > **Note:** The Remote - Containers extension requires the Visual Studio Code distribution of Code - OSS. See the [FAQ](https://aka.ms/vscode-remote/faq/license) for details.
-4. Press Ctrl/Cmd + Shift + P and select **Remote-Containers: Clone Repository in Container Volume...**.
+4. Press Ctrl/Cmd + Shift + P or F1 and select **Remote-Containers: Clone Repository in Container Volume...**.
- > **Tip:** While you can use your local source tree instead, operations like `yarn install` can be slow on macOS or using the Hyper-V engine on Windows. We recommend the "clone repository in container" approach instead since it uses "named volume" rather than the local filesystem.
+ > **Tip:** While you can use your local source tree instead, operations like `yarn install` can be slow on macOS or when using the Hyper-V engine on Windows. We recommend the "clone repository in container" approach since it uses a "named volume" rather than the local filesystem.
5. Type `https://github.com/microsoft/vscode` (or a branch or PR URL) in the input box and press Enter.
-6. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080) or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
+6. After the container is running, open a web browser and go to [http://localhost:6080](http://localhost:6080), or use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
-Anything you start in VS Code or the integrated terminal will appear here.
+Anything you start in VS Code, or the integrated terminal, will appear here.
Next: **[Try it out!](#try-it)**
## Quick start - GitHub Codespaces
-> **IMPORTANT:** You need to use a "Standard" sized codespace or larger (4-core, 8GB) since VS Code needs 6GB of RAM to compile. This is now the default for GitHub Codespaces, but do not downgrade to "Basic" unless you do not intend to compile.
+1. From the [microsoft/vscode GitHub repository](https://github.com/microsoft/vscode), click on the **Code** dropdown, select **Open with Codespaces**, and then click on **New codespace**. If prompted, select the **Standard** machine size (which is also the default).
-1. From the [microsoft/vscode GitHub repository](https://github.com/microsoft/vscode), click on the **Code** dropdown, select **Open with Codespaces**, and the **New codespace**
+ > **Note:** You will not see these options within GitHub if you are not in the Codespaces beta.
- > Note that you will not see these options if you are not in the beta yet.
+2. After the codespace is up and running in your browser, press Ctrl/Cmd + Shift + P or F1 and select **Ports: Focus on Ports View**.
-2. After the codespace is up and running in your browser, press F1 and select **Ports: Focus on Ports View**.
+3. You should see **VNC web client (6080)** in the list of ports. Select the line and click on the globe icon to open it in a browser tab.
-3. You should see port `6080` under **Forwarded Ports**. Select the line and click on the globe icon to open it in a browser tab.
-
- > If you do not see port `6080`, press F1, select **Forward a Port** and enter port `6080`.
+ > **Tip:** If you do not see the port, press Ctrl/Cmd + Shift + P or F1, select **Forward a Port**, and enter port `6080`.
4. In the new tab, you should see noVNC. Click **Connect** and enter `vscode` as the password.
-Anything you start in VS Code or the integrated terminal will appear here.
+Anything you start in VS Code, or the integrated terminal, will appear here.
Next: **[Try it out!](#try-it)**
### Using VS Code with GitHub Codespaces
-You will likely see better performance when accessing the codespace you created from VS Code since you can use a[VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Here's how to do it.
+You may see improved VNC responsiveness when accessing a codespace from the VS Code client since you can use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/). Here's how to do it.
+
+1. Install [Visual Studio Code Stable](https://code.visualstudio.com/) or [Insiders](https://code.visualstudio.com/insiders/) and the [GitHub Codespaces extension](https://marketplace.visualstudio.com/items?itemName=GitHub.codespaces).
-1. [Create a codespace](#quick-start---github-codespaces) if you have not already.
+ > **Note:** The GitHub Codespaces extension requires the Visual Studio Code distribution of Code - OSS.
-2. Set up [VS Code for use with GitHub Codespaces](https://docs.github.com/github/developing-online-with-codespaces/using-codespaces-in-visual-studio-code)
+2. After VS Code is up and running, press Ctrl/Cmd + Shift + P or F1, choose **Codespaces: Create New Codespace**, and use the following settings:
+ - `microsoft/vscode` for the repository.
+ - Select any branch (e.g. **main**); you can select a different one later.
+ - Choose **Standard** (4-core, 8GB) as the size.
-3. After the VS Code is up and running, press F1, choose **Codespaces: Connect to Codespace**, and select the codespace you created.
+3. After you have connected to the codespace, you can use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
-4. After you've connected to the codespace, use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
+ > **Tip:** You may also need to change your VNC client's **Picture Quality** setting to **High** to get a full-color desktop.
-5. Anything you start in VS Code or the integrated terminal will appear here.
+4. Anything you start in VS Code, or the integrated terminal, will appear here.
Next: **[Try it out!](#try-it)**
@@ -70,20 +73,18 @@ Next: **[Try it out!](#try-it)**
This container uses the [Fluxbox](http://fluxbox.org/) window manager to keep things lean. **Right-click on the desktop** to see menu options. It works with GNOME and GTK applications, so other tools can be installed if needed.
-Note you can also set the resolution from the command line by typing `set-resolution`.
+> **Note:** You can also set the resolution from the command line by typing `set-resolution`.
To start working with Code - OSS, follow these steps:
-1. In your local VS Code, open a terminal (Ctrl/Cmd + Shift + \`) and type the following commands:
+1. In your local VS Code client, open a terminal (Ctrl/Cmd + Shift + \`) and type the following commands:
```bash
yarn install
bash scripts/code.sh
```
- Note that a previous run of `yarn install` will already be cached, so this step should simply pick up any recent differences.
-
-2. After the build is complete, open a web browser or a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to the desktop environnement as described in the quick start and enter `vscode` as the password.
+2. After the build is complete, open a web browser or a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to the desktop environment as described in the quick start and enter `vscode` as the password.
3. You should now see Code - OSS!
@@ -91,7 +92,7 @@ Next, let's try debugging.
1. Shut down Code - OSS by clicking the box in the upper right corner of the Code - OSS window through your browser or VNC viewer.
-2. Go to your local VS Code client, and use Run / Debug view to launch the **VS Code** configuration. (Typically the default, so you can likely just press F5).
+2. Go to your local VS Code client, and use the **Run / Debug** view to launch the **VS Code** configuration. (Typically the default, so you can likely just press F5).
> **Note:** If launching times out, you can increase the value of `timeout` in the "VS Code", "Attach Main Process", "Attach Extension Host", and "Attach to Shared Process" configurations in [launch.json](../.vscode/launch.json). However, running `scripts/code.sh` first will set up Electron which will usually solve timeout issues.
diff --git a/lib/vscode/.devcontainer/devcontainer.json b/lib/vscode/.devcontainer/devcontainer.json
index 3b82cd9028d2..d66344eccf65 100644
--- a/lib/vscode/.devcontainer/devcontainer.json
+++ b/lib/vscode/.devcontainer/devcontainer.json
@@ -3,20 +3,26 @@
// Image contents: https://github.com/microsoft/vscode-dev-containers/blob/master/repository-containers/images/github.com/microsoft/vscode/.devcontainer/base.Dockerfile
"image": "mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:branch-main",
-
- "workspaceMount": "source=${localWorkspaceFolder},target=/home/node/workspace/vscode,type=bind,consistency=cached",
- "workspaceFolder": "/home/node/workspace/vscode",
"overrideCommand": false,
"runArgs": [ "--init", "--security-opt", "seccomp=unconfined"],
"settings": {
- "terminal.integrated.shell.linux": "/bin/bash",
"resmon.show.battery": false,
"resmon.show.cpufreq": false
},
- // noVNC, VNC, debug ports
- "forwardPorts": [6080, 5901, 9222],
+ // noVNC, VNC
+ "forwardPorts": [6080, 5901],
+ "portsAttributes": {
+ "6080": {
+ "label": "VNC web client (noVNC)",
+ "onAutoForward": "silent"
+ },
+ "5901": {
+ "label": "VNC TCP port",
+ "onAutoForward": "silent"
+ }
+ },
"extensions": [
"dbaeumer.vscode-eslint",
diff --git a/lib/vscode/.eslintignore b/lib/vscode/.eslintignore
index b67a816156a4..f9c117426c75 100644
--- a/lib/vscode/.eslintignore
+++ b/lib/vscode/.eslintignore
@@ -17,6 +17,7 @@
**/extensions/typescript-basics/test/colorize-fixtures/**
**/extensions/**/dist/**
# These are code-server code symlinks.
+src/vs/base/common/util.ts
+src/vs/base/common/ipc.d.ts
src/vs/base/node/proxy_agent.ts
-src/vs/ipc.d.ts
-src/vs/server/common/util.ts
+src/vs/server/uriTransformer.ts
diff --git a/lib/vscode/.eslintrc.json b/lib/vscode/.eslintrc.json
index e7ff9be00a1f..dac80d296476 100644
--- a/lib/vscode/.eslintrc.json
+++ b/lib/vscode/.eslintrc.json
@@ -62,7 +62,7 @@
"code-no-standalone-editor": "warn",
"code-no-unexternalized-strings": "warn",
"code-layering": [
- "off",
+ "warn",
{
"common": [],
"node": [
@@ -88,7 +88,7 @@
}
],
"code-import-patterns": [
- "off",
+ "warn",
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// !!! Do not relax these rules !!!
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -501,7 +501,7 @@
"**/vs/platform/**",
"**/vs/editor/**",
"**/vs/workbench/{common,browser,node,electron-sandbox,electron-browser}/**",
- "vs/workbench/contrib/files/common/editors/fileEditorInput",
+ "vs/workbench/contrib/files/browser/editors/fileEditorInput",
"**/vs/workbench/services/**",
"**/vs/workbench/test/**",
"*" // node modules
@@ -807,6 +807,7 @@
"**/vs/platform/**/{common,node}/**",
"**/vs/workbench/**/{common,node}/**",
"**/vs/server/**",
+ "@coder/logger", // NOTE@coder: add logger
"*" // node modules
]
},
diff --git a/lib/vscode/.github/ISSUE_TEMPLATE/bug_report.md b/lib/vscode/.github/ISSUE_TEMPLATE/bug_report.md
index b40ba5ddd483..8a44ce8c7ac1 100644
--- a/lib/vscode/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/lib/vscode/.github/ISSUE_TEMPLATE/bug_report.md
@@ -8,6 +8,11 @@ about: Create a report to help us improve
+
+Does this issue occur when all extensions are disabled?: Yes/No
+
+
+
- VS Code Version:
- OS Version:
@@ -15,9 +20,3 @@ Steps to Reproduce:
1.
2.
-
-
-Does this issue occur when all extensions are disabled?: Yes/No
-
-
-
diff --git a/lib/vscode/.github/classifier.json b/lib/vscode/.github/classifier.json
index 5adabb9aa9c9..1783b82c8797 100644
--- a/lib/vscode/.github/classifier.json
+++ b/lib/vscode/.github/classifier.json
@@ -1,16 +1,17 @@
{
"$schema": "https://raw.githubusercontent.com/microsoft/vscode-github-triage-actions/master/classifier-deep/apply/apply-labels/deep-classifier-config.schema.json",
- "vacation": [],
+ "vacation": ["RMacfarlane"],
"assignees": {
"JacksonKearl": {"accuracy": 0.5}
},
"labels": {
"L10N": {"assign": []},
"VIM": {"assign": []},
+ "accessibility": { "assign": ["isidorn"]},
"api": {"assign": ["jrieken"]},
"api-finalization": {"assign": []},
"api-proposal": {"assign": ["jrieken"]},
- "authentication": {"assign": ["RMacfarlane"]},
+ "authentication": {"assign": ["TylerLeonhardt"]},
"breadcrumbs": {"assign": ["jrieken"]},
"callhierarchy": {"assign": ["jrieken"]},
"code-lens": {"assign": ["jrieken"]},
@@ -20,8 +21,7 @@
"context-keys": {"assign": []},
"css-less-scss": {"assign": ["aeschli"]},
"custom-editors": {"assign": ["mjbvz"]},
- "debug": {"assign": ["isidorn"]},
- "debug-console": {"assign": ["isidorn"]},
+ "debug": {"assign": ["weinand"]},
"dialogs": {"assign": ["sbatten"]},
"diff-editor": {"assign": []},
"dropdown": {"assign": []},
@@ -81,14 +81,15 @@
"icon-brand": {"assign": []},
"icons-product": {"assign": ["misolori"]},
"install-update": {"assign": []},
- "integrated-terminal": {"assign": ["meganrogge"]},
- "integrated-terminal-conpty": {"assign": ["meganrogge"]},
- "integrated-terminal-links": {"assign": ["meganrogge"]},
+ "terminal": {"assign": ["meganrogge"]},
+ "terminal-conpty": {"assign": ["meganrogge"]},
+ "terminal-links": {"assign": ["meganrogge"]},
+ "terminal-external": {"assign": ["meganrogge"]},
"integration-test": {"assign": []},
"intellisense-config": {"assign": []},
"ipc": {"assign": ["joaomoreno"]},
"issue-bot": {"assign": ["chrmarti"]},
- "issue-reporter": {"assign": ["RMacfarlane"]},
+ "issue-reporter": {"assign": ["TylerLeonhardt"]},
"javascript": {"assign": ["mjbvz"]},
"json": {"assign": ["aeschli"]},
"keybindings": {"assign": []},
@@ -113,7 +114,7 @@
"php": {"assign": ["roblourens"]},
"portable-mode": {"assign": ["joaomoreno"]},
"proxy": {"assign": []},
- "quick-pick": {"assign": ["chrmarti"]},
+ "quick-pick": {"assign": ["TylerLeonhardt"]},
"references-viewlet": {"assign": ["jrieken"]},
"release-notes": {"assign": []},
"remote": {"assign": []},
@@ -152,7 +153,7 @@
"web": {"assign": ["bpasero"]},
"webview": {"assign": ["mjbvz"]},
"workbench-cli": {"assign": []},
- "workbench-diagnostics": {"assign": ["RMacfarlane"]},
+ "workbench-diagnostics": {"assign": ["Tyriar"]},
"workbench-dnd": {"assign": ["bpasero"]},
"workbench-editor-grid": {"assign": ["sbatten"]},
"workbench-editors": {"assign": ["bpasero"]},
diff --git a/lib/vscode/.github/commands.json b/lib/vscode/.github/commands.json
index de0643d56c92..388a9c3dbb33 100644
--- a/lib/vscode/.github/commands.json
+++ b/lib/vscode/.github/commands.json
@@ -90,7 +90,7 @@
"@author"
],
"action": "updateLabels",
- "addLabel": "z-author-verified",
+ "addLabel": "verified",
"removeLabel": "author-verification-requested",
"requireLabel": "author-verification-requested",
"disallowLabel": "unreleased"
@@ -133,6 +133,18 @@
"action": "updateLabels",
"addLabel": "~needs more info"
},
+ {
+ "type": "comment",
+ "name": "needsPerfInfo",
+ "allowUsers": [
+ "cleidigh",
+ "usernamehw",
+ "gjsjohnmurray",
+ "IllusionMH"
+ ],
+ "addLabel": "needs more info",
+ "comment": "Thanks for creating this issue regarding performance! Please follow this guide to help us diagnose performance issues: https://github.com/microsoft/vscode/wiki/Performance-Issues \n\nHappy Coding!"
+ },
{
"type": "comment",
"name": "jsDebugLogs",
diff --git a/lib/vscode/.github/subscribers.json b/lib/vscode/.github/subscribers.json
index 7ee6e5cdadd3..25c676a47c74 100644
--- a/lib/vscode/.github/subscribers.json
+++ b/lib/vscode/.github/subscribers.json
@@ -4,6 +4,7 @@
"rchiodo",
"greazer",
"donjayamanne",
- "jilljac"
+ "jilljac",
+ "IanMatthewHuff"
]
}
diff --git a/lib/vscode/.github/workflows/ci.yml b/lib/vscode/.github/workflows/ci.yml
index d08dac3c03b9..faac7802508d 100644
--- a/lib/vscode/.github/workflows/ci.yml
+++ b/lib/vscode/.github/workflows/ci.yml
@@ -244,6 +244,9 @@ jobs:
- name: Run Valid Layers Checks
run: yarn valid-layers-check
+ - name: Compile /build/
+ run: yarn --cwd build compile
+
- name: Run Monaco Editor Checks
run: yarn monaco-compile-check
diff --git a/lib/vscode/.gitignore b/lib/vscode/.gitignore
index c53681396d35..ed42d401d9cd 100644
--- a/lib/vscode/.gitignore
+++ b/lib/vscode/.gitignore
@@ -7,7 +7,8 @@ node_modules/
extensions/**/dist/
/out*/
/extensions/**/out/
-# src/vs/server NOTE@coder: So our code isn't ignored.
+# NOTE@coder: remove to provide our own server
+# src/vs/server
resources/server
build/node_modules
coverage/
diff --git a/lib/vscode/.vscode/notebooks/api.github-issues b/lib/vscode/.vscode/notebooks/api.github-issues
index b9e25a7c914f..112920287601 100644
--- a/lib/vscode/.vscode/notebooks/api.github-issues
+++ b/lib/vscode/.vscode/notebooks/api.github-issues
@@ -2,37 +2,31 @@
{
"kind": 1,
"language": "markdown",
- "value": "#### Config",
- "editable": true
+ "value": "#### Config"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"April 2021\"",
- "editable": true
+ "value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"May 2021\""
},
{
"kind": 1,
"language": "markdown",
- "value": "### Finalization",
- "editable": true
+ "value": "### Finalization"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repo $milestone label:api-finalization",
- "editable": true
+ "value": "$repo $milestone label:api-finalization"
},
{
"kind": 1,
"language": "markdown",
- "value": "### Proposals",
- "editable": true
+ "value": "### Proposals"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repo $milestone is:open label:api-proposal ",
- "editable": true
+ "value": "$repo $milestone is:open label:api-proposal "
}
]
\ No newline at end of file
diff --git a/lib/vscode/.vscode/notebooks/endgame.github-issues b/lib/vscode/.vscode/notebooks/endgame.github-issues
index 881af2c14b43..bc2fba29ddf1 100644
--- a/lib/vscode/.vscode/notebooks/endgame.github-issues
+++ b/lib/vscode/.vscode/notebooks/endgame.github-issues
@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
- "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\""
+ "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\""
},
{
"kind": 1,
diff --git a/lib/vscode/.vscode/notebooks/my-endgame.github-issues b/lib/vscode/.vscode/notebooks/my-endgame.github-issues
index c435ee77500b..aad3a8db3a90 100644
--- a/lib/vscode/.vscode/notebooks/my-endgame.github-issues
+++ b/lib/vscode/.vscode/notebooks/my-endgame.github-issues
@@ -7,7 +7,7 @@
{
"kind": 2,
"language": "github-issues",
- "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\"\n\n$MINE=assignee:@me"
+ "value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"May 2021\"\n\n$MINE=assignee:@me"
},
{
"kind": 1,
@@ -157,7 +157,7 @@
{
"kind": 2,
"language": "github-issues",
- "value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15"
+ "value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15 -author:hediet"
},
{
"kind": 1,
diff --git a/lib/vscode/.vscode/notebooks/my-work.github-issues b/lib/vscode/.vscode/notebooks/my-work.github-issues
index 4e288133b7a2..77ca0e0443b5 100644
--- a/lib/vscode/.vscode/notebooks/my-work.github-issues
+++ b/lib/vscode/.vscode/notebooks/my-work.github-issues
@@ -2,115 +2,96 @@
{
"kind": 1,
"language": "markdown",
- "value": "##### `Config`: This should be changed every month/milestone",
- "editable": true
+ "value": "##### `Config`: This should be changed every month/milestone"
},
{
"kind": 2,
"language": "github-issues",
- "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"April 2021\"",
- "editable": true
+ "value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"May 2021\""
},
{
"kind": 1,
"language": "github-issues",
- "value": "## Milestone Work",
- "editable": true
+ "value": "## Milestone Work"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos $milestone assignee:@me is:open",
- "editable": true
+ "value": "$repos $milestone assignee:@me is:open"
},
{
"kind": 1,
"language": "github-issues",
- "value": "## Bugs, Debt, Features...",
- "editable": true
+ "value": "## Bugs, Debt, Features..."
},
{
"kind": 1,
"language": "markdown",
- "value": "#### My Bugs",
- "editable": true
+ "value": "#### My Bugs"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:bug",
- "editable": true
+ "value": "$repos assignee:@me is:open label:bug"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Debt & Engineering",
- "editable": true
+ "value": "#### Debt & Engineering"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:debt OR $repos assignee:@me is:open label:engineering",
- "editable": true
+ "value": "$repos assignee:@me is:open label:debt OR $repos assignee:@me is:open label:engineering"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Performance ๐ ๐ ๐",
- "editable": true
+ "value": "#### Performance ๐ ๐ ๐"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:perf OR $repos assignee:@me is:open label:perf-startup OR $repos assignee:@me is:open label:perf-bloat OR $repos assignee:@me is:open label:freeze-slow-crash-leak",
- "editable": true
+ "value": "$repos assignee:@me is:open label:perf OR $repos assignee:@me is:open label:perf-startup OR $repos assignee:@me is:open label:perf-bloat OR $repos assignee:@me is:open label:freeze-slow-crash-leak"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Feature Requests",
- "editable": true
+ "value": "#### Feature Requests"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:feature-request milestone:Backlog sort:reactions-+1-desc",
- "editable": true
+ "value": "$repos assignee:@me is:open label:feature-request milestone:Backlog sort:reactions-+1-desc"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\"",
- "editable": true
+ "value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\""
},
{
"kind": 1,
"language": "markdown",
- "value": "### Personal Inbox\n",
- "editable": true
+ "value": "### Personal Inbox\n"
},
{
"kind": 1,
"language": "markdown",
- "value": "\n#### Missing Type label",
- "editable": true
+ "value": "\n#### Missing Type label"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open type:issue -label:bug -label:\"needs more info\" -label:feature-request -label:under-discussion -label:debt -label:plan-item -label:upstream",
- "editable": true
+ "value": "$repos assignee:@me is:open type:issue -label:bug -label:\"needs more info\" -label:feature-request -label:under-discussion -label:debt -label:plan-item -label:upstream"
},
{
"kind": 1,
"language": "markdown",
- "value": "#### Not Actionable",
- "editable": true
+ "value": "#### Not Actionable"
},
{
"kind": 2,
"language": "github-issues",
- "value": "$repos assignee:@me is:open label:\"needs more info\"",
- "editable": true
+ "value": "$repos assignee:@me is:open label:\"needs more info\""
}
]
\ No newline at end of file
diff --git a/lib/vscode/.vscode/tasks.json b/lib/vscode/.vscode/tasks.json
index 8fb5cb440b79..c0b290afa729 100644
--- a/lib/vscode/.vscode/tasks.json
+++ b/lib/vscode/.vscode/tasks.json
@@ -55,39 +55,11 @@
}
}
},
- {
- "type": "npm",
- "script": "watch-extension-mediad",
- "label": "Ext Media - Build",
- "isBackground": true,
- "presentation": {
- "reveal": "never",
- "group": "buildWatchers"
- },
- "problemMatcher": {
- "owner": "typescript",
- "applyTo": "closedDocuments",
- "fileLocation": [
- "absolute"
- ],
- "pattern": {
- "regexp": "Error: ([^(]+)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\): (.*)$",
- "file": 1,
- "location": 2,
- "message": 3
- },
- "background": {
- "beginsPattern": "Starting compilation",
- "endsPattern": "Finished compilation"
- }
- }
- },
{
"label": "VS Code - Build",
"dependsOn": [
"Core - Build",
- "Ext - Build",
- "Ext Media - Build",
+ "Ext - Build"
],
"group": {
"kind": "build",
@@ -102,7 +74,8 @@
"group": "build",
"presentation": {
"reveal": "never",
- "group": "buildKillers"
+ "group": "buildKillers",
+ "close": true
},
"problemMatcher": "$tsc"
},
@@ -113,18 +86,8 @@
"group": "build",
"presentation": {
"reveal": "never",
- "group": "buildKillers"
- },
- "problemMatcher": "$tsc"
- },
- {
- "type": "npm",
- "script": "kill-watch-extension-mediad",
- "label": "Kill Ext Media - Build",
- "group": "build",
- "presentation": {
- "reveal": "never",
- "group": "buildKillers"
+ "group": "buildKillers",
+ "close": true
},
"problemMatcher": "$tsc"
},
@@ -132,8 +95,7 @@
"label": "Kill VS Code - Build",
"dependsOn": [
"Kill Core - Build",
- "Kill Ext - Build",
- "Kill Ext Media - Build",
+ "Kill Ext - Build"
],
"group": "build",
"problemMatcher": []
@@ -238,7 +200,8 @@
"command": "node build/lib/preLaunch.js",
"label": "Ensure Prelaunch Dependencies",
"presentation": {
- "reveal": "silent"
+ "reveal": "silent",
+ "close": true
}
},
{
diff --git a/lib/vscode/.yarnrc b/lib/vscode/.yarnrc
deleted file mode 100644
index 1965e671993f..000000000000
--- a/lib/vscode/.yarnrc
+++ /dev/null
@@ -1,3 +0,0 @@
-disturl "https://electronjs.org/headers"
-target "12.0.4"
-runtime "electron"
diff --git a/lib/vscode/README.md b/lib/vscode/README.md
index 0a9a62d9b7f3..ac7f2194233f 100644
--- a/lib/vscode/README.md
+++ b/lib/vscode/README.md
@@ -10,7 +10,7 @@ This repository ("`Code - OSS`") is where we (Microsoft) develop the [Visual Stu
## Visual Studio Code
-
+
[Visual Studio Code](https://code.visualstudio.com) is a distribution of the `Code - OSS` repository with Microsoft specific customizations released under a traditional [Microsoft product license](https://code.visualstudio.com/License/).
@@ -21,11 +21,11 @@ Visual Studio Code is updated monthly with new features and bug fixes. You can d
## Contributing
-There are many ways in which you can participate in the project, for example:
+There are many ways in which you can participate in this project, for example:
* [Submit bugs and feature requests](https://github.com/microsoft/vscode/issues), and help us verify as they are checked in
* Review [source code changes](https://github.com/microsoft/vscode/pulls)
-* Review the [documentation](https://github.com/microsoft/vscode-docs) and make pull requests for anything from typos to new content
+* Review the [documentation](https://github.com/microsoft/vscode-docs) and make pull requests for anything from typos to additional and new content
If you are interested in fixing issues and contributing directly to the code base,
please see the document [How to Contribute](https://github.com/microsoft/vscode/wiki/How-to-Contribute), which covers the following:
@@ -57,10 +57,10 @@ VS Code includes a set of built-in extensions located in the [extensions](extens
## Development Container
-This repository includes a Visual Studio Code Remote - Containers / Codespaces development container.
+This repository includes a Visual Studio Code Remote - Containers / GitHub Codespaces development container.
-- For [Remote - Containers](https://aka.ms/vscode-remote/download/containers), use the **Remote-Containers: Open Repository in Container...** command which creates a Docker volume for better disk I/O on macOS and Windows.
-- For Codespaces, install the [Visual Studio Codespaces](https://aka.ms/vscs-ext-vscode) extension in VS Code, and use the **Codespaces: Create New Codespace** command.
+- For [Remote - Containers](https://aka.ms/vscode-remote/download/containers), use the **Remote-Containers: Clone Repository in Container Volume...** command which creates a Docker volume for better disk I/O on macOS and Windows.
+- For Codespaces, install the [GitHub Codespaces](https://marketplace.visualstudio.com/items?itemName=GitHub.codespaces) extension in VS Code, and use the **Codespaces: Create New Codespace** command.
Docker / the Codespace should have at least **4 Cores and 6 GB of RAM (8 GB recommended)** to run full build. See the [development container README](.devcontainer/README.md) for more information.
diff --git a/lib/vscode/ThirdPartyNotices.txt b/lib/vscode/ThirdPartyNotices.txt
index e30e71ee4a21..d929c6b59e78 100644
--- a/lib/vscode/ThirdPartyNotices.txt
+++ b/lib/vscode/ThirdPartyNotices.txt
@@ -5,17 +5,17 @@ Do Not Translate or Localize
This project incorporates components from the projects listed below. The original copyright notices and the licenses under which Microsoft received such components are set forth below. Microsoft reserves all rights not expressly granted herein, whether by implication, estoppel or otherwise.
-1. JuliaEditorSupport/atom-language-julia version 0.21.0 (https://github.com/JuliaEditorSupport/atom-language-julia)
-2. atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
-3. atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
-4. atom/language-css version 0.44.4 (https://github.com/atom/language-css)
-5. atom/language-java version 0.32.1 (https://github.com/atom/language-java)
-6. atom/language-sass version 0.61.4 (https://github.com/atom/language-sass)
-7. atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
-8. atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
-9. better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
-10. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
-11. daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
+1. atom/language-clojure version 0.22.7 (https://github.com/atom/language-clojure)
+2. atom/language-coffee-script version 0.49.3 (https://github.com/atom/language-coffee-script)
+3. atom/language-css version 0.44.4 (https://github.com/atom/language-css)
+4. atom/language-java version 0.32.1 (https://github.com/atom/language-java)
+5. atom/language-sass version 0.62.1 (https://github.com/atom/language-sass)
+6. atom/language-shellscript version 0.26.0 (https://github.com/atom/language-shellscript)
+7. atom/language-xml version 0.35.2 (https://github.com/atom/language-xml)
+8. better-go-syntax version 1.0.0 (https://github.com/jeff-hykin/better-go-syntax/ )
+9. Colorsublime-Themes version 0.1.0 (https://github.com/Colorsublime/Colorsublime-Themes)
+10. daaain/Handlebars version 1.8.0 (https://github.com/daaain/Handlebars)
+11. dart-lang/dart-syntax-highlight (https://github.com/dart-lang/dart-syntax-highlight)
12. davidrios/pug-tmbundle (https://github.com/davidrios/pug-tmbundle)
13. definitelytyped (https://github.com/DefinitelyTyped/DefinitelyTyped)
14. demyte/language-cshtml version 0.3.0 (https://github.com/demyte/language-cshtml)
@@ -32,71 +32,45 @@ This project incorporates components from the projects listed below. The origina
25. jeff-hykin/cpp-textmate-grammar version 1.12.11 (https://github.com/jeff-hykin/cpp-textmate-grammar)
26. jeff-hykin/cpp-textmate-grammar version 1.15.5 (https://github.com/jeff-hykin/cpp-textmate-grammar)
27. js-beautify version 1.6.8 (https://github.com/beautify-web/js-beautify)
-28. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
-29. language-docker (https://github.com/moby/moby)
-30. language-less version 0.34.2 (https://github.com/atom/language-less)
-31. language-php version 0.46.0 (https://github.com/atom/language-php)
-32. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
-33. marked version 1.1.0 (https://github.com/markedjs/marked)
-34. mdn-data version 1.1.12 (https://github.com/mdn/data)
-35. microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
-36. microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
-37. microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
-38. microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
-39. mmims/language-batchfile version 0.7.5 (https://github.com/mmims/language-batchfile)
-40. NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
-41. PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
-42. rust-syntax version 0.4.3 (https://github.com/dustypomerleau/rust-syntax)
-43. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
-44. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
-45. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
-46. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
-47. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
-48. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
-49. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
-50. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
-51. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
-52. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
-53. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
-54. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
-55. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
-56. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
-57. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
-58. TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
-59. TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
-60. Unicode version 12.0.0 (https://home.unicode.org/)
-61. vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
-62. vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
-63. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
-64. Web Background Synchronization (https://github.com/WICG/background-sync)
-
-
-%% JuliaEditorSupport/atom-language-julia NOTICES AND INFORMATION BEGIN HERE
-=========================================
-The atom-language-julia package is licensed under the MIT "Expat" License:
+28. JuliaEditorSupport/atom-language-julia version 0.21.0 (https://github.com/JuliaEditorSupport/atom-language-julia)
+29. Jxck/assert version 1.0.0 (https://github.com/Jxck/assert)
+30. language-docker (https://github.com/moby/moby)
+31. language-less version 0.34.2 (https://github.com/atom/language-less)
+32. language-php version 0.46.2 (https://github.com/atom/language-php)
+33. MagicStack/MagicPython version 1.1.1 (https://github.com/MagicStack/MagicPython)
+34. marked version 1.1.0 (https://github.com/markedjs/marked)
+35. mdn-data version 1.1.12 (https://github.com/mdn/data)
+36. microsoft/TypeScript-TmLanguage version 0.0.1 (https://github.com/microsoft/TypeScript-TmLanguage)
+37. microsoft/vscode-JSON.tmLanguage (https://github.com/microsoft/vscode-JSON.tmLanguage)
+38. microsoft/vscode-markdown-tm-grammar version 1.0.0 (https://github.com/microsoft/vscode-markdown-tm-grammar)
+39. microsoft/vscode-mssql version 1.9.0 (https://github.com/microsoft/vscode-mssql)
+40. mmims/language-batchfile version 0.7.6 (https://github.com/mmims/language-batchfile)
+41. NVIDIA/cuda-cpp-grammar (https://github.com/NVIDIA/cuda-cpp-grammar)
+42. PowerShell/EditorSyntax version 1.0.0 (https://github.com/PowerShell/EditorSyntax)
+43. rust-syntax version 0.4.3 (https://github.com/dustypomerleau/rust-syntax)
+44. seti-ui version 0.1.0 (https://github.com/jesseweed/seti-ui)
+45. shaders-tmLanguage version 0.1.0 (https://github.com/tgjones/shaders-tmLanguage)
+46. textmate/asp.vb.net.tmbundle (https://github.com/textmate/asp.vb.net.tmbundle)
+47. textmate/c.tmbundle (https://github.com/textmate/c.tmbundle)
+48. textmate/diff.tmbundle (https://github.com/textmate/diff.tmbundle)
+49. textmate/git.tmbundle (https://github.com/textmate/git.tmbundle)
+50. textmate/groovy.tmbundle (https://github.com/textmate/groovy.tmbundle)
+51. textmate/html.tmbundle (https://github.com/textmate/html.tmbundle)
+52. textmate/ini.tmbundle (https://github.com/textmate/ini.tmbundle)
+53. textmate/javascript.tmbundle (https://github.com/textmate/javascript.tmbundle)
+54. textmate/lua.tmbundle (https://github.com/textmate/lua.tmbundle)
+55. textmate/markdown.tmbundle (https://github.com/textmate/markdown.tmbundle)
+56. textmate/perl.tmbundle (https://github.com/textmate/perl.tmbundle)
+57. textmate/ruby.tmbundle (https://github.com/textmate/ruby.tmbundle)
+58. textmate/yaml.tmbundle (https://github.com/textmate/yaml.tmbundle)
+59. TypeScript-TmLanguage version 0.1.8 (https://github.com/microsoft/TypeScript-TmLanguage)
+60. TypeScript-TmLanguage version 1.0.0 (https://github.com/microsoft/TypeScript-TmLanguage)
+61. Unicode version 12.0.0 (https://home.unicode.org/)
+62. vscode-codicons version 0.0.14 (https://github.com/microsoft/vscode-codicons)
+63. vscode-logfile-highlighter version 2.11.0 (https://github.com/emilast/vscode-logfile-highlighter)
+64. vscode-swift version 0.0.1 (https://github.com/owensd/vscode-swift)
+65. Web Background Synchronization (https://github.com/WICG/background-sync)
-> Copyright (c) 2015
->
-> Permission is hereby granted, free of charge, to any person obtaining
-> a copy of this software and associated documentation files (the
-> "Software"), to deal in the Software without restriction, including
-> without limitation the rights to use, copy, modify, merge, publish,
-> distribute, sublicense, and/or sell copies of the Software, and to
-> permit persons to whom the Software is furnished to do so, subject to
-> the following conditions:
->
-> The above copyright notice and this permission notice shall be
-> included in all copies or substantial portions of the Software.
->
-> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-> IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-> CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-> TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-> SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-=========================================
-END OF JuliaEditorSupport/atom-language-julia NOTICES AND INFORMATION
%% atom/language-clojure NOTICES AND INFORMATION BEGIN HERE
=========================================
@@ -477,6 +451,38 @@ THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
=========================================
END OF daaain/Handlebars NOTICES AND INFORMATION
+%% dart-lang/dart-syntax-highlight NOTICES AND INFORMATION BEGIN HERE
+=========================================
+Copyright 2020, the Dart project authors.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of Google LLC nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+=========================================
+END OF dart-lang/dart-syntax-highlight NOTICES AND INFORMATION
+
%% davidrios/pug-tmbundle NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@@ -855,6 +861,33 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
=========================================
END OF js-beautify NOTICES AND INFORMATION
+%% JuliaEditorSupport/atom-language-julia NOTICES AND INFORMATION BEGIN HERE
+=========================================
+The atom-language-julia package is licensed under the MIT "Expat" License:
+
+> Copyright (c) 2015
+>
+> Permission is hereby granted, free of charge, to any person obtaining
+> a copy of this software and associated documentation files (the
+> "Software"), to deal in the Software without restriction, including
+> without limitation the rights to use, copy, modify, merge, publish,
+> distribute, sublicense, and/or sell copies of the Software, and to
+> permit persons to whom the Software is furnished to do so, subject to
+> the following conditions:
+>
+> The above copyright notice and this permission notice shall be
+> included in all copies or substantial portions of the Software.
+>
+> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+> IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+> CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+> TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+> SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+=========================================
+END OF JuliaEditorSupport/atom-language-julia NOTICES AND INFORMATION
+
%% Jxck/assert NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
diff --git a/lib/vscode/build/azure-pipelines/common/createAsset.js b/lib/vscode/build/azure-pipelines/common/createAsset.js
index 3038ff62b825..340c6fd7e5d9 100644
--- a/lib/vscode/build/azure-pipelines/common/createAsset.js
+++ b/lib/vscode/build/azure-pipelines/common/createAsset.js
@@ -5,15 +5,101 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
+const url = require("url");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
-if (process.argv.length !== 6) {
- console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
+if (process.argv.length !== 8) {
+ console.error('Usage: node createAsset.js PRODUCT OS ARCH TYPE NAME FILE');
process.exit(-1);
}
+// Contains all of the logic for mapping details to our actual product names in CosmosDB
+function getPlatform(product, os, arch, type) {
+ switch (os) {
+ case 'win32':
+ switch (product) {
+ case 'client':
+ const asset = arch === 'ia32' ? 'win32' : `win32-${arch}`;
+ switch (type) {
+ case 'archive':
+ return `${asset}-archive`;
+ case 'setup':
+ return asset;
+ case 'user-setup':
+ return `${asset}-user`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'server':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32' : `server-win32-${arch}`;
+ case 'web':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'linux':
+ switch (type) {
+ case 'snap':
+ return `linux-snap-${arch}`;
+ case 'archive-unsigned':
+ switch (product) {
+ case 'client':
+ return `linux-${arch}`;
+ case 'server':
+ return `server-linux-${arch}`;
+ case 'web':
+ return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'deb-package':
+ return `linux-deb-${arch}`;
+ case 'rpm-package':
+ return `linux-rpm-${arch}`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'darwin':
+ switch (product) {
+ case 'client':
+ if (arch === 'x64') {
+ return 'darwin';
+ }
+ return `darwin-${arch}`;
+ case 'server':
+ return 'server-darwin';
+ case 'web':
+ if (arch !== 'x64') {
+ throw `What should the platform be?: ${product} ${os} ${arch} ${type}`;
+ }
+ return 'server-darwin-web';
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+}
+// Contains all of the logic for mapping types to our actual types in CosmosDB
+function getRealType(type) {
+ switch (type) {
+ case 'user-setup':
+ return 'setup';
+ case 'deb-package':
+ case 'rpm-package':
+ return 'package';
+ default:
+ return type;
+ }
+}
function hashStream(hashName, stream) {
return new Promise((c, e) => {
const shasum = crypto.createHash(hashName);
@@ -45,7 +131,10 @@ function getEnv(name) {
return result;
}
async function main() {
- const [, , platform, type, fileName, filePath] = process.argv;
+ const [, , product, os, arch, unprocessedType, fileName, filePath] = process.argv;
+ // getPlatform needs the unprocessedType
+ const platform = getPlatform(product, os, arch, unprocessedType);
+ const type = getRealType(unprocessedType);
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
@@ -65,14 +154,27 @@ async function main() {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
- console.log('Uploading blobs to Azure storage...');
- await uploadBlob(blobService, quality, blobName, filePath, fileName);
+ const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
+ .withFilter(new azure.ExponentialRetryPolicyFilter(20));
+ // mooncake is fussy and far away, this is needed!
+ blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+ mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+ console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
+ await retry_1.retry(() => Promise.all([
+ uploadBlob(blobService, quality, blobName, filePath, fileName),
+ uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
+ ]));
console.log('Blobs successfully uploaded.');
+ // TODO: Understand if blobName and blobPath are the same and replace blobPath with blobName if so.
+ const assetUrl = `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`;
+ const blobPath = url.parse(assetUrl).path;
+ const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
const asset = {
platform,
type,
- url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
+ url: assetUrl,
hash: sha1hash,
+ mooncakeUrl,
sha256hash,
size
};
@@ -84,6 +186,7 @@ async function main() {
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
+    console.log(` Done ✔️`);
}
main().then(() => {
console.log('Asset successfully created');
diff --git a/lib/vscode/build/azure-pipelines/common/createAsset.ts b/lib/vscode/build/azure-pipelines/common/createAsset.ts
index daf60d710eec..5fe93c566bb1 100644
--- a/lib/vscode/build/azure-pipelines/common/createAsset.ts
+++ b/lib/vscode/build/azure-pipelines/common/createAsset.ts
@@ -6,6 +6,7 @@
'use strict';
import * as fs from 'fs';
+import * as url from 'url';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import * as azure from 'azure-storage';
@@ -24,11 +25,98 @@ interface Asset {
supportsFastUpdate?: boolean;
}
-if (process.argv.length !== 6) {
- console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
+if (process.argv.length !== 8) {
+ console.error('Usage: node createAsset.js PRODUCT OS ARCH TYPE NAME FILE');
process.exit(-1);
}
+// Contains all of the logic for mapping details to our actual product names in CosmosDB
+function getPlatform(product: string, os: string, arch: string, type: string): string {
+ switch (os) {
+ case 'win32':
+ switch (product) {
+ case 'client':
+ const asset = arch === 'ia32' ? 'win32' : `win32-${arch}`;
+ switch (type) {
+ case 'archive':
+ return `${asset}-archive`;
+ case 'setup':
+ return asset;
+ case 'user-setup':
+ return `${asset}-user`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'server':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32' : `server-win32-${arch}`;
+ case 'web':
+ if (arch === 'arm64') {
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ return arch === 'ia32' ? 'server-win32-web' : `server-win32-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'linux':
+ switch (type) {
+ case 'snap':
+ return `linux-snap-${arch}`;
+ case 'archive-unsigned':
+ switch (product) {
+ case 'client':
+ return `linux-${arch}`;
+ case 'server':
+ return `server-linux-${arch}`;
+ case 'web':
+ return arch === 'standalone' ? 'web-standalone' : `server-linux-${arch}-web`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'deb-package':
+ return `linux-deb-${arch}`;
+ case 'rpm-package':
+ return `linux-rpm-${arch}`;
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ case 'darwin':
+ switch (product) {
+ case 'client':
+ if (arch === 'x64') {
+ return 'darwin';
+ }
+ return `darwin-${arch}`;
+ case 'server':
+ return 'server-darwin';
+ case 'web':
+ if (arch !== 'x64') {
+ throw `What should the platform be?: ${product} ${os} ${arch} ${type}`;
+ }
+ return 'server-darwin-web';
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+ default:
+ throw `Unrecognized: ${product} ${os} ${arch} ${type}`;
+ }
+}
+
+// Contains all of the logic for mapping types to our actual types in CosmosDB
+function getRealType(type: string) {
+ switch (type) {
+ case 'user-setup':
+ return 'setup';
+ case 'deb-package':
+ case 'rpm-package':
+ return 'package';
+ default:
+ return type;
+ }
+}
+
function hashStream(hashName: string, stream: Readable): Promise<string> {
return new Promise((c, e) => {
const shasum = crypto.createHash(hashName);
@@ -68,7 +156,10 @@ function getEnv(name: string): string {
}
async function main(): Promise<void> {
- const [, , platform, type, fileName, filePath] = process.argv;
+ const [, , product, os, arch, unprocessedType, fileName, filePath] = process.argv;
+ // getPlatform needs the unprocessedType
+ const platform = getPlatform(product, os, arch, unprocessedType);
+ const type = getRealType(unprocessedType);
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
@@ -98,17 +189,33 @@ async function main(): Promise<void> {
return;
}
- console.log('Uploading blobs to Azure storage...');
+ const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
+ .withFilter(new azure.ExponentialRetryPolicyFilter(20));
+
+ // mooncake is fussy and far away, this is needed!
+ blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+ mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
+
+ console.log('Uploading blobs to Azure storage and Mooncake Azure storage...');
- await uploadBlob(blobService, quality, blobName, filePath, fileName);
+ await retry(() => Promise.all([
+ uploadBlob(blobService, quality, blobName, filePath, fileName),
+ uploadBlob(mooncakeBlobService, quality, blobName, filePath, fileName)
+ ]));
console.log('Blobs successfully uploaded.');
+ // TODO: Understand if blobName and blobPath are the same and replace blobPath with blobName if so.
+ const assetUrl = `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`;
+ const blobPath = url.parse(assetUrl).path;
+ const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
+
const asset: Asset = {
platform,
type,
- url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
+ url: assetUrl,
hash: sha1hash,
+ mooncakeUrl,
sha256hash,
size
};
@@ -123,6 +230,8 @@ async function main(): Promise<void> {
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
+
+	console.log(` Done ✔️`);
}
main().then(() => {
diff --git a/lib/vscode/build/azure-pipelines/common/sync-mooncake.js b/lib/vscode/build/azure-pipelines/common/sync-mooncake.js
deleted file mode 100644
index 1f3354226519..000000000000
--- a/lib/vscode/build/azure-pipelines/common/sync-mooncake.js
+++ /dev/null
@@ -1,87 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-'use strict';
-Object.defineProperty(exports, "__esModule", { value: true });
-const url = require("url");
-const azure = require("azure-storage");
-const mime = require("mime");
-const cosmos_1 = require("@azure/cosmos");
-const retry_1 = require("./retry");
-function log(...args) {
- console.log(...[`[${new Date().toISOString()}]`, ...args]);
-}
-function error(...args) {
- console.error(...[`[${new Date().toISOString()}]`, ...args]);
-}
-if (process.argv.length < 3) {
-    error('Usage: node sync-mooncake.js <quality>');
- process.exit(-1);
-}
-async function sync(commit, quality) {
- log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
- const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
- const container = client.database('builds').container(quality);
- const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
- const res = await container.items.query(query, {}).fetchAll();
- if (res.resources.length !== 1) {
- throw new Error(`No builds found for ${commit}`);
- }
- const build = res.resources[0];
- log(`Found build for ${commit}, with ${build.assets.length} assets`);
- const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
- const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
- const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
- // mooncake is fussy and far away, this is needed!
- blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
- mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
- for (const asset of build.assets) {
- try {
- const blobPath = url.parse(asset.url).path;
- if (!blobPath) {
- throw new Error(`Failed to parse URL: ${asset.url}`);
- }
- const blobName = blobPath.replace(/^\/\w+\//, '');
- log(`Found ${blobName}`);
- if (asset.mooncakeUrl) {
-            log(` Already in Mooncake ✔️`);
- continue;
- }
- const readStream = blobService.createReadStream(quality, blobName, undefined);
- const blobOptions = {
- contentSettings: {
- contentType: mime.lookup(blobPath),
- cacheControl: 'max-age=31536000, public'
- }
- };
- const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
- log(` Uploading to Mooncake...`);
- await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
- log(` Updating build in DB...`);
- const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
- await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
- .execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
-            log(` Done ✔️`);
- }
- catch (err) {
- error(err);
- }
- }
-    log(`All done ✔️`);
-}
-function main() {
- const commit = process.env['BUILD_SOURCEVERSION'];
- if (!commit) {
- error('Skipping publish due to missing BUILD_SOURCEVERSION');
- return;
- }
- const quality = process.argv[2];
- sync(commit, quality).catch(err => {
- error(err);
- process.exit(1);
- });
-}
-main();
diff --git a/lib/vscode/build/azure-pipelines/common/sync-mooncake.ts b/lib/vscode/build/azure-pipelines/common/sync-mooncake.ts
deleted file mode 100644
index 4ffe7a8f15bb..000000000000
--- a/lib/vscode/build/azure-pipelines/common/sync-mooncake.ts
+++ /dev/null
@@ -1,131 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-
-'use strict';
-
-import * as url from 'url';
-import * as azure from 'azure-storage';
-import * as mime from 'mime';
-import { CosmosClient } from '@azure/cosmos';
-import { retry } from './retry';
-
-function log(...args: any[]) {
- console.log(...[`[${new Date().toISOString()}]`, ...args]);
-}
-
-function error(...args: any[]) {
- console.error(...[`[${new Date().toISOString()}]`, ...args]);
-}
-
-if (process.argv.length < 3) {
-	error('Usage: node sync-mooncake.js <quality>');
- process.exit(-1);
-}
-
-interface Build {
- assets: Asset[];
-}
-
-interface Asset {
- platform: string;
- type: string;
- url: string;
- mooncakeUrl: string;
- hash: string;
- sha256hash: string;
- size: number;
- supportsFastUpdate?: boolean;
-}
-
-async function sync(commit: string, quality: string): Promise<void> {
- log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
-
- const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
- const container = client.database('builds').container(quality);
-
- const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
- const res = await container.items.query(query, {}).fetchAll();
-
- if (res.resources.length !== 1) {
- throw new Error(`No builds found for ${commit}`);
- }
-
- const build = res.resources[0];
-
- log(`Found build for ${commit}, with ${build.assets.length} assets`);
-
- const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
-
- const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
-
- const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
- .withFilter(new azure.ExponentialRetryPolicyFilter(20));
-
- // mooncake is fussy and far away, this is needed!
- blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
- mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
-
- for (const asset of build.assets) {
- try {
- const blobPath = url.parse(asset.url).path;
-
- if (!blobPath) {
- throw new Error(`Failed to parse URL: ${asset.url}`);
- }
-
- const blobName = blobPath.replace(/^\/\w+\//, '');
-
- log(`Found ${blobName}`);
-
- if (asset.mooncakeUrl) {
-			log(` Already in Mooncake ✔️`);
- continue;
- }
-
- const readStream = blobService.createReadStream(quality, blobName, undefined!);
- const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
- contentSettings: {
- contentType: mime.lookup(blobPath),
- cacheControl: 'max-age=31536000, public'
- }
- };
-
- const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
-
- log(` Uploading to Mooncake...`);
- await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
-
- log(` Updating build in DB...`);
- const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
- await retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
- .execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
-
-			log(` Done ✔️`);
- } catch (err) {
- error(err);
- }
- }
-
-	log(`All done ✔️`);
-}
-
-function main(): void {
- const commit = process.env['BUILD_SOURCEVERSION'];
-
- if (!commit) {
- error('Skipping publish due to missing BUILD_SOURCEVERSION');
- return;
- }
-
- const quality = process.argv[2];
-
- sync(commit, quality).catch(err => {
- error(err);
- process.exit(1);
- });
-}
-
-main();
diff --git a/lib/vscode/build/azure-pipelines/darwin/product-build-darwin-sign.yml b/lib/vscode/build/azure-pipelines/darwin/product-build-darwin-sign.yml
index 4ad8349c51a8..49f74b55c933 100644
--- a/lib/vscode/build/azure-pipelines/darwin/product-build-darwin-sign.yml
+++ b/lib/vscode/build/azure-pipelines/darwin/product-build-darwin-sign.yml
@@ -35,13 +35,13 @@ steps:
displayName: Restore modules for just build folder and compile it
- download: current
- artifact: vscode-darwin-$(VSCODE_ARCH)
+ artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
displayName: Download $(VSCODE_ARCH) artifact
- script: |
set -e
- unzip $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
- mv $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
+ unzip $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
+ mv $(Pipeline.Workspace)/unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Unzip & move
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
@@ -108,22 +108,18 @@ steps:
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
- script: |
- set -e
-
# For legacy purposes, arch for x64 is just 'darwin'
case $VSCODE_ARCH in
x64) ASSET_ID="darwin" ;;
arm64) ASSET_ID="darwin-arm64" ;;
universal) ASSET_ID="darwin-universal" ;;
esac
+ echo "##vso[task.setvariable variable=ASSET_ID]$ASSET_ID"
+ displayName: Set asset id variable
+
+ - script: mv $(agent.builddirectory)/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin.zip
+    displayName: Rename x64 build to its legacy name
+ condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- node build/azure-pipelines/common/createAsset.js \
- "$ASSET_ID" \
- archive \
- "VSCode-$ASSET_ID.zip" \
- ../VSCode-darwin-$(VSCODE_ARCH).zip
- displayName: Publish Clients
+ - publish: $(Agent.BuildDirectory)/VSCode-$(ASSET_ID).zip
+ artifact: vscode_client_darwin_$(VSCODE_ARCH)_archive
diff --git a/lib/vscode/build/azure-pipelines/darwin/product-build-darwin.yml b/lib/vscode/build/azure-pipelines/darwin/product-build-darwin.yml
index 186920fe96d6..566eeb805229 100644
--- a/lib/vscode/build/azure-pipelines/darwin/product-build-darwin.yml
+++ b/lib/vscode/build/azure-pipelines/darwin/product-build-darwin.yml
@@ -138,19 +138,19 @@ steps:
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- download: current
- artifact: vscode-darwin-x64
+ artifact: unsigned_vscode_client_darwin_x64_archive
displayName: Download x64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- download: current
- artifact: vscode-darwin-arm64
+ artifact: unsigned_vscode_client_darwin_arm64_archive
displayName: Download arm64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
- cp $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
- cp $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
+ cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_x64_archive/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
+ cp $(Pipeline.Workspace)/unsigned_vscode_client_darwin_arm64_archive/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
unzip $(agent.builddirectory)/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64
unzip $(agent.builddirectory)/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64
DEBUG=* node build/darwin/create-universal-app.js
@@ -280,26 +280,27 @@ steps:
- script: |
set -e
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- VSCODE_ARCH="$(VSCODE_ARCH)" ./build/azure-pipelines/darwin/publish-server.sh
- displayName: Publish Servers
+
+ # package Remote Extension Host
+ pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
+
+ # package Remote Extension Host (Web)
+ pushd .. && mv vscode-reh-web-darwin vscode-server-darwin-web && zip -Xry vscode-server-darwin-web.zip vscode-server-darwin-web && popd
+ displayName: Prepare to publish servers
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
- artifact: vscode-darwin-$(VSCODE_ARCH)
+ artifact: unsigned_vscode_client_darwin_$(VSCODE_ARCH)_archive
displayName: Publish client archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
- artifact: vscode-server-darwin-$(VSCODE_ARCH)
+ artifact: vscode_server_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-web.zip
- artifact: vscode-server-darwin-$(VSCODE_ARCH)-web
+ artifact: vscode_web_darwin_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish web server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
@@ -308,5 +309,5 @@ steps:
VSCODE_ARCH="$(VSCODE_ARCH)" \
yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search)
- condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
+ condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
continueOnError: true
diff --git a/lib/vscode/build/azure-pipelines/darwin/publish-server.sh b/lib/vscode/build/azure-pipelines/darwin/publish-server.sh
deleted file mode 100755
index 72a85942d5a5..000000000000
--- a/lib/vscode/build/azure-pipelines/darwin/publish-server.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-if [ "$VSCODE_ARCH" == "x64" ]; then
- # package Remote Extension Host
- pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
-
- # publish Remote Extension Host
- node build/azure-pipelines/common/createAsset.js \
- server-darwin \
- archive-unsigned \
- "vscode-server-darwin.zip" \
- ../vscode-server-darwin.zip
-fi
diff --git a/lib/vscode/build/azure-pipelines/linux/alpine/publish.sh b/lib/vscode/build/azure-pipelines/linux/alpine/publish.sh
deleted file mode 100755
index 2f5647d1ea36..000000000000
--- a/lib/vscode/build/azure-pipelines/linux/alpine/publish.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-set -e
-REPO="$(pwd)"
-ROOT="$REPO/.."
-
-PLATFORM_LINUX="linux-alpine"
-
-# Publish Remote Extension Host
-LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
-SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
-SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
-SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
-
-rm -rf $ROOT/vscode-server-*.tar.*
-(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
-
-# Publish Remote Extension Host (Web)
-LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
-SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
-SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
-SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
-
-rm -rf $ROOT/vscode-server-*-web.tar.*
-(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
diff --git a/lib/vscode/build/azure-pipelines/linux/publish.sh b/lib/vscode/build/azure-pipelines/linux/prepare-publish.sh
similarity index 79%
rename from lib/vscode/build/azure-pipelines/linux/publish.sh
rename to lib/vscode/build/azure-pipelines/linux/prepare-publish.sh
index 6d748c6e340d..891fa8024ef5 100755
--- a/lib/vscode/build/azure-pipelines/linux/publish.sh
+++ b/lib/vscode/build/azure-pipelines/linux/prepare-publish.sh
@@ -13,8 +13,6 @@ TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
-node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"
-
# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
@@ -24,8 +22,6 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
-
# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
@@ -35,8 +31,6 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*-web.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
-node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
-
# Publish DEB
case $VSCODE_ARCH in
x64) DEB_ARCH="amd64" ;;
@@ -47,8 +41,6 @@ PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
-node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"
-
# Publish RPM
case $VSCODE_ARCH in
x64) RPM_ARCH="x86_64" ;;
@@ -61,8 +53,6 @@ PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
-node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
-
# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
@@ -73,3 +63,4 @@ rm -rf $SNAP_TARBALL_PATH
# Export DEB_PATH, RPM_PATH
echo "##vso[task.setvariable variable=DEB_PATH]$DEB_PATH"
echo "##vso[task.setvariable variable=RPM_PATH]$RPM_PATH"
+echo "##vso[task.setvariable variable=TARBALL_PATH]$TARBALL_PATH"
diff --git a/lib/vscode/build/azure-pipelines/linux/product-build-alpine.yml b/lib/vscode/build/azure-pipelines/linux/product-build-alpine.yml
index 8376c079ce88..ed0c35346c70 100644
--- a/lib/vscode/build/azure-pipelines/linux/product-build-alpine.yml
+++ b/lib/vscode/build/azure-pipelines/linux/product-build-alpine.yml
@@ -117,19 +117,37 @@ steps:
- script: |
set -e
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- ./build/azure-pipelines/linux/alpine/publish.sh
- displayName: Publish
+ REPO="$(pwd)"
+ ROOT="$REPO/.."
+
+ PLATFORM_LINUX="linux-alpine"
+
+ # Publish Remote Extension Host
+ LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
+ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
+ SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
+ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
+
+ rm -rf $ROOT/vscode-server-*.tar.*
+ (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
+
+ # Publish Remote Extension Host (Web)
+ LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
+ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
+ SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
+ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
+
+ rm -rf $ROOT/vscode-server-*-web.tar.*
+ (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
+ displayName: Prepare for publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine.tar.gz
- artifact: vscode-server-linux-alpine
+ artifact: vscode_server_linux_alpine_archive-unsigned
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine-web.tar.gz
- artifact: vscode-server-linux-alpine-web
+ artifact: vscode_web_linux_alpine_archive-unsigned
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/lib/vscode/build/azure-pipelines/linux/product-build-linux.yml b/lib/vscode/build/azure-pipelines/linux/product-build-linux.yml
index cb06bf6a7249..8181083d1f25 100644
--- a/lib/vscode/build/azure-pipelines/linux/product-build-linux.yml
+++ b/lib/vscode/build/azure-pipelines/linux/product-build-linux.yml
@@ -245,27 +245,32 @@ steps:
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
- ./build/azure-pipelines/linux/publish.sh
- displayName: Publish
+ ./build/azure-pipelines/linux/prepare-publish.sh
+ displayName: Prepare for Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(DEB_PATH)
- artifact: vscode-linux-deb-$(VSCODE_ARCH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_deb-package
displayName: Publish deb package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(RPM_PATH)
- artifact: vscode-linux-rpm-$(VSCODE_ARCH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_rpm-package
displayName: Publish rpm package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
+ - publish: $(TARBALL_PATH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_archive-unsigned
+ displayName: Publish client archive
+ condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
+
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH).tar.gz
- artifact: vscode-server-linux-$(VSCODE_ARCH)
+ artifact: vscode_server_linux_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH)-web.tar.gz
- artifact: vscode-server-linux-$(VSCODE_ARCH)-web
+ artifact: vscode_web_linux_$(VSCODE_ARCH)_archive-unsigned
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/lib/vscode/build/azure-pipelines/linux/snap-build-linux.yml b/lib/vscode/build/azure-pipelines/linux/snap-build-linux.yml
index f5e0288f0b92..f7af900e1d0d 100644
--- a/lib/vscode/build/azure-pipelines/linux/snap-build-linux.yml
+++ b/lib/vscode/build/azure-pipelines/linux/snap-build-linux.yml
@@ -50,15 +50,11 @@ steps:
esac
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft prime $SNAPCRAFT_TARGET_ARGS && snap pack prime --compression=lzo --filename="$SNAP_PATH")
- # Publish snap package
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH"
-
# Export SNAP_PATH
echo "##vso[task.setvariable variable=SNAP_PATH]$SNAP_PATH"
+ displayName: Prepare for publish
- publish: $(SNAP_PATH)
- artifact: vscode-linux-snap-$(VSCODE_ARCH)
+ artifact: vscode_client_linux_$(VSCODE_ARCH)_snap
displayName: Publish snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/lib/vscode/build/azure-pipelines/product-build.yml b/lib/vscode/build/azure-pipelines/product-build.yml
index fd698a0e7dfc..2c475b9deddd 100644
--- a/lib/vscode/build/azure-pipelines/product-build.yml
+++ b/lib/vscode/build/azure-pipelines/product-build.yml
@@ -86,6 +86,8 @@ variables:
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
- name: VSCODE_QUALITY
value: ${{ parameters.VSCODE_QUALITY }}
+ - name: VSCODE_RELEASE
+ value: ${{ parameters.VSCODE_RELEASE }}
- name: VSCODE_BUILD_STAGE_WINDOWS
value: ${{ or(eq(parameters.VSCODE_BUILD_WIN32, true), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}
- name: VSCODE_BUILD_STAGE_LINUX
@@ -301,37 +303,30 @@ stages:
steps:
- template: darwin/product-build-darwin-sign.yml
- - ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
- - stage: Mooncake
+ - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), ne(variables['VSCODE_PUBLISH'], 'false')) }}:
+ - stage: Publish
dependsOn:
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- - Windows
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- - Linux
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- - macOS
- condition: succeededOrFailed()
+ - Compile
pool:
vmImage: "Ubuntu-18.04"
+ variables:
+ - name: BUILDS_API_URL
+ value: $(System.CollectionUri)$(System.TeamProject)/_apis/build/builds/$(Build.BuildId)/
jobs:
- - job: SyncMooncake
- displayName: Sync Mooncake
+ - job: PublishBuild
+ timeoutInMinutes: 180
+ displayName: Publish Build
steps:
- - template: sync-mooncake.yml
+ - template: product-publish.yml
- - ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true)))) }}:
- - stage: Release
- dependsOn:
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- - Windows
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- - Linux
- - ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- - macOS
- pool:
- vmImage: "Ubuntu-18.04"
- jobs:
- - job: ReleaseBuild
- displayName: Release Build
- steps:
- - template: release.yml
+ - ${{ if or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true))) }}:
+ - stage: Release
+ dependsOn:
+ - Publish
+ pool:
+ vmImage: "Ubuntu-18.04"
+ jobs:
+ - job: ReleaseBuild
+ displayName: Release Build
+ steps:
+ - template: product-release.yml
diff --git a/lib/vscode/build/azure-pipelines/product-compile.yml b/lib/vscode/build/azure-pipelines/product-compile.yml
index 52c7758cfdee..18c17639b830 100644
--- a/lib/vscode/build/azure-pipelines/product-compile.yml
+++ b/lib/vscode/build/azure-pipelines/product-compile.yml
@@ -118,14 +118,6 @@ steps:
displayName: Publish Webview
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - script: |
- set -e
- VERSION=`node -p "require(\"./package.json\").version"`
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- node build/azure-pipelines/common/createBuild.js $VERSION
- displayName: Create build
- condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
-
# we gotta tarball everything in order to preserve file permissions
- script: |
set -e
diff --git a/lib/vscode/build/azure-pipelines/product-publish.ps1 b/lib/vscode/build/azure-pipelines/product-publish.ps1
new file mode 100644
index 000000000000..339002ab0c17
--- /dev/null
+++ b/lib/vscode/build/azure-pipelines/product-publish.ps1
@@ -0,0 +1,114 @@
+. build/azure-pipelines/win32/exec.ps1
+$ErrorActionPreference = 'Stop'
+$ProgressPreference = 'SilentlyContinue'
+$ARTIFACT_PROCESSED_WILDCARD_PATH = "$env:PIPELINE_WORKSPACE/artifacts_processed_*/artifacts_processed_*"
+$ARTIFACT_PROCESSED_FILE_PATH = "$env:PIPELINE_WORKSPACE/artifacts_processed_$env:SYSTEM_STAGEATTEMPT/artifacts_processed_$env:SYSTEM_STAGEATTEMPT.txt"
+
+function Get-PipelineArtifact {
+ param($Name = '*')
+ try {
+ $res = Invoke-RestMethod "$($env:BUILDS_API_URL)artifacts?api-version=6.0" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1
+
+ if (!$res) {
+ return
+ }
+
+ $res.value | Where-Object { $_.name -Like $Name }
+ } catch {
+ Write-Warning $_
+ }
+}
+
+# This set will keep track of which artifacts have already been processed
+$set = [System.Collections.Generic.HashSet[string]]::new()
+
+if (Test-Path $ARTIFACT_PROCESSED_WILDCARD_PATH) {
+ # Grab the latest artifact_processed text file and load all assets already processed from that.
+ # This means that the latest artifact_processed_*.txt file has all of the contents of the previous ones.
+ # Note: The kusto-like syntax only works in PS7+ and only in scripts, not at the REPL.
+ Get-ChildItem $ARTIFACT_PROCESSED_WILDCARD_PATH
+ | Sort-Object
+ | Select-Object -Last 1
+ | Get-Content
+ | ForEach-Object {
+ $set.Add($_) | Out-Null
+ Write-Host "Already processed artifact: $_"
+ }
+}
+
+# Create the artifact file that will be used for this run
+New-Item -Path $ARTIFACT_PROCESSED_FILE_PATH -Force | Out-Null
+
+# Determine which stages we need to watch
+$stages = @(
+ if ($env:VSCODE_BUILD_STAGE_WINDOWS -eq 'True') { 'Windows' }
+ if ($env:VSCODE_BUILD_STAGE_LINUX -eq 'True') { 'Linux' }
+ if ($env:VSCODE_BUILD_STAGE_MACOS -eq 'True') { 'macOS' }
+)
+
+do {
+ Start-Sleep -Seconds 10
+
+ $artifacts = Get-PipelineArtifact -Name 'vscode_*'
+ if (!$artifacts) {
+ continue
+ }
+
+ $artifacts | ForEach-Object {
+ $artifactName = $_.name
+ if($set.Add($artifactName)) {
+            Write-Host "Processing artifact: '$artifactName'. Downloading from: $($_.resource.downloadUrl)"
+
+ try {
+ Invoke-RestMethod $_.resource.downloadUrl -OutFile "$env:AGENT_TEMPDIRECTORY/$artifactName.zip" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1 | Out-Null
+
+ Expand-Archive -Path "$env:AGENT_TEMPDIRECTORY/$artifactName.zip" -DestinationPath $env:AGENT_TEMPDIRECTORY | Out-Null
+ } catch {
+ Write-Warning $_
+ $set.Remove($artifactName) | Out-Null
+ continue
+ }
+
+ $null,$product,$os,$arch,$type = $artifactName -split '_'
+ $asset = Get-ChildItem -rec "$env:AGENT_TEMPDIRECTORY/$artifactName"
+ Write-Host "Processing artifact with the following values:"
+      # turning it into an object just to log nicely
+ @{
+ product = $product
+ os = $os
+ arch = $arch
+ type = $type
+ asset = $asset.Name
+ } | Format-Table
+
+ exec { node build/azure-pipelines/common/createAsset.js $product $os $arch $type $asset.Name $asset.FullName }
+ $artifactName >> $ARTIFACT_PROCESSED_FILE_PATH
+ }
+ }
+
+ # Get the timeline and see if it says the other stage completed
+ try {
+ $timeline = Invoke-RestMethod "$($env:BUILDS_API_URL)timeline?api-version=6.0" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1
+ } catch {
+ Write-Warning $_
+ continue
+ }
+
+ foreach ($stage in $stages) {
+ $otherStageFinished = $timeline.records | Where-Object { $_.name -eq $stage -and $_.type -eq 'stage' -and $_.state -eq 'completed' }
+ if (!$otherStageFinished) {
+ break
+ }
+ }
+
+ $artifacts = Get-PipelineArtifact -Name 'vscode_*'
+ $artifactsStillToProcess = $artifacts.Count -ne $set.Count
+} while (!$otherStageFinished -or $artifactsStillToProcess)
+
+Write-Host "Processed $($set.Count) artifacts."
diff --git a/lib/vscode/build/azure-pipelines/product-publish.yml b/lib/vscode/build/azure-pipelines/product-publish.yml
new file mode 100644
index 000000000000..de8cb216b8a1
--- /dev/null
+++ b/lib/vscode/build/azure-pipelines/product-publish.yml
@@ -0,0 +1,89 @@
+steps:
+ - task: NodeTool@0
+ inputs:
+ versionSpec: "12.x"
+
+ - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
+ inputs:
+ versionSpec: "1.x"
+
+ - task: AzureKeyVault@1
+ displayName: "Azure Key Vault: Get Secrets"
+ inputs:
+ azureSubscription: "vscode-builds-subscription"
+ KeyVaultName: vscode
+
+ - pwsh: |
+ . build/azure-pipelines/win32/exec.ps1
+ cd build
+ exec { yarn }
+ displayName: Install dependencies
+
+ - download: current
+ patterns: '**/artifacts_processed_*.txt'
+ displayName: Download all artifacts_processed text files
+
+ - pwsh: |
+ . build/azure-pipelines/win32/exec.ps1
+
+ if (Test-Path "$(Pipeline.Workspace)/artifacts_processed_*/artifacts_processed_*.txt") {
+ Write-Host "Artifacts already processed so a build must have already been created."
+ return
+ }
+
+ $env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
+ $VERSION = node -p "require('./package.json').version"
+ Write-Host "Creating build with version: $VERSION"
+ exec { node build/azure-pipelines/common/createBuild.js $VERSION }
+ displayName: Create build if it hasn't been created before
+
+ - pwsh: |
+ $env:VSCODE_MIXIN_PASSWORD = "$(github-distro-mixin-password)"
+ $env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
+ $env:AZURE_STORAGE_ACCESS_KEY = "$(ticino-storage-key)"
+ $env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
+ $env:MOONCAKE_STORAGE_ACCESS_KEY = "$(vscode-mooncake-storage-key)"
+ build/azure-pipelines/product-publish.ps1
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Process artifacts
+
+ - publish: $(Pipeline.Workspace)/artifacts_processed_$(System.StageAttempt)/artifacts_processed_$(System.StageAttempt).txt
+ artifact: artifacts_processed_$(System.StageAttempt)
+ displayName: Publish what artifacts were published for this stage attempt
+
+ - pwsh: |
+ $ErrorActionPreference = 'Stop'
+
+ # Determine which stages we need to watch
+ $stages = @(
+ if ($env:VSCODE_BUILD_STAGE_WINDOWS -eq 'True') { 'Windows' }
+ if ($env:VSCODE_BUILD_STAGE_LINUX -eq 'True') { 'Linux' }
+ if ($env:VSCODE_BUILD_STAGE_MACOS -eq 'True') { 'macOS' }
+ )
+ Write-Host "Stages to check: $stages"
+
+ # Get the timeline and see if it says the other stage completed
+ $timeline = Invoke-RestMethod "$($env:BUILDS_API_URL)timeline?api-version=6.0" -Headers @{
+ Authorization = "Bearer $env:SYSTEM_ACCESSTOKEN"
+ } -MaximumRetryCount 5 -RetryIntervalSec 1
+
+ $failedStages = @()
+ foreach ($stage in $stages) {
+ $didStageFail = $timeline.records | Where-Object {
+ $_.name -eq $stage -and $_.type -eq 'stage' -and $_.result -ne 'succeeded' -and $_.result -ne 'succeededWithIssues'
+ }
+
+ if($didStageFail) {
+ $failedStages += $stage
+ } else {
+ Write-Host "'$stage' did not fail."
+ }
+ }
+
+ if ($failedStages.Length) {
+ throw "Failed stages: $($failedStages -join ', '). This stage will now fail so that it is easier to retry failed jobs."
+ }
+ env:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ displayName: Determine if stage should succeed
diff --git a/lib/vscode/build/azure-pipelines/release.yml b/lib/vscode/build/azure-pipelines/product-release.yml
similarity index 100%
rename from lib/vscode/build/azure-pipelines/release.yml
rename to lib/vscode/build/azure-pipelines/product-release.yml
diff --git a/lib/vscode/build/azure-pipelines/sync-mooncake.yml b/lib/vscode/build/azure-pipelines/sync-mooncake.yml
deleted file mode 100644
index 6e379754f2f8..000000000000
--- a/lib/vscode/build/azure-pipelines/sync-mooncake.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-steps:
- - task: NodeTool@0
- inputs:
- versionSpec: "14.x"
-
- - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
- inputs:
- versionSpec: "1.x"
-
- - task: AzureKeyVault@1
- displayName: "Azure Key Vault: Get Secrets"
- inputs:
- azureSubscription: "vscode-builds-subscription"
- KeyVaultName: vscode
-
- - script: |
- set -e
-
- (cd build ; yarn)
-
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
- node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"
diff --git a/lib/vscode/build/azure-pipelines/web/product-build-web.yml b/lib/vscode/build/azure-pipelines/web/product-build-web.yml
index 772fe1c05abd..45dedea1b4c6 100644
--- a/lib/vscode/build/azure-pipelines/web/product-build-web.yml
+++ b/lib/vscode/build/azure-pipelines/web/product-build-web.yml
@@ -119,13 +119,19 @@ steps:
- script: |
set -e
- AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
- AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
- VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
- ./build/azure-pipelines/web/publish.sh
- displayName: Publish
+ REPO="$(pwd)"
+ ROOT="$REPO/.."
+
+ WEB_BUILD_NAME="vscode-web"
+ WEB_TARBALL_FILENAME="vscode-web.tar.gz"
+ WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
+
+ rm -rf $ROOT/vscode-web.tar.*
+
+ cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME
+ displayName: Prepare for publish
- publish: $(Agent.BuildDirectory)/vscode-web.tar.gz
- artifact: vscode-web-standalone
+ artifact: vscode_web_linux_standalone_archive-unsigned
displayName: Publish web archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
diff --git a/lib/vscode/build/azure-pipelines/web/publish.sh b/lib/vscode/build/azure-pipelines/web/publish.sh
deleted file mode 100755
index 827edc2661bf..000000000000
--- a/lib/vscode/build/azure-pipelines/web/publish.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-set -e
-REPO="$(pwd)"
-ROOT="$REPO/.."
-
-# Publish Web Client
-WEB_BUILD_NAME="vscode-web"
-WEB_TARBALL_FILENAME="vscode-web.tar.gz"
-WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
-
-rm -rf $ROOT/vscode-web.tar.*
-
-(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)
-
-node build/azure-pipelines/common/createAsset.js web-standalone archive-unsigned "$WEB_TARBALL_FILENAME" "$WEB_TARBALL_PATH"
diff --git a/lib/vscode/build/azure-pipelines/win32/publish.ps1 b/lib/vscode/build/azure-pipelines/win32/prepare-publish.ps1
similarity index 51%
rename from lib/vscode/build/azure-pipelines/win32/publish.ps1
rename to lib/vscode/build/azure-pipelines/win32/prepare-publish.ps1
index a225f9d5fdf9..f80e1ca0ce9a 100644
--- a/lib/vscode/build/azure-pipelines/win32/publish.ps1
+++ b/lib/vscode/build/azure-pipelines/win32/prepare-publish.ps1
@@ -13,24 +13,31 @@ $Zip = "$Repo\.build\win32-$Arch\archive\VSCode-win32-$Arch.zip"
$LegacyServer = "$Root\vscode-reh-win32-$Arch"
$Server = "$Root\vscode-server-win32-$Arch"
$ServerZip = "$Repo\.build\vscode-server-win32-$Arch.zip"
+$LegacyWeb = "$Root\vscode-reh-web-win32-$Arch"
+$Web = "$Root\vscode-server-win32-$Arch-web"
+$WebZip = "$Repo\.build\vscode-server-win32-$Arch-web.zip"
$Build = "$Root\VSCode-win32-$Arch"
# Create server archive
if ("$Arch" -ne "arm64") {
exec { xcopy $LegacyServer $Server /H /E /I }
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
+ exec { xcopy $LegacyWeb $Web /H /E /I }
+ exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $WebZip $Web -r }
}
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
-$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-$Arch" }
-
-exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Zip }
-exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $SystemExe }
-exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
-
-if ("$Arch" -ne "arm64") {
- exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }
-}
+$ARCHIVE_NAME = "VSCode-win32-$Arch-$Version.zip"
+$SYSTEM_SETUP_NAME = "VSCodeSetup-$Arch-$Version.exe"
+$USER_SETUP_NAME = "VSCodeUserSetup-$Arch-$Version.exe"
+
+# Set variables for upload
+Move-Item $Zip "$Repo\.build\win32-$Arch\archive\$ARCHIVE_NAME"
+Write-Host "##vso[task.setvariable variable=ARCHIVE_NAME]$ARCHIVE_NAME"
+Move-Item $SystemExe "$Repo\.build\win32-$Arch\system-setup\$SYSTEM_SETUP_NAME"
+Write-Host "##vso[task.setvariable variable=SYSTEM_SETUP_NAME]$SYSTEM_SETUP_NAME"
+Move-Item $UserExe "$Repo\.build\win32-$Arch\user-setup\$USER_SETUP_NAME"
+Write-Host "##vso[task.setvariable variable=USER_SETUP_NAME]$USER_SETUP_NAME"
diff --git a/lib/vscode/build/azure-pipelines/win32/product-build-win32.yml b/lib/vscode/build/azure-pipelines/win32/product-build-win32.yml
index 2dcaf8b2e010..1f8514ae7e3e 100644
--- a/lib/vscode/build/azure-pipelines/win32/product-build-win32.yml
+++ b/lib/vscode/build/azure-pipelines/win32/product-build-win32.yml
@@ -295,31 +295,31 @@ steps:
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
- .\build\azure-pipelines\win32\publish.ps1
+ .\build\azure-pipelines\win32\prepare-publish.ps1
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip
- artifact: vscode-win32-$(VSCODE_ARCH)
+ - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\$(ARCHIVE_NAME)
+ artifact: vscode_client_win32_$(VSCODE_ARCH)_archive
displayName: Publish archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe
- artifact: vscode-win32-$(VSCODE_ARCH)-setup
+ - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\$(SYSTEM_SETUP_NAME)
+ artifact: vscode_client_win32_$(VSCODE_ARCH)_setup
displayName: Publish system setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe
- artifact: vscode-win32-$(VSCODE_ARCH)-user-setup
+ - publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\$(USER_SETUP_NAME)
+ artifact: vscode_client_win32_$(VSCODE_ARCH)_user-setup
displayName: Publish user setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH).zip
- artifact: vscode-server-win32-$(VSCODE_ARCH)
+ artifact: vscode_server_win32_$(VSCODE_ARCH)_archive
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH)-web.zip
- artifact: vscode-server-win32-$(VSCODE_ARCH)-web
+ artifact: vscode_web_win32_$(VSCODE_ARCH)_archive
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
diff --git a/lib/vscode/build/darwin/create-universal-app.js b/lib/vscode/build/darwin/create-universal-app.js
index d455a5cef703..d91064d41a60 100644
--- a/lib/vscode/build/darwin/create-universal-app.js
+++ b/lib/vscode/build/darwin/create-universal-app.js
@@ -33,7 +33,7 @@ async function main() {
'Credits.rtf',
'CodeResources',
'fsevents.node',
- 'Info.plist', // TODO@deepak1556: regressed with 11.4.2 internal builds
+ 'Info.plist',
'.npmrc'
],
outAppPath,
diff --git a/lib/vscode/build/gulpfile.extensions.js b/lib/vscode/build/gulpfile.extensions.js
index 9312f6c930a2..cbb7c486b13d 100644
--- a/lib/vscode/build/gulpfile.extensions.js
+++ b/lib/vscode/build/gulpfile.extensions.js
@@ -8,7 +8,6 @@ require('events').EventEmitter.defaultMaxListeners = 100;
const gulp = require('gulp');
const path = require('path');
-const child_process = require('child_process');
const nodeUtil = require('util');
const es = require('event-stream');
const filter = require('gulp-filter');
@@ -20,8 +19,6 @@ const glob = require('glob');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
-const fancyLog = require('fancy-log');
-const ansiColors = require('ansi-colors');
const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -201,45 +198,17 @@ gulp.task(compileExtensionsBuildLegacyTask);
//#region Extension media
-// Additional projects to webpack. These typically build code for webviews
-const webpackMediaConfigFiles = [
- 'markdown-language-features/webpack.config.js',
- 'simple-browser/webpack.config.js',
-];
-
-// Additional projects to run esbuild on. These typically build code for webviews
-const esbuildMediaScripts = [
- 'markdown-language-features/esbuild.js',
- 'notebook-markdown-extensions/esbuild.js',
-];
-
-const compileExtensionMediaTask = task.define('compile-extension-media', () => buildExtensionMedia(false));
+const compileExtensionMediaTask = task.define('compile-extension-media', () => ext.buildExtensionMedia(false));
gulp.task(compileExtensionMediaTask);
exports.compileExtensionMediaTask = compileExtensionMediaTask;
-const watchExtensionMedia = task.define('watch-extension-media', () => buildExtensionMedia(true));
+const watchExtensionMedia = task.define('watch-extension-media', () => ext.buildExtensionMedia(true));
gulp.task(watchExtensionMedia);
exports.watchExtensionMedia = watchExtensionMedia;
-const compileExtensionMediaBuildTask = task.define('compile-extension-media-build', () => buildExtensionMedia(false, '.build/extensions'));
+const compileExtensionMediaBuildTask = task.define('compile-extension-media-build', () => ext.buildExtensionMedia(false, '.build/extensions'));
gulp.task(compileExtensionMediaBuildTask);
-async function buildExtensionMedia(isWatch, outputRoot) {
- const webpackConfigLocations = webpackMediaConfigFiles.map(p => {
- return {
- configPath: path.join(extensionsPath, p),
- outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
- };
- });
- return Promise.all([
- webpackExtensions('webpacking extension media', isWatch, webpackConfigLocations),
- esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
- script: path.join(extensionsPath, p),
- outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
- }))),
- ]);
-}
-
//#endregion
//#region Azure Pipelines
@@ -271,121 +240,5 @@ async function buildWebExtensions(isWatch) {
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
{ ignore: ['**/node_modules'] }
);
- return webpackExtensions('packaging web extension', isWatch, webpackConfigLocations.map(configPath => ({ configPath })));
-}
-
-/**
- * @param {string} taskName
- * @param {boolean} isWatch
- * @param {{ configPath: string, outputRoot?: boolean}} webpackConfigLocations
- */
-async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
- const webpack = require('webpack');
-
- const webpackConfigs = [];
-
- for (const { configPath, outputRoot } of webpackConfigLocations) {
- const configOrFnOrArray = require(configPath);
- function addConfig(configOrFn) {
- let config;
- if (typeof configOrFn === 'function') {
- config = configOrFn({}, {});
- webpackConfigs.push(config);
- } else {
- config = configOrFn;
- }
-
- if (outputRoot) {
- config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
- }
-
- webpackConfigs.push(configOrFn);
- }
- addConfig(configOrFnOrArray);
- }
- function reporter(fullStats) {
- if (Array.isArray(fullStats.children)) {
- for (const stats of fullStats.children) {
- const outputPath = stats.outputPath;
- if (outputPath) {
- const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
- const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
- fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
- }
- if (Array.isArray(stats.errors)) {
- stats.errors.forEach(error => {
- fancyLog.error(error);
- });
- }
- if (Array.isArray(stats.warnings)) {
- stats.warnings.forEach(warning => {
- fancyLog.warn(warning);
- });
- }
- }
- }
- }
- return new Promise((resolve, reject) => {
- if (isWatch) {
- webpack(webpackConfigs).watch({}, (err, stats) => {
- if (err) {
- reject();
- } else {
- reporter(stats.toJson());
- }
- });
- } else {
- webpack(webpackConfigs).run((err, stats) => {
- if (err) {
- fancyLog.error(err);
- reject();
- } else {
- reporter(stats.toJson());
- resolve();
- }
- });
- }
- });
-}
-
-/**
- * @param {string} taskName
- * @param {boolean} isWatch
- * @param {{ script: string, outputRoot?: string }}} scripts
- */
-async function esbuildExtensions(taskName, isWatch, scripts) {
- function reporter(/** @type {string} */ stdError, /** @type {string} */script) {
- const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
- fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
- for (const match of matches || []) {
- fancyLog.error(match);
- }
- }
-
- const tasks = scripts.map(({ script, outputRoot }) => {
- return new Promise((resolve, reject) => {
- const args = [script];
- if (isWatch) {
- args.push('--watch');
- }
- if (outputRoot) {
- args.push('--outputRoot', outputRoot);
- }
- const proc = child_process.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
- if (error) {
- return reject(error);
- }
- reporter(stderr, script);
- if (stderr) {
- return reject();
- }
- return resolve();
- });
-
- proc.stdout.on('data', (data) => {
- fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
- });
- });
- });
- return Promise.all(tasks);
+ return ext.webpackExtensions('packaging web extension', isWatch, webpackConfigLocations.map(configPath => ({ configPath })));
}
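
For orientation: with this hunk, gulpfile.extensions.js no longer defines the webview-media pipeline itself and instead calls the helpers now exported from build/lib/extensions. A minimal sketch of that usage (the task names here are hypothetical, not part of the patch):

    // Sketch only; mirrors the ext.buildExtensionMedia calls added above.
    const gulp = require('gulp');
    const task = require('./lib/task');
    const ext = require('./lib/extensions');

    // One-off production build of webview media into .build/extensions...
    const compileMediaExample = task.define('example-compile-extension-media', () => ext.buildExtensionMedia(false, '.build/extensions'));
    gulp.task(compileMediaExample);

    // ...or an incremental rebuild loop while developing.
    const watchMediaExample = task.define('example-watch-extension-media', () => ext.buildExtensionMedia(true));
    gulp.task(watchMediaExample);
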
diff --git a/lib/vscode/build/gulpfile.reh.js b/lib/vscode/build/gulpfile.reh.js
index 6230916a1dc8..0704ab70e9d1 100644
--- a/lib/vscode/build/gulpfile.reh.js
+++ b/lib/vscode/build/gulpfile.reh.js
@@ -42,6 +42,7 @@ BUILD_TARGETS.forEach(({ platform, arch }) => {
});
function getNodeVersion() {
+ // NOTE@coder: Fix version due to .yarnrc removal.
return process.versions.node;
const yarnrc = fs.readFileSync(path.join(REPO_ROOT, 'remote', '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
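
The early return above short-circuits the .yarnrc lookup: code-server drops remote/.yarnrc, so the remote/server build simply targets the Node.js runtime running the build, and everything after the return is intentionally dead code. A reduced sketch of the effective behavior (not the literal file contents):

    function getNodeVersion() {
        // With remote/.yarnrc gone, report the running runtime's version.
        return process.versions.node; // e.g. "14.x"
    }
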
diff --git a/lib/vscode/build/gulpfile.vscode.js b/lib/vscode/build/gulpfile.vscode.js
index 3eeba69d2581..a6e043f122cb 100644
--- a/lib/vscode/build/gulpfile.vscode.js
+++ b/lib/vscode/build/gulpfile.vscode.js
@@ -228,7 +228,14 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(jsFilter)
.pipe(util.rewriteSourceMappingURL(sourceMappingURLBase))
.pipe(jsFilter.restore)
- .pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'node_modules.asar'));
+ .pipe(createAsar(path.join(process.cwd(), 'node_modules'), [
+ '**/*.node',
+ '**/vscode-ripgrep/bin/*',
+ '**/node-pty/build/Release/*',
+ '**/node-pty/lib/worker/conoutSocketWorker.js',
+ '**/node-pty/lib/shared/conout.js',
+ '**/*.wasm'
+ ], 'node_modules.asar'));
let all = es.merge(
packageJsonStream,
@@ -383,8 +390,6 @@ BUILD_TARGETS.forEach(buildTarget => {
}
});
-// Transifex Localizations
-
const innoSetupConfig = {
'zh-cn': { codePage: 'CP936', defaultInfo: { name: 'Simplified Chinese', id: '$0804', } },
'zh-tw': { codePage: 'CP950', defaultInfo: { name: 'Traditional Chinese', id: '$0404' } },
@@ -400,6 +405,8 @@ const innoSetupConfig = {
'tr': { codePage: 'CP1254' }
};
+// Transifex Localizations
+
const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME;
const apiToken = process.env.TRANSIFEX_API_TOKEN;
@@ -434,7 +441,7 @@ gulp.task(task.define(
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = '.build/extensions/*';
- const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
+ const pathToSetup = 'build/win32/i18n/messages.en.isl';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
@@ -460,8 +467,8 @@ gulp.task('vscode-translations-import', function () {
}
});
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
- let id = language.transifexId || language.id;
- return gulp.src(`${options.location}/${id}/setup/*/*.xlf`)
+ let id = language.id;
+ return gulp.src(`${options.location}/${id}/vscode-setup/messages.xlf`)
.pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
.pipe(vfs.dest(`./build/win32/i18n`));
}));
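
The widened unpack list keeps node-pty's console-output worker files outside node_modules.asar. The restatement below copies the globs from the hunk; the trailing comments are editorial guesses at why each entry must stay unpacked:

    createAsar(path.join(process.cwd(), 'node_modules'), [
        '**/*.node',                                    // native addons are loaded from real files on disk
        '**/vscode-ripgrep/bin/*',                      // ripgrep runs as a separate executable
        '**/node-pty/build/Release/*',                  // node-pty's native binaries
        '**/node-pty/lib/worker/conoutSocketWorker.js', // loaded as a worker, so it needs a real path
        '**/node-pty/lib/shared/conout.js',             // required by that worker at runtime
        '**/*.wasm'                                     // wasm modules are read directly from disk
    ], 'node_modules.asar')
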
diff --git a/lib/vscode/build/lib/builtInExtensions.js b/lib/vscode/build/lib/builtInExtensions.js
index 9b8bcc9e0009..d851a6ee5f9a 100644
--- a/lib/vscode/build/lib/builtInExtensions.js
+++ b/lib/vscode/build/lib/builtInExtensions.js
@@ -18,8 +18,8 @@ const ansiColors = require("ansi-colors");
const mkdirp = require('mkdirp');
const root = path.dirname(path.dirname(__dirname));
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
function log(...messages) {
diff --git a/lib/vscode/build/lib/builtInExtensions.ts b/lib/vscode/build/lib/builtInExtensions.ts
index af444defab88..2c1cb3abba2c 100644
--- a/lib/vscode/build/lib/builtInExtensions.ts
+++ b/lib/vscode/build/lib/builtInExtensions.ts
@@ -36,8 +36,8 @@ export interface IExtensionDefinition {
const root = path.dirname(path.dirname(__dirname));
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
diff --git a/lib/vscode/build/lib/builtInExtensionsCG.js b/lib/vscode/build/lib/builtInExtensionsCG.js
index 10bf38f8c8e2..679663724c8b 100644
--- a/lib/vscode/build/lib/builtInExtensionsCG.js
+++ b/lib/vscode/build/lib/builtInExtensionsCG.js
@@ -12,8 +12,8 @@ const ansiColors = require("ansi-colors");
const root = path.dirname(path.dirname(__dirname));
const rootCG = path.join(root, 'extensionsCG');
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
const contentBasePath = 'raw.githubusercontent.com';
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
diff --git a/lib/vscode/build/lib/builtInExtensionsCG.ts b/lib/vscode/build/lib/builtInExtensionsCG.ts
index 45785529b66d..2b758da5c912 100644
--- a/lib/vscode/build/lib/builtInExtensionsCG.ts
+++ b/lib/vscode/build/lib/builtInExtensionsCG.ts
@@ -13,8 +13,8 @@ import { IExtensionDefinition } from './builtInExtensions';
const root = path.dirname(path.dirname(__dirname));
const rootCG = path.join(root, 'extensionsCG');
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
-const builtInExtensions = productjson.builtInExtensions;
-const webBuiltInExtensions = productjson.webBuiltInExtensions;
+const builtInExtensions = productjson.builtInExtensions || [];
+const webBuiltInExtensions = productjson.webBuiltInExtensions || [];
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
const contentBasePath = 'raw.githubusercontent.com';
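
All four builtInExtensions* files receive the same one-line guard because code-server ships a trimmed product.json that may omit these keys entirely. A small runnable sketch of the failure mode the fallback avoids (the product.json contents here are hypothetical):

    // Hypothetical trimmed product.json, as code-server might ship it:
    const productjson = { nameShort: 'code-server' };

    const unguarded = productjson.builtInExtensions;      // undefined
    // unguarded.forEach(ext => { ... });                 // would throw: Cannot read properties of undefined

    const guarded = productjson.builtInExtensions || [];  // []
    guarded.forEach(ext => console.log(ext.name));        // safe no-op when the key is missing
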
diff --git a/lib/vscode/build/lib/extensions.js b/lib/vscode/build/lib/extensions.js
index b25c3713ea83..3d6a2906178c 100644
--- a/lib/vscode/build/lib/extensions.js
+++ b/lib/vscode/build/lib/extensions.js
@@ -4,9 +4,10 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.translatePackageJSON = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
+exports.buildExtensionMedia = exports.webpackExtensions = exports.translatePackageJSON = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
const es = require("event-stream");
const fs = require("fs");
+const cp = require("child_process");
const glob = require("glob");
const gulp = require("gulp");
const path = require("path");
@@ -328,3 +329,132 @@ function translatePackageJSON(packageJSON, packageNLSPath) {
return packageJSON;
}
exports.translatePackageJSON = translatePackageJSON;
+const extensionsPath = path.join(root, 'extensions');
+// Additional projects to webpack. These typically build code for webviews
+const webpackMediaConfigFiles = [
+ 'markdown-language-features/webpack.config.js',
+ 'simple-browser/webpack.config.js',
+];
+// Additional projects to run esbuild on. These typically build code for webviews
+const esbuildMediaScripts = [
+ 'markdown-language-features/esbuild.js',
+ 'notebook-markdown-extensions/esbuild.js',
+];
+async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
+ const webpack = require('webpack');
+ const webpackConfigs = [];
+ for (const { configPath, outputRoot } of webpackConfigLocations) {
+ const configOrFnOrArray = require(configPath);
+ function addConfig(configOrFn) {
+ let config;
+ if (typeof configOrFn === 'function') {
+ config = configOrFn({}, {});
+ webpackConfigs.push(config);
+ }
+ else {
+ config = configOrFn;
+ }
+ if (outputRoot) {
+ config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
+ }
+ webpackConfigs.push(configOrFn);
+ }
+ addConfig(configOrFnOrArray);
+ }
+ function reporter(fullStats) {
+ if (Array.isArray(fullStats.children)) {
+ for (const stats of fullStats.children) {
+ const outputPath = stats.outputPath;
+ if (outputPath) {
+ const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
+ const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
+ }
+ if (Array.isArray(stats.errors)) {
+ stats.errors.forEach((error) => {
+ fancyLog.error(error);
+ });
+ }
+ if (Array.isArray(stats.warnings)) {
+ stats.warnings.forEach((warning) => {
+ fancyLog.warn(warning);
+ });
+ }
+ }
+ }
+ }
+ return new Promise((resolve, reject) => {
+ if (isWatch) {
+ webpack(webpackConfigs).watch({}, (err, stats) => {
+ if (err) {
+ reject();
+ }
+ else {
+ reporter(stats.toJson());
+ }
+ });
+ }
+ else {
+ webpack(webpackConfigs).run((err, stats) => {
+ if (err) {
+ fancyLog.error(err);
+ reject();
+ }
+ else {
+ reporter(stats.toJson());
+ resolve();
+ }
+ });
+ }
+ });
+}
+exports.webpackExtensions = webpackExtensions;
+async function esbuildExtensions(taskName, isWatch, scripts) {
+ function reporter(stdError, script) {
+ const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
+ for (const match of matches || []) {
+ fancyLog.error(match);
+ }
+ }
+ const tasks = scripts.map(({ script, outputRoot }) => {
+ return new Promise((resolve, reject) => {
+ const args = [script];
+ if (isWatch) {
+ args.push('--watch');
+ }
+ if (outputRoot) {
+ args.push('--outputRoot', outputRoot);
+ }
+ const proc = cp.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
+ if (error) {
+ return reject(error);
+ }
+ reporter(stderr, script);
+ if (stderr) {
+ return reject();
+ }
+ return resolve();
+ });
+ proc.stdout.on('data', (data) => {
+ fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
+ });
+ });
+ });
+ return Promise.all(tasks);
+}
+async function buildExtensionMedia(isWatch, outputRoot) {
+ return Promise.all([
+ webpackExtensions('webpacking extension media', isWatch, webpackMediaConfigFiles.map(p => {
+ return {
+ configPath: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ };
+ })),
+ esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
+ script: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ }))),
+ ]);
+}
+exports.buildExtensionMedia = buildExtensionMedia;
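
Because webpackExtensions is now exported from build/lib/extensions, other build scripts can hand it config locations directly. A hypothetical caller (function name and glob pattern invented for illustration), shaped like the buildWebExtensions call earlier in gulpfile.extensions.js:

    const path = require('path');
    const glob = require('glob');
    const ext = require('./build/lib/extensions');

    // Bundle every extension-level webpack config found under extensions/.
    async function packExtensionBundles(isWatch) {
        const configPaths = glob.sync(
            path.join(__dirname, 'extensions', '**', 'webpack.config.js'),
            { ignore: ['**/node_modules'] }
        );
        // Each entry may also carry an outputRoot to redirect the emitted bundles.
        return ext.webpackExtensions('packing extension bundles', isWatch, configPaths.map(configPath => ({ configPath })));
    }
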
diff --git a/lib/vscode/build/lib/extensions.ts b/lib/vscode/build/lib/extensions.ts
index ec5c7d03b101..9281bb19e20b 100644
--- a/lib/vscode/build/lib/extensions.ts
+++ b/lib/vscode/build/lib/extensions.ts
@@ -5,6 +5,7 @@
import * as es from 'event-stream';
import * as fs from 'fs';
+import * as cp from 'child_process';
import * as glob from 'glob';
import * as gulp from 'gulp';
import * as path from 'path';
@@ -19,6 +20,7 @@ import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
const buffer = require('gulp-buffer');
import * as jsoncParser from 'jsonc-parser';
+import webpack = require('webpack');
const util = require('./util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
@@ -403,3 +405,138 @@ export function translatePackageJSON(packageJSON: string, packageNLSPath: string
translate(packageJSON);
return packageJSON;
}
+
+const extensionsPath = path.join(root, 'extensions');
+
+// Additional projects to webpack. These typically build code for webviews
+const webpackMediaConfigFiles = [
+ 'markdown-language-features/webpack.config.js',
+ 'simple-browser/webpack.config.js',
+];
+
+// Additional projects to run esbuild on. These typically build code for webviews
+const esbuildMediaScripts = [
+ 'markdown-language-features/esbuild.js',
+ 'notebook-markdown-extensions/esbuild.js',
+];
+
+export async function webpackExtensions(taskName: string, isWatch: boolean, webpackConfigLocations: { configPath: string, outputRoot?: string }[]) {
+ const webpack = require('webpack') as typeof import('webpack');
+
+ const webpackConfigs: webpack.Configuration[] = [];
+
+ for (const { configPath, outputRoot } of webpackConfigLocations) {
+ const configOrFnOrArray = require(configPath);
+ function addConfig(configOrFn: webpack.Configuration | Function) {
+ let config;
+ if (typeof configOrFn === 'function') {
+ config = configOrFn({}, {});
+ webpackConfigs.push(config);
+ } else {
+ config = configOrFn;
+ }
+
+ if (outputRoot) {
+ config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
+ }
+
+ webpackConfigs.push(configOrFn);
+ }
+ addConfig(configOrFnOrArray);
+ }
+ function reporter(fullStats: any) {
+ if (Array.isArray(fullStats.children)) {
+ for (const stats of fullStats.children) {
+ const outputPath = stats.outputPath;
+ if (outputPath) {
+ const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
+ const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match![0])} with ${stats.errors.length} errors.`);
+ }
+ if (Array.isArray(stats.errors)) {
+ stats.errors.forEach((error: any) => {
+ fancyLog.error(error);
+ });
+ }
+ if (Array.isArray(stats.warnings)) {
+ stats.warnings.forEach((warning: any) => {
+ fancyLog.warn(warning);
+ });
+ }
+ }
+ }
+ }
+ return new Promise((resolve, reject) => {
+ if (isWatch) {
+ webpack(webpackConfigs).watch({}, (err, stats) => {
+ if (err) {
+ reject();
+ } else {
+ reporter(stats.toJson());
+ }
+ });
+ } else {
+ webpack(webpackConfigs).run((err, stats) => {
+ if (err) {
+ fancyLog.error(err);
+ reject();
+ } else {
+ reporter(stats.toJson());
+ resolve();
+ }
+ });
+ }
+ });
+}
+
+async function esbuildExtensions(taskName: string, isWatch: boolean, scripts: { script: string, outputRoot?: string }[]) {
+ function reporter(stdError: string, script: string) {
+ const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
+ fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
+ for (const match of matches || []) {
+ fancyLog.error(match);
+ }
+ }
+
+ const tasks = scripts.map(({ script, outputRoot }) => {
+ return new Promise((resolve, reject) => {
+ const args = [script];
+ if (isWatch) {
+ args.push('--watch');
+ }
+ if (outputRoot) {
+ args.push('--outputRoot', outputRoot);
+ }
+ const proc = cp.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
+ if (error) {
+ return reject(error);
+ }
+ reporter(stderr, script);
+ if (stderr) {
+ return reject();
+ }
+ return resolve();
+ });
+
+ proc.stdout!.on('data', (data) => {
+ fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
+ });
+ });
+ });
+ return Promise.all(tasks);
+}
+
+export async function buildExtensionMedia(isWatch: boolean, outputRoot?: string) {
+ return Promise.all([
+ webpackExtensions('webpacking extension media', isWatch, webpackMediaConfigFiles.map(p => {
+ return {
+ configPath: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ };
+ })),
+ esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
+ script: path.join(extensionsPath, p),
+ outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
+ }))),
+ ]);
+}
diff --git a/lib/vscode/build/lib/i18n.js b/lib/vscode/build/lib/i18n.js
index 50ec3f3f45a0..4c7c5c32eb44 100644
--- a/lib/vscode/build/lib/i18n.js
+++ b/lib/vscode/build/lib/i18n.js
@@ -4,14 +4,13 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.pullI18nPackFiles = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.pullCoreAndExtensionsXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
+exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
const path = require("path");
const fs = require("fs");
const event_stream_1 = require("event-stream");
const File = require("vinyl");
const Is = require("is");
const xml2js = require("xml2js");
-const glob = require("glob");
const https = require("https");
const gulp = require("gulp");
const fancyLog = require("fancy-log");
@@ -19,1186 +18,1146 @@ const ansiColors = require("ansi-colors");
const iconv = require("iconv-lite-umd");
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
function log(message, ...rest) {
- fancyLog(ansiColors.green('[i18n]'), message, ...rest);
+ fancyLog(ansiColors.green('[i18n]'), message, ...rest);
}
exports.defaultLanguages = [
- { id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' },
- { id: 'zh-cn', folderName: 'chs', translationId: 'zh-hans' },
- { id: 'ja', folderName: 'jpn' },
- { id: 'ko', folderName: 'kor' },
- { id: 'de', folderName: 'deu' },
- { id: 'fr', folderName: 'fra' },
- { id: 'es', folderName: 'esn' },
- { id: 'ru', folderName: 'rus' },
- { id: 'it', folderName: 'ita' }
+ { id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' },
+ { id: 'zh-cn', folderName: 'chs', translationId: 'zh-hans' },
+ { id: 'ja', folderName: 'jpn' },
+ { id: 'ko', folderName: 'kor' },
+ { id: 'de', folderName: 'deu' },
+ { id: 'fr', folderName: 'fra' },
+ { id: 'es', folderName: 'esn' },
+ { id: 'ru', folderName: 'rus' },
+ { id: 'it', folderName: 'ita' }
];
// languages requested by the community to non-stable builds
exports.extraLanguages = [
- { id: 'pt-br', folderName: 'ptb' },
- { id: 'hu', folderName: 'hun' },
- { id: 'tr', folderName: 'trk' }
+ { id: 'pt-br', folderName: 'ptb' },
+ { id: 'hu', folderName: 'hun' },
+ { id: 'tr', folderName: 'trk' }
];
// non built-in extensions also that are transifex and need to be part of the language packs
exports.externalExtensionsWithTranslations = {
- 'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
- 'vscode-node-debug': 'ms-vscode.node-debug',
- 'vscode-node-debug2': 'ms-vscode.node-debug2'
+ 'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
+ 'vscode-node-debug': 'ms-vscode.node-debug',
+ 'vscode-node-debug2': 'ms-vscode.node-debug2'
};
var LocalizeInfo;
(function (LocalizeInfo) {
- function is(value) {
- let candidate = value;
- return Is.defined(candidate) && Is.string(candidate.key) && (Is.undef(candidate.comment) || (Is.array(candidate.comment) && candidate.comment.every(element => Is.string(element))));
- }
- LocalizeInfo.is = is;
+ function is(value) {
+ let candidate = value;
+ return Is.defined(candidate) && Is.string(candidate.key) && (Is.undef(candidate.comment) || (Is.array(candidate.comment) && candidate.comment.every(element => Is.string(element))));
+ }
+ LocalizeInfo.is = is;
})(LocalizeInfo || (LocalizeInfo = {}));
var BundledFormat;
(function (BundledFormat) {
- function is(value) {
- if (Is.undef(value)) {
- return false;
- }
- let candidate = value;
- let length = Object.keys(value).length;
- return length === 3 && Is.defined(candidate.keys) && Is.defined(candidate.messages) && Is.defined(candidate.bundles);
- }
- BundledFormat.is = is;
+ function is(value) {
+ if (Is.undef(value)) {
+ return false;
+ }
+ let candidate = value;
+ let length = Object.keys(value).length;
+ return length === 3 && Is.defined(candidate.keys) && Is.defined(candidate.messages) && Is.defined(candidate.bundles);
+ }
+ BundledFormat.is = is;
})(BundledFormat || (BundledFormat = {}));
var PackageJsonFormat;
(function (PackageJsonFormat) {
- function is(value) {
- if (Is.undef(value) || !Is.object(value)) {
- return false;
- }
- return Object.keys(value).every(key => {
- let element = value[key];
- return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
- });
- }
- PackageJsonFormat.is = is;
+ function is(value) {
+ if (Is.undef(value) || !Is.object(value)) {
+ return false;
+ }
+ return Object.keys(value).every(key => {
+ let element = value[key];
+ return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
+ });
+ }
+ PackageJsonFormat.is = is;
})(PackageJsonFormat || (PackageJsonFormat = {}));
class Line {
- constructor(indent = 0) {
- this.buffer = [];
- if (indent > 0) {
- this.buffer.push(new Array(indent + 1).join(' '));
- }
- }
- append(value) {
- this.buffer.push(value);
- return this;
- }
- toString() {
- return this.buffer.join('');
- }
+ constructor(indent = 0) {
+ this.buffer = [];
+ if (indent > 0) {
+ this.buffer.push(new Array(indent + 1).join(' '));
+ }
+ }
+ append(value) {
+ this.buffer.push(value);
+ return this;
+ }
+ toString() {
+ return this.buffer.join('');
+ }
}
exports.Line = Line;
class TextModel {
- constructor(contents) {
- this._lines = contents.split(/\r\n|\r|\n/);
- }
- get lines() {
- return this._lines;
- }
+ constructor(contents) {
+ this._lines = contents.split(/\r\n|\r|\n/);
+ }
+ get lines() {
+ return this._lines;
+ }
}
class XLF {
- constructor(project) {
- this.project = project;
- this.buffer = [];
- this.files = Object.create(null);
- this.numberOfMessages = 0;
- }
- toString() {
- this.appendHeader();
- for (let file in this.files) {
- this.appendNewLine(`<file original="${file}" source-language="en" datatype="plaintext"><body>`, 2);
- for (let item of this.files[file]) {
- this.addStringItem(file, item);
- }
- this.appendNewLine('</body></file>', 2);
- }
- this.appendFooter();
- return this.buffer.join('\r\n');
- }
- addFile(original, keys, messages) {
- if (keys.length === 0) {
- console.log('No keys in ' + original);
- return;
- }
- if (keys.length !== messages.length) {
- throw new Error(`Unmatching keys(${keys.length}) and messages(${messages.length}).`);
- }
- this.numberOfMessages += keys.length;
- this.files[original] = [];
- let existingKeys = new Set();
- for (let i = 0; i < keys.length; i++) {
- let key = keys[i];
- let realKey;
- let comment;
- if (Is.string(key)) {
- realKey = key;
- comment = undefined;
- }
- else if (LocalizeInfo.is(key)) {
- realKey = key.key;
- if (key.comment && key.comment.length > 0) {
- comment = key.comment.map(comment => encodeEntities(comment)).join('\r\n');
- }
- }
- if (!realKey || existingKeys.has(realKey)) {
- continue;
- }
- existingKeys.add(realKey);
- let message = encodeEntities(messages[i]);
- this.files[original].push({ id: realKey, message: message, comment: comment });
- }
- }
- addStringItem(file, item) {
- if (!item.id || item.message === undefined || item.message === null) {
- throw new Error(`No item ID or value specified: ${JSON.stringify(item)}. File: ${file}`);
- }
- if (item.message.length === 0) {
- log(`Item with id ${item.id} in file ${file} has an empty message.`);
- }
- this.appendNewLine(`<trans-unit id="${item.id}">`, 4);
- this.appendNewLine(`<source xml:lang="en">${item.message}</source>`, 6);
- if (item.comment) {
- this.appendNewLine(`<note>${item.comment}</note>`, 6);
- }
- this.appendNewLine('</trans-unit>', 4);
- }
- appendHeader() {
- this.appendNewLine('<?xml version="1.0" encoding="utf-8"?>', 0);
- this.appendNewLine('<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">', 0);
- }
- appendFooter() {
- this.appendNewLine('</xliff>', 0);
- }
- appendNewLine(content, indent) {
- let line = new Line(indent);
- line.append(content);
- this.buffer.push(line.toString());
- }
+ constructor(project) {
+ this.project = project;
+ this.buffer = [];
+ this.files = Object.create(null);
+ this.numberOfMessages = 0;
+ }
+ toString() {
+ this.appendHeader();
+ const files = Object.keys(this.files).sort();
+ for (const file of files) {
+ this.appendNewLine(`<file original="${file}" source-language="en" datatype="plaintext"><body>`, 2);
+ const items = this.files[file].sort((a, b) => {
+ return a.id < b.id ? -1 : a.id > b.id ? 1 : 0;
+ });
+ for (const item of items) {
+ this.addStringItem(file, item);
+ }
+ this.appendNewLine('</body></file>');
+ }
+ this.appendFooter();
+ return this.buffer.join('\r\n');
+ }
+ addFile(original, keys, messages) {
+ if (keys.length === 0) {
+ console.log('No keys in ' + original);
+ return;
+ }
+ if (keys.length !== messages.length) {
+ throw new Error(`Unmatching keys(${keys.length}) and messages(${messages.length}).`);
+ }
+ this.numberOfMessages += keys.length;
+ this.files[original] = [];
+ let existingKeys = new Set();
+ for (let i = 0; i < keys.length; i++) {
+ let key = keys[i];
+ let realKey;
+ let comment;
+ if (Is.string(key)) {
+ realKey = key;
+ comment = undefined;
+ }
+ else if (LocalizeInfo.is(key)) {
+ realKey = key.key;
+ if (key.comment && key.comment.length > 0) {
+ comment = key.comment.map(comment => encodeEntities(comment)).join('\r\n');
+ }
+ }
+ if (!realKey || existingKeys.has(realKey)) {
+ continue;
+ }
+ existingKeys.add(realKey);
+ let message = encodeEntities(messages[i]);
+ this.files[original].push({ id: realKey, message: message, comment: comment });
+ }
+ }
+ addStringItem(file, item) {
+ if (!item.id || item.message === undefined || item.message === null) {
+ throw new Error(`No item ID or value specified: ${JSON.stringify(item)}. File: ${file}`);
+ }
+ if (item.message.length === 0) {
+ log(`Item with id ${item.id} in file ${file} has an empty message.`);
+ }
+ this.appendNewLine(`<trans-unit id="${item.id}">`, 4);
+ this.appendNewLine(`<source xml:lang="en">${item.message}</source>`, 6);
+ if (item.comment) {
+ this.appendNewLine(`<note>${item.comment}</note>`, 6);
+ }
+ this.appendNewLine('</trans-unit>', 4);
+ }
+ appendHeader() {
+ this.appendNewLine('<?xml version="1.0" encoding="utf-8"?>', 0);
+ this.appendNewLine('<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">', 0);
+ }
+ appendFooter() {
+ this.appendNewLine('</xliff>', 0);
+ }
+ appendNewLine(content, indent) {
+ let line = new Line(indent);
+ line.append(content);
+ this.buffer.push(line.toString());
+ }
}
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
- return new Promise((resolve) => {
- let parser = new xml2js.Parser();
- let files = [];
- parser.parseString(xlfString, function (_err, result) {
- const fileNodes = result['xliff']['file'];
- fileNodes.forEach(file => {
- const originalFilePath = file.$.original;
- const messages = {};
- const transUnits = file.body[0]['trans-unit'];
- if (transUnits) {
- transUnits.forEach((unit) => {
- const key = unit.$.id;
- const val = pseudify(unit.source[0]['_'].toString());
- if (key && val) {
- messages[key] = decodeEntities(val);
- }
- });
- files.push({ messages: messages, originalFilePath: originalFilePath, language: 'ps' });
- }
- });
- resolve(files);
- });
- });
+ return new Promise((resolve) => {
+ let parser = new xml2js.Parser();
+ let files = [];
+ parser.parseString(xlfString, function (_err, result) {
+ const fileNodes = result['xliff']['file'];
+ fileNodes.forEach(file => {
+ const originalFilePath = file.$.original;
+ const messages = {};
+ const transUnits = file.body[0]['trans-unit'];
+ if (transUnits) {
+ transUnits.forEach((unit) => {
+ const key = unit.$.id;
+ const val = pseudify(unit.source[0]['_'].toString());
+ if (key && val) {
+ messages[key] = decodeEntities(val);
+ }
+ });
+ files.push({ messages: messages, originalFilePath: originalFilePath, language: 'ps' });
+ }
+ });
+ resolve(files);
+ });
+ });
};
XLF.parse = function (xlfString) {
- return new Promise((resolve, reject) => {
- let parser = new xml2js.Parser();
- let files = [];
- parser.parseString(xlfString, function (err, result) {
- if (err) {
- reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
- }
- const fileNodes = result['xliff']['file'];
- if (!fileNodes) {
- reject(new Error(`XLF parsing error: XLIFF file does not contain "xliff" or "file" node(s) required for parsing.`));
- }
- fileNodes.forEach((file) => {
- const originalFilePath = file.$.original;
- if (!originalFilePath) {
- reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
- }
- let language = file.$['target-language'];
- if (!language) {
- reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
- }
- const messages = {};
- const transUnits = file.body[0]['trans-unit'];
- if (transUnits) {
- transUnits.forEach((unit) => {
- const key = unit.$.id;
- if (!unit.target) {
- return; // No translation available
- }
- let val = unit.target[0];
- if (typeof val !== 'string') {
- // We allow empty source values so support them for translations as well.
- val = val._ ? val._ : '';
- }
- if (!key) {
- reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${originalFilePath} is missing the ID attribute.`));
- return;
- }
- messages[key] = decodeEntities(val);
- });
- files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
- }
- });
- resolve(files);
- });
- });
+ return new Promise((resolve, reject) => {
+ let parser = new xml2js.Parser();
+ let files = [];
+ parser.parseString(xlfString, function (err, result) {
+ if (err) {
+ reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
+ }
+ const fileNodes = result['xliff']['file'];
+ if (!fileNodes) {
+ reject(new Error(`XLF parsing error: XLIFF file does not contain "xliff" or "file" node(s) required for parsing.`));
+ }
+ fileNodes.forEach((file) => {
+ const originalFilePath = file.$.original;
+ if (!originalFilePath) {
+ reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
+ }
+ let language = file.$['target-language'];
+ if (!language) {
+ reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
+ }
+ const messages = {};
+ const transUnits = file.body[0]['trans-unit'];
+ if (transUnits) {
+ transUnits.forEach((unit) => {
+ const key = unit.$.id;
+ if (!unit.target) {
+ return; // No translation available
+ }
+ let val = unit.target[0];
+ if (typeof val !== 'string') {
+ // We allow empty source values so support them for translations as well.
+ val = val._ ? val._ : '';
+ }
+ if (!key) {
+ reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${originalFilePath} is missing the ID attribute.`));
+ return;
+ }
+ messages[key] = decodeEntities(val);
+ });
+ files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
+ }
+ });
+ resolve(files);
+ });
+ });
};
class Limiter {
- constructor(maxDegreeOfParalellism) {
- this.maxDegreeOfParalellism = maxDegreeOfParalellism;
- this.outstandingPromises = [];
- this.runningPromises = 0;
- }
- queue(factory) {
- return new Promise((c, e) => {
- this.outstandingPromises.push({ factory, c, e });
- this.consume();
- });
- }
- consume() {
- while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
- const iLimitedTask = this.outstandingPromises.shift();
- this.runningPromises++;
- const promise = iLimitedTask.factory();
- promise.then(iLimitedTask.c).catch(iLimitedTask.e);
- promise.then(() => this.consumed()).catch(() => this.consumed());
- }
- }
- consumed() {
- this.runningPromises--;
- this.consume();
- }
+ constructor(maxDegreeOfParalellism) {
+ this.maxDegreeOfParalellism = maxDegreeOfParalellism;
+ this.outstandingPromises = [];
+ this.runningPromises = 0;
+ }
+ queue(factory) {
+ return new Promise((c, e) => {
+ this.outstandingPromises.push({ factory, c, e });
+ this.consume();
+ });
+ }
+ consume() {
+ while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
+ const iLimitedTask = this.outstandingPromises.shift();
+ this.runningPromises++;
+ const promise = iLimitedTask.factory();
+ promise.then(iLimitedTask.c).catch(iLimitedTask.e);
+ promise.then(() => this.consumed()).catch(() => this.consumed());
+ }
+ }
+ consumed() {
+ this.runningPromises--;
+ this.consume();
+ }
}
exports.Limiter = Limiter;
function sortLanguages(languages) {
- return languages.sort((a, b) => {
- return a.id < b.id ? -1 : (a.id > b.id ? 1 : 0);
- });
+ return languages.sort((a, b) => {
+ return a.id < b.id ? -1 : (a.id > b.id ? 1 : 0);
+ });
}
function stripComments(content) {
- /**
- * First capturing group matches double quoted string
- * Second matches single quotes string
- * Third matches block comments
- * Fourth matches line comments
- */
- const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
- let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
- // Only one of m1, m2, m3, m4 matches
- if (m3) {
- // A block comment. Replace with nothing
- return '';
- }
- else if (m4) {
- // A line comment. If it ends in \r?\n then keep it.
- let length = m4.length;
- if (length > 2 && m4[length - 1] === '\n') {
- return m4[length - 2] === '\r' ? '\r\n' : '\n';
- }
- else {
- return '';
- }
- }
- else {
- // We match a string
- return match;
- }
- });
- return result;
+ /**
+ * First capturing group matches double quoted string
+ * Second matches single quotes string
+ * Third matches block comments
+ * Fourth matches line comments
+ */
+ const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
+ let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
+ // Only one of m1, m2, m3, m4 matches
+ if (m3) {
+ // A block comment. Replace with nothing
+ return '';
+ }
+ else if (m4) {
+ // A line comment. If it ends in \r?\n then keep it.
+ let length = m4.length;
+ if (length > 2 && m4[length - 1] === '\n') {
+ return m4[length - 2] === '\r' ? '\r\n' : '\n';
+ }
+ else {
+ return '';
+ }
+ }
+ else {
+ // We match a string
+ return match;
+ }
+ });
+ return result;
}
function escapeCharacters(value) {
- const result = [];
- for (let i = 0; i < value.length; i++) {
- const ch = value.charAt(i);
- switch (ch) {
- case '\'':
- result.push('\\\'');
- break;
- case '"':
- result.push('\\"');
- break;
- case '\\':
- result.push('\\\\');
- break;
- case '\n':
- result.push('\\n');
- break;
- case '\r':
- result.push('\\r');
- break;
- case '\t':
- result.push('\\t');
- break;
- case '\b':
- result.push('\\b');
- break;
- case '\f':
- result.push('\\f');
- break;
- default:
- result.push(ch);
- }
- }
- return result.join('');
+ const result = [];
+ for (let i = 0; i < value.length; i++) {
+ const ch = value.charAt(i);
+ switch (ch) {
+ case '\'':
+ result.push('\\\'');
+ break;
+ case '"':
+ result.push('\\"');
+ break;
+ case '\\':
+ result.push('\\\\');
+ break;
+ case '\n':
+ result.push('\\n');
+ break;
+ case '\r':
+ result.push('\\r');
+ break;
+ case '\t':
+ result.push('\\t');
+ break;
+ case '\b':
+ result.push('\\b');
+ break;
+ case '\f':
+ result.push('\\f');
+ break;
+ default:
+ result.push(ch);
+ }
+ }
+ return result.join('');
}
function processCoreBundleFormat(fileHeader, languages, json, emitter) {
- let keysSection = json.keys;
- let messageSection = json.messages;
- let bundleSection = json.bundles;
- let statistics = Object.create(null);
- let defaultMessages = Object.create(null);
- let modules = Object.keys(keysSection);
- modules.forEach((module) => {
- let keys = keysSection[module];
- let messages = messageSection[module];
- if (!messages || keys.length !== messages.length) {
- emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
- return;
- }
- let messageMap = Object.create(null);
- defaultMessages[module] = messageMap;
- keys.map((key, i) => {
- if (typeof key === 'string') {
- messageMap[key] = messages[i];
- }
- else {
- messageMap[key.key] = messages[i];
- }
- });
- });
- let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
- if (!fs.existsSync(languageDirectory)) {
- log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
- log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
- }
- let sortedLanguages = sortLanguages(languages);
- sortedLanguages.forEach((language) => {
- if (process.env['VSCODE_BUILD_VERBOSE']) {
- log(`Generating nls bundles for: ${language.id}`);
- }
- statistics[language.id] = 0;
- let localizedModules = Object.create(null);
- let languageFolderName = language.translationId || language.id;
- let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
- let allMessages;
- if (fs.existsSync(i18nFile)) {
- let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
- allMessages = JSON.parse(content);
- }
- modules.forEach((module) => {
- let order = keysSection[module];
- let moduleMessage;
- if (allMessages) {
- moduleMessage = allMessages.contents[module];
- }
- if (!moduleMessage) {
- if (process.env['VSCODE_BUILD_VERBOSE']) {
- log(`No localized messages found for module ${module}. Using default messages.`);
- }
- moduleMessage = defaultMessages[module];
- statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
- }
- let localizedMessages = [];
- order.forEach((keyInfo) => {
- let key = null;
- if (typeof keyInfo === 'string') {
- key = keyInfo;
- }
- else {
- key = keyInfo.key;
- }
- let message = moduleMessage[key];
- if (!message) {
- if (process.env['VSCODE_BUILD_VERBOSE']) {
- log(`No localized message found for key ${key} in module ${module}. Using default message.`);
- }
- message = defaultMessages[module][key];
- statistics[language.id] = statistics[language.id] + 1;
- }
- localizedMessages.push(message);
- });
- localizedModules[module] = localizedMessages;
- });
- Object.keys(bundleSection).forEach((bundle) => {
- let modules = bundleSection[bundle];
- let contents = [
- fileHeader,
- `define("${bundle}.nls.${language.id}", {`
- ];
- modules.forEach((module, index) => {
- contents.push(`\t"${module}": [`);
- let messages = localizedModules[module];
- if (!messages) {
- emitter.emit('error', `Didn't find messages for module ${module}.`);
- return;
- }
- messages.forEach((message, index) => {
- contents.push(`\t\t"${escapeCharacters(message)}${index < messages.length ? '",' : '"'}`);
- });
- contents.push(index < modules.length - 1 ? '\t],' : '\t]');
- });
- contents.push('});');
- emitter.queue(new File({ path: bundle + '.nls.' + language.id + '.js', contents: Buffer.from(contents.join('\n'), 'utf-8') }));
- });
- });
- Object.keys(statistics).forEach(key => {
- let value = statistics[key];
- log(`${key} has ${value} untranslated strings.`);
- });
- sortedLanguages.forEach(language => {
- let stats = statistics[language.id];
- if (Is.undef(stats)) {
- log(`\tNo translations found for language ${language.id}. Using default language instead.`);
- }
- });
+ let keysSection = json.keys;
+ let messageSection = json.messages;
+ let bundleSection = json.bundles;
+ let statistics = Object.create(null);
+ let defaultMessages = Object.create(null);
+ let modules = Object.keys(keysSection);
+ modules.forEach((module) => {
+ let keys = keysSection[module];
+ let messages = messageSection[module];
+ if (!messages || keys.length !== messages.length) {
+ emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
+ return;
+ }
+ let messageMap = Object.create(null);
+ defaultMessages[module] = messageMap;
+ keys.map((key, i) => {
+ if (typeof key === 'string') {
+ messageMap[key] = messages[i];
+ }
+ else {
+ messageMap[key.key] = messages[i];
+ }
+ });
+ });
+ let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
+ if (!fs.existsSync(languageDirectory)) {
+ log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
+ log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
+ }
+ let sortedLanguages = sortLanguages(languages);
+ sortedLanguages.forEach((language) => {
+ if (process.env['VSCODE_BUILD_VERBOSE']) {
+ log(`Generating nls bundles for: ${language.id}`);
+ }
+ statistics[language.id] = 0;
+ let localizedModules = Object.create(null);
+ let languageFolderName = language.translationId || language.id;
+ let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
+ let allMessages;
+ if (fs.existsSync(i18nFile)) {
+ let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
+ allMessages = JSON.parse(content);
+ }
+ modules.forEach((module) => {
+ let order = keysSection[module];
+ let moduleMessage;
+ if (allMessages) {
+ moduleMessage = allMessages.contents[module];
+ }
+ if (!moduleMessage) {
+ if (process.env['VSCODE_BUILD_VERBOSE']) {
+ log(`No localized messages found for module ${module}. Using default messages.`);
+ }
+ moduleMessage = defaultMessages[module];
+ statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
+ }
+ let localizedMessages = [];
+ order.forEach((keyInfo) => {
+ let key = null;
+ if (typeof keyInfo === 'string') {
+ key = keyInfo;
+ }
+ else {
+ key = keyInfo.key;
+ }
+ let message = moduleMessage[key];
+ if (!message) {
+ if (process.env['VSCODE_BUILD_VERBOSE']) {
+ log(`No localized message found for key ${key} in module ${module}. Using default message.`);
+ }
+ message = defaultMessages[module][key];
+ statistics[language.id] = statistics[language.id] + 1;
+ }
+ localizedMessages.push(message);
+ });
+ localizedModules[module] = localizedMessages;
+ });
+ Object.keys(bundleSection).forEach((bundle) => {
+ let modules = bundleSection[bundle];
+ let contents = [
+ fileHeader,
+ `define("${bundle}.nls.${language.id}", {`
+ ];
+ modules.forEach((module, index) => {
+ contents.push(`\t"${module}": [`);
+ let messages = localizedModules[module];
+ if (!messages) {
+ emitter.emit('error', `Didn't find messages for module ${module}.`);
+ return;
+ }
+ messages.forEach((message, index) => {
+ contents.push(`\t\t"${escapeCharacters(message)}${index < messages.length ? '",' : '"'}`);
+ });
+ contents.push(index < modules.length - 1 ? '\t],' : '\t]');
+ });
+ contents.push('});');
+ emitter.queue(new File({ path: bundle + '.nls.' + language.id + '.js', contents: Buffer.from(contents.join('\n'), 'utf-8') }));
+ });
+ });
+ Object.keys(statistics).forEach(key => {
+ let value = statistics[key];
+ log(`${key} has ${value} untranslated strings.`);
+ });
+ sortedLanguages.forEach(language => {
+ let stats = statistics[language.id];
+ if (Is.undef(stats)) {
+ log(`\tNo translations found for language ${language.id}. Using default language instead.`);
+ }
+ });
}
function processNlsFiles(opts) {
- return event_stream_1.through(function (file) {
- let fileName = path.basename(file.path);
- if (fileName === 'nls.metadata.json') {
- let json = null;
- if (file.isBuffer()) {
- json = JSON.parse(file.contents.toString('utf8'));
- }
- else {
- this.emit('error', `Failed to read component file: ${file.relative}`);
- return;
- }
- if (BundledFormat.is(json)) {
- processCoreBundleFormat(opts.fileHeader, opts.languages, json, this);
- }
- }
- this.queue(file);
- });
+ return event_stream_1.through(function (file) {
+ let fileName = path.basename(file.path);
+ if (fileName === 'nls.metadata.json') {
+ let json = null;
+ if (file.isBuffer()) {
+ json = JSON.parse(file.contents.toString('utf8'));
+ }
+ else {
+ this.emit('error', `Failed to read component file: ${file.relative}`);
+ return;
+ }
+ if (BundledFormat.is(json)) {
+ processCoreBundleFormat(opts.fileHeader, opts.languages, json, this);
+ }
+ }
+ this.queue(file);
+ });
}
exports.processNlsFiles = processNlsFiles;
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench', extensionsProject = 'vscode-extensions', setupProject = 'vscode-setup';
function getResource(sourceFile) {
- let resource;
- if (/^vs\/platform/.test(sourceFile)) {
- return { name: 'vs/platform', project: editorProject };
- }
- else if (/^vs\/editor\/contrib/.test(sourceFile)) {
- return { name: 'vs/editor/contrib', project: editorProject };
- }
- else if (/^vs\/editor/.test(sourceFile)) {
- return { name: 'vs/editor', project: editorProject };
- }
- else if (/^vs\/base/.test(sourceFile)) {
- return { name: 'vs/base', project: editorProject };
- }
- else if (/^vs\/code/.test(sourceFile)) {
- return { name: 'vs/code', project: workbenchProject };
- }
- else if (/^vs\/workbench\/contrib/.test(sourceFile)) {
- resource = sourceFile.split('/', 4).join('/');
- return { name: resource, project: workbenchProject };
- }
- else if (/^vs\/workbench\/services/.test(sourceFile)) {
- resource = sourceFile.split('/', 4).join('/');
- return { name: resource, project: workbenchProject };
- }
- else if (/^vs\/workbench/.test(sourceFile)) {
- return { name: 'vs/workbench', project: workbenchProject };
- }
- throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
+ let resource;
+ if (/^vs\/platform/.test(sourceFile)) {
+ return { name: 'vs/platform', project: editorProject };
+ }
+ else if (/^vs\/editor\/contrib/.test(sourceFile)) {
+ return { name: 'vs/editor/contrib', project: editorProject };
+ }
+ else if (/^vs\/editor/.test(sourceFile)) {
+ return { name: 'vs/editor', project: editorProject };
+ }
+ else if (/^vs\/base/.test(sourceFile)) {
+ return { name: 'vs/base', project: editorProject };
+ }
+ else if (/^vs\/code/.test(sourceFile)) {
+ return { name: 'vs/code', project: workbenchProject };
+ }
+ else if (/^vs\/workbench\/contrib/.test(sourceFile)) {
+ resource = sourceFile.split('/', 4).join('/');
+ return { name: resource, project: workbenchProject };
+ }
+ else if (/^vs\/workbench\/services/.test(sourceFile)) {
+ resource = sourceFile.split('/', 4).join('/');
+ return { name: resource, project: workbenchProject };
+ }
+ else if (/^vs\/workbench/.test(sourceFile)) {
+ return { name: 'vs/workbench', project: workbenchProject };
+ }
+ throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
}
exports.getResource = getResource;
function createXlfFilesForCoreBundle() {
- return event_stream_1.through(function (file) {
- const basename = path.basename(file.path);
- if (basename === 'nls.metadata.json') {
- if (file.isBuffer()) {
- const xlfs = Object.create(null);
- const json = JSON.parse(file.contents.toString('utf8'));
- for (let coreModule in json.keys) {
- const projectResource = getResource(coreModule);
- const resource = projectResource.name;
- const project = projectResource.project;
- const keys = json.keys[coreModule];
- const messages = json.messages[coreModule];
- if (keys.length !== messages.length) {
- this.emit('error', `There is a mismatch between keys and messages in ${file.relative} for module ${coreModule}`);
- return;
- }
- else {
- let xlf = xlfs[resource];
- if (!xlf) {
- xlf = new XLF(project);
- xlfs[resource] = xlf;
- }
- xlf.addFile(`src/${coreModule}`, keys, messages);
- }
- }
- for (let resource in xlfs) {
- const xlf = xlfs[resource];
- const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
- const xlfFile = new File({
- path: filePath,
- contents: Buffer.from(xlf.toString(), 'utf8')
- });
- this.queue(xlfFile);
- }
- }
- else {
- this.emit('error', new Error(`File ${file.relative} is not using a buffer content`));
- return;
- }
- }
- else {
- this.emit('error', new Error(`File ${file.relative} is not a core meta data file.`));
- return;
- }
- });
+ return event_stream_1.through(function (file) {
+ const basename = path.basename(file.path);
+ if (basename === 'nls.metadata.json') {
+ if (file.isBuffer()) {
+ const xlfs = Object.create(null);
+ const json = JSON.parse(file.contents.toString('utf8'));
+ for (let coreModule in json.keys) {
+ const projectResource = getResource(coreModule);
+ const resource = projectResource.name;
+ const project = projectResource.project;
+ const keys = json.keys[coreModule];
+ const messages = json.messages[coreModule];
+ if (keys.length !== messages.length) {
+ this.emit('error', `There is a mismatch between keys and messages in ${file.relative} for module ${coreModule}`);
+ return;
+ }
+ else {
+ let xlf = xlfs[resource];
+ if (!xlf) {
+ xlf = new XLF(project);
+ xlfs[resource] = xlf;
+ }
+ xlf.addFile(`src/${coreModule}`, keys, messages);
+ }
+ }
+ for (let resource in xlfs) {
+ const xlf = xlfs[resource];
+ const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
+ const xlfFile = new File({
+ path: filePath,
+ contents: Buffer.from(xlf.toString(), 'utf8')
+ });
+ this.queue(xlfFile);
+ }
+ }
+ else {
+ this.emit('error', new Error(`File ${file.relative} is not using a buffer content`));
+ return;
+ }
+ }
+ else {
+ this.emit('error', new Error(`File ${file.relative} is not a core meta data file.`));
+ return;
+ }
+ });
}
exports.createXlfFilesForCoreBundle = createXlfFilesForCoreBundle;
function createXlfFilesForExtensions() {
- let counter = 0;
- let folderStreamEnded = false;
- let folderStreamEndEmitted = false;
- return event_stream_1.through(function (extensionFolder) {
- const folderStream = this;
- const stat = fs.statSync(extensionFolder.path);
- if (!stat.isDirectory()) {
- return;
- }
- let extensionName = path.basename(extensionFolder.path);
- if (extensionName === 'node_modules') {
- return;
- }
- counter++;
- let _xlf;
- function getXlf() {
- if (!_xlf) {
- _xlf = new XLF(extensionsProject);
- }
- return _xlf;
- }
- gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
- if (file.isBuffer()) {
- const buffer = file.contents;
- const basename = path.basename(file.path);
- if (basename === 'package.nls.json') {
- const json = JSON.parse(buffer.toString('utf8'));
- const keys = Object.keys(json);
- const messages = keys.map((key) => {
- const value = json[key];
- if (Is.string(value)) {
- return value;
- }
- else if (value) {
- return value.message;
- }
- else {
- return `Unknown message for key: ${key}`;
- }
- });
- getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
- }
- else if (basename === 'nls.metadata.json') {
- const json = JSON.parse(buffer.toString('utf8'));
- const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
- for (let file in json) {
- const fileContent = json[file];
- getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
- }
- }
- else {
- this.emit('error', new Error(`${file.path} is not a valid extension nls file`));
- return;
- }
- }
- }, function () {
- if (_xlf) {
- let xlfFile = new File({
- path: path.join(extensionsProject, extensionName + '.xlf'),
- contents: Buffer.from(_xlf.toString(), 'utf8')
- });
- folderStream.queue(xlfFile);
- }
- this.queue(null);
- counter--;
- if (counter === 0 && folderStreamEnded && !folderStreamEndEmitted) {
- folderStreamEndEmitted = true;
- folderStream.queue(null);
- }
- }));
- }, function () {
- folderStreamEnded = true;
- if (counter === 0) {
- folderStreamEndEmitted = true;
- this.queue(null);
- }
- });
+ let counter = 0;
+ let folderStreamEnded = false;
+ let folderStreamEndEmitted = false;
+ return event_stream_1.through(function (extensionFolder) {
+ const folderStream = this;
+ const stat = fs.statSync(extensionFolder.path);
+ if (!stat.isDirectory()) {
+ return;
+ }
+ let extensionName = path.basename(extensionFolder.path);
+ if (extensionName === 'node_modules') {
+ return;
+ }
+ counter++;
+ let _xlf;
+ function getXlf() {
+ if (!_xlf) {
+ _xlf = new XLF(extensionsProject);
+ }
+ return _xlf;
+ }
+ gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
+ if (file.isBuffer()) {
+ const buffer = file.contents;
+ const basename = path.basename(file.path);
+ if (basename === 'package.nls.json') {
+ const json = JSON.parse(buffer.toString('utf8'));
+ const keys = Object.keys(json);
+ const messages = keys.map((key) => {
+ const value = json[key];
+ if (Is.string(value)) {
+ return value;
+ }
+ else if (value) {
+ return value.message;
+ }
+ else {
+ return `Unknown message for key: ${key}`;
+ }
+ });
+ getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
+ }
+ else if (basename === 'nls.metadata.json') {
+ const json = JSON.parse(buffer.toString('utf8'));
+ const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
+ for (let file in json) {
+ const fileContent = json[file];
+ getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
+ }
+ }
+ else {
+ this.emit('error', new Error(`${file.path} is not a valid extension nls file`));
+ return;
+ }
+ }
+ }, function () {
+ if (_xlf) {
+ let xlfFile = new File({
+ path: path.join(extensionsProject, extensionName + '.xlf'),
+ contents: Buffer.from(_xlf.toString(), 'utf8')
+ });
+ folderStream.queue(xlfFile);
+ }
+ this.queue(null);
+ counter--;
+ if (counter === 0 && folderStreamEnded && !folderStreamEndEmitted) {
+ folderStreamEndEmitted = true;
+ folderStream.queue(null);
+ }
+ }));
+ }, function () {
+ folderStreamEnded = true;
+ if (counter === 0) {
+ folderStreamEndEmitted = true;
+ this.queue(null);
+ }
+ });
}
exports.createXlfFilesForExtensions = createXlfFilesForExtensions;
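As with the previous hunk, this is a whitespace-only re-indentation. The part worth calling out is the bookkeeping that ends the outer folder stream only after every per-extension gulp pipeline has flushed: counter tracks in-flight extensions, folderStreamEnded records that no more folders will arrive, and folderStreamEndEmitted guards against queueing null twice. A stripped-down sketch of the same pattern with plain callbacks (names are illustrative, not from the build code):

function makeNestedTracker(onAllDone) {
    let counter = 0;
    let ended = false;
    let endEmitted = false;
    const maybeFinish = () => {
        if (counter === 0 && ended && !endEmitted) {
            endEmitted = true;
            onAllDone();
        }
    };
    return {
        start() { counter++; },                 // a nested pipeline began
        finish() { counter--; maybeFinish(); }, // a nested pipeline flushed
        end() { ended = true; maybeFinish(); }  // the outer stream saw its last folder
    };
}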
function createXlfFilesForIsl() {
- return event_stream_1.through(function (file) {
- let projectName, resourceFile;
- if (path.basename(file.path) === 'Default.isl') {
- projectName = setupProject;
- resourceFile = 'setup_default.xlf';
- }
- else {
- projectName = workbenchProject;
- resourceFile = 'setup_messages.xlf';
- }
- let xlf = new XLF(projectName), keys = [], messages = [];
- let model = new TextModel(file.contents.toString());
- let inMessageSection = false;
- model.lines.forEach(line => {
- if (line.length === 0) {
- return;
- }
- let firstChar = line.charAt(0);
- switch (firstChar) {
- case ';':
- // Comment line;
- return;
- case '[':
- inMessageSection = '[Messages]' === line || '[CustomMessages]' === line;
- return;
- }
- if (!inMessageSection) {
- return;
- }
- let sections = line.split('=');
- if (sections.length !== 2) {
- throw new Error(`Badly formatted message found: ${line}`);
- }
- else {
- let key = sections[0];
- let value = sections[1];
- if (key.length > 0 && value.length > 0) {
- keys.push(key);
- messages.push(value);
- }
- }
- });
- const originalPath = file.path.substring(file.cwd.length + 1, file.path.split('.')[0].length).replace(/\\/g, '/');
- xlf.addFile(originalPath, keys, messages);
- // Emit only upon all ISL files combined into single XLF instance
- const newFilePath = path.join(projectName, resourceFile);
- const xlfFile = new File({ path: newFilePath, contents: Buffer.from(xlf.toString(), 'utf-8') });
- this.queue(xlfFile);
- });
+ return event_stream_1.through(function (file) {
+ let projectName, resourceFile;
+ if (path.basename(file.path) === 'messages.en.isl') {
+ projectName = setupProject;
+ resourceFile = 'messages.xlf';
+ }
+ else {
+ throw new Error(`Unknown input file ${file.path}`);
+ }
+ let xlf = new XLF(projectName), keys = [], messages = [];
+ let model = new TextModel(file.contents.toString());
+ let inMessageSection = false;
+ model.lines.forEach(line => {
+ if (line.length === 0) {
+ return;
+ }
+ let firstChar = line.charAt(0);
+ switch (firstChar) {
+ case ';':
+ // Comment line;
+ return;
+ case '[':
+ inMessageSection = '[Messages]' === line || '[CustomMessages]' === line;
+ return;
+ }
+ if (!inMessageSection) {
+ return;
+ }
+ let sections = line.split('=');
+ if (sections.length !== 2) {
+ throw new Error(`Badly formatted message found: ${line}`);
+ }
+ else {
+ let key = sections[0];
+ let value = sections[1];
+ if (key.length > 0 && value.length > 0) {
+ keys.push(key);
+ messages.push(value);
+ }
+ }
+ });
+ const originalPath = file.path.substring(file.cwd.length + 1, file.path.split('.')[0].length).replace(/\\/g, '/');
+ xlf.addFile(originalPath, keys, messages);
+ // Emit only upon all ISL files combined into single XLF instance
+ const newFilePath = path.join(projectName, resourceFile);
+ const xlfFile = new File({ path: newFilePath, contents: Buffer.from(xlf.toString(), 'utf-8') });
+ this.queue(xlfFile);
+ });
}
exports.createXlfFilesForIsl = createXlfFilesForIsl;
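Unlike the surrounding hunks, this one changes behaviour: the rewritten createXlfFilesForIsl only accepts messages.en.isl, always writes messages.xlf into the setup project, and throws on any other input (the old Default.isl / setup_default.xlf branch is gone). The [Messages]/[CustomMessages] scan itself is unchanged; as a standalone helper it amounts to the following sketch:

// Parse the Messages/CustomMessages sections of an Inno Setup .isl file into
// parallel key/message arrays (sketch of the loop above, not the build code itself).
function parseIslMessages(text) {
    const keys = [];
    const messages = [];
    let inMessageSection = false;
    for (const line of text.split(/\r?\n/)) {
        if (line.length === 0 || line.charAt(0) === ';') {
            continue; // blank or comment line
        }
        if (line.charAt(0) === '[') {
            inMessageSection = line === '[Messages]' || line === '[CustomMessages]';
            continue;
        }
        if (!inMessageSection) {
            continue;
        }
        const sections = line.split('=');
        if (sections.length !== 2) {
            throw new Error(`Badly formatted message found: ${line}`);
        }
        const [key, value] = sections;
        if (key.length > 0 && value.length > 0) {
            keys.push(key);
            messages.push(value);
        }
    }
    return { keys, messages };
}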
function pushXlfFiles(apiHostname, username, password) {
- let tryGetPromises = [];
- let updateCreatePromises = [];
- return event_stream_1.through(function (file) {
- const project = path.dirname(file.relative);
- const fileName = path.basename(file.path);
- const slug = fileName.substr(0, fileName.length - '.xlf'.length);
- const credentials = `${username}:${password}`;
- // Check if resource already exists, if not, then create it.
- let promise = tryGetResource(project, slug, apiHostname, credentials);
- tryGetPromises.push(promise);
- promise.then(exists => {
- if (exists) {
- promise = updateResource(project, slug, file, apiHostname, credentials);
- }
- else {
- promise = createResource(project, slug, file, apiHostname, credentials);
- }
- updateCreatePromises.push(promise);
- });
- }, function () {
- // End the pipe only after all the communication with Transifex API happened
- Promise.all(tryGetPromises).then(() => {
- Promise.all(updateCreatePromises).then(() => {
- this.queue(null);
- }).catch((reason) => { throw new Error(reason); });
- }).catch((reason) => { throw new Error(reason); });
- });
+ let tryGetPromises = [];
+ let updateCreatePromises = [];
+ return event_stream_1.through(function (file) {
+ const project = path.dirname(file.relative);
+ const fileName = path.basename(file.path);
+ const slug = fileName.substr(0, fileName.length - '.xlf'.length);
+ const credentials = `${username}:${password}`;
+ // Check if resource already exists, if not, then create it.
+ let promise = tryGetResource(project, slug, apiHostname, credentials);
+ tryGetPromises.push(promise);
+ promise.then(exists => {
+ if (exists) {
+ promise = updateResource(project, slug, file, apiHostname, credentials);
+ }
+ else {
+ promise = createResource(project, slug, file, apiHostname, credentials);
+ }
+ updateCreatePromises.push(promise);
+ });
+ }, function () {
+ // End the pipe only after all the communication with Transifex API happened
+ Promise.all(tryGetPromises).then(() => {
+ Promise.all(updateCreatePromises).then(() => {
+ this.queue(null);
+ }).catch((reason) => { throw new Error(reason); });
+ }).catch((reason) => { throw new Error(reason); });
+ });
}
exports.pushXlfFiles = pushXlfFiles;
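pushXlfFiles is also untouched apart from indentation. For each incoming XLF it first probes Transifex for the resource and then either updates or creates it, and the flush callback waits on both batches of promises before ending the stream. The same per-file flow reads more directly with async/await; this sketch simply reuses the tryGetResource, createResource and updateResource helpers defined later in this file:

async function pushOneXlf(project, slug, file, apiHostname, credentials) {
    const exists = await tryGetResource(project, slug, apiHostname, credentials);
    return exists
        ? updateResource(project, slug, file, apiHostname, credentials)
        : createResource(project, slug, file, apiHostname, credentials);
}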
function getAllResources(project, apiHostname, username, password) {
- return new Promise((resolve, reject) => {
- const credentials = `${username}:${password}`;
- const options = {
- hostname: apiHostname,
- path: `/api/2/project/${project}/resources`,
- auth: credentials,
- method: 'GET'
- };
- const request = https.request(options, (res) => {
- let buffer = [];
- res.on('data', (chunk) => buffer.push(chunk));
- res.on('end', () => {
- if (res.statusCode === 200) {
- let json = JSON.parse(Buffer.concat(buffer).toString());
- if (Array.isArray(json)) {
- resolve(json.map(o => o.slug));
- return;
- }
- reject(`Unexpected data format. Response code: ${res.statusCode}.`);
- }
- else {
- reject(`No resources in ${project} returned no data. Response code: ${res.statusCode}.`);
- }
- });
- });
- request.on('error', (err) => {
- reject(`Failed to query resources in ${project} with the following error: ${err}. ${options.path}`);
- });
- request.end();
- });
+ return new Promise((resolve, reject) => {
+ const credentials = `${username}:${password}`;
+ const options = {
+ hostname: apiHostname,
+ path: `/api/2/project/${project}/resources`,
+ auth: credentials,
+ method: 'GET'
+ };
+ const request = https.request(options, (res) => {
+ let buffer = [];
+ res.on('data', (chunk) => buffer.push(chunk));
+ res.on('end', () => {
+ if (res.statusCode === 200) {
+ let json = JSON.parse(Buffer.concat(buffer).toString());
+ if (Array.isArray(json)) {
+ resolve(json.map(o => o.slug));
+ return;
+ }
+ reject(`Unexpected data format. Response code: ${res.statusCode}.`);
+ }
+ else {
+ reject(`No resources in ${project} returned no data. Response code: ${res.statusCode}.`);
+ }
+ });
+ });
+ request.on('error', (err) => {
+ reject(`Failed to query resources in ${project} with the following error: ${err}. ${options.path}`);
+ });
+ request.end();
+ });
}
function findObsoleteResources(apiHostname, username, password) {
- let resourcesByProject = Object.create(null);
- resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
- return event_stream_1.through(function (file) {
- const project = path.dirname(file.relative);
- const fileName = path.basename(file.path);
- const slug = fileName.substr(0, fileName.length - '.xlf'.length);
- let slugs = resourcesByProject[project];
- if (!slugs) {
- resourcesByProject[project] = slugs = [];
- }
- slugs.push(slug);
- this.push(file);
- }, function () {
- const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
- let i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
- let extractedResources = [];
- for (let project of [workbenchProject, editorProject]) {
- for (let resource of resourcesByProject[project]) {
- if (resource !== 'setup_messages') {
- extractedResources.push(project + '/' + resource);
- }
- }
- }
- if (i18Resources.length !== extractedResources.length) {
- console.log(`[i18n] Obsolete resources in file 'build/lib/i18n.resources.json': JSON.stringify(${i18Resources.filter(p => extractedResources.indexOf(p) === -1)})`);
- console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
- }
- let promises = [];
- for (let project in resourcesByProject) {
- promises.push(getAllResources(project, apiHostname, username, password).then(resources => {
- let expectedResources = resourcesByProject[project];
- let unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
- if (unusedResources.length) {
- console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
- }
- }));
- }
- return Promise.all(promises).then(_ => {
- this.push(null);
- }).catch((reason) => { throw new Error(reason); });
- });
+ let resourcesByProject = Object.create(null);
+ resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
+ return event_stream_1.through(function (file) {
+ const project = path.dirname(file.relative);
+ const fileName = path.basename(file.path);
+ const slug = fileName.substr(0, fileName.length - '.xlf'.length);
+ let slugs = resourcesByProject[project];
+ if (!slugs) {
+ resourcesByProject[project] = slugs = [];
+ }
+ slugs.push(slug);
+ this.push(file);
+ }, function () {
+ const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
+ let i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
+ let extractedResources = [];
+ for (let project of [workbenchProject, editorProject]) {
+ for (let resource of resourcesByProject[project]) {
+ if (resource !== 'setup_messages') {
+ extractedResources.push(project + '/' + resource);
+ }
+ }
+ }
+ if (i18Resources.length !== extractedResources.length) {
+ console.log(`[i18n] Obsolete resources in file 'build/lib/i18n.resources.json': JSON.stringify(${i18Resources.filter(p => extractedResources.indexOf(p) === -1)})`);
+ console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
+ }
+ let promises = [];
+ for (let project in resourcesByProject) {
+ promises.push(getAllResources(project, apiHostname, username, password).then(resources => {
+ let expectedResources = resourcesByProject[project];
+ let unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
+ if (unusedResources.length) {
+ console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
+ }
+ }));
+ }
+ return Promise.all(promises).then(_ => {
+ this.push(null);
+ }).catch((reason) => { throw new Error(reason); });
+ });
}
exports.findObsoleteResources = findObsoleteResources;
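findObsoleteResources compares the resources extracted from the XLF stream against build/lib/i18n.resources.json and against what each Transifex project actually contains. Note that in the two console.log calls above, JSON.stringify( is literal text inside the template string rather than a call, so the output reads oddly; the comparison itself is a plain set difference, as in this sketch:

// Report which resources exist only on one side (sketch; the build code logs
// the two directions in separate messages).
function diffResources(expected, actual) {
    return {
        obsolete: actual.filter((resource) => expected.indexOf(resource) === -1),
        missing: expected.filter((resource) => actual.indexOf(resource) === -1)
    };
}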
function tryGetResource(project, slug, apiHostname, credentials) {
- return new Promise((resolve, reject) => {
- const options = {
- hostname: apiHostname,
- path: `/api/2/project/${project}/resource/${slug}/?details`,
- auth: credentials,
- method: 'GET'
- };
- const request = https.request(options, (response) => {
- if (response.statusCode === 404) {
- resolve(false);
- }
- else if (response.statusCode === 200) {
- resolve(true);
- }
- else {
- reject(`Failed to query resource ${project}/${slug}. Response: ${response.statusCode} ${response.statusMessage}`);
- }
- });
- request.on('error', (err) => {
- reject(`Failed to get ${project}/${slug} on Transifex: ${err}`);
- });
- request.end();
- });
+ return new Promise((resolve, reject) => {
+ const options = {
+ hostname: apiHostname,
+ path: `/api/2/project/${project}/resource/${slug}/?details`,
+ auth: credentials,
+ method: 'GET'
+ };
+ const request = https.request(options, (response) => {
+ if (response.statusCode === 404) {
+ resolve(false);
+ }
+ else if (response.statusCode === 200) {
+ resolve(true);
+ }
+ else {
+ reject(`Failed to query resource ${project}/${slug}. Response: ${response.statusCode} ${response.statusMessage}`);
+ }
+ });
+ request.on('error', (err) => {
+ reject(`Failed to get ${project}/${slug} on Transifex: ${err}`);
+ });
+ request.end();
+ });
}
function createResource(project, slug, xlfFile, apiHostname, credentials) {
- return new Promise((_resolve, reject) => {
- const data = JSON.stringify({
- 'content': xlfFile.contents.toString(),
- 'name': slug,
- 'slug': slug,
- 'i18n_type': 'XLIFF'
- });
- const options = {
- hostname: apiHostname,
- path: `/api/2/project/${project}/resources`,
- headers: {
- 'Content-Type': 'application/json',
- 'Content-Length': Buffer.byteLength(data)
- },
- auth: credentials,
- method: 'POST'
- };
- let request = https.request(options, (res) => {
- if (res.statusCode === 201) {
- log(`Resource ${project}/${slug} successfully created on Transifex.`);
- }
- else {
- reject(`Something went wrong in the request creating ${slug} in ${project}. ${res.statusCode}`);
- }
- });
- request.on('error', (err) => {
- reject(`Failed to create ${project}/${slug} on Transifex: ${err}`);
- });
- request.write(data);
- request.end();
- });
+ return new Promise((_resolve, reject) => {
+ const data = JSON.stringify({
+ 'content': xlfFile.contents.toString(),
+ 'name': slug,
+ 'slug': slug,
+ 'i18n_type': 'XLIFF'
+ });
+ const options = {
+ hostname: apiHostname,
+ path: `/api/2/project/${project}/resources`,
+ headers: {
+ 'Content-Type': 'application/json',
+ 'Content-Length': Buffer.byteLength(data)
+ },
+ auth: credentials,
+ method: 'POST'
+ };
+ let request = https.request(options, (res) => {
+ if (res.statusCode === 201) {
+ log(`Resource ${project}/${slug} successfully created on Transifex.`);
+ }
+ else {
+ reject(`Something went wrong in the request creating ${slug} in ${project}. ${res.statusCode}`);
+ }
+ });
+ request.on('error', (err) => {
+ reject(`Failed to create ${project}/${slug} on Transifex: ${err}`);
+ });
+ request.write(data);
+ request.end();
+ });
}
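One quirk worth noting in createResource (present before and after the re-indent): the executor only ever calls reject; on a 201 it logs and leaves the promise pending, which is presumably why the resolve parameter is named _resolve. A small sketch of a POST helper that settles in both cases, assuming the same Node https API used above:

const https = require('https');

function postJson(options, body) {
    return new Promise((resolve, reject) => {
        const request = https.request(options, (res) => {
            res.resume(); // drain the response so 'end' fires
            res.on('end', () => {
                return res.statusCode === 201
                    ? resolve()
                    : reject(new Error(`Unexpected status code ${res.statusCode}`));
            });
        });
        request.on('error', reject);
        request.write(body);
        request.end();
    });
}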
/**
* The following link provides information about how Transifex handles updates of a resource file:
* https://dev.befoolish.co/tx-docs/public/projects/updating-content#what-happens-when-you-update-files
*/
function updateResource(project, slug, xlfFile, apiHostname, credentials) {
- return new Promise((resolve, reject) => {
- const data = JSON.stringify({ content: xlfFile.contents.toString() });
- const options = {
- hostname: apiHostname,
- path: `/api/2/project/${project}/resource/${slug}/content`,
- headers: {
- 'Content-Type': 'application/json',
- 'Content-Length': Buffer.byteLength(data)
- },
- auth: credentials,
- method: 'PUT'
- };
- let request = https.request(options, (res) => {
- if (res.statusCode === 200) {
- res.setEncoding('utf8');
- let responseBuffer = '';
- res.on('data', function (chunk) {
- responseBuffer += chunk;
- });
- res.on('end', () => {
- const response = JSON.parse(responseBuffer);
- log(`Resource ${project}/${slug} successfully updated on Transifex. Strings added: ${response.strings_added}, updated: ${response.strings_added}, deleted: ${response.strings_added}`);
- resolve();
- });
- }
- else {
- reject(`Something went wrong in the request updating ${slug} in ${project}. ${res.statusCode}`);
- }
- });
- request.on('error', (err) => {
- reject(`Failed to update ${project}/${slug} on Transifex: ${err}`);
- });
- request.write(data);
- request.end();
- });
+ return new Promise((resolve, reject) => {
+ const data = JSON.stringify({ content: xlfFile.contents.toString() });
+ const options = {
+ hostname: apiHostname,
+ path: `/api/2/project/${project}/resource/${slug}/content`,
+ headers: {
+ 'Content-Type': 'application/json',
+ 'Content-Length': Buffer.byteLength(data)
+ },
+ auth: credentials,
+ method: 'PUT'
+ };
+ let request = https.request(options, (res) => {
+ if (res.statusCode === 200) {
+ res.setEncoding('utf8');
+ let responseBuffer = '';
+ res.on('data', function (chunk) {
+ responseBuffer += chunk;
+ });
+ res.on('end', () => {
+ const response = JSON.parse(responseBuffer);
+ log(`Resource ${project}/${slug} successfully updated on Transifex. Strings added: ${response.strings_added}, updated: ${response.strings_added}, deleted: ${response.strings_added}`);
+ resolve();
+ });
+ }
+ else {
+ reject(`Something went wrong in the request updating ${slug} in ${project}. ${res.statusCode}`);
+ }
+ });
+ request.on('error', (err) => {
+ reject(`Failed to update ${project}/${slug} on Transifex: ${err}`);
+ });
+ request.write(data);
+ request.end();
+ });
}
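updateResource parses the Transifex response, but its success log interpolates response.strings_added three times, so the "updated" and "deleted" counts it prints are really the "added" count. If the other counters were wanted, the log would need to reference their own response fields; the field names below are an assumption about the Transifex v2 payload, not taken from this file:

// Assumed field names; verify against the actual Transifex response before relying on them.
function formatUpdateLog(project, slug, response) {
    return `Resource ${project}/${slug} successfully updated on Transifex. ` +
        `Strings added: ${response.strings_added}, updated: ${response.strings_updated}, deleted: ${response.strings_delete}`;
}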
-// cache resources
-let _coreAndExtensionResources;
-function pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language, externalExtensions) {
- if (!_coreAndExtensionResources) {
- _coreAndExtensionResources = [];
- // editor and workbench
- const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
- _coreAndExtensionResources.push(...json.editor);
- _coreAndExtensionResources.push(...json.workbench);
- // extensions
- let extensionsToLocalize = Object.create(null);
- glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
- glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
- Object.keys(extensionsToLocalize).forEach(extension => {
- _coreAndExtensionResources.push({ name: extension, project: extensionsProject });
- });
- if (externalExtensions) {
- for (let resourceName in externalExtensions) {
- _coreAndExtensionResources.push({ name: resourceName, project: extensionsProject });
- }
- }
- }
- return pullXlfFiles(apiHostname, username, password, language, _coreAndExtensionResources);
-}
-exports.pullCoreAndExtensionsXlfFiles = pullCoreAndExtensionsXlfFiles;
function pullSetupXlfFiles(apiHostname, username, password, language, includeDefault) {
- let setupResources = [{ name: 'setup_messages', project: workbenchProject }];
- if (includeDefault) {
- setupResources.push({ name: 'setup_default', project: setupProject });
- }
- return pullXlfFiles(apiHostname, username, password, language, setupResources);
+ let setupResources = [{ name: 'setup_messages', project: workbenchProject }];
+ if (includeDefault) {
+ setupResources.push({ name: 'setup_default', project: setupProject });
+ }
+ return pullXlfFiles(apiHostname, username, password, language, setupResources);
}
exports.pullSetupXlfFiles = pullSetupXlfFiles;
function pullXlfFiles(apiHostname, username, password, language, resources) {
- const credentials = `${username}:${password}`;
- let expectedTranslationsCount = resources.length;
- let translationsRetrieved = 0, called = false;
- return event_stream_1.readable(function (_count, callback) {
- // Mark end of stream when all resources were retrieved
- if (translationsRetrieved === expectedTranslationsCount) {
- return this.emit('end');
- }
- if (!called) {
- called = true;
- const stream = this;
- resources.map(function (resource) {
- retrieveResource(language, resource, apiHostname, credentials).then((file) => {
- if (file) {
- stream.emit('data', file);
- }
- translationsRetrieved++;
- }).catch(error => { throw new Error(error); });
- });
- }
- callback();
- });
+ const credentials = `${username}:${password}`;
+ let expectedTranslationsCount = resources.length;
+ let translationsRetrieved = 0, called = false;
+ return event_stream_1.readable(function (_count, callback) {
+ // Mark end of stream when all resources were retrieved
+ if (translationsRetrieved === expectedTranslationsCount) {
+ return this.emit('end');
+ }
+ if (!called) {
+ called = true;
+ const stream = this;
+ resources.map(function (resource) {
+ retrieveResource(language, resource, apiHostname, credentials).then((file) => {
+ if (file) {
+ stream.emit('data', file);
+ }
+ translationsRetrieved++;
+ }).catch(error => { throw new Error(error); });
+ });
+ }
+ callback();
+ });
}
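pullXlfFiles drives a readable stream: the callback keeps being invoked until translationsRetrieved catches up with the number of requested resources, and the downloads are all kicked off on the first call. A compressed, sequential sketch of the same idea with an async generator (the real code fans out in parallel behind the limiter defined next):

// Yield one vinyl-like file per resource; resources that resolve to null
// (e.g. a 404 from Transifex) are skipped, mirroring the stream above.
async function* pullAll(resources, fetchOne) {
    for (const resource of resources) {
        const file = await fetchOne(resource);
        if (file) {
            yield file;
        }
    }
}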
const limiter = new Limiter(NUMBER_OF_CONCURRENT_DOWNLOADS);
function retrieveResource(language, resource, apiHostname, credentials) {
- return limiter.queue(() => new Promise((resolve, reject) => {
- const slug = resource.name.replace(/\//g, '_');
- const project = resource.project;
- let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
- const options = {
- hostname: apiHostname,
- path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
- auth: credentials,
- port: 443,
- method: 'GET'
- };
- console.log('[transifex] Fetching ' + options.path);
- let request = https.request(options, (res) => {
- let xlfBuffer = [];
- res.on('data', (chunk) => xlfBuffer.push(chunk));
- res.on('end', () => {
- if (res.statusCode === 200) {
- resolve(new File({ contents: Buffer.concat(xlfBuffer), path: `${project}/${slug}.xlf` }));
- }
- else if (res.statusCode === 404) {
- console.log(`[transifex] ${slug} in ${project} returned no data.`);
- resolve(null);
- }
- else {
- reject(`${slug} in ${project} returned no data. Response code: ${res.statusCode}.`);
- }
- });
- });
- request.on('error', (err) => {
- reject(`Failed to query resource ${slug} with the following error: ${err}. ${options.path}`);
- });
- request.end();
- }));
+ return limiter.queue(() => new Promise((resolve, reject) => {
+ const slug = resource.name.replace(/\//g, '_');
+ const project = resource.project;
+ let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
+ const options = {
+ hostname: apiHostname,
+ path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
+ auth: credentials,
+ port: 443,
+ method: 'GET'
+ };
+ console.log('[transifex] Fetching ' + options.path);
+ let request = https.request(options, (res) => {
+ let xlfBuffer = [];
+ res.on('data', (chunk) => xlfBuffer.push(chunk));
+ res.on('end', () => {
+ if (res.statusCode === 200) {
+ resolve(new File({ contents: Buffer.concat(xlfBuffer), path: `${project}/${slug}.xlf` }));
+ }
+ else if (res.statusCode === 404) {
+ console.log(`[transifex] ${slug} in ${project} returned no data.`);
+ resolve(null);
+ }
+ else {
+ reject(`${slug} in ${project} returned no data. Response code: ${res.statusCode}.`);
+ }
+ });
+ });
+ request.on('error', (err) => {
+ reject(`Failed to query resource ${slug} with the following error: ${err}. ${options.path}`);
+ });
+ request.end();
+ }));
}
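retrieveResource funnels every download through limiter, so at most NUMBER_OF_CONCURRENT_DOWNLOADS requests hit Transifex at once. The Limiter itself comes from the build utilities; a minimal stand-in with the same queue(factory) shape might look like this (a sketch, not the real implementation):

class SketchLimiter {
    constructor(maxConcurrent) {
        this.max = maxConcurrent;
        this.running = 0;
        this.pending = [];
    }
    queue(factory) {
        return new Promise((resolve, reject) => {
            this.pending.push({ factory, resolve, reject });
            this.drain();
        });
    }
    drain() {
        while (this.running < this.max && this.pending.length > 0) {
            const { factory, resolve, reject } = this.pending.shift();
            this.running++;
            factory()
                .then(resolve, reject)
                .finally(() => { this.running--; this.drain(); });
        }
    }
}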
function prepareI18nFiles() {
- let parsePromises = [];
- return event_stream_1.through(function (xlf) {
- let stream = this;
- let parsePromise = XLF.parse(xlf.contents.toString());
- parsePromises.push(parsePromise);
- parsePromise.then(resolvedFiles => {
- resolvedFiles.forEach(file => {
- let translatedFile = createI18nFile(file.originalFilePath, file.messages);
- stream.queue(translatedFile);
- });
- });
- }, function () {
- Promise.all(parsePromises)
- .then(() => { this.queue(null); })
- .catch(reason => { throw new Error(reason); });
- });
+ let parsePromises = [];
+ return event_stream_1.through(function (xlf) {
+ let stream = this;
+ let parsePromise = XLF.parse(xlf.contents.toString());
+ parsePromises.push(parsePromise);
+ parsePromise.then(resolvedFiles => {
+ resolvedFiles.forEach(file => {
+ let translatedFile = createI18nFile(file.originalFilePath, file.messages);
+ stream.queue(translatedFile);
+ });
+ });
+ }, function () {
+ Promise.all(parsePromises)
+ .then(() => { this.queue(null); })
+ .catch(reason => { throw new Error(reason); });
+ });
}
exports.prepareI18nFiles = prepareI18nFiles;
function createI18nFile(originalFilePath, messages) {
- let result = Object.create(null);
- result[''] = [
- '--------------------------------------------------------------------------------------------',
- 'Copyright (c) Microsoft Corporation. All rights reserved.',
- 'Licensed under the MIT License. See License.txt in the project root for license information.',
- '--------------------------------------------------------------------------------------------',
- 'Do not edit this file. It is machine generated.'
- ];
- for (let key of Object.keys(messages)) {
- result[key] = messages[key];
- }
- let content = JSON.stringify(result, null, '\t');
- if (process.platform === 'win32') {
- content = content.replace(/\n/g, '\r\n');
- }
- return new File({
- path: path.join(originalFilePath + '.i18n.json'),
- contents: Buffer.from(content, 'utf8')
- });
+ let result = Object.create(null);
+ result[''] = [
+ '--------------------------------------------------------------------------------------------',
+ 'Copyright (c) Microsoft Corporation. All rights reserved.',
+ 'Licensed under the MIT License. See License.txt in the project root for license information.',
+ '--------------------------------------------------------------------------------------------',
+ 'Do not edit this file. It is machine generated.'
+ ];
+ for (let key of Object.keys(messages)) {
+ result[key] = messages[key];
+ }
+ let content = JSON.stringify(result, null, '\t');
+ if (process.platform === 'win32') {
+ content = content.replace(/\n/g, '\r\n');
+ }
+ return new File({
+ path: path.join(originalFilePath + '.i18n.json'),
+ contents: Buffer.from(content, 'utf8')
+ });
}
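createI18nFile prefixes every generated .i18n.json with the machine-generated header under the empty-string key and switches to CRLF line endings on Windows. A quick usage example with made-up messages (the values are illustrative only):

const file = createI18nFile('vs/workbench/contrib/files', {
    'openFile': 'Datei öffnen',
    'saveFile': 'Datei speichern'
});
// file.path     -> 'vs/workbench/contrib/files.i18n.json'
// file.contents -> the header comment block under '' followed by both keys.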
const i18nPackVersion = '1.0.0';
-function pullI18nPackFiles(apiHostname, username, password, language, resultingTranslationPaths) {
- return pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language, exports.externalExtensionsWithTranslations)
- .pipe(prepareI18nPackFiles(exports.externalExtensionsWithTranslations, resultingTranslationPaths, language.id === 'ps'));
-}
-exports.pullI18nPackFiles = pullI18nPackFiles;
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
- let parsePromises = [];
- let mainPack = { version: i18nPackVersion, contents: {} };
- let extensionsPacks = {};
- let errors = [];
- return event_stream_1.through(function (xlf) {
- let project = path.basename(path.dirname(xlf.relative));
- let resource = path.basename(xlf.relative, '.xlf');
- let contents = xlf.contents.toString();
- let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
- parsePromises.push(parsePromise);
- parsePromise.then(resolvedFiles => {
- resolvedFiles.forEach(file => {
- const path = file.originalFilePath;
- const firstSlash = path.indexOf('/');
- if (project === extensionsProject) {
- let extPack = extensionsPacks[resource];
- if (!extPack) {
- extPack = extensionsPacks[resource] = { version: i18nPackVersion, contents: {} };
- }
- const externalId = externalExtensions[resource];
- if (!externalId) { // internal extension: remove 'extensions/extensionId/' segnent
- const secondSlash = path.indexOf('/', firstSlash + 1);
- extPack.contents[path.substr(secondSlash + 1)] = file.messages;
- }
- else {
- extPack.contents[path] = file.messages;
- }
- }
- else {
- mainPack.contents[path.substr(firstSlash + 1)] = file.messages;
- }
- });
- }).catch(reason => {
- errors.push(reason);
- });
- }, function () {
- Promise.all(parsePromises)
- .then(() => {
- if (errors.length > 0) {
- throw errors;
- }
- const translatedMainFile = createI18nFile('./main', mainPack);
- resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
- this.queue(translatedMainFile);
- for (let extension in extensionsPacks) {
- const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
- this.queue(translatedExtFile);
- const externalExtensionId = externalExtensions[extension];
- if (externalExtensionId) {
- resultingTranslationPaths.push({ id: externalExtensionId, resourceName: `extensions/${extension}.i18n.json` });
- }
- else {
- resultingTranslationPaths.push({ id: `vscode.${extension}`, resourceName: `extensions/${extension}.i18n.json` });
- }
- }
- this.queue(null);
- })
- .catch((reason) => {
- this.emit('error', reason);
- });
- });
+ let parsePromises = [];
+ let mainPack = { version: i18nPackVersion, contents: {} };
+ let extensionsPacks = {};
+ let errors = [];
+ return event_stream_1.through(function (xlf) {
+ let project = path.basename(path.dirname(path.dirname(xlf.relative)));
+ let resource = path.basename(xlf.relative, '.xlf');
+ let contents = xlf.contents.toString();
+ log(`Found ${project}: ${resource}`);
+ let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
+ parsePromises.push(parsePromise);
+ parsePromise.then(resolvedFiles => {
+ resolvedFiles.forEach(file => {
+ const path = file.originalFilePath;
+ const firstSlash = path.indexOf('/');
+ if (project === extensionsProject) {
+ let extPack = extensionsPacks[resource];
+ if (!extPack) {
+ extPack = extensionsPacks[resource] = { version: i18nPackVersion, contents: {} };
+ }
+ const externalId = externalExtensions[resource];
+ if (!externalId) { // internal extension: remove 'extensions/extensionId/' segment
+ const secondSlash = path.indexOf('/', firstSlash + 1);
+ extPack.contents[path.substr(secondSlash + 1)] = file.messages;
+ }
+ else {
+ extPack.contents[path] = file.messages;
+ }
+ }
+ else {
+ mainPack.contents[path.substr(firstSlash + 1)] = file.messages;
+ }
+ });
+ }).catch(reason => {
+ errors.push(reason);
+ });
+ }, function () {
+ Promise.all(parsePromises)
+ .then(() => {
+ if (errors.length > 0) {
+ throw errors;
+ }
+ const translatedMainFile = createI18nFile('./main', mainPack);
+ resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
+ this.queue(translatedMainFile);
+ for (let extension in extensionsPacks) {
+ const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
+ this.queue(translatedExtFile);
+ const externalExtensionId = externalExtensions[extension];
+ if (externalExtensionId) {
+ resultingTranslationPaths.push({ id: externalExtensionId, resourceName: `extensions/${extension}.i18n.json` });
+ }
+ else {
+ resultingTranslationPaths.push({ id: `vscode.${extension}`, resourceName: `extensions/${extension}.i18n.json` });
+ }
+ }
+ this.queue(null);
+ })
+ .catch((reason) => {
+ this.emit('error', reason);
+ });
+ });
}
exports.prepareI18nPackFiles = prepareI18nPackFiles;
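This hunk does change behaviour in two small ways: the project is now derived from two directory levels above the XLF (path.dirname is applied twice) and each incoming file is logged. Together with the removal of pullCoreAndExtensionsXlfFiles and pullI18nPackFiles earlier in this file, that implies the XLFs now arrive from an on-disk layout one level deeper than before, for example with a per-language folder. The layout below is an assumption for illustration only:

const path = require('path');

const relative = 'vscode-extensions/fr/git.xlf'; // assumed <project>/<language>/<resource>.xlf
const project = path.basename(path.dirname(path.dirname(relative))); // 'vscode-extensions'
const resource = path.basename(relative, '.xlf');                    // 'git'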
function prepareIslFiles(language, innoSetupConfig) {
- let parsePromises = [];
- return event_stream_1.through(function (xlf) {
- let stream = this;
- let parsePromise = XLF.parse(xlf.contents.toString());
- parsePromises.push(parsePromise);
- parsePromise.then(resolvedFiles => {
- resolvedFiles.forEach(file => {
- if (path.basename(file.originalFilePath) === 'Default' && !innoSetupConfig.defaultInfo) {
- return;
- }
- let translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
- stream.queue(translatedFile);
- });
- }).catch(reason => {
- this.emit('error', reason);
- });
- }, function () {
- Promise.all(parsePromises)
- .then(() => { this.queue(null); })
- .catch(reason => {
- this.emit('error', reason);
- });
- });
+ let parsePromises = [];
+ return event_stream_1.through(function (xlf) {
+ let stream = this;
+ let parsePromise = XLF.parse(xlf.contents.toString());
+ parsePromises.push(parsePromise);
+ parsePromise.then(resolvedFiles => {
+ resolvedFiles.forEach(file => {
+ let translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
+ stream.queue(translatedFile);
+ });
+ }).catch(reason => {
+ this.emit('error', reason);
+ });
+ }, function () {
+ Promise.all(parsePromises)
+ .then(() => { this.queue(null); })
+ .catch(reason => {
+ this.emit('error', reason);
+ });
+ });
}
exports.prepareIslFiles = prepareIslFiles;
function createIslFile(originalFilePath, messages, language, innoSetup) {
- let content = [];
- let originalContent;
- if (path.basename(originalFilePath) === 'Default') {
- originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
- }
- else {
- originalContent = new TextModel(fs.readFileSync(originalFilePath + '.en.isl', 'utf8'));
- }
- originalContent.lines.forEach(line => {
- if (line.length > 0) {
- let firstChar = line.charAt(0);
- if (firstChar === '[' || firstChar === ';') {
- content.push(line);
- }
- else {
- let sections = line.split('=');
- let key = sections[0];
- let translated = line;
- if (key) {
- if (key === 'LanguageName') {
- translated = `${key}=${innoSetup.defaultInfo.name}`;
- }
- else if (key === 'LanguageID') {
- translated = `${key}=${innoSetup.defaultInfo.id}`;
- }
- else if (key === 'LanguageCodePage') {
- translated = `${key}=${innoSetup.codePage.substr(2)}`;
- }
- else {
- let translatedMessage = messages[key];
- if (translatedMessage) {
- translated = `${key}=${translatedMessage}`;
- }
- }
- }
- content.push(translated);
- }
- }
- });
- const basename = path.basename(originalFilePath);
- const filePath = `${basename}.${language.id}.isl`;
- const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage);
- return new File({
- path: filePath,
- contents: Buffer.from(encoded),
- });
+ let content = [];
+ let originalContent;
+ if (path.basename(originalFilePath) === 'Default') {
+ originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
+ }
+ else {
+ originalContent = new TextModel(fs.readFileSync(originalFilePath + '.en.isl', 'utf8'));
+ }
+ originalContent.lines.forEach(line => {
+ if (line.length > 0) {
+ let firstChar = line.charAt(0);
+ if (firstChar === '[' || firstChar === ';') {
+ content.push(line);
+ }
+ else {
+ let sections = line.split('=');
+ let key = sections[0];
+ let translated = line;
+ if (key) {
+ let translatedMessage = messages[key];
+ if (translatedMessage) {
+ translated = `${key}=${translatedMessage}`;
+ }
+ }
+ content.push(translated);
+ }
+ }
+ });
+ const basename = path.basename(originalFilePath);
+ const filePath = `${basename}.${language.id}.isl`;
+ const encoded = iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage);
+ return new File({
+ path: filePath,
+ contents: Buffer.from(encoded),
+ });
}
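createIslFile loses the LanguageName/LanguageID/LanguageCodePage special cases along with InnoSetup.defaultInfo (see the interface change at the end of this diff), and prepareIslFiles above no longer filters out the Default file. Every key now simply falls back to the translated message when one exists, and the code page is only used for the final encoding step, roughly as in this sketch (assuming an iconv-lite style encode(content, codePage)):

const iconv = require('iconv-lite');

// Encode translated .isl content with the target code page, e.g. 'CP1252'.
function encodeIsl(lines, codePage) {
    return iconv.encode(lines.join('\r\n'), codePage);
}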
function encodeEntities(value) {
- let result = [];
- for (let i = 0; i < value.length; i++) {
- let ch = value[i];
- switch (ch) {
- case '<':
- result.push('&lt;');
- break;
- case '>':
- result.push('&gt;');
- break;
- case '&':
- result.push('&amp;');
- break;
- default:
- result.push(ch);
- }
- }
- return result.join('');
+ let result = [];
+ for (let i = 0; i < value.length; i++) {
+ let ch = value[i];
+ switch (ch) {
+ case '<':
+ result.push('&lt;');
+ break;
+ case '>':
+ result.push('&gt;');
+ break;
+ case '&':
+ result.push('&amp;');
+ break;
+ default:
+ result.push(ch);
+ }
+ }
+ return result.join('');
}
function decodeEntities(value) {
- return value.replace(/&lt;/g, '<').replace(/&gt;/g, '>').replace(/&amp;/g, '&');
+ return value.replace(/&lt;/g, '<').replace(/&gt;/g, '>').replace(/&amp;/g, '&');
}
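encodeEntities and decodeEntities round-trip the three characters that matter inside XLF markup; for example:

encodeEntities('a < b && c > d'); // 'a &lt; b &amp;&amp; c &gt; d'
decodeEntities('a &lt; b');       // 'a < b'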
function pseudify(message) {
- return '\uFF3B' + message.replace(/[aouei]/g, '$&$&') + '\uFF3D';
+ return '\uFF3B' + message.replace(/[aouei]/g, '$&$&') + '\uFF3D';
}
diff --git a/lib/vscode/build/lib/i18n.ts b/lib/vscode/build/lib/i18n.ts
index 746c481b262d..5e00e9d6c6e6 100644
--- a/lib/vscode/build/lib/i18n.ts
+++ b/lib/vscode/build/lib/i18n.ts
@@ -10,7 +10,6 @@ import { through, readable, ThroughStream } from 'event-stream';
import * as File from 'vinyl';
import * as Is from 'is';
import * as xml2js from 'xml2js';
-import * as glob from 'glob';
import * as https from 'https';
import * as gulp from 'gulp';
import * as fancyLog from 'fancy-log';
@@ -31,10 +30,6 @@ export interface Language {
export interface InnoSetup {
codePage: string; //code page for encoding (http://www.jrsoftware.org/ishelp/index.php?topic=langoptionssection)
- defaultInfo?: {
- name: string; // inno setup language name
- id: string; // locale identifier (https://msdn.microsoft.com/en-us/library/dd318693.aspx)
- };
}
export const defaultLanguages: Language[] = [
@@ -198,14 +193,17 @@ export class XLF {
public toString(): string {
this.appendHeader();
- for (let file in this.files) {
+ const files = Object.keys(this.files).sort();
+ for (const file of files) {
this.appendNewLine(`<file original="${file}" source-language="en" datatype="plaintext"><body>`, 2);
- for (let item of this.files[file]) {
+ const items = this.files[file].sort((a: Item, b: Item) => {
+ return a.id < b.id ? -1 : a.id > b.id ? 1 : 0;
+ });
+ for (const item of items) {
this.addStringItem(file, item);
}
- this.appendNewLine('</body></file>', 2);
+ this.appendNewLine('