diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml
index de4cd97d3727..334fd043b6d5 100644
--- a/common/config/rush/pnpm-lock.yaml
+++ b/common/config/rush/pnpm-lock.yaml
@@ -17,6 +17,7 @@ specifiers:
'@rush-temp/arm-containerregistry': file:./projects/arm-containerregistry.tgz
'@rush-temp/arm-containerservice': file:./projects/arm-containerservice.tgz
'@rush-temp/arm-databricks': file:./projects/arm-databricks.tgz
+ '@rush-temp/arm-datafactory': file:./projects/arm-datafactory.tgz
'@rush-temp/arm-desktopvirtualization': file:./projects/arm-desktopvirtualization.tgz
'@rush-temp/arm-eventgrid': file:./projects/arm-eventgrid.tgz
'@rush-temp/arm-eventhub': file:./projects/arm-eventhub.tgz
@@ -168,6 +169,7 @@ dependencies:
'@rush-temp/arm-containerregistry': file:projects/arm-containerregistry.tgz
'@rush-temp/arm-containerservice': file:projects/arm-containerservice.tgz
'@rush-temp/arm-databricks': file:projects/arm-databricks.tgz
+ '@rush-temp/arm-datafactory': file:projects/arm-datafactory.tgz
'@rush-temp/arm-desktopvirtualization': file:projects/arm-desktopvirtualization.tgz
'@rush-temp/arm-eventgrid': file:projects/arm-eventgrid.tgz
'@rush-temp/arm-eventhub': file:projects/arm-eventhub.tgz
@@ -8866,9 +8868,9 @@ packages:
uglify-js: 3.14.2
dev: false
- file:projects/arm-desktopvirtualization.tgz:
- resolution: {integrity: sha512-z1ZpQDQ9WTgpWBQfKTGm9Wd3I/8iDHqaSJPBiEnswAYrp8iVwfC4bsI8g0yLstDwFE8wdm4kP+HdpzrSZMzNEg==, tarball: file:projects/arm-desktopvirtualization.tgz}
- name: '@rush-temp/arm-desktopvirtualization'
+ file:projects/arm-datafactory.tgz:
+ resolution: {integrity: sha512-3EKS8IZlDsApPCN8vKxe5PYYgeDlkuK9tlfw3VRRdiU4VwkGOFeG9Ja+POz0hEqcocWR8dCa18s4MXivCOEtYg==, tarball: file:projects/arm-datafactory.tgz}
+ name: '@rush-temp/arm-datafactory'
version: 0.0.0
dependencies:
'@azure/identity': 2.0.0-beta.6
@@ -8890,6 +8892,27 @@ packages:
- supports-color
dev: false
+ file:projects/arm-desktopvirtualization.tgz:
+ resolution: {integrity: sha512-9tYJwIUDFQDpXDZtTbVerkmrnUcASq7GxOdJx/27I87kpWEHNyHR5MymGEbvWs2Y3wd/9Kz49aR/aVAr7s4LJg==, tarball: file:projects/arm-desktopvirtualization.tgz}
+ name: '@rush-temp/arm-desktopvirtualization'
+ version: 0.0.0
+ dependencies:
+ '@azure/identity': 2.0.0-beta.6
+ '@microsoft/api-extractor': 7.18.17
+ '@rollup/plugin-commonjs': 11.0.2_rollup@1.32.1
+ '@rollup/plugin-json': 4.1.0_rollup@1.32.1
+ '@rollup/plugin-multi-entry': 3.0.1_rollup@1.32.1
+ '@rollup/plugin-node-resolve': 8.4.0_rollup@1.32.1
+ cross-env: 7.0.3
+ mkdirp: 1.0.4
+ mocha: 7.2.0
+ rollup: 1.32.1
+ rollup-plugin-sourcemaps: 0.4.2_rollup@1.32.1
+ tslib: 2.3.1
+ typescript: 4.2.4
+ uglify-js: 3.14.2
+ dev: false
+
file:projects/arm-eventgrid.tgz:
resolution: {integrity: sha512-wRR2xwHMk8CUACJYBJgH9MUekJBJ4J2ilCWvL1L+6t1Gwn4JCOQWKogbFnohaZtujwJFLh1pJC48ZS1XiV9dOQ==, tarball: file:projects/arm-eventgrid.tgz}
name: '@rush-temp/arm-eventgrid'
@@ -11145,7 +11168,7 @@ packages:
dev: false
file:projects/eventgrid.tgz:
- resolution: {integrity: sha512-a7ok5XWs7nBhPSjqBoOpEyIADVTx3DCVxF6IBvx8nI9bQNJAgvMSEFQwQ0vkyqW2Xp9DXhXWBhBOrnx09hldqA==, tarball: file:projects/eventgrid.tgz}
+ resolution: {integrity: sha512-8gU5FFbkNjNAarqfpkhmSf/agfq7aOZLTVsKRv81V7Gan+oKBq76y+uFqN1JXIqJgBo7dFkxqdqL/uDG27rtFQ==, tarball: file:projects/eventgrid.tgz}
name: '@rush-temp/eventgrid'
version: 0.0.0
dependencies:
@@ -12355,7 +12378,7 @@ packages:
dev: false
file:projects/perf-service-bus.tgz:
- resolution: {integrity: sha512-b6AGPVr1lihogGBiieVyO3ohpQBiZG6Ch909QgYl7Ysed0tvD+FaPqnIzmn+Tla579Ia7Z+j9Qz1mv/tUFrknw==, tarball: file:projects/perf-service-bus.tgz}
+ resolution: {integrity: sha512-spRBrMEsacl7wJqZkvLB+qmrs8n5JOqGjO3UNWLBce5R74kW0qyigBcxNXzSNukOMmshoTKvjvQGkmk2EgfJzw==, tarball: file:projects/perf-service-bus.tgz}
name: '@rush-temp/perf-service-bus'
version: 0.0.0
dependencies:
diff --git a/rush.json b/rush.json
index a88c36569058..74e1d3470411 100644
--- a/rush.json
+++ b/rush.json
@@ -1065,6 +1065,11 @@
"packageName": "@azure/arm-desktopvirtualization",
"projectFolder": "sdk/desktopvirtualization/arm-desktopvirtualization",
"versionPolicyName": "management"
+ },
+ {
+ "packageName": "@azure/arm-datafactory",
+ "projectFolder": "sdk/datafactory/arm-datafactory",
+ "versionPolicyName": "management"
}
]
-}
+}
\ No newline at end of file
diff --git a/sdk/datafactory/arm-datafactory/CHANGELOG.md b/sdk/datafactory/arm-datafactory/CHANGELOG.md
new file mode 100644
index 000000000000..fb1566f0aca0
--- /dev/null
+++ b/sdk/datafactory/arm-datafactory/CHANGELOG.md
@@ -0,0 +1,14 @@
+## 9.0.0-beta.1 (2021-11-10)
+
+This is the first preview for the new version of the `@azure/arm-datafactory` package that follows the new [guidelines for TypeScript SDKs](https://azure.github.io/azure-sdk/typescript_introduction.html) for Azure services.
+
+While this package remains auto generated, the SDK generator itself has undergone changes to comply with the above guidelines in order to generate packages that are idiomatic to the JavaScript/TypeScript ecosystem and consistent with other packages for Azure services. For more on this, please see [State of the Azure SDK 2021](https://devblogs.microsoft.com/azure-sdk/state-of-the-azure-sdk-2021/).
+
+Please note that this version has breaking changes, all of which were made after careful consideration during the authoring of the guidelines and user studies.
+
+**Noteworthy changes and features**
+- Authentication: The packages `@azure/ms-rest-nodeauth` or `@azure/ms-rest-browserauth` are no longer supported. Use package [@azure/identity](https://www.npmjs.com/package/@azure/identity) instead. Select a credential from Azure Identity examples based on the authentication method of your choice.
+- Callbacks: Method overloads that used callbacks have been removed and the use of promises is encouraged instead.
+- List operations now return an iterable result that follows the `PagedAsyncIterableIterator` interface as opposed to the previous model where you had to make a new request using the link to the next page.
+- Long-running operations, i.e. the LRO-related objects returned by methods whose names start with `begin`, now use `pollUntilDone` to check whether the request has finished, instead of `pollUntilFinished`. To get the final result directly, use the corresponding method with the suffix `AndWait`.
+- The SDK only supports ECMAScript 2015 (ES6) and beyond, all projects that referenced this SDK should be upgraded to use ES6.
diff --git a/sdk/datafactory/arm-datafactory/LICENSE.txt b/sdk/datafactory/arm-datafactory/LICENSE
similarity index 99%
rename from sdk/datafactory/arm-datafactory/LICENSE.txt
rename to sdk/datafactory/arm-datafactory/LICENSE
index 2d3163745319..ccb63b166732 100644
--- a/sdk/datafactory/arm-datafactory/LICENSE.txt
+++ b/sdk/datafactory/arm-datafactory/LICENSE
@@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+SOFTWARE.
\ No newline at end of file
diff --git a/sdk/datafactory/arm-datafactory/README.md b/sdk/datafactory/arm-datafactory/README.md
index c8855fdbf843..511244915bbc 100644
--- a/sdk/datafactory/arm-datafactory/README.md
+++ b/sdk/datafactory/arm-datafactory/README.md
@@ -1,116 +1,94 @@
-## Azure DataFactoryManagementClient SDK for JavaScript
+# Azure DataFactoryManagement client library for JavaScript
-This package contains an isomorphic SDK (runs both in node.js and in browsers) for DataFactoryManagementClient.
+This package contains an isomorphic SDK (runs both in Node.js and in browsers) for Azure DataFactoryManagement client.
+
+The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services.
+
+[Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/datafactory/arm-datafactory) |
+[Package (NPM)](https://www.npmjs.com/package/@azure/arm-datafactory) |
+[API reference documentation](https://docs.microsoft.com/javascript/api/@azure/arm-datafactory) |
+[Samples](https://github.com/Azure-Samples/azure-samples-js-management)
+
+## Getting started
### Currently supported environments
- [LTS versions of Node.js](https://nodejs.org/about/releases/)
-- Latest versions of Safari, Chrome, Edge, and Firefox.
+- Latest versions of Safari, Chrome, Edge and Firefox.
### Prerequisites
-You must have an [Azure subscription](https://azure.microsoft.com/free/).
+- An [Azure subscription][azure_sub].
-### How to install
+### Install the `@azure/arm-datafactory` package
-To use this SDK in your project, you will need to install two packages.
-
-- `@azure/arm-datafactory` that contains the client.
-- `@azure/identity` that provides different mechanisms for the client to authenticate your requests using Azure Active Directory.
-
-Install both packages using the below command:
+Install the Azure DataFactoryManagement client library for JavaScript with `npm`:
```bash
-npm install --save @azure/arm-datafactory @azure/identity
+npm install @azure/arm-datafactory
```
-> **Note**: You may have used either `@azure/ms-rest-nodeauth` or `@azure/ms-rest-browserauth` in the past. These packages are in maintenance mode receiving critical bug fixes, but no new features.
-> If you are on a [Node.js that has LTS status](https://nodejs.org/about/releases/), or are writing a client side browser application, we strongly encourage you to upgrade to `@azure/identity` which uses the latest versions of Azure Active Directory and MSAL APIs and provides more authentication options.
+### Create and authenticate a `DataFactoryManagementClient`
+
+To create a client object to access the Azure DataFactoryManagement API, you will need the `endpoint` of your Azure DataFactoryManagement resource and a `credential`. The Azure DataFactoryManagement client can use Azure Active Directory credentials to authenticate.
+You can find the endpoint for your Azure DataFactoryManagement resource in the [Azure Portal][azure_portal].
-### How to use
+You can authenticate with Azure Active Directory using a credential from the [@azure/identity][azure_identity] library or [an existing AAD Token](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/identity/identity/samples/AzureIdentityExamples.md#authenticating-with-a-pre-fetched-access-token).
-- If you are writing a client side browser application,
- - Follow the instructions in the section on Authenticating client side browser applications in [Azure Identity examples](https://aka.ms/azsdk/js/identity/examples) to register your application in the Microsoft identity platform and set the right permissions.
- - Copy the client ID and tenant ID from the Overview section of your app registration in Azure portal and use it in the browser sample below.
-- If you are writing a server side application,
- - [Select a credential from `@azure/identity` based on the authentication method of your choice](https://aka.ms/azsdk/js/identity/examples)
- - Complete the set up steps required by the credential if any.
- - Use the credential you picked in the place of `DefaultAzureCredential` in the Node.js sample below.
+To use the [DefaultAzureCredential][defaultazurecredential] provider shown below, or other credential providers provided with the Azure SDK, please install the `@azure/identity` package:
-In the below samples, we pass the credential and the Azure subscription id to instantiate the client.
-Once the client is created, explore the operations on it either in your favorite editor or in our [API reference documentation](https://docs.microsoft.com/javascript/api) to get started.
+```bash
+npm install @azure/identity
+```
-#### nodejs - Authentication, client creation, and list operations as an example written in JavaScript.
+You will also need to **register a new AAD application and grant access to Azure DataFactoryManagement** by assigning the suitable role to your service principal (note: roles such as `"Owner"` will not grant the necessary permissions).
+Set the values of the client ID, tenant ID, and client secret of the AAD application as environment variables: `AZURE_CLIENT_ID`, `AZURE_TENANT_ID`, `AZURE_CLIENT_SECRET`.
-##### Sample code
+For more information about how to create an Azure AD Application check out [this guide](https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal).
```javascript
-const { DefaultAzureCredential } = require("@azure/identity");
const { DataFactoryManagementClient } = require("@azure/arm-datafactory");
-const subscriptionId = process.env["AZURE_SUBSCRIPTION_ID"];
-
-// Use `DefaultAzureCredential` or any other credential of your choice based on https://aka.ms/azsdk/js/identity/examples
-// Please note that you can also use credentials from the `@azure/ms-rest-nodeauth` package instead.
-const creds = new DefaultAzureCredential();
-const client = new DataFactoryManagementClient(creds, subscriptionId);
-client.operations
- .list()
- .then((result) => {
- console.log("The result is:");
- console.log(result);
- })
- .catch((err) => {
- console.log("An error occurred:");
- console.error(err);
- });
+const { DefaultAzureCredential } = require("@azure/identity");
+const subscriptionId = "00000000-0000-0000-0000-000000000000";
+const client = new DataFactoryManagementClient(new DefaultAzureCredential(), subscriptionId);
```
-#### browser - Authentication, client creation, and list operations as an example written in JavaScript.
-
-In browser applications, we recommend using the `InteractiveBrowserCredential` that interactively authenticates using the default system browser.
-
-- See [Single-page application: App registration guide](https://docs.microsoft.com/azure/active-directory/develop/scenario-spa-app-registration) to configure your app registration for the browser.
-- Note down the client Id from the previous step and use it in the browser sample below.
-
-##### Sample code
-
-- index.html
-
-```html
-
-
-
- @azure/arm-datafactory sample
-
-
-
-
-
-
-
+## Key concepts
+
+### DataFactoryManagementClient
+
+`DataFactoryManagementClient` is the primary interface for developers using the Azure DataFactoryManagement client library. Explore the methods on this client object to understand the different features of the Azure DataFactoryManagement service that you can access.
+
+## Troubleshooting
+
+### Logging
+
+Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`:
+
+```javascript
+const { setLogLevel } = require("@azure/logger");
+setLogLevel("info");
```
+For more detailed instructions on how to enable logs, you can look at the [@azure/logger package docs](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger).
+
+## Next steps
+
+Please take a look at the [samples](https://github.com/Azure-Samples/azure-samples-js-management) directory for detailed examples on how to use this library.
+
+## Contributing
+
+If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code.
+
## Related projects
-- [Microsoft Azure SDK for Javascript](https://github.com/Azure/azure-sdk-for-js)
+- [Microsoft Azure SDK for JavaScript](https://github.com/Azure/azure-sdk-for-js)
+
+![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fdatafactory%2Farm-datafactory%2FREADME.png)
-![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js/sdk/datafactory/arm-datafactory/README.png)
+[azure_cli]: https://docs.microsoft.com/cli/azure
+[azure_sub]: https://azure.microsoft.com/free/
+[azure_sub]: https://azure.microsoft.com/free/
+[azure_portal]: https://portal.azure.com
+[azure_identity]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity
+[defaultazurecredential]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity#defaultazurecredential
diff --git a/sdk/datafactory/arm-datafactory/_meta.json b/sdk/datafactory/arm-datafactory/_meta.json
new file mode 100644
index 000000000000..7cdf80443e31
--- /dev/null
+++ b/sdk/datafactory/arm-datafactory/_meta.json
@@ -0,0 +1,7 @@
+{
+ "commit": "dc33f3f6452de718c91fdd4f1572fdd95dea4a43",
+ "readme": "specification/datafactory/resource-manager/readme.md",
+ "autorest_command": "autorest --version=3.1.3 --typescript --modelerfour.lenient-model-deduplication --head-as-boolean=true --license-header=MICROSOFT_MIT_NO_VERSION --generate-test --typescript-sdks-folder=D:\\mydev\\azure-sdk-for-js ../azure-rest-api-specs/specification/datafactory/resource-manager/readme.md --use=@autorest/typescript@6.0.0-beta.14",
+ "repository_url": "https://github.com/Azure/azure-rest-api-specs.git",
+ "use": "@autorest/typescript@6.0.0-beta.14"
+}
\ No newline at end of file
diff --git a/sdk/datafactory/arm-datafactory/api-extractor.json b/sdk/datafactory/arm-datafactory/api-extractor.json
new file mode 100644
index 000000000000..488370c1585e
--- /dev/null
+++ b/sdk/datafactory/arm-datafactory/api-extractor.json
@@ -0,0 +1,18 @@
+{
+ "$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
+ "mainEntryPointFilePath": "./dist-esm/src/index.d.ts",
+ "docModel": { "enabled": true },
+ "apiReport": { "enabled": true, "reportFolder": "./review" },
+ "dtsRollup": {
+ "enabled": true,
+ "untrimmedFilePath": "",
+ "publicTrimmedFilePath": "./types/arm-datafactory.d.ts"
+ },
+ "messages": {
+ "tsdocMessageReporting": { "default": { "logLevel": "none" } },
+ "extractorMessageReporting": {
+ "ae-missing-release-tag": { "logLevel": "none" },
+ "ae-unresolved-link": { "logLevel": "none" }
+ }
+ }
+}
diff --git a/sdk/datafactory/arm-datafactory/package.json b/sdk/datafactory/arm-datafactory/package.json
index b58ebbd9c0e7..ef04c105a371 100644
--- a/sdk/datafactory/arm-datafactory/package.json
+++ b/sdk/datafactory/arm-datafactory/package.json
@@ -1,58 +1,90 @@
{
"name": "@azure/arm-datafactory",
+ "sdk-type": "mgmt",
"author": "Microsoft Corporation",
- "description": "DataFactoryManagementClient Library with typescript type definitions for node.js and browser.",
- "version": "7.8.0",
+ "description": "A generated SDK for DataFactoryManagementClient.",
+ "version": "9.0.0-beta.1",
+ "engines": { "node": ">=12.0.0" },
"dependencies": {
- "@azure/ms-rest-azure-js": "^2.1.0",
- "@azure/ms-rest-js": "^2.2.0",
- "@azure/core-auth": "^1.1.4",
- "tslib": "^1.10.0"
+ "@azure/core-lro": "^2.2.0",
+ "@azure/abort-controller": "^1.0.0",
+ "@azure/core-paging": "^1.2.0",
+ "@azure/core-client": "^1.0.0",
+ "@azure/core-auth": "^1.3.0",
+ "@azure/core-rest-pipeline": "^1.1.0",
+ "tslib": "^2.2.0"
},
- "keywords": [
- "node",
- "azure",
- "typescript",
- "browser",
- "isomorphic"
- ],
+ "keywords": ["node", "azure", "typescript", "browser", "isomorphic"],
"license": "MIT",
- "main": "./dist/arm-datafactory.js",
- "module": "./esm/dataFactoryManagementClient.js",
- "types": "./esm/dataFactoryManagementClient.d.ts",
+ "main": "./dist/index.js",
+ "module": "./dist-esm/src/index.js",
+ "types": "./types/arm-datafactory.d.ts",
"devDependencies": {
- "typescript": "^3.6.0",
- "rollup": "^1.18.0",
- "rollup-plugin-node-resolve": "^5.2.0",
+ "@microsoft/api-extractor": "^7.18.11",
+ "@rollup/plugin-commonjs": "11.0.2",
+ "@rollup/plugin-json": "^4.0.0",
+ "@rollup/plugin-multi-entry": "^3.0.0",
+ "@rollup/plugin-node-resolve": "^8.0.0",
+ "mkdirp": "^1.0.4",
+ "rollup": "^1.16.3",
"rollup-plugin-sourcemaps": "^0.4.2",
- "uglify-js": "^3.6.0"
+ "typescript": "~4.2.0",
+ "uglify-js": "^3.4.9",
+ "@azure/identity": "^2.0.1",
+ "@azure-tools/test-recorder": "^1.0.0",
+ "mocha": "^7.1.1",
+ "cross-env": "^7.0.2"
},
"homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/datafactory/arm-datafactory",
"repository": {
"type": "git",
"url": "https://github.com/Azure/azure-sdk-for-js.git"
},
- "bugs": {
- "url": "https://github.com/Azure/azure-sdk-for-js/issues"
- },
+ "bugs": { "url": "https://github.com/Azure/azure-sdk-for-js/issues" },
"files": [
"dist/**/*.js",
"dist/**/*.js.map",
"dist/**/*.d.ts",
"dist/**/*.d.ts.map",
- "esm/**/*.js",
- "esm/**/*.js.map",
- "esm/**/*.d.ts",
- "esm/**/*.d.ts.map",
+ "dist-esm/**/*.js",
+ "dist-esm/**/*.js.map",
+ "dist-esm/**/*.d.ts",
+ "dist-esm/**/*.d.ts.map",
"src/**/*.ts",
"README.md",
+ "LICENSE",
"rollup.config.js",
- "tsconfig.json"
+ "tsconfig.json",
+ "review/*",
+ "CHANGELOG.md",
+ "types/*"
],
"scripts": {
- "build": "tsc && rollup -c rollup.config.js && npm run minify",
- "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/arm-datafactory.js.map'\" -o ./dist/arm-datafactory.min.js ./dist/arm-datafactory.js",
- "prepack": "npm install && npm run build"
+ "build": "npm run clean && tsc && rollup -c 2>&1 && npm run minify && mkdirp ./review && npm run extract-api",
+ "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/index.js.map'\" -o ./dist/index.min.js ./dist/index.js",
+ "prepack": "npm run build",
+ "pack": "npm pack 2>&1",
+ "extract-api": "api-extractor run --local",
+ "lint": "echo skipped",
+ "audit": "echo skipped",
+ "clean": "echo skipped",
+ "build:node": "echo skipped",
+ "build:browser": "echo skipped",
+ "build:test": "echo skipped",
+ "build:samples": "echo skipped.",
+ "check-format": "echo skipped",
+ "execute:samples": "echo skipped",
+ "format": "echo skipped",
+ "test": "npm run integration-test",
+ "test:node": "echo skipped",
+ "test:browser": "echo skipped",
+ "unit-test": "npm run unit-test:node && npm run unit-test:browser",
+ "unit-test:node": "cross-env TEST_MODE=playback npm run integration-test:node",
+ "unit-test:browser": "echo skipped",
+ "integration-test": "npm run integration-test:node && npm run integration-test:browser",
+ "integration-test:node": "mocha -r esm --require ts-node/register --timeout 1200000 --full-trace test/*.ts",
+ "integration-test:browser": "echo skipped",
+ "docs": "echo skipped"
},
"sideEffects": false,
"autoPublish": true
diff --git a/sdk/datafactory/arm-datafactory/recordings/node/my_test/recording_sample_test.js b/sdk/datafactory/arm-datafactory/recordings/node/my_test/recording_sample_test.js
new file mode 100644
index 000000000000..0f6ecb857acf
--- /dev/null
+++ b/sdk/datafactory/arm-datafactory/recordings/node/my_test/recording_sample_test.js
@@ -0,0 +1,5 @@
+let nock = require('nock');
+
+module.exports.hash = "7a6be38bb8cb644d64b4094802301751";
+
+module.exports.testInfo = {"uniqueName":{},"newDate":{}}
diff --git a/sdk/datafactory/arm-datafactory/review/arm-datafactory.api.md b/sdk/datafactory/arm-datafactory/review/arm-datafactory.api.md
new file mode 100644
index 000000000000..e3acd7fa88c0
--- /dev/null
+++ b/sdk/datafactory/arm-datafactory/review/arm-datafactory.api.md
@@ -0,0 +1,8221 @@
+## API Report File for "@azure/arm-datafactory"
+
+> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/).
+
+```ts
+
+import * as coreAuth from '@azure/core-auth';
+import * as coreClient from '@azure/core-client';
+import { PagedAsyncIterableIterator } from '@azure/core-paging';
+import { PollerLike } from '@azure/core-lro';
+import { PollOperationState } from '@azure/core-lro';
+
+// @public
+export interface AccessPolicyResponse {
+ accessToken?: string;
+ dataPlaneUrl?: string;
+ policy?: UserAccessPolicy;
+}
+
+// @public
+export interface Activity {
+ [property: string]: any;
+ dependsOn?: ActivityDependency[];
+ description?: string;
+ name: string;
+ type: "Container" | "Execution" | "Copy" | "HDInsightHive" | "HDInsightPig" | "HDInsightMapReduce" | "HDInsightStreaming" | "HDInsightSpark" | "ExecuteSSISPackage" | "Custom" | "SqlServerStoredProcedure" | "ExecutePipeline" | "Delete" | "AzureDataExplorerCommand" | "Lookup" | "WebActivity" | "GetMetadata" | "IfCondition" | "Switch" | "ForEach" | "AzureMLBatchExecution" | "AzureMLUpdateResource" | "AzureMLExecutePipeline" | "DataLakeAnalyticsU-SQL" | "Wait" | "Until" | "Validation" | "Filter" | "DatabricksNotebook" | "DatabricksSparkJar" | "DatabricksSparkPython" | "SetVariable" | "AppendVariable" | "AzureFunctionActivity" | "WebHook" | "ExecuteDataFlow" | "ExecuteWranglingDataflow";
+ userProperties?: UserProperty[];
+}
+
+// @public
+export interface ActivityDependency {
+ [property: string]: any;
+ activity: string;
+ dependencyConditions: DependencyCondition[];
+}
+
+// @public
+export interface ActivityPolicy {
+ [property: string]: any;
+ retry?: Record;
+ retryIntervalInSeconds?: number;
+ secureInput?: boolean;
+ secureOutput?: boolean;
+ timeout?: Record;
+}
+
+// @public
+export interface ActivityRun {
+ [property: string]: any;
+ readonly activityName?: string;
+ readonly activityRunEnd?: Date;
+ readonly activityRunId?: string;
+ readonly activityRunStart?: Date;
+ readonly activityType?: string;
+ readonly durationInMs?: number;
+ readonly error?: Record;
+ readonly input?: Record;
+ readonly linkedServiceName?: string;
+ readonly output?: Record;
+ readonly pipelineName?: string;
+ readonly pipelineRunId?: string;
+ readonly status?: string;
+}
+
+// @public
+export interface ActivityRuns {
+ queryByPipelineRun(resourceGroupName: string, factoryName: string, runId: string, filterParameters: RunFilterParameters, options?: ActivityRunsQueryByPipelineRunOptionalParams): Promise;
+}
+
+// @public
+export interface ActivityRunsQueryByPipelineRunOptionalParams extends coreClient.OperationOptions {
+}
+
+// @public
+export type ActivityRunsQueryByPipelineRunResponse = ActivityRunsQueryResponse;
+
+// @public
+export interface ActivityRunsQueryResponse {
+ continuationToken?: string;
+ value: ActivityRun[];
+}
+
+// @public (undocumented)
+export type ActivityUnion = Activity | ControlActivityUnion | ExecutionActivityUnion | ExecuteWranglingDataflowActivity;
+
+// @public
+export interface AddDataFlowToDebugSessionResponse {
+ jobVersion?: string;
+}
+
+// @public
+export interface AdditionalColumns {
+ name?: Record;
+ value?: Record;
+}
+
+// @public
+export type AmazonMWSLinkedService = LinkedService & {
+ type: "AmazonMWS";
+ endpoint: Record;
+ marketplaceID: Record;
+ sellerID: Record;
+ mwsAuthToken?: SecretBaseUnion;
+ accessKeyId: Record;
+ secretKey?: SecretBaseUnion;
+ useEncryptedEndpoints?: Record;
+ useHostVerification?: Record;
+ usePeerVerification?: Record;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AmazonMWSObjectDataset = Dataset & {
+ type: "AmazonMWSObject";
+ tableName?: Record;
+};
+
+// @public
+export type AmazonMWSSource = TabularSource & {
+ type: "AmazonMWSSource";
+ query?: Record;
+};
+
+// @public
+export type AmazonRdsForOracleLinkedService = LinkedService & {
+ type: "AmazonRdsForOracle";
+ connectionString: Record;
+ password?: SecretBaseUnion;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AmazonRdsForOraclePartitionOption = string;
+
+// @public
+export interface AmazonRdsForOraclePartitionSettings {
+ partitionColumnName?: Record;
+ partitionLowerBound?: Record;
+ partitionNames?: Record;
+ partitionUpperBound?: Record;
+}
+
+// @public
+export type AmazonRdsForOracleSource = CopySource & {
+ type: "AmazonRdsForOracleSource";
+ oracleReaderQuery?: Record;
+ queryTimeout?: Record;
+ partitionOption?: Record;
+ partitionSettings?: AmazonRdsForOraclePartitionSettings;
+ additionalColumns?: Record;
+};
+
+// @public
+export type AmazonRdsForOracleTableDataset = Dataset & {
+ type: "AmazonRdsForOracleTable";
+ schemaTypePropertiesSchema?: Record;
+ table?: Record;
+};
+
+// @public
+export type AmazonRdsForSqlServerLinkedService = LinkedService & {
+ type: "AmazonRdsForSqlServer";
+ connectionString: Record;
+ userName?: Record;
+ password?: SecretBaseUnion;
+ encryptedCredential?: Record;
+ alwaysEncryptedSettings?: SqlAlwaysEncryptedProperties;
+};
+
+// @public
+export type AmazonRdsForSqlServerSource = TabularSource & {
+ type: "AmazonRdsForSqlServerSource";
+ sqlReaderQuery?: Record;
+ sqlReaderStoredProcedureName?: Record;
+ storedProcedureParameters?: {
+ [propertyName: string]: StoredProcedureParameter;
+ };
+ produceAdditionalTypes?: Record;
+ partitionOption?: Record;
+ partitionSettings?: SqlPartitionSettings;
+};
+
+// @public
+export type AmazonRdsForSqlServerTableDataset = Dataset & {
+ type: "AmazonRdsForSqlServerTable";
+ schemaTypePropertiesSchema?: Record;
+ table?: Record;
+};
+
+// @public
+export type AmazonRedshiftLinkedService = LinkedService & {
+ type: "AmazonRedshift";
+ server: Record;
+ username?: Record;
+ password?: SecretBaseUnion;
+ database: Record;
+ port?: Record;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AmazonRedshiftSource = TabularSource & {
+ type: "AmazonRedshiftSource";
+ query?: Record;
+ redshiftUnloadSettings?: RedshiftUnloadSettings;
+};
+
+// @public
+export type AmazonRedshiftTableDataset = Dataset & {
+ type: "AmazonRedshiftTable";
+ tableName?: Record;
+ table?: Record;
+ schemaTypePropertiesSchema?: Record;
+};
+
+// @public
+export type AmazonS3CompatibleLinkedService = LinkedService & {
+ type: "AmazonS3Compatible";
+ accessKeyId?: Record;
+ secretAccessKey?: SecretBaseUnion;
+ serviceUrl?: Record;
+ forcePathStyle?: Record;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AmazonS3CompatibleLocation = DatasetLocation & {
+ type: "AmazonS3CompatibleLocation";
+ bucketName?: Record;
+ version?: Record;
+};
+
+// @public
+export type AmazonS3CompatibleReadSettings = StoreReadSettings & {
+ type: "AmazonS3CompatibleReadSettings";
+ recursive?: Record;
+ wildcardFolderPath?: Record;
+ wildcardFileName?: Record;
+ prefix?: Record;
+ fileListPath?: Record;
+ enablePartitionDiscovery?: boolean;
+ partitionRootPath?: Record;
+ deleteFilesAfterCompletion?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+};
+
+// @public
+export type AmazonS3Dataset = Dataset & {
+ type: "AmazonS3Object";
+ bucketName: Record;
+ key?: Record;
+ prefix?: Record;
+ version?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+ format?: DatasetStorageFormatUnion;
+ compression?: DatasetCompression;
+};
+
+// @public
+export type AmazonS3LinkedService = LinkedService & {
+ type: "AmazonS3";
+ authenticationType?: Record;
+ accessKeyId?: Record;
+ secretAccessKey?: SecretBaseUnion;
+ serviceUrl?: Record;
+ sessionToken?: SecretBaseUnion;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AmazonS3Location = DatasetLocation & {
+ type: "AmazonS3Location";
+ bucketName?: Record;
+ version?: Record;
+};
+
+// @public
+export type AmazonS3ReadSettings = StoreReadSettings & {
+ type: "AmazonS3ReadSettings";
+ recursive?: Record;
+ wildcardFolderPath?: Record;
+ wildcardFileName?: Record;
+ prefix?: Record;
+ fileListPath?: Record;
+ enablePartitionDiscovery?: boolean;
+ partitionRootPath?: Record;
+ deleteFilesAfterCompletion?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+};
+
+// @public
+export type AppendVariableActivity = ControlActivity & {
+ type: "AppendVariable";
+ variableName?: string;
+ value?: Record;
+};
+
+// @public
+export interface ArmIdWrapper {
+ readonly id?: string;
+}
+
+// @public
+export type AvroCompressionCodec = string;
+
+// @public
+export type AvroDataset = Dataset & {
+ type: "Avro";
+ location?: DatasetLocationUnion;
+ avroCompressionCodec?: Record;
+ avroCompressionLevel?: number;
+};
+
+// @public
+export type AvroFormat = DatasetStorageFormat & {
+ type: "AvroFormat";
+};
+
+// @public
+export type AvroSink = CopySink & {
+ type: "AvroSink";
+ storeSettings?: StoreWriteSettingsUnion;
+ formatSettings?: AvroWriteSettings;
+};
+
+// @public
+export type AvroSource = CopySource & {
+ type: "AvroSource";
+ storeSettings?: StoreReadSettingsUnion;
+ additionalColumns?: Record;
+};
+
+// @public
+export type AvroWriteSettings = FormatWriteSettings & {
+ type: "AvroWriteSettings";
+ recordName?: string;
+ recordNamespace?: string;
+ maxRowsPerFile?: Record;
+ fileNamePrefix?: Record;
+};
+
+// @public
+export type AzPowerShellSetup = CustomSetupBase & {
+ type: "AzPowerShellSetup";
+ version: string;
+};
+
+// @public
+export type AzureBatchLinkedService = LinkedService & {
+ type: "AzureBatch";
+ accountName: Record;
+ accessKey?: SecretBaseUnion;
+ batchUri: Record;
+ poolName: Record;
+ linkedServiceName: LinkedServiceReference;
+ encryptedCredential?: Record;
+ credential?: CredentialReference;
+};
+
+// @public
+export type AzureBlobDataset = Dataset & {
+ type: "AzureBlob";
+ folderPath?: Record;
+ tableRootLocation?: Record;
+ fileName?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+ format?: DatasetStorageFormatUnion;
+ compression?: DatasetCompression;
+};
+
+// @public
+export type AzureBlobFSDataset = Dataset & {
+ type: "AzureBlobFSFile";
+ folderPath?: Record;
+ fileName?: Record;
+ format?: DatasetStorageFormatUnion;
+ compression?: DatasetCompression;
+};
+
+// @public
+export type AzureBlobFSLinkedService = LinkedService & {
+ type: "AzureBlobFS";
+ url: Record;
+ accountKey?: Record;
+ servicePrincipalId?: Record;
+ servicePrincipalKey?: SecretBaseUnion;
+ tenant?: Record;
+ azureCloudType?: Record;
+ encryptedCredential?: Record;
+ credential?: CredentialReference;
+};
+
+// @public
+export type AzureBlobFSLocation = DatasetLocation & {
+ type: "AzureBlobFSLocation";
+ fileSystem?: Record;
+};
+
+// @public
+export type AzureBlobFSReadSettings = StoreReadSettings & {
+ type: "AzureBlobFSReadSettings";
+ recursive?: Record;
+ wildcardFolderPath?: Record;
+ wildcardFileName?: Record;
+ fileListPath?: Record;
+ enablePartitionDiscovery?: boolean;
+ partitionRootPath?: Record;
+ deleteFilesAfterCompletion?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+};
+
+// @public
+export type AzureBlobFSSink = CopySink & {
+ type: "AzureBlobFSSink";
+ copyBehavior?: Record;
+ metadata?: MetadataItem[];
+};
+
+// @public
+export type AzureBlobFSSource = CopySource & {
+ type: "AzureBlobFSSource";
+ treatEmptyAsNull?: Record;
+ skipHeaderLineCount?: Record;
+ recursive?: Record;
+};
+
+// @public
+export type AzureBlobFSWriteSettings = StoreWriteSettings & {
+ type: "AzureBlobFSWriteSettings";
+ blockSizeInMB?: Record;
+};
+
+// @public
+export type AzureBlobStorageLinkedService = LinkedService & {
+ type: "AzureBlobStorage";
+ connectionString?: Record;
+ accountKey?: AzureKeyVaultSecretReference;
+ sasUri?: Record;
+ sasToken?: AzureKeyVaultSecretReference;
+ serviceEndpoint?: string;
+ servicePrincipalId?: Record;
+ servicePrincipalKey?: SecretBaseUnion;
+ tenant?: Record;
+ azureCloudType?: Record;
+ accountKind?: string;
+ encryptedCredential?: string;
+ credential?: CredentialReference;
+};
+
+// @public
+export type AzureBlobStorageLocation = DatasetLocation & {
+ type: "AzureBlobStorageLocation";
+ container?: Record;
+};
+
+// @public
+export type AzureBlobStorageReadSettings = StoreReadSettings & {
+ type: "AzureBlobStorageReadSettings";
+ recursive?: Record;
+ wildcardFolderPath?: Record;
+ wildcardFileName?: Record;
+ prefix?: Record;
+ fileListPath?: Record;
+ enablePartitionDiscovery?: boolean;
+ partitionRootPath?: Record;
+ deleteFilesAfterCompletion?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+};
+
+// @public
+export type AzureBlobStorageWriteSettings = StoreWriteSettings & {
+ type: "AzureBlobStorageWriteSettings";
+ blockSizeInMB?: Record;
+};
+
+// @public
+export type AzureDatabricksDeltaLakeDataset = Dataset & {
+ type: "AzureDatabricksDeltaLakeDataset";
+ table?: Record;
+ database?: Record;
+};
+
+// @public
+export type AzureDatabricksDeltaLakeExportCommand = ExportSettings & {
+ type: "AzureDatabricksDeltaLakeExportCommand";
+ dateFormat?: Record;
+ timestampFormat?: Record;
+};
+
+// @public
+export type AzureDatabricksDeltaLakeImportCommand = ImportSettings & {
+ type: "AzureDatabricksDeltaLakeImportCommand";
+ dateFormat?: Record;
+ timestampFormat?: Record;
+};
+
+// @public
+export type AzureDatabricksDeltaLakeLinkedService = LinkedService & {
+ type: "AzureDatabricksDeltaLake";
+ domain: Record;
+ accessToken?: SecretBaseUnion;
+ clusterId?: Record;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AzureDatabricksDeltaLakeSink = CopySink & {
+ type: "AzureDatabricksDeltaLakeSink";
+ preCopyScript?: Record;
+ importSettings?: AzureDatabricksDeltaLakeImportCommand;
+};
+
+// @public
+export type AzureDatabricksDeltaLakeSource = CopySource & {
+ type: "AzureDatabricksDeltaLakeSource";
+ query?: Record;
+ exportSettings?: AzureDatabricksDeltaLakeExportCommand;
+};
+
+// @public
+export type AzureDatabricksLinkedService = LinkedService & {
+ type: "AzureDatabricks";
+ domain: Record;
+ accessToken?: SecretBaseUnion;
+ authentication?: Record;
+ workspaceResourceId?: Record;
+ existingClusterId?: Record;
+ instancePoolId?: Record;
+ newClusterVersion?: Record;
+ newClusterNumOfWorker?: Record;
+ newClusterNodeType?: Record;
+ newClusterSparkConf?: {
+ [propertyName: string]: Record;
+ };
+ newClusterSparkEnvVars?: {
+ [propertyName: string]: Record;
+ };
+ newClusterCustomTags?: {
+ [propertyName: string]: Record;
+ };
+ newClusterLogDestination?: Record;
+ newClusterDriverNodeType?: Record;
+ newClusterInitScripts?: Record;
+ newClusterEnableElasticDisk?: Record;
+ encryptedCredential?: Record;
+ policyId?: Record;
+ credential?: CredentialReference;
+};
+
+// @public
+export type AzureDataExplorerCommandActivity = ExecutionActivity & {
+ type: "AzureDataExplorerCommand";
+ command: Record;
+ commandTimeout?: Record;
+};
+
+// @public
+export type AzureDataExplorerLinkedService = LinkedService & {
+ type: "AzureDataExplorer";
+ endpoint: Record;
+ servicePrincipalId?: Record;
+ servicePrincipalKey?: SecretBaseUnion;
+ database: Record;
+ tenant?: Record;
+ credential?: CredentialReference;
+};
+
+// @public
+export type AzureDataExplorerSink = CopySink & {
+ type: "AzureDataExplorerSink";
+ ingestionMappingName?: Record;
+ ingestionMappingAsJson?: Record;
+ flushImmediately?: Record;
+};
+
+// @public
+export type AzureDataExplorerSource = CopySource & {
+ type: "AzureDataExplorerSource";
+ query: Record;
+ noTruncation?: Record;
+ queryTimeout?: Record;
+ additionalColumns?: Record;
+};
+
+// @public
+export type AzureDataExplorerTableDataset = Dataset & {
+ type: "AzureDataExplorerTable";
+ table?: Record;
+};
+
+// @public
+export type AzureDataLakeAnalyticsLinkedService = LinkedService & {
+ type: "AzureDataLakeAnalytics";
+ accountName: Record;
+ servicePrincipalId?: Record;
+ servicePrincipalKey?: SecretBaseUnion;
+ tenant: Record;
+ subscriptionId?: Record;
+ resourceGroupName?: Record;
+ dataLakeAnalyticsUri?: Record;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AzureDataLakeStoreDataset = Dataset & {
+ type: "AzureDataLakeStoreFile";
+ folderPath?: Record;
+ fileName?: Record;
+ format?: DatasetStorageFormatUnion;
+ compression?: DatasetCompression;
+};
+
+// @public
+export type AzureDataLakeStoreLinkedService = LinkedService & {
+ type: "AzureDataLakeStore";
+ dataLakeStoreUri: Record;
+ servicePrincipalId?: Record;
+ servicePrincipalKey?: SecretBaseUnion;
+ tenant?: Record;
+ azureCloudType?: Record;
+ accountName?: Record;
+ subscriptionId?: Record;
+ resourceGroupName?: Record;
+ encryptedCredential?: Record;
+ credential?: CredentialReference;
+};
+
+// @public
+export type AzureDataLakeStoreLocation = DatasetLocation & {
+ type: "AzureDataLakeStoreLocation";
+};
+
+// @public
+export type AzureDataLakeStoreReadSettings = StoreReadSettings & {
+ type: "AzureDataLakeStoreReadSettings";
+ recursive?: Record;
+ wildcardFolderPath?: Record;
+ wildcardFileName?: Record;
+ fileListPath?: Record;
+ listAfter?: Record;
+ listBefore?: Record;
+ enablePartitionDiscovery?: boolean;
+ partitionRootPath?: Record;
+ deleteFilesAfterCompletion?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+};
+
+// @public
+export type AzureDataLakeStoreSink = CopySink & {
+ type: "AzureDataLakeStoreSink";
+ copyBehavior?: Record;
+ enableAdlsSingleFileParallel?: Record;
+};
+
+// @public
+export type AzureDataLakeStoreSource = CopySource & {
+ type: "AzureDataLakeStoreSource";
+ recursive?: Record;
+};
+
+// @public
+export type AzureDataLakeStoreWriteSettings = StoreWriteSettings & {
+ type: "AzureDataLakeStoreWriteSettings";
+ expiryDateTime?: Record;
+};
+
+// @public
+export type AzureFileStorageLinkedService = LinkedService & {
+ type: "AzureFileStorage";
+ host?: Record;
+ userId?: Record;
+ password?: SecretBaseUnion;
+ connectionString?: Record;
+ accountKey?: AzureKeyVaultSecretReference;
+ sasUri?: Record;
+ sasToken?: AzureKeyVaultSecretReference;
+ fileShare?: Record;
+ snapshot?: Record;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AzureFileStorageLocation = DatasetLocation & {
+ type: "AzureFileStorageLocation";
+};
+
+// @public
+export type AzureFileStorageReadSettings = StoreReadSettings & {
+ type: "AzureFileStorageReadSettings";
+ recursive?: Record;
+ wildcardFolderPath?: Record;
+ wildcardFileName?: Record;
+ prefix?: Record;
+ fileListPath?: Record;
+ enablePartitionDiscovery?: boolean;
+ partitionRootPath?: Record;
+ deleteFilesAfterCompletion?: Record;
+ modifiedDatetimeStart?: Record;
+ modifiedDatetimeEnd?: Record;
+};
+
+// @public
+export type AzureFileStorageWriteSettings = StoreWriteSettings & {
+ type: "AzureFileStorageWriteSettings";
+};
+
+// @public
+export type AzureFunctionActivity = ExecutionActivity & {
+ type: "AzureFunctionActivity";
+ method: AzureFunctionActivityMethod;
+ functionName: Record;
+ headers?: Record;
+ body?: Record;
+};
+
+// @public
+export type AzureFunctionActivityMethod = string;
+
+// @public
+export type AzureFunctionLinkedService = LinkedService & {
+ type: "AzureFunction";
+ functionAppUrl: Record;
+ functionKey?: SecretBaseUnion;
+ encryptedCredential?: Record;
+ credential?: CredentialReference;
+ resourceId?: Record;
+ authentication?: Record;
+};
+
+// @public
+export type AzureKeyVaultLinkedService = LinkedService & {
+ type: "AzureKeyVault";
+ baseUrl: Record;
+ credential?: CredentialReference;
+};
+
+// @public
+export type AzureKeyVaultSecretReference = SecretBase & {
+ type: "AzureKeyVaultSecret";
+ store: LinkedServiceReference;
+ secretName: Record;
+ secretVersion?: Record;
+};
+
+// @public
+export type AzureMariaDBLinkedService = LinkedService & {
+ type: "AzureMariaDB";
+ connectionString?: Record;
+ pwd?: AzureKeyVaultSecretReference;
+ encryptedCredential?: Record;
+};
+
+// @public
+export type AzureMariaDBSource = TabularSource & {
+ type: "AzureMariaDBSource";
+ query?: Record;
+};
+
+// @public
+export type AzureMariaDBTableDataset = Dataset & {
+ type: "AzureMariaDBTable";
+ tableName?: Record;
+};
+
+// @public
+export type AzureMLBatchExecutionActivity = ExecutionActivity & {
+ type: "AzureMLBatchExecution";
+ globalParameters?: {
+ [propertyName: string]: Record;
+ };
+ webServiceOutputs?: {
+ [propertyName: string]: AzureMLWebServiceFile;
+ };
+ webServiceInputs?: {
+ [propertyName: string]: AzureMLWebServiceFile;
+ };
+};
+
+// @public
+export type AzureMLExecutePipelineActivity = ExecutionActivity & {
+ type: "AzureMLExecutePipeline";
+ mlPipelineId?: Record;
+ mlPipelineEndpointId?: Record;
+ version?: Record;
+ experimentName?: Record;
+ mlPipelineParameters?: Record;
+ dataPathAssignments?: Record;
+ mlParentRunId?: Record;
+ continueOnStepFailure?: Record;
+};
+
+// @public
+export type AzureMLLinkedService = LinkedService & {
+ type: "AzureML";
+ mlEndpoint: Record;
+ apiKey: SecretBaseUnion;
+ updateResourceEndpoint?: Record;
+ servicePrincipalId?: Record;
+ servicePrincipalKey?: SecretBaseUnion;
+ tenant?: Record;
+ encryptedCredential?: Record;
+ authentication?: Record