diff --git a/workflows_definition/.gitattributes b/workflows_definition/.gitattributes new file mode 100644 index 0000000000..4d75d59008 --- /dev/null +++ b/workflows_definition/.gitattributes @@ -0,0 +1,2 @@ +# This allows generated code to be indexed correctly +*.py linguist-generated=false \ No newline at end of file diff --git a/workflows_definition/.gitignore b/workflows_definition/.gitignore new file mode 100644 index 0000000000..477b772901 --- /dev/null +++ b/workflows_definition/.gitignore @@ -0,0 +1,8 @@ +.venv/ +venv/ +src/*.egg-info/ +__pycache__/ +.pytest_cache/ +.python-version +.DS_Store +pyrightconfig.json diff --git a/workflows_definition/.speakeasy/gen.lock b/workflows_definition/.speakeasy/gen.lock new file mode 100644 index 0000000000..514d9d26ac --- /dev/null +++ b/workflows_definition/.speakeasy/gen.lock @@ -0,0 +1,136 @@ +lockVersion: 2.0.0 +id: 3022dd0c-646e-49db-aa49-adc05b1c9424 +management: + docChecksum: 8a2ff4be25db818a35891d602baa4a25 + docVersion: 1.0.0 + speakeasyVersion: 1.355.0 + generationVersion: 2.387.0 + releaseVersion: 1.3.0 + configChecksum: 799c82ebae524a77160bfaf6b9b97b7a + repoURL: https://github.com/epilot-dev/sdk-python.git + repoSubDirectory: workflows_definition + installationURL: https://github.com/epilot-dev/sdk-python.git#subdirectory=workflows_definition +features: + python: + additionalDependencies: 1.0.0 + core: 5.3.4 + defaultEnabledRetries: 0.2.0 + deprecations: 3.0.0 + envVarSecurityUsage: 0.3.1 + flattening: 3.0.0 + globalSecurity: 3.0.1 + globalSecurityCallbacks: 1.0.0 + globalSecurityFlattening: 1.0.0 + globalServerURLs: 3.0.0 + responseFormat: 1.0.0 + retries: 3.0.0 + sdkHooks: 1.0.0 + unions: 3.0.1 +generatedFiles: + - src/openapi/sdkconfiguration.py + - src/openapi/closing_reason_sdk.py + - src/openapi/workflows.py + - src/openapi/sdk.py + - .vscode/settings.json + - poetry.toml + - py.typed + - pylintrc + - pyproject.toml + - scripts/compile.sh + - scripts/publish.sh + - src/openapi/__init__.py + - src/openapi/basesdk.py + - src/openapi/httpclient.py + - src/openapi/py.typed + - src/openapi/types/__init__.py + - src/openapi/types/basemodel.py + - src/openapi/utils/__init__.py + - src/openapi/utils/annotations.py + - src/openapi/utils/enums.py + - src/openapi/utils/eventstreaming.py + - src/openapi/utils/forms.py + - src/openapi/utils/headers.py + - src/openapi/utils/logger.py + - src/openapi/utils/metadata.py + - src/openapi/utils/queryparams.py + - src/openapi/utils/requestbodies.py + - src/openapi/utils/retries.py + - src/openapi/utils/security.py + - src/openapi/utils/serializers.py + - src/openapi/utils/url.py + - src/openapi/utils/values.py + - src/openapi/models/sdkerror.py + - src/openapi/models/errorresp.py + - src/openapi/models/changereasonstatusop.py + - src/openapi/models/changereasonstatusreq.py + - src/openapi/models/closingreasonsstatus.py + - src/openapi/models/closingreason.py + - src/openapi/models/closingreasons.py + - src/openapi/models/getallclosingreasonsop.py + - src/openapi/models/workflowdefinition.py + - src/openapi/models/updateentityattributes.py + - src/openapi/models/section.py + - src/openapi/models/itemtype.py + - src/openapi/models/step.py + - src/openapi/models/steprequirement.py + - src/openapi/models/stepjourney.py + - src/openapi/models/ecpdetails.py + - src/openapi/models/steptype.py + - src/openapi/models/dynamicduedate.py + - src/openapi/models/stepdescription.py + - src/openapi/models/closingreasonid.py + - src/openapi/models/deletedefinitionop.py + - 
src/openapi/models/definitionnotfoundresp.py + - src/openapi/models/getdefinitionop.py + - src/openapi/models/maxallowedlimit.py + - src/openapi/models/closingreasonsids.py + - src/openapi/models/getworkflowclosingreasonsop.py + - src/openapi/models/setworkflowclosingreasonsop.py + - src/openapi/models/updatedefinitionop.py + - src/openapi/models/security.py + - src/openapi/models/__init__.py + - docs/models/errorresp.md + - docs/models/changereasonstatusrequest.md + - docs/models/changereasonstatusreq.md + - docs/models/closingreasonsstatus.md + - docs/models/closingreason.md + - docs/models/closingreasons.md + - docs/models/getallclosingreasonsrequest.md + - docs/models/flow.md + - docs/models/workflowdefinition.md + - docs/models/source.md + - docs/models/target.md + - docs/models/updateentityattributes.md + - docs/models/section.md + - docs/models/itemtype.md + - docs/models/automationconfig.md + - docs/models/step.md + - docs/models/condition.md + - docs/models/steprequirement.md + - docs/models/stepjourney.md + - docs/models/ecpdetails.md + - docs/models/steptype.md + - docs/models/actiontypecondition.md + - docs/models/timeperiod.md + - docs/models/dynamicduedate.md + - docs/models/stepdescription.md + - docs/models/closingreasonid.md + - docs/models/deletedefinitionrequest.md + - docs/models/definitionnotfoundresp.md + - docs/models/getdefinitionrequest.md + - docs/models/maxallowedlimit.md + - docs/models/closingreasonsids.md + - docs/models/getworkflowclosingreasonsrequest.md + - docs/models/setworkflowclosingreasonsrequest.md + - docs/models/updatedefinitionrequest.md + - docs/models/security.md + - docs/sdks/sdk/README.md + - docs/models/utils/retryconfig.md + - docs/sdks/closingreasonsdk/README.md + - docs/sdks/workflows/README.md + - USAGE.md + - .gitattributes + - src/openapi/_hooks/sdkhooks.py + - src/openapi/_hooks/types.py + - src/openapi/_hooks/__init__.py + - CONTRIBUTING.md diff --git a/workflows_definition/.vscode/settings.json b/workflows_definition/.vscode/settings.json new file mode 100644 index 0000000000..8d79f0abb7 --- /dev/null +++ b/workflows_definition/.vscode/settings.json @@ -0,0 +1,6 @@ +{ + "python.testing.pytestArgs": ["tests", "-vv"], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "pylint.args": ["--rcfile=pylintrc"] +} diff --git a/workflows_definition/CONTRIBUTING.md b/workflows_definition/CONTRIBUTING.md new file mode 100644 index 0000000000..d585717fca --- /dev/null +++ b/workflows_definition/CONTRIBUTING.md @@ -0,0 +1,26 @@ +# Contributing to This Repository + +Thank you for your interest in contributing to this repository. Please note that this repository contains generated code. As such, we do not accept direct changes or pull requests. Instead, we encourage you to follow the guidelines below to report issues and suggest improvements. + +## How to Report Issues + +If you encounter any bugs or have suggestions for improvements, please open an issue on GitHub. When reporting an issue, please provide as much detail as possible to help us reproduce the problem. This includes: + +- A clear and descriptive title +- Steps to reproduce the issue +- Expected and actual behavior +- Any relevant logs, screenshots, or error messages +- Information about your environment (e.g., operating system, software versions) + - For example can be collected using the `npx envinfo` command from your terminal if you have Node.js installed + +## Issue Triage and Upstream Fixes + +We will review and triage issues as quickly as possible. 
Our goal is to address bugs and incorporate improvements in the upstream source code. Fixes will be included in the next generation of the generated code. + +## Contact + +If you have any questions or need further assistance, please feel free to reach out by opening an issue. + +Thank you for your understanding and cooperation! + +The Maintainers diff --git a/workflows_definition/README.md b/workflows_definition/README.md index a1825dc80c..d83ab27b62 100755 --- a/workflows_definition/README.md +++ b/workflows_definition/README.md @@ -1,60 +1,325 @@ # openapi - + ## SDK Installation +PIP ```bash pip install git+https://github.com/epilot-dev/sdk-python.git#subdirectory=workflows_definition ``` - +Poetry +```bash +poetry add git+https://github.com/epilot-dev/sdk-python.git#subdirectory=workflows_definition +``` + + + ## SDK Example Usage - + +### Example + ```python -import sdk -from sdk.models import operations, shared +# Synchronous Example +from openapi import SDK -s = sdk.SDK( - security=shared.Security( - bearer_auth="Bearer YOUR_BEARER_TOKEN_HERE", - ), +s = SDK( + bearer_auth="", ) -req = operations.ChangeReasonStatusRequest( - change_reason_status_req=shared.ChangeReasonStatusReq( - status="INACTIVE", - ), - reason_id="deserunt", +s.closing_reason.change_reason_status(reason_id="") + +# Use the SDK ... +``` + +
+ +The same SDK client can also be used to make asynchronous requests by importing `asyncio`. +```python +# Asynchronous Example +import asyncio +from openapi import SDK + +async def main(): + s = SDK( + bearer_auth="", + ) + await s.closing_reason.change_reason_status_async(reason_id="") + # Use the SDK ... + +asyncio.run(main()) +``` + + + +## Available Resources and Operations + +### [closing_reason](docs/sdks/closingreasonsdk/README.md) + +* [change_reason_status](docs/sdks/closingreasonsdk/README.md#change_reason_status) - changeReasonStatus +* [create_closing_reason](docs/sdks/closingreasonsdk/README.md#create_closing_reason) - createClosingReason +* [get_all_closing_reasons](docs/sdks/closingreasonsdk/README.md#get_all_closing_reasons) - getAllClosingReasons + +### [workflows](docs/sdks/workflows/README.md) + +* [create_definition](docs/sdks/workflows/README.md#create_definition) - createDefinition +* [delete_definition](docs/sdks/workflows/README.md#delete_definition) - deleteDefinition +* [get_definition](docs/sdks/workflows/README.md#get_definition) - getDefinition +* [get_definitions](docs/sdks/workflows/README.md#get_definitions) - getDefinitions +* [get_max_allowed_limit](docs/sdks/workflows/README.md#get_max_allowed_limit) - getMaxAllowedLimit +* [get_workflow_closing_reasons](docs/sdks/workflows/README.md#get_workflow_closing_reasons) - getWorkflowClosingReasons +* [set_workflow_closing_reasons](docs/sdks/workflows/README.md#set_workflow_closing_reasons) - setWorkflowClosingReasons +* [update_definition](docs/sdks/workflows/README.md#update_definition) - updateDefinition + + + +## Retries + +Some of the endpoints in this SDK support retries. If you use the SDK without any configuration, it will fall back to the default retry strategy provided by the API. However, the default retry strategy can be overridden on a per-operation basis, or across the entire SDK. + +To change the default retry strategy for a single API call, provide a `RetryConfig` object to the call via the `retries` parameter: +```python +from openapi import SDK +from openapi.utils import BackoffStrategy, RetryConfig + +s = SDK( + bearer_auth="", ) - -res = s.closing_reason.change_reason_status(req) -if res.status_code == 200: - # handle response + +s.closing_reason.change_reason_status(reason_id="", + retries=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False)) + +# Use the SDK ... + ``` - - -## SDK Available Operations +If you'd like to override the default retry strategy for all operations that support retries, you can use the `retry_config` optional parameter when initializing the SDK: +```python +from openapi import SDK +from openapi.utils import BackoffStrategy, RetryConfig +s = SDK( + retry_config=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False), + bearer_auth="", +) + + +s.closing_reason.change_reason_status(reason_id="") + +# Use the SDK ... + +``` + + + +## Error Handling + +Handling errors in this SDK should largely match your expectations. All operations return a response object or raise an error. If Error objects are specified in your OpenAPI Spec, the SDK will raise the appropriate Error type. 
+ +| Error Object | Status Code | Content Type | +| ---------------- | ---------------- | ---------------- | +| models.ErrorResp | 400,500 | application/json | +| models.SDKError | 4xx-5xx | */* | + +### Example + +```python +from openapi import SDK, models + +s = SDK( + bearer_auth="", +) + + +try: + s.closing_reason.change_reason_status(reason_id="") + +except models.ErrorResp as e: + # handle exception + raise(e) +except models.SDKError as e: + # handle exception + raise(e) + +# Use the SDK ... + +``` + -### closing_reason + +## Server Selection + +### Select Server by Index + +You can override the default server globally by passing a server index to the `server_idx: int` optional parameter when initializing the SDK client instance. The selected server will then be used as the default on the operations that use it. This table lists the indexes associated with the available servers: + +| # | Server | Variables | +| - | ------ | --------- | +| 0 | `https://workflows-definition.sls.epilot.io` | None | + +#### Example + +```python +from openapi import SDK + +s = SDK( + server_idx=0, + bearer_auth="", +) + + +s.closing_reason.change_reason_status(reason_id="") + +# Use the SDK ... + +``` + + +### Override Server URL Per-Client + +The default server can also be overridden globally by passing a URL to the `server_url: str` optional parameter when initializing the SDK client instance. For example: +```python +from openapi import SDK + +s = SDK( + server_url="https://workflows-definition.sls.epilot.io", + bearer_auth="", +) + + +s.closing_reason.change_reason_status(reason_id="") + +# Use the SDK ... + +``` + + + +## Custom HTTP Client + +The Python SDK makes API calls using the [httpx](https://www.python-httpx.org/) HTTP library. To provide a convenient way to configure timeouts, cookies, proxies, custom headers, and other low-level configuration, you can initialize the SDK client with your own HTTP client instance. +Depending on whether you are using the sync or async version of the SDK, you can pass an instance of `HttpClient` or `AsyncHttpClient` respectively; these are Protocols that ensure the client has the necessary methods to make API calls. +This allows you to wrap the client with your own custom logic, such as adding custom headers, logging, or error handling, or you can just pass an instance of `httpx.Client` or `httpx.AsyncClient` directly. 
+ +For example, you could specify a header for every request that this SDK makes as follows: +```python +from openapi import SDK +import httpx + +http_client = httpx.Client(headers={"x-custom-header": "someValue"}) +s = SDK(client=http_client) +``` + +or you could wrap the client with your own custom logic: +```python +from typing import Any, Optional, Union + +from openapi import SDK +from openapi.httpclient import AsyncHttpClient +import httpx + +class CustomClient(AsyncHttpClient): + client: AsyncHttpClient + + def __init__(self, client: AsyncHttpClient): + self.client = client + + async def send( + self, + request: httpx.Request, + *, + stream: bool = False, + auth: Union[ + httpx._types.AuthTypes, httpx._client.UseClientDefault, None + ] = httpx.USE_CLIENT_DEFAULT, + follow_redirects: Union[ + bool, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + ) -> httpx.Response: + request.headers["Client-Level-Header"] = "added by client" + + return await self.client.send( + request, stream=stream, auth=auth, follow_redirects=follow_redirects + ) + + def build_request( + self, + method: str, + url: httpx._types.URLTypes, + *, + content: Optional[httpx._types.RequestContent] = None, + data: Optional[httpx._types.RequestData] = None, + files: Optional[httpx._types.RequestFiles] = None, + json: Optional[Any] = None, + params: Optional[httpx._types.QueryParamTypes] = None, + headers: Optional[httpx._types.HeaderTypes] = None, + cookies: Optional[httpx._types.CookieTypes] = None, + timeout: Union[ + httpx._types.TimeoutTypes, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + extensions: Optional[httpx._types.RequestExtensions] = None, + ) -> httpx.Request: + return self.client.build_request( + method, + url, + content=content, + data=data, + files=files, + json=json, + params=params, + headers=headers, + cookies=cookies, + timeout=timeout, + extensions=extensions, + ) + +s = SDK(async_client=CustomClient(httpx.AsyncClient())) +``` + + + +## Authentication + +### Per-Client Security Schemes + +This SDK supports the following security scheme globally: + +| Name | Type | Scheme | +| ------------- | ------------- | ------------- | +| `bearer_auth` | http | HTTP Bearer | + +To authenticate with the API, the `bearer_auth` parameter must be set when initializing the SDK client instance. For example: +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +s.closing_reason.change_reason_status(reason_id="") + +# Use the SDK ... + +``` + + + +## Debugging + +To emit debug logs for SDK requests and responses, you can pass a logger object directly into your SDK object. 
+ +```python +from openapi import SDK +import logging + +logging.basicConfig(level=logging.DEBUG) +s = SDK(debug_logger=logging.getLogger("openapi")) +``` + -* `change_reason_status` - changeReasonStatus -* `create_closing_reason` - createClosingReason -* `get_all_closing_reasons` - getAllClosingReasons + -### workflows -* `create_definition` - createDefinition -* `delete_definition` - deleteDefinition -* `get_definition` - getDefinition -* `get_definitions` - getDefinitions -* `get_max_allowed_limit` - getMaxAllowedLimit -* `get_workflow_closing_reasons` - getWorkflowClosingReasons -* `set_workflow_closing_reasons` - setWorkflowClosingReasons -* `update_definition` - updateDefinition - ### SDK Generated by [Speakeasy](https://docs.speakeasyapi.dev/docs/using-speakeasy/client-sdks) diff --git a/workflows_definition/RELEASES.md b/workflows_definition/RELEASES.md index 123baccfc1..9056923530 100644 --- a/workflows_definition/RELEASES.md +++ b/workflows_definition/RELEASES.md @@ -34,4 +34,12 @@ Based on: ### Changes Based on: - OpenAPI Doc 1.0.0 https://docs.api.epilot.io/workflows-definition.yaml -- Speakeasy CLI 1.19.2 (2.16.5) https://github.com/speakeasy-api/speakeasy \ No newline at end of file +- Speakeasy CLI 1.19.2 (2.16.5) https://github.com/speakeasy-api/speakeasy + +## 2024-08-07 00:25:42 +### Changes +Based on: +- OpenAPI Doc 1.0.0 https://docs.api.epilot.io/workflows-definition.yaml +- Speakeasy CLI 1.355.0 (2.387.0) https://github.com/speakeasy-api/speakeasy +### Generated +- [python v1.3.0] workflows_definition \ No newline at end of file diff --git a/workflows_definition/USAGE.md b/workflows_definition/USAGE.md old mode 100755 new mode 100644 index bc7b4fadbf..480d35666c --- a/workflows_definition/USAGE.md +++ b/workflows_definition/USAGE.md @@ -1,25 +1,33 @@ - + ```python -import sdk -from sdk.models import operations, shared +# Synchronous Example +from openapi import SDK -s = sdk.SDK( - security=shared.Security( - bearer_auth="Bearer YOUR_BEARER_TOKEN_HERE", - ), +s = SDK( + bearer_auth="", ) -req = operations.ChangeReasonStatusRequest( - change_reason_status_req=shared.ChangeReasonStatusReq( - status="INACTIVE", - ), - reason_id="deserunt", -) - -res = s.closing_reason.change_reason_status(req) +s.closing_reason.change_reason_status(reason_id="") + +# Use the SDK ... +``` + +
+ +The same SDK client can also be used to make asynchronous requests by importing `asyncio`. +```python +# Asynchronous Example +import asyncio +from openapi import SDK + +async def main(): + s = SDK( + bearer_auth="", + ) + await s.closing_reason.change_reason_status_async(reason_id="") + # Use the SDK ... -if res.status_code == 200: - # handle response +asyncio.run(main()) ``` - \ No newline at end of file + \ No newline at end of file diff --git a/workflows_definition/docs/models/actiontypecondition.md b/workflows_definition/docs/models/actiontypecondition.md new file mode 100644 index 0000000000..0b5f24cc8b --- /dev/null +++ b/workflows_definition/docs/models/actiontypecondition.md @@ -0,0 +1,9 @@ +# ActionTypeCondition + + +## Values + +| Name | Value | +| ------------------ | ------------------ | +| `WORKFLOW_STARTED` | WORKFLOW_STARTED | +| `STEP_CLOSED` | STEP_CLOSED | \ No newline at end of file diff --git a/workflows_definition/docs/models/automationconfig.md b/workflows_definition/docs/models/automationconfig.md new file mode 100644 index 0000000000..e0608b94dc --- /dev/null +++ b/workflows_definition/docs/models/automationconfig.md @@ -0,0 +1,8 @@ +# AutomationConfig + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------- | -------------------------------------- | -------------------------------------- | -------------------------------------- | +| `flow_id` | *str* | :heavy_check_mark: | Id of the configured automation to run | \ No newline at end of file diff --git a/workflows_definition/docs/models/changereasonstatusreq.md b/workflows_definition/docs/models/changereasonstatusreq.md new file mode 100644 index 0000000000..206365dc27 --- /dev/null +++ b/workflows_definition/docs/models/changereasonstatusreq.md @@ -0,0 +1,8 @@ +# ChangeReasonStatusReq + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `status` | [models.ClosingReasonsStatus](../models/closingreasonsstatus.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/changereasonstatusrequest.md b/workflows_definition/docs/models/changereasonstatusrequest.md new file mode 100644 index 0000000000..6919fae5fb --- /dev/null +++ b/workflows_definition/docs/models/changereasonstatusrequest.md @@ -0,0 +1,9 @@ +# ChangeReasonStatusRequest + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | ---------------------------------------------------------------------------- | +| `reason_id` | *str* | :heavy_check_mark: | N/A | +| `change_reason_status_req` | [Optional[models.ChangeReasonStatusReq]](../models/changereasonstatusreq.md) | :heavy_minus_sign: | change the status of a closing reason | \ No newline at end of file diff --git a/workflows_definition/docs/models/closingreason.md b/workflows_definition/docs/models/closingreason.md new file mode 100644 index 0000000000..bc86a9557a --- /dev/null +++ b/workflows_definition/docs/models/closingreason.md @@ -0,0 +1,14 @@ +# ClosingReason + +One Closing reason for a 
workflow + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | ---------------------------------------------------------------- | +| `status` | [models.ClosingReasonsStatus](../models/closingreasonsstatus.md) | :heavy_check_mark: | N/A | +| `title` | *str* | :heavy_check_mark: | N/A | +| `creation_time` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `last_update_time` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/closingreasonid.md b/workflows_definition/docs/models/closingreasonid.md new file mode 100644 index 0000000000..3f125cffcc --- /dev/null +++ b/workflows_definition/docs/models/closingreasonid.md @@ -0,0 +1,8 @@ +# ClosingReasonID + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------ | ------------------ | ------------------ | ------------------ | ------------------ | +| `id` | *str* | :heavy_check_mark: | N/A | x739cew | \ No newline at end of file diff --git a/workflows_definition/docs/models/closingreasons.md b/workflows_definition/docs/models/closingreasons.md new file mode 100644 index 0000000000..69cd22140a --- /dev/null +++ b/workflows_definition/docs/models/closingreasons.md @@ -0,0 +1,8 @@ +# ClosingReasons + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `reasons` | List[[models.ClosingReason](../models/closingreason.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/closingreasonsids.md b/workflows_definition/docs/models/closingreasonsids.md new file mode 100644 index 0000000000..c5e24736f9 --- /dev/null +++ b/workflows_definition/docs/models/closingreasonsids.md @@ -0,0 +1,8 @@ +# ClosingReasonsIds + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| `reasons` | List[[models.ClosingReasonID](../models/closingreasonid.md)] | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/closingreasonsstatus.md b/workflows_definition/docs/models/closingreasonsstatus.md new file mode 100644 index 0000000000..b76021aaac --- /dev/null +++ b/workflows_definition/docs/models/closingreasonsstatus.md @@ -0,0 +1,9 @@ +# ClosingReasonsStatus + + +## Values + +| Name | Value | +| ---------- | ---------- | +| `ACTIVE` | ACTIVE | +| `INACTIVE` | INACTIVE | \ No newline at end of file diff --git a/workflows_definition/docs/models/condition.md b/workflows_definition/docs/models/condition.md new file mode 100644 index 0000000000..ddb895be97 --- /dev/null +++ b/workflows_definition/docs/models/condition.md @@ -0,0 +1,8 @@ +# Condition + + +## Values + +| Name | Value | +| -------- | -------- | +| `CLOSED` | CLOSED | \ No newline at end of file diff --git 
a/workflows_definition/docs/models/definitionnotfoundresp.md b/workflows_definition/docs/models/definitionnotfoundresp.md new file mode 100644 index 0000000000..f839d1b4d0 --- /dev/null +++ b/workflows_definition/docs/models/definitionnotfoundresp.md @@ -0,0 +1,10 @@ +# DefinitionNotFoundResp + +Definition could be not found + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `message` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/deletedefinitionrequest.md b/workflows_definition/docs/models/deletedefinitionrequest.md new file mode 100644 index 0000000000..ed9bb335d1 --- /dev/null +++ b/workflows_definition/docs/models/deletedefinitionrequest.md @@ -0,0 +1,8 @@ +# DeleteDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | Example | +| ----------------------------------- | ----------------------------------- | ----------------------------------- | ----------------------------------- | ----------------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | Id of the definition to de deleted. | CustomerRequest | \ No newline at end of file diff --git a/workflows_definition/docs/models/dynamicduedate.md b/workflows_definition/docs/models/dynamicduedate.md new file mode 100644 index 0000000000..7cbf353f5b --- /dev/null +++ b/workflows_definition/docs/models/dynamicduedate.md @@ -0,0 +1,13 @@ +# DynamicDueDate + +set a Duedate for a step then a specific + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | -------------------------------------------------------------- | +| `action_type_condition` | [models.ActionTypeCondition](../models/actiontypecondition.md) | :heavy_check_mark: | N/A | +| `number_of_units` | *float* | :heavy_check_mark: | N/A | +| `time_period` | [models.TimePeriod](../models/timeperiod.md) | :heavy_check_mark: | N/A | +| `step_id` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/ecpdetails.md b/workflows_definition/docs/models/ecpdetails.md new file mode 100644 index 0000000000..7eb2ffb004 --- /dev/null +++ b/workflows_definition/docs/models/ecpdetails.md @@ -0,0 +1,13 @@ +# ECPDetails + +Details regarding ECP for the workflow step + + +## Fields + +| Field | Type | Required | Description | +| -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | -------------------------------------------------------- | +| `description` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `enabled` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `journey` | [Optional[models.StepJourney]](../models/stepjourney.md) | :heavy_minus_sign: | N/A | +| `label` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/errorresp.md b/workflows_definition/docs/models/errorresp.md new file mode 100644 index 0000000000..03b756e06f --- /dev/null +++ b/workflows_definition/docs/models/errorresp.md @@ -0,0 +1,10 @@ +# ErrorResp + +Other errors + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | 
------------------ | ------------------ | ------------------ | +| `message` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/flow.md b/workflows_definition/docs/models/flow.md new file mode 100644 index 0000000000..6dca95de3e --- /dev/null +++ b/workflows_definition/docs/models/flow.md @@ -0,0 +1,17 @@ +# Flow + + +## Supported Types + +### `models.Section` + +```python +value: models.Section = /* values here */ +``` + +### `models.Step` + +```python +value: models.Step = /* values here */ +``` + diff --git a/workflows_definition/docs/models/getallclosingreasonsrequest.md b/workflows_definition/docs/models/getallclosingreasonsrequest.md new file mode 100644 index 0000000000..86888699f2 --- /dev/null +++ b/workflows_definition/docs/models/getallclosingreasonsrequest.md @@ -0,0 +1,8 @@ +# GetAllClosingReasonsRequest + + +## Fields + +| Field | Type | Required | Description | Example | +| ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | ---------------------------------------------------- | +| `include_inactive` | *Optional[bool]* | :heavy_minus_sign: | Filter Closing Reasons by status like active inactiv | true | \ No newline at end of file diff --git a/workflows_definition/docs/models/getdefinitionrequest.md b/workflows_definition/docs/models/getdefinitionrequest.md new file mode 100644 index 0000000000..29699a25ae --- /dev/null +++ b/workflows_definition/docs/models/getdefinitionrequest.md @@ -0,0 +1,8 @@ +# GetDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | Example | +| ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | Short uuid (length 8) to identify the Workflow Definition. 
| 7hj28a | \ No newline at end of file diff --git a/workflows_definition/docs/models/getworkflowclosingreasonsrequest.md b/workflows_definition/docs/models/getworkflowclosingreasonsrequest.md new file mode 100644 index 0000000000..e83a71c030 --- /dev/null +++ b/workflows_definition/docs/models/getworkflowclosingreasonsrequest.md @@ -0,0 +1,8 @@ +# GetWorkflowClosingReasonsRequest + + +## Fields + +| Field | Type | Required | Description | Example | +| --------------------------- | --------------------------- | --------------------------- | --------------------------- | --------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | ID of a workflow definition | fxcwfw | \ No newline at end of file diff --git a/workflows_definition/docs/models/itemtype.md b/workflows_definition/docs/models/itemtype.md new file mode 100644 index 0000000000..9d5135e839 --- /dev/null +++ b/workflows_definition/docs/models/itemtype.md @@ -0,0 +1,9 @@ +# ItemType + + +## Values + +| Name | Value | +| --------- | --------- | +| `STEP` | STEP | +| `SECTION` | SECTION | \ No newline at end of file diff --git a/workflows_definition/docs/models/maxallowedlimit.md b/workflows_definition/docs/models/maxallowedlimit.md new file mode 100644 index 0000000000..9f6a5064d2 --- /dev/null +++ b/workflows_definition/docs/models/maxallowedlimit.md @@ -0,0 +1,9 @@ +# MaxAllowedLimit + + +## Fields + +| Field | Type | Required | Description | +| ------------------------- | ------------------------- | ------------------------- | ------------------------- | +| `current_no_of_workflows` | *Optional[float]* | :heavy_minus_sign: | N/A | +| `max_allowed` | *Optional[float]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/section.md b/workflows_definition/docs/models/section.md new file mode 100644 index 0000000000..bd9c468d2a --- /dev/null +++ b/workflows_definition/docs/models/section.md @@ -0,0 +1,14 @@ +# Section + +A group of Steps that define the progress of the Workflow + + +## Fields + +| Field | Type | Required | Description | +| ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | ---------------------------------------- | +| `name` | *str* | :heavy_check_mark: | N/A | +| `order` | *float* | :heavy_check_mark: | N/A | +| `steps` | List[[models.Step](../models/step.md)] | :heavy_check_mark: | N/A | +| `type` | [models.ItemType](../models/itemtype.md) | :heavy_check_mark: | N/A | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/security.md b/workflows_definition/docs/models/security.md new file mode 100644 index 0000000000..f218fa1efc --- /dev/null +++ b/workflows_definition/docs/models/security.md @@ -0,0 +1,8 @@ +# Security + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `bearer_auth` | *str* | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/setworkflowclosingreasonsrequest.md b/workflows_definition/docs/models/setworkflowclosingreasonsrequest.md new file mode 100644 index 0000000000..a095d1a127 --- /dev/null +++ b/workflows_definition/docs/models/setworkflowclosingreasonsrequest.md @@ -0,0 +1,9 @@ +# SetWorkflowClosingReasonsRequest + + +## Fields + +| Field | Type | Required | Description | Example | +| 
---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | ---------------------------------------------------------- | +| `closing_reasons_ids` | [models.ClosingReasonsIds](../models/closingreasonsids.md) | :heavy_check_mark: | set all closing reasons for a specific definition | | +| `definition_id` | *str* | :heavy_check_mark: | ID of a workflow definition | 7889 | \ No newline at end of file diff --git a/workflows_definition/docs/models/source.md b/workflows_definition/docs/models/source.md new file mode 100644 index 0000000000..06268eff35 --- /dev/null +++ b/workflows_definition/docs/models/source.md @@ -0,0 +1,10 @@ +# Source + + +## Values + +| Name | Value | +| ----------------- | ----------------- | +| `WORKFLOW_STATUS` | workflow_status | +| `CURRENT_SECTION` | current_section | +| `CURRENT_STEP` | current_step | \ No newline at end of file diff --git a/workflows_definition/docs/models/step.md b/workflows_definition/docs/models/step.md new file mode 100644 index 0000000000..0d008e36f9 --- /dev/null +++ b/workflows_definition/docs/models/step.md @@ -0,0 +1,24 @@ +# Step + +Action that needs to be done in a Workflow + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `name` | *str* | :heavy_check_mark: | N/A | | +| `order` | *float* | :heavy_check_mark: | N/A | | +| `type` | [models.ItemType](../models/itemtype.md) | :heavy_check_mark: | N/A | | +| `assigned_to` | List[*str*] | :heavy_minus_sign: | N/A | | +| `automation_config` | [Optional[models.AutomationConfig]](../models/automationconfig.md) | :heavy_minus_sign: | N/A | | +| `description` | [Optional[models.StepDescription]](../models/stepdescription.md) | :heavy_minus_sign: | Longer information regarding Task | | +| `due_date` | *Optional[str]* | :heavy_minus_sign: | N/A | 2021-04-27T12:00:00.000Z | +| `dynamic_due_date` | [Optional[models.DynamicDueDate]](../models/dynamicduedate.md) | :heavy_minus_sign: | set a Duedate for a step then a specific | | +| `ecp` | [Optional[models.ECPDetails]](../models/ecpdetails.md) | :heavy_minus_sign: | Details regarding ECP for the workflow step | | +| `execution_type` | [Optional[models.StepType]](../models/steptype.md) | :heavy_minus_sign: | N/A | | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `installer` | [Optional[models.ECPDetails]](../models/ecpdetails.md) | :heavy_minus_sign: | Details regarding ECP for the workflow step | | +| `journey` | [Optional[models.StepJourney]](../models/stepjourney.md) | :heavy_minus_sign: | N/A | 
| +| `requirements` | List[[models.StepRequirement](../models/steprequirement.md)] | :heavy_minus_sign: | requirements that need to be fulfilled in order to enable the step execution | | +| ~~`user_ids`~~ | List[*float*] | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

This field is deprecated. Please use assignedTo | | \ No newline at end of file diff --git a/workflows_definition/docs/models/stepdescription.md b/workflows_definition/docs/models/stepdescription.md new file mode 100644 index 0000000000..c78ad90af0 --- /dev/null +++ b/workflows_definition/docs/models/stepdescription.md @@ -0,0 +1,11 @@ +# StepDescription + +Longer information regarding Task + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `enabled` | *Optional[bool]* | :heavy_minus_sign: | N/A | +| `value` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/stepjourney.md b/workflows_definition/docs/models/stepjourney.md new file mode 100644 index 0000000000..03111fed33 --- /dev/null +++ b/workflows_definition/docs/models/stepjourney.md @@ -0,0 +1,10 @@ +# StepJourney + + +## Fields + +| Field | Type | Required | Description | +| ------------------ | ------------------ | ------------------ | ------------------ | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `journey_id` | *Optional[str]* | :heavy_minus_sign: | N/A | +| `name` | *Optional[str]* | :heavy_minus_sign: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/steprequirement.md b/workflows_definition/docs/models/steprequirement.md new file mode 100644 index 0000000000..accb956090 --- /dev/null +++ b/workflows_definition/docs/models/steprequirement.md @@ -0,0 +1,12 @@ +# StepRequirement + +describe the requirement for step enablement + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | ------------------------------------------ | +| `condition` | [models.Condition](../models/condition.md) | :heavy_check_mark: | N/A | +| `definition_id` | *str* | :heavy_check_mark: | N/A | +| `type` | [models.ItemType](../models/itemtype.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/steptype.md b/workflows_definition/docs/models/steptype.md new file mode 100644 index 0000000000..c52f47ba5b --- /dev/null +++ b/workflows_definition/docs/models/steptype.md @@ -0,0 +1,9 @@ +# StepType + + +## Values + +| Name | Value | +| ------------ | ------------ | +| `MANUAL` | MANUAL | +| `AUTOMATION` | AUTOMATION | \ No newline at end of file diff --git a/workflows_definition/docs/models/target.md b/workflows_definition/docs/models/target.md new file mode 100644 index 0000000000..d56e270af4 --- /dev/null +++ b/workflows_definition/docs/models/target.md @@ -0,0 +1,9 @@ +# Target + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------ | ------------------ | ------------------ | ------------------ | ------------------ | +| `entity_attribute` | *str* | :heavy_check_mark: | N/A | my_status | +| `entity_schema` | *str* | :heavy_check_mark: | N/A | opportunity | \ No newline at end of file diff --git a/workflows_definition/docs/models/timeperiod.md b/workflows_definition/docs/models/timeperiod.md new file mode 100644 index 0000000000..0c7ea26a69 --- /dev/null +++ b/workflows_definition/docs/models/timeperiod.md @@ -0,0 +1,10 @@ +# TimePeriod + + +## Values + +| Name | Value | +| -------- | -------- | +| `DAYS` | days | +| `WEEKS` | weeks | +| `MONTHS` | months | \ No newline at end of file diff --git 
a/workflows_definition/docs/models/updatedefinitionrequest.md b/workflows_definition/docs/models/updatedefinitionrequest.md new file mode 100644 index 0000000000..35fdd22095 --- /dev/null +++ b/workflows_definition/docs/models/updatedefinitionrequest.md @@ -0,0 +1,9 @@ +# UpdateDefinitionRequest + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| `workflow_definition` | [models.WorkflowDefinition](../models/workflowdefinition.md) | :heavy_check_mark: | Workflow Definition payload | | +| `definition_id` | *str* | :heavy_check_mark: | Short uuid (length 8) to identify the Workflow Definition. | 7hj28a | \ No newline at end of file diff --git a/workflows_definition/docs/models/updateentityattributes.md b/workflows_definition/docs/models/updateentityattributes.md new file mode 100644 index 0000000000..6b25d72925 --- /dev/null +++ b/workflows_definition/docs/models/updateentityattributes.md @@ -0,0 +1,9 @@ +# UpdateEntityAttributes + + +## Fields + +| Field | Type | Required | Description | +| ------------------------------------ | ------------------------------------ | ------------------------------------ | ------------------------------------ | +| `source` | [models.Source](../models/source.md) | :heavy_check_mark: | N/A | +| `target` | [models.Target](../models/target.md) | :heavy_check_mark: | N/A | \ No newline at end of file diff --git a/workflows_definition/docs/models/utils/retryconfig.md b/workflows_definition/docs/models/utils/retryconfig.md new file mode 100644 index 0000000000..69dd549ec7 --- /dev/null +++ b/workflows_definition/docs/models/utils/retryconfig.md @@ -0,0 +1,24 @@ +# RetryConfig + +Allows customizing the default retry configuration. Only usable with methods that mention they support retries. + +## Fields + +| Name | Type | Description | Example | +| ------------------------- | ----------------------------------- | --------------------------------------- | --------- | +| `strategy` | `*str*` | The retry strategy to use. | `backoff` | +| `backoff` | [BackoffStrategy](#backoffstrategy) | Configuration for the backoff strategy. | | +| `retry_connection_errors` | `*bool*` | Whether to retry on connection errors. | `true` | + +## BackoffStrategy + +The backoff strategy allows retrying a request with an exponential backoff between each retry. + +### Fields + +| Name | Type | Description | Example | +| ------------------ | --------- | ----------------------------------------- | -------- | +| `initial_interval` | `*int*` | The initial interval in milliseconds. | `500` | +| `max_interval` | `*int*` | The maximum interval in milliseconds. | `60000` | +| `exponent` | `*float*` | The exponent to use for the backoff. | `1.5` | +| `max_elapsed_time` | `*int*` | The maximum elapsed time in milliseconds. 
| `300000` | \ No newline at end of file diff --git a/workflows_definition/docs/models/workflowdefinition.md b/workflows_definition/docs/models/workflowdefinition.md new file mode 100644 index 0000000000..c97b56c8ea --- /dev/null +++ b/workflows_definition/docs/models/workflowdefinition.md @@ -0,0 +1,20 @@ +# WorkflowDefinition + + +## Fields + +| Field | Type | Required | Description | Example | +| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `flow` | List[[models.Flow](../models/flow.md)] | :heavy_check_mark: | N/A | | +| `name` | *str* | :heavy_check_mark: | N/A | | +| `assigned_to` | List[*str*] | :heavy_minus_sign: | N/A | | +| `closing_reasons` | List[[models.ClosingReasonID](../models/closingreasonid.md)] | :heavy_minus_sign: | N/A | | +| `creation_time` | *Optional[str]* | :heavy_minus_sign: | ISO String Date & Time | 2021-04-27T12:01:13.000Z | +| `description` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `due_date` | *Optional[str]* | :heavy_minus_sign: | N/A | 2021-04-27T12:00:00.000Z | +| `dynamic_due_date` | [Optional[models.DynamicDueDate]](../models/dynamicduedate.md) | :heavy_minus_sign: | set a Duedate for a step then a specific | | +| `enable_ecp_workflow` | *Optional[bool]* | :heavy_minus_sign: | Indicates whether this workflow is available for End Customer Portal or not. By default it's not. | | +| `id` | *Optional[str]* | :heavy_minus_sign: | N/A | | +| `last_update_time` | *Optional[str]* | :heavy_minus_sign: | ISO String Date & Time | 2021-04-27T12:01:13.000Z | +| `update_entity_attributes` | List[[models.UpdateEntityAttributes](../models/updateentityattributes.md)] | :heavy_minus_sign: | N/A | | +| ~~`user_ids`~~ | List[*float*] | :heavy_minus_sign: | : warning: ** DEPRECATED **: This will be removed in a future release, please migrate away from it as soon as possible.

This field is deprecated. Please use assignedTo | | \ No newline at end of file diff --git a/workflows_definition/docs/sdks/closingreasonsdk/README.md b/workflows_definition/docs/sdks/closingreasonsdk/README.md new file mode 100644 index 0000000000..21b6cc8263 --- /dev/null +++ b/workflows_definition/docs/sdks/closingreasonsdk/README.md @@ -0,0 +1,125 @@ +# ClosingReasonSDK +(*closing_reason*) + +### Available Operations + +* [change_reason_status](#change_reason_status) - changeReasonStatus +* [create_closing_reason](#create_closing_reason) - createClosingReason +* [get_all_closing_reasons](#get_all_closing_reasons) - getAllClosingReasons + +## change_reason_status + +Change the status of a Closing Reason (eg. ACTIVE to INACTIVE). + +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +s.closing_reason.change_reason_status(reason_id="") + +# Use the SDK ... + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | +| `reason_id` | *str* | :heavy_check_mark: | N/A | +| `change_reason_status_req` | [Optional[models.ChangeReasonStatusReq]](../../models/changereasonstatusreq.md) | :heavy_minus_sign: | change the status of a closing reason | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + +### Errors + +| Error Object | Status Code | Content Type | +| ---------------- | ---------------- | ---------------- | +| models.ErrorResp | 400,500 | application/json | +| models.SDKError | 4xx-5xx | */* | + +## create_closing_reason + +A created Closing Reason is stored for the organization and will be displayed in the library of reasons. + +### Example Usage + +```python +import openapi +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.closing_reason.create_closing_reason(request={ + "status": openapi.ClosingReasonsStatus.ACTIVE, + "title": "better offer", +}) + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `request` | [models.ClosingReason](../../models/closingreason.md) | :heavy_check_mark: | The request object to use for the request. | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + + +### Response + +**[models.ClosingReason](../../models/closingreason.md)** +### Errors + +| Error Object | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| models.SDKError | 4xx-5xx | */* | + +## get_all_closing_reasons + +Get all Closing Reasons defined in the organization by default all Active. 
+ +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.closing_reason.get_all_closing_reasons(include_inactive=True) + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | Example | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `include_inactive` | *Optional[bool]* | :heavy_minus_sign: | Filter Closing Reasons by status like active inactiv | true | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | | + + +### Response + +**[models.ClosingReasons](../../models/closingreasons.md)** +### Errors + +| Error Object | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| models.SDKError | 4xx-5xx | */* | diff --git a/workflows_definition/docs/sdks/sdk/README.md b/workflows_definition/docs/sdks/sdk/README.md new file mode 100644 index 0000000000..c7cf50bc9f --- /dev/null +++ b/workflows_definition/docs/sdks/sdk/README.md @@ -0,0 +1,10 @@ +# SDK + + +## Overview + +Workflows Definitions: Service for Workflow Definitions for different processes inside of an Organization + + +### Available Operations + diff --git a/workflows_definition/docs/sdks/workflows/README.md b/workflows_definition/docs/sdks/workflows/README.md new file mode 100644 index 0000000000..5fe76c985b --- /dev/null +++ b/workflows_definition/docs/sdks/workflows/README.md @@ -0,0 +1,479 @@ +# Workflows +(*workflows*) + +### Available Operations + +* [create_definition](#create_definition) - createDefinition +* [delete_definition](#delete_definition) - deleteDefinition +* [get_definition](#get_definition) - getDefinition +* [get_definitions](#get_definitions) - getDefinitions +* [get_max_allowed_limit](#get_max_allowed_limit) - getMaxAllowedLimit +* [get_workflow_closing_reasons](#get_workflow_closing_reasons) - getWorkflowClosingReasons +* [set_workflow_closing_reasons](#set_workflow_closing_reasons) - setWorkflowClosingReasons +* [update_definition](#update_definition) - updateDefinition + +## create_definition + +Create a Workflow Definition. 
+ +### Example Usage + +```python +import openapi +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.workflows.create_definition(request={ + "flow": [ + { + "name": "Initial Information Gathering", + "order": 1, + "steps": [ + { + "name": "Call client and confirm address and product", + "order": 1, + "type": openapi.ItemType.STEP, + "assigned_to": [ + "123482", + ], + "due_date": "2021-04-27T12:00:00.000Z", + }, + { + "name": "Check product availability", + "order": 2, + "type": openapi.ItemType.STEP, + "assigned_to": [ + "123482", + ], + "due_date": "2021-04-27T12:00:00.000Z", + }, + { + "name": "Send email confirming contact with the client", + "order": 3, + "type": openapi.ItemType.STEP, + "assigned_to": [ + "123482", + ], + "due_date": "2021-04-27T12:00:00.000Z", + }, + ], + "type": openapi.ItemType.SECTION, + }, + { + "name": "Print and send catalog", + "order": 2, + "steps": [ + { + "name": "", + "order": 4108.47, + "type": openapi.ItemType.STEP, + "due_date": "2021-04-27T12:00:00.000Z", + }, + ], + "type": openapi.ItemType.STEP, + }, + ], + "name": "Lead Qualification", + "assigned_to": [ + "952802", + "80225", + ], + "closing_reasons": [ + { + "id": "x739cew", + }, + ], + "creation_time": "2021-04-27T12:01:13.000Z", + "description": "Lead Qualification description", + "due_date": "2022-08-04T12:00:00.000Z", + "last_update_time": "2021-04-27T12:01:13.000Z", + "update_entity_attributes": [ + { + "source": openapi.Source.CURRENT_SECTION, + "target": { + "entity_attribute": "my_status", + "entity_schema": "opportunity", + }, + }, + ], +}) + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `request` | [models.WorkflowDefinition](../../models/workflowdefinition.md) | :heavy_check_mark: | The request object to use for the request. | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + + +### Response + +**[models.WorkflowDefinition](../../models/workflowdefinition.md)** +### Errors + +| Error Object | Status Code | Content Type | +| ---------------- | ---------------- | ---------------- | +| models.ErrorResp | 400,401,500 | application/json | +| models.SDKError | 4xx-5xx | */* | + +## delete_definition + +Delete Workflow Definition. + +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +s.workflows.delete_definition(definition_id="CustomerRequest") + +# Use the SDK ... + +``` + +### Parameters + +| Parameter | Type | Required | Description | Example | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | Id of the definition to de deleted. 
| CustomerRequest | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | | + +### Errors + +| Error Object | Status Code | Content Type | +| ---------------- | ---------------- | ---------------- | +| models.ErrorResp | 401 | application/json | +| models.SDKError | 4xx-5xx | */* | + +## get_definition + +Get specific Definition by id from the Organization. + +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.workflows.get_definition(definition_id="7hj28a") + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | Example | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | Short uuid (length 8) to identify the Workflow Definition. | 7hj28a | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | | + + +### Response + +**[models.WorkflowDefinition](../../models/workflowdefinition.md)** +### Errors + +| Error Object | Status Code | Content Type | +| ----------------------------- | ----------------------------- | ----------------------------- | +| models.ErrorResp | 400,401,500 | application/json | +| models.DefinitionNotFoundResp | 404 | application/json | +| models.SDKError | 4xx-5xx | */* | + +## get_definitions + +Retrieve all Workflow Definitions from an Organization + +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.workflows.get_definitions() + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + + +### Response + +**[List[models.WorkflowDefinition]](../../models/.md)** +### Errors + +| Error Object | Status Code | Content Type | +| ---------------- | ---------------- | ---------------- | +| models.ErrorResp | 500 | application/json | +| models.SDKError | 4xx-5xx | */* | + +## get_max_allowed_limit + +Get limits and number of created executions for an Organization. 
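Each operation should also have an `_async` counterpart; that pattern is visible for the closing-reason methods in `closing_reason_sdk.py` further down in this diff and is assumed to carry over to the workflows sub-SDK. Below is a minimal sketch of calling the assumed `get_max_allowed_limit_async` variant, mirroring the synchronous call shown under Example Usage below.

```python
import asyncio

from openapi import SDK


async def main():
    s = SDK(
        bearer_auth="",
    )
    # Assumed async counterpart of get_max_allowed_limit (not shown in this
    # diff excerpt); awaiting it should return the same MaxAllowedLimit model.
    res = await s.workflows.get_max_allowed_limit_async()
    if res is not None:
        # handle response
        pass


asyncio.run(main())
```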
+ +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.workflows.get_max_allowed_limit() + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | + + +### Response + +**[models.MaxAllowedLimit](../../models/maxallowedlimit.md)** +### Errors + +| Error Object | Status Code | Content Type | +| ---------------- | ---------------- | ---------------- | +| models.ErrorResp | 500 | application/json | +| models.SDKError | 4xx-5xx | */* | + +## get_workflow_closing_reasons + +Returns all closing reasons defined for the workflow. + +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.workflows.get_workflow_closing_reasons(definition_id="fxcwfw") + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | Example | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | ID of a workflow definition | fxcwfw | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | | + + +### Response + +**[models.ClosingReasonsIds](../../models/closingreasonsids.md)** +### Errors + +| Error Object | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| models.SDKError | 4xx-5xx | */* | + +## set_workflow_closing_reasons + +Sets which closing reasons are defined for this workflow, based on the entire closing reasons catalog. + +### Example Usage + +```python +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +s.workflows.set_workflow_closing_reasons(definition_id="7889", closing_reasons_ids={ + "reasons": [ + { + "id": "x739cew", + }, + ], +}) + +# Use the SDK ... + +``` + +### Parameters + +| Parameter | Type | Required | Description | Example | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | ID of a workflow definition | 7889 | +| `closing_reasons_ids` | [models.ClosingReasonsIds](../../models/closingreasonsids.md) | :heavy_check_mark: | set all closing reasons for a specific definition | | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| | + +### Errors + +| Error Object | Status Code | Content Type | +| --------------- | --------------- | --------------- | +| models.SDKError | 4xx-5xx | */* | + +## update_definition + +Update Workflow Definition. + +### Example Usage + +```python +import openapi +from openapi import SDK + +s = SDK( + bearer_auth="", +) + + +res = s.workflows.update_definition(definition_id="7hj28a", workflow_definition={ + "flow": [ + { + "name": "Initial Information Gathering", + "order": 1, + "steps": [ + { + "name": "Call client and confirm address and product", + "order": 1, + "type": openapi.ItemType.STEP, + "assigned_to": [ + "8988", + ], + "due_date": "2021-04-27T12:00:00.000Z", + "id": "2hja82a", + }, + { + "name": "Check product availability", + "order": 2, + "type": openapi.ItemType.STEP, + "assigned_to": [ + "8988", + ], + "due_date": "2021-04-27T12:00:00.000Z", + "id": "ga92ha2", + }, + { + "name": "Send email confirming contact with the client", + "order": 3, + "type": openapi.ItemType.STEP, + "assigned_to": [ + "8988", + ], + "due_date": "2021-04-27T12:00:00.000Z", + "id": "jga92ha", + }, + ], + "type": openapi.ItemType.SECTION, + "id": "5892na2", + }, + { + "name": "Print and send catalog", + "order": 2, + "steps": [ + { + "name": "", + "order": 4279.02, + "type": openapi.ItemType.SECTION, + "due_date": "2021-04-27T12:00:00.000Z", + }, + ], + "type": openapi.ItemType.STEP, + "id": "0a7g22a", + }, + ], + "name": "Lead Qualification", + "assigned_to": [ + "952802", + "80225", + ], + "closing_reasons": [ + { + "id": "x739cew", + }, + ], + "creation_time": "2021-08-04T21:13:50.373Z", + "due_date": "2022-08-04T12:00:00.000Z", + "id": "25n2k52ja", + "last_update_time": "2021-08-04T21:13:50.373Z", + "update_entity_attributes": [ + { + "source": openapi.Source.CURRENT_STEP, + "target": { + "entity_attribute": "my_status", + "entity_schema": "opportunity", + }, + }, + ], +}) + +if res is not None: + # handle response + pass + +``` + +### Parameters + +| Parameter | Type | Required | Description | Example | +| ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------------------------------------- | +| `definition_id` | *str* | :heavy_check_mark: | Short uuid (length 8) to identify the Workflow Definition. | 7hj28a | +| `workflow_definition` | [models.WorkflowDefinition](../../models/workflowdefinition.md) | :heavy_check_mark: | Workflow Definition payload | | +| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. 
| | + + +### Response + +**[models.WorkflowDefinition](../../models/workflowdefinition.md)** +### Errors + +| Error Object | Status Code | Content Type | +| ---------------- | ---------------- | ---------------- | +| models.ErrorResp | 400,401,500 | application/json | +| models.SDKError | 4xx-5xx | */* | diff --git a/workflows_definition/gen.yaml b/workflows_definition/gen.yaml index 38abb52c11..46cfa0fd2b 100755 --- a/workflows_definition/gen.yaml +++ b/workflows_definition/gen.yaml @@ -1,16 +1,41 @@ -configVersion: 1.0.0 -management: - docChecksum: 3f8335b3dac5dd26e396d579dbc082e7 - docVersion: 1.0.0 - speakeasyVersion: 1.19.2 - generationVersion: 2.16.5 +configVersion: 2.0.0 generation: - telemetryEnabled: false sdkClassName: SDK + usageSnippets: + optionalPropertyRendering: withExample + fixes: + nameResolutionDec2023: false + parameterOrderingFeb2024: false + requestResponseComponentNamesFeb2024: false + auth: + oAuth2ClientCredentialsEnabled: false sdkFlattening: true - singleTagPerOp: false + telemetryEnabled: false python: - version: 1.2.2 + version: 1.3.0 + additionalDependencies: + dev: {} + main: {} author: Speakeasy + authors: + - Speakeasy + clientServerStatusCodesAsErrors: true description: Python Client SDK Generated by Speakeasy + enumFormat: enum + flattenGlobalSecurity: true + flattenRequests: false + imports: + option: openapi + paths: + callbacks: "" + errors: "" + operations: "" + shared: "" + webhooks: "" + inputModelSuffix: input + maxMethodParams: 4 + methodArguments: infer-optional-args + outputModelSuffix: output packageName: openapi + responseFormat: flat + templateVersion: v2 diff --git a/workflows_definition/poetry.toml b/workflows_definition/poetry.toml new file mode 100644 index 0000000000..ab1033bd37 --- /dev/null +++ b/workflows_definition/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/workflows_definition/py.typed b/workflows_definition/py.typed new file mode 100644 index 0000000000..3e38f1a929 --- /dev/null +++ b/workflows_definition/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. The package enables type hints. diff --git a/workflows_definition/pylintrc b/workflows_definition/pylintrc old mode 100755 new mode 100644 index 79b8008d09..5080038628 --- a/workflows_definition/pylintrc +++ b/workflows_definition/pylintrc @@ -59,10 +59,11 @@ ignore-paths= # Emacs file locks ignore-patterns=^\.# -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. +# List of module names for which member attributes should not be checked and +# will not be imported (useful for modules/projects where namespaces are +# manipulated during runtime and thus existing member attributes cannot be +# deduced by static analysis). It supports qualified module names, as well as +# Unix pattern matching. ignored-modules= # Python code to execute, usually for sys.path manipulation such as @@ -88,11 +89,17 @@ persistent=yes # Minimum Python version to use for version dependent checks. Will default to # the version used to run pylint. -py-version=3.9 +py-version=3.8 # Discover python modules and packages in the file system subtree. recursive=no +# Add paths to the list of the source roots. Supports globbing patterns. 
The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots=src + # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes @@ -116,20 +123,15 @@ argument-naming-style=snake_case #argument-rgx= # Naming style matching correct attribute names. -attr-naming-style=snake_case +#attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- # style. If left empty, attribute names will be checked with the set naming # style. -#attr-rgx= +attr-rgx=[^\W\d][^\W]*|__.*__$ # Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata +bad-names= # Bad variable names regexes, separated by a comma. If names match any regex, # they will always be refused @@ -185,6 +187,7 @@ good-names=i, ex, Run, _, + e, id # Good variable names regexes, separated by a comma. If names match any regex, @@ -229,6 +232,10 @@ no-docstring-rgx=^_ # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +typealias-rgx=.* + # Regular expression matching correct type variable names. If left empty, type # variable names will be checked with the set naming style. #typevar-rgx= @@ -251,15 +258,12 @@ check-protected-access-in-special-methods=no defining-attr-methods=__init__, __new__, setUp, + asyncSetUp, __post_init__ # List of member names, which should be excluded from the protected access # warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls @@ -422,6 +426,8 @@ disable=raw-checker-failed, suppressed-message, useless-suppression, deprecated-pragma, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero, use-symbolic-message-instead, trailing-whitespace, line-too-long, @@ -434,18 +440,27 @@ disable=raw-checker-failed, broad-exception-raised, too-few-public-methods, too-many-branches, - chained-comparison, duplicate-code, trailing-newlines, too-many-public-methods, too-many-locals, - too-many-lines + too-many-lines, + using-constant-test, + too-many-statements, + cyclic-import, + too-many-nested-blocks, + too-many-boolean-expressions, + no-else-raise, + bare-except, + broad-exception-caught, + fixme, + relative-beyond-top-level # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member +enable= [METHOD_ARGS] @@ -491,8 +506,9 @@ evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor # used to format the message information. See doc for all details. msg-template= -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. +# Set the output format. 
Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. # mypackage.mymodule.MyReporterClass. #output-format= @@ -526,8 +542,8 @@ min-similarity-lines=4 # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the 'python-enchant' package. +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. spelling-dict= # List of comma separated words that should be considered directives if they @@ -620,7 +636,7 @@ additional-builtins= allow-global-unused-variables=yes # List of names allowed to shadow builtins -allowed-redefined-builtins= +allowed-redefined-builtins=id,object # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. diff --git a/workflows_definition/pyproject.toml b/workflows_definition/pyproject.toml new file mode 100644 index 0000000000..0602b0b3d8 --- /dev/null +++ b/workflows_definition/pyproject.toml @@ -0,0 +1,55 @@ +[tool.poetry] +name = "openapi" +version = "1.3.0" +description = "Python Client SDK Generated by Speakeasy" +authors = ["Speakeasy",] +readme = "README.md" +repository = "https://github.com/epilot-dev/sdk-python.git" +packages = [ + { include = "openapi", from = "src" } +] +include = ["py.typed", "src/openapi/py.typed"] + +[tool.setuptools.package-data] +"*" = ["py.typed", "src/openapi/py.typed"] + +[virtualenvs] +in-project = true + +[tool.poetry.dependencies] +python = "^3.8" +httpx = "^0.27.0" +jsonpath-python = "^1.0.6" +pydantic = "~2.8.2" +python-dateutil = "^2.9.0.post0" +typing-inspect = "^0.9.0" + +[tool.poetry.group.dev.dependencies] +mypy = "==1.10.1" +pylint = "==3.2.3" +pyright = "==1.1.374" +types-python-dateutil = "^2.9.0.20240316" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +pythonpath = ["src"] + +[tool.mypy] +disable_error_code = "misc" + +[[tool.mypy.overrides]] +module = "typing_inspect" +ignore_missing_imports = true + +[[tool.mypy.overrides]] +module = "jsonpath" +ignore_missing_imports = true + +[tool.pyright] +venvPath = "." +venv = ".venv" + + diff --git a/workflows_definition/scripts/compile.sh b/workflows_definition/scripts/compile.sh new file mode 100755 index 0000000000..aa49772e2a --- /dev/null +++ b/workflows_definition/scripts/compile.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash + +set -o pipefail # Ensure pipeline failures are propagated + +# Use temporary files to store outputs and exit statuses +declare -A output_files +declare -A status_files + +# Function to run a command with temporary output and status files +run_command() { + local cmd="$1" + local key="$2" + local output_file="$3" + local status_file="$4" + + # Run the command and store output and exit status + { + eval "$cmd" + echo $? > "$status_file" + } &> "$output_file" & +} + +# Create temporary files for outputs and statuses +for cmd in compileall pylint mypy pyright; do + output_files[$cmd]=$(mktemp) + status_files[$cmd]=$(mktemp) +done + +# Collect PIDs for background processes +declare -a pids + +# Run commands in parallel using temporary files +echo "Running python -m compileall" +run_command 'poetry run python -m compileall -q . 
&& echo "Success"' 'compileall' "${output_files[compileall]}" "${status_files[compileall]}" +pids+=($!) + +echo "Running pylint" +run_command 'poetry run pylint src' 'pylint' "${output_files[pylint]}" "${status_files[pylint]}" +pids+=($!) + +echo "Running mypy" +run_command 'poetry run mypy src' 'mypy' "${output_files[mypy]}" "${status_files[mypy]}" +pids+=($!) + +echo "Running pyright (optional)" +run_command 'if command -v pyright > /dev/null 2>&1; then pyright src; else echo "pyright not found, skipping"; fi' 'pyright' "${output_files[pyright]}" "${status_files[pyright]}" +pids+=($!) + +# Wait for all processes to complete +echo "Waiting for processes to complete" +for pid in "${pids[@]}"; do + wait "$pid" +done + +# Print output sequentially and check for failures +failed=false +for key in "${!output_files[@]}"; do + echo "--- Output from Command: $key ---" + echo + cat "${output_files[$key]}" + echo # Empty line for separation + echo "--- End of Output from Command: $key ---" + echo + + exit_status=$(cat "${status_files[$key]}") + if [ "$exit_status" -ne 0 ]; then + echo "Command $key failed with exit status $exit_status" >&2 + failed=true + fi +done + +# Clean up temporary files +for tmp_file in "${output_files[@]}" "${status_files[@]}"; do + rm -f "$tmp_file" +done + +if $failed; then + echo "One or more commands failed." >&2 + exit 1 +else + echo "All commands completed successfully." + exit 0 +fi diff --git a/workflows_definition/scripts/publish.sh b/workflows_definition/scripts/publish.sh new file mode 100755 index 0000000000..1ee7194cd7 --- /dev/null +++ b/workflows_definition/scripts/publish.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +export POETRY_PYPI_TOKEN_PYPI=${PYPI_TOKEN} + +poetry publish --build --skip-existing diff --git a/workflows_definition/setup.py b/workflows_definition/setup.py deleted file mode 100755 index 997ae148da..0000000000 --- a/workflows_definition/setup.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -import setuptools - -try: - with open("README.md", "r") as fh: - long_description = fh.read() -except FileNotFoundError: - long_description = "" - -setuptools.setup( - name="openapi", - version="1.2.2", - author="Speakeasy", - description="Python Client SDK Generated by Speakeasy", - long_description=long_description, - long_description_content_type="text/markdown", - packages=setuptools.find_packages(where="src"), - install_requires=[ - "certifi==2022.12.07", - "charset-normalizer==2.1.1", - "dataclasses-json-speakeasy==0.5.8", - "idna==3.3", - "marshmallow==3.17.1", - "marshmallow-enum==1.5.1", - "mypy-extensions==0.4.3", - "packaging==21.3", - "pyparsing==3.0.9", - "python-dateutil==2.8.2", - "requests==2.28.1", - "six==1.16.0", - "typing-inspect==0.8.0", - "typing_extensions==4.3.0", - "urllib3==1.26.12", - "pylint==2.16.2", - ], - package_dir={'': 'src'}, - python_requires='>=3.9' -) diff --git a/workflows_definition/src/openapi/__init__.py b/workflows_definition/src/openapi/__init__.py new file mode 100644 index 0000000000..68138c477e --- /dev/null +++ b/workflows_definition/src/openapi/__init__.py @@ -0,0 +1,5 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from .sdk import * +from .sdkconfiguration import * +from .models import * diff --git a/workflows_definition/src/openapi/_hooks/__init__.py b/workflows_definition/src/openapi/_hooks/__init__.py new file mode 100644 index 0000000000..2ee66cdd59 --- /dev/null +++ b/workflows_definition/src/openapi/_hooks/__init__.py @@ -0,0 +1,5 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from .sdkhooks import * +from .types import * +from .registration import * diff --git a/workflows_definition/src/openapi/_hooks/registration.py b/workflows_definition/src/openapi/_hooks/registration.py new file mode 100644 index 0000000000..1db6a52934 --- /dev/null +++ b/workflows_definition/src/openapi/_hooks/registration.py @@ -0,0 +1,13 @@ +from .types import Hooks + + +# This file is only ever generated once on the first generation and then is free to be modified. +# Any hooks you wish to add should be registered in the init_hooks function. Feel free to define them +# in this file or in separate files in the hooks folder. + + +def init_hooks(hooks: Hooks): + # pylint: disable=unused-argument + """Add hooks by calling hooks.register{sdk_init/before_request/after_success/after_error}Hook + with an instance of a hook that implements that specific Hook interface + Hooks are registered per SDK instance, and are valid for the lifetime of the SDK instance""" diff --git a/workflows_definition/src/openapi/_hooks/sdkhooks.py b/workflows_definition/src/openapi/_hooks/sdkhooks.py new file mode 100644 index 0000000000..2897aa9bb2 --- /dev/null +++ b/workflows_definition/src/openapi/_hooks/sdkhooks.py @@ -0,0 +1,57 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +import httpx +from .types import SDKInitHook, BeforeRequestContext, BeforeRequestHook, AfterSuccessContext, AfterSuccessHook, AfterErrorContext, AfterErrorHook, Hooks +from .registration import init_hooks +from typing import List, Optional, Tuple +from openapi.httpclient import HttpClient + +class SDKHooks(Hooks): + def __init__(self) -> None: + self.sdk_init_hooks: List[SDKInitHook] = [] + self.before_request_hooks: List[BeforeRequestHook] = [] + self.after_success_hooks: List[AfterSuccessHook] = [] + self.after_error_hooks: List[AfterErrorHook] = [] + init_hooks(self) + + def register_sdk_init_hook(self, hook: SDKInitHook) -> None: + self.sdk_init_hooks.append(hook) + + def register_before_request_hook(self, hook: BeforeRequestHook) -> None: + self.before_request_hooks.append(hook) + + def register_after_success_hook(self, hook: AfterSuccessHook) -> None: + self.after_success_hooks.append(hook) + + def register_after_error_hook(self, hook: AfterErrorHook) -> None: + self.after_error_hooks.append(hook) + + def sdk_init(self, base_url: str, client: HttpClient) -> Tuple[str, HttpClient]: + for hook in self.sdk_init_hooks: + base_url, client = hook.sdk_init(base_url, client) + return base_url, client + + def before_request(self, hook_ctx: BeforeRequestContext, request: httpx.Request) -> httpx.Request: + for hook in self.before_request_hooks: + out = hook.before_request(hook_ctx, request) + if isinstance(out, Exception): + raise out + request = out + + return request + + def after_success(self, hook_ctx: AfterSuccessContext, response: httpx.Response) -> httpx.Response: + for hook in self.after_success_hooks: + out = hook.after_success(hook_ctx, response) + if isinstance(out, Exception): + raise out + response = out + return response + + def after_error(self, hook_ctx: AfterErrorContext, 
response: Optional[httpx.Response], error: Optional[Exception]) -> Tuple[Optional[httpx.Response], Optional[Exception]]: + for hook in self.after_error_hooks: + result = hook.after_error(hook_ctx, response, error) + if isinstance(result, Exception): + raise result + response, error = result + return response, error diff --git a/workflows_definition/src/openapi/_hooks/types.py b/workflows_definition/src/openapi/_hooks/types.py new file mode 100644 index 0000000000..877c1e55d8 --- /dev/null +++ b/workflows_definition/src/openapi/_hooks/types.py @@ -0,0 +1,76 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + + +from abc import ABC, abstractmethod +import httpx +from openapi.httpclient import HttpClient +from typing import Any, Callable, List, Optional, Tuple, Union + + +class HookContext: + operation_id: str + oauth2_scopes: Optional[List[str]] = None + security_source: Optional[Union[Any, Callable[[], Any]]] = None + + def __init__(self, operation_id: str, oauth2_scopes: Optional[List[str]], security_source: Optional[Union[Any, Callable[[], Any]]]): + self.operation_id = operation_id + self.oauth2_scopes = oauth2_scopes + self.security_source = security_source + + +class BeforeRequestContext(HookContext): + def __init__(self, hook_ctx: HookContext): + super().__init__(hook_ctx.operation_id, hook_ctx.oauth2_scopes, hook_ctx.security_source) + + +class AfterSuccessContext(HookContext): + def __init__(self, hook_ctx: HookContext): + super().__init__(hook_ctx.operation_id, hook_ctx.oauth2_scopes, hook_ctx.security_source) + + + +class AfterErrorContext(HookContext): + def __init__(self, hook_ctx: HookContext): + super().__init__(hook_ctx.operation_id, hook_ctx.oauth2_scopes, hook_ctx.security_source) + + +class SDKInitHook(ABC): + @abstractmethod + def sdk_init(self, base_url: str, client: HttpClient) -> Tuple[str, HttpClient]: + pass + + +class BeforeRequestHook(ABC): + @abstractmethod + def before_request(self, hook_ctx: BeforeRequestContext, request: httpx.Request) -> Union[httpx.Request, Exception]: + pass + + +class AfterSuccessHook(ABC): + @abstractmethod + def after_success(self, hook_ctx: AfterSuccessContext, response: httpx.Response) -> Union[httpx.Response, Exception]: + pass + + +class AfterErrorHook(ABC): + @abstractmethod + def after_error(self, hook_ctx: AfterErrorContext, response: Optional[httpx.Response], error: Optional[Exception]) -> Union[Tuple[Optional[httpx.Response], Optional[Exception]], Exception]: + pass + + +class Hooks(ABC): + @abstractmethod + def register_sdk_init_hook(self, hook: SDKInitHook): + pass + + @abstractmethod + def register_before_request_hook(self, hook: BeforeRequestHook): + pass + + @abstractmethod + def register_after_success_hook(self, hook: AfterSuccessHook): + pass + + @abstractmethod + def register_after_error_hook(self, hook: AfterErrorHook): + pass diff --git a/workflows_definition/src/openapi/basesdk.py b/workflows_definition/src/openapi/basesdk.py new file mode 100644 index 0000000000..185d09b59d --- /dev/null +++ b/workflows_definition/src/openapi/basesdk.py @@ -0,0 +1,253 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from .sdkconfiguration import SDKConfiguration +import httpx +from openapi import models, utils +from openapi._hooks import AfterErrorContext, AfterSuccessContext, BeforeRequestContext +from openapi.utils import RetryConfig, SerializedRequestBody, get_body_content +from typing import Callable, List, Optional, Tuple + +class BaseSDK: + sdk_configuration: SDKConfiguration + + def __init__(self, sdk_config: SDKConfiguration) -> None: + self.sdk_configuration = sdk_config + + def get_url(self, base_url, url_variables): + sdk_url, sdk_variables = self.sdk_configuration.get_server_details() + + if base_url is None: + base_url = sdk_url + + if url_variables is None: + url_variables = sdk_variables + + return utils.template_url(base_url, url_variables) + + def build_request( + self, + method, + path, + base_url, + url_variables, + request, + request_body_required, + request_has_path_params, + request_has_query_params, + user_agent_header, + accept_header_value, + _globals=None, + security=None, + timeout_ms: Optional[int] = None, + get_serialized_body: Optional[ + Callable[[], Optional[SerializedRequestBody]] + ] = None, + url_override: Optional[str] = None, + ) -> httpx.Request: + client = self.sdk_configuration.client + + query_params = {} + + url = url_override + if url is None: + url = utils.generate_url( + self.get_url(base_url, url_variables), + path, + request if request_has_path_params else None, + _globals if request_has_path_params else None, + ) + + query_params = utils.get_query_params( + request if request_has_query_params else None, + _globals if request_has_query_params else None, + ) + + headers = utils.get_headers(request, _globals) + headers["Accept"] = accept_header_value + headers[user_agent_header] = self.sdk_configuration.user_agent + + if security is not None: + if callable(security): + security = security() + + if security is not None: + security_headers, security_query_params = utils.get_security(security) + headers = {**headers, **security_headers} + query_params = {**query_params, **security_query_params} + + serialized_request_body = SerializedRequestBody("application/octet-stream") + if get_serialized_body is not None: + rb = get_serialized_body() + if request_body_required and rb is None: + raise ValueError("request body is required") + + if rb is not None: + serialized_request_body = rb + + if ( + serialized_request_body.media_type is not None + and serialized_request_body.media_type + not in ( + "multipart/form-data", + "multipart/mixed", + ) + ): + headers["content-type"] = serialized_request_body.media_type + + timeout = timeout_ms / 1000 if timeout_ms is not None else None + + return client.build_request( + method, + url, + params=query_params, + content=serialized_request_body.content, + data=serialized_request_body.data, + files=serialized_request_body.files, + headers=headers, + timeout=timeout, + ) + + def do_request( + self, + hook_ctx, + request, + error_status_codes, + stream=False, + retry_config: Optional[Tuple[RetryConfig, List[str]]] = None, + ) -> httpx.Response: + client = self.sdk_configuration.client + logger = self.sdk_configuration.debug_logger + + def do(): + http_res = None + try: + req = self.sdk_configuration.get_hooks().before_request( + BeforeRequestContext(hook_ctx), request + ) + logger.debug( + "Request:\nMethod: %s\nURL: %s\nHeaders: %s\nBody: %s", + req.method, + req.url, + req.headers, + get_body_content(req) + ) + http_res = client.send(req, stream=stream) + except Exception as e: + _, e = 
self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), None, e + ) + if e is not None: + logger.debug("Request Exception", exc_info=True) + raise e + + if http_res is None: + logger.debug("Raising no response SDK error") + raise models.SDKError("No response received") + + logger.debug( + "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", + http_res.status_code, + http_res.url, + http_res.headers, + "" if stream else http_res.text + ) + + if utils.match_status_codes(error_status_codes, http_res.status_code): + result, err = self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), http_res, None + ) + if err is not None: + logger.debug("Request Exception", exc_info=True) + raise err + if result is not None: + http_res = result + else: + logger.debug("Raising unexpected SDK error") + raise models.SDKError("Unexpected error occurred") + + return http_res + + if retry_config is not None: + http_res = utils.retry(do, utils.Retries(retry_config[0], retry_config[1])) + else: + http_res = do() + + if not utils.match_status_codes(error_status_codes, http_res.status_code): + http_res = self.sdk_configuration.get_hooks().after_success( + AfterSuccessContext(hook_ctx), http_res + ) + + return http_res + + async def do_request_async( + self, + hook_ctx, + request, + error_status_codes, + stream=False, + retry_config: Optional[Tuple[RetryConfig, List[str]]] = None, + ) -> httpx.Response: + client = self.sdk_configuration.async_client + logger = self.sdk_configuration.debug_logger + async def do(): + http_res = None + try: + req = self.sdk_configuration.get_hooks().before_request( + BeforeRequestContext(hook_ctx), request + ) + logger.debug( + "Request:\nMethod: %s\nURL: %s\nHeaders: %s\nBody: %s", + req.method, + req.url, + req.headers, + get_body_content(req) + ) + http_res = await client.send(req, stream=stream) + except Exception as e: + _, e = self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), None, e + ) + if e is not None: + logger.debug("Request Exception", exc_info=True) + raise e + + if http_res is None: + logger.debug("Raising no response SDK error") + raise models.SDKError("No response received") + + logger.debug( + "Response:\nStatus Code: %s\nURL: %s\nHeaders: %s\nBody: %s", + http_res.status_code, + http_res.url, + http_res.headers, + "" if stream else http_res.text + ) + + if utils.match_status_codes(error_status_codes, http_res.status_code): + result, err = self.sdk_configuration.get_hooks().after_error( + AfterErrorContext(hook_ctx), http_res, None + ) + if err is not None: + logger.debug("Request Exception", exc_info=True) + raise err + if result is not None: + http_res = result + else: + logger.debug("Raising unexpected SDK error") + raise models.SDKError("Unexpected error occurred") + + return http_res + + if retry_config is not None: + http_res = await utils.retry_async( + do, utils.Retries(retry_config[0], retry_config[1]) + ) + else: + http_res = await do() + + if not utils.match_status_codes(error_status_codes, http_res.status_code): + http_res = self.sdk_configuration.get_hooks().after_success( + AfterSuccessContext(hook_ctx), http_res + ) + + return http_res diff --git a/workflows_definition/src/openapi/closing_reason_sdk.py b/workflows_definition/src/openapi/closing_reason_sdk.py new file mode 100644 index 0000000000..b6f56acef1 --- /dev/null +++ b/workflows_definition/src/openapi/closing_reason_sdk.py @@ -0,0 +1,472 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from .basesdk import BaseSDK +from openapi import models, utils +from openapi._hooks import HookContext +from openapi.types import BaseModel, OptionalNullable, UNSET +from typing import Any, Optional, Union, cast + +class ClosingReasonSDK(BaseSDK): + + + def change_reason_status( + self, *, + reason_id: str, + change_reason_status_req: Optional[Union[models.ChangeReasonStatusReq, models.ChangeReasonStatusReqTypedDict]] = None, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ): + r"""changeReasonStatus + + Change the status of a Closing Reason (eg. ACTIVE to INACTIVE). + + :param reason_id: + :param change_reason_status_req: change the status of a closing reason + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.ChangeReasonStatusRequest( + reason_id=reason_id, + change_reason_status_req=utils.get_pydantic_model(change_reason_status_req, Optional[models.ChangeReasonStatusReq]), + ) + + req = self.build_request( + method="PATCH", + path="/v1/workflows/closing-reasons/{reasonId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.change_reason_status_req, False, True, "json", Optional[models.ChangeReasonStatusReq]), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="changeReasonStatus", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "202", "*"): + return + if utils.match_response(http_res, ["400","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def change_reason_status_async( + self, *, + reason_id: str, + change_reason_status_req: Optional[Union[models.ChangeReasonStatusReq, models.ChangeReasonStatusReqTypedDict]] = None, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ): + r"""changeReasonStatus + + Change the status of a Closing Reason (eg. ACTIVE to INACTIVE). 
+ + :param reason_id: + :param change_reason_status_req: change the status of a closing reason + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.ChangeReasonStatusRequest( + reason_id=reason_id, + change_reason_status_req=utils.get_pydantic_model(change_reason_status_req, Optional[models.ChangeReasonStatusReq]), + ) + + req = self.build_request( + method="PATCH", + path="/v1/workflows/closing-reasons/{reasonId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.change_reason_status_req, False, True, "json", Optional[models.ChangeReasonStatusReq]), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="changeReasonStatus", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "202", "*"): + return + if utils.match_response(http_res, ["400","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def create_closing_reason( + self, *, + request: Union[models.ClosingReason, models.ClosingReasonTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.ClosingReason]: + r"""createClosingReason + + A created Closing Reason is stored for the organization and will be displayed in the library of reasons. + + :param request: The request object to send. 
+ :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, models.ClosingReason) + request = cast(models.ClosingReason, request) + + req = self.build_request( + method="POST", + path="/v1/workflows/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, False, "json", models.ClosingReason), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="createClosingReason", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "201", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.ClosingReason]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def create_closing_reason_async( + self, *, + request: Union[models.ClosingReason, models.ClosingReasonTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.ClosingReason]: + r"""createClosingReason + + A created Closing Reason is stored for the organization and will be displayed in the library of reasons. + + :param request: The request object to send. 
+ :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, models.ClosingReason) + request = cast(models.ClosingReason, request) + + req = self.build_request( + method="POST", + path="/v1/workflows/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, False, "json", models.ClosingReason), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="createClosingReason", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "201", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.ClosingReason]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def get_all_closing_reasons( + self, *, + include_inactive: Optional[bool] = None, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.ClosingReasons]: + r"""getAllClosingReasons + + Get all Closing Reasons defined in the organization by default all Active. 
+ + :param include_inactive: Filter Closing Reasons by status like active inactiv + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetAllClosingReasonsRequest( + include_inactive=include_inactive, + ) + + req = self.build_request( + method="GET", + path="/v1/workflows/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getAllClosingReasons", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.ClosingReasons]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def get_all_closing_reasons_async( + self, *, + include_inactive: Optional[bool] = None, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.ClosingReasons]: + r"""getAllClosingReasons + + Get all Closing Reasons defined in the organization by default all Active. 
+ + :param include_inactive: Filter Closing Reasons by status like active inactiv + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetAllClosingReasonsRequest( + include_inactive=include_inactive, + ) + + req = self.build_request( + method="GET", + path="/v1/workflows/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getAllClosingReasons", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.ClosingReasons]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + diff --git a/workflows_definition/src/openapi/httpclient.py b/workflows_definition/src/openapi/httpclient.py new file mode 100644 index 0000000000..36b642a0eb --- /dev/null +++ b/workflows_definition/src/openapi/httpclient.py @@ -0,0 +1,78 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +# pyright: reportReturnType = false +from typing_extensions import Protocol, runtime_checkable +import httpx +from typing import Any, Optional, Union + + +@runtime_checkable +class HttpClient(Protocol): + def send( + self, + request: httpx.Request, + *, + stream: bool = False, + auth: Union[ + httpx._types.AuthTypes, httpx._client.UseClientDefault, None + ] = httpx.USE_CLIENT_DEFAULT, + follow_redirects: Union[ + bool, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + ) -> httpx.Response: + pass + + def build_request( + self, + method: str, + url: httpx._types.URLTypes, + *, + content: Optional[httpx._types.RequestContent] = None, + data: Optional[httpx._types.RequestData] = None, + files: Optional[httpx._types.RequestFiles] = None, + json: Optional[Any] = None, + params: Optional[httpx._types.QueryParamTypes] = None, + headers: Optional[httpx._types.HeaderTypes] = None, + cookies: Optional[httpx._types.CookieTypes] = None, + timeout: Union[ + httpx._types.TimeoutTypes, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + extensions: Optional[httpx._types.RequestExtensions] = None, + ) -> httpx.Request: + pass + + +@runtime_checkable +class AsyncHttpClient(Protocol): + async def send( + self, + request: httpx.Request, + *, + stream: bool = False, + auth: Union[ + httpx._types.AuthTypes, httpx._client.UseClientDefault, None + ] = httpx.USE_CLIENT_DEFAULT, + follow_redirects: Union[ + bool, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + ) -> httpx.Response: + pass + + def build_request( + self, + method: str, + url: httpx._types.URLTypes, + *, + content: Optional[httpx._types.RequestContent] = None, + data: Optional[httpx._types.RequestData] = None, + files: Optional[httpx._types.RequestFiles] = None, + json: Optional[Any] = None, + params: Optional[httpx._types.QueryParamTypes] = None, + headers: Optional[httpx._types.HeaderTypes] = None, + cookies: Optional[httpx._types.CookieTypes] = None, + timeout: Union[ + httpx._types.TimeoutTypes, httpx._client.UseClientDefault + ] = httpx.USE_CLIENT_DEFAULT, + extensions: Optional[httpx._types.RequestExtensions] = None, + ) -> httpx.Request: + pass diff --git a/workflows_definition/src/openapi/models/__init__.py b/workflows_definition/src/openapi/models/__init__.py new file mode 100644 index 0000000000..463a85dc66 --- /dev/null +++ b/workflows_definition/src/openapi/models/__init__.py @@ -0,0 +1,33 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from .changereasonstatusop import ChangeReasonStatusRequest, ChangeReasonStatusRequestTypedDict +from .changereasonstatusreq import ChangeReasonStatusReq, ChangeReasonStatusReqTypedDict +from .closingreason import ClosingReason, ClosingReasonTypedDict +from .closingreasonid import ClosingReasonID, ClosingReasonIDTypedDict +from .closingreasons import ClosingReasons, ClosingReasonsTypedDict +from .closingreasonsids import ClosingReasonsIds, ClosingReasonsIdsTypedDict +from .closingreasonsstatus import ClosingReasonsStatus +from .definitionnotfoundresp import DefinitionNotFoundResp, DefinitionNotFoundRespData +from .deletedefinitionop import DeleteDefinitionRequest, DeleteDefinitionRequestTypedDict +from .dynamicduedate import ActionTypeCondition, DynamicDueDate, DynamicDueDateTypedDict, TimePeriod +from .ecpdetails import ECPDetails, ECPDetailsTypedDict +from .errorresp import ErrorResp, ErrorRespData +from .getallclosingreasonsop import GetAllClosingReasonsRequest, GetAllClosingReasonsRequestTypedDict +from .getdefinitionop import GetDefinitionRequest, GetDefinitionRequestTypedDict +from .getworkflowclosingreasonsop import GetWorkflowClosingReasonsRequest, GetWorkflowClosingReasonsRequestTypedDict +from .itemtype import ItemType +from .maxallowedlimit import MaxAllowedLimit, MaxAllowedLimitTypedDict +from .sdkerror import SDKError +from .section import Section, SectionTypedDict +from .security import Security, SecurityTypedDict +from .setworkflowclosingreasonsop import SetWorkflowClosingReasonsRequest, SetWorkflowClosingReasonsRequestTypedDict +from .step import AutomationConfig, AutomationConfigTypedDict, Step, StepTypedDict +from .stepdescription import StepDescription, StepDescriptionTypedDict +from .stepjourney import StepJourney, StepJourneyTypedDict +from .steprequirement import Condition, StepRequirement, StepRequirementTypedDict +from .steptype import StepType +from .updatedefinitionop import UpdateDefinitionRequest, UpdateDefinitionRequestTypedDict +from .updateentityattributes import Source, Target, TargetTypedDict, UpdateEntityAttributes, UpdateEntityAttributesTypedDict +from .workflowdefinition import Flow, FlowTypedDict, WorkflowDefinition, WorkflowDefinitionTypedDict + +__all__ = ["ActionTypeCondition", "AutomationConfig", "AutomationConfigTypedDict", "ChangeReasonStatusReq", "ChangeReasonStatusReqTypedDict", "ChangeReasonStatusRequest", "ChangeReasonStatusRequestTypedDict", "ClosingReason", "ClosingReasonID", "ClosingReasonIDTypedDict", "ClosingReasonTypedDict", "ClosingReasons", "ClosingReasonsIds", "ClosingReasonsIdsTypedDict", "ClosingReasonsStatus", "ClosingReasonsTypedDict", "Condition", "DefinitionNotFoundResp", "DefinitionNotFoundRespData", "DeleteDefinitionRequest", "DeleteDefinitionRequestTypedDict", "DynamicDueDate", "DynamicDueDateTypedDict", "ECPDetails", "ECPDetailsTypedDict", "ErrorResp", "ErrorRespData", "Flow", "FlowTypedDict", "GetAllClosingReasonsRequest", "GetAllClosingReasonsRequestTypedDict", "GetDefinitionRequest", "GetDefinitionRequestTypedDict", "GetWorkflowClosingReasonsRequest", "GetWorkflowClosingReasonsRequestTypedDict", "ItemType", "MaxAllowedLimit", "MaxAllowedLimitTypedDict", "SDKError", "Section", "SectionTypedDict", "Security", "SecurityTypedDict", "SetWorkflowClosingReasonsRequest", "SetWorkflowClosingReasonsRequestTypedDict", "Source", "Step", "StepDescription", "StepDescriptionTypedDict", "StepJourney", "StepJourneyTypedDict", "StepRequirement", "StepRequirementTypedDict", "StepType", "StepTypedDict", "Target", 
"TargetTypedDict", "TimePeriod", "UpdateDefinitionRequest", "UpdateDefinitionRequestTypedDict", "UpdateEntityAttributes", "UpdateEntityAttributesTypedDict", "WorkflowDefinition", "WorkflowDefinitionTypedDict"] diff --git a/workflows_definition/src/openapi/models/changereasonstatusop.py b/workflows_definition/src/openapi/models/changereasonstatusop.py new file mode 100644 index 0000000000..1c0328d6fe --- /dev/null +++ b/workflows_definition/src/openapi/models/changereasonstatusop.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .changereasonstatusreq import ChangeReasonStatusReq, ChangeReasonStatusReqTypedDict +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, PathParamMetadata, RequestMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired + + +class ChangeReasonStatusRequestTypedDict(TypedDict): + reason_id: str + change_reason_status_req: NotRequired[ChangeReasonStatusReqTypedDict] + r"""change the status of a closing reason""" + + +class ChangeReasonStatusRequest(BaseModel): + reason_id: Annotated[str, pydantic.Field(alias="reasonId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + change_reason_status_req: Annotated[Optional[ChangeReasonStatusReq], FieldMetadata(request=RequestMetadata(media_type="application/json"))] = None + r"""change the status of a closing reason""" + diff --git a/workflows_definition/src/openapi/models/changereasonstatusreq.py b/workflows_definition/src/openapi/models/changereasonstatusreq.py new file mode 100644 index 0000000000..f3ccdbf577 --- /dev/null +++ b/workflows_definition/src/openapi/models/changereasonstatusreq.py @@ -0,0 +1,15 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .closingreasonsstatus import ClosingReasonsStatus +from openapi.types import BaseModel +from typing import TypedDict + + +class ChangeReasonStatusReqTypedDict(TypedDict): + status: ClosingReasonsStatus + + +class ChangeReasonStatusReq(BaseModel): + status: ClosingReasonsStatus + diff --git a/workflows_definition/src/openapi/models/closingreason.py b/workflows_definition/src/openapi/models/closingreason.py new file mode 100644 index 0000000000..ad0efdb4a6 --- /dev/null +++ b/workflows_definition/src/openapi/models/closingreason.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from .closingreasonsstatus import ClosingReasonsStatus +from openapi.types import BaseModel +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired + + +class ClosingReasonTypedDict(TypedDict): + r"""One Closing reason for a workflow""" + + status: ClosingReasonsStatus + title: str + creation_time: NotRequired[str] + id: NotRequired[str] + last_update_time: NotRequired[str] + + +class ClosingReason(BaseModel): + r"""One Closing reason for a workflow""" + + status: ClosingReasonsStatus + title: str + creation_time: Annotated[Optional[str], pydantic.Field(alias="creationTime")] = None + id: Optional[str] = None + last_update_time: Annotated[Optional[str], pydantic.Field(alias="lastUpdateTime")] = None + diff --git a/workflows_definition/src/openapi/models/closingreasonid.py b/workflows_definition/src/openapi/models/closingreasonid.py new file mode 100644 index 0000000000..30c746f32d --- /dev/null +++ b/workflows_definition/src/openapi/models/closingreasonid.py @@ -0,0 +1,14 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +from typing import TypedDict + + +class ClosingReasonIDTypedDict(TypedDict): + id: str + + +class ClosingReasonID(BaseModel): + id: str + diff --git a/workflows_definition/src/openapi/models/closingreasons.py b/workflows_definition/src/openapi/models/closingreasons.py new file mode 100644 index 0000000000..b3268d4ff9 --- /dev/null +++ b/workflows_definition/src/openapi/models/closingreasons.py @@ -0,0 +1,15 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .closingreason import ClosingReason, ClosingReasonTypedDict +from openapi.types import BaseModel +from typing import List, TypedDict + + +class ClosingReasonsTypedDict(TypedDict): + reasons: List[ClosingReasonTypedDict] + + +class ClosingReasons(BaseModel): + reasons: List[ClosingReason] + diff --git a/workflows_definition/src/openapi/models/closingreasonsids.py b/workflows_definition/src/openapi/models/closingreasonsids.py new file mode 100644 index 0000000000..ffc7a14ea2 --- /dev/null +++ b/workflows_definition/src/openapi/models/closingreasonsids.py @@ -0,0 +1,15 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .closingreasonid import ClosingReasonID, ClosingReasonIDTypedDict +from openapi.types import BaseModel +from typing import List, TypedDict + + +class ClosingReasonsIdsTypedDict(TypedDict): + reasons: List[ClosingReasonIDTypedDict] + + +class ClosingReasonsIds(BaseModel): + reasons: List[ClosingReasonID] + diff --git a/workflows_definition/src/openapi/models/closingreasonsstatus.py b/workflows_definition/src/openapi/models/closingreasonsstatus.py new file mode 100644 index 0000000000..78dab36118 --- /dev/null +++ b/workflows_definition/src/openapi/models/closingreasonsstatus.py @@ -0,0 +1,9 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
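As an illustrative sketch of how these generated pydantic models behave (field values below are made up): snake_case attributes map to their camelCase wire names through aliases, and populate_by_name lets callers construct with either spelling.

from openapi.models import ClosingReason, ClosingReasons, ClosingReasonsStatus

# construct with snake_case attribute names; aliases handle the wire format
reason = ClosingReason(
    status=ClosingReasonsStatus.ACTIVE,
    title="Order cancelled by customer",      # hypothetical example value
    creation_time="2024-01-01T00:00:00Z",     # hypothetical example value
)
payload = ClosingReasons(reasons=[reason])
print(payload.model_dump(mode="json", by_alias=True, exclude_none=True))
# {'reasons': [{'status': 'ACTIVE', 'title': 'Order cancelled by customer', 'creationTime': '2024-01-01T00:00:00Z'}]}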
DO NOT EDIT.""" + +from __future__ import annotations +from enum import Enum + + +class ClosingReasonsStatus(str, Enum): + ACTIVE = "ACTIVE" + INACTIVE = "INACTIVE" diff --git a/workflows_definition/src/openapi/models/definitionnotfoundresp.py b/workflows_definition/src/openapi/models/definitionnotfoundresp.py new file mode 100644 index 0000000000..7b0f58e9bb --- /dev/null +++ b/workflows_definition/src/openapi/models/definitionnotfoundresp.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi import utils +from openapi.types import BaseModel +from typing import Optional + +class DefinitionNotFoundRespData(BaseModel): + message: Optional[str] = None + + + +class DefinitionNotFoundResp(Exception): + r"""Definition could not be found""" + data: DefinitionNotFoundRespData + + def __init__(self, data: DefinitionNotFoundRespData): + self.data = data + + def __str__(self) -> str: + return utils.marshal_json(self.data, DefinitionNotFoundRespData) + diff --git a/workflows_definition/src/openapi/models/deletedefinitionop.py b/workflows_definition/src/openapi/models/deletedefinitionop.py new file mode 100644 index 0000000000..9b723dd1ed --- /dev/null +++ b/workflows_definition/src/openapi/models/deletedefinitionop.py @@ -0,0 +1,19 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, PathParamMetadata +import pydantic +from typing import TypedDict +from typing_extensions import Annotated + + +class DeleteDefinitionRequestTypedDict(TypedDict): + definition_id: str + r"""Id of the definition to be deleted.""" + + +class DeleteDefinitionRequest(BaseModel): + definition_id: Annotated[str, pydantic.Field(alias="definitionId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""Id of the definition to be deleted.""" + diff --git a/workflows_definition/src/openapi/models/dynamicduedate.py b/workflows_definition/src/openapi/models/dynamicduedate.py new file mode 100644 index 0000000000..db7bdba0a6 --- /dev/null +++ b/workflows_definition/src/openapi/models/dynamicduedate.py @@ -0,0 +1,36 @@ +"""Code generated by Speakeasy (https://speakeasy.com).
DO NOT EDIT.""" + +from __future__ import annotations +from enum import Enum +from openapi.types import BaseModel +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired + + +class ActionTypeCondition(str, Enum): + WORKFLOW_STARTED = "WORKFLOW_STARTED" + STEP_CLOSED = "STEP_CLOSED" + +class TimePeriod(str, Enum): + DAYS = "days" + WEEKS = "weeks" + MONTHS = "months" + +class DynamicDueDateTypedDict(TypedDict): + r"""set a Duedate for a step then a specific""" + + action_type_condition: ActionTypeCondition + number_of_units: float + time_period: TimePeriod + step_id: NotRequired[str] + + +class DynamicDueDate(BaseModel): + r"""set a Duedate for a step then a specific""" + + action_type_condition: Annotated[ActionTypeCondition, pydantic.Field(alias="actionTypeCondition")] + number_of_units: Annotated[float, pydantic.Field(alias="numberOfUnits")] + time_period: Annotated[TimePeriod, pydantic.Field(alias="timePeriod")] + step_id: Annotated[Optional[str], pydantic.Field(alias="stepId")] = None + diff --git a/workflows_definition/src/openapi/models/ecpdetails.py b/workflows_definition/src/openapi/models/ecpdetails.py new file mode 100644 index 0000000000..f619a48fab --- /dev/null +++ b/workflows_definition/src/openapi/models/ecpdetails.py @@ -0,0 +1,26 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .stepjourney import StepJourney, StepJourneyTypedDict +from openapi.types import BaseModel +from typing import Optional, TypedDict +from typing_extensions import NotRequired + + +class ECPDetailsTypedDict(TypedDict): + r"""Details regarding ECP for the workflow step""" + + description: NotRequired[str] + enabled: NotRequired[bool] + journey: NotRequired[StepJourneyTypedDict] + label: NotRequired[str] + + +class ECPDetails(BaseModel): + r"""Details regarding ECP for the workflow step""" + + description: Optional[str] = None + enabled: Optional[bool] = None + journey: Optional[StepJourney] = None + label: Optional[str] = None + diff --git a/workflows_definition/src/openapi/models/errorresp.py b/workflows_definition/src/openapi/models/errorresp.py new file mode 100644 index 0000000000..44d87634e4 --- /dev/null +++ b/workflows_definition/src/openapi/models/errorresp.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi import utils +from openapi.types import BaseModel +from typing import Optional + +class ErrorRespData(BaseModel): + message: Optional[str] = None + + + +class ErrorResp(Exception): + r"""Other errors""" + data: ErrorRespData + + def __init__(self, data: ErrorRespData): + self.data = data + + def __str__(self) -> str: + return utils.marshal_json(self.data, ErrorRespData) + diff --git a/workflows_definition/src/openapi/models/getallclosingreasonsop.py b/workflows_definition/src/openapi/models/getallclosingreasonsop.py new file mode 100644 index 0000000000..e809ef6b91 --- /dev/null +++ b/workflows_definition/src/openapi/models/getallclosingreasonsop.py @@ -0,0 +1,19 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
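A short sketch of how DynamicDueDate and its two enums fit together (all values illustrative): the model expresses "N units after a trigger" rather than a fixed date.

from openapi.models import ActionTypeCondition, DynamicDueDate, TimePeriod

due = DynamicDueDate(
    action_type_condition=ActionTypeCondition.STEP_CLOSED,
    number_of_units=3,
    time_period=TimePeriod.DAYS,
    step_id="step-123",   # hypothetical id of the step that starts the countdown
)
print(due.model_dump(mode="json", by_alias=True))
# {'actionTypeCondition': 'STEP_CLOSED', 'numberOfUnits': 3.0, 'timePeriod': 'days', 'stepId': 'step-123'}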
DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, QueryParamMetadata +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired + + +class GetAllClosingReasonsRequestTypedDict(TypedDict): + include_inactive: NotRequired[bool] + r"""Filter Closing Reasons by status, e.g. active or inactive""" + + +class GetAllClosingReasonsRequest(BaseModel): + include_inactive: Annotated[Optional[bool], pydantic.Field(alias="includeInactive"), FieldMetadata(query=QueryParamMetadata(style="form", explode=True))] = None + r"""Filter Closing Reasons by status, e.g. active or inactive""" + diff --git a/workflows_definition/src/openapi/models/getdefinitionop.py b/workflows_definition/src/openapi/models/getdefinitionop.py new file mode 100644 index 0000000000..193709ee30 --- /dev/null +++ b/workflows_definition/src/openapi/models/getdefinitionop.py @@ -0,0 +1,19 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, PathParamMetadata +import pydantic +from typing import TypedDict +from typing_extensions import Annotated + + +class GetDefinitionRequestTypedDict(TypedDict): + definition_id: str + r"""Short uuid (length 8) to identify the Workflow Definition.""" + + +class GetDefinitionRequest(BaseModel): + definition_id: Annotated[str, pydantic.Field(alias="definitionId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""Short uuid (length 8) to identify the Workflow Definition.""" + diff --git a/workflows_definition/src/openapi/models/getworkflowclosingreasonsop.py b/workflows_definition/src/openapi/models/getworkflowclosingreasonsop.py new file mode 100644 index 0000000000..d694b7675e --- /dev/null +++ b/workflows_definition/src/openapi/models/getworkflowclosingreasonsop.py @@ -0,0 +1,19 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, PathParamMetadata +import pydantic +from typing import TypedDict +from typing_extensions import Annotated + + +class GetWorkflowClosingReasonsRequestTypedDict(TypedDict): + definition_id: str + r"""ID of a workflow definition""" + + +class GetWorkflowClosingReasonsRequest(BaseModel): + definition_id: Annotated[str, pydantic.Field(alias="definitionId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""ID of a workflow definition""" + diff --git a/workflows_definition/src/openapi/models/itemtype.py b/workflows_definition/src/openapi/models/itemtype.py new file mode 100644 index 0000000000..e768ac4635 --- /dev/null +++ b/workflows_definition/src/openapi/models/itemtype.py @@ -0,0 +1,9 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from enum import Enum + + +class ItemType(str, Enum): + STEP = "STEP" + SECTION = "SECTION" diff --git a/workflows_definition/src/openapi/models/maxallowedlimit.py b/workflows_definition/src/openapi/models/maxallowedlimit.py new file mode 100644 index 0000000000..14c4881f95 --- /dev/null +++ b/workflows_definition/src/openapi/models/maxallowedlimit.py @@ -0,0 +1,18 @@ +"""Code generated by Speakeasy (https://speakeasy.com).
DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired + + +class MaxAllowedLimitTypedDict(TypedDict): + current_no_of_workflows: NotRequired[float] + max_allowed: NotRequired[float] + + +class MaxAllowedLimit(BaseModel): + current_no_of_workflows: Annotated[Optional[float], pydantic.Field(alias="currentNoOfWorkflows")] = None + max_allowed: Annotated[Optional[float], pydantic.Field(alias="maxAllowed")] = None + diff --git a/workflows_definition/src/openapi/models/sdkerror.py b/workflows_definition/src/openapi/models/sdkerror.py new file mode 100644 index 0000000000..03216cbf51 --- /dev/null +++ b/workflows_definition/src/openapi/models/sdkerror.py @@ -0,0 +1,22 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from dataclasses import dataclass +from typing import Optional +import httpx + + +@dataclass +class SDKError(Exception): + """Represents an error returned by the API.""" + + message: str + status_code: int = -1 + body: str = "" + raw_response: Optional[httpx.Response] = None + + def __str__(self): + body = "" + if len(self.body) > 0: + body = f"\n{self.body}" + + return f"{self.message}: Status {self.status_code}{body}" diff --git a/workflows_definition/src/openapi/models/section.py b/workflows_definition/src/openapi/models/section.py new file mode 100644 index 0000000000..d04d60b125 --- /dev/null +++ b/workflows_definition/src/openapi/models/section.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .itemtype import ItemType +from .step import Step, StepTypedDict +from openapi.types import BaseModel +from typing import List, Optional, TypedDict +from typing_extensions import NotRequired + + +class SectionTypedDict(TypedDict): + r"""A group of Steps that define the progress of the Workflow""" + + name: str + order: float + steps: List[StepTypedDict] + type: ItemType + id: NotRequired[str] + + +class Section(BaseModel): + r"""A group of Steps that define the progress of the Workflow""" + + name: str + order: float + steps: List[Step] + type: ItemType + id: Optional[str] = None + diff --git a/workflows_definition/src/openapi/models/security.py b/workflows_definition/src/openapi/models/security.py new file mode 100644 index 0000000000..62a4572bb3 --- /dev/null +++ b/workflows_definition/src/openapi/models/security.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, SecurityMetadata +from typing import TypedDict +from typing_extensions import Annotated + + +class SecurityTypedDict(TypedDict): + bearer_auth: str + + +class Security(BaseModel): + bearer_auth: Annotated[str, FieldMetadata(security=SecurityMetadata(scheme=True, scheme_type="http", sub_type="bearer", field_name="Authorization"))] + diff --git a/workflows_definition/src/openapi/models/setworkflowclosingreasonsop.py b/workflows_definition/src/openapi/models/setworkflowclosingreasonsop.py new file mode 100644 index 0000000000..6eac37fc36 --- /dev/null +++ b/workflows_definition/src/openapi/models/setworkflowclosingreasonsop.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
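The generated error types are ordinary exceptions, so callers can branch on them. A hedged sketch of the handling pattern (the actual operation call is elided, since the operation methods are defined elsewhere in this changeset):

from openapi.models import DefinitionNotFoundResp, ErrorResp, SDKError

try:
    ...  # invoke a generated operation here
except DefinitionNotFoundResp as err:
    # documented "not found" response, carries a typed data payload
    print("definition not found:", err.data.message)
except ErrorResp as err:
    print("api error:", err.data.message)
except SDKError as err:
    # fallback for undocumented responses, with raw status and body
    print(f"unexpected status {err.status_code}: {err.body}")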
DO NOT EDIT.""" + +from __future__ import annotations +from .closingreasonsids import ClosingReasonsIds, ClosingReasonsIdsTypedDict +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, PathParamMetadata, RequestMetadata +import pydantic +from typing import TypedDict +from typing_extensions import Annotated + + +class SetWorkflowClosingReasonsRequestTypedDict(TypedDict): + closing_reasons_ids: ClosingReasonsIdsTypedDict + r"""set all closing reasons for a specific definition""" + definition_id: str + r"""ID of a workflow definition""" + + +class SetWorkflowClosingReasonsRequest(BaseModel): + closing_reasons_ids: Annotated[ClosingReasonsIds, FieldMetadata(request=RequestMetadata(media_type="application/json"))] + r"""set all closing reasons for a specific definition""" + definition_id: Annotated[str, pydantic.Field(alias="definitionId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""ID of a workflow definition""" + diff --git a/workflows_definition/src/openapi/models/step.py b/workflows_definition/src/openapi/models/step.py new file mode 100644 index 0000000000..56c5978c58 --- /dev/null +++ b/workflows_definition/src/openapi/models/step.py @@ -0,0 +1,77 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .dynamicduedate import DynamicDueDate, DynamicDueDateTypedDict +from .ecpdetails import ECPDetails, ECPDetailsTypedDict +from .itemtype import ItemType +from .stepdescription import StepDescription, StepDescriptionTypedDict +from .stepjourney import StepJourney, StepJourneyTypedDict +from .steprequirement import StepRequirement, StepRequirementTypedDict +from .steptype import StepType +from openapi.types import BaseModel +import pydantic +from typing import List, Optional, TypedDict +from typing_extensions import Annotated, NotRequired + + +class AutomationConfigTypedDict(TypedDict): + flow_id: str + r"""Id of the configured automation to run""" + + +class AutomationConfig(BaseModel): + flow_id: Annotated[str, pydantic.Field(alias="flowId")] + r"""Id of the configured automation to run""" + + +class StepTypedDict(TypedDict): + r"""Action that needs to be done in a Workflow""" + + name: str + order: float + type: ItemType + assigned_to: NotRequired[List[str]] + automation_config: NotRequired[AutomationConfigTypedDict] + description: NotRequired[StepDescriptionTypedDict] + r"""Longer information regarding Task""" + due_date: NotRequired[str] + dynamic_due_date: NotRequired[DynamicDueDateTypedDict] + r"""set a Duedate for a step then a specific""" + ecp: NotRequired[ECPDetailsTypedDict] + r"""Details regarding ECP for the workflow step""" + execution_type: NotRequired[StepType] + id: NotRequired[str] + installer: NotRequired[ECPDetailsTypedDict] + r"""Details regarding ECP for the workflow step""" + journey: NotRequired[StepJourneyTypedDict] + requirements: NotRequired[List[StepRequirementTypedDict]] + r"""requirements that need to be fulfilled in order to enable the step execution""" + user_ids: NotRequired[List[float]] + r"""This field is deprecated. 
Please use assignedTo""" + + +class Step(BaseModel): + r"""Action that needs to be done in a Workflow""" + + name: str + order: float + type: ItemType + assigned_to: Annotated[Optional[List[str]], pydantic.Field(alias="assignedTo")] = None + automation_config: Annotated[Optional[AutomationConfig], pydantic.Field(alias="automationConfig")] = None + description: Optional[StepDescription] = None + r"""Longer information regarding Task""" + due_date: Annotated[Optional[str], pydantic.Field(alias="dueDate")] = None + dynamic_due_date: Annotated[Optional[DynamicDueDate], pydantic.Field(alias="dynamicDueDate")] = None + r"""set a Duedate for a step then a specific""" + ecp: Optional[ECPDetails] = None + r"""Details regarding ECP for the workflow step""" + execution_type: Annotated[Optional[StepType], pydantic.Field(alias="executionType")] = None + id: Optional[str] = None + installer: Optional[ECPDetails] = None + r"""Details regarding ECP for the workflow step""" + journey: Optional[StepJourney] = None + requirements: Optional[List[StepRequirement]] = None + r"""requirements that need to be fulfilled in order to enable the step execution""" + user_ids: Annotated[Optional[List[float]], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="userIds")] = None + r"""This field is deprecated. Please use assignedTo""" + diff --git a/workflows_definition/src/openapi/models/stepdescription.py b/workflows_definition/src/openapi/models/stepdescription.py new file mode 100644 index 0000000000..77eccee0a5 --- /dev/null +++ b/workflows_definition/src/openapi/models/stepdescription.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +from typing import Optional, TypedDict +from typing_extensions import NotRequired + + +class StepDescriptionTypedDict(TypedDict): + r"""Longer information regarding Task""" + + enabled: NotRequired[bool] + value: NotRequired[str] + + +class StepDescription(BaseModel): + r"""Longer information regarding Task""" + + enabled: Optional[bool] = None + value: Optional[str] = None + diff --git a/workflows_definition/src/openapi/models/stepjourney.py b/workflows_definition/src/openapi/models/stepjourney.py new file mode 100644 index 0000000000..cd78dd2e9f --- /dev/null +++ b/workflows_definition/src/openapi/models/stepjourney.py @@ -0,0 +1,20 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from openapi.types import BaseModel +import pydantic +from typing import Optional, TypedDict +from typing_extensions import Annotated, NotRequired + + +class StepJourneyTypedDict(TypedDict): + id: NotRequired[str] + journey_id: NotRequired[str] + name: NotRequired[str] + + +class StepJourney(BaseModel): + id: Optional[str] = None + journey_id: Annotated[Optional[str], pydantic.Field(alias="journeyId")] = None + name: Optional[str] = None + diff --git a/workflows_definition/src/openapi/models/steprequirement.py b/workflows_definition/src/openapi/models/steprequirement.py new file mode 100644 index 0000000000..89926ea5cb --- /dev/null +++ b/workflows_definition/src/openapi/models/steprequirement.py @@ -0,0 +1,29 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
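To show how the item models compose (names and ordering below are invented for illustration): a Section groups Step items, and both carry the ItemType discriminator.

from openapi.models import ItemType, Section, Step, StepType

step = Step(
    name="Upload signed contract",
    order=1,
    type=ItemType.STEP,
    execution_type=StepType.MANUAL,   # serialized as executionType via its alias
)
section = Section(
    name="Onboarding",
    order=1,
    steps=[step],
    type=ItemType.SECTION,
)
print(section.model_dump(mode="json", by_alias=True, exclude_none=True))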
DO NOT EDIT.""" + +from __future__ import annotations +from .itemtype import ItemType +from enum import Enum +from openapi.types import BaseModel +import pydantic +from typing import TypedDict +from typing_extensions import Annotated + + +class Condition(str, Enum): + CLOSED = "CLOSED" + +class StepRequirementTypedDict(TypedDict): + r"""describe the requirement for step enablement""" + + condition: Condition + definition_id: str + type: ItemType + + +class StepRequirement(BaseModel): + r"""describe the requirement for step enablement""" + + condition: Condition + definition_id: Annotated[str, pydantic.Field(alias="definitionId")] + type: ItemType + diff --git a/workflows_definition/src/sdk/models/shared/steptype_enum.py b/workflows_definition/src/openapi/models/steptype.py old mode 100755 new mode 100644 similarity index 51% rename from workflows_definition/src/sdk/models/shared/steptype_enum.py rename to workflows_definition/src/openapi/models/steptype.py index bd26773ba4..0cab0724d7 --- a/workflows_definition/src/sdk/models/shared/steptype_enum.py +++ b/workflows_definition/src/openapi/models/steptype.py @@ -1,8 +1,9 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" from __future__ import annotations from enum import Enum -class StepTypeEnum(str, Enum): + +class StepType(str, Enum): MANUAL = "MANUAL" AUTOMATION = "AUTOMATION" diff --git a/workflows_definition/src/openapi/models/updatedefinitionop.py b/workflows_definition/src/openapi/models/updatedefinitionop.py new file mode 100644 index 0000000000..b1b99f703d --- /dev/null +++ b/workflows_definition/src/openapi/models/updatedefinitionop.py @@ -0,0 +1,24 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .workflowdefinition import WorkflowDefinition, WorkflowDefinitionTypedDict +from openapi.types import BaseModel +from openapi.utils import FieldMetadata, PathParamMetadata, RequestMetadata +import pydantic +from typing import TypedDict +from typing_extensions import Annotated + + +class UpdateDefinitionRequestTypedDict(TypedDict): + workflow_definition: WorkflowDefinitionTypedDict + r"""Workflow Definition payload""" + definition_id: str + r"""Short uuid (length 8) to identify the Workflow Definition.""" + + +class UpdateDefinitionRequest(BaseModel): + workflow_definition: Annotated[WorkflowDefinition, FieldMetadata(request=RequestMetadata(media_type="application/json"))] + r"""Workflow Definition payload""" + definition_id: Annotated[str, pydantic.Field(alias="definitionId"), FieldMetadata(path=PathParamMetadata(style="simple", explode=False))] + r"""Short uuid (length 8) to identify the Workflow Definition.""" + diff --git a/workflows_definition/src/openapi/models/updateentityattributes.py b/workflows_definition/src/openapi/models/updateentityattributes.py new file mode 100644 index 0000000000..900d4c7fb3 --- /dev/null +++ b/workflows_definition/src/openapi/models/updateentityattributes.py @@ -0,0 +1,34 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from __future__ import annotations +from enum import Enum +from openapi.types import BaseModel +import pydantic +from typing import TypedDict +from typing_extensions import Annotated + + +class Source(str, Enum): + WORKFLOW_STATUS = "workflow_status" + CURRENT_SECTION = "current_section" + CURRENT_STEP = "current_step" + +class TargetTypedDict(TypedDict): + entity_attribute: str + entity_schema: str + + +class Target(BaseModel): + entity_attribute: Annotated[str, pydantic.Field(alias="entityAttribute")] + entity_schema: Annotated[str, pydantic.Field(alias="entitySchema")] + + +class UpdateEntityAttributesTypedDict(TypedDict): + source: Source + target: TargetTypedDict + + +class UpdateEntityAttributes(BaseModel): + source: Source + target: Target + diff --git a/workflows_definition/src/openapi/models/workflowdefinition.py b/workflows_definition/src/openapi/models/workflowdefinition.py new file mode 100644 index 0000000000..03d81e2d6d --- /dev/null +++ b/workflows_definition/src/openapi/models/workflowdefinition.py @@ -0,0 +1,61 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from __future__ import annotations +from .closingreasonid import ClosingReasonID, ClosingReasonIDTypedDict +from .dynamicduedate import DynamicDueDate, DynamicDueDateTypedDict +from .section import Section, SectionTypedDict +from .step import Step, StepTypedDict +from .updateentityattributes import UpdateEntityAttributes, UpdateEntityAttributesTypedDict +from openapi.types import BaseModel +import pydantic +from typing import List, Optional, TypedDict, Union +from typing_extensions import Annotated, NotRequired + + +class WorkflowDefinitionTypedDict(TypedDict): + flow: List[FlowTypedDict] + name: str + assigned_to: NotRequired[List[str]] + closing_reasons: NotRequired[List[ClosingReasonIDTypedDict]] + creation_time: NotRequired[str] + r"""ISO String Date & Time""" + description: NotRequired[str] + due_date: NotRequired[str] + dynamic_due_date: NotRequired[DynamicDueDateTypedDict] + r"""set a Duedate for a step then a specific""" + enable_ecp_workflow: NotRequired[bool] + r"""Indicates whether this workflow is available for End Customer Portal or not. By default it's not.""" + id: NotRequired[str] + last_update_time: NotRequired[str] + r"""ISO String Date & Time""" + update_entity_attributes: NotRequired[List[UpdateEntityAttributesTypedDict]] + user_ids: NotRequired[List[float]] + r"""This field is deprecated. Please use assignedTo""" + + +class WorkflowDefinition(BaseModel): + flow: List[Flow] + name: str + assigned_to: Annotated[Optional[List[str]], pydantic.Field(alias="assignedTo")] = None + closing_reasons: Annotated[Optional[List[ClosingReasonID]], pydantic.Field(alias="closingReasons")] = None + creation_time: Annotated[Optional[str], pydantic.Field(alias="creationTime")] = None + r"""ISO String Date & Time""" + description: Optional[str] = None + due_date: Annotated[Optional[str], pydantic.Field(alias="dueDate")] = None + dynamic_due_date: Annotated[Optional[DynamicDueDate], pydantic.Field(alias="dynamicDueDate")] = None + r"""set a Duedate for a step then a specific""" + enable_ecp_workflow: Annotated[Optional[bool], pydantic.Field(alias="enableECPWorkflow")] = None + r"""Indicates whether this workflow is available for End Customer Portal or not. 
By default it's not.""" + id: Optional[str] = None + last_update_time: Annotated[Optional[str], pydantic.Field(alias="lastUpdateTime")] = None + r"""ISO String Date & Time""" + update_entity_attributes: Annotated[Optional[List[UpdateEntityAttributes]], pydantic.Field(alias="updateEntityAttributes")] = None + user_ids: Annotated[Optional[List[float]], pydantic.Field(deprecated="warning: ** DEPRECATED ** - This will be removed in a future release, please migrate away from it as soon as possible.", alias="userIds")] = None + r"""This field is deprecated. Please use assignedTo""" + + +FlowTypedDict = Union[SectionTypedDict, StepTypedDict] + + +Flow = Union[Section, Step] + diff --git a/workflows_definition/src/openapi/py.typed b/workflows_definition/src/openapi/py.typed new file mode 100644 index 0000000000..3e38f1a929 --- /dev/null +++ b/workflows_definition/src/openapi/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. The package enables type hints. diff --git a/workflows_definition/src/openapi/sdk.py b/workflows_definition/src/openapi/sdk.py new file mode 100644 index 0000000000..764a62faae --- /dev/null +++ b/workflows_definition/src/openapi/sdk.py @@ -0,0 +1,100 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from .basesdk import BaseSDK +from .httpclient import AsyncHttpClient, HttpClient +from .sdkconfiguration import SDKConfiguration +from .utils.logger import Logger, NoOpLogger +from .utils.retries import RetryConfig +import httpx +from openapi import models, utils +from openapi._hooks import SDKHooks +from openapi.closing_reason_sdk import ClosingReasonSDK +from openapi.types import OptionalNullable, UNSET +from openapi.workflows import Workflows +from typing import Any, Callable, Dict, Optional, Union + +class SDK(BaseSDK): + r"""Workflows Definitions: Service for Workflow Definitions for different processes inside of an Organization + + """ + closing_reason: ClosingReasonSDK + workflows: Workflows + def __init__( + self, + bearer_auth: Union[str, Callable[[], str]], + server_idx: Optional[int] = None, + server_url: Optional[str] = None, + url_params: Optional[Dict[str, str]] = None, + client: Optional[HttpClient] = None, + async_client: Optional[AsyncHttpClient] = None, + retry_config: OptionalNullable[RetryConfig] = UNSET, + timeout_ms: Optional[int] = None, + debug_logger: Optional[Logger] = None + ) -> None: + r"""Instantiates the SDK configuring it with the provided parameters. + + :param bearer_auth: The bearer_auth required for authentication + :param server_idx: The index of the server to use for all methods + :param server_url: The server URL to use for all methods + :param url_params: Parameters to optionally template the server URL with + :param client: The HTTP client to use for all synchronous methods + :param async_client: The Async HTTP client to use for all asynchronous methods + :param retry_config: The retry configuration to use for all supported methods + :param timeout_ms: Optional request timeout applied to each operation in milliseconds + """ + if client is None: + client = httpx.Client() + + assert issubclass( + type(client), HttpClient + ), "The provided client must implement the HttpClient protocol." + + if async_client is None: + async_client = httpx.AsyncClient() + + if debug_logger is None: + debug_logger = NoOpLogger() + + assert issubclass( + type(async_client), AsyncHttpClient + ), "The provided async_client must implement the AsyncHttpClient protocol." 
+ + security: Any = None + if callable(bearer_auth): + security = lambda: models.Security(bearer_auth = bearer_auth()) # pylint: disable=unnecessary-lambda-assignment + else: + security = models.Security(bearer_auth = bearer_auth) + + if server_url is not None: + if url_params is not None: + server_url = utils.template_url(server_url, url_params) + + + BaseSDK.__init__(self, SDKConfiguration( + client=client, + async_client=async_client, + security=security, + server_url=server_url, + server_idx=server_idx, + retry_config=retry_config, + timeout_ms=timeout_ms, + debug_logger=debug_logger + )) + + hooks = SDKHooks() + + current_server_url, *_ = self.sdk_configuration.get_server_details() + server_url, self.sdk_configuration.client = hooks.sdk_init(current_server_url, self.sdk_configuration.client) + if current_server_url != server_url: + self.sdk_configuration.server_url = server_url + + # pylint: disable=protected-access + self.sdk_configuration.__dict__["_hooks"] = hooks + + self._init_sdks() + + + def _init_sdks(self): + self.closing_reason = ClosingReasonSDK(self.sdk_configuration) + self.workflows = Workflows(self.sdk_configuration) + diff --git a/workflows_definition/src/openapi/sdkconfiguration.py b/workflows_definition/src/openapi/sdkconfiguration.py new file mode 100644 index 0000000000..e6b16fd199 --- /dev/null +++ b/workflows_definition/src/openapi/sdkconfiguration.py @@ -0,0 +1,48 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + + +from ._hooks import SDKHooks +from .httpclient import AsyncHttpClient, HttpClient +from .utils import Logger, RetryConfig, remove_suffix +from dataclasses import dataclass +from openapi import models +from openapi.types import OptionalNullable, UNSET +from pydantic import Field +from typing import Callable, Dict, Optional, Tuple, Union + + +SERVERS = [ + "https://workflows-definition.sls.epilot.io", +] +"""Contains the list of servers available to the SDK""" + +@dataclass +class SDKConfiguration: + client: HttpClient + async_client: AsyncHttpClient + debug_logger: Logger + security: Optional[Union[models.Security,Callable[[], models.Security]]] = None + server_url: Optional[str] = "" + server_idx: Optional[int] = 0 + language: str = "python" + openapi_doc_version: str = "1.0.0" + sdk_version: str = "1.3.0" + gen_version: str = "2.387.0" + user_agent: str = "speakeasy-sdk/python 1.3.0 2.387.0 1.0.0 openapi" + retry_config: OptionalNullable[RetryConfig] = Field(default_factory=lambda: UNSET) + timeout_ms: Optional[int] = None + + def __post_init__(self): + self._hooks = SDKHooks() + + def get_server_details(self) -> Tuple[str, Dict[str, str]]: + if self.server_url is not None and self.server_url: + return remove_suffix(self.server_url, "/"), {} + if self.server_idx is None: + self.server_idx = 0 + + return SERVERS[self.server_idx], {} + + + def get_hooks(self) -> SDKHooks: + return self._hooks diff --git a/workflows_definition/src/openapi/types/__init__.py b/workflows_definition/src/openapi/types/__init__.py new file mode 100644 index 0000000000..fc76fe0c55 --- /dev/null +++ b/workflows_definition/src/openapi/types/__init__.py @@ -0,0 +1,21 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
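A hedged usage sketch of the constructor above (token value and timeouts are placeholders): bearer_auth can be a static string or a zero-argument callable, and a custom httpx client may be supplied as long as it satisfies the HttpClient protocol.

import httpx
from openapi.sdk import SDK

sdk = SDK(
    bearer_auth=lambda: "my-access-token",   # hypothetical token source; a plain str also works
    client=httpx.Client(timeout=30.0),       # optional; defaults to httpx.Client()
    timeout_ms=60_000,                       # per-operation timeout in milliseconds
)

Passing a callable defers token retrieval to request time, which is why the constructor wraps it in the security lambda shown above.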
DO NOT EDIT.""" + +from .basemodel import ( + BaseModel, + Nullable, + OptionalNullable, + UnrecognizedInt, + UnrecognizedStr, + UNSET, + UNSET_SENTINEL, +) + +__all__ = [ + "BaseModel", + "Nullable", + "OptionalNullable", + "UnrecognizedInt", + "UnrecognizedStr", + "UNSET", + "UNSET_SENTINEL", +] diff --git a/workflows_definition/src/openapi/types/basemodel.py b/workflows_definition/src/openapi/types/basemodel.py new file mode 100644 index 0000000000..a6187efa6d --- /dev/null +++ b/workflows_definition/src/openapi/types/basemodel.py @@ -0,0 +1,39 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from pydantic import ConfigDict, model_serializer +from pydantic import BaseModel as PydanticBaseModel +from typing import TYPE_CHECKING, Literal, Optional, TypeVar, Union, NewType +from typing_extensions import TypeAliasType, TypeAlias + + +class BaseModel(PydanticBaseModel): + model_config = ConfigDict( + populate_by_name=True, arbitrary_types_allowed=True, protected_namespaces=() + ) + + +class Unset(BaseModel): + @model_serializer(mode="plain") + def serialize_model(self): + return UNSET_SENTINEL + + def __bool__(self) -> Literal[False]: + return False + + +UNSET = Unset() +UNSET_SENTINEL = "~?~unset~?~sentinel~?~" + + +T = TypeVar("T") +if TYPE_CHECKING: + Nullable: TypeAlias = Union[T, None] + OptionalNullable: TypeAlias = Union[Optional[Nullable[T]], Unset] +else: + Nullable = TypeAliasType("Nullable", Union[T, None], type_params=(T,)) + OptionalNullable = TypeAliasType( + "OptionalNullable", Union[Optional[Nullable[T]], Unset], type_params=(T,) + ) + +UnrecognizedInt = NewType("UnrecognizedInt", int) +UnrecognizedStr = NewType("UnrecognizedStr", str) diff --git a/workflows_definition/src/openapi/utils/__init__.py b/workflows_definition/src/openapi/utils/__init__.py new file mode 100644 index 0000000000..95aa1b60c9 --- /dev/null +++ b/workflows_definition/src/openapi/utils/__init__.py @@ -0,0 +1,84 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
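The Unset sentinel exists so optional, nullable fields can distinguish "not provided" from an explicit null; a small sketch of that tri-state convention:

from openapi.types import UNSET, OptionalNullable

def describe(value: OptionalNullable[int]) -> str:
    if value is UNSET:      # omitted entirely; Unset is falsy and serializes to a sentinel string
        return "omitted"
    if value is None:
        return "explicit null"
    return f"value={value}"

print(describe(UNSET), describe(None), describe(5))   # -> omitted explicit null value=5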
DO NOT EDIT.""" + +from .annotations import get_discriminator +from .enums import OpenEnumMeta +from .headers import get_headers, get_response_headers +from .metadata import ( + FieldMetadata, + find_metadata, + FormMetadata, + HeaderMetadata, + MultipartFormMetadata, + PathParamMetadata, + QueryParamMetadata, + RequestMetadata, + SecurityMetadata, +) +from .queryparams import get_query_params +from .retries import BackoffStrategy, Retries, retry, retry_async, RetryConfig +from .requestbodies import serialize_request_body, SerializedRequestBody +from .security import get_security +from .serializers import ( + get_pydantic_model, + marshal_json, + unmarshal, + unmarshal_json, + serialize_decimal, + serialize_float, + serialize_int, + stream_to_text, + validate_decimal, + validate_float, + validate_int, + validate_open_enum, +) +from .url import generate_url, template_url, remove_suffix +from .values import get_global_from_env, match_content_type, match_status_codes, match_response +from .logger import Logger, get_body_content, NoOpLogger + +__all__ = [ + "BackoffStrategy", + "FieldMetadata", + "find_metadata", + "FormMetadata", + "generate_url", + "get_body_content", + "get_discriminator", + "get_global_from_env", + "get_headers", + "get_pydantic_model", + "get_query_params", + "get_response_headers", + "get_security", + "HeaderMetadata", + "Logger", + "marshal_json", + "match_content_type", + "match_status_codes", + "match_response", + "MultipartFormMetadata", + "NoOpLogger", + "OpenEnumMeta", + "PathParamMetadata", + "QueryParamMetadata", + "remove_suffix", + "Retries", + "retry", + "retry_async", + "RetryConfig", + "RequestMetadata", + "SecurityMetadata", + "serialize_decimal", + "serialize_float", + "serialize_int", + "serialize_request_body", + "SerializedRequestBody", + "stream_to_text", + "template_url", + "unmarshal", + "unmarshal_json", + "validate_decimal", + "validate_float", + "validate_int", + "validate_open_enum", +] diff --git a/workflows_definition/src/openapi/utils/annotations.py b/workflows_definition/src/openapi/utils/annotations.py new file mode 100644 index 0000000000..0d17472b30 --- /dev/null +++ b/workflows_definition/src/openapi/utils/annotations.py @@ -0,0 +1,19 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from typing import Any + +def get_discriminator(model: Any, fieldname: str, key: str) -> str: + if isinstance(model, dict): + try: + return f'{model.get(key)}' + except AttributeError as e: + raise ValueError(f'Could not find discriminator key {key} in {model}') from e + + if hasattr(model, fieldname): + return f'{getattr(model, fieldname)}' + + fieldname = fieldname.upper() + if hasattr(model, fieldname): + return f'{getattr(model, fieldname)}' + + raise ValueError(f'Could not find discriminator field {fieldname} in {model}') diff --git a/workflows_definition/src/openapi/utils/enums.py b/workflows_definition/src/openapi/utils/enums.py new file mode 100644 index 0000000000..c650b10cb2 --- /dev/null +++ b/workflows_definition/src/openapi/utils/enums.py @@ -0,0 +1,34 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +import enum + + +class OpenEnumMeta(enum.EnumMeta): + def __call__( + cls, value, names=None, *, module=None, qualname=None, type=None, start=1 + ): + # The `type` kwarg also happens to be a built-in that pylint flags as + # redeclared. Safe to ignore this lint rule with this scope. 
+ # pylint: disable=redefined-builtin + + if names is not None: + return super().__call__( + value, + names=names, + module=module, + qualname=qualname, + type=type, + start=start, + ) + + try: + return super().__call__( + value, + names=names, # pyright: ignore[reportArgumentType] + module=module, + qualname=qualname, + type=type, + start=start, + ) + except ValueError: + return value diff --git a/workflows_definition/src/openapi/utils/eventstreaming.py b/workflows_definition/src/openapi/utils/eventstreaming.py new file mode 100644 index 0000000000..553b386b3b --- /dev/null +++ b/workflows_definition/src/openapi/utils/eventstreaming.py @@ -0,0 +1,178 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +import re +import json +from typing import Callable, TypeVar, Optional, Generator, AsyncGenerator, Tuple +import httpx + +T = TypeVar("T") + + +class ServerEvent: + id: Optional[str] = None + event: Optional[str] = None + data: Optional[str] = None + retry: Optional[int] = None + + +MESSAGE_BOUNDARIES = [ + b"\r\n\r\n", + b"\n\n", + b"\r\r", +] + + +async def stream_events_async( + response: httpx.Response, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, +) -> AsyncGenerator[T, None]: + buffer = bytearray() + position = 0 + discard = False + async for chunk in response.aiter_bytes(): + # We've encountered the sentinel value and should no longer process + # incoming data. Instead we throw new data away until the server closes + # the connection. + if discard: + continue + + buffer += chunk + for i in range(position, len(buffer)): + char = buffer[i : i + 1] + seq: Optional[bytes] = None + if char in [b"\r", b"\n"]: + for boundary in MESSAGE_BOUNDARIES: + seq = _peek_sequence(i, buffer, boundary) + if seq is not None: + break + if seq is None: + continue + + block = buffer[position:i] + position = i + len(seq) + event, discard = _parse_event(block, decoder, sentinel) + if event is not None: + yield event + + if position > 0: + buffer = buffer[position:] + position = 0 + + event, discard = _parse_event(buffer, decoder, sentinel) + if event is not None: + yield event + + +def stream_events( + response: httpx.Response, + decoder: Callable[[str], T], + sentinel: Optional[str] = None, +) -> Generator[T, None, None]: + buffer = bytearray() + position = 0 + discard = False + for chunk in response.iter_bytes(): + # We've encountered the sentinel value and should no longer process + # incoming data. Instead we throw new data away until the server closes + # the connection. 
+ if discard: + continue + + buffer += chunk + for i in range(position, len(buffer)): + char = buffer[i : i + 1] + seq: Optional[bytes] = None + if char in [b"\r", b"\n"]: + for boundary in MESSAGE_BOUNDARIES: + seq = _peek_sequence(i, buffer, boundary) + if seq is not None: + break + if seq is None: + continue + + block = buffer[position:i] + position = i + len(seq) + event, discard = _parse_event(block, decoder, sentinel) + if event is not None: + yield event + + if position > 0: + buffer = buffer[position:] + position = 0 + + event, discard = _parse_event(buffer, decoder, sentinel) + if event is not None: + yield event + + +def _parse_event( + raw: bytearray, decoder: Callable[[str], T], sentinel: Optional[str] = None +) -> Tuple[Optional[T], bool]: + block = raw.decode() + lines = re.split(r"\r?\n|\r", block) + publish = False + event = ServerEvent() + data = "" + for line in lines: + if not line: + continue + + delim = line.find(":") + if delim <= 0: + continue + + field = line[0:delim] + value = line[delim + 1 :] if delim < len(line) - 1 else "" + if len(value) and value[0] == " ": + value = value[1:] + + if field == "event": + event.event = value + publish = True + elif field == "data": + data += value + "\n" + publish = True + elif field == "id": + event.id = value + publish = True + elif field == "retry": + event.retry = int(value) if value.isdigit() else None + publish = True + + if sentinel and data == f"{sentinel}\n": + return None, True + + if data: + data = data[:-1] + event.data = data + + data_is_primitive = ( + data.isnumeric() or data == "true" or data == "false" or data == "null" + ) + data_is_json = ( + data.startswith("{") or data.startswith("[") or data.startswith('"') + ) + + if data_is_primitive or data_is_json: + try: + event.data = json.loads(data) + except Exception: + pass + + out = None + if publish: + out = decoder(json.dumps(event.__dict__)) + + return out, False + + +def _peek_sequence(position: int, buffer: bytearray, sequence: bytes): + if len(sequence) > (len(buffer) - position): + return None + + for i, seq in enumerate(sequence): + if buffer[position + i] != seq: + return None + + return sequence diff --git a/workflows_definition/src/openapi/utils/forms.py b/workflows_definition/src/openapi/utils/forms.py new file mode 100644 index 0000000000..07f9b2359a --- /dev/null +++ b/workflows_definition/src/openapi/utils/forms.py @@ -0,0 +1,207 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
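The two stream_events helpers above split a server-sent-events byte stream on blank-line boundaries, decode each block, and stop at an optional sentinel. A self-contained sketch using a canned payload (no real endpoint involved):

import json
import httpx
from openapi.utils.eventstreaming import stream_events

payload = b'id: 1\nevent: message\ndata: {"hello": "world"}\n\ndata: [DONE]\n\n'
response = httpx.Response(200, content=payload)

# each parsed event is handed to the decoder as a JSON string of its fields
for event in stream_events(response, decoder=json.loads, sentinel="[DONE]"):
    print(event["event"], event["data"])   # -> message {'hello': 'world'}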
DO NOT EDIT.""" + +from typing import ( + Any, + Dict, + get_type_hints, + List, + Tuple, +) +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .serializers import marshal_json + +from .metadata import ( + FormMetadata, + MultipartFormMetadata, + find_field_metadata, +) +from .values import _val_to_string + + +def _populate_form( + field_name: str, + explode: bool, + obj: Any, + delimiter: str, + form: Dict[str, List[str]], +): + if obj is None: + return form + + if isinstance(obj, BaseModel): + items = [] + + obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields + for name in obj_fields: + obj_field = obj_fields[name] + obj_field_name = obj_field.alias if obj_field.alias is not None else name + if obj_field_name == "": + continue + + val = getattr(obj, name) + if val is None: + continue + + if explode: + form[obj_field_name] = [_val_to_string(val)] + else: + items.append(f"{obj_field_name}{delimiter}{_val_to_string(val)}") + + if len(items) > 0: + form[field_name] = [delimiter.join(items)] + elif isinstance(obj, Dict): + items = [] + for key, value in obj.items(): + if value is None: + continue + + if explode: + form[key] = [_val_to_string(value)] + else: + items.append(f"{key}{delimiter}{_val_to_string(value)}") + + if len(items) > 0: + form[field_name] = [delimiter.join(items)] + elif isinstance(obj, List): + items = [] + + for value in obj: + if value is None: + continue + + if explode: + if not field_name in form: + form[field_name] = [] + form[field_name].append(_val_to_string(value)) + else: + items.append(_val_to_string(value)) + + if len(items) > 0: + form[field_name] = [delimiter.join([str(item) for item in items])] + else: + form[field_name] = [_val_to_string(obj)] + + return form + + +def serialize_multipart_form( + media_type: str, request: Any +) -> Tuple[str, Dict[str, Any], Dict[str, Any]]: + form: Dict[str, Any] = {} + files: Dict[str, Any] = {} + + if not isinstance(request, BaseModel): + raise TypeError("invalid request body type") + + request_fields: Dict[str, FieldInfo] = request.__class__.model_fields + request_field_types = get_type_hints(request.__class__) + + for name in request_fields: + field = request_fields[name] + + val = getattr(request, name) + if val is None: + continue + + field_metadata = find_field_metadata(field, MultipartFormMetadata) + if not field_metadata: + continue + + f_name = field.alias if field.alias is not None else name + + if field_metadata.file: + file_fields: Dict[str, FieldInfo] = val.__class__.model_fields + + file_name = "" + field_name = "" + content = None + content_type = None + + for file_field_name in file_fields: + file_field = file_fields[file_field_name] + + file_metadata = find_field_metadata(file_field, MultipartFormMetadata) + if file_metadata is None: + continue + + if file_metadata.content: + content = getattr(val, file_field_name, None) + elif file_field_name == "content_type": + content_type = getattr(val, file_field_name, None) + else: + field_name = ( + file_field.alias + if file_field.alias is not None + else file_field_name + ) + file_name = getattr(val, file_field_name) + + if field_name == "" or file_name == "" or content is None: + raise ValueError("invalid multipart/form-data file") + + if content_type is not None: + files[field_name] = (file_name, content, content_type) + else: + files[field_name] = (file_name, content) + elif field_metadata.json: + files[f_name] = ( + None, + marshal_json(val, request_field_types[name]), + "application/json", + ) + else: + if isinstance(val, 
List): + values = [] + + for value in val: + if value is None: + continue + values.append(_val_to_string(value)) + + form[f_name + "[]"] = values + else: + form[f_name] = _val_to_string(val) + return media_type, form, files + + +def serialize_form_data(data: Any) -> Dict[str, Any]: + form: Dict[str, List[str]] = {} + + if isinstance(data, BaseModel): + data_fields: Dict[str, FieldInfo] = data.__class__.model_fields + data_field_types = get_type_hints(data.__class__) + for name in data_fields: + field = data_fields[name] + + val = getattr(data, name) + if val is None: + continue + + metadata = find_field_metadata(field, FormMetadata) + if metadata is None: + continue + + f_name = field.alias if field.alias is not None else name + + if metadata.json: + form[f_name] = [marshal_json(val, data_field_types[name])] + else: + if metadata.style == "form": + _populate_form( + f_name, + metadata.explode, + val, + ",", + form, + ) + else: + raise ValueError(f"Invalid form style for field {name}") + elif isinstance(data, Dict): + for key, value in data.items(): + form[key] = [_val_to_string(value)] + else: + raise TypeError(f"Invalid request body type {type(data)} for form data") + + return form diff --git a/workflows_definition/src/openapi/utils/headers.py b/workflows_definition/src/openapi/utils/headers.py new file mode 100644 index 0000000000..e14a0f4a8e --- /dev/null +++ b/workflows_definition/src/openapi/utils/headers.py @@ -0,0 +1,136 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from typing import ( + Any, + Dict, + List, + Optional, +) +from httpx import Headers +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + HeaderMetadata, + find_field_metadata, +) + +from .values import _populate_from_globals, _val_to_string + + +def get_headers(headers_params: Any, gbls: Optional[Any] = None) -> Dict[str, str]: + headers: Dict[str, str] = {} + + globals_already_populated = [] + if headers_params is not None: + globals_already_populated = _populate_headers(headers_params, gbls, headers, []) + if gbls is not None: + _populate_headers(gbls, None, headers, globals_already_populated) + + return headers + + +def _populate_headers( + headers_params: Any, + gbls: Any, + header_values: Dict[str, str], + skip_fields: List[str], +) -> List[str]: + globals_already_populated: List[str] = [] + + if not isinstance(headers_params, BaseModel): + return globals_already_populated + + param_fields: Dict[str, FieldInfo] = headers_params.__class__.model_fields + for name in param_fields: + if name in skip_fields: + continue + + field = param_fields[name] + f_name = field.alias if field.alias is not None else name + + metadata = find_field_metadata(field, HeaderMetadata) + if metadata is None: + continue + + value, global_found = _populate_from_globals( + name, getattr(headers_params, name), HeaderMetadata, gbls + ) + if global_found: + globals_already_populated.append(name) + value = _serialize_header(metadata.explode, value) + + if value != "": + header_values[f_name] = value + + return globals_already_populated + + +def _serialize_header(explode: bool, obj: Any) -> str: + if obj is None: + return "" + + if isinstance(obj, BaseModel): + items = [] + obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields + for name in obj_fields: + obj_field = obj_fields[name] + obj_param_metadata = find_field_metadata(obj_field, HeaderMetadata) + + if not obj_param_metadata: + continue + + f_name = obj_field.alias if obj_field.alias is not None else name 
+ + val = getattr(obj, name) + if val is None: + continue + + if explode: + items.append(f"{f_name}={_val_to_string(val)}") + else: + items.append(f_name) + items.append(_val_to_string(val)) + + if len(items) > 0: + return ",".join(items) + elif isinstance(obj, Dict): + items = [] + + for key, value in obj.items(): + if value is None: + continue + + if explode: + items.append(f"{key}={_val_to_string(value)}") + else: + items.append(key) + items.append(_val_to_string(value)) + + if len(items) > 0: + return ",".join([str(item) for item in items]) + elif isinstance(obj, List): + items = [] + + for value in obj: + if value is None: + continue + + items.append(_val_to_string(value)) + + if len(items) > 0: + return ",".join(items) + else: + return f"{_val_to_string(obj)}" + + return "" + + +def get_response_headers(headers: Headers) -> Dict[str, List[str]]: + res: Dict[str, List[str]] = {} + for k, v in headers.items(): + if not k in res: + res[k] = [] + + res[k].append(v) + return res diff --git a/workflows_definition/src/openapi/utils/logger.py b/workflows_definition/src/openapi/utils/logger.py new file mode 100644 index 0000000000..7e4bbeac26 --- /dev/null +++ b/workflows_definition/src/openapi/utils/logger.py @@ -0,0 +1,16 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +import httpx +from typing import Any, Protocol + +class Logger(Protocol): + def debug(self, msg: str, *args: Any, **kwargs: Any) -> None: + pass + +class NoOpLogger: + def debug(self, msg: str, *args: Any, **kwargs: Any) -> None: + pass + +def get_body_content(req: httpx.Request) -> str: + return "" if not hasattr(req, "_content") else str(req.content) + diff --git a/workflows_definition/src/openapi/utils/metadata.py b/workflows_definition/src/openapi/utils/metadata.py new file mode 100644 index 0000000000..173b3e5ce6 --- /dev/null +++ b/workflows_definition/src/openapi/utils/metadata.py @@ -0,0 +1,118 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +from typing import Optional, Type, TypeVar, Union +from dataclasses import dataclass +from pydantic.fields import FieldInfo + + +T = TypeVar("T") + + +@dataclass +class SecurityMetadata: + option: bool = False + scheme: bool = False + scheme_type: Optional[str] = None + sub_type: Optional[str] = None + field_name: Optional[str] = None + + def get_field_name(self, default: str) -> str: + return self.field_name or default + + +@dataclass +class ParamMetadata: + serialization: Optional[str] = None + style: str = "simple" + explode: bool = False + + +@dataclass +class PathParamMetadata(ParamMetadata): + pass + + +@dataclass +class QueryParamMetadata(ParamMetadata): + style: str = "form" + explode: bool = True + + +@dataclass +class HeaderMetadata(ParamMetadata): + pass + + +@dataclass +class RequestMetadata: + media_type: str = "application/octet-stream" + + +@dataclass +class MultipartFormMetadata: + file: bool = False + content: bool = False + json: bool = False + + +@dataclass +class FormMetadata: + json: bool = False + style: str = "form" + explode: bool = True + + +class FieldMetadata: + security: Optional[SecurityMetadata] = None + path: Optional[PathParamMetadata] = None + query: Optional[QueryParamMetadata] = None + header: Optional[HeaderMetadata] = None + request: Optional[RequestMetadata] = None + form: Optional[FormMetadata] = None + multipart: Optional[MultipartFormMetadata] = None + + def __init__( + self, + security: Optional[SecurityMetadata] = None, + path: Optional[Union[PathParamMetadata, bool]] = None, + query: Optional[Union[QueryParamMetadata, bool]] = None, + header: Optional[Union[HeaderMetadata, bool]] = None, + request: Optional[Union[RequestMetadata, bool]] = None, + form: Optional[Union[FormMetadata, bool]] = None, + multipart: Optional[Union[MultipartFormMetadata, bool]] = None, + ): + self.security = security + self.path = PathParamMetadata() if isinstance(path, bool) else path + self.query = QueryParamMetadata() if isinstance(query, bool) else query + self.header = HeaderMetadata() if isinstance(header, bool) else header + self.request = RequestMetadata() if isinstance(request, bool) else request + self.form = FormMetadata() if isinstance(form, bool) else form + self.multipart = ( + MultipartFormMetadata() if isinstance(multipart, bool) else multipart + ) + + +def find_field_metadata(field_info: FieldInfo, metadata_type: Type[T]) -> Optional[T]: + metadata = find_metadata(field_info, FieldMetadata) + if not metadata: + return None + + fields = metadata.__dict__ + + for field in fields: + if isinstance(fields[field], metadata_type): + return fields[field] + + return None + + +def find_metadata(field_info: FieldInfo, metadata_type: Type[T]) -> Optional[T]: + metadata = field_info.metadata + if not metadata: + return None + + for md in metadata: + if isinstance(md, metadata_type): + return md + + return None diff --git a/workflows_definition/src/openapi/utils/queryparams.py b/workflows_definition/src/openapi/utils/queryparams.py new file mode 100644 index 0000000000..1c8c58340d --- /dev/null +++ b/workflows_definition/src/openapi/utils/queryparams.py @@ -0,0 +1,203 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
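
Editor's note (illustrative, not generated): get_query_params() flattens a
request model into a name -> list-of-values mapping, honoring each field's
QueryParamMetadata. Roughly, for hypothetical fields:

    deepObject style:           filter={"created_at": "2024-01-01"}
                                -> {"filter[created_at]": ["2024-01-01"]}
    form style (explode=True):  ids=[1, 2] -> {"ids": ["1", "2"]}
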
DO NOT EDIT.""" + +from typing import ( + Any, + Dict, + get_type_hints, + List, + Optional, +) + +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + QueryParamMetadata, + find_field_metadata, +) +from .values import _get_serialized_params, _populate_from_globals, _val_to_string +from .forms import _populate_form + + +def get_query_params( + query_params: Any, + gbls: Optional[Any] = None, +) -> Dict[str, List[str]]: + params: Dict[str, List[str]] = {} + + globals_already_populated = _populate_query_params(query_params, gbls, params, []) + if gbls is not None: + _populate_query_params(gbls, None, params, globals_already_populated) + + return params + + +def _populate_query_params( + query_params: Any, + gbls: Any, + query_param_values: Dict[str, List[str]], + skip_fields: List[str], +) -> List[str]: + globals_already_populated: List[str] = [] + + if not isinstance(query_params, BaseModel): + return globals_already_populated + + param_fields: Dict[str, FieldInfo] = query_params.__class__.model_fields + param_field_types = get_type_hints(query_params.__class__) + for name in param_fields: + if name in skip_fields: + continue + + field = param_fields[name] + + metadata = find_field_metadata(field, QueryParamMetadata) + if not metadata: + continue + + value = getattr(query_params, name) if query_params is not None else None + + value, global_found = _populate_from_globals( + name, value, QueryParamMetadata, gbls + ) + if global_found: + globals_already_populated.append(name) + + f_name = field.alias if field.alias is not None else name + serialization = metadata.serialization + if serialization is not None: + serialized_parms = _get_serialized_params( + metadata, f_name, value, param_field_types[name] + ) + for key, value in serialized_parms.items(): + if key in query_param_values: + query_param_values[key].extend(value) + else: + query_param_values[key] = [value] + else: + style = metadata.style + if style == "deepObject": + _populate_deep_object_query_params(f_name, value, query_param_values) + elif style == "form": + _populate_delimited_query_params( + metadata, f_name, value, ",", query_param_values + ) + elif style == "pipeDelimited": + _populate_delimited_query_params( + metadata, f_name, value, "|", query_param_values + ) + else: + raise NotImplementedError( + f"query param style {style} not yet supported" + ) + + return globals_already_populated + + +def _populate_deep_object_query_params( + field_name: str, + obj: Any, + params: Dict[str, List[str]], +): + if obj is None: + return + + if isinstance(obj, BaseModel): + _populate_deep_object_query_params_basemodel(field_name, obj, params) + elif isinstance(obj, Dict): + _populate_deep_object_query_params_dict(field_name, obj, params) + + +def _populate_deep_object_query_params_basemodel( + prior_params_key: str, + obj: Any, + params: Dict[str, List[str]], +): + if obj is None: + return + + if not isinstance(obj, BaseModel): + return + + obj_fields: Dict[str, FieldInfo] = obj.__class__.model_fields + for name in obj_fields: + obj_field = obj_fields[name] + + f_name = obj_field.alias if obj_field.alias is not None else name + + params_key = f"{prior_params_key}[{f_name}]" + + obj_param_metadata = find_field_metadata(obj_field, QueryParamMetadata) + if obj_param_metadata is None: + continue + + obj_val = getattr(obj, name) + if obj_val is None: + continue + + if isinstance(obj_val, BaseModel): + _populate_deep_object_query_params_basemodel(params_key, obj_val, params) + elif 
isinstance(obj_val, Dict): + _populate_deep_object_query_params_dict(params_key, obj_val, params) + elif isinstance(obj_val, List): + _populate_deep_object_query_params_list(params_key, obj_val, params) + else: + params[params_key] = [_val_to_string(obj_val)] + + +def _populate_deep_object_query_params_dict( + prior_params_key: str, + value: Dict, + params: Dict[str, List[str]], +): + if value is None: + return + + for key, val in value.items(): + if val is None: + continue + + params_key = f"{prior_params_key}[{key}]" + + if isinstance(val, BaseModel): + _populate_deep_object_query_params_basemodel(params_key, val, params) + elif isinstance(val, Dict): + _populate_deep_object_query_params_dict(params_key, val, params) + elif isinstance(val, List): + _populate_deep_object_query_params_list(params_key, val, params) + else: + params[params_key] = [_val_to_string(val)] + + +def _populate_deep_object_query_params_list( + params_key: str, + value: List, + params: Dict[str, List[str]], +): + if value is None: + return + + for val in value: + if val is None: + continue + + if params.get(params_key) is None: + params[params_key] = [] + + params[params_key].append(_val_to_string(val)) + + +def _populate_delimited_query_params( + metadata: QueryParamMetadata, + field_name: str, + obj: Any, + delimiter: str, + query_param_values: Dict[str, List[str]], +): + _populate_form( + field_name, + metadata.explode, + obj, + delimiter, + query_param_values, + ) diff --git a/workflows_definition/src/openapi/utils/requestbodies.py b/workflows_definition/src/openapi/utils/requestbodies.py new file mode 100644 index 0000000000..4f586ae79a --- /dev/null +++ b/workflows_definition/src/openapi/utils/requestbodies.py @@ -0,0 +1,66 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
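
Editor's note (illustrative, not generated): serialize_request_body() picks the
wire format from the serialization method. A hypothetical call such as

    serialized = serialize_request_body(
        request_body=req_model,       # pydantic model instance
        nullable=False,
        optional=True,
        serialization_method="json",  # resolved to application/json
        request_body_type=type(req_model),
    )

returns a SerializedRequestBody whose .content holds the marshalled JSON.
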
DO NOT EDIT.""" + +import io +from dataclasses import dataclass +import re +from typing import ( + Any, + Optional, +) + +from .forms import serialize_form_data, serialize_multipart_form + +from .serializers import marshal_json + +SERIALIZATION_METHOD_TO_CONTENT_TYPE = { + "json": "application/json", + "form": "application/x-www-form-urlencoded", + "multipart": "multipart/form-data", + "raw": "application/octet-stream", + "string": "text/plain", +} + + +@dataclass +class SerializedRequestBody: + media_type: str + content: Optional[Any] = None + data: Optional[Any] = None + files: Optional[Any] = None + + +def serialize_request_body( + request_body: Any, + nullable: bool, + optional: bool, + serialization_method: str, + request_body_type, +) -> Optional[SerializedRequestBody]: + if request_body is None: + if not nullable and optional: + return None + + media_type = SERIALIZATION_METHOD_TO_CONTENT_TYPE[serialization_method] + + serialized_request_body = SerializedRequestBody(media_type) + + if re.match(r"(application|text)\/.*?\+*json.*", media_type) is not None: + serialized_request_body.content = marshal_json(request_body, request_body_type) + elif re.match(r"multipart\/.*", media_type) is not None: + ( + serialized_request_body.media_type, + serialized_request_body.data, + serialized_request_body.files, + ) = serialize_multipart_form(media_type, request_body) + elif re.match(r"application\/x-www-form-urlencoded.*", media_type) is not None: + serialized_request_body.data = serialize_form_data(request_body) + elif isinstance(request_body, (bytes, bytearray, io.BytesIO, io.BufferedReader)): + serialized_request_body.content = request_body + elif isinstance(request_body, str): + serialized_request_body.content = request_body + else: + raise TypeError( + f"invalid request body type {type(request_body)} for mediaType {media_type}" + ) + + return serialized_request_body diff --git a/workflows_definition/src/openapi/utils/retries.py b/workflows_definition/src/openapi/utils/retries.py new file mode 100644 index 0000000000..a06f927946 --- /dev/null +++ b/workflows_definition/src/openapi/utils/retries.py @@ -0,0 +1,216 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
DO NOT EDIT.""" + +import random +import time +from typing import List + +import httpx + + +class BackoffStrategy: + initial_interval: int + max_interval: int + exponent: float + max_elapsed_time: int + + def __init__( + self, + initial_interval: int, + max_interval: int, + exponent: float, + max_elapsed_time: int, + ): + self.initial_interval = initial_interval + self.max_interval = max_interval + self.exponent = exponent + self.max_elapsed_time = max_elapsed_time + + +class RetryConfig: + strategy: str + backoff: BackoffStrategy + retry_connection_errors: bool + + def __init__( + self, strategy: str, backoff: BackoffStrategy, retry_connection_errors: bool + ): + self.strategy = strategy + self.backoff = backoff + self.retry_connection_errors = retry_connection_errors + + +class Retries: + config: RetryConfig + status_codes: List[str] + + def __init__(self, config: RetryConfig, status_codes: List[str]): + self.config = config + self.status_codes = status_codes + + +class TemporaryError(Exception): + response: httpx.Response + + def __init__(self, response: httpx.Response): + self.response = response + + +class PermanentError(Exception): + inner: Exception + + def __init__(self, inner: Exception): + self.inner = inner + + +def retry(func, retries: Retries): + if retries.config.strategy == "backoff": + + def do_request() -> httpx.Response: + res: httpx.Response + try: + res = func() + + for code in retries.status_codes: + if "X" in code.upper(): + code_range = int(code[0]) + + status_major = res.status_code / 100 + + if code_range <= status_major < code_range + 1: + raise TemporaryError(res) + else: + parsed_code = int(code) + + if res.status_code == parsed_code: + raise TemporaryError(res) + except httpx.ConnectError as exception: + if retries.config.retry_connection_errors: + raise + + raise PermanentError(exception) from exception + except httpx.TimeoutException as exception: + if retries.config.retry_connection_errors: + raise + + raise PermanentError(exception) from exception + except TemporaryError: + raise + except Exception as exception: + raise PermanentError(exception) from exception + + return res + + return retry_with_backoff( + do_request, + retries.config.backoff.initial_interval, + retries.config.backoff.max_interval, + retries.config.backoff.exponent, + retries.config.backoff.max_elapsed_time, + ) + + return func() + + +async def retry_async(func, retries: Retries): + if retries.config.strategy == "backoff": + + async def do_request() -> httpx.Response: + res: httpx.Response + try: + res = await func() + + for code in retries.status_codes: + if "X" in code.upper(): + code_range = int(code[0]) + + status_major = res.status_code / 100 + + if code_range <= status_major < code_range + 1: + raise TemporaryError(res) + else: + parsed_code = int(code) + + if res.status_code == parsed_code: + raise TemporaryError(res) + except httpx.ConnectError as exception: + if retries.config.retry_connection_errors: + raise + + raise PermanentError(exception) from exception + except httpx.TimeoutException as exception: + if retries.config.retry_connection_errors: + raise + + raise PermanentError(exception) from exception + except TemporaryError: + raise + except Exception as exception: + raise PermanentError(exception) from exception + + return res + + return await retry_with_backoff_async( + do_request, + retries.config.backoff.initial_interval, + retries.config.backoff.max_interval, + retries.config.backoff.exponent, + retries.config.backoff.max_elapsed_time, + ) + + return await func() + + 
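# Editor's note: illustrative only; not part of the Speakeasy-generated module.
# A minimal sketch of wiring the helpers above together, assuming `send` is a
# zero-argument callable that performs a single httpx request.
def _example_retry_usage(send) -> httpx.Response:
    config = RetryConfig(
        strategy="backoff",
        backoff=BackoffStrategy(
            initial_interval=500,      # ms before the first retry
            max_interval=60000,        # cap on the delay between attempts (ms)
            exponent=1.5,              # growth factor applied per attempt
            max_elapsed_time=3600000,  # give up after this much total time (ms)
        ),
        retry_connection_errors=True,
    )
    # Retry on HTTP 429 and any 5XX response, using the backoff above.
    return retry(send, Retries(config, ["429", "5XX"]))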
+def retry_with_backoff( + func, + initial_interval=500, + max_interval=60000, + exponent=1.5, + max_elapsed_time=3600000, +): + start = round(time.time() * 1000) + retries = 0 + + while True: + try: + return func() + except PermanentError as exception: + raise exception.inner + except Exception as exception: # pylint: disable=broad-exception-caught + now = round(time.time() * 1000) + if now - start > max_elapsed_time: + if isinstance(exception, TemporaryError): + return exception.response + + raise + sleep = (initial_interval / 1000) * exponent**retries + random.uniform(0, 1) + sleep = min(sleep, max_interval / 1000) + time.sleep(sleep) + retries += 1 + + +async def retry_with_backoff_async( + func, + initial_interval=500, + max_interval=60000, + exponent=1.5, + max_elapsed_time=3600000, +): + start = round(time.time() * 1000) + retries = 0 + + while True: + try: + return await func() + except PermanentError as exception: + raise exception.inner + except Exception as exception: # pylint: disable=broad-exception-caught + now = round(time.time() * 1000) + if now - start > max_elapsed_time: + if isinstance(exception, TemporaryError): + return exception.response + + raise + sleep = (initial_interval / 1000) * exponent**retries + random.uniform(0, 1) + sleep = min(sleep, max_interval / 1000) + time.sleep(sleep) + retries += 1 diff --git a/workflows_definition/src/openapi/utils/security.py b/workflows_definition/src/openapi/utils/security.py new file mode 100644 index 0000000000..aab4cb65ca --- /dev/null +++ b/workflows_definition/src/openapi/utils/security.py @@ -0,0 +1,168 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +import base64 +from typing import ( + Any, + Dict, + List, + Tuple, +) +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + SecurityMetadata, + find_field_metadata, +) + + + +def get_security(security: Any) -> Tuple[Dict[str, str], Dict[str, List[str]]]: + headers: Dict[str, str] = {} + query_params: Dict[str, List[str]] = {} + + if security is None: + return headers, query_params + + if not isinstance(security, BaseModel): + raise TypeError("security must be a pydantic model") + + sec_fields: Dict[str, FieldInfo] = security.__class__.model_fields + for name in sec_fields: + sec_field = sec_fields[name] + + value = getattr(security, name) + if value is None: + continue + + metadata = find_field_metadata(sec_field, SecurityMetadata) + if metadata is None: + continue + if metadata.option: + _parse_security_option(headers, query_params, value) + return headers, query_params + if metadata.scheme: + # Special case for basic auth which could be a flattened model + if metadata.sub_type == "basic" and not isinstance(value, BaseModel): + _parse_security_scheme(headers, query_params, metadata, name, security) + else: + _parse_security_scheme(headers, query_params, metadata, name, value) + + return headers, query_params + + +def _parse_security_option( + headers: Dict[str, str], query_params: Dict[str, List[str]], option: Any +): + if not isinstance(option, BaseModel): + raise TypeError("security option must be a pydantic model") + + opt_fields: Dict[str, FieldInfo] = option.__class__.model_fields + for name in opt_fields: + opt_field = opt_fields[name] + + metadata = find_field_metadata(opt_field, SecurityMetadata) + if metadata is None or not metadata.scheme: + continue + _parse_security_scheme( + headers, query_params, metadata, name, getattr(option, name) + ) + + +def _parse_security_scheme( + headers: 
Dict[str, str], + query_params: Dict[str, List[str]], + scheme_metadata: SecurityMetadata, + field_name: str, + scheme: Any, +): + scheme_type = scheme_metadata.scheme_type + sub_type = scheme_metadata.sub_type + + if isinstance(scheme, BaseModel): + if scheme_type == "http" and sub_type == "basic": + _parse_basic_auth_scheme(headers, scheme) + return + + scheme_fields: Dict[str, FieldInfo] = scheme.__class__.model_fields + for name in scheme_fields: + scheme_field = scheme_fields[name] + + metadata = find_field_metadata(scheme_field, SecurityMetadata) + if metadata is None or metadata.field_name is None: + continue + + value = getattr(scheme, name) + + _parse_security_scheme_value( + headers, query_params, scheme_metadata, metadata, name, value + ) + else: + _parse_security_scheme_value( + headers, query_params, scheme_metadata, scheme_metadata, field_name, scheme + ) + + +def _parse_security_scheme_value( + headers: Dict[str, str], + query_params: Dict[str, List[str]], + scheme_metadata: SecurityMetadata, + security_metadata: SecurityMetadata, + field_name: str, + value: Any, +): + scheme_type = scheme_metadata.scheme_type + sub_type = scheme_metadata.sub_type + + header_name = security_metadata.get_field_name(field_name) + + if scheme_type == "apiKey": + if sub_type == "header": + headers[header_name] = value + elif sub_type == "query": + query_params[header_name] = [value] + else: + raise ValueError("sub type {sub_type} not supported") + elif scheme_type == "openIdConnect": + headers[header_name] = _apply_bearer(value) + elif scheme_type == "oauth2": + if sub_type != "client_credentials": + headers[header_name] = _apply_bearer(value) + elif scheme_type == "http": + if sub_type == "bearer": + headers[header_name] = _apply_bearer(value) + else: + raise ValueError("sub type {sub_type} not supported") + else: + raise ValueError("scheme type {scheme_type} not supported") + + +def _apply_bearer(token: str) -> str: + return token.lower().startswith("bearer ") and token or f"Bearer {token}" + + +def _parse_basic_auth_scheme(headers: Dict[str, str], scheme: Any): + username = "" + password = "" + + if not isinstance(scheme, BaseModel): + raise TypeError("basic auth scheme must be a pydantic model") + + scheme_fields: Dict[str, FieldInfo] = scheme.__class__.model_fields + for name in scheme_fields: + scheme_field = scheme_fields[name] + + metadata = find_field_metadata(scheme_field, SecurityMetadata) + if metadata is None or metadata.field_name is None: + continue + + field_name = metadata.field_name + value = getattr(scheme, name) + + if field_name == "username": + username = value + if field_name == "password": + password = value + + data = f"{username}:{password}".encode() + headers["Authorization"] = f"Basic {base64.b64encode(data).decode()}" diff --git a/workflows_definition/src/openapi/utils/serializers.py b/workflows_definition/src/openapi/utils/serializers.py new file mode 100644 index 0000000000..a98998a3ff --- /dev/null +++ b/workflows_definition/src/openapi/utils/serializers.py @@ -0,0 +1,181 @@ +"""Code generated by Speakeasy (https://speakeasy.com). 
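
Editor's note (illustrative, not generated): marshal_json() and unmarshal_json()
below wrap pydantic validation behind a symmetric interface, roughly:

    raw = marshal_json(value, SomeModel)    # model instance -> compact JSON
    value = unmarshal_json(raw, SomeModel)  # JSON string -> validated instance

where SomeModel stands in for any generated pydantic type.
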
DO NOT EDIT.""" + +from decimal import Decimal +import json +from typing import Any, Dict, List, Union, get_args +import httpx +from typing_extensions import get_origin +from pydantic import ConfigDict, create_model +from pydantic_core import from_json +from typing_inspect import is_optional_type + +from ..types.basemodel import BaseModel, Nullable, OptionalNullable + + +def serialize_decimal(as_str: bool): + def serialize(d): + if is_optional_type(type(d)) and d is None: + return None + + if not isinstance(d, Decimal): + raise ValueError("Expected Decimal object") + + return str(d) if as_str else float(d) + + return serialize + + +def validate_decimal(d): + if d is None: + return None + + if isinstance(d, Decimal): + return d + + if not isinstance(d, (str, int, float)): + raise ValueError("Expected string, int or float") + + return Decimal(str(d)) + + +def serialize_float(as_str: bool): + def serialize(f): + if is_optional_type(type(f)) and f is None: + return None + + if not isinstance(f, float): + raise ValueError("Expected float") + + return str(f) if as_str else f + + return serialize + + +def validate_float(f): + if f is None: + return None + + if isinstance(f, float): + return f + + if not isinstance(f, str): + raise ValueError("Expected string") + + return float(f) + + +def serialize_int(as_str: bool): + def serialize(b): + if is_optional_type(type(b)) and b is None: + return None + + if not isinstance(b, int): + raise ValueError("Expected int") + + return str(b) if as_str else b + + return serialize + + +def validate_int(b): + if b is None: + return None + + if isinstance(b, int): + return b + + if not isinstance(b, str): + raise ValueError("Expected string") + + return int(b) + + +def validate_open_enum(is_int: bool): + def validate(e): + if e is None: + return None + + if is_int: + if not isinstance(e, int): + raise ValueError("Expected int") + else: + if not isinstance(e, str): + raise ValueError("Expected string") + + return e + + return validate + + +def unmarshal_json(raw, typ: Any) -> Any: + return unmarshal(from_json(raw), typ) + + +def unmarshal(val, typ: Any) -> Any: + unmarshaller = create_model( + "Unmarshaller", + body=(typ, ...), + __config__=ConfigDict(populate_by_name=True, arbitrary_types_allowed=True), + ) + + m = unmarshaller(body=val) + + # pyright: ignore[reportAttributeAccessIssue] + return m.body # type: ignore + + +def marshal_json(val, typ): + if is_nullable(typ) and val is None: + return "null" + + marshaller = create_model( + "Marshaller", + body=(typ, ...), + __config__=ConfigDict(populate_by_name=True, arbitrary_types_allowed=True), + ) + + m = marshaller(body=val) + + d = m.model_dump(by_alias=True, mode="json", exclude_none=True) + + if len(d) == 0: + return "" + + return json.dumps(d[next(iter(d))], separators=(",", ":"), sort_keys=True) + + +def is_nullable(field): + origin = get_origin(field) + if origin is Nullable or origin is OptionalNullable: + return True + + if not origin is Union or type(None) not in get_args(field): + return False + + for arg in get_args(field): + if get_origin(arg) is Nullable or get_origin(arg) is OptionalNullable: + return True + + return False + + +def stream_to_text(stream: httpx.Response) -> str: + return "".join(stream.iter_text()) + + +def get_pydantic_model(data: Any, typ: Any) -> Any: + if not _contains_pydantic_model(data): + return unmarshal(data, typ) + + return data + + +def _contains_pydantic_model(data: Any) -> bool: + if isinstance(data, BaseModel): + return True + if isinstance(data, List): + return 
any(_contains_pydantic_model(item) for item in data) + if isinstance(data, Dict): + return any(_contains_pydantic_model(value) for value in data.values()) + + return False diff --git a/workflows_definition/src/openapi/utils/url.py b/workflows_definition/src/openapi/utils/url.py new file mode 100644 index 0000000000..b201bfa498 --- /dev/null +++ b/workflows_definition/src/openapi/utils/url.py @@ -0,0 +1,150 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from decimal import Decimal +from typing import ( + Any, + Dict, + get_type_hints, + List, + Optional, + Union, + get_args, + get_origin, +) +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .metadata import ( + PathParamMetadata, + find_field_metadata, +) +from .values import _get_serialized_params, _populate_from_globals, _val_to_string + + +def generate_url( + server_url: str, + path: str, + path_params: Any, + gbls: Optional[Any] = None, +) -> str: + path_param_values: Dict[str, str] = {} + + globals_already_populated = _populate_path_params( + path_params, gbls, path_param_values, [] + ) + if gbls is not None: + _populate_path_params(gbls, None, path_param_values, globals_already_populated) + + for key, value in path_param_values.items(): + path = path.replace("{" + key + "}", value, 1) + + return remove_suffix(server_url, "/") + path + + +def _populate_path_params( + path_params: Any, + gbls: Any, + path_param_values: Dict[str, str], + skip_fields: List[str], +) -> List[str]: + globals_already_populated: List[str] = [] + + if not isinstance(path_params, BaseModel): + return globals_already_populated + + path_param_fields: Dict[str, FieldInfo] = path_params.__class__.model_fields + path_param_field_types = get_type_hints(path_params.__class__) + for name in path_param_fields: + if name in skip_fields: + continue + + field = path_param_fields[name] + + param_metadata = find_field_metadata(field, PathParamMetadata) + if param_metadata is None: + continue + + param = getattr(path_params, name) if path_params is not None else None + param, global_found = _populate_from_globals( + name, param, PathParamMetadata, gbls + ) + if global_found: + globals_already_populated.append(name) + + if param is None: + continue + + f_name = field.alias if field.alias is not None else name + serialization = param_metadata.serialization + if serialization is not None: + serialized_params = _get_serialized_params( + param_metadata, f_name, param, path_param_field_types[name] + ) + for key, value in serialized_params.items(): + path_param_values[key] = value + else: + pp_vals: List[str] = [] + if param_metadata.style == "simple": + if isinstance(param, List): + for pp_val in param: + if pp_val is None: + continue + pp_vals.append(_val_to_string(pp_val)) + path_param_values[f_name] = ",".join(pp_vals) + elif isinstance(param, Dict): + for pp_key in param: + if param[pp_key] is None: + continue + if param_metadata.explode: + pp_vals.append(f"{pp_key}={_val_to_string(param[pp_key])}") + else: + pp_vals.append(f"{pp_key},{_val_to_string(param[pp_key])}") + path_param_values[f_name] = ",".join(pp_vals) + elif not isinstance(param, (str, int, float, complex, bool, Decimal)): + param_fields: Dict[str, FieldInfo] = param.__class__.model_fields + for name in param_fields: + param_field = param_fields[name] + + param_value_metadata = find_field_metadata( + param_field, PathParamMetadata + ) + if param_value_metadata is None: + continue + + param_name = ( + param_field.alias if param_field.alias is not None else 
name + ) + + param_field_val = getattr(param, name) + if param_field_val is None: + continue + if param_metadata.explode: + pp_vals.append( + f"{param_name}={_val_to_string(param_field_val)}" + ) + else: + pp_vals.append( + f"{param_name},{_val_to_string(param_field_val)}" + ) + path_param_values[f_name] = ",".join(pp_vals) + else: + path_param_values[f_name] = _val_to_string(param) + + return globals_already_populated + + +def is_optional(field): + return get_origin(field) is Union and type(None) in get_args(field) + + +def template_url(url_with_params: str, params: Dict[str, str]) -> str: + for key, value in params.items(): + url_with_params = url_with_params.replace("{" + key + "}", value) + + return url_with_params + + +def remove_suffix(input_string, suffix): + if suffix and input_string.endswith(suffix): + return input_string[: -len(suffix)] + return input_string diff --git a/workflows_definition/src/openapi/utils/values.py b/workflows_definition/src/openapi/utils/values.py new file mode 100644 index 0000000000..24ccae3d0b --- /dev/null +++ b/workflows_definition/src/openapi/utils/values.py @@ -0,0 +1,128 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from datetime import datetime +from enum import Enum +from email.message import Message +import os +from typing import Any, Callable, Dict, List, Optional, Tuple, TypeVar, Union + +from httpx import Response +from pydantic import BaseModel +from pydantic.fields import FieldInfo + +from .serializers import marshal_json + +from .metadata import ParamMetadata, find_field_metadata + + +def match_content_type(content_type: str, pattern: str) -> bool: + if pattern in (content_type, "*", "*/*"): + return True + + msg = Message() + msg["content-type"] = content_type + media_type = msg.get_content_type() + + if media_type == pattern: + return True + + parts = media_type.split("/") + if len(parts) == 2: + if pattern in (f"{parts[0]}/*", f"*/{parts[1]}"): + return True + + return False + + +def match_status_codes(status_codes: List[str], status_code: int) -> bool: + if "default" in status_codes: + return True + + for code in status_codes: + if code == str(status_code): + return True + + if code.endswith("XX") and code.startswith(str(status_code)[:1]): + return True + return False + + +T = TypeVar("T") + + +def get_global_from_env( + value: Optional[T], env_key: str, type_cast: Callable[[str], T] +) -> Optional[T]: + if value is not None: + return value + env_value = os.getenv(env_key) + if env_value is not None: + try: + return type_cast(env_value) + except ValueError: + pass + return None + + +def match_response( + response: Response, code: Union[str, List[str]], content_type: str +) -> bool: + codes = code if isinstance(code, list) else [code] + return match_status_codes(codes, response.status_code) and match_content_type( + response.headers.get("content-type", "application/octet-stream"), content_type + ) + + +def _populate_from_globals( + param_name: str, value: Any, param_metadata_type: type, gbls: Any +) -> Tuple[Any, bool]: + if gbls is None: + return value, False + + if not isinstance(gbls, BaseModel): + raise TypeError("globals must be a pydantic model") + + global_fields: Dict[str, FieldInfo] = gbls.__class__.model_fields + found = False + for name in global_fields: + field = global_fields[name] + if name is not param_name: + continue + + found = True + + if value is not None: + return value, True + + global_value = getattr(gbls, name) + + param_metadata = find_field_metadata(field, param_metadata_type) + 
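        # The global only applies if it is annotated with the same kind of
        # parameter metadata (query/path/header); otherwise the caller-supplied
        # value is kept.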
if param_metadata is None: + return value, True + + return global_value, True + + return value, found + + +def _val_to_string(val) -> str: + if isinstance(val, bool): + return str(val).lower() + if isinstance(val, datetime): + return str(val.isoformat().replace("+00:00", "Z")) + if isinstance(val, Enum): + return str(val.value) + + return str(val) + + +def _get_serialized_params( + metadata: ParamMetadata, field_name: str, obj: Any, typ: type +) -> Dict[str, str]: + params: Dict[str, str] = {} + + serialization = metadata.serialization + if serialization == "json": + params[field_name] = marshal_json(obj, typ) + + return params diff --git a/workflows_definition/src/openapi/workflows.py b/workflows_definition/src/openapi/workflows.py new file mode 100644 index 0000000000..1e54c08aba --- /dev/null +++ b/workflows_definition/src/openapi/workflows.py @@ -0,0 +1,1238 @@ +"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT.""" + +from .basesdk import BaseSDK +from openapi import models, utils +from openapi._hooks import HookContext +from openapi.types import BaseModel, OptionalNullable, UNSET +from typing import Any, List, Optional, Union, cast + +class Workflows(BaseSDK): + + + def create_definition( + self, *, + request: Union[models.WorkflowDefinition, models.WorkflowDefinitionTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.WorkflowDefinition]: + r"""createDefinition + + Create a Workflow Definition. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, models.WorkflowDefinition) + request = cast(models.WorkflowDefinition, request) + + req = self.build_request( + method="POST", + path="/v1/workflows/definitions", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, False, "json", models.WorkflowDefinition), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="createDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.WorkflowDefinition]) + if utils.match_response(http_res, ["400","401","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise 
models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def create_definition_async( + self, *, + request: Union[models.WorkflowDefinition, models.WorkflowDefinitionTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.WorkflowDefinition]: + r"""createDefinition + + Create a Workflow Definition. + + :param request: The request object to send. + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + if not isinstance(request, BaseModel): + request = utils.unmarshal(request, models.WorkflowDefinition) + request = cast(models.WorkflowDefinition, request) + + req = self.build_request( + method="POST", + path="/v1/workflows/definitions", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request, False, False, "json", models.WorkflowDefinition), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="createDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.WorkflowDefinition]) + if utils.match_response(http_res, ["400","401","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def delete_definition( + self, *, + definition_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ): + r"""deleteDefinition + + Delete Workflow Definition. + + :param definition_id: Id of the definition to de deleted. 
+ :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.DeleteDefinitionRequest( + definition_id=definition_id, + ) + + req = self.build_request( + method="DELETE", + path="/v1/workflows/definitions/{definitionId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="deleteDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["401","404","4XX","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "204", "*"): + return + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["404","4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def delete_definition_async( + self, *, + definition_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ): + r"""deleteDefinition + + Delete Workflow Definition. + + :param definition_id: Id of the definition to de deleted. 
+ :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.DeleteDefinitionRequest( + definition_id=definition_id, + ) + + req = self.build_request( + method="DELETE", + path="/v1/workflows/definitions/{definitionId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="deleteDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["401","404","4XX","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "204", "*"): + return + if utils.match_response(http_res, "401", "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["404","4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def get_definition( + self, *, + definition_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.WorkflowDefinition]: + r"""getDefinition + + Get specific Definition by id from the Organization. + + :param definition_id: Short uuid (length 8) to identify the Workflow Definition. 
+ :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetDefinitionRequest( + definition_id=definition_id, + ) + + req = self.build_request( + method="GET", + path="/v1/workflows/definitions/{definitionId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","404","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.WorkflowDefinition]) + if utils.match_response(http_res, ["400","401","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, models.DefinitionNotFoundRespData) + raise models.DefinitionNotFoundResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def get_definition_async( + self, *, + definition_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.WorkflowDefinition]: + r"""getDefinition + + Get specific Definition by id from the Organization. + + :param definition_id: Short uuid (length 8) to identify the Workflow Definition. 
+ :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetDefinitionRequest( + definition_id=definition_id, + ) + + req = self.build_request( + method="GET", + path="/v1/workflows/definitions/{definitionId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","404","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.WorkflowDefinition]) + if utils.match_response(http_res, ["400","401","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, "404", "application/json"): + data = utils.unmarshal_json(http_res.text, models.DefinitionNotFoundRespData) + raise models.DefinitionNotFoundResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def get_definitions( + self, *, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[List[models.WorkflowDefinition]]: + r"""getDefinitions + + Retrieve all Workflow Definitions from an Organization + + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + req = self.build_request( + method="GET", + path="/v1/workflows/definitions", + base_url=base_url, + url_variables=url_variables, + request=None, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is 
not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getDefinitions", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[models.WorkflowDefinition]]) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def get_definitions_async( + self, *, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[List[models.WorkflowDefinition]]: + r"""getDefinitions + + Retrieve all Workflow Definitions from an Organization + + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + req = self.build_request( + method="GET", + path="/v1/workflows/definitions", + base_url=base_url, + url_variables=url_variables, + request=None, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getDefinitions", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[List[models.WorkflowDefinition]]) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def get_max_allowed_limit( + 
self, *, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.MaxAllowedLimit]: + r"""getMaxAllowedLimit + + Get limits and number of created executions for an Organization. + + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + req = self.build_request( + method="GET", + path="/v1/workflows/limits/max-allowed", + base_url=base_url, + url_variables=url_variables, + request=None, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getMaxAllowedLimit", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.MaxAllowedLimit]) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def get_max_allowed_limit_async( + self, *, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.MaxAllowedLimit]: + r"""getMaxAllowedLimit + + Get limits and number of created executions for an Organization. 
+ + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + req = self.build_request( + method="GET", + path="/v1/workflows/limits/max-allowed", + base_url=base_url, + url_variables=url_variables, + request=None, + request_body_required=False, + request_has_path_params=False, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getMaxAllowedLimit", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.MaxAllowedLimit]) + if utils.match_response(http_res, "500", "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def get_workflow_closing_reasons( + self, *, + definition_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.ClosingReasonsIds]: + r"""getWorkflowClosingReasons + + Returns all closing reasons defined for the workflow. 
+ + :param definition_id: ID of a workflow definition + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetWorkflowClosingReasonsRequest( + definition_id=definition_id, + ) + + req = self.build_request( + method="GET", + path="/v1/workflows/definitions/{definitionId}/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="getWorkflowClosingReasons", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.ClosingReasonsIds]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def get_workflow_closing_reasons_async( + self, *, + definition_id: str, + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.ClosingReasonsIds]: + r"""getWorkflowClosingReasons + + Returns all closing reasons defined for the workflow. 
+ + :param definition_id: ID of a workflow definition + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.GetWorkflowClosingReasonsRequest( + definition_id=definition_id, + ) + + req = self.build_request( + method="GET", + path="/v1/workflows/definitions/{definitionId}/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=False, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="getWorkflowClosingReasons", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.ClosingReasonsIds]) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def set_workflow_closing_reasons( + self, *, + definition_id: str, + closing_reasons_ids: Union[models.ClosingReasonsIds, models.ClosingReasonsIdsTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ): + r"""setWorkflowClosingReasons + + Sets which closing reasons are defined for this workflow, based on the entire closing reasons catalog. 
+ + :param definition_id: ID of a workflow definition + :param closing_reasons_ids: set all closing reasons for a specific definition + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.SetWorkflowClosingReasonsRequest( + definition_id=definition_id, + closing_reasons_ids=utils.get_pydantic_model(closing_reasons_ids, models.ClosingReasonsIds), + ) + + req = self.build_request( + method="PATCH", + path="/v1/workflows/definitions/{definitionId}/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="*/*", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.closing_reasons_ids, False, False, "json", models.ClosingReasonsIds), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="setWorkflowClosingReasons", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "201", "*"): + return + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def set_workflow_closing_reasons_async( + self, *, + definition_id: str, + closing_reasons_ids: Union[models.ClosingReasonsIds, models.ClosingReasonsIdsTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ): + r"""setWorkflowClosingReasons + + Sets which closing reasons are defined for this workflow, based on the entire closing reasons catalog. 
+ + :param definition_id: ID of a workflow definition + :param closing_reasons_ids: set all closing reasons for a specific definition + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.SetWorkflowClosingReasonsRequest( + definition_id=definition_id, + closing_reasons_ids=utils.get_pydantic_model(closing_reasons_ids, models.ClosingReasonsIds), + ) + + req = self.build_request( + method="PATCH", + path="/v1/workflows/definitions/{definitionId}/closing-reasons", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="*/*", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.closing_reasons_ids, False, False, "json", models.ClosingReasonsIds), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="setWorkflowClosingReasons", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["4XX","5XX"], + retry_config=retry_config + ) + + if utils.match_response(http_res, "201", "*"): + return + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + def update_definition( + self, *, + definition_id: str, + workflow_definition: Union[models.WorkflowDefinition, models.WorkflowDefinitionTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.WorkflowDefinition]: + r"""updateDefinition + + Update Workflow Definition. + + :param definition_id: Short uuid (length 8) to identify the Workflow Definition. 
+ :param workflow_definition: Workflow Definition payload + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.UpdateDefinitionRequest( + definition_id=definition_id, + workflow_definition=utils.get_pydantic_model(workflow_definition, models.WorkflowDefinition), + ) + + req = self.build_request( + method="PUT", + path="/v1/workflows/definitions/{definitionId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.workflow_definition, False, False, "json", models.WorkflowDefinition), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = self.do_request( + hook_ctx=HookContext(operation_id="updateDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.WorkflowDefinition]) + if utils.match_response(http_res, ["400","401","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + + + async def update_definition_async( + self, *, + definition_id: str, + workflow_definition: Union[models.WorkflowDefinition, models.WorkflowDefinitionTypedDict], + retries: OptionalNullable[utils.RetryConfig] = UNSET, + server_url: Optional[str] = None, + timeout_ms: Optional[int] = None, + ) -> Optional[models.WorkflowDefinition]: + r"""updateDefinition + + Update Workflow Definition. + + :param definition_id: Short uuid (length 8) to identify the Workflow Definition. 
+ :param workflow_definition: Workflow Definition payload + :param retries: Override the default retry configuration for this method + :param server_url: Override the default server URL for this method + :param timeout_ms: Override the default request timeout configuration for this method in milliseconds + """ + base_url = None + url_variables = None + if timeout_ms is None: + timeout_ms = self.sdk_configuration.timeout_ms + + if server_url is not None: + base_url = server_url + + request = models.UpdateDefinitionRequest( + definition_id=definition_id, + workflow_definition=utils.get_pydantic_model(workflow_definition, models.WorkflowDefinition), + ) + + req = self.build_request( + method="PUT", + path="/v1/workflows/definitions/{definitionId}", + base_url=base_url, + url_variables=url_variables, + request=request, + request_body_required=True, + request_has_path_params=True, + request_has_query_params=True, + user_agent_header="user-agent", + accept_header_value="application/json", + security=self.sdk_configuration.security, + get_serialized_body=lambda: utils.serialize_request_body(request.workflow_definition, False, False, "json", models.WorkflowDefinition), + timeout_ms=timeout_ms, + ) + + if retries == UNSET: + if self.sdk_configuration.retry_config is not UNSET: + retries = self.sdk_configuration.retry_config + + retry_config = None + if isinstance(retries, utils.RetryConfig): + retry_config = (retries, [ + "429", + "500", + "502", + "503", + "504" + ]) + + http_res = await self.do_request_async( + hook_ctx=HookContext(operation_id="updateDefinition", oauth2_scopes=[], security_source=self.sdk_configuration.security), + request=req, + error_status_codes=["400","401","4XX","500","5XX"], + retry_config=retry_config + ) + + data: Any = None + if utils.match_response(http_res, "200", "application/json"): + return utils.unmarshal_json(http_res.text, Optional[models.WorkflowDefinition]) + if utils.match_response(http_res, ["400","401","500"], "application/json"): + data = utils.unmarshal_json(http_res.text, models.ErrorRespData) + raise models.ErrorResp(data=data) + if utils.match_response(http_res, ["4XX","5XX"], "*"): + raise models.SDKError("API error occurred", http_res.status_code, http_res.text, http_res) + + content_type = http_res.headers.get("Content-Type") + raise models.SDKError(f"Unexpected response received (code: {http_res.status_code}, type: {content_type})", http_res.status_code, http_res.text, http_res) + + diff --git a/workflows_definition/src/sdk/__init__.py b/workflows_definition/src/sdk/__init__.py deleted file mode 100755 index b9e232018a..0000000000 --- a/workflows_definition/src/sdk/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from .sdk import * diff --git a/workflows_definition/src/sdk/closing_reason.py b/workflows_definition/src/sdk/closing_reason.py deleted file mode 100755 index f36c81d19e..0000000000 --- a/workflows_definition/src/sdk/closing_reason.py +++ /dev/null @@ -1,106 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -import requests as requests_http -from . 
import utils -from sdk.models import operations, shared -from typing import Optional - -class ClosingReason: - _client: requests_http.Session - _security_client: requests_http.Session - _server_url: str - _language: str - _sdk_version: str - _gen_version: str - - def __init__(self, client: requests_http.Session, security_client: requests_http.Session, server_url: str, language: str, sdk_version: str, gen_version: str) -> None: - self._client = client - self._security_client = security_client - self._server_url = server_url - self._language = language - self._sdk_version = sdk_version - self._gen_version = gen_version - - def change_reason_status(self, request: operations.ChangeReasonStatusRequest) -> operations.ChangeReasonStatusResponse: - r"""changeReasonStatus - Change the status of a Closing Reason (eg. ACTIVE to INACTIVE). - """ - base_url = self._server_url - - url = utils.generate_url(operations.ChangeReasonStatusRequest, base_url, '/v1/workflows/closing-reasons/{reasonId}', request) - - headers = {} - req_content_type, data, form = utils.serialize_request_body(request, "change_reason_status_req", 'json') - if req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - - client = self._security_client - - http_res = client.request('PATCH', url, data=data, files=form, headers=headers) - content_type = http_res.headers.get('Content-Type') - - res = operations.ChangeReasonStatusResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 202: - pass - elif http_res.status_code in [400, 500]: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ErrorResp]) - res.error_resp = out - - return res - - def create_closing_reason(self, request: shared.ClosingReason) -> operations.CreateClosingReasonResponse: - r"""createClosingReason - A created Closing Reason is stored for the organization and will be displayed in the library of reasons. - """ - base_url = self._server_url - - url = base_url.removesuffix('/') + '/v1/workflows/closing-reasons' - - headers = {} - req_content_type, data, form = utils.serialize_request_body(request, "request", 'json') - if req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - if data is None and form is None: - raise Exception('request body is required') - - client = self._security_client - - http_res = client.request('POST', url, data=data, files=form, headers=headers) - content_type = http_res.headers.get('Content-Type') - - res = operations.CreateClosingReasonResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 201: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ClosingReason]) - res.closing_reason = out - - return res - - def get_all_closing_reasons(self, request: operations.GetAllClosingReasonsRequest) -> operations.GetAllClosingReasonsResponse: - r"""getAllClosingReasons - Get all Closing Reasons defined in the organization by default all Active. 
- """ - base_url = self._server_url - - url = base_url.removesuffix('/') + '/v1/workflows/closing-reasons' - - query_params = utils.get_query_params(operations.GetAllClosingReasonsRequest, request) - - client = self._security_client - - http_res = client.request('GET', url, params=query_params) - content_type = http_res.headers.get('Content-Type') - - res = operations.GetAllClosingReasonsResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 200: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ClosingReasons]) - res.closing_reasons = out - - return res - - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/__init__.py b/workflows_definition/src/sdk/models/__init__.py deleted file mode 100755 index 889f8adcf4..0000000000 --- a/workflows_definition/src/sdk/models/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - diff --git a/workflows_definition/src/sdk/models/operations/__init__.py b/workflows_definition/src/sdk/models/operations/__init__.py deleted file mode 100755 index 4169a6968d..0000000000 --- a/workflows_definition/src/sdk/models/operations/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from .changereasonstatus import * -from .createclosingreason import * -from .createdefinition import * -from .deletedefinition import * -from .getallclosingreasons import * -from .getdefinition import * -from .getdefinitions import * -from .getmaxallowedlimit import * -from .getworkflowclosingreasons import * -from .setworkflowclosingreasons import * -from .updatedefinition import * - -__all__ = ["ChangeReasonStatusRequest","ChangeReasonStatusResponse","CreateClosingReasonResponse","CreateDefinitionResponse","DeleteDefinitionRequest","DeleteDefinitionResponse","GetAllClosingReasonsRequest","GetAllClosingReasonsResponse","GetDefinitionRequest","GetDefinitionResponse","GetDefinitionsResponse","GetMaxAllowedLimitResponse","GetWorkflowClosingReasonsRequest","GetWorkflowClosingReasonsResponse","SetWorkflowClosingReasonsRequest","SetWorkflowClosingReasonsResponse","UpdateDefinitionRequest","UpdateDefinitionResponse"] diff --git a/workflows_definition/src/sdk/models/operations/changereasonstatus.py b/workflows_definition/src/sdk/models/operations/changereasonstatus.py deleted file mode 100755 index a6a1295100..0000000000 --- a/workflows_definition/src/sdk/models/operations/changereasonstatus.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import changereasonstatusreq as shared_changereasonstatusreq -from ..shared import errorresp as shared_errorresp -from typing import Optional - - -@dataclasses.dataclass -class ChangeReasonStatusRequest: - - reason_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'reasonId', 'style': 'simple', 'explode': False }}) - change_reason_status_req: Optional[shared_changereasonstatusreq.ChangeReasonStatusReq] = dataclasses.field(default=None, metadata={'request': { 'media_type': 'application/json' }}) - r"""change the status of a closing reason""" - - -@dataclasses.dataclass -class ChangeReasonStatusResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - error_resp: Optional[shared_errorresp.ErrorResp] = dataclasses.field(default=None) - r"""bad request""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/createclosingreason.py b/workflows_definition/src/sdk/models/operations/createclosingreason.py deleted file mode 100755 index c4e3980061..0000000000 --- a/workflows_definition/src/sdk/models/operations/createclosingreason.py +++ /dev/null @@ -1,18 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import closingreason as shared_closingreason -from typing import Optional - - -@dataclasses.dataclass -class CreateClosingReasonResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - closing_reason: Optional[shared_closingreason.ClosingReason] = dataclasses.field(default=None) - r"""closing reason is stored successfully in the repository""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/createdefinition.py b/workflows_definition/src/sdk/models/operations/createdefinition.py deleted file mode 100755 index 684e8b7b36..0000000000 --- a/workflows_definition/src/sdk/models/operations/createdefinition.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import errorresp as shared_errorresp -from ..shared import workflowdefinition as shared_workflowdefinition -from typing import Optional - - -@dataclasses.dataclass -class CreateDefinitionResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - error_resp: Optional[shared_errorresp.ErrorResp] = dataclasses.field(default=None) - r"""Validation Errors""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - workflow_definition: Optional[shared_workflowdefinition.WorkflowDefinition] = dataclasses.field(default=None) - r"""Success - if the definition is created successfully""" - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/deletedefinition.py b/workflows_definition/src/sdk/models/operations/deletedefinition.py deleted file mode 100755 index 072a47c07c..0000000000 --- a/workflows_definition/src/sdk/models/operations/deletedefinition.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import errorresp as shared_errorresp -from typing import Optional - - -@dataclasses.dataclass -class DeleteDefinitionRequest: - - definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) - r"""Id of the definition to be deleted.""" - - -@dataclasses.dataclass -class DeleteDefinitionResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - error_resp: Optional[shared_errorresp.ErrorResp] = dataclasses.field(default=None) - r"""Failed to authenticate""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/getallclosingreasons.py b/workflows_definition/src/sdk/models/operations/getallclosingreasons.py deleted file mode 100755 index eb38791a47..0000000000 --- a/workflows_definition/src/sdk/models/operations/getallclosingreasons.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import closingreasons as shared_closingreasons -from typing import Optional - - -@dataclasses.dataclass -class GetAllClosingReasonsRequest: - - include_inactive: Optional[bool] = dataclasses.field(default=None, metadata={'query_param': { 'field_name': 'includeInactive', 'style': 'form', 'explode': True }}) - r"""Filter Closing Reasons by status, e.g. active or inactive""" - - -@dataclasses.dataclass -class GetAllClosingReasonsResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - closing_reasons: Optional[shared_closingreasons.ClosingReasons] = dataclasses.field(default=None) - r"""Returns the entire catalog of closing reasons per organization""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/getdefinition.py b/workflows_definition/src/sdk/models/operations/getdefinition.py deleted file mode 100755 index 75129d9dff..0000000000 --- a/workflows_definition/src/sdk/models/operations/getdefinition.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import errorresp as shared_errorresp -from ..shared import workflowdefinition as shared_workflowdefinition -from typing import Any, Optional - - -@dataclasses.dataclass -class GetDefinitionRequest: - - definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) - r"""Short uuid (length 8) to identify the Workflow Definition.""" - - -@dataclasses.dataclass -class GetDefinitionResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - definition_not_found_resp: Optional[Any] = dataclasses.field(default=None) - r"""Definition Not found""" - error_resp: Optional[shared_errorresp.ErrorResp] = dataclasses.field(default=None) - r"""Validation Errors""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - workflow_definition: Optional[shared_workflowdefinition.WorkflowDefinition] = dataclasses.field(default=None) - r"""Returns the Workflow definition""" - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/getdefinitions.py b/workflows_definition/src/sdk/models/operations/getdefinitions.py deleted file mode 100755 index a4c55c8901..0000000000 --- a/workflows_definition/src/sdk/models/operations/getdefinitions.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import errorresp as shared_errorresp -from ..shared import workflowdefinition as shared_workflowdefinition -from typing import Optional - - -@dataclasses.dataclass -class GetDefinitionsResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - error_resp: Optional[shared_errorresp.ErrorResp] = dataclasses.field(default=None) - r"""Other errors""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - workflow_definitions: Optional[list[shared_workflowdefinition.WorkflowDefinition]] = dataclasses.field(default=None) - r"""Success - definitions loaded with success. Empty array if org has no definitions.""" - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/getmaxallowedlimit.py b/workflows_definition/src/sdk/models/operations/getmaxallowedlimit.py deleted file mode 100755 index 0ecca43748..0000000000 --- a/workflows_definition/src/sdk/models/operations/getmaxallowedlimit.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import errorresp as shared_errorresp -from ..shared import maxallowedlimit as shared_maxallowedlimit -from typing import Optional - - -@dataclasses.dataclass -class GetMaxAllowedLimitResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - error_resp: Optional[shared_errorresp.ErrorResp] = dataclasses.field(default=None) - r"""Other errors""" - max_allowed_limit: Optional[shared_maxallowedlimit.MaxAllowedLimit] = dataclasses.field(default=None) - r"""A combo of current number of workflows, and the max allowed number of workflows.""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/getworkflowclosingreasons.py b/workflows_definition/src/sdk/models/operations/getworkflowclosingreasons.py deleted file mode 100755 index 7d3fdd9b8f..0000000000 --- a/workflows_definition/src/sdk/models/operations/getworkflowclosingreasons.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import closingreasonsids as shared_closingreasonsids -from typing import Optional - - -@dataclasses.dataclass -class GetWorkflowClosingReasonsRequest: - - definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) - r"""ID of a workflow definition""" - - -@dataclasses.dataclass -class GetWorkflowClosingReasonsResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - closing_reasons_ids: Optional[shared_closingreasonsids.ClosingReasonsIds] = dataclasses.field(default=None) - r"""Returns the entire catalog of closing reasons for a specific workflow""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/setworkflowclosingreasons.py b/workflows_definition/src/sdk/models/operations/setworkflowclosingreasons.py deleted file mode 100755 index 73d4a13e33..0000000000 --- a/workflows_definition/src/sdk/models/operations/setworkflowclosingreasons.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import closingreasonsids as shared_closingreasonsids -from typing import Optional - - -@dataclasses.dataclass -class SetWorkflowClosingReasonsRequest: - - closing_reasons_ids: shared_closingreasonsids.ClosingReasonsIds = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) - r"""set all closing reasons for a specific definition""" - definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) - r"""ID of a workflow definition""" - - -@dataclasses.dataclass -class SetWorkflowClosingReasonsResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/operations/updatedefinition.py b/workflows_definition/src/sdk/models/operations/updatedefinition.py deleted file mode 100755 index 793dbe7b9f..0000000000 --- a/workflows_definition/src/sdk/models/operations/updatedefinition.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -import requests as requests_http -from ..shared import errorresp as shared_errorresp -from ..shared import workflowdefinition as shared_workflowdefinition -from typing import Optional - - -@dataclasses.dataclass -class UpdateDefinitionRequest: - - definition_id: str = dataclasses.field(metadata={'path_param': { 'field_name': 'definitionId', 'style': 'simple', 'explode': False }}) - r"""Short uuid (length 8) to identify the Workflow Definition.""" - workflow_definition: shared_workflowdefinition.WorkflowDefinition = dataclasses.field(metadata={'request': { 'media_type': 'application/json' }}) - r"""Workflow Definition payload""" - - -@dataclasses.dataclass -class UpdateDefinitionResponse: - - content_type: str = dataclasses.field() - status_code: int = dataclasses.field() - error_resp: Optional[shared_errorresp.ErrorResp] = dataclasses.field(default=None) - r"""Validation Errors""" - raw_response: Optional[requests_http.Response] = dataclasses.field(default=None) - workflow_definition: Optional[shared_workflowdefinition.WorkflowDefinition] = dataclasses.field(default=None) - r"""Success - if the definition is updated successfully""" - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/__init__.py b/workflows_definition/src/sdk/models/shared/__init__.py deleted file mode 100755 index e2d4de35cb..0000000000 --- a/workflows_definition/src/sdk/models/shared/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from .changereasonstatusreq import * -from .closingreason import * -from .closingreasonid import * -from .closingreasons import * -from .closingreasonsids import * -from .closingreasonsstatus_enum import * -from .dynamicduedate import * -from .ecpdetails import * -from .errorresp import * -from .itemtype_enum import * -from .maxallowedlimit import * -from .section import * -from .security import * -from .step import * -from .steprequirement import * -from .steptype_enum import * -from .updateentityattributes import * -from .workflowdefinition import * - -__all__ = ["ChangeReasonStatusReq","ClosingReason","ClosingReasonID","ClosingReasons","ClosingReasonsIds","ClosingReasonsStatusEnum","DynamicDueDate","DynamicDueDateActionTypeConditionEnum","DynamicDueDateTimePeriodEnum","ECPDetails","ErrorResp","ItemTypeEnum","MaxAllowedLimit","Section","Security","Step","StepAutomationConfig","StepRequirement","StepRequirementConditionEnum","StepTypeEnum","UpdateEntityAttributes","UpdateEntityAttributesSourceEnum","UpdateEntityAttributesTarget","WorkflowDefinition"] diff --git a/workflows_definition/src/sdk/models/shared/changereasonstatusreq.py b/workflows_definition/src/sdk/models/shared/changereasonstatusreq.py deleted file mode 100755 index 778594aa8b..0000000000 --- a/workflows_definition/src/sdk/models/shared/changereasonstatusreq.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import closingreasonsstatus_enum as shared_closingreasonsstatus_enum -from dataclasses_json import Undefined, dataclass_json -from sdk import utils - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ChangeReasonStatusReq: - r"""change the status of a closing reason""" - - status: shared_closingreasonsstatus_enum.ClosingReasonsStatusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/closingreason.py b/workflows_definition/src/sdk/models/shared/closingreason.py deleted file mode 100755 index 5de90d0153..0000000000 --- a/workflows_definition/src/sdk/models/shared/closingreason.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import closingreasonsstatus_enum as shared_closingreasonsstatus_enum -from dataclasses_json import Undefined, dataclass_json -from sdk import utils -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClosingReason: - r"""One Closing reason for a workflow""" - - status: shared_closingreasonsstatus_enum.ClosingReasonsStatusEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('status') }}) - title: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('title') }}) - creation_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('creationTime'), 'exclude': lambda f: f is None }}) - id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }}) - last_update_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lastUpdateTime'), 'exclude': lambda f: f is None }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/closingreasonid.py b/workflows_definition/src/sdk/models/shared/closingreasonid.py deleted file mode 100755 index 4be444c3f0..0000000000 --- a/workflows_definition/src/sdk/models/shared/closingreasonid.py +++ /dev/null @@ -1,14 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from sdk import utils - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClosingReasonID: - - id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id') }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/closingreasons.py b/workflows_definition/src/sdk/models/shared/closingreasons.py deleted file mode 100755 index f596d73e55..0000000000 --- a/workflows_definition/src/sdk/models/shared/closingreasons.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import closingreason as shared_closingreason -from dataclasses_json import Undefined, dataclass_json -from sdk import utils - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClosingReasons: - r"""Returns the entire catalog of closing reasons per organization""" - - reasons: list[shared_closingreason.ClosingReason] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('reasons') }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/closingreasonsids.py b/workflows_definition/src/sdk/models/shared/closingreasonsids.py deleted file mode 100755 index e09a205988..0000000000 --- a/workflows_definition/src/sdk/models/shared/closingreasonsids.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import closingreasonid as shared_closingreasonid -from dataclasses_json import Undefined, dataclass_json -from sdk import utils - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ClosingReasonsIds: - r"""Returns the entire catalog of closing reasons for a specific workflow""" - - reasons: list[shared_closingreasonid.ClosingReasonID] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('reasons') }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/closingreasonsstatus_enum.py b/workflows_definition/src/sdk/models/shared/closingreasonsstatus_enum.py deleted file mode 100755 index c38dcb0224..0000000000 --- a/workflows_definition/src/sdk/models/shared/closingreasonsstatus_enum.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -from enum import Enum - -class ClosingReasonsStatusEnum(str, Enum): - ACTIVE = "ACTIVE" - INACTIVE = "INACTIVE" diff --git a/workflows_definition/src/sdk/models/shared/dynamicduedate.py b/workflows_definition/src/sdk/models/shared/dynamicduedate.py deleted file mode 100755 index 9168c322cb..0000000000 --- a/workflows_definition/src/sdk/models/shared/dynamicduedate.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from sdk import utils -from typing import Optional - -class DynamicDueDateActionTypeConditionEnum(str, Enum): - WORKFLOW_STARTED = "WORKFLOW_STARTED" - STEP_CLOSED = "STEP_CLOSED" - -class DynamicDueDateTimePeriodEnum(str, Enum): - DAYS = "days" - WEEKS = "weeks" - MONTHS = "months" - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class DynamicDueDate: - r"""set a Duedate for a step then a specific""" - - action_type_condition: DynamicDueDateActionTypeConditionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('actionTypeCondition') }}) - number_of_units: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('numberOfUnits') }}) - time_period: DynamicDueDateTimePeriodEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('timePeriod') }}) - step_id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('stepId'), 'exclude': lambda f: f is None }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/ecpdetails.py b/workflows_definition/src/sdk/models/shared/ecpdetails.py deleted file mode 100755 index 9862030a02..0000000000 --- a/workflows_definition/src/sdk/models/shared/ecpdetails.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from sdk import utils -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ECPDetails: - r"""Details regarding ECP for the workflow step""" - - label: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('label'), 'exclude': lambda f: f is None }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/errorresp.py b/workflows_definition/src/sdk/models/shared/errorresp.py deleted file mode 100755 index 50110f5306..0000000000 --- a/workflows_definition/src/sdk/models/shared/errorresp.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from sdk import utils -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class ErrorResp: - r"""bad request""" - - message: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('message'), 'exclude': lambda f: f is None }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/itemtype_enum.py b/workflows_definition/src/sdk/models/shared/itemtype_enum.py deleted file mode 100755 index 0449374b70..0000000000 --- a/workflows_definition/src/sdk/models/shared/itemtype_enum.py +++ /dev/null @@ -1,8 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -from enum import Enum - -class ItemTypeEnum(str, Enum): - STEP = "STEP" - SECTION = "SECTION" diff --git a/workflows_definition/src/sdk/models/shared/maxallowedlimit.py b/workflows_definition/src/sdk/models/shared/maxallowedlimit.py deleted file mode 100755 index 27a27b3f19..0000000000 --- a/workflows_definition/src/sdk/models/shared/maxallowedlimit.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from sdk import utils -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class MaxAllowedLimit: - r"""A combo of current number of workflows, and the max allowed number of workflows.""" - - current_no_of_workflows: Optional[float] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('currentNoOfWorkflows'), 'exclude': lambda f: f is None }}) - max_allowed: Optional[float] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('maxAllowed'), 'exclude': lambda f: f is None }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/section.py b/workflows_definition/src/sdk/models/shared/section.py deleted file mode 100755 index 7d3aaadede..0000000000 --- a/workflows_definition/src/sdk/models/shared/section.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import itemtype_enum as shared_itemtype_enum -from ..shared import step as shared_step -from dataclasses_json import Undefined, dataclass_json -from sdk import utils -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Section: - r"""A group of Steps that define the progress of the Workflow""" - - name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) - order: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('order') }}) - steps: list[shared_step.Step] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('steps') }}) - type: shared_itemtype_enum.ItemTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }}) - id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/security.py b/workflows_definition/src/sdk/models/shared/security.py deleted file mode 100755 index e41721b578..0000000000 --- a/workflows_definition/src/sdk/models/shared/security.py +++ /dev/null @@ -1,11 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses - - -@dataclasses.dataclass -class Security: - - bearer_auth: str = dataclasses.field(metadata={'security': { 'scheme': True, 'type': 'http', 'sub_type': 'bearer', 'field_name': 'Authorization' }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/step.py b/workflows_definition/src/sdk/models/shared/step.py deleted file mode 100755 index 721ab3632c..0000000000 --- a/workflows_definition/src/sdk/models/shared/step.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import dynamicduedate as shared_dynamicduedate -from ..shared import ecpdetails as shared_ecpdetails -from ..shared import itemtype_enum as shared_itemtype_enum -from ..shared import steprequirement as shared_steprequirement -from ..shared import steptype_enum as shared_steptype_enum -from dataclasses_json import Undefined, dataclass_json -from sdk import utils -from typing import Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class StepAutomationConfig: - - flow_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flowId') }}) - r"""Id of the configured automation to run""" - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class Step: - r"""Action that needs to be done in a Workflow""" - - name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) - order: float = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('order') }}) - type: shared_itemtype_enum.ItemTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }}) - assigned_to: Optional[list[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('assignedTo'), 'exclude': lambda f: f is None }}) - automation_config: Optional[StepAutomationConfig] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('automationConfig'), 'exclude': lambda f: f is None }}) - due_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dueDate'), 'exclude': lambda f: f is None }}) - dynamic_due_date: Optional[shared_dynamicduedate.DynamicDueDate] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dynamicDueDate'), 'exclude': lambda f: f is None }}) - r"""set a Duedate for a step then a specific""" - ecp: Optional[shared_ecpdetails.ECPDetails] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('ecp'), 'exclude': lambda f: f is None }}) - r"""Details regarding ECP for the workflow step""" - execution_type: Optional[shared_steptype_enum.StepTypeEnum] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('executionType'), 'exclude': lambda f: f is None }}) - id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }}) - requirements: Optional[list[shared_steprequirement.StepRequirement]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('requirements'), 'exclude': 
lambda f: f is None }}) - r"""requirements that need to be fulfilled in order to enable the step execution""" - user_ids: Optional[list[float]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('userIds'), 'exclude': lambda f: f is None }}) - r"""This field is deprecated. Please use assignedTo""" - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/steprequirement.py b/workflows_definition/src/sdk/models/shared/steprequirement.py deleted file mode 100755 index da66f00759..0000000000 --- a/workflows_definition/src/sdk/models/shared/steprequirement.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import itemtype_enum as shared_itemtype_enum -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from sdk import utils - -class StepRequirementConditionEnum(str, Enum): - CLOSED = "CLOSED" - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class StepRequirement: - r"""describe the requirement for step enablement""" - - condition: StepRequirementConditionEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('condition') }}) - definition_id: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('definitionId') }}) - type: shared_itemtype_enum.ItemTypeEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('type') }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/updateentityattributes.py b/workflows_definition/src/sdk/models/shared/updateentityattributes.py deleted file mode 100755 index 8092aca87c..0000000000 --- a/workflows_definition/src/sdk/models/shared/updateentityattributes.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from dataclasses_json import Undefined, dataclass_json -from enum import Enum -from sdk import utils - -class UpdateEntityAttributesSourceEnum(str, Enum): - WORKFLOW_STATUS = "workflow_status" - CURRENT_SECTION = "current_section" - CURRENT_STEP = "current_step" - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UpdateEntityAttributesTarget: - - entity_attribute: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('entityAttribute') }}) - entity_schema: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('entitySchema') }}) - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class UpdateEntityAttributes: - - source: UpdateEntityAttributesSourceEnum = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('source') }}) - target: UpdateEntityAttributesTarget = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('target') }}) - \ No newline at end of file diff --git a/workflows_definition/src/sdk/models/shared/workflowdefinition.py b/workflows_definition/src/sdk/models/shared/workflowdefinition.py deleted file mode 100755 index 1ca247d15e..0000000000 --- a/workflows_definition/src/sdk/models/shared/workflowdefinition.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -from __future__ import annotations -import dataclasses -from ..shared import closingreasonid as shared_closingreasonid -from ..shared import dynamicduedate as shared_dynamicduedate -from ..shared import updateentityattributes as shared_updateentityattributes -from dataclasses_json import Undefined, dataclass_json -from sdk import utils -from typing import Any, Optional - - -@dataclass_json(undefined=Undefined.EXCLUDE) -@dataclasses.dataclass -class WorkflowDefinition: - r"""Workflow Definition payload""" - - flow: list[Any] = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('flow') }}) - name: str = dataclasses.field(metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('name') }}) - assigned_to: Optional[list[str]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('assignedTo'), 'exclude': lambda f: f is None }}) - closing_reasons: Optional[list[shared_closingreasonid.ClosingReasonID]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('closingReasons'), 'exclude': lambda f: f is None }}) - creation_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('creationTime'), 'exclude': lambda f: f is None }}) - r"""ISO String Date & Time""" - description: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('description'), 'exclude': lambda f: f is None }}) - due_date: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dueDate'), 'exclude': lambda f: f is None }}) - dynamic_due_date: Optional[shared_dynamicduedate.DynamicDueDate] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('dynamicDueDate'), 'exclude': lambda f: f is None }}) - r"""set a Duedate for a step then a specific""" - enable_ecp_workflow: Optional[bool] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('enableECPWorkflow'), 'exclude': lambda f: f is None }}) - r"""Indicates whether this workflow is available for End Customer Portal or not. By default it's not.""" - id: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('id'), 'exclude': lambda f: f is None }}) - last_update_time: Optional[str] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('lastUpdateTime'), 'exclude': lambda f: f is None }}) - r"""ISO String Date & Time""" - update_entity_attributes: Optional[list[shared_updateentityattributes.UpdateEntityAttributes]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('updateEntityAttributes'), 'exclude': lambda f: f is None }}) - user_ids: Optional[list[float]] = dataclasses.field(default=None, metadata={'dataclasses_json': { 'letter_case': utils.get_field_name('userIds'), 'exclude': lambda f: f is None }}) - r"""This field is deprecated. Please use assignedTo""" - \ No newline at end of file diff --git a/workflows_definition/src/sdk/sdk.py b/workflows_definition/src/sdk/sdk.py deleted file mode 100755 index 4ea77f02e0..0000000000 --- a/workflows_definition/src/sdk/sdk.py +++ /dev/null @@ -1,81 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -import requests as requests_http -from . import utils -from .closing_reason import ClosingReason -from .workflows import Workflows -from sdk.models import shared - -SERVERS = [ - "https://workflows-definition.sls.epilot.io", -] -"""Contains the list of servers available to the SDK""" - -class SDK: - r"""Service for Workflow Definitions for different processes inside of an Organization - - """ - closing_reason: ClosingReason - workflows: Workflows - - _client: requests_http.Session - _security_client: requests_http.Session - _server_url: str = SERVERS[0] - _language: str = "python" - _sdk_version: str = "1.2.2" - _gen_version: str = "2.16.5" - - def __init__(self, - security: shared.Security = None, - server_url: str = None, - url_params: dict[str, str] = None, - client: requests_http.Session = None - ) -> None: - """Instantiates the SDK configuring it with the provided parameters. - - :param security: The security details required for authentication - :type security: shared.Security - :param server_url: The server URL to use for all operations - :type server_url: str - :param url_params: Parameters to optionally template the server URL with - :type url_params: dict[str, str] - :param client: The requests.Session HTTP client to use for all operations - :type client: requests_http.Session - """ - self._client = requests_http.Session() - - - if server_url is not None: - if url_params is not None: - self._server_url = utils.template_url(server_url, url_params) - else: - self._server_url = server_url - - if client is not None: - self._client = client - - self._security_client = utils.configure_security_client(self._client, security) - - - self._init_sdks() - - def _init_sdks(self): - self.closing_reason = ClosingReason( - self._client, - self._security_client, - self._server_url, - self._language, - self._sdk_version, - self._gen_version - ) - - self.workflows = Workflows( - self._client, - self._security_client, - self._server_url, - self._language, - self._sdk_version, - self._gen_version - ) - - \ No newline at end of file diff --git a/workflows_definition/src/sdk/utils/__init__.py b/workflows_definition/src/sdk/utils/__init__.py deleted file mode 100755 index 94b739857f..0000000000 --- a/workflows_definition/src/sdk/utils/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -from .retries import * -from .utils import * diff --git a/workflows_definition/src/sdk/utils/retries.py b/workflows_definition/src/sdk/utils/retries.py deleted file mode 100755 index c6251d948e..0000000000 --- a/workflows_definition/src/sdk/utils/retries.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -import random -import time - -import requests - - -class BackoffStrategy: - initial_interval: int - max_interval: int - exponent: float - max_elapsed_time: int - - def __init__(self, initial_interval: int, max_interval: int, exponent: float, max_elapsed_time: int): - self.initial_interval = initial_interval - self.max_interval = max_interval - self.exponent = exponent - self.max_elapsed_time = max_elapsed_time - - -class RetryConfig: - strategy: str - backoff: BackoffStrategy - retry_connection_errors: bool - - def __init__(self, strategy: str, retry_connection_errors: bool): - self.strategy = strategy - self.retry_connection_errors = retry_connection_errors - - -class Retries: - config: RetryConfig - status_codes: list[str] - - def __init__(self, config: RetryConfig, status_codes: list[str]): - self.config = config - self.status_codes = status_codes - - -class TemporaryError(Exception): - response: requests.Response - - def __init__(self, response: requests.Response): - self.response = response - - -class PermanentError(Exception): - inner: Exception - - def __init__(self, inner: Exception): - self.inner = inner - - -def retry(func, retries: Retries): - if retries.config.strategy == 'backoff': - def do_request(): - res: requests.Response - try: - res = func() - - for code in retries.status_codes: - if "X" in code.upper(): - code_range = int(code[0]) - - status_major = res.status_code / 100 - - if status_major >= code_range and status_major < code_range + 1: - raise TemporaryError(res) - else: - parsed_code = int(code) - - if res.status_code == parsed_code: - raise TemporaryError(res) - except requests.exceptions.ConnectionError as exception: - if not retries.config.config.retry_connection_errors: - raise - - raise PermanentError(exception) from exception - except requests.exceptions.Timeout as exception: - if not retries.config.config.retry_connection_errors: - raise - - raise PermanentError(exception) from exception - except TemporaryError: - raise - except Exception as exception: - raise PermanentError(exception) from exception - - return res - - return retry_with_backoff(do_request, retries.config.backoff.initial_interval, retries.config.backoff.max_interval, retries.config.backoff.exponent, retries.config.backoff.max_elapsed_time) - - return func() - - -def retry_with_backoff(func, initial_interval=500, max_interval=60000, exponent=1.5, max_elapsed_time=3600000): - start = round(time.time()*1000) - retries = 0 - - while True: - try: - return func() - except PermanentError as exception: - raise exception.inner - except Exception as exception: # pylint: disable=broad-exception-caught - now = round(time.time()*1000) - if now - start > max_elapsed_time: - if isinstance(exception, TemporaryError): - return exception.response - - raise - sleep = ((initial_interval/1000) * - exponent**retries + random.uniform(0, 1)) - if sleep > max_interval/1000: - sleep = max_interval/1000 - time.sleep(sleep) - retries += 1 diff --git a/workflows_definition/src/sdk/utils/utils.py b/workflows_definition/src/sdk/utils/utils.py deleted file mode 100755 index 9d4fba3248..0000000000 --- a/workflows_definition/src/sdk/utils/utils.py +++ /dev/null @@ -1,735 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). 
DO NOT EDIT.""" - -import base64 -import json -import re -from dataclasses import Field, dataclass, fields, is_dataclass, make_dataclass -from datetime import date, datetime -from email.message import Message -from enum import Enum -from typing import Any, Callable, Optional, Tuple, Union, get_args, get_origin -from xmlrpc.client import boolean - -import dateutil.parser -import requests -from dataclasses_json import DataClassJsonMixin - - -class SecurityClient: - client: requests.Session - query_params: dict[str, str] = {} - - def __init__(self, client: requests.Session): - self.client = client - - def request(self, method, url, **kwargs): - params = kwargs.get('params', {}) - kwargs["params"] = self.query_params | params - - return self.client.request(method, url, **kwargs) - - -def configure_security_client(client: requests.Session, security: dataclass): - client = SecurityClient(client) - - if security is None: - return client - - sec_fields: Tuple[Field, ...] = fields(security) - for sec_field in sec_fields: - value = getattr(security, sec_field.name) - if value is None: - continue - - metadata = sec_field.metadata.get('security') - if metadata is None: - continue - if metadata.get('option'): - _parse_security_option(client, value) - return client - if metadata.get('scheme'): - # Special case for basic auth which could be a flattened struct - if metadata.get("sub_type") == "basic" and not is_dataclass(value): - _parse_security_scheme(client, metadata, security) - else: - _parse_security_scheme(client, metadata, value) - - return client - - -def _parse_security_option(client: SecurityClient, option: dataclass): - opt_fields: Tuple[Field, ...] = fields(option) - for opt_field in opt_fields: - metadata = opt_field.metadata.get('security') - if metadata is None or metadata.get('scheme') is None: - continue - _parse_security_scheme( - client, metadata, getattr(option, opt_field.name)) - - -def _parse_security_scheme(client: SecurityClient, scheme_metadata: dict, scheme: any): - scheme_type = scheme_metadata.get('type') - sub_type = scheme_metadata.get('sub_type') - - if is_dataclass(scheme): - if scheme_type == 'http' and sub_type == 'basic': - _parse_basic_auth_scheme(client, scheme) - return - - scheme_fields: Tuple[Field, ...] 
= fields(scheme) - for scheme_field in scheme_fields: - metadata = scheme_field.metadata.get('security') - if metadata is None or metadata.get('field_name') is None: - continue - - value = getattr(scheme, scheme_field.name) - - _parse_security_scheme_value( - client, scheme_metadata, metadata, value) - else: - _parse_security_scheme_value( - client, scheme_metadata, scheme_metadata, scheme) - - -def _parse_security_scheme_value(client: SecurityClient, scheme_metadata: dict, security_metadata: dict, value: any): - scheme_type = scheme_metadata.get('type') - sub_type = scheme_metadata.get('sub_type') - - header_name = security_metadata.get('field_name') - - if scheme_type == "apiKey": - if sub_type == 'header': - client.client.headers[header_name] = value - elif sub_type == 'query': - client.query_params[header_name] = value - elif sub_type == 'cookie': - client.client.cookies[header_name] = value - else: - raise Exception('not supported') - elif scheme_type == "openIdConnect": - client.client.headers[header_name] = value - elif scheme_type == 'oauth2': - client.client.headers[header_name] = value - elif scheme_type == 'http': - if sub_type == 'bearer': - client.client.headers[header_name] = value - else: - raise Exception('not supported') - else: - raise Exception('not supported') - - -def _parse_basic_auth_scheme(client: SecurityClient, scheme: dataclass): - username = "" - password = "" - - scheme_fields: Tuple[Field, ...] = fields(scheme) - for scheme_field in scheme_fields: - metadata = scheme_field.metadata.get('security') - if metadata is None or metadata.get('field_name') is None: - continue - - field_name = metadata.get('field_name') - value = getattr(scheme, scheme_field.name) - - if field_name == 'username': - username = value - if field_name == 'password': - password = value - - data = f'{username}:{password}'.encode() - client.client.headers['Authorization'] = f'Basic {base64.b64encode(data).decode()}' - - -def generate_url(clazz: type, server_url: str, path: str, path_params: dataclass, gbls: dict[str, dict[str, dict[str, Any]]] = None) -> str: - path_param_fields: Tuple[Field, ...] = fields(clazz) - for field in path_param_fields: - request_metadata = field.metadata.get('request') - if request_metadata is not None: - continue - - param_metadata = field.metadata.get('path_param') - if param_metadata is None: - continue - - if param_metadata.get('style', 'simple') == 'simple': - param = getattr( - path_params, field.name) if path_params is not None else None - param = _populate_from_globals( - field.name, param, 'pathParam', gbls) - - if param is None: - continue - - if isinstance(param, list): - pp_vals: list[str] = [] - for pp_val in param: - if pp_val is None: - continue - pp_vals.append(_val_to_string(pp_val)) - path = path.replace( - '{' + param_metadata.get('field_name', field.name) + '}', ",".join(pp_vals), 1) - elif isinstance(param, dict): - pp_vals: list[str] = [] - for pp_key in param: - if param[pp_key] is None: - continue - if param_metadata.get('explode'): - pp_vals.append( - f"{pp_key}={_val_to_string(param[pp_key])}") - else: - pp_vals.append( - f"{pp_key},{_val_to_string(param[pp_key])}") - path = path.replace( - '{' + param_metadata.get('field_name', field.name) + '}', ",".join(pp_vals), 1) - elif not isinstance(param, (str, int, float, complex, bool)): - pp_vals: list[str] = [] - param_fields: Tuple[Field, ...] 
= fields(param) - for param_field in param_fields: - param_value_metadata = param_field.metadata.get( - 'path_param') - if not param_value_metadata: - continue - - parm_name = param_value_metadata.get( - 'field_name', field.name) - - param_field_val = getattr(param, param_field.name) - if param_field_val is None: - continue - if param_metadata.get('explode'): - pp_vals.append( - f"{parm_name}={_val_to_string(param_field_val)}") - else: - pp_vals.append( - f"{parm_name},{_val_to_string(param_field_val)}") - path = path.replace( - '{' + param_metadata.get('field_name', field.name) + '}', ",".join(pp_vals), 1) - else: - path = path.replace( - '{' + param_metadata.get('field_name', field.name) + '}', _val_to_string(param), 1) - - return server_url.removesuffix("/") + path - - -def is_optional(field): - return get_origin(field) is Union and type(None) in get_args(field) - - -def template_url(url_with_params: str, params: dict[str, str]) -> str: - for key, value in params.items(): - url_with_params = url_with_params.replace( - '{' + key + '}', value) - - return url_with_params - - -def get_query_params(clazz: type, query_params: dataclass, gbls: dict[str, dict[str, dict[str, Any]]] = None) -> dict[str, list[str]]: - params: dict[str, list[str]] = {} - - param_fields: Tuple[Field, ...] = fields(clazz) - for field in param_fields: - request_metadata = field.metadata.get('request') - if request_metadata is not None: - continue - - metadata = field.metadata.get('query_param') - if not metadata: - continue - - param_name = field.name - value = getattr( - query_params, param_name) if query_params is not None else None - - value = _populate_from_globals(param_name, value, 'queryParam', gbls) - - f_name = metadata.get("field_name") - serialization = metadata.get('serialization', '') - if serialization != '': - params = params | _get_serialized_query_params( - metadata, f_name, value) - else: - style = metadata.get('style', 'form') - if style == 'deepObject': - params = params | _get_deep_object_query_params( - metadata, f_name, value) - elif style == 'form': - params = params | _get_form_query_params( - metadata, f_name, value) - else: - raise Exception('not yet implemented') - return params - - -def get_headers(headers_params: dataclass) -> dict[str, str]: - if headers_params is None: - return {} - - headers: dict[str, str] = {} - - param_fields: Tuple[Field, ...] = fields(headers_params) - for field in param_fields: - metadata = field.metadata.get('header') - if not metadata: - continue - - value = _serialize_header(metadata.get( - 'explode', False), getattr(headers_params, field.name)) - - if value != '': - headers[metadata.get('field_name', field.name)] = value - - return headers - - -def _get_serialized_query_params(metadata: dict, field_name: str, obj: any) -> dict[str, list[str]]: - params: dict[str, list[str]] = {} - - serialization = metadata.get('serialization', '') - if serialization == 'json': - params[metadata.get("field_name", field_name)] = marshal_json(obj) - - return params - - -def _get_deep_object_query_params(metadata: dict, field_name: str, obj: any) -> dict[str, list[str]]: - params: dict[str, list[str]] = {} - - if obj is None: - return params - - if is_dataclass(obj): - obj_fields: Tuple[Field, ...] 
= fields(obj) - for obj_field in obj_fields: - obj_param_metadata = obj_field.metadata.get('query_param') - if not obj_param_metadata: - continue - - obj_val = getattr(obj, obj_field.name) - if obj_val is None: - continue - - if isinstance(obj_val, list): - for val in obj_val: - if val is None: - continue - - if params.get(f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]') is None: - params[f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]'] = [ - ] - - params[ - f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]'].append(_val_to_string(val)) - else: - params[ - f'{metadata.get("field_name", field_name)}[{obj_param_metadata.get("field_name", obj_field.name)}]'] = [ - _val_to_string(obj_val)] - elif isinstance(obj, dict): - for key, value in obj.items(): - if value is None: - continue - - if isinstance(value, list): - for val in value: - if val is None: - continue - - if params.get(f'{metadata.get("field_name", field_name)}[{key}]') is None: - params[f'{metadata.get("field_name", field_name)}[{key}]'] = [ - ] - - params[ - f'{metadata.get("field_name", field_name)}[{key}]'].append(_val_to_string(val)) - else: - params[f'{metadata.get("field_name", field_name)}[{key}]'] = [ - _val_to_string(value)] - return params - - -def _get_query_param_field_name(obj_field: Field) -> str: - obj_param_metadata = obj_field.metadata.get('query_param') - - if not obj_param_metadata: - return "" - - return obj_param_metadata.get("field_name", obj_field.name) - - -def _get_form_query_params(metadata: dict, field_name: str, obj: any) -> dict[str, list[str]]: - return _populate_form(field_name, metadata.get("explode", True), obj, _get_query_param_field_name) - - -SERIALIZATION_METHOD_TO_CONTENT_TYPE = { - 'json': 'application/json', - 'form': 'application/x-www-form-urlencoded', - 'multipart': 'multipart/form-data', - 'raw': 'application/octet-stream', - 'string': 'text/plain', -} - - -def serialize_request_body(request: dataclass, request_field_name: str, serialization_method: str) -> Tuple[str, any, any]: - if request is None: - return None, None, None, None - - if not is_dataclass(request) or not hasattr(request, request_field_name): - return serialize_content_type(request_field_name, SERIALIZATION_METHOD_TO_CONTENT_TYPE[serialization_method], request) - - request_val = getattr(request, request_field_name) - - request_fields: Tuple[Field, ...] 
= fields(request) - request_metadata = None - - for field in request_fields: - if field.name == request_field_name: - request_metadata = field.metadata.get('request') - break - - if request_metadata is None: - raise Exception('invalid request type') - - return serialize_content_type(request_field_name, request_metadata.get('media_type', 'application/octet-stream'), request_val) - - -def serialize_content_type(field_name: str, media_type: str, request: dataclass) -> Tuple[str, any, list[list[any]]]: - if re.match(r'(application|text)\/.*?\+*json.*', media_type) is not None: - return media_type, marshal_json(request), None - if re.match(r'multipart\/.*', media_type) is not None: - return serialize_multipart_form(media_type, request) - if re.match(r'application\/x-www-form-urlencoded.*', media_type) is not None: - return media_type, serialize_form_data(field_name, request), None - if isinstance(request, (bytes, bytearray)): - return media_type, request, None - if isinstance(request, str): - return media_type, request, None - - raise Exception( - f"invalid request body type {type(request)} for mediaType {media_type}") - - -def serialize_multipart_form(media_type: str, request: dataclass) -> Tuple[str, any, list[list[any]]]: - form: list[list[any]] = [] - request_fields = fields(request) - - for field in request_fields: - val = getattr(request, field.name) - if val is None: - continue - - field_metadata = field.metadata.get('multipart_form') - if not field_metadata: - continue - - if field_metadata.get("file") is True: - file_fields = fields(val) - - file_name = "" - field_name = "" - content = bytes() - - for file_field in file_fields: - file_metadata = file_field.metadata.get('multipart_form') - if file_metadata is None: - continue - - if file_metadata.get("content") is True: - content = getattr(val, file_field.name) - else: - field_name = file_metadata.get( - "field_name", file_field.name) - file_name = getattr(val, file_field.name) - if field_name == "" or file_name == "" or content == bytes(): - raise Exception('invalid multipart/form-data file') - - form.append([field_name, [file_name, content]]) - elif field_metadata.get("json") is True: - to_append = [field_metadata.get("field_name", field.name), [ - None, marshal_json(val), "application/json"]] - form.append(to_append) - else: - field_name = field_metadata.get( - "field_name", field.name) - if isinstance(val, list): - for value in val: - if value is None: - continue - form.append( - [field_name + "[]", [None, _val_to_string(value)]]) - else: - form.append([field_name, [None, _val_to_string(val)]]) - return media_type, None, form - - -def serialize_dict(original: dict, explode: bool, field_name, existing: Optional[dict[str, list[str]]]) -> dict[ - str, list[str]]: - if existing is None: - existing = [] - - if explode is True: - for key, val in original.items(): - if key not in existing: - existing[key] = [] - existing[key].append(val) - else: - temp = [] - for key, val in original.items(): - temp.append(str(key)) - temp.append(str(val)) - if field_name not in existing: - existing[field_name] = [] - existing[field_name].append(",".join(temp)) - return existing - - -def serialize_form_data(field_name: str, data: dataclass) -> dict[str, any]: - form: dict[str, list[str]] = {} - - if is_dataclass(data): - for field in fields(data): - val = getattr(data, field.name) - if val is None: - continue - - metadata = field.metadata.get('form') - if metadata is None: - continue - - field_name = metadata.get('field_name', field.name) - - if 
metadata.get('json'): - form[field_name] = [marshal_json(val)] - else: - if metadata.get('style', 'form') == 'form': - form = form | _populate_form( - field_name, metadata.get('explode', True), val, _get_form_field_name) - else: - raise Exception( - f'Invalid form style for field {field.name}') - elif isinstance(data, dict): - for key, value in data.items(): - form[key] = [_val_to_string(value)] - else: - raise Exception(f'Invalid request body type for field {field_name}') - - return form - - -def _get_form_field_name(obj_field: Field) -> str: - obj_param_metadata = obj_field.metadata.get('form') - - if not obj_param_metadata: - return "" - - return obj_param_metadata.get("field_name", obj_field.name) - - -def _populate_form(field_name: str, explode: boolean, obj: any, get_field_name_func: Callable) -> dict[str, list[str]]: - params: dict[str, list[str]] = {} - - if obj is None: - return params - - if is_dataclass(obj): - items = [] - - obj_fields: Tuple[Field, ...] = fields(obj) - for obj_field in obj_fields: - obj_field_name = get_field_name_func(obj_field) - if obj_field_name == '': - continue - - val = getattr(obj, obj_field.name) - if val is None: - continue - - if explode: - params[obj_field_name] = [_val_to_string(val)] - else: - items.append( - f'{obj_field_name},{_val_to_string(val)}') - - if len(items) > 0: - params[field_name] = [','.join(items)] - elif isinstance(obj, dict): - items = [] - for key, value in obj.items(): - if value is None: - continue - - if explode: - params[key] = _val_to_string(value) - else: - items.append(f'{key},{_val_to_string(value)}') - - if len(items) > 0: - params[field_name] = [','.join(items)] - elif isinstance(obj, list): - items = [] - - for value in obj: - if value is None: - continue - - if explode: - if not field_name in params: - params[field_name] = [] - params[field_name].append(_val_to_string(value)) - else: - items.append(_val_to_string(value)) - - if len(items) > 0: - params[field_name] = [','.join([str(item) for item in items])] - else: - params[field_name] = [_val_to_string(obj)] - - return params - - -def _serialize_header(explode: bool, obj: any) -> str: - if obj is None: - return '' - - if is_dataclass(obj): - items = [] - obj_fields: Tuple[Field, ...] 
= fields(obj) - for obj_field in obj_fields: - obj_param_metadata = obj_field.metadata.get('header') - - if not obj_param_metadata: - continue - - obj_field_name = obj_param_metadata.get( - 'field_name', obj_field.name) - if obj_field_name == '': - continue - - val = getattr(obj, obj_field.name) - if val is None: - continue - - if explode: - items.append( - f'{obj_field_name}={_val_to_string(val)}') - else: - items.append(obj_field_name) - items.append(_val_to_string(val)) - - if len(items) > 0: - return ','.join(items) - elif isinstance(obj, dict): - items = [] - - for key, value in obj.items(): - if value is None: - continue - - if explode: - items.append(f'{key}={_val_to_string(value)}') - else: - items.append(key) - items.append(_val_to_string(value)) - - if len(items) > 0: - return ','.join([str(item) for item in items]) - elif isinstance(obj, list): - items = [] - - for value in obj: - if value is None: - continue - - items.append(_val_to_string(value)) - - if len(items) > 0: - return ','.join(items) - else: - return f'{_val_to_string(obj)}' - - return '' - - -def unmarshal_json(data, typ): - unmarhsal = make_dataclass('Unmarhsal', [('res', typ)], - bases=(DataClassJsonMixin,)) - json_dict = json.loads(data) - out = unmarhsal.from_dict({"res": json_dict}) - return out.res - - -def marshal_json(val): - marshal = make_dataclass('Marshal', [('res', type(val))], - bases=(DataClassJsonMixin,)) - marshaller = marshal(res=val) - json_dict = marshaller.to_dict() - return json.dumps(json_dict["res"]) - - -def match_content_type(content_type: str, pattern: str) -> boolean: - if pattern in (content_type, "*", "*/*"): - return True - - msg = Message() - msg['content-type'] = content_type - media_type = msg.get_content_type() - - if media_type == pattern: - return True - - parts = media_type.split("/") - if len(parts) == 2: - if pattern in (f'{parts[0]}/*', f'*/{parts[1]}'): - return True - - return False - - -def datetimeisoformat(optional: bool): - def isoformatoptional(val): - if optional and val is None: - return None - return _val_to_string(val) - - return isoformatoptional - - -def dateisoformat(optional: bool): - def isoformatoptional(val): - if optional and val is None: - return None - return date.isoformat(val) - - return isoformatoptional - - -def datefromisoformat(date_str: str): - return dateutil.parser.parse(date_str).date() - - -def get_field_name(name): - def override(_, _field_name=name): - return _field_name - - return override - - -def _val_to_string(val): - if isinstance(val, bool): - return str(val).lower() - if isinstance(val, datetime): - return val.isoformat().replace('+00:00', 'Z') - if isinstance(val, Enum): - return val.value - - return str(val) - - -def _populate_from_globals(param_name: str, value: any, param_type: str, gbls: dict[str, dict[str, dict[str, Any]]]): - if value is None and gbls is not None: - if 'parameters' in gbls: - if param_type in gbls['parameters']: - if param_name in gbls['parameters'][param_type]: - global_value = gbls['parameters'][param_type][param_name] - if global_value is not None: - value = global_value - - return value diff --git a/workflows_definition/src/sdk/workflows.py b/workflows_definition/src/sdk/workflows.py deleted file mode 100755 index b442b7f15d..0000000000 --- a/workflows_definition/src/sdk/workflows.py +++ /dev/null @@ -1,250 +0,0 @@ -"""Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT.""" - -import requests as requests_http -from . 
import utils -from sdk.models import operations, shared -from typing import Any, Optional - -class Workflows: - _client: requests_http.Session - _security_client: requests_http.Session - _server_url: str - _language: str - _sdk_version: str - _gen_version: str - - def __init__(self, client: requests_http.Session, security_client: requests_http.Session, server_url: str, language: str, sdk_version: str, gen_version: str) -> None: - self._client = client - self._security_client = security_client - self._server_url = server_url - self._language = language - self._sdk_version = sdk_version - self._gen_version = gen_version - - def create_definition(self, request: shared.WorkflowDefinition) -> operations.CreateDefinitionResponse: - r"""createDefinition - Create a Workflow Definition. - """ - base_url = self._server_url - - url = base_url.removesuffix('/') + '/v1/workflows/definitions' - - headers = {} - req_content_type, data, form = utils.serialize_request_body(request, "request", 'json') - if req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - if data is None and form is None: - raise Exception('request body is required') - - client = self._security_client - - http_res = client.request('POST', url, data=data, files=form, headers=headers) - content_type = http_res.headers.get('Content-Type') - - res = operations.CreateDefinitionResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 200: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.WorkflowDefinition]) - res.workflow_definition = out - elif http_res.status_code in [400, 401, 500]: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ErrorResp]) - res.error_resp = out - - return res - - def delete_definition(self, request: operations.DeleteDefinitionRequest) -> operations.DeleteDefinitionResponse: - r"""deleteDefinition - Delete Workflow Definition. - """ - base_url = self._server_url - - url = utils.generate_url(operations.DeleteDefinitionRequest, base_url, '/v1/workflows/definitions/{definitionId}', request) - - - client = self._security_client - - http_res = client.request('DELETE', url) - content_type = http_res.headers.get('Content-Type') - - res = operations.DeleteDefinitionResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code in [204, 404]: - pass - elif http_res.status_code == 401: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ErrorResp]) - res.error_resp = out - - return res - - def get_definition(self, request: operations.GetDefinitionRequest) -> operations.GetDefinitionResponse: - r"""getDefinition - Get specific Definition by id from the Organization. 
- """ - base_url = self._server_url - - url = utils.generate_url(operations.GetDefinitionRequest, base_url, '/v1/workflows/definitions/{definitionId}', request) - - - client = self._security_client - - http_res = client.request('GET', url) - content_type = http_res.headers.get('Content-Type') - - res = operations.GetDefinitionResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 200: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.WorkflowDefinition]) - res.workflow_definition = out - elif http_res.status_code in [400, 401, 500]: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ErrorResp]) - res.error_resp = out - elif http_res.status_code == 404: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[Any]) - res.definition_not_found_resp = out - - return res - - def get_definitions(self) -> operations.GetDefinitionsResponse: - r"""getDefinitions - Retrieve all Workflow Definitions from an Organization - """ - base_url = self._server_url - - url = base_url.removesuffix('/') + '/v1/workflows/definitions' - - - client = self._security_client - - http_res = client.request('GET', url) - content_type = http_res.headers.get('Content-Type') - - res = operations.GetDefinitionsResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 200: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[list[shared.WorkflowDefinition]]) - res.workflow_definitions = out - elif http_res.status_code == 500: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ErrorResp]) - res.error_resp = out - - return res - - def get_max_allowed_limit(self) -> operations.GetMaxAllowedLimitResponse: - r"""getMaxAllowedLimit - Get limits and number of created executions for an Organization. - """ - base_url = self._server_url - - url = base_url.removesuffix('/') + '/v1/workflows/limits/max-allowed' - - - client = self._security_client - - http_res = client.request('GET', url) - content_type = http_res.headers.get('Content-Type') - - res = operations.GetMaxAllowedLimitResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 200: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.MaxAllowedLimit]) - res.max_allowed_limit = out - elif http_res.status_code == 500: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ErrorResp]) - res.error_resp = out - - return res - - def get_workflow_closing_reasons(self, request: operations.GetWorkflowClosingReasonsRequest) -> operations.GetWorkflowClosingReasonsResponse: - r"""getWorkflowClosingReasons - Returns all closing reasons defined for the workflow. 
- """ - base_url = self._server_url - - url = utils.generate_url(operations.GetWorkflowClosingReasonsRequest, base_url, '/v1/workflows/definitions/{definitionId}/closing-reasons', request) - - - client = self._security_client - - http_res = client.request('GET', url) - content_type = http_res.headers.get('Content-Type') - - res = operations.GetWorkflowClosingReasonsResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 200: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ClosingReasonsIds]) - res.closing_reasons_ids = out - - return res - - def set_workflow_closing_reasons(self, request: operations.SetWorkflowClosingReasonsRequest) -> operations.SetWorkflowClosingReasonsResponse: - r"""setWorkflowClosingReasons - Sets which closing reasons are defined for this workflow, based on the entire closing reasons catalog. - """ - base_url = self._server_url - - url = utils.generate_url(operations.SetWorkflowClosingReasonsRequest, base_url, '/v1/workflows/definitions/{definitionId}/closing-reasons', request) - - headers = {} - req_content_type, data, form = utils.serialize_request_body(request, "closing_reasons_ids", 'json') - if req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - if data is None and form is None: - raise Exception('request body is required') - - client = self._security_client - - http_res = client.request('PATCH', url, data=data, files=form, headers=headers) - content_type = http_res.headers.get('Content-Type') - - res = operations.SetWorkflowClosingReasonsResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 201: - pass - - return res - - def update_definition(self, request: operations.UpdateDefinitionRequest) -> operations.UpdateDefinitionResponse: - r"""updateDefinition - Update Workflow Definition. - """ - base_url = self._server_url - - url = utils.generate_url(operations.UpdateDefinitionRequest, base_url, '/v1/workflows/definitions/{definitionId}', request) - - headers = {} - req_content_type, data, form = utils.serialize_request_body(request, "workflow_definition", 'json') - if req_content_type not in ('multipart/form-data', 'multipart/mixed'): - headers['content-type'] = req_content_type - if data is None and form is None: - raise Exception('request body is required') - - client = self._security_client - - http_res = client.request('PUT', url, data=data, files=form, headers=headers) - content_type = http_res.headers.get('Content-Type') - - res = operations.UpdateDefinitionResponse(status_code=http_res.status_code, content_type=content_type, raw_response=http_res) - - if http_res.status_code == 200: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.WorkflowDefinition]) - res.workflow_definition = out - elif http_res.status_code in [400, 401, 500]: - if utils.match_content_type(content_type, 'application/json'): - out = utils.unmarshal_json(http_res.text, Optional[shared.ErrorResp]) - res.error_resp = out - - return res - - \ No newline at end of file