From 71f80362af806e682f8dbaf0f7ed0dca671ab06f Mon Sep 17 00:00:00 2001 From: "ci.datadog-api-spec" Date: Wed, 6 Nov 2024 10:10:12 +0000 Subject: [PATCH] Regenerate client from commit de2d6d77 of spec repo --- .apigentools-info | 8 +- .generator/schemas/v2/openapi.yaml | 506 ++++++++++++++++++ ...al-job-returns-Bad-Request-response.frozen | 1 + ...rical-job-returns-Bad-Request-response.yml | 20 + ...ical-job-returns-Not-Found-response.frozen | 1 + ...torical-job-returns-Not-Found-response.yml | 21 + ...-historical-job-returns-OK-response.frozen | 1 + ...l-a-historical-job-returns-OK-response.yml | 43 ++ ...signal-returns-Bad-Request-response.frozen | 1 + ...-a-signal-returns-Bad-Request-response.yml | 26 + ...ng-job-returns-Bad-Request-response.frozen | 1 + ...sting-job-returns-Bad-Request-response.yml | 21 + ...ting-job-returns-Not-Found-response.frozen | 1 + ...xisting-job-returns-Not-Found-response.yml | 21 + ...etails-returns-Bad-Request-response.frozen | 1 + ...s-details-returns-Bad-Request-response.yml | 20 + ...-details-returns-Not-Found-response.frozen | 1 + ...b-s-details-returns-Not-Found-response.yml | 21 + ...a-job-s-details-returns-OK-response.frozen | 1 + ...et-a-job-s-details-returns-OK-response.yml | 51 ++ ...historical-jobs-returns-OK-response.frozen | 1 + ...st-historical-jobs-returns-OK-response.yml | 210 ++++++++ ...al-job-returns-Bad-Request-response.frozen | 1 + ...rical-job-returns-Bad-Request-response.yml | 70 +++ ...ical-job-returns-Not-Found-response.frozen | 1 + ...torical-job-returns-Not-Found-response.yml | 24 + ...job-returns-Status-created-response.frozen | 1 + ...al-job-returns-Status-created-response.yml | 69 +++ .../CancelHistoricalJob.rb | 8 + .../ConvertJobResultToSignal.rb | 21 + .../DeleteHistoricalJob.rb | 5 + .../security-monitoring/GetHistoricalJob.rb | 8 + .../security-monitoring/ListHistoricalJobs.rb | 5 + .../security-monitoring/RunHistoricalJob.rb | 44 ++ features/scenarios_model_mapping.rb | 21 + features/v2/given.json | 12 + features/v2/security_monitoring.feature | 136 +++++ features/v2/undo.json | 36 ++ lib/datadog_api_client/inflector.rb | 18 + .../v2/api/security_monitoring_api.rb | 397 ++++++++++++++ .../v2/models/calculated_field.rb | 144 +++++ ...nvert_job_results_to_signals_attributes.rb | 200 +++++++ .../convert_job_results_to_signals_data.rb | 115 ++++ ...onvert_job_results_to_signals_data_type.rb | 26 + .../convert_job_results_to_signals_request.rb | 105 ++++ .../v2/models/historical_job_data_type.rb | 26 + .../v2/models/historical_job_list_meta.rb | 123 +++++ .../v2/models/historical_job_response.rb | 125 +++++ .../historical_job_response_attributes.rb | 175 ++++++ .../v2/models/job_create_response.rb | 105 ++++ .../v2/models/job_create_response_data.rb | 115 ++++ .../v2/models/job_definition.rb | 357 ++++++++++++ .../v2/models/job_definition_from_rule.rb | 223 ++++++++ .../models/list_historical_jobs_response.rb | 117 ++++ .../v2/models/run_historical_job_request.rb | 105 ++++ .../run_historical_job_request_attributes.rb | 125 +++++ .../models/run_historical_job_request_data.rb | 115 ++++ .../run_historical_job_request_data_type.rb | 26 + 58 files changed, 4178 insertions(+), 4 deletions(-) create mode 100644 cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.yml create mode 100644 
cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.yml create mode 100644 cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.yml create mode 100644 cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.frozen create mode 100644 cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.yml create mode 100644 examples/v2/security-monitoring/CancelHistoricalJob.rb create mode 100644 examples/v2/security-monitoring/ConvertJobResultToSignal.rb create mode 100644 examples/v2/security-monitoring/DeleteHistoricalJob.rb create mode 100644 examples/v2/security-monitoring/GetHistoricalJob.rb create mode 100644 examples/v2/security-monitoring/ListHistoricalJobs.rb create mode 100644 examples/v2/security-monitoring/RunHistoricalJob.rb create mode 100644 lib/datadog_api_client/v2/models/calculated_field.rb create mode 100644 lib/datadog_api_client/v2/models/convert_job_results_to_signals_attributes.rb create mode 100644 lib/datadog_api_client/v2/models/convert_job_results_to_signals_data.rb create mode 100644 lib/datadog_api_client/v2/models/convert_job_results_to_signals_data_type.rb 
create mode 100644 lib/datadog_api_client/v2/models/convert_job_results_to_signals_request.rb create mode 100644 lib/datadog_api_client/v2/models/historical_job_data_type.rb create mode 100644 lib/datadog_api_client/v2/models/historical_job_list_meta.rb create mode 100644 lib/datadog_api_client/v2/models/historical_job_response.rb create mode 100644 lib/datadog_api_client/v2/models/historical_job_response_attributes.rb create mode 100644 lib/datadog_api_client/v2/models/job_create_response.rb create mode 100644 lib/datadog_api_client/v2/models/job_create_response_data.rb create mode 100644 lib/datadog_api_client/v2/models/job_definition.rb create mode 100644 lib/datadog_api_client/v2/models/job_definition_from_rule.rb create mode 100644 lib/datadog_api_client/v2/models/list_historical_jobs_response.rb create mode 100644 lib/datadog_api_client/v2/models/run_historical_job_request.rb create mode 100644 lib/datadog_api_client/v2/models/run_historical_job_request_attributes.rb create mode 100644 lib/datadog_api_client/v2/models/run_historical_job_request_data.rb create mode 100644 lib/datadog_api_client/v2/models/run_historical_job_request_data_type.rb diff --git a/.apigentools-info b/.apigentools-info index db05ce20d56..307b4f7a793 100644 --- a/.apigentools-info +++ b/.apigentools-info @@ -4,13 +4,13 @@ "spec_versions": { "v1": { "apigentools_version": "1.6.6", - "regenerated": "2024-11-05 21:07:13.880957", - "spec_repo_commit": "6c0fa1b6" + "regenerated": "2024-11-06 10:09:46.270818", + "spec_repo_commit": "de2d6d77" }, "v2": { "apigentools_version": "1.6.6", - "regenerated": "2024-11-05 21:07:13.899409", - "spec_repo_commit": "6c0fa1b6" + "regenerated": "2024-11-06 10:09:46.289631", + "spec_repo_commit": "de2d6d77" } } } \ No newline at end of file diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 925dfca233a..49de5825bf6 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -293,6 +293,13 @@ components: required: true schema: type: string + HistoricalJobID: + description: The ID of the job. + in: path + name: job_id + required: true + schema: + type: string IncidentAttachmentFilterQueryParameter: description: Specifies which types of attachments are included in the response. explode: false @@ -3658,6 +3665,21 @@ components: from the other indexes type: string type: object + CalculatedField: + description: Calculated field. + properties: + expression: + description: Expression. + example: '@request_end_timestamp - @request_start_timestamp' + type: string + name: + description: Field name. + example: response_time + type: string + required: + - name + - expression + type: object Case: description: A case properties: @@ -5540,6 +5562,59 @@ components: - IDENTITY - GZIP - DEFLATE + ConvertJobResultsToSignalsAttributes: + description: Attributes for converting historical job results to signals. + properties: + id: + description: Request ID. + type: string + jobResultIds: + description: Job result IDs. + example: + - '' + items: + type: string + type: array + notifications: + description: Notifications sent. + example: + - '' + items: + type: string + type: array + signalMessage: + description: Message of generated signals. + example: A large number of failed login attempts. 
+ type: string + signalSeverity: + $ref: '#/components/schemas/SecurityMonitoringRuleSeverity' + required: + - jobResultIds + - signalSeverity + - signalMessage + - notifications + type: object + ConvertJobResultsToSignalsData: + description: Data for converting historical job results to signals. + properties: + attributes: + $ref: '#/components/schemas/ConvertJobResultsToSignalsAttributes' + type: + $ref: '#/components/schemas/ConvertJobResultsToSignalsDataType' + type: object + ConvertJobResultsToSignalsDataType: + description: Type of payload. + enum: + - historicalDetectionsJobResultSignalConversion + type: string + x-enum-varnames: + - HISTORICALDETECTIONSJOBRESULTSIGNALCONVERSION + ConvertJobResultsToSignalsRequest: + description: Request for converting historical job results to signals. + properties: + data: + $ref: '#/components/schemas/ConvertJobResultsToSignalsData' + type: object CostAttributionAggregates: description: An array of available aggregates. items: @@ -10090,6 +10165,61 @@ components: required: - message type: object + HistoricalJobDataType: + description: Type of payload. + enum: + - historicalDetectionsJob + type: string + x-enum-varnames: + - HISTORICALDETECTIONSJOB + HistoricalJobListMeta: + description: Metadata about the list of jobs. + properties: + totalCount: + description: Number of jobs in the list. + format: int32 + maximum: 2147483647 + type: integer + type: object + HistoricalJobResponse: + description: Historical job response. + properties: + attributes: + $ref: '#/components/schemas/HistoricalJobResponseAttributes' + id: + description: ID of the job. + type: string + type: + $ref: '#/components/schemas/HistoricalJobDataType' + type: object + HistoricalJobResponseAttributes: + description: Historical job attributes. + properties: + CreatedByName: + description: The name of the user who created the job. + type: string + createdAt: + description: Time when the job was created. + type: string + createdByHandle: + description: The handle of the user who created the job. + type: string + createdFromRuleId: + description: ID of the rule used to create the job (if it is created from + a rule). + type: string + jobDefinition: + $ref: '#/components/schemas/JobDefinition' + jobName: + description: Job name. + type: string + jobStatus: + description: Job status. + type: string + modifiedAt: + description: Last modification time of the job. + type: string + type: object HourlyUsage: description: Hourly usage for a product family for an org. properties: @@ -12520,6 +12650,150 @@ components: description: Jira project key type: string type: object + JobCreateResponse: + description: Run a historical job response. + properties: + data: + $ref: '#/components/schemas/JobCreateResponseData' + type: object + JobCreateResponseData: + description: The definition of `JobCreateResponseData` object. + properties: + id: + description: ID of the created job. + type: string + type: + $ref: '#/components/schemas/HistoricalJobDataType' + type: object + JobDefinition: + description: Definition of a historical job. + properties: + calculatedFields: + description: Calculated fields. + items: + $ref: '#/components/schemas/CalculatedField' + type: array + cases: + description: Cases used for generating job results. + items: + $ref: '#/components/schemas/SecurityMonitoringRuleCaseCreate' + type: array + filters: + description: Additional queries to filter matched events before they are + processed. 
This field is deprecated for log detection, signal correlation, + and workload security rules. + items: + $ref: '#/components/schemas/SecurityMonitoringFilter' + type: array + from: + description: Starting time of data analyzed by the job. + example: 1729843470000 + format: int64 + type: integer + groupSignalsBy: + description: Fields used to group results. + items: + type: string + type: array + index: + description: Index used to load the data. + example: cloud_siem + type: string + message: + description: Message for generated results. + example: A large number of failed log-in attempts. + type: string + name: + description: Job name. + example: Excessive number of failed attempts. + type: string + options: + $ref: '#/components/schemas/SecurityMonitoringRuleOptions' + projectedPerQuery: + description: Query projections. + items: + type: string + type: array + queries: + description: Queries for selecting logs analyzed by the job. + items: + $ref: '#/components/schemas/SecurityMonitoringStandardRuleQuery' + type: array + referenceTables: + description: Reference tables for the rule. + items: + $ref: '#/components/schemas/SecurityMonitoringReferenceTable' + type: array + tags: + description: Tags for generated signals. + items: + type: string + type: array + thirdPartyCases: + description: Cases for generating results from third-party rules. Only available + for third-party rules. + example: [] + items: + $ref: '#/components/schemas/SecurityMonitoringThirdPartyRuleCaseCreate' + type: array + to: + description: Ending time of data analyzed by the job. + example: 1729847070000 + format: int64 + type: integer + type: + description: Job type. + type: string + required: + - from + - to + - index + - name + - cases + - queries + - message + type: object + JobDefinitionFromRule: + description: Definition of a historical job based on a security monitoring rule. + properties: + caseIndex: + description: Index of the rule case applied by the job. + example: 0 + format: int32 + maximum: 9 + type: integer + from: + description: Starting time of data analyzed by the job. + example: 1729843470000 + format: int64 + type: integer + id: + description: ID of the detection rule used to create the job. + example: abc-def-ghi + type: string + index: + description: Index used to load the data. + example: cloud_siem + type: string + notifications: + description: Notifications sent when the job is completed. + example: + - '@sns-cloudtrail-results' + items: + type: string + type: array + to: + description: Ending time of data analyzed by the job. + example: 1729847070000 + format: int64 + type: integer + required: + - id + - from + - to + - index + - caseIndex + type: object LeakedKey: description: The definition of LeakedKey object. properties: @@ -12750,6 +13024,17 @@ components: - data - meta type: object + ListHistoricalJobsResponse: + description: List of historical jobs. + properties: + data: + description: Array containing the list of historical jobs. + items: + $ref: '#/components/schemas/HistoricalJobResponse' + type: array + meta: + $ref: '#/components/schemas/HistoricalJobListMeta' + type: object ListPowerpacksResponse: description: Response object which includes all powerpack configurations. properties: @@ -19578,6 +19863,38 @@ components: $ref: '#/components/schemas/RumMetricResponseData' type: array type: object + RunHistoricalJobRequest: + description: Run a historical job request. 
+ properties: + data: + $ref: '#/components/schemas/RunHistoricalJobRequestData' + type: object + RunHistoricalJobRequestAttributes: + description: Run a historical job request. + properties: + fromRule: + $ref: '#/components/schemas/JobDefinitionFromRule' + id: + description: Request ID. + type: string + jobDefinition: + $ref: '#/components/schemas/JobDefinition' + type: object + RunHistoricalJobRequestData: + description: Data for running a historical job request. + properties: + attributes: + $ref: '#/components/schemas/RunHistoricalJobRequestAttributes' + type: + $ref: '#/components/schemas/RunHistoricalJobRequestDataType' + type: object + RunHistoricalJobRequestDataType: + description: Type of data. + enum: + - historicalDetectionsJobCreate + type: string + x-enum-varnames: + - HISTORICALDETECTIONSJOBCREATE SAMLAssertionAttribute: description: SAML assertion attribute. properties: @@ -39589,6 +39906,195 @@ paths: permissions: - incident_settings_write x-unstable: '**Note**: This endpoint is deprecated.' + /api/v2/siem-historical-detections/jobs: + get: + description: List historical jobs. + operationId: ListHistoricalJobs + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + - description: The order of the jobs in results. + example: status + in: query + name: sort + required: false + schema: + type: string + - description: Query used to filter items from the fetched list. + example: security:attack status:high + in: query + name: filter[query] + required: false + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListHistoricalJobsResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: List historical jobs + tags: + - Security Monitoring + post: + description: Run a historical job. + operationId: RunHistoricalJob + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RunHistoricalJobRequest' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/JobCreateResponse' + description: Status created + '400': + $ref: '#/components/responses/BadRequestResponse' + '401': + $ref: '#/components/responses/ConcurrentModificationResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: Run a historical job + tags: + - Security Monitoring + x-codegen-request-body-name: body + /api/v2/siem-historical-detections/jobs/signal_convert: + post: + description: Convert a job result to a signal. 
+ operationId: ConvertJobResultToSignal + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ConvertJobResultsToSignalsRequest' + required: true + responses: + '204': + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '401': + $ref: '#/components/responses/ConcurrentModificationResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: Convert a job result to a signal + tags: + - Security Monitoring + x-codegen-request-body-name: body + /api/v2/siem-historical-detections/jobs/{job_id}: + delete: + description: Delete an existing job. + operationId: DeleteHistoricalJob + parameters: + - $ref: '#/components/parameters/HistoricalJobID' + responses: + '204': + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '401': + $ref: '#/components/responses/ConcurrentModificationResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: Delete an existing job + tags: + - Security Monitoring + get: + description: Get a job's details. + operationId: GetHistoricalJob + parameters: + - $ref: '#/components/parameters/HistoricalJobID' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/HistoricalJobResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: Get a job's details + tags: + - Security Monitoring + /api/v2/siem-historical-detections/jobs/{job_id}/cancel: + patch: + description: Cancel a historical job. + operationId: CancelHistoricalJob + parameters: + - $ref: '#/components/parameters/HistoricalJobID' + responses: + '204': + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '401': + $ref: '#/components/responses/ConcurrentModificationResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + security: + - apiKeyAuth: [] + appKeyAuth: [] + - AuthZ: [] + summary: Cancel a historical job + tags: + - Security Monitoring /api/v2/slo/report: post: description: 'Create a job to generate an SLO report. 
The report job is processed diff --git a/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.frozen b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.frozen new file mode 100644 index 00000000000..8fc8d5999f7 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:58:59.172Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.yml b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.yml new file mode 100644 index 00000000000..c3f53e9d6e6 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Bad-Request-response.yml @@ -0,0 +1,20 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:58:59 GMT + request: + body: null + headers: + Accept: + - '*/*' + method: PATCH + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/inva-lid/cancel + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"400","detail":"invalid jobId"}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 400 + message: Bad Request +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.frozen b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.frozen new file mode 100644 index 00000000000..3eec91611a5 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:58:59.666Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.yml b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.yml new file mode 100644 index 00000000000..24307687669 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-Not-Found-response.yml @@ -0,0 +1,21 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:58:59 GMT + request: + body: null + headers: + Accept: + - '*/*' + method: PATCH + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93/cancel + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"404","title":"Not Found","detail":"Job 8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93 + was not found."}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 404 + message: Not Found +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.frozen b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.frozen new file mode 100644 index 00000000000..c01d2d75757 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:00.016Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.yml b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.yml new file mode 100644 index 00000000000..c1eb1ae291a --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Cancel-a-historical-job-returns-OK-response.yml @@ -0,0 +1,43 @@ 
+http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:00 GMT + request: + body: + encoding: UTF-8 + string: '{"data":{"attributes":{"jobDefinition":{"cases":[{"condition":"a > + 1","name":"Condition 1","notifications":[],"status":"info"}],"filters":[],"from":1730387522611,"index":"main","message":"A + large number of failed log-in attempts.","name":"Excessive number of failed + attempts.","options":{"evaluationWindow":900,"keepAlive":3600,"maxSignalDuration":86400},"queries":[{"aggregation":"count","distinctFields":[],"groupByFields":[],"query":"source:non_existing_src_weekend"}],"tags":[],"to":1730387532611,"type":"log_detection"}},"type":"historicalDetectionsJobCreate"}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs + response: + body: + encoding: UTF-8 + string: '{"data":{"id":"7ccb33e8-5c53-4db2-a9a1-78289d711be7","type":"historicalDetectionsJob"}}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 201 + message: Created +- recorded_at: Wed, 06 Nov 2024 09:59:00 GMT + request: + body: null + headers: + Accept: + - '*/*' + method: PATCH + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/7ccb33e8-5c53-4db2-a9a1-78289d711be7/cancel + response: + body: + encoding: UTF-8 + string: '' + headers: {} + status: + code: 204 + message: No Content +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.frozen b/cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.frozen new file mode 100644 index 00000000000..c1b53466a4d --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:00.971Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.yml b/cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.yml new file mode 100644 index 00000000000..03c16cc0daa --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Convert-a-job-result-to-a-signal-returns-Bad-Request-response.yml @@ -0,0 +1,26 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:00 GMT + request: + body: + encoding: UTF-8 + string: '{"data":{"attributes":{"jobResultIds":[""],"notifications":[""],"signalMessage":"A + large number of failed log-in attempts.","signalSeverity":"critical"},"type":"historicalDetectionsJobResultSignalConversion"}}' + headers: + Accept: + - '*/*' + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/signal_convert + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"400","title":"Generic Error","detail":"empty + jobResultId provided"}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 400 + message: Bad Request +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.frozen b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.frozen new file mode 100644 index 00000000000..90e8ca9a7c3 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.frozen @@ -0,0 +1 @@ 
+2024-11-06T09:59:14.420Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.yml b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.yml new file mode 100644 index 00000000000..39c014a7695 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Bad-Request-response.yml @@ -0,0 +1,21 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:14 GMT + request: + body: null + headers: + Accept: + - '*/*' + method: DELETE + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/inva-lid + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"400","title":"Generic Error","detail":"invalid + jobId"}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 400 + message: Bad Request +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.frozen b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.frozen new file mode 100644 index 00000000000..b1f9fdfc266 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:14.749Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.yml b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.yml new file mode 100644 index 00000000000..b0e725e9aab --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Delete-an-existing-job-returns-Not-Found-response.yml @@ -0,0 +1,21 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:14 GMT + request: + body: null + headers: + Accept: + - '*/*' + method: DELETE + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93 + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"404","title":"Not Found","detail":"Job 8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93 + was not found."}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 404 + message: Not Found +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.frozen b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.frozen new file mode 100644 index 00000000000..b98bd67d8d2 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:17.369Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.yml b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.yml new file mode 100644 index 00000000000..279c4e34065 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Bad-Request-response.yml @@ -0,0 +1,20 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:17 GMT + request: + body: null + headers: + Accept: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/inva-lid + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"400","detail":"invalid jobId"}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + 
code: 400 + message: Bad Request +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.frozen b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.frozen new file mode 100644 index 00000000000..08e35be6ba2 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:17.691Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.yml b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.yml new file mode 100644 index 00000000000..3861b8f4168 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-Not-Found-response.yml @@ -0,0 +1,21 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:17 GMT + request: + body: null + headers: + Accept: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93 + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"404","title":"Not Found","detail":"Job 8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93 + was not found."}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 404 + message: Not Found +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.frozen b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.frozen new file mode 100644 index 00000000000..f9c4c45757d --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:18.018Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.yml b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.yml new file mode 100644 index 00000000000..d2f07f948a0 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Get-a-job-s-details-returns-OK-response.yml @@ -0,0 +1,51 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:18 GMT + request: + body: + encoding: UTF-8 + string: '{"data":{"attributes":{"jobDefinition":{"cases":[{"condition":"a > + 1","name":"Condition 1","notifications":[],"status":"info"}],"filters":[],"from":1730387522611,"index":"main","message":"A + large number of failed log-in attempts.","name":"Excessive number of failed + attempts.","options":{"evaluationWindow":900,"keepAlive":3600,"maxSignalDuration":86400},"queries":[{"aggregation":"count","distinctFields":[],"groupByFields":[],"query":"source:non_existing_src_weekend"}],"tags":[],"to":1730387532611,"type":"log_detection"}},"type":"historicalDetectionsJobCreate"}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs + response: + body: + encoding: UTF-8 + string: '{"data":{"id":"67e31e2c-7cdc-4587-920e-de2518700100","type":"historicalDetectionsJob"}}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 201 + message: Created +- recorded_at: Wed, 06 Nov 2024 09:59:18 GMT + request: + body: null + headers: + Accept: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs/67e31e2c-7cdc-4587-920e-de2518700100 + response: 
+ body: + encoding: UTF-8 + string: '{"data":{"id":"67e31e2c-7cdc-4587-920e-de2518700100","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:59:18.538427+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"pending","modifiedAt":"2024-11-06 + 09:59:18.538427+00"}}}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 200 + message: OK +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.frozen b/cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.frozen new file mode 100644 index 00000000000..542d9481389 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:24.103Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.yml b/cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.yml new file mode 100644 index 00000000000..c17308c28f0 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/List-historical-jobs-returns-OK-response.yml @@ -0,0 +1,210 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:24 GMT + request: + body: + encoding: UTF-8 + string: '{"data":{"attributes":{"jobDefinition":{"cases":[{"condition":"a > + 1","name":"Condition 1","notifications":[],"status":"info"}],"filters":[],"from":1730387522611,"index":"main","message":"A + large number of failed log-in attempts.","name":"Excessive number of failed + attempts.","options":{"evaluationWindow":900,"keepAlive":3600,"maxSignalDuration":86400},"queries":[{"aggregation":"count","distinctFields":[],"groupByFields":[],"query":"source:non_existing_src_weekend"}],"tags":[],"to":1730387532611,"type":"log_detection"}},"type":"historicalDetectionsJobCreate"}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs + response: + body: + encoding: UTF-8 + string: '{"data":{"id":"e4d69a20-0b0c-4e99-8d34-596811b30087","type":"historicalDetectionsJob"}}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 201 + message: Created +- recorded_at: Wed, 06 Nov 2024 09:59:24 GMT + request: + body: null + headers: + Accept: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs + response: + body: + encoding: UTF-8 + string: '{"data":[{"id":"f4201468-7e8c-4195-9676-8910f9d7500e","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 13:59:30.390792+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 
1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 13:59:30.915146+00"}},{"id":"592f7f33-ebb1-41f2-bf10-35eaa094181b","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 14:19:46.630346+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 14:19:47.114398+00"}},{"id":"19db310f-85d6-43e4-ae23-0b008040d27e","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 14:22:33.260861+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 14:22:33.804062+00"}},{"id":"77686fbf-7d0a-4bc5-9cb3-aa1d7a191129","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 15:58:28.659388+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 15:58:29.25643+00"}},{"id":"059980c3-7dee-4e48-9ba9-aa39152e033f","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 15:58:59.637136+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 
1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 15:59:00.166804+00"}},{"id":"b6b7e8f7-1c77-40b0-837d-41b1e1e94958","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 15:59:12.832779+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 15:59:13.372278+00"}},{"id":"2926cb2b-beb7-457a-9752-b9aeec5c90dc","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 16:06:30.970504+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 16:06:31.423093+00"}},{"id":"386eb79b-4bcc-457d-af83-bd223fe802bd","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 16:06:02.90626+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 16:06:03.515111+00"}},{"id":"0a5f4f41-1dff-49d9-897f-6673d7018112","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 16:28:06.540461+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 
1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 16:28:07.018469+00"}},{"id":"b714b426-0d8e-48d9-9365-8ff72c0b2cca","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 17:53:33.762991+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 17:53:34.311915+00"}},{"id":"7ecdb6d0-ee07-4381-8088-12bf6a624092","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 17:57:40.301696+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 17:57:40.846472+00"}},{"id":"f792b1b5-df69-42e0-b846-006b70e63891","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 18:06:01.499057+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 18:06:02.080517+00"}},{"id":"893a886f-d43f-49eb-88c0-15cdd6274be2","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 08:26:35.190657+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 
1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-06 + 08:26:35.637478+00"}},{"id":"c5a7bb86-4b52-4945-9844-7df94998f0b5","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:07:46.084806+00","createdByHandle":"9919ec9b-ebc7-49ee-8dc8-03626e717cca","createdByName":"CI + Account","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-06 + 09:07:46.304323+00"}},{"id":"cf68cefe-3ba3-47af-b744-e0135581511c","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 13:51:24.583514+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 13:51:25.1163+00"}},{"id":"e2f06007-a01a-4fea-a2b6-9a32655b707c","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:07:46.529736+00","createdByHandle":"9919ec9b-ebc7-49ee-8dc8-03626e717cca","createdByName":"CI + Account","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-06 + 09:07:46.684504+00"}},{"id":"fb287991-c29e-4286-bc35-2a25dcc4ce0f","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 
13:51:52.255903+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"completed","modifiedAt":"2024-11-05 + 13:52:47.774936+00"}},{"id":"02e7e96f-c1d0-4fce-b2b7-9d92ce9a8cc0","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:07:47.237256+00","createdByHandle":"9919ec9b-ebc7-49ee-8dc8-03626e717cca","createdByName":"CI + Account","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"completed","modifiedAt":"2024-11-06 + 09:08:59.357224+00"}},{"id":"f2831f53-f857-471d-99e5-8e362bf2d72c","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 13:51:55.928099+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"completed","modifiedAt":"2024-11-05 + 13:52:47.774936+00"}},{"id":"2d2749c0-6e00-47f2-8918-348515c60802","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 13:51:45.465157+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"completed","modifiedAt":"2024-11-05 + 
13:52:47.774936+00"}},{"id":"c7c574b6-0b45-4c56-96b1-d8abb6b13b8a","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:07:47.445733+00","createdByHandle":"9919ec9b-ebc7-49ee-8dc8-03626e717cca","createdByName":"CI + Account","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"completed","modifiedAt":"2024-11-06 + 09:08:59.357224+00"}},{"id":"ab1e354b-570a-4702-b6f0-d722223c4fc1","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:07:48.280935+00","createdByHandle":"9919ec9b-ebc7-49ee-8dc8-03626e717cca","createdByName":"CI + Account","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"completed","modifiedAt":"2024-11-06 + 09:08:59.357224+00"}},{"id":"748f0b80-3751-4aaf-8979-2830a3489da5","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-05 + 13:55:59.872106+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-05 + 13:56:00.478047+00"}},{"id":"7ccb33e8-5c53-4db2-a9a1-78289d711be7","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:59:00.403771+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in 
attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"cancelled","modifiedAt":"2024-11-06 + 09:59:00.952402+00"}},{"id":"e4d69a20-0b0c-4e99-8d34-596811b30087","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:59:24.54597+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"running","modifiedAt":"2024-11-06 + 09:59:24.747231+00"}},{"id":"67e31e2c-7cdc-4587-920e-de2518700100","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-11-06 + 09:59:18.538427+00","createdByHandle":"frog@datadoghq.com","jobDefinition":{"from":1730387522611,"to":1730387532611,"index":"main","name":"Excessive + number of failed attempts.","cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a + \u003e 1"}],"queries":[{"query":"source:non_existing_src_weekend","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"A + large number of failed log-in attempts.","tags":[],"type":"log_detection","filters":[]},"jobName":"Excessive + number of failed attempts.","jobStatus":"running","modifiedAt":"2024-11-06 + 09:59:24.683176+00"}},{"id":"080ba2d8-c38d-47dd-9681-8e591b1186d0","type":"historicalDetectionsJob","attributes":{"createdAt":"2024-10-30 + 13:02:07.512289+00","createdByHandle":"frog@datadoghq.com","createdFromRuleId":"olk-znb-qc7","jobDefinition":{"from":1730201035064,"to":1730204635115,"index":"main","name":"Test-Typescript-Run_a_historical_job_returns_Status_created_response-1730293326","cases":[{"name":"","status":"info","notifications":[],"condition":"a + \u003e 0"}],"queries":[{"query":"@test:true","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"evaluationWindow":900,"detectionMethod":"threshold","maxSignalDuration":86400,"keepAlive":3600},"message":"Test + rule","tags":[],"type":"log_detection","filters":[]},"jobName":"Test-Typescript-Run_a_historical_job_returns_Status_created_response-1730293326","jobStatus":"completed","modifiedAt":"2024-10-30 + 13:03:20.870751+00"}}],"meta":{"totalCount":27}}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 200 + message: OK +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.frozen b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.frozen new file mode 100644 index 00000000000..e8019bd6061 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:25.915Z \ No newline at end of file diff --git 
a/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.yml b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.yml new file mode 100644 index 00000000000..c8a26059600 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Bad-Request-response.yml @@ -0,0 +1,70 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:25 GMT + request: + body: + encoding: UTF-8 + string: '{"cases":[{"condition":"a > 0","name":"","notifications":[],"status":"info"}],"filters":[],"isEnabled":true,"message":"Test + rule","name":"Test-Run_a_historical_job_returns_Bad_Request_response-1730887165","options":{"evaluationWindow":900,"keepAlive":3600,"maxSignalDuration":86400},"queries":[{"aggregation":"count","distinctFields":[],"groupByFields":[],"metrics":[],"query":"@test:true"}],"tags":[],"type":"log_detection"}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/security_monitoring/rules + response: + body: + encoding: UTF-8 + string: '{"id":"cun-ih2-7h9","version":1,"name":"Test-Run_a_historical_job_returns_Bad_Request_response-1730887165","createdAt":1730887166249,"creationAuthorId":1445416,"isDefault":false,"isPartner":false,"isEnabled":true,"isBeta":false,"isDeleted":false,"isDeprecated":false,"queries":[{"query":"@test:true","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"keepAlive":3600,"maxSignalDuration":86400,"detectionMethod":"threshold","evaluationWindow":900},"cases":[{"name":"","status":"info","notifications":[],"condition":"a + > 0"}],"message":"Test rule","tags":[],"hasExtendedTitle":false,"type":"log_detection","filters":[]} + + ' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- recorded_at: Wed, 06 Nov 2024 09:59:25 GMT + request: + body: + encoding: UTF-8 + string: '{"data":{"attributes":{"jobDefinition":{"cases":[{"condition":"a > + 1","name":"Condition 1","notifications":[],"status":"info"}],"filters":[],"from":1730387522611,"index":"non_existing_index","message":"A + large number of failed log-in attempts.","name":"Excessive number of failed + attempts.","options":{"evaluationWindow":900,"keepAlive":3600,"maxSignalDuration":86400},"queries":[{"aggregation":"count","distinctFields":[],"groupByFields":[],"query":"source:non_existing_src_weekend"}],"tags":[],"to":1730391122611,"type":"log_detection"}},"type":"historicalDetectionsJobCreate"}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs + response: + body: + encoding: UTF-8 + string: '{"errors":["input_validation_error(Field ''index'' is invalid: Invalid + index): Index must exist"]}' + headers: + Content-Type: + - application/json + status: + code: 400 + message: Bad Request +- recorded_at: Wed, 06 Nov 2024 09:59:25 GMT + request: + body: null + headers: + Accept: + - '*/*' + method: DELETE + uri: https://api.datadoghq.com/api/v2/security_monitoring/rules/cun-ih2-7h9 + response: + body: + encoding: UTF-8 + string: '' + headers: {} + status: + code: 204 + message: No Content +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.frozen b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.frozen new file 
mode 100644 index 00000000000..620b0a67c84 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:27.036Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.yml b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.yml new file mode 100644 index 00000000000..e56ade3f193 --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Not-Found-response.yml @@ -0,0 +1,24 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:27 GMT + request: + body: + encoding: UTF-8 + string: '{"data":{"attributes":{"fromRule":{"caseIndex":0,"from":1730201035064,"id":"non-existng","index":"main","notifications":[],"to":1730204635115}},"type":"historicalDetectionsJobCreate"}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs + response: + body: + encoding: UTF-8 + string: '{"errors":[{"status":"404","detail":"failed to get rule details"}]}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 404 + message: Not Found +recorded_with: VCR 6.0.0 diff --git a/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.frozen b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.frozen new file mode 100644 index 00000000000..c63f2e6d01b --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.frozen @@ -0,0 +1 @@ +2024-11-06T09:59:27.350Z \ No newline at end of file diff --git a/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.yml b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.yml new file mode 100644 index 00000000000..66ce54b941d --- /dev/null +++ b/cassettes/features/v2/security_monitoring/Run-a-historical-job-returns-Status-created-response.yml @@ -0,0 +1,69 @@ +http_interactions: +- recorded_at: Wed, 06 Nov 2024 09:59:27 GMT + request: + body: + encoding: UTF-8 + string: '{"cases":[{"condition":"a > 0","name":"","notifications":[],"status":"info"}],"filters":[],"isEnabled":true,"message":"Test + rule","name":"Test-Run_a_historical_job_returns_Status_created_response-1730887167","options":{"evaluationWindow":900,"keepAlive":3600,"maxSignalDuration":86400},"queries":[{"aggregation":"count","distinctFields":[],"groupByFields":[],"metrics":[],"query":"@test:true"}],"tags":[],"type":"log_detection"}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/security_monitoring/rules + response: + body: + encoding: UTF-8 + string: '{"id":"kkm-zmu-h0x","version":1,"name":"Test-Run_a_historical_job_returns_Status_created_response-1730887167","createdAt":1730887167685,"creationAuthorId":1445416,"isDefault":false,"isPartner":false,"isEnabled":true,"isBeta":false,"isDeleted":false,"isDeprecated":false,"queries":[{"query":"@test:true","groupByFields":[],"hasOptionalGroupByFields":false,"distinctFields":[],"aggregation":"count","name":""}],"options":{"keepAlive":3600,"maxSignalDuration":86400,"detectionMethod":"threshold","evaluationWindow":900},"cases":[{"name":"","status":"info","notifications":[],"condition":"a + > 0"}],"message":"Test 
rule","tags":[],"hasExtendedTitle":false,"type":"log_detection","filters":[]} + + ' + headers: + Content-Type: + - application/json + status: + code: 200 + message: OK +- recorded_at: Wed, 06 Nov 2024 09:59:27 GMT + request: + body: + encoding: UTF-8 + string: '{"data":{"attributes":{"jobDefinition":{"cases":[{"condition":"a > + 1","name":"Condition 1","notifications":[],"status":"info"}],"filters":[],"from":1730387522611,"index":"main","message":"A + large number of failed log-in attempts.","name":"Excessive number of failed + attempts.","options":{"evaluationWindow":900,"keepAlive":3600,"maxSignalDuration":86400},"queries":[{"aggregation":"count","distinctFields":[],"groupByFields":[],"query":"source:non_existing_src_weekend"}],"tags":[],"to":1730387532611,"type":"log_detection"}},"type":"historicalDetectionsJobCreate"}}' + headers: + Accept: + - application/json + Content-Type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/siem-historical-detections/jobs + response: + body: + encoding: UTF-8 + string: '{"data":{"id":"d23eac60-7104-4377-9301-8740ab1e7d35","type":"historicalDetectionsJob"}}' + headers: + Content-Type: + - application/vnd.api+json + status: + code: 201 + message: Created +- recorded_at: Wed, 06 Nov 2024 09:59:27 GMT + request: + body: null + headers: + Accept: + - '*/*' + method: DELETE + uri: https://api.datadoghq.com/api/v2/security_monitoring/rules/kkm-zmu-h0x + response: + body: + encoding: UTF-8 + string: '' + headers: {} + status: + code: 204 + message: No Content +recorded_with: VCR 6.0.0 diff --git a/examples/v2/security-monitoring/CancelHistoricalJob.rb b/examples/v2/security-monitoring/CancelHistoricalJob.rb new file mode 100644 index 00000000000..bb98d481127 --- /dev/null +++ b/examples/v2/security-monitoring/CancelHistoricalJob.rb @@ -0,0 +1,8 @@ +# Cancel a historical job returns "OK" response + +require "datadog_api_client" +api_instance = DatadogAPIClient::V2::SecurityMonitoringAPI.new + +# there is a valid "historical_job" in the system +HISTORICAL_JOB_DATA_ID = ENV["HISTORICAL_JOB_DATA_ID"] +api_instance.cancel_historical_job(HISTORICAL_JOB_DATA_ID) diff --git a/examples/v2/security-monitoring/ConvertJobResultToSignal.rb b/examples/v2/security-monitoring/ConvertJobResultToSignal.rb new file mode 100644 index 00000000000..7f76a867058 --- /dev/null +++ b/examples/v2/security-monitoring/ConvertJobResultToSignal.rb @@ -0,0 +1,21 @@ +# Convert a job result to a signal returns "OK" response + +require "datadog_api_client" +api_instance = DatadogAPIClient::V2::SecurityMonitoringAPI.new + +body = DatadogAPIClient::V2::ConvertJobResultsToSignalsRequest.new({ + data: DatadogAPIClient::V2::ConvertJobResultsToSignalsData.new({ + attributes: DatadogAPIClient::V2::ConvertJobResultsToSignalsAttributes.new({ + job_result_ids: [ + "", + ], + notifications: [ + "", + ], + signal_message: "A large number of failed login attempts.", + signal_severity: DatadogAPIClient::V2::SecurityMonitoringRuleSeverity::CRITICAL, + }), + type: DatadogAPIClient::V2::ConvertJobResultsToSignalsDataType::HISTORICALDETECTIONSJOBRESULTSIGNALCONVERSION, + }), +}) +api_instance.convert_job_result_to_signal(body) diff --git a/examples/v2/security-monitoring/DeleteHistoricalJob.rb b/examples/v2/security-monitoring/DeleteHistoricalJob.rb new file mode 100644 index 00000000000..222bf1d7101 --- /dev/null +++ b/examples/v2/security-monitoring/DeleteHistoricalJob.rb @@ -0,0 +1,5 @@ +# Delete an existing job returns "OK" response + +require "datadog_api_client" +api_instance = 
DatadogAPIClient::V2::SecurityMonitoringAPI.new +api_instance.delete_historical_job("job_id") diff --git a/examples/v2/security-monitoring/GetHistoricalJob.rb b/examples/v2/security-monitoring/GetHistoricalJob.rb new file mode 100644 index 00000000000..c965bcc0b2d --- /dev/null +++ b/examples/v2/security-monitoring/GetHistoricalJob.rb @@ -0,0 +1,8 @@ +# Get a job's details returns "OK" response + +require "datadog_api_client" +api_instance = DatadogAPIClient::V2::SecurityMonitoringAPI.new + +# there is a valid "historical_job" in the system +HISTORICAL_JOB_DATA_ID = ENV["HISTORICAL_JOB_DATA_ID"] +p api_instance.get_historical_job(HISTORICAL_JOB_DATA_ID) diff --git a/examples/v2/security-monitoring/ListHistoricalJobs.rb b/examples/v2/security-monitoring/ListHistoricalJobs.rb new file mode 100644 index 00000000000..308fd10a40e --- /dev/null +++ b/examples/v2/security-monitoring/ListHistoricalJobs.rb @@ -0,0 +1,5 @@ +# List historical jobs returns "OK" response + +require "datadog_api_client" +api_instance = DatadogAPIClient::V2::SecurityMonitoringAPI.new +p api_instance.list_historical_jobs() diff --git a/examples/v2/security-monitoring/RunHistoricalJob.rb b/examples/v2/security-monitoring/RunHistoricalJob.rb new file mode 100644 index 00000000000..e75b2ac835c --- /dev/null +++ b/examples/v2/security-monitoring/RunHistoricalJob.rb @@ -0,0 +1,44 @@ +# Run a historical job returns "Status created" response + +require "datadog_api_client" +api_instance = DatadogAPIClient::V2::SecurityMonitoringAPI.new + +body = DatadogAPIClient::V2::RunHistoricalJobRequest.new({ + data: DatadogAPIClient::V2::RunHistoricalJobRequestData.new({ + type: DatadogAPIClient::V2::RunHistoricalJobRequestDataType::HISTORICALDETECTIONSJOBCREATE, + attributes: DatadogAPIClient::V2::RunHistoricalJobRequestAttributes.new({ + job_definition: DatadogAPIClient::V2::JobDefinition.new({ + type: "log_detection", + name: "Excessive number of failed attempts.", + queries: [ + DatadogAPIClient::V2::SecurityMonitoringStandardRuleQuery.new({ + query: "source:non_existing_src_weekend", + aggregation: DatadogAPIClient::V2::SecurityMonitoringRuleQueryAggregation::COUNT, + group_by_fields: [], + distinct_fields: [], + }), + ], + filters: [], + cases: [ + DatadogAPIClient::V2::SecurityMonitoringRuleCaseCreate.new({ + name: "Condition 1", + status: DatadogAPIClient::V2::SecurityMonitoringRuleSeverity::INFO, + notifications: [], + condition: "a > 1", + }), + ], + options: DatadogAPIClient::V2::SecurityMonitoringRuleOptions.new({ + keep_alive: DatadogAPIClient::V2::SecurityMonitoringRuleKeepAlive::ONE_HOUR, + max_signal_duration: DatadogAPIClient::V2::SecurityMonitoringRuleMaxSignalDuration::ONE_DAY, + evaluation_window: DatadogAPIClient::V2::SecurityMonitoringRuleEvaluationWindow::FIFTEEN_MINUTES, + }), + message: "A large number of failed log-in attempts.", + tags: [], + from: 1730387522611, + to: 1730387532611, + index: "main", + }), + }), + }), +}) +p api_instance.run_historical_job(body) diff --git a/features/scenarios_model_mapping.rb b/features/scenarios_model_mapping.rb index b096dd2e20e..401d50b1727 100644 --- a/features/scenarios_model_mapping.rb +++ b/features/scenarios_model_mapping.rb @@ -1800,6 +1800,27 @@ "signal_id" => "String", "body" => "SecurityMonitoringSignalStateUpdateRequest", }, + "v2.ListHistoricalJobs" => { + "page_size" => "Integer", + "page_number" => "Integer", + "sort" => "String", + "filter_query" => "String", + }, + "v2.RunHistoricalJob" => { + "body" => "RunHistoricalJobRequest", + }, + 
"v2.ConvertJobResultToSignal" => { + "body" => "ConvertJobResultsToSignalsRequest", + }, + "v2.DeleteHistoricalJob" => { + "job_id" => "String", + }, + "v2.GetHistoricalJob" => { + "job_id" => "String", + }, + "v2.CancelHistoricalJob" => { + "job_id" => "String", + }, "v2.ListPowerpacks" => { "page_limit" => "Integer", "page_offset" => "Integer", diff --git a/features/v2/given.json b/features/v2/given.json index d5a158b247b..661392766b5 100644 --- a/features/v2/given.json +++ b/features/v2/given.json @@ -649,6 +649,18 @@ "tag": "Service Definition", "operationId": "CreateOrUpdateServiceDefinitions" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\": {\n \"type\": \"historicalDetectionsJobCreate\",\n \"attributes\": {\n \"jobDefinition\": {\n \"type\": \"log_detection\",\n \"name\": \"Excessive number of failed attempts.\",\n \"queries\": [\n {\n \"query\": \"source:non_existing_src_weekend\",\n \"aggregation\": \"count\",\n \"groupByFields\": [],\n \"distinctFields\": []\n }\n ],\n \"filters\": [],\n \"cases\": [\n {\n \"name\": \"Condition 1\",\n \"status\": \"info\",\n \"notifications\": [],\n \"condition\": \"a > 1\"\n }\n ],\n \"options\": {\n \"keepAlive\": 3600,\n \"maxSignalDuration\": 86400,\n \"evaluationWindow\": 900\n },\n \"message\": \"A large number of failed login attempts.\",\n \"tags\": [],\n \"from\": 1730387522611,\n \"to\": 1730387532611,\n \"index\": \"main\"\n }\n }\n }\n}" + } + ], + "step": "there is a valid \"historical_job\" in the system", + "key": "historical_job", + "tag": "Security Monitoring", + "operationId": "RunHistoricalJob" + }, { "parameters": [ { diff --git a/features/v2/security_monitoring.feature b/features/v2/security_monitoring.feature index 901edb86b49..dc6e9a6071d 100644 --- a/features/v2/security_monitoring.feature +++ b/features/v2/security_monitoring.feature @@ -9,6 +9,35 @@ Feature: Security Monitoring And a valid "appKeyAuth" key in the system And an instance of "SecurityMonitoring" API + @team:DataDog/k9-cloud-security-platform + Scenario: Cancel a historical job returns "Bad Request" response + Given new "CancelHistoricalJob" request + And request contains "job_id" parameter with value "inva-lid" + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/k9-cloud-security-platform + Scenario: Cancel a historical job returns "Conflict" response + Given new "CancelHistoricalJob" request + And request contains "job_id" parameter from "REPLACE.ME" + When the request is sent + Then the response status is 409 Conflict + + @team:DataDog/k9-cloud-security-platform + Scenario: Cancel a historical job returns "Not Found" response + Given new "CancelHistoricalJob" request + And request contains "job_id" parameter with value "8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93" + When the request is sent + Then the response status is 404 Not Found + + @team:DataDog/k9-cloud-security-platform + Scenario: Cancel a historical job returns "OK" response + Given new "CancelHistoricalJob" request + And there is a valid "historical_job" in the system + And request contains "job_id" parameter from "historical_job.data.id" + When the request is sent + Then the response status is 204 No Content + @generated @skip @team:DataDog/k9-cloud-security-platform Scenario: Change the related incidents of a security signal returns "Bad Request" response Given new "EditSecurityMonitoringSignalIncidents" request @@ -57,6 +86,27 @@ Feature: Security Monitoring When the request is sent Then the response status is 200 OK + 
@team:DataDog/k9-cloud-security-platform + Scenario: Convert a job result to a signal returns "Bad Request" response + Given new "ConvertJobResultToSignal" request + And body with value {"data": {"attributes": {"jobResultIds": [""], "notifications": [""], "signalMessage": "A large number of failed log-in attempts.", "signalSeverity": "critical"}, "type": "historicalDetectionsJobResultSignalConversion"}} + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/k9-cloud-security-platform + Scenario: Convert a job result to a signal returns "Not Found" response + Given new "ConvertJobResultToSignal" request + And body with value {"data": {"attributes": {"jobResultIds": [""], "notifications": [""], "signalMessage": "A large number of failed login attempts.", "signalSeverity": "critical"}, "type": "historicalDetectionsJobResultSignalConversion"}} + When the request is sent + Then the response status is 404 Not Found + + @generated @skip @team:DataDog/k9-cloud-security-platform + Scenario: Convert a job result to a signal returns "OK" response + Given new "ConvertJobResultToSignal" request + And body with value {"data": {"attributes": {"jobResultIds": [""], "notifications": [""], "signalMessage": "A large number of failed login attempts.", "signalSeverity": "critical"}, "type": "historicalDetectionsJobResultSignalConversion"}} + When the request is sent + Then the response status is 204 OK + @skip @team:DataDog/k9-cloud-security-platform Scenario: Convert a rule from JSON to Terraform returns "Bad Request" response Given new "ConvertSecurityMonitoringRuleFromJSONToTerraform" request @@ -282,6 +332,34 @@ Feature: Security Monitoring When the request is sent Then the response status is 204 OK + @team:DataDog/k9-cloud-security-platform + Scenario: Delete an existing job returns "Bad Request" response + Given new "DeleteHistoricalJob" request + And request contains "job_id" parameter with value "inva-lid" + When the request is sent + Then the response status is 400 Bad Request + + @generated @skip @team:DataDog/k9-cloud-security-platform + Scenario: Delete an existing job returns "Conflict" response + Given new "DeleteHistoricalJob" request + And request contains "job_id" parameter from "REPLACE.ME" + When the request is sent + Then the response status is 409 Conflict + + @team:DataDog/k9-cloud-security-platform + Scenario: Delete an existing job returns "Not Found" response + Given new "DeleteHistoricalJob" request + And request contains "job_id" parameter with value "8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93" + When the request is sent + Then the response status is 404 Not Found + + @generated @skip @team:DataDog/k9-cloud-security-platform + Scenario: Delete an existing job returns "OK" response + Given new "DeleteHistoricalJob" request + And request contains "job_id" parameter from "REPLACE.ME" + When the request is sent + Then the response status is 204 OK + @generated @skip @team:DataDog/k9-cloud-security-platform Scenario: Delete an existing rule returns "Not Found" response Given new "DeleteSecurityMonitoringRule" request @@ -332,6 +410,28 @@ Feature: Security Monitoring Then the response status is 200 OK And the response "data.attributes.evaluation" is equal to "pass" + @team:DataDog/k9-cloud-security-platform + Scenario: Get a job's details returns "Bad Request" response + Given new "GetHistoricalJob" request + And request contains "job_id" parameter with value "inva-lid" + When the request is sent + Then the response status is 400 Bad Request + 
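For the read-side endpoints covered by the next scenarios, the optional parameters of list_historical_jobs map onto the page[size], page[number], sort, and filter[query] query parameters. A minimal sketch, where the sort field and filter string are assumed example values rather than values taken from the spec:

    require "datadog_api_client"

    api_instance = DatadogAPIClient::V2::SecurityMonitoringAPI.new

    # Optional parameters are passed as a hash and become query parameters on
    # GET /api/v2/siem-historical-detections/jobs.
    opts = {
      page_size: 25,                        # maximum allowed value is 100
      page_number: 0,
      sort: "createdAt",                    # assumed example sort field
      filter_query: "jobStatus:completed",  # assumed example filter
    }
    p api_instance.list_historical_jobs(opts)
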
+ @team:DataDog/k9-cloud-security-platform + Scenario: Get a job's details returns "Not Found" response + Given new "GetHistoricalJob" request + And request contains "job_id" parameter with value "8e2a37fb-b0c8-4761-a7f0-0a8d6a98ba93" + When the request is sent + Then the response status is 404 Not Found + + @team:DataDog/k9-cloud-security-platform + Scenario: Get a job's details returns "OK" response + Given new "GetHistoricalJob" request + And there is a valid "historical_job" in the system + And request contains "job_id" parameter from "historical_job.data.id" + When the request is sent + Then the response status is 200 OK + @generated @skip @team:DataDog/k9-cloud-security-platform Scenario: Get a list of security signals returns "Bad Request" response Given new "SearchSecurityMonitoringSignals" request @@ -493,6 +593,19 @@ Feature: Security Monitoring When the request is sent Then the response status is 200 OK + @generated @skip @team:DataDog/k9-cloud-security-platform + Scenario: List historical jobs returns "Bad Request" response + Given new "ListHistoricalJobs" request + When the request is sent + Then the response status is 400 Bad Request + + @team:DataDog/k9-cloud-security-platform + Scenario: List historical jobs returns "OK" response + Given new "ListHistoricalJobs" request + And there is a valid "historical_job" in the system + When the request is sent + Then the response status is 200 OK + @generated @skip @team:DataDog/k9-cloud-security-platform Scenario: List rules returns "Bad Request" response Given new "ListSecurityMonitoringRules" request @@ -561,6 +674,29 @@ Feature: Security Monitoring When the request is sent Then the response status is 200 OK + @team:DataDog/k9-cloud-security-platform + Scenario: Run a historical job returns "Bad Request" response + Given new "RunHistoricalJob" request + And there is a valid "security_rule" in the system + And body with value {"data":{"type":"historicalDetectionsJobCreate","attributes":{"jobDefinition":{"type":"log_detection","name":"Excessive number of failed attempts.","queries":[{"query":"source:non_existing_src_weekend","aggregation":"count","groupByFields":[],"distinctFields":[]}],"filters":[],"cases":[{"name":"Condition 1","status":"info","notifications":[],"condition":"a > 1"}],"options":{"keepAlive":3600,"maxSignalDuration":86400,"evaluationWindow":900},"message":"A large number of failed log-in attempts.","tags":[],"from":1730387522611,"to":1730391122611,"index":"non_existing_index"}}}} + When the request is sent + Then the response status is 400 Bad Request + + @team:DataDog/k9-cloud-security-platform + Scenario: Run a historical job returns "Not Found" response + Given new "RunHistoricalJob" request + And body with value {"data": { "type": "historicalDetectionsJobCreate", "attributes": {"fromRule": {"caseIndex": 0, "from": 1730201035064, "id": "non-existng", "index": "main", "notifications": [], "to": 1730204635115}}}} + When the request is sent + Then the response status is 404 Not Found + + @team:DataDog/k9-cloud-security-platform + Scenario: Run a historical job returns "Status created" response + Given new "RunHistoricalJob" request + And there is a valid "security_rule" in the system + And body with value {"data":{"type":"historicalDetectionsJobCreate","attributes":{"jobDefinition":{"type":"log_detection","name":"Excessive number of failed attempts.","queries":[{"query":"source:non_existing_src_weekend","aggregation":"count","groupByFields":[],"distinctFields":[]}],"filters":[],"cases":[{"name":"Condition 
1","status":"info","notifications":[],"condition":"a > 1"}],"options":{"keepAlive":3600,"maxSignalDuration":86400,"evaluationWindow":900},"message":"A large number of failed log-in attempts.","tags":[],"from":1730387522611,"to":1730387532611,"index":"main"}}}} + When the request is sent + Then the response status is 201 Status created + @skip @team:DataDog/k9-cloud-security-platform Scenario: Test a rule returns "Bad Request" response Given new "TestSecurityMonitoringRule" request diff --git a/features/v2/undo.json b/features/v2/undo.json index 81c6a29e22c..0e2a5942a9d 100644 --- a/features/v2/undo.json +++ b/features/v2/undo.json @@ -2272,6 +2272,42 @@ "type": "idempotent" } }, + "ListHistoricalJobs": { + "tag": "Security Monitoring", + "undo": { + "type": "safe" + } + }, + "RunHistoricalJob": { + "tag": "Security Monitoring", + "undo": { + "type": "idempotent" + } + }, + "ConvertJobResultToSignal": { + "tag": "Security Monitoring", + "undo": { + "type": "idempotent" + } + }, + "DeleteHistoricalJob": { + "tag": "Security Monitoring", + "undo": { + "type": "idempotent" + } + }, + "GetHistoricalJob": { + "tag": "Security Monitoring", + "undo": { + "type": "safe" + } + }, + "CancelHistoricalJob": { + "tag": "Security Monitoring", + "undo": { + "type": "idempotent" + } + }, "CreateSLOReportJob": { "tag": "Service Level Objectives", "undo": { diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index 512f2e34deb..c710c79f446 100644 --- a/lib/datadog_api_client/inflector.rb +++ b/lib/datadog_api_client/inflector.rb @@ -1016,6 +1016,7 @@ def overrides "v2.bulk_mute_findings_request_properties" => "BulkMuteFindingsRequestProperties", "v2.bulk_mute_findings_response" => "BulkMuteFindingsResponse", "v2.bulk_mute_findings_response_data" => "BulkMuteFindingsResponseData", + "v2.calculated_field" => "CalculatedField", "v2.case" => "Case", "v2.case3rd_party_ticket_status" => "Case3rdPartyTicketStatus", "v2.case_assign" => "CaseAssign", @@ -1203,6 +1204,10 @@ def overrides "v2.containers_response_links" => "ContainersResponseLinks", "v2.container_type" => "ContainerType", "v2.content_encoding" => "ContentEncoding", + "v2.convert_job_results_to_signals_attributes" => "ConvertJobResultsToSignalsAttributes", + "v2.convert_job_results_to_signals_data" => "ConvertJobResultsToSignalsData", + "v2.convert_job_results_to_signals_data_type" => "ConvertJobResultsToSignalsDataType", + "v2.convert_job_results_to_signals_request" => "ConvertJobResultsToSignalsRequest", "v2.cost_attribution_aggregates_body" => "CostAttributionAggregatesBody", "v2.cost_attribution_type" => "CostAttributionType", "v2.cost_by_org" => "CostByOrg", @@ -1476,6 +1481,10 @@ def overrides "v2.get_interfaces_response" => "GetInterfacesResponse", "v2.get_team_memberships_sort" => "GetTeamMembershipsSort", "v2.group_scalar_column" => "GroupScalarColumn", + "v2.historical_job_data_type" => "HistoricalJobDataType", + "v2.historical_job_list_meta" => "HistoricalJobListMeta", + "v2.historical_job_response" => "HistoricalJobResponse", + "v2.historical_job_response_attributes" => "HistoricalJobResponseAttributes", "v2.hourly_usage" => "HourlyUsage", "v2.hourly_usage_attributes" => "HourlyUsageAttributes", "v2.hourly_usage_measurement" => "HourlyUsageMeasurement", @@ -1635,6 +1644,10 @@ def overrides "v2.jira_integration_metadata_issues_item" => "JiraIntegrationMetadataIssuesItem", "v2.jira_issue" => "JiraIssue", "v2.jira_issue_result" => "JiraIssueResult", + "v2.job_create_response" => "JobCreateResponse", + 
"v2.job_create_response_data" => "JobCreateResponseData", + "v2.job_definition" => "JobDefinition", + "v2.job_definition_from_rule" => "JobDefinitionFromRule", "v2.jsonapi_error_item" => "JSONAPIErrorItem", "v2.jsonapi_error_response" => "JSONAPIErrorResponse", "v2.leaked_key" => "LeakedKey", @@ -1656,6 +1669,7 @@ def overrides "v2.list_findings_meta" => "ListFindingsMeta", "v2.list_findings_page" => "ListFindingsPage", "v2.list_findings_response" => "ListFindingsResponse", + "v2.list_historical_jobs_response" => "ListHistoricalJobsResponse", "v2.list_powerpacks_response" => "ListPowerpacksResponse", "v2.list_rules_response" => "ListRulesResponse", "v2.list_rules_response_data_item" => "ListRulesResponseDataItem", @@ -2132,6 +2146,10 @@ def overrides "v2.rum_sort" => "RUMSort", "v2.rum_sort_order" => "RUMSortOrder", "v2.rum_warning" => "RUMWarning", + "v2.run_historical_job_request" => "RunHistoricalJobRequest", + "v2.run_historical_job_request_attributes" => "RunHistoricalJobRequestAttributes", + "v2.run_historical_job_request_data" => "RunHistoricalJobRequestData", + "v2.run_historical_job_request_data_type" => "RunHistoricalJobRequestDataType", "v2.saml_assertion_attribute" => "SAMLAssertionAttribute", "v2.saml_assertion_attribute_attributes" => "SAMLAssertionAttributeAttributes", "v2.saml_assertion_attributes_type" => "SAMLAssertionAttributesType", diff --git a/lib/datadog_api_client/v2/api/security_monitoring_api.rb b/lib/datadog_api_client/v2/api/security_monitoring_api.rb index d58f4ad638a..755fbc52393 100644 --- a/lib/datadog_api_client/v2/api/security_monitoring_api.rb +++ b/lib/datadog_api_client/v2/api/security_monitoring_api.rb @@ -23,6 +23,71 @@ def initialize(api_client = DatadogAPIClient::APIClient.default) @api_client = api_client end + # Cancel a historical job. + # + # @see #cancel_historical_job_with_http_info + def cancel_historical_job(job_id, opts = {}) + cancel_historical_job_with_http_info(job_id, opts) + nil + end + + # Cancel a historical job. + # + # Cancel a historical job. + # + # @param job_id [String] The ID of the job. + # @param opts [Hash] the optional parameters + # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers + def cancel_historical_job_with_http_info(job_id, opts = {}) + + if @api_client.config.debugging + @api_client.config.logger.debug 'Calling API: SecurityMonitoringAPI.cancel_historical_job ...' + end + # verify the required parameter 'job_id' is set + if @api_client.config.client_side_validation && job_id.nil? 
+ fail ArgumentError, "Missing the required parameter 'job_id' when calling SecurityMonitoringAPI.cancel_historical_job" + end + # resource path + local_var_path = '/api/v2/siem-historical-detections/jobs/{job_id}/cancel'.sub('{job_id}', CGI.escape(job_id.to_s).gsub('%2F', '/')) + + # query parameters + query_params = opts[:query_params] || {} + + # header parameters + header_params = opts[:header_params] || {} + # HTTP header 'Accept' (if needed) + header_params['Accept'] = @api_client.select_header_accept(['*/*']) + + # form parameters + form_params = opts[:form_params] || {} + + # http body (model) + post_body = opts[:debug_body] + + # return_type + return_type = opts[:debug_return_type] + + # auth_names + auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth, :AuthZ] + + new_options = opts.merge( + :operation => :cancel_historical_job, + :header_params => header_params, + :query_params => query_params, + :form_params => form_params, + :body => post_body, + :auth_names => auth_names, + :return_type => return_type, + :api_version => "V2" + ) + + data, status_code, headers = @api_client.call_api(Net::HTTP::Patch, local_var_path, new_options) + if @api_client.config.debugging + @api_client.config.logger.debug "API called: SecurityMonitoringAPI#cancel_historical_job\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" + end + return data, status_code, headers + end + # Convert an existing rule from JSON to Terraform. # # @see #convert_existing_security_monitoring_rule_with_http_info @@ -89,6 +154,73 @@ def convert_existing_security_monitoring_rule_with_http_info(rule_id, opts = {}) return data, status_code, headers end + # Convert a job result to a signal. + # + # @see #convert_job_result_to_signal_with_http_info + def convert_job_result_to_signal(body, opts = {}) + convert_job_result_to_signal_with_http_info(body, opts) + nil + end + + # Convert a job result to a signal. + # + # Convert a job result to a signal. + # + # @param body [ConvertJobResultsToSignalsRequest] + # @param opts [Hash] the optional parameters + # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers + def convert_job_result_to_signal_with_http_info(body, opts = {}) + + if @api_client.config.debugging + @api_client.config.logger.debug 'Calling API: SecurityMonitoringAPI.convert_job_result_to_signal ...' + end + # verify the required parameter 'body' is set + if @api_client.config.client_side_validation && body.nil? 
+ fail ArgumentError, "Missing the required parameter 'body' when calling SecurityMonitoringAPI.convert_job_result_to_signal" + end + # resource path + local_var_path = '/api/v2/siem-historical-detections/jobs/signal_convert' + + # query parameters + query_params = opts[:query_params] || {} + + # header parameters + header_params = opts[:header_params] || {} + # HTTP header 'Accept' (if needed) + header_params['Accept'] = @api_client.select_header_accept(['*/*']) + # HTTP header 'Content-Type' + header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) + + # form parameters + form_params = opts[:form_params] || {} + + # http body (model) + post_body = opts[:debug_body] || @api_client.object_to_http_body(body) + + # return_type + return_type = opts[:debug_return_type] + + # auth_names + auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth, :AuthZ] + + new_options = opts.merge( + :operation => :convert_job_result_to_signal, + :header_params => header_params, + :query_params => query_params, + :form_params => form_params, + :body => post_body, + :auth_names => auth_names, + :return_type => return_type, + :api_version => "V2" + ) + + data, status_code, headers = @api_client.call_api(Net::HTTP::Post, local_var_path, new_options) + if @api_client.config.debugging + @api_client.config.logger.debug "API called: SecurityMonitoringAPI#convert_job_result_to_signal\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" + end + return data, status_code, headers + end + # Convert a rule from JSON to Terraform. # # @see #convert_security_monitoring_rule_from_json_to_terraform_with_http_info @@ -361,6 +493,71 @@ def create_security_monitoring_suppression_with_http_info(body, opts = {}) return data, status_code, headers end + # Delete an existing job. + # + # @see #delete_historical_job_with_http_info + def delete_historical_job(job_id, opts = {}) + delete_historical_job_with_http_info(job_id, opts) + nil + end + + # Delete an existing job. + # + # Delete an existing job. + # + # @param job_id [String] The ID of the job. + # @param opts [Hash] the optional parameters + # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers + def delete_historical_job_with_http_info(job_id, opts = {}) + + if @api_client.config.debugging + @api_client.config.logger.debug 'Calling API: SecurityMonitoringAPI.delete_historical_job ...' + end + # verify the required parameter 'job_id' is set + if @api_client.config.client_side_validation && job_id.nil? 
+ fail ArgumentError, "Missing the required parameter 'job_id' when calling SecurityMonitoringAPI.delete_historical_job" + end + # resource path + local_var_path = '/api/v2/siem-historical-detections/jobs/{job_id}'.sub('{job_id}', CGI.escape(job_id.to_s).gsub('%2F', '/')) + + # query parameters + query_params = opts[:query_params] || {} + + # header parameters + header_params = opts[:header_params] || {} + # HTTP header 'Accept' (if needed) + header_params['Accept'] = @api_client.select_header_accept(['*/*']) + + # form parameters + form_params = opts[:form_params] || {} + + # http body (model) + post_body = opts[:debug_body] + + # return_type + return_type = opts[:debug_return_type] + + # auth_names + auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth, :AuthZ] + + new_options = opts.merge( + :operation => :delete_historical_job, + :header_params => header_params, + :query_params => query_params, + :form_params => form_params, + :body => post_body, + :auth_names => auth_names, + :return_type => return_type, + :api_version => "V2" + ) + + data, status_code, headers = @api_client.call_api(Net::HTTP::Delete, local_var_path, new_options) + if @api_client.config.debugging + @api_client.config.logger.debug "API called: SecurityMonitoringAPI#delete_historical_job\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" + end + return data, status_code, headers + end + # Delete a security filter. # # @see #delete_security_filter_with_http_info @@ -848,6 +1045,71 @@ def get_finding_with_http_info(finding_id, opts = {}) return data, status_code, headers end + # Get a job's details. + # + # @see #get_historical_job_with_http_info + def get_historical_job(job_id, opts = {}) + data, _status_code, _headers = get_historical_job_with_http_info(job_id, opts) + data + end + + # Get a job's details. + # + # Get a job's details. + # + # @param job_id [String] The ID of the job. + # @param opts [Hash] the optional parameters + # @return [Array<(HistoricalJobResponse, Integer, Hash)>] HistoricalJobResponse data, response status code and response headers + def get_historical_job_with_http_info(job_id, opts = {}) + + if @api_client.config.debugging + @api_client.config.logger.debug 'Calling API: SecurityMonitoringAPI.get_historical_job ...' + end + # verify the required parameter 'job_id' is set + if @api_client.config.client_side_validation && job_id.nil? 
+ fail ArgumentError, "Missing the required parameter 'job_id' when calling SecurityMonitoringAPI.get_historical_job" + end + # resource path + local_var_path = '/api/v2/siem-historical-detections/jobs/{job_id}'.sub('{job_id}', CGI.escape(job_id.to_s).gsub('%2F', '/')) + + # query parameters + query_params = opts[:query_params] || {} + + # header parameters + header_params = opts[:header_params] || {} + # HTTP header 'Accept' (if needed) + header_params['Accept'] = @api_client.select_header_accept(['application/json']) + + # form parameters + form_params = opts[:form_params] || {} + + # http body (model) + post_body = opts[:debug_body] + + # return_type + return_type = opts[:debug_return_type] || 'HistoricalJobResponse' + + # auth_names + auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth, :AuthZ] + + new_options = opts.merge( + :operation => :get_historical_job, + :header_params => header_params, + :query_params => query_params, + :form_params => form_params, + :body => post_body, + :auth_names => auth_names, + :return_type => return_type, + :api_version => "V2" + ) + + data, status_code, headers = @api_client.call_api(Net::HTTP::Get, local_var_path, new_options) + if @api_client.config.debugging + @api_client.config.logger.debug "API called: SecurityMonitoringAPI#get_historical_job\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" + end + return data, status_code, headers + end + # Get a security filter. # # @see #get_security_filter_with_http_info @@ -1272,6 +1534,74 @@ def list_findings_with_pagination(opts = {}) end end + # List historical jobs. + # + # @see #list_historical_jobs_with_http_info + def list_historical_jobs(opts = {}) + data, _status_code, _headers = list_historical_jobs_with_http_info(opts) + data + end + + # List historical jobs. + # + # List historical jobs. + # + # @param opts [Hash] the optional parameters + # @option opts [Integer] :page_size Size for a given page. The maximum allowed value is 100. + # @option opts [Integer] :page_number Specific page number to return. + # @option opts [String] :sort The order of the jobs in results. + # @option opts [String] :filter_query Query used to filter items from the fetched list. + # @return [Array<(ListHistoricalJobsResponse, Integer, Hash)>] ListHistoricalJobsResponse data, response status code and response headers + def list_historical_jobs_with_http_info(opts = {}) + + if @api_client.config.debugging + @api_client.config.logger.debug 'Calling API: SecurityMonitoringAPI.list_historical_jobs ...' + end + # resource path + local_var_path = '/api/v2/siem-historical-detections/jobs' + + # query parameters + query_params = opts[:query_params] || {} + query_params[:'page[size]'] = opts[:'page_size'] if !opts[:'page_size'].nil? + query_params[:'page[number]'] = opts[:'page_number'] if !opts[:'page_number'].nil? + query_params[:'sort'] = opts[:'sort'] if !opts[:'sort'].nil? + query_params[:'filter[query]'] = opts[:'filter_query'] if !opts[:'filter_query'].nil? 
+ + # header parameters + header_params = opts[:header_params] || {} + # HTTP header 'Accept' (if needed) + header_params['Accept'] = @api_client.select_header_accept(['application/json']) + + # form parameters + form_params = opts[:form_params] || {} + + # http body (model) + post_body = opts[:debug_body] + + # return_type + return_type = opts[:debug_return_type] || 'ListHistoricalJobsResponse' + + # auth_names + auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth, :AuthZ] + + new_options = opts.merge( + :operation => :list_historical_jobs, + :header_params => header_params, + :query_params => query_params, + :form_params => form_params, + :body => post_body, + :auth_names => auth_names, + :return_type => return_type, + :api_version => "V2" + ) + + data, status_code, headers = @api_client.call_api(Net::HTTP::Get, local_var_path, new_options) + if @api_client.config.debugging + @api_client.config.logger.debug "API called: SecurityMonitoringAPI#list_historical_jobs\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" + end + return data, status_code, headers + end + # Get all security filters. # # @see #list_security_filters_with_http_info @@ -1631,6 +1961,73 @@ def mute_findings_with_http_info(body, opts = {}) return data, status_code, headers end + # Run a historical job. + # + # @see #run_historical_job_with_http_info + def run_historical_job(body, opts = {}) + data, _status_code, _headers = run_historical_job_with_http_info(body, opts) + data + end + + # Run a historical job. + # + # Run a historical job. + # + # @param body [RunHistoricalJobRequest] + # @param opts [Hash] the optional parameters + # @return [Array<(JobCreateResponse, Integer, Hash)>] JobCreateResponse data, response status code and response headers + def run_historical_job_with_http_info(body, opts = {}) + + if @api_client.config.debugging + @api_client.config.logger.debug 'Calling API: SecurityMonitoringAPI.run_historical_job ...' + end + # verify the required parameter 'body' is set + if @api_client.config.client_side_validation && body.nil? 
+ fail ArgumentError, "Missing the required parameter 'body' when calling SecurityMonitoringAPI.run_historical_job" + end + # resource path + local_var_path = '/api/v2/siem-historical-detections/jobs' + + # query parameters + query_params = opts[:query_params] || {} + + # header parameters + header_params = opts[:header_params] || {} + # HTTP header 'Accept' (if needed) + header_params['Accept'] = @api_client.select_header_accept(['application/json']) + # HTTP header 'Content-Type' + header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) + + # form parameters + form_params = opts[:form_params] || {} + + # http body (model) + post_body = opts[:debug_body] || @api_client.object_to_http_body(body) + + # return_type + return_type = opts[:debug_return_type] || 'JobCreateResponse' + + # auth_names + auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth, :AuthZ] + + new_options = opts.merge( + :operation => :run_historical_job, + :header_params => header_params, + :query_params => query_params, + :form_params => form_params, + :body => post_body, + :auth_names => auth_names, + :return_type => return_type, + :api_version => "V2" + ) + + data, status_code, headers = @api_client.call_api(Net::HTTP::Post, local_var_path, new_options) + if @api_client.config.debugging + @api_client.config.logger.debug "API called: SecurityMonitoringAPI#run_historical_job\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" + end + return data, status_code, headers + end + # Get a list of security signals. # # @see #search_security_monitoring_signals_with_http_info diff --git a/lib/datadog_api_client/v2/models/calculated_field.rb b/lib/datadog_api_client/v2/models/calculated_field.rb new file mode 100644 index 00000000000..717077d5a7a --- /dev/null +++ b/lib/datadog_api_client/v2/models/calculated_field.rb @@ -0,0 +1,144 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Calculated field. + class CalculatedField + include BaseGenericModel + + # Expression. + attr_reader :expression + + # Field name. + attr_reader :name + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'expression' => :'expression', + :'name' => :'name' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'expression' => :'String', + :'name' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::CalculatedField` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'expression') + self.expression = attributes[:'expression'] + end + + if attributes.key?(:'name') + self.name = attributes[:'name'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @expression.nil? + return false if @name.nil? + true + end + + # Custom attribute writer method with validation + # @param expression [Object] Object to be assigned + # @!visibility private + def expression=(expression) + if expression.nil? + fail ArgumentError, 'invalid value for "expression", expression cannot be nil.' + end + @expression = expression + end + + # Custom attribute writer method with validation + # @param name [Object] Object to be assigned + # @!visibility private + def name=(name) + if name.nil? + fail ArgumentError, 'invalid value for "name", name cannot be nil.' + end + @name = name + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + expression == o.expression && + name == o.name + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [expression, name].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/convert_job_results_to_signals_attributes.rb b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_attributes.rb new file mode 100644 index 00000000000..68243cccbed --- /dev/null +++ b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_attributes.rb @@ -0,0 +1,200 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. 
+ +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Attributes for converting historical job results to signals. + class ConvertJobResultsToSignalsAttributes + include BaseGenericModel + + # Request ID. + attr_accessor :id + + # Job result IDs. + attr_reader :job_result_ids + + # Notifications sent. + attr_reader :notifications + + # Message of generated signals. + attr_reader :signal_message + + # Severity of the Security Signal. + attr_reader :signal_severity + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'job_result_ids' => :'jobResultIds', + :'notifications' => :'notifications', + :'signal_message' => :'signalMessage', + :'signal_severity' => :'signalSeverity' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'job_result_ids' => :'Array', + :'notifications' => :'Array', + :'signal_message' => :'String', + :'signal_severity' => :'SecurityMonitoringRuleSeverity' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ConvertJobResultsToSignalsAttributes` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'job_result_ids') + if (value = attributes[:'job_result_ids']).is_a?(Array) + self.job_result_ids = value + end + end + + if attributes.key?(:'notifications') + if (value = attributes[:'notifications']).is_a?(Array) + self.notifications = value + end + end + + if attributes.key?(:'signal_message') + self.signal_message = attributes[:'signal_message'] + end + + if attributes.key?(:'signal_severity') + self.signal_severity = attributes[:'signal_severity'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @job_result_ids.nil? + return false if @notifications.nil? + return false if @signal_message.nil? + return false if @signal_severity.nil? + true + end + + # Custom attribute writer method with validation + # @param job_result_ids [Object] Object to be assigned + # @!visibility private + def job_result_ids=(job_result_ids) + if job_result_ids.nil? + fail ArgumentError, 'invalid value for "job_result_ids", job_result_ids cannot be nil.' + end + @job_result_ids = job_result_ids + end + + # Custom attribute writer method with validation + # @param notifications [Object] Object to be assigned + # @!visibility private + def notifications=(notifications) + if notifications.nil? + fail ArgumentError, 'invalid value for "notifications", notifications cannot be nil.' + end + @notifications = notifications + end + + # Custom attribute writer method with validation + # @param signal_message [Object] Object to be assigned + # @!visibility private + def signal_message=(signal_message) + if signal_message.nil? 
+ fail ArgumentError, 'invalid value for "signal_message", signal_message cannot be nil.' + end + @signal_message = signal_message + end + + # Custom attribute writer method with validation + # @param signal_severity [Object] Object to be assigned + # @!visibility private + def signal_severity=(signal_severity) + if signal_severity.nil? + fail ArgumentError, 'invalid value for "signal_severity", signal_severity cannot be nil.' + end + @signal_severity = signal_severity + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + job_result_ids == o.job_result_ids && + notifications == o.notifications && + signal_message == o.signal_message && + signal_severity == o.signal_severity + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [id, job_result_ids, notifications, signal_message, signal_severity].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/convert_job_results_to_signals_data.rb b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_data.rb new file mode 100644 index 00000000000..8a71ff6b71d --- /dev/null +++ b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_data.rb @@ -0,0 +1,115 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Data for converting historical job results to signals. + class ConvertJobResultsToSignalsData + include BaseGenericModel + + # Attributes for converting historical job results to signals. + attr_accessor :attributes + + # Type of payload. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'attributes' => :'attributes', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'attributes' => :'ConvertJobResultsToSignalsAttributes', + :'type' => :'ConvertJobResultsToSignalsDataType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ConvertJobResultsToSignalsData` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'attributes') + self.attributes = attributes[:'attributes'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + attributes == o.attributes && + type == o.type + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [attributes, type].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/convert_job_results_to_signals_data_type.rb b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_data_type.rb new file mode 100644 index 00000000000..52b27fb9219 --- /dev/null +++ b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_data_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Type of payload. + class ConvertJobResultsToSignalsDataType + include BaseEnumModel + + HISTORICALDETECTIONSJOBRESULTSIGNALCONVERSION = "historicalDetectionsJobResultSignalConversion".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/convert_job_results_to_signals_request.rb b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_request.rb new file mode 100644 index 00000000000..caf040e9588 --- /dev/null +++ b/lib/datadog_api_client/v2/models/convert_job_results_to_signals_request.rb @@ -0,0 +1,105 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Request for converting historical job results to signals. + class ConvertJobResultsToSignalsRequest + include BaseGenericModel + + # Data for converting historical job results to signals. + attr_accessor :data + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'data' => :'data' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'data' => :'ConvertJobResultsToSignalsData' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ConvertJobResultsToSignalsRequest` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'data') + self.data = attributes[:'data'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + data == o.data + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [data].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/historical_job_data_type.rb b/lib/datadog_api_client/v2/models/historical_job_data_type.rb new file mode 100644 index 00000000000..a9ad15728d9 --- /dev/null +++ b/lib/datadog_api_client/v2/models/historical_job_data_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. 
+ +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Type of payload. + class HistoricalJobDataType + include BaseEnumModel + + HISTORICALDETECTIONSJOB = "historicalDetectionsJob".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/historical_job_list_meta.rb b/lib/datadog_api_client/v2/models/historical_job_list_meta.rb new file mode 100644 index 00000000000..ea1d0d8afb1 --- /dev/null +++ b/lib/datadog_api_client/v2/models/historical_job_list_meta.rb @@ -0,0 +1,123 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Metadata about the list of jobs. + class HistoricalJobListMeta + include BaseGenericModel + + # Number of jobs in the list. + attr_reader :total_count + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'total_count' => :'totalCount' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'total_count' => :'Integer' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::HistoricalJobListMeta` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'total_count') + self.total_count = attributes[:'total_count'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if !@total_count.nil? && @total_count > 2147483647 + true + end + + # Custom attribute writer method with validation + # @param total_count [Object] Object to be assigned + # @!visibility private + def total_count=(total_count) + if !total_count.nil? && total_count > 2147483647 + fail ArgumentError, 'invalid value for "total_count", must be smaller than or equal to 2147483647.' + end + @total_count = total_count + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + total_count == o.total_count + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [total_count].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/historical_job_response.rb b/lib/datadog_api_client/v2/models/historical_job_response.rb new file mode 100644 index 00000000000..0494cc28a62 --- /dev/null +++ b/lib/datadog_api_client/v2/models/historical_job_response.rb @@ -0,0 +1,125 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Historical job response. + class HistoricalJobResponse + include BaseGenericModel + + # Historical job attributes. + attr_accessor :attributes + + # ID of the job. + attr_accessor :id + + # Type of payload. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'attributes' => :'attributes', + :'id' => :'id', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'attributes' => :'HistoricalJobResponseAttributes', + :'id' => :'String', + :'type' => :'HistoricalJobDataType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::HistoricalJobResponse` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'attributes') + self.attributes = attributes[:'attributes'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + attributes == o.attributes && + id == o.id && + type == o.type + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [attributes, id, type].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/historical_job_response_attributes.rb b/lib/datadog_api_client/v2/models/historical_job_response_attributes.rb new file mode 100644 index 00000000000..f1a851dfbc3 --- /dev/null +++ b/lib/datadog_api_client/v2/models/historical_job_response_attributes.rb @@ -0,0 +1,175 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Historical job attributes. + class HistoricalJobResponseAttributes + include BaseGenericModel + + # The name of the user who created the job. + attr_accessor :created_by_name + + # Time when the job was created. + attr_accessor :created_at + + # The handle of the user who created the job. + attr_accessor :created_by_handle + + # ID of the rule used to create the job (if it is created from a rule). + attr_accessor :created_from_rule_id + + # Definition of a historical job. + attr_accessor :job_definition + + # Job name. + attr_accessor :job_name + + # Job status. + attr_accessor :job_status + + # Last modification time of the job. + attr_accessor :modified_at + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'created_by_name' => :'CreatedByName', + :'created_at' => :'createdAt', + :'created_by_handle' => :'createdByHandle', + :'created_from_rule_id' => :'createdFromRuleId', + :'job_definition' => :'jobDefinition', + :'job_name' => :'jobName', + :'job_status' => :'jobStatus', + :'modified_at' => :'modifiedAt' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'created_by_name' => :'String', + :'created_at' => :'String', + :'created_by_handle' => :'String', + :'created_from_rule_id' => :'String', + :'job_definition' => :'JobDefinition', + :'job_name' => :'String', + :'job_status' => :'String', + :'modified_at' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::HistoricalJobResponseAttributes` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'created_by_name') + self.created_by_name = attributes[:'created_by_name'] + end + + if attributes.key?(:'created_at') + self.created_at = attributes[:'created_at'] + end + + if attributes.key?(:'created_by_handle') + self.created_by_handle = attributes[:'created_by_handle'] + end + + if attributes.key?(:'created_from_rule_id') + self.created_from_rule_id = attributes[:'created_from_rule_id'] + end + + if attributes.key?(:'job_definition') + self.job_definition = attributes[:'job_definition'] + end + + if attributes.key?(:'job_name') + self.job_name = attributes[:'job_name'] + end + + if attributes.key?(:'job_status') + self.job_status = attributes[:'job_status'] + end + + if attributes.key?(:'modified_at') + self.modified_at = attributes[:'modified_at'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + created_by_name == o.created_by_name && + created_at == o.created_at && + created_by_handle == o.created_by_handle && + created_from_rule_id == o.created_from_rule_id && + job_definition == o.job_definition && + job_name == o.job_name && + job_status == o.job_status && + modified_at == o.modified_at + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [created_by_name, created_at, created_by_handle, created_from_rule_id, job_definition, job_name, job_status, modified_at].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/job_create_response.rb b/lib/datadog_api_client/v2/models/job_create_response.rb new file mode 100644 index 00000000000..33716a1e1bb --- /dev/null +++ b/lib/datadog_api_client/v2/models/job_create_response.rb @@ -0,0 +1,105 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. 
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Run a historical job response. + class JobCreateResponse + include BaseGenericModel + + # The definition of `JobCreateResponseData` object. + attr_accessor :data + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'data' => :'data' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'data' => :'JobCreateResponseData' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::JobCreateResponse` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'data') + self.data = attributes[:'data'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + data == o.data + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [data].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/job_create_response_data.rb b/lib/datadog_api_client/v2/models/job_create_response_data.rb new file mode 100644 index 00000000000..d00288e5467 --- /dev/null +++ b/lib/datadog_api_client/v2/models/job_create_response_data.rb @@ -0,0 +1,115 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. 
+ +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The definition of `JobCreateResponseData` object. + class JobCreateResponseData + include BaseGenericModel + + # ID of the created job. + attr_accessor :id + + # Type of payload. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'type' => :'HistoricalJobDataType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::JobCreateResponseData` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + type == o.type + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [id, type].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/job_definition.rb b/lib/datadog_api_client/v2/models/job_definition.rb new file mode 100644 index 00000000000..22c088d733e --- /dev/null +++ b/lib/datadog_api_client/v2/models/job_definition.rb @@ -0,0 +1,357 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Definition of a historical job. + class JobDefinition + include BaseGenericModel + + # Calculated fields. + attr_accessor :calculated_fields + + # Cases used for generating job results. + attr_reader :cases + + # Additional queries to filter matched events before they are processed. 
This field is deprecated for log detection, signal correlation, and workload security rules. + attr_accessor :filters + + # Starting time of data analyzed by the job. + attr_reader :from + + # Fields used to group results. + attr_accessor :group_signals_by + + # Index used to load the data. + attr_reader :index + + # Message for generated results. + attr_reader :message + + # Job name. + attr_reader :name + + # Options on rules. + attr_accessor :options + + # Query projections. + attr_accessor :projected_per_query + + # Queries for selecting logs analyzed by the job. + attr_reader :queries + + # Reference tables for the rule. + attr_accessor :reference_tables + + # Tags for generated signals. + attr_accessor :tags + + # Cases for generating results from third-party rules. Only available for third-party rules. + attr_accessor :third_party_cases + + # Ending time of data analyzed by the job. + attr_reader :to + + # Job type. + attr_accessor :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'calculated_fields' => :'calculatedFields', + :'cases' => :'cases', + :'filters' => :'filters', + :'from' => :'from', + :'group_signals_by' => :'groupSignalsBy', + :'index' => :'index', + :'message' => :'message', + :'name' => :'name', + :'options' => :'options', + :'projected_per_query' => :'projectedPerQuery', + :'queries' => :'queries', + :'reference_tables' => :'referenceTables', + :'tags' => :'tags', + :'third_party_cases' => :'thirdPartyCases', + :'to' => :'to', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'calculated_fields' => :'Array', + :'cases' => :'Array', + :'filters' => :'Array', + :'from' => :'Integer', + :'group_signals_by' => :'Array', + :'index' => :'String', + :'message' => :'String', + :'name' => :'String', + :'options' => :'SecurityMonitoringRuleOptions', + :'projected_per_query' => :'Array', + :'queries' => :'Array', + :'reference_tables' => :'Array', + :'tags' => :'Array', + :'third_party_cases' => :'Array', + :'to' => :'Integer', + :'type' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::JobDefinition` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'calculated_fields') + if (value = attributes[:'calculated_fields']).is_a?(Array) + self.calculated_fields = value + end + end + + if attributes.key?(:'cases') + if (value = attributes[:'cases']).is_a?(Array) + self.cases = value + end + end + + if attributes.key?(:'filters') + if (value = attributes[:'filters']).is_a?(Array) + self.filters = value + end + end + + if attributes.key?(:'from') + self.from = attributes[:'from'] + end + + if attributes.key?(:'group_signals_by') + if (value = attributes[:'group_signals_by']).is_a?(Array) + self.group_signals_by = value + end + end + + if attributes.key?(:'index') + self.index = attributes[:'index'] + end + + if attributes.key?(:'message') + 
self.message = attributes[:'message'] + end + + if attributes.key?(:'name') + self.name = attributes[:'name'] + end + + if attributes.key?(:'options') + self.options = attributes[:'options'] + end + + if attributes.key?(:'projected_per_query') + if (value = attributes[:'projected_per_query']).is_a?(Array) + self.projected_per_query = value + end + end + + if attributes.key?(:'queries') + if (value = attributes[:'queries']).is_a?(Array) + self.queries = value + end + end + + if attributes.key?(:'reference_tables') + if (value = attributes[:'reference_tables']).is_a?(Array) + self.reference_tables = value + end + end + + if attributes.key?(:'tags') + if (value = attributes[:'tags']).is_a?(Array) + self.tags = value + end + end + + if attributes.key?(:'third_party_cases') + if (value = attributes[:'third_party_cases']).is_a?(Array) + self.third_party_cases = value + end + end + + if attributes.key?(:'to') + self.to = attributes[:'to'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @cases.nil? + return false if @from.nil? + return false if @index.nil? + return false if @message.nil? + return false if @name.nil? + return false if @queries.nil? + return false if @to.nil? + true + end + + # Custom attribute writer method with validation + # @param cases [Object] Object to be assigned + # @!visibility private + def cases=(cases) + if cases.nil? + fail ArgumentError, 'invalid value for "cases", cases cannot be nil.' + end + @cases = cases + end + + # Custom attribute writer method with validation + # @param from [Object] Object to be assigned + # @!visibility private + def from=(from) + if from.nil? + fail ArgumentError, 'invalid value for "from", from cannot be nil.' + end + @from = from + end + + # Custom attribute writer method with validation + # @param index [Object] Object to be assigned + # @!visibility private + def index=(index) + if index.nil? + fail ArgumentError, 'invalid value for "index", index cannot be nil.' + end + @index = index + end + + # Custom attribute writer method with validation + # @param message [Object] Object to be assigned + # @!visibility private + def message=(message) + if message.nil? + fail ArgumentError, 'invalid value for "message", message cannot be nil.' + end + @message = message + end + + # Custom attribute writer method with validation + # @param name [Object] Object to be assigned + # @!visibility private + def name=(name) + if name.nil? + fail ArgumentError, 'invalid value for "name", name cannot be nil.' + end + @name = name + end + + # Custom attribute writer method with validation + # @param queries [Object] Object to be assigned + # @!visibility private + def queries=(queries) + if queries.nil? + fail ArgumentError, 'invalid value for "queries", queries cannot be nil.' + end + @queries = queries + end + + # Custom attribute writer method with validation + # @param to [Object] Object to be assigned + # @!visibility private + def to=(to) + if to.nil? + fail ArgumentError, 'invalid value for "to", to cannot be nil.' + end + @to = to + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + calculated_fields == o.calculated_fields && + cases == o.cases && + filters == o.filters && + from == o.from && + group_signals_by == o.group_signals_by && + index == o.index && + message == o.message && + name == o.name && + options == o.options && + projected_per_query == o.projected_per_query && + queries == o.queries && + reference_tables == o.reference_tables && + tags == o.tags && + third_party_cases == o.third_party_cases && + to == o.to && + type == o.type + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [calculated_fields, cases, filters, from, group_signals_by, index, message, name, options, projected_per_query, queries, reference_tables, tags, third_party_cases, to, type].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/job_definition_from_rule.rb b/lib/datadog_api_client/v2/models/job_definition_from_rule.rb new file mode 100644 index 00000000000..ee15743b13d --- /dev/null +++ b/lib/datadog_api_client/v2/models/job_definition_from_rule.rb @@ -0,0 +1,223 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Definition of a historical job based on a security monitoring rule. + class JobDefinitionFromRule + include BaseGenericModel + + # Index of the rule case applied by the job. + attr_reader :case_index + + # Starting time of data analyzed by the job. + attr_reader :from + + # ID of the detection rule used to create the job. + attr_reader :id + + # Index used to load the data. + attr_reader :index + + # Notifications sent when the job is completed. + attr_accessor :notifications + + # Ending time of data analyzed by the job. + attr_reader :to + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'case_index' => :'caseIndex', + :'from' => :'from', + :'id' => :'id', + :'index' => :'index', + :'notifications' => :'notifications', + :'to' => :'to' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'case_index' => :'Integer', + :'from' => :'Integer', + :'id' => :'String', + :'index' => :'String', + :'notifications' => :'Array', + :'to' => :'Integer' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::JobDefinitionFromRule` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'case_index') + self.case_index = attributes[:'case_index'] + end + + if attributes.key?(:'from') + self.from = attributes[:'from'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'index') + self.index = attributes[:'index'] + end + + if attributes.key?(:'notifications') + if (value = attributes[:'notifications']).is_a?(Array) + self.notifications = value + end + end + + if attributes.key?(:'to') + self.to = attributes[:'to'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @case_index.nil? + return false if @case_index > 9 + return false if @from.nil? + return false if @id.nil? + return false if @index.nil? + return false if @to.nil? + true + end + + # Custom attribute writer method with validation + # @param case_index [Object] Object to be assigned + # @!visibility private + def case_index=(case_index) + if case_index.nil? + fail ArgumentError, 'invalid value for "case_index", case_index cannot be nil.' + end + if case_index > 9 + fail ArgumentError, 'invalid value for "case_index", must be smaller than or equal to 9.' + end + @case_index = case_index + end + + # Custom attribute writer method with validation + # @param from [Object] Object to be assigned + # @!visibility private + def from=(from) + if from.nil? + fail ArgumentError, 'invalid value for "from", from cannot be nil.' + end + @from = from + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param index [Object] Object to be assigned + # @!visibility private + def index=(index) + if index.nil? + fail ArgumentError, 'invalid value for "index", index cannot be nil.' + end + @index = index + end + + # Custom attribute writer method with validation + # @param to [Object] Object to be assigned + # @!visibility private + def to=(to) + if to.nil? + fail ArgumentError, 'invalid value for "to", to cannot be nil.' + end + @to = to + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + case_index == o.case_index && + from == o.from && + id == o.id && + index == o.index && + notifications == o.notifications && + to == o.to + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [case_index, from, id, index, notifications, to].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/list_historical_jobs_response.rb b/lib/datadog_api_client/v2/models/list_historical_jobs_response.rb new file mode 100644 index 00000000000..dc04d2f0662 --- /dev/null +++ b/lib/datadog_api_client/v2/models/list_historical_jobs_response.rb @@ -0,0 +1,117 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # List of historical jobs. + class ListHistoricalJobsResponse + include BaseGenericModel + + # Array containing the list of historical jobs. + attr_accessor :data + + # Metadata about the list of jobs. + attr_accessor :meta + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'data' => :'data', + :'meta' => :'meta' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'data' => :'Array', + :'meta' => :'HistoricalJobListMeta' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ListHistoricalJobsResponse` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'data') + if (value = attributes[:'data']).is_a?(Array) + self.data = value + end + end + + if attributes.key?(:'meta') + self.meta = attributes[:'meta'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? 
+ is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + data == o.data && + meta == o.meta + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [data, meta].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/run_historical_job_request.rb b/lib/datadog_api_client/v2/models/run_historical_job_request.rb new file mode 100644 index 00000000000..ce037b8c6ed --- /dev/null +++ b/lib/datadog_api_client/v2/models/run_historical_job_request.rb @@ -0,0 +1,105 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Run a historical job request. + class RunHistoricalJobRequest + include BaseGenericModel + + # Data for running a historical job request. + attr_accessor :data + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'data' => :'data' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'data' => :'RunHistoricalJobRequestData' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::RunHistoricalJobRequest` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'data') + self.data = attributes[:'data'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + data == o.data + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [data].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/run_historical_job_request_attributes.rb b/lib/datadog_api_client/v2/models/run_historical_job_request_attributes.rb new file mode 100644 index 00000000000..a6fb45081e2 --- /dev/null +++ b/lib/datadog_api_client/v2/models/run_historical_job_request_attributes.rb @@ -0,0 +1,125 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Run a historical job request. + class RunHistoricalJobRequestAttributes + include BaseGenericModel + + # Definition of a historical job based on a security monitoring rule. + attr_accessor :from_rule + + # Request ID. + attr_accessor :id + + # Definition of a historical job. + attr_accessor :job_definition + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'from_rule' => :'fromRule', + :'id' => :'id', + :'job_definition' => :'jobDefinition' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'from_rule' => :'JobDefinitionFromRule', + :'id' => :'String', + :'job_definition' => :'JobDefinition' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::RunHistoricalJobRequestAttributes` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'from_rule') + self.from_rule = attributes[:'from_rule'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'job_definition') + self.job_definition = attributes[:'job_definition'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+    # @param o [Object] Object to be compared
+    # @!visibility private
+    def ==(o)
+      return true if self.equal?(o)
+      self.class == o.class &&
+          from_rule == o.from_rule &&
+          id == o.id &&
+          job_definition == o.job_definition &&
+          additional_properties == o.additional_properties
+    end
+
+    # Calculates hash code according to all attributes.
+    # @return [Integer] Hash code
+    # @!visibility private
+    def hash
+      [from_rule, id, job_definition].hash
+    end
+  end
+end
diff --git a/lib/datadog_api_client/v2/models/run_historical_job_request_data.rb b/lib/datadog_api_client/v2/models/run_historical_job_request_data.rb
new file mode 100644
index 00000000000..24d6e2aeeca
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/run_historical_job_request_data.rb
@@ -0,0 +1,115 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+
+The version of the OpenAPI document: 1.0
+Contact: support@datadoghq.com
+Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator
+
+ Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ This product includes software developed at Datadog (https://www.datadoghq.com/).
+ Copyright 2020-Present Datadog, Inc.
+
+=end
+
+require 'date'
+require 'time'
+
+module DatadogAPIClient::V2
+  # Data for running a historical job request.
+  class RunHistoricalJobRequestData
+    include BaseGenericModel
+
+    # Run a historical job request.
+    attr_accessor :attributes
+
+    # Type of data.
+    attr_accessor :type
+
+    attr_accessor :additional_properties
+
+    # Attribute mapping from ruby-style variable name to JSON key.
+    # @!visibility private
+    def self.attribute_map
+      {
+        :'attributes' => :'attributes',
+        :'type' => :'type'
+      }
+    end
+
+    # Attribute type mapping.
+    # @!visibility private
+    def self.openapi_types
+      {
+        :'attributes' => :'RunHistoricalJobRequestAttributes',
+        :'type' => :'RunHistoricalJobRequestDataType'
+      }
+    end
+
+    # Initializes the object
+    # @param attributes [Hash] Model attributes in the form of hash
+    # @!visibility private
+    def initialize(attributes = {})
+      if (!attributes.is_a?(Hash))
+        fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::RunHistoricalJobRequestData` initialize method"
+      end
+
+      self.additional_properties = {}
+      # check to see if the attribute exists and convert string to symbol for hash key
+      attributes = attributes.each_with_object({}) { |(k, v), h|
+        if (!self.class.attribute_map.key?(k.to_sym))
+          self.additional_properties[k.to_sym] = v
+        else
+          h[k.to_sym] = v
+        end
+      }
+
+      if attributes.key?(:'attributes')
+        self.attributes = attributes[:'attributes']
+      end
+
+      if attributes.key?(:'type')
+        self.type = attributes[:'type']
+      end
+    end
+
+    # Returns the object in the form of hash, with additionalProperties support.
+    # @return [Hash] Returns the object in the form of hash
+    # @!visibility private
+    def to_hash
+      hash = {}
+      self.class.attribute_map.each_pair do |attr, param|
+        value = self.send(attr)
+        if value.nil?
+          is_nullable = self.class.openapi_nullable.include?(attr)
+          next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+        end
+
+        hash[param] = _to_hash(value)
+      end
+      self.additional_properties.each_pair do |attr, value|
+        hash[attr] = value
+      end
+      hash
+    end
+
+    # Checks equality by comparing each attribute.
+    # @param o [Object] Object to be compared
+    # @!visibility private
+    def ==(o)
+      return true if self.equal?(o)
+      self.class == o.class &&
+          attributes == o.attributes &&
+          type == o.type &&
+          additional_properties == o.additional_properties
+    end
+
+    # Calculates hash code according to all attributes.
+    # @return [Integer] Hash code
+    # @!visibility private
+    def hash
+      [attributes, type].hash
+    end
+  end
+end
diff --git a/lib/datadog_api_client/v2/models/run_historical_job_request_data_type.rb b/lib/datadog_api_client/v2/models/run_historical_job_request_data_type.rb
new file mode 100644
index 00000000000..13c8e046f35
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/run_historical_job_request_data_type.rb
@@ -0,0 +1,26 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+
+The version of the OpenAPI document: 1.0
+Contact: support@datadoghq.com
+Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator
+
+ Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
+ This product includes software developed at Datadog (https://www.datadoghq.com/).
+ Copyright 2020-Present Datadog, Inc.
+
+=end
+
+require 'date'
+require 'time'
+
+module DatadogAPIClient::V2
+  # Type of data.
+  class RunHistoricalJobRequestDataType
+    include BaseEnumModel
+
+    HISTORICALDETECTIONSJOBCREATE = "historicalDetectionsJobCreate".freeze
+  end
+end
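
Editor's note: the following snippet is not part of the generated patch above. It is a minimal sketch of how the three request models introduced here might be wired together against the run_historical_job operation that this patch adds to SecurityMonitoringAPI. The unstable-operation flag name and the JobDefinitionFromRule field names are assumptions, not confirmed by this diff.

# Illustrative sketch only -- not generated code from this patch.
require "datadog_api_client"

DatadogAPIClient.configure do |config|
  # Assumption: the new endpoint is gated behind the client's unstable-operations flag.
  config.unstable_operations["v2.run_historical_job".to_sym] = true
end

api_instance = DatadogAPIClient::V2::SecurityMonitoringAPI.new

body = DatadogAPIClient::V2::RunHistoricalJobRequest.new({
  data: DatadogAPIClient::V2::RunHistoricalJobRequestData.new({
    # Enum value defined in run_historical_job_request_data_type.rb above.
    type: DatadogAPIClient::V2::RunHistoricalJobRequestDataType::HISTORICALDETECTIONSJOBCREATE,
    attributes: DatadogAPIClient::V2::RunHistoricalJobRequestAttributes.new({
      # Assumption: JobDefinitionFromRule (added elsewhere in this patch) takes a rule ID
      # and an evaluation window; see job_definition_from_rule.rb for the actual fields.
      from_rule: DatadogAPIClient::V2::JobDefinitionFromRule.new({
        id: "<security-rule-id>",
        from: 1730000000000,
        to: 1730100000000,
      }),
    }),
  }),
})

# Returns a JobCreateResponse on success.
p api_instance.run_historical_job(body)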