diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 6a5db93053e3b..1e3b913c5cb5a 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -28,3 +28,5 @@ BWC_VERSION:
- "2.11.1"
- "2.11.2"
- "2.12.0"
+ - "2.12.1"
+ - "2.13.0"
diff --git a/.github/ISSUE_TEMPLATE/meta.yml b/.github/ISSUE_TEMPLATE/meta.yml
new file mode 100644
index 0000000000000..b766a26bc3ff2
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/meta.yml
@@ -0,0 +1,58 @@
+name: ✨ Meta Issue
+description: An issue that collects other issues together to describe a larger project or activity.
+title: '[META]'
+labels: ['Meta', 'untriaged']
+body:
+ - type: textarea
+ attributes:
+ label: Please describe the end goal of this project
+ description: A clear and concise description of this project/endeavor. This should be understandable to someone with no context.
+ placeholder: Ex. Views is a way to project indices in OpenSearch, these views act as a focal point for describing the underlying data and how the data is accessed. It allows for restricting the scope and filtering the response consistently.
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Supporting References
+ description: Please provide links (and descriptions!) to RFCs, design docs, etc
+ validations:
+ required: true
+ - type: textarea
+ attributes:
+ label: Issues
+      description: Please create a list of issues that should be tracked by this meta issue, including a short description. The purpose is to provide everyone on the project with an "at a glance" update of the state of the work being tracked. If you use the format "- [ ]" it will put your list into a checklist.
+ placeholder: Ex. - [ ] https://github.com/opensearch-project/security/issues/3888 Add views to the cluster metadata schema
+ validations:
+ required: true
+ - type: dropdown
+ attributes:
+ label: Related component
+ description: Choose a specific OpenSearch component your project belongs to. If you are unsure of which component to select or if the component is not present, select "Other".
+ multiple: false
+ options:
+ - # Empty first option to force selection
+ - Build
+ - Clients
+ - Cluster Manager
+ - Extensions
+ - Indexing:Performance
+ - Indexing:Replication
+ - Indexing
+ - Libraries
+ - Other
+ - Plugins
+ - Search:Aggregations
+ - Search:Performance
+ - Search:Query Capabilities
+ - Search:Query Insights
+ - Search:Relevance
+ - Search:Remote Search
+ - Search:Resiliency
+ - Search:Searchable Snapshots
+ - Search
+ - Storage:Durability
+ - Storage:Performance
+ - Storage:Remote
+ - Storage:Snapshots
+ - Storage
+ validations:
+ required: true
diff --git a/.github/workflows/check-compatibility.yml b/.github/workflows/check-compatibility.yml
index d6c65ddd446cd..b2f22a90938cc 100644
--- a/.github/workflows/check-compatibility.yml
+++ b/.github/workflows/check-compatibility.yml
@@ -53,7 +53,7 @@ jobs:
name: results.txt
- name: Find Comment
- uses: peter-evans/find-comment@v2
+ uses: peter-evans/find-comment@v3
id: fc
with:
issue-number: ${{ github.event.number }}
@@ -61,7 +61,7 @@ jobs:
body-includes: 'Compatibility status:'
- name: Add comment on the PR
- uses: peter-evans/create-or-update-comment@v3
+ uses: peter-evans/create-or-update-comment@v4
with:
comment-id: ${{ steps.fc.outputs.comment-id }}
issue-number: ${{ github.event.number }}
diff --git a/.github/workflows/gradle-check.yml b/.github/workflows/gradle-check.yml
index 8c33d41c6b2b4..8ac44cc37d27c 100644
--- a/.github/workflows/gradle-check.yml
+++ b/.github/workflows/gradle-check.yml
@@ -78,7 +78,7 @@ jobs:
- name: Create Comment Success
if: ${{ github.event_name == 'pull_request_target' && success() && env.result == 'SUCCESS' }}
- uses: peter-evans/create-or-update-comment@v3
+ uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.pr_number }}
body: |
@@ -101,7 +101,7 @@ jobs:
- name: Create Comment Flaky
if: ${{ github.event_name == 'pull_request_target' && success() && env.result != 'SUCCESS' }}
- uses: peter-evans/create-or-update-comment@v3
+ uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.pr_number }}
body: |
@@ -111,7 +111,7 @@ jobs:
- name: Create Comment Failure
if: ${{ github.event_name == 'pull_request_target' && failure() }}
- uses: peter-evans/create-or-update-comment@v3
+ uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.pr_number }}
body: |
diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml
index 61962c91b4903..1c83821e22804 100644
--- a/.github/workflows/links.yml
+++ b/.github/workflows/links.yml
@@ -13,7 +13,7 @@ jobs:
- uses: actions/checkout@v4
- name: lychee Link Checker
id: lychee
- uses: lycheeverse/lychee-action@v1.9.1
+ uses: lycheeverse/lychee-action@v1.9.3
with:
args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes
fail: true
diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml
index 34e8f57cc1878..fdc2bf16937b4 100644
--- a/.github/workflows/maintainer-approval.yml
+++ b/.github/workflows/maintainer-approval.yml
@@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: find-maintainers
- uses: actions/github-script@v7
+ uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
result-encoding: string
diff --git a/.github/workflows/poc-checklist.yml b/.github/workflows/poc-checklist.yml
index 3d014e000a487..1b4f6b31e02f8 100644
--- a/.github/workflows/poc-checklist.yml
+++ b/.github/workflows/poc-checklist.yml
@@ -11,7 +11,7 @@ jobs:
issues: write
steps:
- name: Add comment
- uses: peter-evans/create-or-update-comment@v3
+ uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ github.event.issue.number }}
body: |
diff --git a/.github/workflows/pull-request-checks.yml b/.github/workflows/pull-request-checks.yml
index 11998e36c2dbb..7efcf529588ed 100644
--- a/.github/workflows/pull-request-checks.yml
+++ b/.github/workflows/pull-request-checks.yml
@@ -17,7 +17,7 @@ jobs:
name: Verify Description Checklist
runs-on: ubuntu-latest
steps:
- - uses: peternied/check-pull-request-description-checklist@v1
+ - uses: peternied/check-pull-request-description-checklist@v1.1
with:
checklist-items: |
New functionality includes testing.
diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml
index c305818bdb0a9..83bf4926a8c2d 100644
--- a/.github/workflows/triage.yml
+++ b/.github/workflows/triage.yml
@@ -9,7 +9,7 @@ jobs:
if: github.repository == 'opensearch-project/OpenSearch'
runs-on: ubuntu-latest
steps:
- - uses: actions/github-script@v7
+ - uses: actions/github-script@v7.0.1
with:
script: |
const { issue, repository } = context.payload;
diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml
index f4adef1ff06b0..be2a89ac931e9 100644
--- a/.github/workflows/version.yml
+++ b/.github/workflows/version.yml
@@ -129,7 +129,7 @@ jobs:
- name: Create tracking issue
id: create-issue
- uses: actions/github-script@v6.4.0
+ uses: actions/github-script@v7.0.1
with:
script: |
const body = `
diff --git a/.github/workflows/wrapper.yml b/.github/workflows/wrapper.yml
index 6dd48ca15eaa9..dcf2a09717d28 100644
--- a/.github/workflows/wrapper.yml
+++ b/.github/workflows/wrapper.yml
@@ -8,4 +8,4 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - uses: gradle/wrapper-validation-action@v1
+ - uses: gradle/wrapper-validation-action@v2
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d23cc8c3cebcd..6a2fb5d9c8d7a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,6 +16,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Allow to pass the list settings through environment variables (like [], ["a", "b", "c"], ...) ([#10625](https://github.com/opensearch-project/OpenSearch/pull/10625))
- [Admission Control] Integrate CPU AC with ResourceUsageCollector and add CPU AC stats to nodes/stats ([#10887](https://github.com/opensearch-project/OpenSearch/pull/10887))
- [S3 Repository] Add setting to control connection count for sync client ([#12028](https://github.com/opensearch-project/OpenSearch/pull/12028))
+- Views, simplify data access and manipulation by providing a virtual layer over one or more indices ([#11957](https://github.com/opensearch-project/OpenSearch/pull/11957))
+- Add Remote Store Migration Experimental flag and allow mixed mode clusters under same ([#11986](https://github.com/opensearch-project/OpenSearch/pull/11986))
- Add optional section of node analyzers into NodeInfo ([#10296](https://github.com/opensearch-project/OpenSearch/pull/10296))
### Dependencies
@@ -49,6 +51,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Bump `org.eclipse.jgit` from 6.5.0 to 6.7.0 ([#10147](https://github.com/opensearch-project/OpenSearch/pull/10147))
- Bump OpenTelemetry from 1.30.1 to 1.31.0 ([#10617](https://github.com/opensearch-project/OpenSearch/pull/10617))
- Bump OpenTelemetry from 1.31.0 to 1.32.0 and OpenTelemetry Semconv from 1.21.0-alpha to 1.23.1-alpha ([#11305](https://github.com/opensearch-project/OpenSearch/pull/11305))
+- Bump `org.bouncycastle:bcprov-jdk15to18` to `org.bouncycastle:bcprov-jdk18on` version 1.77 ([#12317](https://github.com/opensearch-project/OpenSearch/pull/12317))
+- Bump `org.bouncycastle:bcmail-jdk15to18` to `org.bouncycastle:bcmail-jdk18on` version 1.77 ([#12317](https://github.com/opensearch-project/OpenSearch/pull/12317))
+- Bump `org.bouncycastle:bcpkix-jdk15to18` to `org.bouncycastle:bcpkix-jdk18on` version 1.77 ([#12317](https://github.com/opensearch-project/OpenSearch/pull/12317))
### Changed
- [CCR] Add getHistoryOperationsFromTranslog method to fetch the history snapshot from translogs ([#3948](https://github.com/opensearch-project/OpenSearch/pull/3948))
@@ -88,173 +93,52 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Fix typo in API annotation check message ([11836](https://github.com/opensearch-project/OpenSearch/pull/11836))
- Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872))
- [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart ([#12035](https://github.com/opensearch-project/OpenSearch/pull/12035))
+- Fix Span operation names generated from RestActions ([#12005](https://github.com/opensearch-project/OpenSearch/pull/12005))
+- Fix error in RemoteSegmentStoreDirectory when debug logging is enabled ([#12328](https://github.com/opensearch-project/OpenSearch/pull/12328))
### Security
## [Unreleased 2.x]
### Added
-- [Admission control] Add Resource usage collector service and resource usage tracker ([#9890](https://github.com/opensearch-project/OpenSearch/pull/9890))
-- [Admission control] Add enhancements to FS stats to include read/write time, queue size and IO time ([#10541](https://github.com/opensearch-project/OpenSearch/pull/10541))
-- [Remote cluster state] Change file names for remote cluster state ([#10557](https://github.com/opensearch-project/OpenSearch/pull/10557))
-- [Search Pipelines] Add request-scoped state shared between processors (and three new processors) ([#9405](https://github.com/opensearch-project/OpenSearch/pull/9405))
-- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351))
-- [Remote Store] Add repository stats for remote store([#10567](https://github.com/opensearch-project/OpenSearch/pull/10567))
-- [Remote cluster state] Upload global metadata in cluster state to remote store([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404))
-- [Remote cluster state] Download functionality of global metadata from remote store ([#10535](https://github.com/opensearch-project/OpenSearch/pull/10535))
-- [Remote cluster state] Restore global metadata from remote store when local state is lost after quorum loss ([#10404](https://github.com/opensearch-project/OpenSearch/pull/10404))
-- [Remote cluster state] Make index and global metadata upload timeout dynamic cluster settings ([#10814](https://github.com/opensearch-project/OpenSearch/pull/10814))
-- Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255))
-- Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351))
-- Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670))
-- Remove ingest processor supports excluding fields ([#10967](https://github.com/opensearch-project/OpenSearch/pull/10967), [#11983](https://github.com/opensearch-project/OpenSearch/pull/11983))
-- [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275))
-- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753))
-- [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853))
-- Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247))
-- Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248))
-- Introduce ConcurrentQueryProfiler to profile query using concurrent segment search path and support concurrency during rewrite and create weight ([10352](https://github.com/opensearch-project/OpenSearch/pull/10352))
-- Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679))
-- Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618))
-- [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672))
-- Enable must_exist parameter for update aliases API ([#11210](https://github.com/opensearch-project/OpenSearch/pull/11210))
-- Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024))
-- Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650))
-- Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040))
-- Allowing pipeline processors to access index mapping info by passing ingest service ref as part of the processor factory parameters ([#10307](https://github.com/opensearch-project/OpenSearch/pull/10307))
-- Add experimental SIMD implementation of B-tree to round down dates ([#11194](https://github.com/opensearch-project/OpenSearch/issues/11194))
-- Make number of segment metadata files in remote segment store configurable ([#11329](https://github.com/opensearch-project/OpenSearch/pull/11329))
-- Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317))
-- Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069))
-- [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175))
-- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378))
-- Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170))
-- Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591))
-- Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583))
-- Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039))
-- Add copy ingest processor ([#11870](https://github.com/opensearch-project/OpenSearch/pull/11870))
-- Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170))
-- Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891))
-- Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563))
-- Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([11876](https://github.com/opensearch-project/OpenSearch/pull/11876))
-- New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465))
-- Add support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394))
-- Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074))
+- [Tiered caching] Introducing cache plugins and exposing Ehcache as one of the pluggable disk cache option ([#11874](https://github.com/opensearch-project/OpenSearch/pull/11874))
+- Add support for dependencies in plugin descriptor properties with semver range ([#11441](https://github.com/opensearch-project/OpenSearch/pull/11441))
+- Add community_id ingest processor ([#12121](https://github.com/opensearch-project/OpenSearch/pull/12121))
+- Introduce query level setting `index.query.max_nested_depth` limiting nested queries ([#3268](https://github.com/opensearch-project/OpenSearch/issues/3268))
+- Add toString methods to MultiSearchRequest, MultiGetRequest and CreateIndexRequest ([#12163](https://github.com/opensearch-project/OpenSearch/pull/12163))
+- Support for returning scores in matched queries ([#11626](https://github.com/opensearch-project/OpenSearch/pull/11626))
+- Add shard id property to SearchLookup for use in field types provided by plugins ([#1063](https://github.com/opensearch-project/OpenSearch/pull/1063))
### Dependencies
-- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
-- Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))
-- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796))
-- Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298))
-- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630))
-- Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506))
-- Bump `de.thetaphi:forbiddenapis` from 3.5.1 to 3.6 ([#10508](https://github.com/opensearch-project/OpenSearch/pull/10508))
-- Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639))
-- Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297))
-- Bump `com.google.http-client:google-http-client` from 1.43.2 to 1.43.3 ([#10635](https://github.com/opensearch-project/OpenSearch/pull/10635))
-- Bump `com.squareup.okio:okio` from 3.5.0 to 3.7.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637), [#11632](https://github.com/opensearch-project/OpenSearch/pull/11632))
-- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.1 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270), [#11695](https://github.com/opensearch-project/OpenSearch/pull/11695))
-- Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504))
-- Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171))
-- Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271))
-- Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273))
-- Bump `netty` from 4.1.100.Final to 4.1.106.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775)), [#12034](https://github.com/opensearch-project/OpenSearch/pull/12034))
-- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692))
-- Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861))
-- Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344))
-- Bump `reactor-netty-core` from 1.1.12 to 1.1.15 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350)), ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))
-- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.2 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629), [#12056](https://github.com/opensearch-project/OpenSearch/pull/12056))
-- Bump `actions/setup-java` from 3 to 4 ([#11447](https://github.com/opensearch-project/OpenSearch/pull/11447))
-- Bump `commons-net:commons-net` from 3.9.0 to 3.10.0 ([#11450](https://github.com/opensearch-project/OpenSearch/pull/11450))
-- Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445))
-- Bump `org.apache.xmlbeans:xmlbeans` from 5.1.1 to 5.2.0 ([#11448](https://github.com/opensearch-project/OpenSearch/pull/11448))
-- Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521))
-- Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))
-- Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555))
-- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556))
-- Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557))
-- Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671))
-- Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996))
-- Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559))
-- Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691))
-- Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693))
-- Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798))
-- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887))
-- Bump `Lucene` from 9.8.0 to 9.9.2 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)), ([#12063](https://github.com/opensearch-project/OpenSearch/pull/12063))
-- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.2.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886), [#11963](https://github.com/opensearch-project/OpenSearch/pull/11963))
-- Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794))
-- Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960))
-- Bump `com.diffplug.spotless` from 6.23.2 to 6.25.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962), [#12055](https://github.com/opensearch-project/OpenSearch/pull/12055))
-- Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961))
-- Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))
-- Bump `com.google.http-client:google-http-client-jackson2` from 1.43.3 to 1.44.1 ([#12059](https://github.com/opensearch-project/OpenSearch/pull/12059))
-- Bump `peter-evans/create-issue-from-file` from 4 to 5 ([#12057](https://github.com/opensearch-project/OpenSearch/pull/12057))
+- Bump `peter-evans/find-comment` from 2 to 3 ([#12288](https://github.com/opensearch-project/OpenSearch/pull/12288))
+- Bump `com.google.api.grpc:proto-google-common-protos` from 2.25.1 to 2.33.0 ([#12289](https://github.com/opensearch-project/OpenSearch/pull/12289))
+- Bump `com.squareup.okio:okio` from 3.7.0 to 3.8.0 ([#12290](https://github.com/opensearch-project/OpenSearch/pull/12290))
+- Bump `gradle/wrapper-validation-action` from 1 to 2 ([#12367](https://github.com/opensearch-project/OpenSearch/pull/12367))
+- Bump `netty` from 4.1.106.Final to 4.1.107.Final ([#12372](https://github.com/opensearch-project/OpenSearch/pull/12372))
+- Bump `opentelemetry` from 1.34.1 to 1.35.0 ([#12388](https://github.com/opensearch-project/OpenSearch/pull/12388))
+- Bump Apache Lucene from 9.9.2 to 9.10.0 ([#12392](https://github.com/opensearch-project/OpenSearch/pull/12392))
+- Bump `org.apache.logging.log4j:log4j-core` from 2.22.1 to 2.23.0 ([#12464](https://github.com/opensearch-project/OpenSearch/pull/12464))
+- Bump `antlr4` from 4.11.1 to 4.13.1 ([#12445](https://github.com/opensearch-project/OpenSearch/pull/12445))
+- Bump `com.netflix.nebula.ospackage-base` from 11.8.0 to 11.8.1 ([#12461](https://github.com/opensearch-project/OpenSearch/pull/12461))
+- Bump `peter-evans/create-or-update-comment` from 3 to 4 ([#12462](https://github.com/opensearch-project/OpenSearch/pull/12462))
+- Bump `lycheeverse/lychee-action` from 1.9.1 to 1.9.3 ([#12521](https://github.com/opensearch-project/OpenSearch/pull/12521))
+- Bump `com.azure:azure-core` from 1.39.0 to 1.47.0 ([#12520](https://github.com/opensearch-project/OpenSearch/pull/12520))
### Changed
-- Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))
-- Force merge with `only_expunge_deletes` honors max segment size ([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036))
-- Add the means to extract the contextual properties from HttpChannel, TcpCChannel and TrasportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562)), ([#11751](https://github.com/opensearch-project/OpenSearch/pull/11751))
-- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107))
-- Search pipelines now support asynchronous request and response processors to avoid blocking on a transport thread ([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598))
-- [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524))
-- [BUG] Fix java.lang.SecurityException in repository-gcs plugin ([#10642](https://github.com/opensearch-project/OpenSearch/pull/10642))
-- Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395))
-- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558))
-- Add instrumentation for indexing in transport bulk action and transport shard bulk action. ([#10273](https://github.com/opensearch-project/OpenSearch/pull/10273))
-- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895), [#11607](https://github.com/opensearch-project/OpenSearch/pull/11607))
-- Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057))
-- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023))
-- Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083))
-- Apply the fast filter optimization to composite aggregation of date histogram source ([#11505](https://github.com/opensearch-project/OpenSearch/pull/11083))
-- Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087))
-- Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528))
-- Improved performance of numeric exact-match queries ([#11209](https://github.com/opensearch-project/OpenSearch/pull/11209))
-- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312))
-- Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308))
-- Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362))
- Allow composite aggregation to run under a parent filter aggregation ([#11499](https://github.com/opensearch-project/OpenSearch/pull/11499))
-- Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512))
-- Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562))
-- Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678))
-- Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582))
-- Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732))
-- Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526))
-- Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890))
-- Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877)), ([#12000](https://github.com/opensearch-project/OpenSearch/pull/12000))
-- Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968))
-- Updates IpField to be searchable when only `doc_values` are enabled ([#11508](https://github.com/opensearch-project/OpenSearch/pull/11508))
### Deprecated
### Removed
-- Remove deprecated classes for Rounding ([#10956](https://github.com/opensearch-project/OpenSearch/issues/10956))
### Fixed
-- Fix failure in dissect ingest processor parsing empty brackets ([#9255](https://github.com/opensearch-project/OpenSearch/pull/9255))
-- Fix `class_cast_exception` when passing int to `_version` and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101))
-- Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([#10370](https://github.com/opensearch-project/OpenSearch/pull/10370))
-- Fix some test methods in SimulatePipelineRequestParsingTests never run and fix test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496))
-- Fix passing wrong parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737))
-- Delegating CachingWeightWrapper#count to internal weight object ([#10543](https://github.com/opensearch-project/OpenSearch/pull/10543))
-- Fix per request latency last phase not tracked ([#10934](https://github.com/opensearch-project/OpenSearch/pull/10934))
-- Fix SuggestSearch.testSkipDuplicates by forcing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068))
-- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873))
-- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249))
-- Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316))
-- Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369))
-- Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167))
-- Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425))
-- Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238))
-- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485),[#11917](https://github.com/opensearch-project/OpenSearch/pull/11917))
-- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152))
-- Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417))
-- Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609))
-- Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711))
-- Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490))
-- Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815))
-- Fix memory leak issue in ReorganizingLongHash ([#11953](https://github.com/opensearch-project/OpenSearch/issues/11953))
-- Prevent setting remote_snapshot store type on index creation ([#11867](https://github.com/opensearch-project/OpenSearch/pull/11867))
-- [BUG] Fix remote shards balancer when filtering throttled nodes ([#11724](https://github.com/opensearch-project/OpenSearch/pull/11724))
-- Add advance(int) for numeric values in order to allow point based optimization to kick in ([#12089](https://github.com/opensearch-project/OpenSearch/pull/12089))
+- Fix for deserialization bug in weighted round-robin metadata ([#11679](https://github.com/opensearch-project/OpenSearch/pull/11679))
+- [Revert] [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart ([#12035](https://github.com/opensearch-project/OpenSearch/pull/12035))
+- Add support of special WrappingSearchAsyncActionPhase so the onPhaseStart() will always be followed by onPhaseEnd() within AbstractSearchAsyncAction ([#12293](https://github.com/opensearch-project/OpenSearch/pull/12293))
+- Add a system property to configure YamlParser codepoint limits ([#12298](https://github.com/opensearch-project/OpenSearch/pull/12298))
+- Prevent read beyond slice boundary in ByteArrayIndexInput ([#10481](https://github.com/opensearch-project/OpenSearch/issues/10481))
+- Fix the "highlight.max_analyzer_offset" request parameter with "plain" highlighter ([#10919](https://github.com/opensearch-project/OpenSearch/pull/10919))
+- Warn about deprecated and ignored index.mapper.dynamic index setting ([#11193](https://github.com/opensearch-project/OpenSearch/pull/11193))
+- Fix get task API does not refresh resource stats ([#11531](https://github.com/opensearch-project/OpenSearch/pull/11531))
### Security
diff --git a/TRIAGING.md b/TRIAGING.md
index 47cb44a4f5ba2..3917f1e1442b9 100644
--- a/TRIAGING.md
+++ b/TRIAGING.md
@@ -68,7 +68,7 @@ Yes, there are several labels that are used to identify the 'state' of issues fi
| Outcome | Label | Description | Canned Response |
|--------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| Accepted | `-untriaged` | The issue has the details needed to be directed towards area owners. | "Thanks for filing this issue, please feel free to submit a pull request." |
-| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to re-open after addressing the reason." |
+| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to open a new issue after addressing the reason." |
| Area Triage | `+{AREALABEL}` | OpenSearch has many different areas. If it's unclear whether an issue should be accepted, it will be labeled with the area and an owner will be @mentioned for follow-up. | "Thanks for creating this issue; the triage meeting was unsure if this issue should be accepted, @{PERSON} or someone from the area please review and then accept or reject this issue?" |
| Transfer | N/A | If the issue applies to another repository within the OpenSearch Project, it will be transferred accordingly. | "@opensearch-project/triage, can you please transfer this issue to project {REPOSITORY}." Or, if someone at the meeting has permissions, they can start the transfer. |
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java
new file mode 100644
index 0000000000000..4e995f5a5067c
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java
@@ -0,0 +1,67 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.UUIDs;
+import org.opensearch.index.codec.fuzzy.FuzzySet;
+import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
+import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
+import org.opensearch.index.mapper.IdFieldMapper;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+@Fork(3)
+@Warmup(iterations = 2)
+@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+public class FilterConstructionBenchmark {
+
+ private List<BytesRef> items;
+
+ @Param({ "1000000", "10000000", "50000000" })
+ private int numIds;
+
+ @Param({ "0.0511", "0.1023", "0.2047" })
+ private double fpp;
+
+ private FuzzySetFactory fuzzySetFactory;
+ private String fieldName;
+
+ @Setup
+ public void setupIds() {
+ this.fieldName = IdFieldMapper.NAME;
+ this.items = IntStream.range(0, numIds).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
+ FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
+ this.fuzzySetFactory = new FuzzySetFactory(Map.of(fieldName, parameters));
+ }
+
+ @Benchmark
+ public FuzzySet buildFilter() throws IOException {
+ return fuzzySetFactory.createFuzzySet(items.size(), fieldName, () -> items.iterator());
+ }
+}
diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
new file mode 100644
index 0000000000000..383539219830e
--- /dev/null
+++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java
@@ -0,0 +1,80 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.benchmark.index.codec.fuzzy;
+
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.UUIDs;
+import org.opensearch.index.codec.fuzzy.FuzzySet;
+import org.opensearch.index.codec.fuzzy.FuzzySetFactory;
+import org.opensearch.index.codec.fuzzy.FuzzySetParameters;
+import org.opensearch.index.mapper.IdFieldMapper;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Warmup;
+import org.openjdk.jmh.infra.Blackhole;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+@Fork(3)
+@Warmup(iterations = 2)
+@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS)
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.MILLISECONDS)
+@State(Scope.Benchmark)
+public class FilterLookupBenchmark {
+
+ @Param({ "50000000", "1000000" })
+ private int numItems;
+
+ @Param({ "1000000" })
+ private int searchKeyCount;
+
+ @Param({ "0.0511", "0.1023", "0.2047" })
+ private double fpp;
+
+ private FuzzySet fuzzySet;
+ private List<BytesRef> items;
+ private Random random = new Random();
+
+ @Setup
+ public void setupFilter() throws IOException {
+ String fieldName = IdFieldMapper.NAME;
+ items = IntStream.range(0, numItems).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList());
+ FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp);
+ fuzzySet = new FuzzySetFactory(Map.of(fieldName, parameters)).createFuzzySet(numItems, fieldName, () -> items.iterator());
+ }
+
+ @Benchmark
+ public void contains_withExistingKeys(Blackhole blackhole) throws IOException {
+ for (int i = 0; i < searchKeyCount; i++) {
+ blackhole.consume(fuzzySet.contains(items.get(random.nextInt(items.size()))) == FuzzySet.Result.MAYBE);
+ }
+ }
+
+ @Benchmark
+ public void contains_withRandomKeys(Blackhole blackhole) throws IOException {
+ for (int i = 0; i < searchKeyCount; i++) {
+ blackhole.consume(fuzzySet.contains(new BytesRef(UUIDs.base64UUID())));
+ }
+ }
+}
diff --git a/build.gradle b/build.gradle
index 375ab91e99e94..2aac4a1e893e9 100644
--- a/build.gradle
+++ b/build.gradle
@@ -55,7 +55,7 @@ plugins {
id 'opensearch.docker-support'
id 'opensearch.global-build-info'
id "com.diffplug.spotless" version "6.25.0" apply false
- id "org.gradle.test-retry" version "1.5.4" apply false
+ id "org.gradle.test-retry" version "1.5.8" apply false
id "test-report-aggregation"
id 'jacoco-report-aggregation'
}
@@ -516,7 +516,6 @@ subprojects {
includeClasses.add("org.opensearch.remotestore.CreateRemoteIndexClusterDefaultDocRep")
includeClasses.add("org.opensearch.remotestore.CreateRemoteIndexIT")
includeClasses.add("org.opensearch.remotestore.CreateRemoteIndexTranslogDisabledIT")
- includeClasses.add("org.opensearch.remotestore.RemoteIndexPrimaryRelocationIT")
includeClasses.add("org.opensearch.remotestore.RemoteStoreBackpressureIT")
includeClasses.add("org.opensearch.remotestore.RemoteStoreIT")
includeClasses.add("org.opensearch.remotestore.RemoteStoreRefreshListenerIT")
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
index 8ecfbf40b6c62..0c901b9726992 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java
@@ -94,7 +94,7 @@ public static void configureRepositories(Project project) {
String revision = matcher.group(1);
MavenArtifactRepository luceneRepo = repos.maven(repo -> {
repo.setName("lucene-snapshots");
- repo.setUrl("https://artifacts.opensearch.org/snapshots/lucene/");
+ repo.setUrl("https://ci.opensearch.org/ci/dbc/snapshots/lucene/");
});
repos.exclusiveContent(exclusiveRepo -> {
exclusiveRepo.filter(
diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
index 351b42e5bc921..4b8f52ec07615 100644
--- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
+++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle
@@ -17,7 +17,7 @@ repositories {
}
dependencies {
- implementation "org.apache.logging.log4j:log4j-core:2.22.1"
+ implementation "org.apache.logging.log4j:log4j-core:2.23.0"
}
["0.0.1", "0.0.2"].forEach { v ->
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 95ae1ddb578a1..39b0335c7ef55 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
opensearch = 3.0.0
-lucene = 9.9.2
+lucene = 9.11.0-snapshot-8a555eb
bundled_jdk_vendor = adoptium
bundled_jdk = 21.0.2+13
@@ -18,7 +18,7 @@ asm = 9.6
jettison = 1.5.4
woodstox = 6.4.0
kotlin = 1.7.10
-antlr4 = 4.11.1
+antlr4 = 4.13.1
guava = 32.1.1-jre
protobuf = 3.22.3
jakarta_annotation = 1.3.5
@@ -26,7 +26,7 @@ jakarta_annotation = 1.3.5
# when updating the JNA version, also update the version in buildSrc/build.gradle
jna = 5.13.0
-netty = 4.1.106.Final
+netty = 4.1.107.Final
joda = 2.12.2
# project reactor
@@ -50,7 +50,7 @@ reactivestreams = 1.0.4
# when updating this version, you need to ensure compatibility with:
# - plugins/ingest-attachment (transitive dependency, check the upstream POM)
# - distribution/tools/plugin-cli
-bouncycastle=1.76
+bouncycastle=1.77
# test dependencies
randomizedrunner = 2.7.1
junit = 4.13.2
@@ -70,5 +70,5 @@ jzlib = 1.1.3
resteasy = 6.2.4.Final
# opentelemetry dependencies
-opentelemetry = 1.34.1
+opentelemetry = 1.35.0
opentelemetrysemconv = 1.23.1-alpha
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index ededa7bff34d8..4e85d19986e43 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -63,7 +63,7 @@ import java.util.regex.Pattern
*/
plugins {
- id "com.netflix.nebula.ospackage-base" version "11.6.0"
+ id "com.netflix.nebula.ospackage-base" version "11.8.1"
}
void addProcessFilesTask(String type, boolean jdk) {
diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
index d269603656114..9ca42ac5f4ec1 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/ListPluginsCommand.java
@@ -78,15 +78,14 @@ private void printPlugin(Environment env, Terminal terminal, Path plugin, String
PluginInfo info = PluginInfo.readFromProperties(env.pluginsDir().resolve(plugin));
terminal.println(Terminal.Verbosity.SILENT, prefix + info.getName());
terminal.println(Terminal.Verbosity.VERBOSE, info.toString(prefix));
- if (info.getOpenSearchVersion().equals(Version.CURRENT) == false) {
+ if (!PluginsService.isPluginVersionCompatible(info, Version.CURRENT)) {
terminal.errorPrintln(
"WARNING: plugin ["
+ info.getName()
+ "] was built for OpenSearch version "
- + info.getVersion()
- + " but version "
+ + info.getOpenSearchVersionRangesString()
+ + " and is not compatible with "
+ Version.CURRENT
- + " is required"
);
}
}
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
index f4532f5f83cc4..c264788df20e8 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/InstallPluginCommandTests.java
@@ -70,8 +70,10 @@
import org.opensearch.core.util.FileSystemUtils;
import org.opensearch.env.Environment;
import org.opensearch.env.TestEnvironment;
+import org.opensearch.semver.SemverRange;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.PosixPermissionsResetter;
+import org.opensearch.test.VersionUtils;
import org.junit.After;
import org.junit.Before;
@@ -284,6 +286,35 @@ static void writePlugin(String name, Path structure, String... additionalProps)
writeJar(structure.resolve("plugin.jar"), className);
}
+ static void writePlugin(String name, Path structure, SemverRange opensearchVersionRange, String... additionalProps) throws IOException {
+ String[] properties = Stream.concat(
+ Stream.of(
+ "description",
+ "fake desc",
+ "name",
+ name,
+ "version",
+ "1.0",
+ "dependencies",
+ "{opensearch:\"" + opensearchVersionRange + "\"}",
+ "java.version",
+ System.getProperty("java.specification.version"),
+ "classname",
+ "FakePlugin"
+ ),
+ Arrays.stream(additionalProps)
+ ).toArray(String[]::new);
+ PluginTestUtil.writePluginProperties(structure, properties);
+ String className = name.substring(0, 1).toUpperCase(Locale.ENGLISH) + name.substring(1) + "Plugin";
+ writeJar(structure.resolve("plugin.jar"), className);
+ }
+
+ static Path createPlugin(String name, Path structure, SemverRange opensearchVersionRange, String... additionalProps)
+ throws IOException {
+ writePlugin(name, structure, opensearchVersionRange, additionalProps);
+ return writeZip(structure, null);
+ }
+
static void writePluginSecurityPolicy(Path pluginDir, String... permissions) throws IOException {
StringBuilder securityPolicyContent = new StringBuilder("grant {\n ");
for (String permission : permissions) {
@@ -867,6 +898,32 @@ public void testInstallMisspelledOfficialPlugins() throws Exception {
assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin"));
}
+ public void testInstallPluginWithCompatibleDependencies() throws Exception {
+ Tuple<Path, Environment> env = createEnv(fs, temp);
+ Path pluginDir = createPluginDir(temp);
+ String pluginZip = createPlugin("fake", pluginDir, SemverRange.fromString("~" + Version.CURRENT.toString())).toUri()
+ .toURL()
+ .toString();
+ skipJarHellCommand.execute(terminal, Collections.singletonList(pluginZip), false, env.v2());
+ assertThat(terminal.getOutput(), containsString("100%"));
+ }
+
+ public void testInstallPluginWithIncompatibleDependencies() throws Exception {
+ Tuple<Path, Environment> env = createEnv(fs, temp);
+ Path pluginDir = createPluginDir(temp);
+ // Core version is behind plugin version by one w.r.t patch, hence incompatible
+ Version coreVersion = Version.CURRENT;
+ Version pluginVersion = VersionUtils.getVersion(coreVersion.major, coreVersion.minor, (byte) (coreVersion.revision + 1));
+ String pluginZip = createPlugin("fake", pluginDir, SemverRange.fromString("~" + pluginVersion.toString())).toUri()
+ .toURL()
+ .toString();
+ IllegalArgumentException e = expectThrows(
+ IllegalArgumentException.class,
+ () -> skipJarHellCommand.execute(terminal, Collections.singletonList(pluginZip), false, env.v2())
+ );
+ assertThat(e.getMessage(), containsString("Plugin [fake] was built for OpenSearch version ~" + pluginVersion));
+ }
+
public void testBatchFlag() throws Exception {
MockTerminal terminal = new MockTerminal();
installPlugin(terminal, true);
diff --git a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
index 7bbced38c7adb..6878efce4c804 100644
--- a/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
+++ b/distribution/tools/plugin-cli/src/test/java/org/opensearch/plugins/ListPluginsCommandTests.java
@@ -278,7 +278,7 @@ public void testExistingIncompatiblePlugin() throws Exception {
buildFakePlugin(env, "fake desc 2", "fake_plugin2", "org.fake2");
MockTerminal terminal = listPlugins(home);
- String message = "plugin [fake_plugin1] was built for OpenSearch version 1.0 but version " + Version.CURRENT + " is required";
+ String message = "plugin [fake_plugin1] was built for OpenSearch version 5.0.0 and is not compatible with " + Version.CURRENT;
assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput());
assertEquals("WARNING: " + message + "\n", terminal.getErrorOutput());
@@ -286,4 +286,41 @@ public void testExistingIncompatiblePlugin() throws Exception {
terminal = listPlugins(home, params);
assertEquals("fake_plugin1\nfake_plugin2\n", terminal.getOutput());
}
+
+ public void testPluginWithDependencies() throws Exception {
+ PluginTestUtil.writePluginProperties(
+ env.pluginsDir().resolve("fake_plugin1"),
+ "description",
+ "fake desc 1",
+ "name",
+ "fake_plugin1",
+ "version",
+ "1.0",
+ "dependencies",
+ "{opensearch:\"" + Version.CURRENT + "\"}",
+ "java.version",
+ System.getProperty("java.specification.version"),
+ "classname",
+ "org.fake1"
+ );
+ String[] params = { "-v" };
+ MockTerminal terminal = listPlugins(home, params);
+ assertEquals(
+ buildMultiline(
+ "Plugins directory: " + env.pluginsDir(),
+ "fake_plugin1",
+ "- Plugin information:",
+ "Name: fake_plugin1",
+ "Description: fake desc 1",
+ "Version: 1.0",
+ "OpenSearch Version: " + Version.CURRENT.toString(),
+ "Java Version: " + System.getProperty("java.specification.version"),
+ "Native Controller: false",
+ "Extended Plugins: []",
+ " * Classname: org.fake1",
+ "Folder name: null"
+ ),
+ terminal.getOutput()
+ );
+ }
}
diff --git a/gradle/code-coverage.gradle b/gradle/code-coverage.gradle
index 822b471e2e034..3ca6b1fe84ea7 100644
--- a/gradle/code-coverage.gradle
+++ b/gradle/code-coverage.gradle
@@ -13,7 +13,7 @@ repositories {
gradlePluginPortal()
// TODO: Find the way to use the repositories from RepositoriesSetupPlugin
maven {
- url = "https://artifacts.opensearch.org/snapshots/lucene/"
+ url = "https://ci.opensearch.org/ci/dbc/snapshots/lucene/"
}
}
@@ -37,7 +37,7 @@ tasks.withType(JacocoReport).configureEach {
if (System.getProperty("tests.coverage")) {
reporting {
reports {
- testCodeCoverageReport(JacocoCoverageReport) {
+ testCodeCoverageReport(JacocoCoverageReport) {
testType = TestSuiteType.UNIT_TEST
}
}
@@ -45,6 +45,6 @@ if (System.getProperty("tests.coverage")) {
// Attach code coverage report task to Gradle check task
project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure {
- dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
+ dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
}
}
diff --git a/gradle/ide.gradle b/gradle/ide.gradle
index bc442a081adf0..14d6b2982ccd0 100644
--- a/gradle/ide.gradle
+++ b/gradle/ide.gradle
@@ -82,6 +82,9 @@ if (System.getProperty('idea.active') == 'true') {
runConfigurations {
defaults(JUnit) {
vmParameters = '-ea -Djava.locale.providers=SPI,COMPAT'
+ if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_17) {
+ vmParameters += ' -Djava.security.manager=allow'
+ }
}
}
copyright {
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index f1d76d80bbfa3..82a4add334a7d 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -11,7 +11,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b
+distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d
diff --git a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
index 0f289c09bbae2..60c0717a28f05 100644
--- a/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
+++ b/libs/common/src/main/java/org/opensearch/common/network/InetAddresses.java
@@ -52,7 +52,7 @@ public static boolean isInetAddress(String ipString) {
return ipStringToBytes(ipString) != null;
}
- private static byte[] ipStringToBytes(String ipString) {
+ public static byte[] ipStringToBytes(String ipString) {
// Make a first pass to categorize the characters in this string.
boolean hasColon = false;
boolean hasDot = false;
diff --git a/libs/core/licenses/lucene-core-9.11.0-snapshot-8a555eb.jar.sha1 b/libs/core/licenses/lucene-core-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..25beb34873c0c
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+fe1cf5663be8bdb6aa757fd4101bc551684c90fb
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.9.2.jar.sha1 b/libs/core/licenses/lucene-core-9.9.2.jar.sha1
deleted file mode 100644
index 2d03d69369b9f..0000000000000
--- a/libs/core/licenses/lucene-core-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7699f80220fc80b08413902560904623b88beb9f
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index 307da89c18d48..66ba446d4fc54 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -99,7 +99,9 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0);
public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_2);
- public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_2);
+ public static final Version V_2_12_1 = new Version(2120199, org.apache.lucene.util.Version.LUCENE_9_9_2);
+ public static final Version V_2_13_0 = new Version(2130099, org.apache.lucene.util.Version.LUCENE_9_10_0);
+ public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_11_0);
public static final Version CURRENT = V_3_0_0;
public static Version fromId(int id) {
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
index 3e996bdee83a2..ea23b3d81a775 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamInput.java
@@ -56,6 +56,7 @@
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
import org.opensearch.core.xcontent.MediaType;
import org.opensearch.core.xcontent.MediaTypeRegistry;
+import org.opensearch.semver.SemverRange;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
@@ -750,6 +751,8 @@ public Object readGenericValue() throws IOException {
return readCollection(StreamInput::readGenericValue, HashSet::new, Collections.emptySet());
case 26:
return readBigInteger();
+ case 27:
+ return readSemverRange();
default:
throw new IOException("Can't read unknown type [" + type + "]");
}
@@ -1090,6 +1093,10 @@ public Version readVersion() throws IOException {
return Version.fromId(readVInt());
}
+ public SemverRange readSemverRange() throws IOException {
+ return SemverRange.fromString(readString());
+ }
+
/** Reads the {@link Version} from the input stream */
public Build readBuild() throws IOException {
// the following is new for opensearch: we write the distribution to support any "forks"
diff --git a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
index 2d69e1c686df3..b7599265aece3 100644
--- a/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
+++ b/libs/core/src/main/java/org/opensearch/core/common/io/stream/StreamOutput.java
@@ -54,6 +54,7 @@
import org.opensearch.core.common.settings.SecureString;
import org.opensearch.core.common.text.Text;
import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException;
+import org.opensearch.semver.SemverRange;
import java.io.EOFException;
import java.io.FileNotFoundException;
@@ -784,6 +785,10 @@ public final void writeOptionalInstant(@Nullable Instant instant) throws IOExcep
o.writeByte((byte) 26);
o.writeString(v.toString());
});
+ writers.put(SemverRange.class, (o, v) -> {
+ o.writeByte((byte) 27);
+ o.writeSemverRange((SemverRange) v);
+ });
WRITERS = Collections.unmodifiableMap(writers);
}
@@ -1101,6 +1106,10 @@ public void writeVersion(final Version version) throws IOException {
writeVInt(version.id);
}
+ public void writeSemverRange(final SemverRange range) throws IOException {
+ writeString(range.toString());
+ }
+
/** Writes the OpenSearch {@link Build} informn to the output stream */
public void writeBuild(final Build build) throws IOException {
// the following is new for opensearch: we write the distribution name to support any "forks" of the code
diff --git a/libs/core/src/main/java/org/opensearch/semver/SemverRange.java b/libs/core/src/main/java/org/opensearch/semver/SemverRange.java
new file mode 100644
index 0000000000000..da87acc7124aa
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/SemverRange.java
@@ -0,0 +1,170 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver;
+
+import org.opensearch.Version;
+import org.opensearch.common.Nullable;
+import org.opensearch.core.xcontent.ToXContentFragment;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.semver.expr.Caret;
+import org.opensearch.semver.expr.Equal;
+import org.opensearch.semver.expr.Expression;
+import org.opensearch.semver.expr.Tilde;
+
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Optional;
+
+import static java.util.Arrays.stream;
+
+/**
+ * Represents a single semver range that allows for specifying which {@code org.opensearch.Version}s satisfy the range.
+ * It is composed of a range version and a range operator. Following are the supported operators:
+ *
+ * - '=' Requires exact match with the range version. For example, =1.2.3 range would match only 1.2.3
+ * - '~' Allows for patch version variability starting from the range version. For example, ~1.2.3 range would match versions greater than or equal to 1.2.3 but less than 1.3.0
+ * - '^' Allows for patch and minor version variability starting from the range version. For example, ^1.2.3 range would match versions greater than or equal to 1.2.3 but less than 2.0.0
+ *
+ */
+public class SemverRange implements ToXContentFragment {
+
+ private final Version rangeVersion;
+ private final RangeOperator rangeOperator;
+
+ public SemverRange(final Version rangeVersion, final RangeOperator rangeOperator) {
+ this.rangeVersion = rangeVersion;
+ this.rangeOperator = rangeOperator;
+ }
+
+ /**
+ * Constructs a {@code SemverRange} from its string representation.
+ * @param range given range
+ * @return a {@code SemverRange}
+ */
+ public static SemverRange fromString(final String range) {
+ RangeOperator rangeOperator = RangeOperator.fromRange(range);
+ String version = range.replaceFirst(rangeOperator.asEscapedString(), "");
+ if (!Version.stringHasLength(version)) {
+ throw new IllegalArgumentException("Version cannot be empty");
+ }
+ return new SemverRange(Version.fromString(version), rangeOperator);
+ }
+
+ /**
+ * Return the range operator for this range.
+ * @return range operator
+ */
+ public RangeOperator getRangeOperator() {
+ return rangeOperator;
+ }
+
+ /**
+ * Return the version for this range.
+ * @return the range version
+ */
+ public Version getRangeVersion() {
+ return rangeVersion;
+ }
+
+ /**
+ * Check if range is satisfied by given version string.
+ *
+ * @param versionToEvaluate version to check
+ * @return {@code true} if range is satisfied by version, {@code false} otherwise
+ */
+ public boolean isSatisfiedBy(final String versionToEvaluate) {
+ return isSatisfiedBy(Version.fromString(versionToEvaluate));
+ }
+
+ /**
+ * Check if range is satisfied by given version.
+ *
+ * @param versionToEvaluate version to check
+ * @return {@code true} if range is satisfied by version, {@code false} otherwise
+ * @see #isSatisfiedBy(String)
+ */
+ public boolean isSatisfiedBy(final Version versionToEvaluate) {
+ return this.rangeOperator.expression.evaluate(this.rangeVersion, versionToEvaluate);
+ }
+
+ @Override
+ public boolean equals(@Nullable final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ SemverRange range = (SemverRange) o;
+ return Objects.equals(rangeVersion, range.rangeVersion) && rangeOperator == range.rangeOperator;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(rangeVersion, rangeOperator);
+ }
+
+ @Override
+ public String toString() {
+ return rangeOperator.asString() + rangeVersion;
+ }
+
+ @Override
+ public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
+ return builder.value(toString());
+ }
+
+ /**
+ * A range operator.
+ */
+ public enum RangeOperator {
+
+ EQ("=", new Equal()),
+ TILDE("~", new Tilde()),
+ CARET("^", new Caret()),
+ DEFAULT("", new Equal());
+
+ private final String operator;
+ private final Expression expression;
+
+ RangeOperator(final String operator, final Expression expression) {
+ this.operator = operator;
+ this.expression = expression;
+ }
+
+ /**
+ * String representation of the range operator.
+ *
+ * @return range operator as string
+ */
+ public String asString() {
+ return operator;
+ }
+
+ /**
+ * Escaped string representation of the range operator,
+ * if operator is a regex character.
+ *
+ * @return range operator as escaped string, if operator is a regex character
+ */
+ public String asEscapedString() {
+ if (Objects.equals(operator, "^")) {
+ return "\\^";
+ }
+ return operator;
+ }
+
+ public static RangeOperator fromRange(final String range) {
+ Optional<RangeOperator> rangeOperator = stream(values()).filter(
+ operator -> operator != DEFAULT && range.startsWith(operator.asString())
+ ).findFirst();
+ return rangeOperator.orElse(DEFAULT);
+ }
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java b/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java
new file mode 100644
index 0000000000000..ce2b74dde0865
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Caret.java
@@ -0,0 +1,32 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * Expression to evaluate version compatibility allowing for minor and patch version variability.
+ */
+public class Caret implements Expression {
+
+ /**
+ * Checks if the given version is compatible with the range version allowing for minor and
+ * patch version variability.
+ * Allows all versions starting from the rangeVersion up to the next major version (exclusive).
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return {@code true} if the versions are compatible, {@code false} otherwise
+ */
+ @Override
+ public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) {
+ Version lower = rangeVersion;
+ Version upper = Version.fromString((rangeVersion.major + 1) + ".0.0");
+ return versionToEvaluate.onOrAfter(lower) && versionToEvaluate.before(upper);
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java b/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java
new file mode 100644
index 0000000000000..d3e1d63060b77
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Equal.java
@@ -0,0 +1,29 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * Expression to evaluate equality of versions.
+ */
+public class Equal implements Expression {
+
+ /**
+ * Checks if a given version matches a certain range version.
+ *
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return {@code true} if the versions are equal, {@code false} otherwise
+ */
+ @Override
+ public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) {
+ return versionToEvaluate.equals(rangeVersion);
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java b/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java
new file mode 100644
index 0000000000000..68bb4e249836a
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Expression.java
@@ -0,0 +1,26 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * An evaluation expression.
+ */
+public interface Expression {
+
+ /**
+ * Evaluates an expression.
+ *
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return the result of the expression evaluation
+ */
+ boolean evaluate(final Version rangeVersion, final Version versionToEvaluate);
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java b/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java
new file mode 100644
index 0000000000000..5f62ffe62ddeb
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/Tilde.java
@@ -0,0 +1,31 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+
+/**
+ * Expression to evaluate version compatibility allowing patch version variability.
+ */
+public class Tilde implements Expression {
+
+ /**
+ * Checks if the given version is compatible with a range version allowing for patch version variability.
+ * Allows all versions starting from the rangeVersion up to the next minor version (exclusive).
+ * @param rangeVersion the version specified in range
+ * @param versionToEvaluate the version to evaluate
+ * @return {@code true} if the versions are compatible, {@code false} otherwise
+ */
+ @Override
+ public boolean evaluate(final Version rangeVersion, final Version versionToEvaluate) {
+ Version lower = rangeVersion;
+ Version upper = Version.fromString(rangeVersion.major + "." + (rangeVersion.minor + 1) + "." + 0);
+ return versionToEvaluate.onOrAfter(lower) && versionToEvaluate.before(upper);
+ }
+}
diff --git a/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java b/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java
new file mode 100644
index 0000000000000..06cf9feaaaf8f
--- /dev/null
+++ b/libs/core/src/main/java/org/opensearch/semver/expr/package-info.java
@@ -0,0 +1,9 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+/** Expressions library module */
+package org.opensearch.semver.expr;
diff --git a/server/src/main/java/org/opensearch/common/cache/tier/package-info.java b/libs/core/src/main/java/org/opensearch/semver/package-info.java
similarity index 70%
rename from server/src/main/java/org/opensearch/common/cache/tier/package-info.java
rename to libs/core/src/main/java/org/opensearch/semver/package-info.java
index 7ad81dbe3073c..ada935582d408 100644
--- a/server/src/main/java/org/opensearch/common/cache/tier/package-info.java
+++ b/libs/core/src/main/java/org/opensearch/semver/package-info.java
@@ -6,5 +6,5 @@
* compatible open source license.
*/
-/** Base package for cache tier support. */
-package org.opensearch.common.cache.tier;
+/** Semver library module */
+package org.opensearch.semver;
diff --git a/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java b/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java
new file mode 100644
index 0000000000000..af1d95b2561b7
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/SemverRangeTests.java
@@ -0,0 +1,105 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver;
+
+import org.opensearch.test.OpenSearchTestCase;
+
+public class SemverRangeTests extends OpenSearchTestCase {
+
+ public void testRangeWithEqualsOperator() {
+ SemverRange range = SemverRange.fromString("=1.2.3");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.EQ);
+ assertTrue(range.isSatisfiedBy("1.2.3"));
+ assertFalse(range.isSatisfiedBy("1.2.4"));
+ assertFalse(range.isSatisfiedBy("1.3.3"));
+ assertFalse(range.isSatisfiedBy("2.2.3"));
+ }
+
+ public void testRangeWithDefaultOperator() {
+ SemverRange range = SemverRange.fromString("1.2.3");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.DEFAULT);
+ assertTrue(range.isSatisfiedBy("1.2.3"));
+ assertFalse(range.isSatisfiedBy("1.2.4"));
+ assertFalse(range.isSatisfiedBy("1.3.3"));
+ assertFalse(range.isSatisfiedBy("2.2.3"));
+ }
+
+ public void testRangeWithTildeOperator() {
+ SemverRange range = SemverRange.fromString("~2.3.4");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.TILDE);
+ assertTrue(range.isSatisfiedBy("2.3.4"));
+ assertTrue(range.isSatisfiedBy("2.3.5"));
+ assertTrue(range.isSatisfiedBy("2.3.12"));
+
+ assertFalse(range.isSatisfiedBy("2.3.0"));
+ assertFalse(range.isSatisfiedBy("2.3.3"));
+ assertFalse(range.isSatisfiedBy("2.4.0"));
+ assertFalse(range.isSatisfiedBy("3.0.0"));
+ }
+
+ public void testRangeWithCaretOperator() {
+ SemverRange range = SemverRange.fromString("^2.3.4");
+ assertEquals(range.getRangeOperator(), SemverRange.RangeOperator.CARET);
+ assertTrue(range.isSatisfiedBy("2.3.4"));
+ assertTrue(range.isSatisfiedBy("2.3.5"));
+ assertTrue(range.isSatisfiedBy("2.4.12"));
+
+ assertFalse(range.isSatisfiedBy("2.3.3"));
+ assertFalse(range.isSatisfiedBy("3.0.0"));
+ }
+
+ public void testInvalidRanges() {
+ IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString(""));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("="));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("=1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("=1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~"));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("~1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^"));
+ assertEquals("Version cannot be empty", ex.getMessage());
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("^1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$1"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ ex = expectThrows(IllegalArgumentException.class, () -> SemverRange.fromString("$1.2"));
+ assertTrue(ex.getMessage().contains("the version needs to contain major, minor, and revision, and optionally the build"));
+
+ expectThrows(NumberFormatException.class, () -> SemverRange.fromString("$1.2.3"));
+ }
+}
diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java
new file mode 100644
index 0000000000000..3cb168d42cda0
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/expr/CaretTests.java
@@ -0,0 +1,30 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+import org.opensearch.test.OpenSearchTestCase;
+
+public class CaretTests extends OpenSearchTestCase {
+
+ public void testMinorAndPatchVersionVariability() {
+ Caret caretExpr = new Caret();
+ Version rangeVersion = Version.fromString("1.2.3");
+
+ // Compatible versions
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.3")));
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.4")));
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.3.3")));
+ assertTrue(caretExpr.evaluate(rangeVersion, Version.fromString("1.9.9")));
+
+ // Incompatible versions
+ assertFalse(caretExpr.evaluate(rangeVersion, Version.fromString("1.2.2")));
+ assertFalse(caretExpr.evaluate(rangeVersion, Version.fromString("2.0.0")));
+ }
+}
diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java
new file mode 100644
index 0000000000000..fb090865157ed
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/expr/EqualTests.java
@@ -0,0 +1,22 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+import org.opensearch.test.OpenSearchTestCase;
+
+public class EqualTests extends OpenSearchTestCase {
+
+ public void testEquality() {
+ Equal equalExpr = new Equal();
+ Version rangeVersion = Version.fromString("1.2.3");
+ assertTrue(equalExpr.evaluate(rangeVersion, Version.fromString("1.2.3")));
+ assertFalse(equalExpr.evaluate(rangeVersion, Version.fromString("1.2.4")));
+ }
+}
diff --git a/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java b/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java
new file mode 100644
index 0000000000000..8666611645c3a
--- /dev/null
+++ b/libs/core/src/test/java/org/opensearch/semver/expr/TildeTests.java
@@ -0,0 +1,29 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.semver.expr;
+
+import org.opensearch.Version;
+import org.opensearch.test.OpenSearchTestCase;
+
+public class TildeTests extends OpenSearchTestCase {
+
+ public void testPatchVersionVariability() {
+ Tilde tildeExpr = new Tilde();
+ Version rangeVersion = Version.fromString("1.2.3");
+
+ assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.3")));
+ assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.4")));
+ assertTrue(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.9")));
+
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.0")));
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.2.2")));
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("1.3.0")));
+ assertFalse(tildeExpr.evaluate(rangeVersion, Version.fromString("2.0.0")));
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
index d57def9406b17..f38fdd6412d79 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java
@@ -34,6 +34,11 @@ public Counter createUpDownCounter(String name, String description, String unit)
return metricsTelemetry.createUpDownCounter(name, description, unit);
}
+ @Override
+ public Histogram createHistogram(String name, String description, String unit) {
+ return metricsTelemetry.createHistogram(name, description, unit);
+ }
+
@Override
public void close() throws IOException {
metricsTelemetry.close();
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java
new file mode 100644
index 0000000000000..95ada626e21ee
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java
@@ -0,0 +1,35 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * Histogram records the value for an existing metric.
+ * {@opensearch.experimental}
+ */
+@ExperimentalApi
+public interface Histogram {
+
+ /**
+ * record value.
+ * @param value value to be added.
+ */
+ void record(double value);
+
+ /**
+ * record value along with the attributes.
+ *
+ * @param value value to be added.
+ * @param tags attributes/dimensions of the metric.
+ */
+ void record(double value, Tags tags);
+
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
index 61b3df089928b..94d19bda31f34 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java
@@ -36,4 +36,15 @@ public interface MetricsRegistry extends Closeable {
* @return counter.
*/
Counter createUpDownCounter(String name, String description, String unit);
+
+ /**
+ * Creates the histogram type of Metric. Implementation framework will take care
+ * of the bucketing strategy.
+ *
+ * @param name name of the histogram.
+ * @param description any description about the metric.
+ * @param unit unit of the metric.
+ * @return histogram.
+ */
+ Histogram createHistogram(String name, String description, String unit);
}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
new file mode 100644
index 0000000000000..20e72bccad899
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
@@ -0,0 +1,38 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.telemetry.metrics.Histogram;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * No-op {@link Histogram}
+ * {@opensearch.internal}
+ */
+@InternalApi
+public class NoopHistogram implements Histogram {
+
+ /**
+ * No-op Histogram instance
+ */
+ public final static NoopHistogram INSTANCE = new NoopHistogram();
+
+ private NoopHistogram() {}
+
+ @Override
+ public void record(double value) {
+
+ }
+
+ @Override
+ public void record(double value, Tags tags) {
+
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
index 640c6842a8960..d3dda68cfae71 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -10,6 +10,7 @@
import org.opensearch.common.annotation.InternalApi;
import org.opensearch.telemetry.metrics.Counter;
+import org.opensearch.telemetry.metrics.Histogram;
import org.opensearch.telemetry.metrics.MetricsRegistry;
import java.io.IOException;
@@ -38,6 +39,11 @@ public Counter createUpDownCounter(String name, String description, String unit)
return NoopCounter.INSTANCE;
}
+ @Override
+ public Histogram createHistogram(String name, String description, String unit) {
+ return NoopHistogram.INSTANCE;
+ }
+
@Override
public void close() throws IOException {
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
index 6171641db5f07..02f126075845b 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -48,4 +48,15 @@ public void testUpDownCounter() {
assertSame(mockCounter, counter);
}
+ public void testHistogram() {
+ Histogram mockHistogram = mock(Histogram.class);
+ when(defaultMeterRegistry.createHistogram(any(String.class), any(String.class), any(String.class))).thenReturn(mockHistogram);
+ Histogram histogram = defaultMeterRegistry.createHistogram(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testHistogram",
+ "test histogram",
+ "ms"
+ );
+ assertSame(mockHistogram, histogram);
+ }
+
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
index 4c05f0058f2ed..2f4dada29780d 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
@@ -19,6 +19,7 @@
*/
@InternalApi
public interface XContentContraints {
+ final String DEFAULT_CODEPOINT_LIMIT_PROPERTY = "opensearch.xcontent.codepoint.max";
final String DEFAULT_MAX_STRING_LEN_PROPERTY = "opensearch.xcontent.string.length.max";
final String DEFAULT_MAX_NAME_LEN_PROPERTY = "opensearch.xcontent.name.length.max";
final String DEFAULT_MAX_DEPTH_PROPERTY = "opensearch.xcontent.depth.max";
@@ -32,4 +33,6 @@ public interface XContentContraints {
final int DEFAULT_MAX_DEPTH = Integer.parseInt(
System.getProperty(DEFAULT_MAX_DEPTH_PROPERTY, "1000" /* StreamReadConstraints.DEFAULT_MAX_DEPTH */)
);
+
+ final int DEFAULT_CODEPOINT_LIMIT = Integer.parseInt(System.getProperty(DEFAULT_CODEPOINT_LIMIT_PROPERTY, "52428800" /* ~50 Mb */));
}
diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
index 3f6a4b3aeead7..0e69c6c33b923 100644
--- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
+++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java
@@ -38,6 +38,7 @@
import com.fasterxml.jackson.core.StreamReadFeature;
import com.fasterxml.jackson.core.StreamWriteConstraints;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactoryBuilder;
import org.opensearch.common.xcontent.XContentContraints;
import org.opensearch.common.xcontent.XContentType;
@@ -55,6 +56,8 @@
import java.io.Reader;
import java.util.Set;
+import org.yaml.snakeyaml.LoaderOptions;
+
/**
* A YAML based content implementation using Jackson.
*/
@@ -67,7 +70,9 @@ public static XContentBuilder contentBuilder() throws IOException {
public static final YamlXContent yamlXContent;
static {
- yamlFactory = new YAMLFactory();
+ final LoaderOptions loaderOptions = new LoaderOptions();
+ loaderOptions.setCodePointLimit(DEFAULT_CODEPOINT_LIMIT);
+ yamlFactory = new YAMLFactoryBuilder(new YAMLFactory()).loaderOptions(loaderOptions).build();
yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
yamlFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build());
yamlFactory.setStreamReadConstraints(
diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
index 0e431d8ea4277..81a2b0e290121 100644
--- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
+++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java
@@ -85,7 +85,8 @@ public class XContentParserTests extends OpenSearchTestCase {
() -> randomAlphaOfLengthBetween(1, SmileXContent.DEFAULT_MAX_STRING_LEN),
/* YAML parser limitation */
XContentType.YAML,
- () -> randomAlphaOfLengthBetween(1, 3140000)
+ /* use 75% of the limit, difficult to get the exact size of the content right */
+ () -> randomRealisticUnicodeOfCodepointLengthBetween(1, (int) (YamlXContent.DEFAULT_CODEPOINT_LIMIT * 0.75))
);
private static final Map> OFF_LIMIT_GENERATORS = Map.of(
@@ -97,7 +98,7 @@ public class XContentParserTests extends OpenSearchTestCase {
() -> randomAlphaOfLength(SmileXContent.DEFAULT_MAX_STRING_LEN + 1),
/* YAML parser limitation */
XContentType.YAML,
- () -> randomRealisticUnicodeOfCodepointLength(3145730)
+ () -> randomRealisticUnicodeOfCodepointLength(YamlXContent.DEFAULT_CODEPOINT_LIMIT + 1)
);
private static final Map> FIELD_NAME_GENERATORS = Map.of(
diff --git a/modules/cache-common/build.gradle b/modules/cache-common/build.gradle
new file mode 100644
index 0000000000000..c7052896e609b
--- /dev/null
+++ b/modules/cache-common/build.gradle
@@ -0,0 +1,17 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+opensearchplugin {
+ description 'Module for caches which are optional and do not require additional security permission'
+ classname 'org.opensearch.cache.common.tier.TieredSpilloverCachePlugin'
+}
+
+test {
+ // TODO: Adding permission in plugin-security.policy doesn't seem to work.
+ systemProperty 'tests.security.manager', 'false'
+}
diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java
new file mode 100644
index 0000000000000..7b64a7e93fe27
--- /dev/null
+++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCache.java
@@ -0,0 +1,335 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.cache.common.tier;
+
+import org.opensearch.common.annotation.ExperimentalApi;
+import org.opensearch.common.cache.CacheType;
+import org.opensearch.common.cache.ICache;
+import org.opensearch.common.cache.LoadAwareCacheLoader;
+import org.opensearch.common.cache.RemovalListener;
+import org.opensearch.common.cache.RemovalNotification;
+import org.opensearch.common.cache.store.config.CacheConfig;
+import org.opensearch.common.settings.Setting;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.util.concurrent.ReleasableLock;
+import org.opensearch.common.util.iterable.Iterables;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Function;
+
+/**
+ * This cache spills over evicted items from the heap tier to the disk tier. All new items are first cached on heap,
+ * and items evicted from the on-heap cache are moved to the disk-based cache. If the disk-based cache also fills up,
+ * items are eventually evicted from it as well, which will result in a cache miss.
+ *
+ * @param Type of key
+ * @param Type of value
+ *
+ * @opensearch.experimental
+ */
+@ExperimentalApi
+public class TieredSpilloverCache implements ICache {
+
+ private final ICache diskCache;
+ private final ICache onHeapCache;
+ private final RemovalListener removalListener;
+ ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
+ ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock());
+ ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock());
+ /**
+ * Maintains caching tiers in ascending order of cache latency.
+ */
+ private final List> cacheList;
+
+ TieredSpilloverCache(Builder builder) {
+ Objects.requireNonNull(builder.onHeapCacheFactory, "onHeap cache builder can't be null");
+ Objects.requireNonNull(builder.diskCacheFactory, "disk cache builder can't be null");
+ this.removalListener = Objects.requireNonNull(builder.removalListener, "Removal listener can't be null");
+
+ this.onHeapCache = builder.onHeapCacheFactory.create(
+ new CacheConfig.Builder().setRemovalListener(new RemovalListener() {
+ @Override
+ public void onRemoval(RemovalNotification notification) {
+ try (ReleasableLock ignore = writeLock.acquire()) {
+ diskCache.put(notification.getKey(), notification.getValue());
+ }
+ removalListener.onRemoval(notification);
+ }
+ })
+ .setKeyType(builder.cacheConfig.getKeyType())
+ .setValueType(builder.cacheConfig.getValueType())
+ .setSettings(builder.cacheConfig.getSettings())
+ .setWeigher(builder.cacheConfig.getWeigher())
+ .build(),
+ builder.cacheType,
+ builder.cacheFactories
+
+ );
+ this.diskCache = builder.diskCacheFactory.create(builder.cacheConfig, builder.cacheType, builder.cacheFactories);
+ this.cacheList = Arrays.asList(onHeapCache, diskCache);
+ }
+
+ // Package private for testing
+ ICache getOnHeapCache() {
+ return onHeapCache;
+ }
+
+ // Package private for testing
+ ICache getDiskCache() {
+ return diskCache;
+ }
+
+ @Override
+ public V get(K key) {
+ return getValueFromTieredCache().apply(key);
+ }
+
+ @Override
+ public void put(K key, V value) {
+ try (ReleasableLock ignore = writeLock.acquire()) {
+ onHeapCache.put(key, value);
+ }
+ }
+
+ @Override
+ public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception {
+
+ V cacheValue = getValueFromTieredCache().apply(key);
+ if (cacheValue == null) {
+ // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside.
+ // This is needed as there can be many requests for the same key at the same time and we only want to load
+ // the value once.
+ V value = null;
+ try (ReleasableLock ignore = writeLock.acquire()) {
+ value = onHeapCache.computeIfAbsent(key, loader);
+ }
+ return value;
+ }
+ return cacheValue;
+ }
+
+ @Override
+ public void invalidate(K key) {
+ // We are trying to invalidate the key from all caches though it would be present in only one of them.
+ // Doing this as we don't know where it is located. We could do a get from both and check, but that will
+ // also trigger a hit/miss listener event, so ignoring it for now.
+ try (ReleasableLock ignore = writeLock.acquire()) {
+ for (ICache cache : cacheList) {
+ cache.invalidate(key);
+ }
+ }
+ }
+
+ @Override
+ public void invalidateAll() {
+ try (ReleasableLock ignore = writeLock.acquire()) {
+ for (ICache cache : cacheList) {
+ cache.invalidateAll();
+ }
+ }
+ }
+
+ /**
+ * Provides an iteration over both onHeap and disk keys. This is not protected from any mutations to the cache.
+ * @return An iterable over (onHeap + disk) keys
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ public Iterable keys() {
+ return Iterables.concat(onHeapCache.keys(), diskCache.keys());
+ }
+
+ @Override
+ public long count() {
+ long count = 0;
+ for (ICache cache : cacheList) {
+ count += cache.count();
+ }
+ return count;
+ }
+
+ @Override
+ public void refresh() {
+ try (ReleasableLock ignore = writeLock.acquire()) {
+ for (ICache cache : cacheList) {
+ cache.refresh();
+ }
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ for (ICache cache : cacheList) {
+ cache.close();
+ }
+ }
+
+ private Function getValueFromTieredCache() {
+ return key -> {
+ try (ReleasableLock ignore = readLock.acquire()) {
+ for (ICache cache : cacheList) {
+ V value = cache.get(key);
+ if (value != null) {
+ // update hit stats
+ return value;
+ } else {
+ // update miss stats
+ }
+ }
+ }
+ return null;
+ };
+ }
+
+ /**
+ * Factory to create TieredSpilloverCache objects.
+ */
+ public static class TieredSpilloverCacheFactory implements ICache.Factory {
+
+ /**
+ * Defines cache name
+ */
+ public static final String TIERED_SPILLOVER_CACHE_NAME = "tiered_spillover";
+
+ /**
+ * Default constructor
+ */
+ public TieredSpilloverCacheFactory() {}
+
+ @Override
+ public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) {
+ Settings settings = config.getSettings();
+ Setting onHeapSetting = TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace(
+ cacheType.getSettingPrefix()
+ );
+ String onHeapCacheStoreName = onHeapSetting.get(settings);
+ if (!cacheFactories.containsKey(onHeapCacheStoreName)) {
+ throw new IllegalArgumentException(
+ "No associated onHeapCache found for tieredSpilloverCache for " + "cacheType:" + cacheType
+ );
+ }
+ ICache.Factory onHeapCacheFactory = cacheFactories.get(onHeapCacheStoreName);
+
+ Setting onDiskSetting = TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace(
+ cacheType.getSettingPrefix()
+ );
+ String diskCacheStoreName = onDiskSetting.get(settings);
+ if (!cacheFactories.containsKey(diskCacheStoreName)) {
+ throw new IllegalArgumentException(
+ "No associated diskCache found for tieredSpilloverCache for " + "cacheType:" + cacheType
+ );
+ }
+ ICache.Factory diskCacheFactory = cacheFactories.get(diskCacheStoreName);
+ return new Builder().setDiskCacheFactory(diskCacheFactory)
+ .setOnHeapCacheFactory(onHeapCacheFactory)
+ .setRemovalListener(config.getRemovalListener())
+ .setCacheConfig(config)
+ .setCacheType(cacheType)
+ .build();
+ }
+
+ @Override
+ public String getCacheName() {
+ return TIERED_SPILLOVER_CACHE_NAME;
+ }
+ }
+
+ /**
+ * Builder object for tiered spillover cache.
+ * @param Type of key
+ * @param Type of value
+ */
+ public static class Builder {
+ private ICache.Factory onHeapCacheFactory;
+ private ICache.Factory diskCacheFactory;
+ private RemovalListener removalListener;
+ private CacheConfig cacheConfig;
+ private CacheType cacheType;
+ private Map cacheFactories;
+
+ /**
+ * Default constructor
+ */
+ public Builder() {}
+
+ /**
+ * Set onHeap cache factory
+ * @param onHeapCacheFactory Factory for onHeap cache.
+ * @return builder
+ */
+ public Builder setOnHeapCacheFactory(ICache.Factory onHeapCacheFactory) {
+ this.onHeapCacheFactory = onHeapCacheFactory;
+ return this;
+ }
+
+ /**
+ * Set disk cache factory
+ * @param diskCacheFactory Factory for disk cache.
+ * @return builder
+ */
+ public Builder setDiskCacheFactory(ICache.Factory diskCacheFactory) {
+ this.diskCacheFactory = diskCacheFactory;
+ return this;
+ }
+
+ /**
+ * Set removal listener for tiered cache.
+ * @param removalListener Removal listener
+ * @return builder
+ */
+ public Builder setRemovalListener(RemovalListener removalListener) {
+ this.removalListener = removalListener;
+ return this;
+ }
+
+ /**
+ * Set cache config.
+ * @param cacheConfig cache config.
+ * @return builder
+ */
+ public Builder setCacheConfig(CacheConfig cacheConfig) {
+ this.cacheConfig = cacheConfig;
+ return this;
+ }
+
+ /**
+ * Set cache type.
+ * @param cacheType Cache type
+ * @return builder
+ */
+ public Builder setCacheType(CacheType cacheType) {
+ this.cacheType = cacheType;
+ return this;
+ }
+
+ /**
+ * Set cache factories
+ * @param cacheFactories cache factories
+ * @return builder
+ */
+ public Builder setCacheFactories(Map cacheFactories) {
+ this.cacheFactories = cacheFactories;
+ return this;
+ }
+
+ /**
+ * Build tiered spillover cache.
+ * @return TieredSpilloverCache
+ */
+ public TieredSpilloverCache build() {
+ return new TieredSpilloverCache<>(this);
+ }
+ }
+}
diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java
new file mode 100644
index 0000000000000..6b0620c5fbede
--- /dev/null
+++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCachePlugin.java
@@ -0,0 +1,62 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.cache.common.tier;
+
+import org.opensearch.common.cache.CacheType;
+import org.opensearch.common.cache.ICache;
+import org.opensearch.common.settings.Setting;
+import org.opensearch.plugins.CachePlugin;
+import org.opensearch.plugins.Plugin;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Plugin for TieredSpilloverCache.
+ */
+public class TieredSpilloverCachePlugin extends Plugin implements CachePlugin {
+
+ /**
+ * Plugin name
+ */
+ public static final String TIERED_CACHE_SPILLOVER_PLUGIN_NAME = "tieredSpilloverCachePlugin";
+
+ /**
+ * Default constructor
+ */
+ public TieredSpilloverCachePlugin() {}
+
+ @Override
+ public Map getCacheFactoryMap() {
+ return Map.of(
+ TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME,
+ new TieredSpilloverCache.TieredSpilloverCacheFactory()
+ );
+ }
+
+ @Override
+ public List> getSettings() {
+ List> settingList = new ArrayList<>();
+ for (CacheType cacheType : CacheType.values()) {
+ settingList.add(
+ TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix())
+ );
+ settingList.add(
+ TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace(cacheType.getSettingPrefix())
+ );
+ }
+ return settingList;
+ }
+
+ @Override
+ public String getName() {
+ return TIERED_CACHE_SPILLOVER_PLUGIN_NAME;
+ }
+}
diff --git a/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheSettings.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheSettings.java
new file mode 100644
index 0000000000000..50b4177f599d1
--- /dev/null
+++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/TieredSpilloverCacheSettings.java
@@ -0,0 +1,43 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.cache.common.tier;
+
+import org.opensearch.common.settings.Setting;
+
+import static org.opensearch.common.settings.Setting.Property.NodeScope;
+
+/**
+ * Settings related to TieredSpilloverCache.
+ */
+public class TieredSpilloverCacheSettings {
+
+ /**
+ * Setting which defines the onHeap cache store to be used in TieredSpilloverCache.
+ *
+ * Pattern: {cache_type}.tiered_spillover.onheap.store.name
+ * Example: indices.request.cache.tiered_spillover.onheap.store.name
+ */
+ public static final Setting.AffixSetting TIERED_SPILLOVER_ONHEAP_STORE_NAME = Setting.suffixKeySetting(
+ TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME + ".onheap.store.name",
+ (key) -> Setting.simpleString(key, "", NodeScope)
+ );
+
+ /**
+ * Setting which defines the disk cache store to be used in TieredSpilloverCache.
+ */
+ public static final Setting.AffixSetting TIERED_SPILLOVER_DISK_STORE_NAME = Setting.suffixKeySetting(
+ TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME + ".disk.store.name",
+ (key) -> Setting.simpleString(key, "", NodeScope)
+ );
+
+ /**
+ * Default constructor
+ */
+ TieredSpilloverCacheSettings() {}
+}
diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/package-info.java
similarity index 68%
rename from server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java
rename to modules/cache-common/src/main/java/org/opensearch/cache/common/tier/package-info.java
index 7a4e0fa7201fd..fa2de3c14b5dc 100644
--- a/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java
+++ b/modules/cache-common/src/main/java/org/opensearch/cache/common/tier/package-info.java
@@ -6,5 +6,5 @@
* compatible open source license.
*/
-/** Package related to tiered cache enums */
-package org.opensearch.common.cache.store.enums;
+/** Package related to cache tiers */
+package org.opensearch.cache.common.tier;
diff --git a/modules/cache-common/src/main/plugin-metadata/plugin-security.policy b/modules/cache-common/src/main/plugin-metadata/plugin-security.policy
new file mode 100644
index 0000000000000..12fe9f2ddb60b
--- /dev/null
+++ b/modules/cache-common/src/main/plugin-metadata/plugin-security.policy
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+grant {
+ permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";
+ permission java.lang.RuntimePermission "createClassLoader";
+};
diff --git a/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java
new file mode 100644
index 0000000000000..1172a48e97c6a
--- /dev/null
+++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCachePluginTests.java
@@ -0,0 +1,24 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.cache.common.tier;
+
+import org.opensearch.common.cache.ICache;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.Map;
+
+public class TieredSpilloverCachePluginTests extends OpenSearchTestCase {
+
+ public void testGetCacheFactoryMap() {
+ TieredSpilloverCachePlugin tieredSpilloverCachePlugin = new TieredSpilloverCachePlugin();
+ Map map = tieredSpilloverCachePlugin.getCacheFactoryMap();
+ assertNotNull(map.get(TieredSpilloverCache.TieredSpilloverCacheFactory.TIERED_SPILLOVER_CACHE_NAME));
+ assertEquals(TieredSpilloverCachePlugin.TIERED_CACHE_SPILLOVER_PLUGIN_NAME, tieredSpilloverCachePlugin.getName());
+ }
+}
diff --git a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java
similarity index 55%
rename from server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java
rename to modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java
index eb75244c6f8b1..7c9569f5defe2 100644
--- a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java
+++ b/modules/cache-common/src/test/java/org/opensearch/cache/common/tier/TieredSpilloverCacheTests.java
@@ -6,21 +6,22 @@
* compatible open source license.
*/
-package org.opensearch.common.cache.tier;
+package org.opensearch.cache.common.tier;
+import org.opensearch.common.cache.CacheType;
+import org.opensearch.common.cache.ICache;
import org.opensearch.common.cache.LoadAwareCacheLoader;
-import org.opensearch.common.cache.RemovalReason;
+import org.opensearch.common.cache.RemovalListener;
+import org.opensearch.common.cache.RemovalNotification;
import org.opensearch.common.cache.store.OpenSearchOnHeapCache;
-import org.opensearch.common.cache.store.StoreAwareCache;
-import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification;
-import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder;
-import org.opensearch.common.cache.store.enums.CacheStoreType;
-import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener;
+import org.opensearch.common.cache.store.builders.ICacheBuilder;
+import org.opensearch.common.cache.store.config.CacheConfig;
+import org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings;
import org.opensearch.common.metrics.CounterMetric;
+import org.opensearch.common.settings.Settings;
import org.opensearch.test.OpenSearchTestCase;
import java.util.ArrayList;
-import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
@@ -31,15 +32,27 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
+import static org.opensearch.common.cache.store.settings.OpenSearchOnHeapCacheSettings.MAXIMUM_SIZE_IN_BYTES_KEY;
+
public class TieredSpilloverCacheTests extends OpenSearchTestCase {
public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception {
int onHeapCacheSize = randomIntBetween(10, 30);
- MockCacheEventListener eventListener = new MockCacheEventListener();
+ int keyValueSize = 50;
+
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
onHeapCacheSize,
randomIntBetween(1, 4),
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build(),
0
);
int numOfItems1 = randomIntBetween(1, onHeapCacheSize / 2 - 1);
@@ -51,9 +64,7 @@ public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception
LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader();
tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader);
}
- assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+ assertEquals(0, removalListener.evictionsMetric.count());
// Try to hit cache again with some randomization.
int numOfItems2 = randomIntBetween(1, onHeapCacheSize / 2 - 1);
@@ -71,28 +82,56 @@ public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception
cacheMiss++;
}
}
- assertEquals(cacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
- assertEquals(numOfItems1 + cacheMiss, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+ assertEquals(0, removalListener.evictionsMetric.count());
}
- public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception {
+ public void testComputeIfAbsentWithFactoryBasedCacheCreation() throws Exception {
int onHeapCacheSize = randomIntBetween(10, 30);
int diskCacheSize = randomIntBetween(60, 100);
int totalSize = onHeapCacheSize + diskCacheSize;
- MockCacheEventListener eventListener = new MockCacheEventListener();
- StoreAwareCacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes(
- onHeapCacheSize * 50
- ).setWeigher((k, v) -> 50); // Will support onHeapCacheSize entries.
+ int keyValueSize = 50;
+
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+
+ // Set the desired settings needed to create a TieredSpilloverCache object with INDICES_REQUEST_CACHE cacheType.
+ Settings settings = Settings.builder()
+ .put(
+ TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace(
+ CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()
+ ).getKey(),
+ OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME
+ )
+ .put(
+ TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace(
+ CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()
+ ).getKey(),
+ MockOnDiskCache.MockDiskCacheFactory.NAME
+ )
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
- StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize)
- .setDeliberateDelay(0);
+ ICache tieredSpilloverICache = new TieredSpilloverCache.TieredSpilloverCacheFactory().create(
+ new CacheConfig.Builder().setKeyType(String.class)
+ .setKeyType(String.class)
+ .setWeigher((k, v) -> keyValueSize)
+ .setRemovalListener(removalListener)
+ .setSettings(settings)
+ .build(),
+ CacheType.INDICES_REQUEST_CACHE,
+ Map.of(
+ OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME,
+ new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(),
+ MockOnDiskCache.MockDiskCacheFactory.NAME,
+ new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))
+ )
+ );
- TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder()
- .setOnHeapCacheBuilder(cacheBuilder)
- .setOnDiskCacheBuilder(diskCacheBuilder)
- .setListener(eventListener)
- .build();
+ TieredSpilloverCache tieredSpilloverCache = (TieredSpilloverCache) tieredSpilloverICache;
// Put values in cache more than it's size and cause evictions from onHeap.
int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize);
@@ -103,22 +142,158 @@ public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception {
LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader();
tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader);
}
- long actualDiskCacheSize = tieredSpilloverCache.getOnDiskCache().get().count();
- assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
- assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+ long actualDiskCacheSize = tieredSpilloverCache.getDiskCache().count();
+ assertEquals(actualDiskCacheSize, removalListener.evictionsMetric.count()); // Evictions from onHeap equal to
+ // disk cache size.
+ tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add);
+ tieredSpilloverCache.getDiskCache().keys().forEach(diskTierKeys::add);
+
+ assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size());
+ assertEquals(tieredSpilloverCache.getDiskCache().count(), diskTierKeys.size());
+ }
+
+ public void testWithFactoryCreationWithOnHeapCacheNotPresent() {
+ int onHeapCacheSize = randomIntBetween(10, 30);
+ int keyValueSize = 50;
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+
+ // Set the settings without onHeap cache settings.
+ Settings settings = Settings.builder()
+ .put(
+ TieredSpilloverCacheSettings.TIERED_SPILLOVER_DISK_STORE_NAME.getConcreteSettingForNamespace(
+ CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()
+ ).getKey(),
+ MockOnDiskCache.MockDiskCacheFactory.NAME
+ )
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
+
+ IllegalArgumentException ex = assertThrows(
+ IllegalArgumentException.class,
+ () -> new TieredSpilloverCache.TieredSpilloverCacheFactory().create(
+ new CacheConfig.Builder().setKeyType(String.class)
+ .setKeyType(String.class)
+ .setWeigher((k, v) -> keyValueSize)
+ .setRemovalListener(removalListener)
+ .setSettings(settings)
+ .build(),
+ CacheType.INDICES_REQUEST_CACHE,
+ Map.of(
+ OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME,
+ new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(),
+ MockOnDiskCache.MockDiskCacheFactory.NAME,
+ new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))
+ )
+ )
+ );
assertEquals(
- eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count(),
- eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count()
+ ex.getMessage(),
+ "No associated onHeapCache found for tieredSpilloverCache for " + "cacheType:" + CacheType.INDICES_REQUEST_CACHE
);
- assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count());
+ }
+
+ public void testWithFactoryCreationWithDiskCacheNotPresent() {
+ int onHeapCacheSize = randomIntBetween(10, 30);
+ int keyValueSize = 50;
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+
+ // Set the settings without disk cache settings.
+ Settings settings = Settings.builder()
+ .put(
+ TieredSpilloverCacheSettings.TIERED_SPILLOVER_ONHEAP_STORE_NAME.getConcreteSettingForNamespace(
+ CacheType.INDICES_REQUEST_CACHE.getSettingPrefix()
+ ).getKey(),
+ OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME
+ )
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
+
+ IllegalArgumentException ex = assertThrows(
+ IllegalArgumentException.class,
+ () -> new TieredSpilloverCache.TieredSpilloverCacheFactory().create(
+ new CacheConfig.Builder().setKeyType(String.class)
+ .setKeyType(String.class)
+ .setWeigher((k, v) -> keyValueSize)
+ .setRemovalListener(removalListener)
+ .setSettings(settings)
+ .build(),
+ CacheType.INDICES_REQUEST_CACHE,
+ Map.of(
+ OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory.NAME,
+ new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory(),
+ MockOnDiskCache.MockDiskCacheFactory.NAME,
+ new MockOnDiskCache.MockDiskCacheFactory(0, randomIntBetween(100, 300))
+ )
+ )
+ );
+ assertEquals(
+ ex.getMessage(),
+ "No associated diskCache found for tieredSpilloverCache for " + "cacheType:" + CacheType.INDICES_REQUEST_CACHE
+ );
+ }
+
+ public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception {
+ int onHeapCacheSize = randomIntBetween(10, 30);
+ int diskCacheSize = randomIntBetween(60, 100);
+ int totalSize = onHeapCacheSize + diskCacheSize;
+ int keyValueSize = 50;
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+ ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory();
+ CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class)
+ .setKeyType(String.class)
+ .setWeigher((k, v) -> keyValueSize)
+ .setRemovalListener(removalListener)
+ .setSettings(
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build()
+ )
+ .build();
+
+ ICache.Factory mockDiskCacheFactory = new MockOnDiskCache.MockDiskCacheFactory(0, diskCacheSize);
+
+ TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder()
+ .setOnHeapCacheFactory(onHeapCacheFactory)
+ .setDiskCacheFactory(mockDiskCacheFactory)
+ .setCacheConfig(cacheConfig)
+ .setRemovalListener(removalListener)
+ .setCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .build();
+
+ // Put values in cache more than it's size and cause evictions from onHeap.
+ int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize);
+ List onHeapKeys = new ArrayList<>();
+ List diskTierKeys = new ArrayList<>();
+ for (int iter = 0; iter < numOfItems1; iter++) {
+ String key = UUID.randomUUID().toString();
+ LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader();
+ tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader);
+ }
+ long actualDiskCacheSize = tieredSpilloverCache.getDiskCache().count();
+ assertEquals(actualDiskCacheSize, removalListener.evictionsMetric.count()); // Evictions from onHeap equal to
+ // disk cache size.
tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add);
- tieredSpilloverCache.getOnDiskCache().get().keys().forEach(diskTierKeys::add);
+ tieredSpilloverCache.getDiskCache().keys().forEach(diskTierKeys::add);
assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size());
- assertEquals(tieredSpilloverCache.getOnDiskCache().get().count(), diskTierKeys.size());
+ assertEquals(tieredSpilloverCache.getDiskCache().count(), diskTierKeys.size());
// Try to hit cache again with some randomization.
int numOfItems2 = randomIntBetween(50, 200);
@@ -146,23 +321,27 @@ public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception {
tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader);
cacheMiss++;
}
- // On heap cache misses would also include diskCacheHits as it means it missed onHeap cache.
- assertEquals(numOfItems1 + cacheMiss + diskCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
- assertEquals(onHeapCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
- assertEquals(cacheMiss + numOfItems1, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count());
- assertEquals(diskCacheHit, eventListener.enumMap.get(CacheStoreType.DISK).hitCount.count());
}
public void testComputeIfAbsentWithEvictionsFromBothTier() throws Exception {
int onHeapCacheSize = randomIntBetween(10, 30);
int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100);
int totalSize = onHeapCacheSize + diskCacheSize;
+ int keyValueSize = 50;
- MockCacheEventListener eventListener = new MockCacheEventListener();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
onHeapCacheSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build(),
0
);
@@ -171,20 +350,28 @@ public void testComputeIfAbsentWithEvictionsFromBothTier() throws Exception {
LoadAwareCacheLoader tieredCacheLoader = getLoadAwareCacheLoader();
tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader);
}
- assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0);
- assertTrue(eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count() > 0);
+ assertTrue(removalListener.evictionsMetric.count() > 0);
}
public void testGetAndCount() throws Exception {
int onHeapCacheSize = randomIntBetween(10, 30);
int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100);
+ int keyValueSize = 50;
int totalSize = onHeapCacheSize + diskCacheSize;
- MockCacheEventListener eventListener = new MockCacheEventListener();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
onHeapCacheSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build(),
0
);
@@ -219,57 +406,51 @@ public void testGetAndCount() throws Exception {
assertEquals(numOfItems1, tieredSpilloverCache.count());
}
- public void testWithDiskTierNull() throws Exception {
- int onHeapCacheSize = randomIntBetween(10, 30);
- MockCacheEventListener eventListener = new MockCacheEventListener();
-
- StoreAwareCacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder()
- .setMaximumWeightInBytes(onHeapCacheSize * 20)
- .setWeigher((k, v) -> 20); // Will support upto onHeapCacheSize entries
- TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder()
- .setOnHeapCacheBuilder(onHeapCacheBuilder)
- .setListener(eventListener)
- .build();
-
- int numOfItems = randomIntBetween(onHeapCacheSize + 1, onHeapCacheSize * 3);
- for (int iter = 0; iter < numOfItems; iter++) {
- LoadAwareCacheLoader loadAwareCacheLoader = getLoadAwareCacheLoader();
- tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), loadAwareCacheLoader);
- }
- assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0);
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count());
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count());
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count());
- }
-
public void testPut() {
int onHeapCacheSize = randomIntBetween(10, 30);
int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100);
+ int keyValueSize = 50;
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
onHeapCacheSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build(),
0
);
String key = UUID.randomUUID().toString();
String value = UUID.randomUUID().toString();
tieredSpilloverCache.put(key, value);
- assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).cachedCount.count());
assertEquals(1, tieredSpilloverCache.count());
}
public void testPutAndVerifyNewItemsArePresentOnHeapCache() throws Exception {
int onHeapCacheSize = randomIntBetween(200, 400);
int diskCacheSize = randomIntBetween(450, 800);
+ int keyValueSize = 50;
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
- onHeapCacheSize,
+ keyValueSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ (onHeapCacheSize * keyValueSize) + "b"
+ )
+ .build(),
0
);
@@ -281,14 +462,14 @@ public boolean isLoaded() {
}
@Override
- public String load(String key) throws Exception {
+ public String load(String key) {
return UUID.randomUUID().toString();
}
});
}
assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count());
- assertEquals(0, tieredSpilloverCache.getOnDiskCache().get().count());
+ assertEquals(0, tieredSpilloverCache.getDiskCache().count());
// Again try to put OnHeap cache capacity amount of new items.
List newKeyList = new ArrayList<>();
@@ -318,32 +499,38 @@ public String load(String key) {
for (int i = 0; i < actualOnHeapCacheKeys.size(); i++) {
assertTrue(newKeyList.contains(actualOnHeapCacheKeys.get(i)));
}
-
assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count());
- assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnDiskCache().get().count());
+ assertEquals(onHeapCacheSize, tieredSpilloverCache.getDiskCache().count());
}
public void testInvalidate() {
int onHeapCacheSize = 1;
int diskCacheSize = 10;
+ int keyValueSize = 20;
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
onHeapCacheSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build(),
0
);
String key = UUID.randomUUID().toString();
String value = UUID.randomUUID().toString();
// First try to invalidate without the key present in cache.
tieredSpilloverCache.invalidate(key);
- assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count());
// Now try to invalidate with the key present in onHeap cache.
tieredSpilloverCache.put(key, value);
tieredSpilloverCache.invalidate(key);
- assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count());
assertEquals(0, tieredSpilloverCache.count());
tieredSpilloverCache.put(key, value);
@@ -353,20 +540,27 @@ public void testInvalidate() {
assertEquals(2, tieredSpilloverCache.count());
// Again invalidate older key
tieredSpilloverCache.invalidate(key);
- assertEquals(1, eventListener.enumMap.get(CacheStoreType.DISK).invalidationMetric.count());
assertEquals(1, tieredSpilloverCache.count());
}
public void testCacheKeys() throws Exception {
int onHeapCacheSize = randomIntBetween(10, 30);
int diskCacheSize = randomIntBetween(60, 100);
- int totalSize = onHeapCacheSize + diskCacheSize;
+ int keyValueSize = 50;
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
- onHeapCacheSize,
+ keyValueSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build(),
0
);
List onHeapKeys = new ArrayList<>();
@@ -388,7 +582,7 @@ public void testCacheKeys() throws Exception {
List actualOnHeapKeys = new ArrayList<>();
List actualOnDiskKeys = new ArrayList<>();
Iterable onHeapiterable = tieredSpilloverCache.getOnHeapCache().keys();
- Iterable onDiskiterable = tieredSpilloverCache.getOnDiskCache().get().keys();
+ Iterable onDiskiterable = tieredSpilloverCache.getDiskCache().keys();
onHeapiterable.iterator().forEachRemaining(actualOnHeapKeys::add);
onDiskiterable.iterator().forEachRemaining(actualOnDiskKeys::add);
for (String onHeapKey : onHeapKeys) {
@@ -414,14 +608,14 @@ public void testCacheKeys() throws Exception {
}
public void testRefresh() {
- int onHeapCacheSize = randomIntBetween(10, 30);
int diskCacheSize = randomIntBetween(60, 100);
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
- onHeapCacheSize,
+ 50,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.EMPTY,
0
);
tieredSpilloverCache.refresh();
@@ -430,13 +624,22 @@ public void testRefresh() {
public void testInvalidateAll() throws Exception {
int onHeapCacheSize = randomIntBetween(10, 30);
int diskCacheSize = randomIntBetween(60, 100);
+ int keyValueSize = 50;
int totalSize = onHeapCacheSize + diskCacheSize;
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
- onHeapCacheSize,
+ keyValueSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build(),
0
);
// Put values in cache more than it's size and cause evictions from onHeap.
@@ -462,13 +665,23 @@ public void testInvalidateAll() throws Exception {
public void testComputeIfAbsentConcurrently() throws Exception {
int onHeapCacheSize = randomIntBetween(100, 300);
int diskCacheSize = randomIntBetween(200, 400);
-
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
+ int keyValueSize = 50;
+
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+ Settings settings = Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ onHeapCacheSize * keyValueSize + "b"
+ )
+ .build();
TieredSpilloverCache tieredSpilloverCache = intializeTieredSpilloverCache(
- onHeapCacheSize,
+ keyValueSize,
diskCacheSize,
- eventListener,
+ removalListener,
+ settings,
0
);
@@ -485,7 +698,7 @@ public void testComputeIfAbsentConcurrently() throws Exception {
for (int i = 0; i < numberOfSameKeys; i++) {
threads[i] = new Thread(() -> {
try {
- LoadAwareCacheLoader loadAwareCacheLoader = new LoadAwareCacheLoader() {
+ LoadAwareCacheLoader loadAwareCacheLoader = new LoadAwareCacheLoader<>() {
boolean isLoaded = false;
@Override
@@ -494,7 +707,7 @@ public boolean isLoaded() {
}
@Override
- public Object load(Object key) throws Exception {
+ public String load(String key) {
isLoaded = true;
return value;
}
@@ -525,19 +738,31 @@ public Object load(Object key) throws Exception {
public void testConcurrencyForEvictionFlow() throws Exception {
int diskCacheSize = randomIntBetween(450, 800);
- MockCacheEventListener eventListener = new MockCacheEventListener<>();
-
- StoreAwareCacheBuilder cacheBuilder = new OpenSearchOnHeapCache.Builder().setMaximumWeightInBytes(
- 200
- ).setWeigher((k, v) -> 150);
-
- StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diskCacheSize)
- .setDeliberateDelay(500);
-
+ MockCacheRemovalListener removalListener = new MockCacheRemovalListener<>();
+
+ ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory();
+ ICache.Factory diskCacheFactory = new MockOnDiskCache.MockDiskCacheFactory(500, diskCacheSize);
+ CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class)
+ .setKeyType(String.class)
+ .setWeigher((k, v) -> 150)
+ .setRemovalListener(removalListener)
+ .setSettings(
+ Settings.builder()
+ .put(
+ OpenSearchOnHeapCacheSettings.getSettingListForCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .get(MAXIMUM_SIZE_IN_BYTES_KEY)
+ .getKey(),
+ 200 + "b"
+ )
+ .build()
+ )
+ .build();
TieredSpilloverCache tieredSpilloverCache = new TieredSpilloverCache.Builder()
- .setOnHeapCacheBuilder(cacheBuilder)
- .setOnDiskCacheBuilder(diskCacheBuilder)
- .setListener(eventListener)
+ .setOnHeapCacheFactory(onHeapCacheFactory)
+ .setDiskCacheFactory(diskCacheFactory)
+ .setRemovalListener(removalListener)
+ .setCacheConfig(cacheConfig)
+ .setCacheType(CacheType.INDICES_REQUEST_CACHE)
.build();
String keyToBeEvicted = "key1";
@@ -572,7 +797,7 @@ public String load(String key) {
assertBusy(() -> { assertTrue(loadAwareCacheLoader.isLoaded()); }, 100, TimeUnit.MILLISECONDS); // We wait for new key to be loaded
// after which it eviction flow is
// guaranteed to occur.
- StoreAwareCache onDiskCache = tieredSpilloverCache.getOnDiskCache().get();
+ ICache onDiskCache = tieredSpilloverCache.getDiskCache();
// Now on a different thread, try to get key(above one which got evicted) from tiered cache. We expect this
// should return not null value as it should be present on diskCache.
@@ -589,57 +814,23 @@ public String load(String key) {
countDownLatch.await();
assertNotNull(actualValue.get());
countDownLatch1.await();
- assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+ assertEquals(1, removalListener.evictionsMetric.count());
assertEquals(1, tieredSpilloverCache.getOnHeapCache().count());
assertEquals(1, onDiskCache.count());
assertNotNull(onDiskCache.get(keyToBeEvicted));
}
- class MockCacheEventListener implements StoreAwareCacheEventListener {
-
- EnumMap enumMap = new EnumMap<>(CacheStoreType.class);
-
- MockCacheEventListener() {
- for (CacheStoreType cacheStoreType : CacheStoreType.values()) {
- enumMap.put(cacheStoreType, new TestStatsHolder());
- }
- }
+ class MockCacheRemovalListener implements RemovalListener {
+ final CounterMetric evictionsMetric = new CounterMetric();
@Override
- public void onMiss(K key, CacheStoreType cacheStoreType) {
- enumMap.get(cacheStoreType).missCount.inc();
- }
-
- @Override
- public void onRemoval(StoreAwareCacheRemovalNotification notification) {
- if (notification.getRemovalReason().equals(RemovalReason.EVICTED)) {
- enumMap.get(notification.getCacheStoreType()).evictionsMetric.inc();
- } else if (notification.getRemovalReason().equals(RemovalReason.INVALIDATED)) {
- enumMap.get(notification.getCacheStoreType()).invalidationMetric.inc();
- }
- }
-
- @Override
- public void onHit(K key, V value, CacheStoreType cacheStoreType) {
- enumMap.get(cacheStoreType).hitCount.inc();
- }
-
- @Override
- public void onCached(K key, V value, CacheStoreType cacheStoreType) {
- enumMap.get(cacheStoreType).cachedCount.inc();
- }
-
- class TestStatsHolder {
- final CounterMetric evictionsMetric = new CounterMetric();
- final CounterMetric hitCount = new CounterMetric();
- final CounterMetric missCount = new CounterMetric();
- final CounterMetric cachedCount = new CounterMetric();
- final CounterMetric invalidationMetric = new CounterMetric();
+ public void onRemoval(RemovalNotification notification) {
+ evictionsMetric.inc();
}
}
private LoadAwareCacheLoader getLoadAwareCacheLoader() {
- return new LoadAwareCacheLoader() {
+ return new LoadAwareCacheLoader<>() {
boolean isLoaded = false;
@Override
@@ -656,34 +847,39 @@ public boolean isLoaded() {
}
private TieredSpilloverCache intializeTieredSpilloverCache(
- int onHeapCacheSize,
- int diksCacheSize,
- StoreAwareCacheEventListener eventListener,
+ int keyValueSize,
+ int diskCacheSize,
+ RemovalListener removalListener,
+ Settings settings,
long diskDeliberateDelay
) {
- StoreAwareCacheBuilder diskCacheBuilder = new MockOnDiskCache.Builder().setMaxSize(diksCacheSize)
- .setDeliberateDelay(diskDeliberateDelay);
- StoreAwareCacheBuilder onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder()
- .setMaximumWeightInBytes(onHeapCacheSize * 20)
- .setWeigher((k, v) -> 20); // Will support upto onHeapCacheSize entries
- return new TieredSpilloverCache.Builder().setOnHeapCacheBuilder(onHeapCacheBuilder)
- .setOnDiskCacheBuilder(diskCacheBuilder)
- .setListener(eventListener)
+ ICache.Factory onHeapCacheFactory = new OpenSearchOnHeapCache.OpenSearchOnHeapCacheFactory();
+ CacheConfig cacheConfig = new CacheConfig.Builder().setKeyType(String.class)
+ .setKeyType(String.class)
+ .setWeigher((k, v) -> keyValueSize)
+ .setRemovalListener(removalListener)
+ .setSettings(settings)
+ .build();
+
+ ICache.Factory mockDiskCacheFactory = new MockOnDiskCache.MockDiskCacheFactory(diskDeliberateDelay, diskCacheSize);
+
+ return new TieredSpilloverCache.Builder().setCacheType(CacheType.INDICES_REQUEST_CACHE)
+ .setRemovalListener(removalListener)
+ .setOnHeapCacheFactory(onHeapCacheFactory)
+ .setDiskCacheFactory(mockDiskCacheFactory)
+ .setCacheConfig(cacheConfig)
.build();
}
}
-class MockOnDiskCache implements StoreAwareCache {
+class MockOnDiskCache implements ICache {
Map cache;
int maxSize;
-
long delay;
- StoreAwareCacheEventListener eventListener;
- MockOnDiskCache(int maxSize, StoreAwareCacheEventListener eventListener, long delay) {
+ MockOnDiskCache(int maxSize, long delay) {
this.maxSize = maxSize;
- this.eventListener = eventListener;
this.delay = delay;
this.cache = new ConcurrentHashMap();
}
@@ -691,18 +887,12 @@ class MockOnDiskCache implements StoreAwareCache {
@Override
public V get(K key) {
V value = cache.get(key);
- if (value != null) {
- eventListener.onHit(key, value, CacheStoreType.DISK);
- } else {
- eventListener.onMiss(key, CacheStoreType.DISK);
- }
return value;
}
@Override
public void put(K key, V value) {
if (this.cache.size() >= maxSize) { // For simplification
- eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, value, RemovalReason.EVICTED, CacheStoreType.DISK));
return;
}
try {
@@ -711,11 +901,10 @@ public void put(K key, V value) {
throw new RuntimeException(e);
}
this.cache.put(key, value);
- eventListener.onCached(key, value, CacheStoreType.DISK);
}
@Override
- public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Exception {
+ public V computeIfAbsent(K key, LoadAwareCacheLoader loader) {
V value = cache.computeIfAbsent(key, key1 -> {
try {
return loader.load(key);
@@ -723,20 +912,11 @@ public V computeIfAbsent(K key, LoadAwareCacheLoader loader) throws Except
throw new RuntimeException(e);
}
});
- if (!loader.isLoaded()) {
- eventListener.onHit(key, value, CacheStoreType.DISK);
- } else {
- eventListener.onMiss(key, CacheStoreType.DISK);
- eventListener.onCached(key, value, CacheStoreType.DISK);
- }
return value;
}
@Override
public void invalidate(K key) {
- if (this.cache.containsKey(key)) {
- eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, null, RemovalReason.INVALIDATED, CacheStoreType.DISK));
- }
this.cache.remove(key);
}
@@ -759,18 +939,40 @@ public long count() {
public void refresh() {}
@Override
- public CacheStoreType getTierType() {
- return CacheStoreType.DISK;
+ public void close() {
+
+ }
+
+ public static class MockDiskCacheFactory implements Factory {
+
+ static final String NAME = "mockDiskCache";
+ final long delay;
+ final int maxSize;
+
+ MockDiskCacheFactory(long delay, int maxSize) {
+ this.delay = delay;
+ this.maxSize = maxSize;
+ }
+
+ @Override
+ public ICache create(CacheConfig config, CacheType cacheType, Map cacheFactories) {
+ return new Builder().setMaxSize(maxSize).setDeliberateDelay(delay).build();
+ }
+
+ @Override
+ public String getCacheName() {
+ return NAME;
+ }
}
- public static class Builder extends StoreAwareCacheBuilder {
+ public static class Builder extends ICacheBuilder {
int maxSize;
long delay;
@Override
- public StoreAwareCache build() {
- return new MockOnDiskCache(maxSize, this.getEventListener(), delay);
+ public ICache build() {
+ return new MockOnDiskCache(this.maxSize, this.delay);
}
public Builder setMaxSize(int maxSize) {
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java
new file mode 100644
index 0000000000000..c968fb2f6c2da
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CommunityIdProcessor.java
@@ -0,0 +1,647 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.common.hash.MessageDigests;
+import org.opensearch.common.network.InetAddresses;
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+
+import java.security.MessageDigest;
+import java.util.Arrays;
+import java.util.Base64;
+import java.util.Locale;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException;
+
+/**
+ * Processor that generates a community id flow hash for network flow tuples; the algorithm is defined in
+ * the Community ID Flow Hashing specification.
+ */
+public class CommunityIdProcessor extends AbstractProcessor {
+ public static final String TYPE = "community_id";
+ // the version of the community id flow hashing algorithm
+ private static final String COMMUNITY_ID_HASH_VERSION = "1";
+ // 0 byte for padding
+ private static final byte PADDING_BYTE = 0;
+ // the maximum code number for network protocol, ICMP message type and code as defined by IANA
+ private static final int IANA_COMMON_MAX_NUMBER = 255;
+ // the minimum code number for network protocol, ICMP message type and code as defined by IANA
+ private static final int IANA_COMMON_MIN_NUMBER = 0;
+ // the minimum seed for generating hash
+ private static final int MIN_SEED = 0;
+ // the maximum seed for generating hash
+ private static final int MAX_SEED = 65535;
+ // the minimum port number in transport layer
+ private static final int MIN_PORT = 0;
+ // the maximum port number in transport layer
+ private static final int MAX_PORT = 65535;
+ private static final String ICMP_MESSAGE_TYPE = "type";
+ private static final String ICMP_MESSAGE_CODE = "code";
+ private final String sourceIPField;
+ private final String sourcePortField;
+ private final String destinationIPField;
+ private final String destinationPortField;
+ private final String ianaProtocolNumberField;
+ private final String protocolField;
+ private final String icmpTypeField;
+ private final String icmpCodeField;
+ private final int seed;
+ private final String targetField;
+ private final boolean ignoreMissing;
+
+ CommunityIdProcessor(
+ String tag,
+ String description,
+ String sourceIPField,
+ String sourcePortField,
+ String destinationIPField,
+ String destinationPortField,
+ String ianaProtocolNumberField,
+ String protocolField,
+ String icmpTypeField,
+ String icmpCodeField,
+ int seed,
+ String targetField,
+ boolean ignoreMissing
+ ) {
+ super(tag, description);
+ this.sourceIPField = sourceIPField;
+ this.sourcePortField = sourcePortField;
+ this.destinationIPField = destinationIPField;
+ this.destinationPortField = destinationPortField;
+ this.ianaProtocolNumberField = ianaProtocolNumberField;
+ this.protocolField = protocolField;
+ this.icmpTypeField = icmpTypeField;
+ this.icmpCodeField = icmpCodeField;
+ this.seed = seed;
+ this.targetField = targetField;
+ this.ignoreMissing = ignoreMissing;
+ }
+
+ public String getSourceIPField() {
+ return sourceIPField;
+ }
+
+ public String getSourcePortField() {
+ return sourcePortField;
+ }
+
+ public String getDestinationIPField() {
+ return destinationIPField;
+ }
+
+ public String getDestinationPortField() {
+ return destinationPortField;
+ }
+
+ public String getIANAProtocolNumberField() {
+ return ianaProtocolNumberField;
+ }
+
+ public String getProtocolField() {
+ return protocolField;
+ }
+
+ public String getIcmpTypeField() {
+ return icmpTypeField;
+ }
+
+ public String getIcmpCodeField() {
+ return icmpCodeField;
+ }
+
+ public int getSeed() {
+ return seed;
+ }
+
+ public String getTargetField() {
+ return targetField;
+ }
+
+ public boolean isIgnoreMissing() {
+ return ignoreMissing;
+ }
+
+ @Override
+ public IngestDocument execute(IngestDocument document) {
+ // resolve protocol firstly
+ Protocol protocol = resolveProtocol(document);
+ // exit quietly if protocol cannot be resolved and ignore_missing is true
+ if (protocol == null) {
+ return document;
+ }
+
+ // resolve ip secondly, exit quietly if either source ip or destination ip cannot be resolved and ignore_missing is true
+ byte[] sourceIPByteArray = resolveIP(document, sourceIPField);
+ if (sourceIPByteArray == null) {
+ return document;
+ }
+ byte[] destIPByteArray = resolveIP(document, destinationIPField);
+ if (destIPByteArray == null) {
+ return document;
+ }
+ // source ip and destination ip must have same format, either ipv4 or ipv6
+ if (sourceIPByteArray.length != destIPByteArray.length) {
+ throw new IllegalArgumentException("source ip and destination ip must have same format");
+ }
+
+ // resolve source port and destination port for transport protocols,
+ // exit quietly if either source port or destination port is null or empty
+ Integer sourcePort = null;
+ Integer destinationPort = null;
+ if (protocol.isTransportProtocol()) {
+ sourcePort = resolvePort(document, sourcePortField);
+ if (sourcePort == null) {
+ return document;
+ }
+
+ destinationPort = resolvePort(document, destinationPortField);
+ if (destinationPort == null) {
+ return document;
+ }
+ }
+
+ // resolve ICMP message type and code, support both ipv4 and ipv6
+ // set source port to icmp type, and set dest port to icmp code, so that we can have a generic way to handle
+ // all protocols
+ boolean isOneway = true;
+ final boolean isICMPProtocol = Protocol.ICMP == protocol || Protocol.ICMP_V6 == protocol;
+ if (isICMPProtocol) {
+ Integer icmpType = resolveICMP(document, icmpTypeField, ICMP_MESSAGE_TYPE);
+ if (icmpType == null) {
+ return document;
+ } else {
+ sourcePort = icmpType;
+ }
+
+ // for the message types which don't have code, fetch the equivalent code from the pre-defined mapper,
+ // and they can be considered to be a two-way flow
+ Byte equivalentCode = Protocol.ICMP.getProtocolCode() == protocol.getProtocolCode()
+ ? ICMPType.getEquivalentCode(icmpType.byteValue())
+ : ICMPv6Type.getEquivalentCode(icmpType.byteValue());
+ if (equivalentCode != null) {
+ isOneway = false;
+ // for IPv6-ICMP, the pre-defined code is negative byte,
+ // we need to convert it to positive integer for later comparison
+ destinationPort = Protocol.ICMP.getProtocolCode() == protocol.getProtocolCode()
+ ? Integer.valueOf(equivalentCode)
+ : Byte.toUnsignedInt(equivalentCode);
+ } else {
+ // get icmp code from the document if we cannot get equivalent code from the pre-defined mapper
+ Integer icmpCode = resolveICMP(document, icmpCodeField, ICMP_MESSAGE_CODE);
+ if (icmpCode == null) {
+ return document;
+ } else {
+ destinationPort = icmpCode;
+ }
+ }
+ }
+
+ assert (sourcePort != null && destinationPort != null);
+ boolean isLess = compareIPAndPort(sourceIPByteArray, sourcePort, destIPByteArray, destinationPort);
+ // swap ip and port to remove directionality in the flow tuple, smaller ip:port tuple comes first
+ // but for ICMP and IPv6-ICMP, if it's a one-way flow, the flow tuple is considered to be ordered
+ if (!isLess && (!isICMPProtocol || !isOneway)) {
+ byte[] byteArray = sourceIPByteArray;
+ sourceIPByteArray = destIPByteArray;
+ destIPByteArray = byteArray;
+
+ int tempPort = sourcePort;
+ sourcePort = destinationPort;
+ destinationPort = tempPort;
+ }
+
+ // generate flow hash
+ String digest = generateCommunityIDHash(
+ protocol.getProtocolCode(),
+ sourceIPByteArray,
+ destIPByteArray,
+ sourcePort,
+ destinationPort,
+ seed
+ );
+ document.setFieldValue(targetField, digest);
+ return document;
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ /**
+ * Resolve network protocol
+ * @param document the ingesting document
+ * @return the resolved protocol, null if the resolved protocol is null and ignore_missing is true
+ * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+ * or if the field that is found at the provided path is not of the expected type.
+ */
+ private Protocol resolveProtocol(IngestDocument document) {
+ Protocol protocol = null;
+ Integer ianaProtocolNumber = null;
+ String protocolName = null;
+ if (!Strings.isNullOrEmpty(ianaProtocolNumberField)) {
+ ianaProtocolNumber = document.getFieldValue(ianaProtocolNumberField, Integer.class, true);
+ }
+ if (!Strings.isNullOrEmpty(protocolField)) {
+ protocolName = document.getFieldValue(protocolField, String.class, true);
+ }
+ // if iana protocol number is not specified, then resolve protocol name
+ if (ianaProtocolNumber != null) {
+ if (ianaProtocolNumber >= IANA_COMMON_MIN_NUMBER
+ && ianaProtocolNumber <= IANA_COMMON_MAX_NUMBER
+ && Protocol.protocolCodeMap.containsKey(ianaProtocolNumber.byteValue())) {
+ protocol = Protocol.protocolCodeMap.get(ianaProtocolNumber.byteValue());
+ } else {
+ throw new IllegalArgumentException("unsupported iana protocol number [" + ianaProtocolNumber + "]");
+ }
+ } else if (protocolName != null) {
+ Protocol protocolFromName = Protocol.fromProtocolName(protocolName);
+ if (protocolFromName != null) {
+ protocol = protocolFromName;
+ } else {
+ throw new IllegalArgumentException("unsupported protocol [" + protocolName + "]");
+ }
+ }
+
+ // return null if protocol cannot be resolved and ignore_missing is true
+ if (protocol == null) {
+ if (ignoreMissing) {
+ return null;
+ } else {
+ throw new IllegalArgumentException(
+ "cannot resolve protocol by neither iana protocol number field ["
+ + ianaProtocolNumberField
+ + "] nor protocol name field ["
+ + protocolField
+ + "]"
+ );
+ }
+ }
+ return protocol;
+ }
+
+ /**
+ * Resolve ip address
+ * @param document the ingesting document
+ * @param fieldName the ip field to be resolved
+ * @return the byte array of the resolved ip
+ * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+ * or if the field that is found at the provided path is not of the expected type.
+ */
+ private byte[] resolveIP(IngestDocument document, String fieldName) {
+ if (Strings.isNullOrEmpty(fieldName)) {
+ if (ignoreMissing) {
+ return null;
+ } else {
+ throw new IllegalArgumentException("both source ip field path and destination ip field path cannot be null nor empty");
+ }
+ }
+
+ String ipAddress = document.getFieldValue(fieldName, String.class, true);
+ if (Strings.isNullOrEmpty(ipAddress)) {
+ if (ignoreMissing) {
+ return null;
+ } else {
+ throw new IllegalArgumentException("ip address in the field [" + fieldName + "] is null or empty");
+ }
+ }
+
+ byte[] byteArray = InetAddresses.ipStringToBytes(ipAddress);
+ if (byteArray == null) {
+ throw new IllegalArgumentException(
+ "ip address [" + ipAddress + "] in the field [" + fieldName + "] is not a valid ipv4/ipv6 address"
+ );
+ } else {
+ return byteArray;
+ }
+ }
+
+ /**
+ * Resolve port for transport protocols
+ * @param document the ingesting document
+ * @param fieldName the port field to be resolved
+ * @return the resolved port number, null if the resolved port is null and ignoreMissing is true
+ * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+ * or if the field that is found at the provided path is not of the expected type.
+ */
+ private Integer resolvePort(IngestDocument document, String fieldName) {
+ Integer port;
+ if (Strings.isNullOrEmpty(fieldName)) {
+ if (ignoreMissing) {
+ return null;
+ } else {
+ throw new IllegalArgumentException("both source port and destination port field path cannot be null nor empty");
+ }
+ } else {
+ port = document.getFieldValue(fieldName, Integer.class, true);
+ }
+
+ if (port == null) {
+ if (ignoreMissing) {
+ return null;
+ } else {
+ throw new IllegalArgumentException(
+ "both source port and destination port cannot be null, but port in the field path [" + fieldName + "] is null"
+ );
+ }
+ } else if (port < MIN_PORT || port > MAX_PORT) {
+ throw new IllegalArgumentException(
+ "both source port and destination port must be between 0 and 65535, but port in the field path ["
+ + fieldName
+ + "] is ["
+ + port
+ + "]"
+ );
+ }
+ return port;
+ }
+
+ /**
+ * Resolve ICMP's message type and code field
+ * @param document the ingesting document
+ * @param fieldName name of the type or the code field
+ * @param fieldType type or code
+ * @return the resolved value of the specified field, return null if ignore_missing if true and the field doesn't exist or is null,
+ * @throws IllegalArgumentException only if ignoreMissing is false and the field is null, empty, invalid,
+ * or if the field that is found at the provided path is not of the expected type.
+ */
+ private Integer resolveICMP(IngestDocument document, String fieldName, String fieldType) {
+ if (Strings.isNullOrEmpty(fieldName)) {
+ if (ignoreMissing) {
+ return null;
+ } else {
+ throw new IllegalArgumentException("icmp message " + fieldType + " field path cannot be null nor empty");
+ }
+ }
+ Integer fieldValue = document.getFieldValue(fieldName, Integer.class, true);
+ if (fieldValue == null) {
+ if (ignoreMissing) {
+ return null;
+ } else {
+ throw new IllegalArgumentException("icmp message " + fieldType + " cannot be null");
+ }
+ } else if (fieldValue < IANA_COMMON_MIN_NUMBER || fieldValue > IANA_COMMON_MAX_NUMBER) {
+ throw new IllegalArgumentException("invalid icmp message " + fieldType + " [" + fieldValue + "]");
+ } else {
+ return fieldValue;
+ }
+ }
+
+ /**
+ *
+ * @param protocolCode byte of the protocol number
+ * @param sourceIPByteArray bytes of the source ip in the network flow tuple
+ * @param destIPByteArray bytes of the destination ip in the network flow tuple
+ * @param sourcePort source port in the network flow tuple
+ * @param destinationPort destination port in the network flow tuple
+ * @param seed seed for generating hash
+ * @return the generated hash value, use SHA-1
+ */
+ private String generateCommunityIDHash(
+ byte protocolCode,
+ byte[] sourceIPByteArray,
+ byte[] destIPByteArray,
+ Integer sourcePort,
+ Integer destinationPort,
+ int seed
+ ) {
+ MessageDigest messageDigest = MessageDigests.sha1();
+ messageDigest.update(intToTwoByteArray(seed));
+ messageDigest.update(sourceIPByteArray);
+ messageDigest.update(destIPByteArray);
+ messageDigest.update(protocolCode);
+ messageDigest.update(PADDING_BYTE);
+ messageDigest.update(intToTwoByteArray(sourcePort));
+ messageDigest.update(intToTwoByteArray(destinationPort));
+
+ return COMMUNITY_ID_HASH_VERSION + ":" + Base64.getEncoder().encodeToString(messageDigest.digest());
+ }
+
+ /**
+ * Convert an integer to two byte array
+ * @param val the integer which will be consumed to produce a two byte array
+ * @return the two byte array
+ */
+ private byte[] intToTwoByteArray(Integer val) {
+ byte[] byteArray = new byte[2];
+ byteArray[0] = Integer.valueOf(val >>> 8).byteValue();
+ byteArray[1] = val.byteValue();
+ return byteArray;
+ }
+
+ /**
+ * Compare the ip and port, return true if the flow tuple is ordered
+ * @param sourceIPByteArray bytes of the source ip in the network flow tuple
+ * @param destIPByteArray bytes of the destination ip in the network flow tuple
+ * @param sourcePort source port in the network flow tuple
+ * @param destinationPort destination port in the network flow tuple
+ * @return true if sourceIP is less than destinationIP or sourceIP equals to destinationIP
+ * but sourcePort is less than destinationPort
+ */
+ private boolean compareIPAndPort(byte[] sourceIPByteArray, int sourcePort, byte[] destIPByteArray, int destinationPort) {
+ int compareResult = compareByteArray(sourceIPByteArray, destIPByteArray);
+ return compareResult < 0 || compareResult == 0 && sourcePort < destinationPort;
+ }
+
+ /**
+ * Compare two byte array which have same length
+ * @param byteArray1 the first byte array to compare
+ * @param byteArray2 the second byte array to compare
+ * @return 0 if each byte in both two arrays are same, a value less than 0 if byte in the first array is less than
+ * the byte at the same index, a value greater than 0 if byte in the first array is greater than the byte at the same index
+ */
+ private int compareByteArray(byte[] byteArray1, byte[] byteArray2) {
+ assert (byteArray1.length == byteArray2.length);
+ int i = 0;
+ int j = 0;
+ while (i < byteArray1.length && j < byteArray2.length) {
+ int isLess = Byte.compareUnsigned(byteArray1[i], byteArray2[j]);
+ if (isLess == 0) {
+ i++;
+ j++;
+ } else {
+ return isLess;
+ }
+ }
+ return 0;
+ }
+
/**
 * Mapping of ICMP message types to the message type of the matching reply/request,
 * giving a port-like notion for ordering the request or response.
 */
enum ICMPType {
    ECHO_REPLY((byte) 0, (byte) 8),
    ECHO((byte) 8, (byte) 0),
    RTR_ADVERT((byte) 9, (byte) 10),
    RTR_SOLICIT((byte) 10, (byte) 9),
    TSTAMP((byte) 13, (byte) 14),
    TSTAMP_REPLY((byte) 14, (byte) 13),
    INFO((byte) 15, (byte) 16),
    INFO_REPLY((byte) 16, (byte) 15),
    MASK((byte) 17, (byte) 18),
    MASK_REPLY((byte) 18, (byte) 17);

    // the ICMP message type of this variant
    private final byte type;
    // the message type of the equivalent counterpart (e.g. ECHO <-> ECHO_REPLY)
    private final byte code;

    ICMPType(byte type, byte code) {
        this.type = type;
        this.code = code;
    }

    // lookup table from message type to its equivalent counterpart type
    private static final Map<Byte, Byte> ICMPTypeMapper = Arrays.stream(values()).collect(Collectors.toMap(t -> t.type, t -> t.code));

    /**
     * Takes the message type of ICMP and derives the equivalent message code.
     *
     * @param type the message type of ICMP
     * @return the equivalent message code, or null if the type is not mapped
     */
    public static Byte getEquivalentCode(int type) {
        return ICMPTypeMapper.get(Integer.valueOf(type).byteValue());
    }
}
+
/**
 * Mapping of IPv6-ICMP message types to the message type of the matching reply/request,
 * giving a port-like notion for ordering the request or response.
 */
enum ICMPv6Type {
    ECHO_REQUEST((byte) 128, (byte) 129),
    ECHO_REPLY((byte) 129, (byte) 128),
    MLD_LISTENER_QUERY((byte) 130, (byte) 131),
    MLD_LISTENER_REPORT((byte) 131, (byte) 130),
    ND_ROUTER_SOLICIT((byte) 133, (byte) 134),
    ND_ROUTER_ADVERT((byte) 134, (byte) 133),
    ND_NEIGHBOR_SOLICIT((byte) 135, (byte) 136),
    ND_NEIGHBOR_ADVERT((byte) 136, (byte) 135),
    WRU_REQUEST((byte) 139, (byte) 140),
    WRU_REPLY((byte) 140, (byte) 139),
    HAAD_REQUEST((byte) 144, (byte) 145),
    HAAD_REPLY((byte) 145, (byte) 144);

    // the IPv6-ICMP message type of this variant (values >= 128 wrap to negative bytes)
    private final byte type;
    // the message type of the equivalent counterpart (e.g. ECHO_REQUEST <-> ECHO_REPLY)
    private final byte code;

    ICMPv6Type(byte type, byte code) {
        this.type = type;
        this.code = code;
    }

    // lookup table from message type to its equivalent counterpart type
    private static final Map<Byte, Byte> ICMPTypeMapper = Arrays.stream(values()).collect(Collectors.toMap(t -> t.type, t -> t.code));

    /**
     * Takes the message type of IPv6-ICMP and derives the equivalent message code.
     *
     * @param type the message type of IPv6-ICMP
     * @return the equivalent message code, or null if the type is not mapped
     */
    public static Byte getEquivalentCode(int type) {
        return ICMPTypeMapper.get(Integer.valueOf(type).byteValue());
    }
}
+
/**
 * An enumeration of the supported network protocols, keyed by IANA protocol number.
 * Transport protocols (tcp/udp/sctp) carry ports; icmp variants derive a port-like
 * value from their message type/code instead.
 */
enum Protocol {
    ICMP((byte) 1, false),
    TCP((byte) 6, true),
    UDP((byte) 17, true),
    ICMP_V6((byte) 58, false),
    SCTP((byte) 132, true);

    // IANA-assigned protocol number, stored as a byte (132 wraps to a negative value)
    private final byte protocolCode;
    // true if the protocol uses source/destination ports
    private final boolean isTransportProtocol;

    Protocol(int ianaNumber, boolean isTransportProtocol) {
        this.protocolCode = Integer.valueOf(ianaNumber).byteValue();
        this.isTransportProtocol = isTransportProtocol;
    }

    /** Lookup table from IANA protocol number (as byte) to the enum constant. */
    public static final Map<Byte, Protocol> protocolCodeMap = Arrays.stream(values())
        .collect(Collectors.toMap(Protocol::getProtocolCode, p -> p));

    /**
     * Resolves a protocol from its case-insensitive name.
     *
     * @param protocolName the protocol name, e.g. "tcp" or "ipv6-icmp"
     * @return the matching protocol, or null if the name is not supported
     */
    public static Protocol fromProtocolName(String protocolName) {
        String name = protocolName.toUpperCase(Locale.ROOT);
        // "IPV6-ICMP" contains a dash and cannot be an enum constant name, so map it explicitly
        if (name.equals("IPV6-ICMP")) {
            return Protocol.ICMP_V6;
        }
        try {
            return valueOf(name);
        } catch (IllegalArgumentException e) {
            return null;
        }
    }

    public byte getProtocolCode() {
        return this.protocolCode;
    }

    public boolean isTransportProtocol() {
        return this.isTransportProtocol;
    }
}
+
+ public static class Factory implements Processor.Factory {
+ @Override
+ public CommunityIdProcessor create(
+ Map registry,
+ String processorTag,
+ String description,
+ Map config
+ ) throws Exception {
+ String sourceIPField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "source_ip_field");
+ String sourcePortField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "source_port_field");
+ String destinationIPField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "destination_ip_field");
+ String destinationPortField = ConfigurationUtils.readOptionalStringProperty(
+ TYPE,
+ processorTag,
+ config,
+ "destination_port_field"
+ );
+ String ianaProtocolNumberField = ConfigurationUtils.readOptionalStringProperty(
+ TYPE,
+ processorTag,
+ config,
+ "iana_protocol_number_field"
+ );
+ String protocolField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "protocol_field");
+ String icmpTypeField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "icmp_type_field");
+ String icmpCodeField = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "icmp_code_field");
+ int seed = ConfigurationUtils.readIntProperty(TYPE, processorTag, config, "seed", 0);
+ if (seed < MIN_SEED || seed > MAX_SEED) {
+ throw newConfigurationException(TYPE, processorTag, "seed", "seed must be between 0 and 65535");
+ }
+
+ String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", "community_id");
+ boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false);
+
+ return new CommunityIdProcessor(
+ processorTag,
+ description,
+ sourceIPField,
+ sourcePortField,
+ destinationIPField,
+ destinationPortField,
+ ianaProtocolNumberField,
+ protocolField,
+ icmpTypeField,
+ icmpCodeField,
+ seed,
+ targetField,
+ ignoreMissing
+ );
+ }
+ }
+}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
index 7c1b4841122b0..0f8b248fd5af8 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java
@@ -107,6 +107,8 @@ public Map getProcessors(Processor.Parameters paramet
processors.put(HtmlStripProcessor.TYPE, new HtmlStripProcessor.Factory());
processors.put(CsvProcessor.TYPE, new CsvProcessor.Factory());
processors.put(CopyProcessor.TYPE, new CopyProcessor.Factory(parameters.scriptService));
+ processors.put(RemoveByPatternProcessor.TYPE, new RemoveByPatternProcessor.Factory());
+ processors.put(CommunityIdProcessor.TYPE, new CommunityIdProcessor.Factory());
return Collections.unmodifiableMap(processors);
}
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java
new file mode 100644
index 0000000000000..da87f5201db72
--- /dev/null
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java
@@ -0,0 +1,180 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.common.Nullable;
+import org.opensearch.common.ValidationException;
+import org.opensearch.common.regex.Regex;
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException;
+
+/**
+ * Processor that removes existing fields by field patterns or excluding field patterns.
+ */
+public final class RemoveByPatternProcessor extends AbstractProcessor {
+
+ public static final String TYPE = "remove_by_pattern";
+ private final List fieldPatterns;
+ private final List excludeFieldPatterns;
+
+ RemoveByPatternProcessor(
+ String tag,
+ String description,
+ @Nullable List fieldPatterns,
+ @Nullable List excludeFieldPatterns
+ ) {
+ super(tag, description);
+ if (fieldPatterns != null && excludeFieldPatterns != null || fieldPatterns == null && excludeFieldPatterns == null) {
+ throw new IllegalArgumentException("either fieldPatterns and excludeFieldPatterns must be set");
+ }
+ if (fieldPatterns == null) {
+ this.fieldPatterns = null;
+ this.excludeFieldPatterns = new ArrayList<>(excludeFieldPatterns);
+ } else {
+ this.fieldPatterns = new ArrayList<>(fieldPatterns);
+ this.excludeFieldPatterns = null;
+ }
+ }
+
+ public List getFieldPatterns() {
+ return fieldPatterns;
+ }
+
+ public List getExcludeFieldPatterns() {
+ return excludeFieldPatterns;
+ }
+
+ @Override
+ public IngestDocument execute(IngestDocument document) {
+ Set existingFields = new HashSet<>(document.getSourceAndMetadata().keySet());
+ Set metadataFields = document.getMetadata()
+ .keySet()
+ .stream()
+ .map(IngestDocument.Metadata::getFieldName)
+ .collect(Collectors.toSet());
+
+ if (fieldPatterns != null && !fieldPatterns.isEmpty()) {
+ existingFields.forEach(field -> {
+ // ignore metadata fields such as _index, _id, etc.
+ if (!metadataFields.contains(field)) {
+ final boolean matched = fieldPatterns.stream().anyMatch(pattern -> Regex.simpleMatch(pattern, field));
+ if (matched) {
+ document.removeField(field);
+ }
+ }
+ });
+ }
+
+ if (excludeFieldPatterns != null && !excludeFieldPatterns.isEmpty()) {
+ existingFields.forEach(field -> {
+ // ignore metadata fields such as _index, _id, etc.
+ if (!metadataFields.contains(field)) {
+ final boolean matched = excludeFieldPatterns.stream().anyMatch(pattern -> Regex.simpleMatch(pattern, field));
+ if (!matched) {
+ document.removeField(field);
+ }
+ }
+ });
+ }
+
+ return document;
+ }
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+ public static final class Factory implements Processor.Factory {
+
+ public Factory() {}
+
+ @Override
+ public RemoveByPatternProcessor create(
+ Map registry,
+ String processorTag,
+ String description,
+ Map config
+ ) throws Exception {
+ final List fieldPatterns = new ArrayList<>();
+ final List excludeFieldPatterns = new ArrayList<>();
+ final Object fieldPattern = ConfigurationUtils.readOptionalObject(config, "field_pattern");
+ final Object excludeFieldPattern = ConfigurationUtils.readOptionalObject(config, "exclude_field_pattern");
+
+ if (fieldPattern == null && excludeFieldPattern == null || fieldPattern != null && excludeFieldPattern != null) {
+ throw newConfigurationException(
+ TYPE,
+ processorTag,
+ "field_pattern",
+ "either field_pattern or exclude_field_pattern must be set"
+ );
+ }
+
+ if (fieldPattern != null) {
+ if (fieldPattern instanceof List) {
+ @SuppressWarnings("unchecked")
+ List fieldPatternList = (List) fieldPattern;
+ fieldPatterns.addAll(fieldPatternList);
+ } else {
+ fieldPatterns.add((String) fieldPattern);
+ }
+ validateFieldPatterns(processorTag, fieldPatterns, "field_pattern");
+ return new RemoveByPatternProcessor(processorTag, description, fieldPatterns, null);
+ } else {
+ if (excludeFieldPattern instanceof List) {
+ @SuppressWarnings("unchecked")
+ List excludeFieldPatternList = (List) excludeFieldPattern;
+ excludeFieldPatterns.addAll(excludeFieldPatternList);
+ } else {
+ excludeFieldPatterns.add((String) excludeFieldPattern);
+ }
+ validateFieldPatterns(processorTag, excludeFieldPatterns, "exclude_field_pattern");
+ return new RemoveByPatternProcessor(processorTag, description, null, excludeFieldPatterns);
+ }
+ }
+
+ private void validateFieldPatterns(String processorTag, List patterns, String patternKey) {
+ List validationErrors = new ArrayList<>();
+ for (String fieldPattern : patterns) {
+ if (fieldPattern.contains("#")) {
+ validationErrors.add(patternKey + " [" + fieldPattern + "] must not contain a '#'");
+ }
+ if (fieldPattern.contains(":")) {
+ validationErrors.add(patternKey + " [" + fieldPattern + "] must not contain a ':'");
+ }
+ if (fieldPattern.startsWith("_")) {
+ validationErrors.add(patternKey + " [" + fieldPattern + "] must not start with '_'");
+ }
+ if (Strings.validFileNameExcludingAstrix(fieldPattern) == false) {
+ validationErrors.add(
+ patternKey + " [" + fieldPattern + "] must not contain the following characters " + Strings.INVALID_FILENAME_CHARS
+ );
+ }
+ }
+
+ if (validationErrors.size() > 0) {
+ ValidationException validationException = new ValidationException();
+ validationException.addValidationErrors(validationErrors);
+ throw newConfigurationException(TYPE, processorTag, patternKey, validationException.getMessage());
+ }
+ }
+ }
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorFactoryTests.java
new file mode 100644
index 0000000000000..5edb44b8c64f2
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorFactoryTests.java
@@ -0,0 +1,117 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.OpenSearchException;
+import org.opensearch.OpenSearchParseException;
+import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+public class CommunityIdProcessorFactoryTests extends OpenSearchTestCase {
+ private CommunityIdProcessor.Factory factory;
+
+ @Before
+ public void init() {
+ factory = new CommunityIdProcessor.Factory();
+ }
+
+ public void testCreate() throws Exception {
+ boolean ignoreMissing = randomBoolean();
+ int seed = randomIntBetween(0, 65535);
+ Map config = new HashMap<>();
+ config.put("source_ip_field", "source_ip");
+ config.put("source_port_field", "source_port");
+ config.put("destination_ip_field", "destination_ip");
+ config.put("destination_port_field", "destination_port");
+ config.put("iana_protocol_number_field", "iana_protocol_number");
+ config.put("protocol_field", "protocol");
+ config.put("icmp_type_field", "icmp_type");
+ config.put("icmp_code_field", "icmp_code");
+ config.put("seed", seed);
+ config.put("target_field", "community_id_hash");
+ config.put("ignore_missing", ignoreMissing);
+ String processorTag = randomAlphaOfLength(10);
+ CommunityIdProcessor communityIDProcessor = factory.create(null, processorTag, null, config);
+ assertThat(communityIDProcessor.getTag(), equalTo(processorTag));
+ assertThat(communityIDProcessor.getSourceIPField(), equalTo("source_ip"));
+ assertThat(communityIDProcessor.getSourcePortField(), equalTo("source_port"));
+ assertThat(communityIDProcessor.getDestinationIPField(), equalTo("destination_ip"));
+ assertThat(communityIDProcessor.getDestinationPortField(), equalTo("destination_port"));
+ assertThat(communityIDProcessor.getIANAProtocolNumberField(), equalTo("iana_protocol_number"));
+ assertThat(communityIDProcessor.getProtocolField(), equalTo("protocol"));
+ assertThat(communityIDProcessor.getIcmpTypeField(), equalTo("icmp_type"));
+ assertThat(communityIDProcessor.getIcmpCodeField(), equalTo("icmp_code"));
+ assertThat(communityIDProcessor.getSeed(), equalTo(seed));
+ assertThat(communityIDProcessor.getTargetField(), equalTo("community_id_hash"));
+ assertThat(communityIDProcessor.isIgnoreMissing(), equalTo(ignoreMissing));
+ }
+
+ public void testCreateWithSourceIPField() throws Exception {
+ Map config = new HashMap<>();
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[source_ip_field] required property is missing"));
+ }
+
+ config.put("source_ip_field", null);
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[source_ip_field] required property is missing"));
+ }
+ }
+
+ public void testCreateWithDestinationIPField() throws Exception {
+ Map config = new HashMap<>();
+ config.put("source_ip_field", "source_ip");
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[destination_ip_field] required property is missing"));
+ }
+
+ config.put("source_ip_field", "source_ip");
+ config.put("destination_ip_field", null);
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchParseException e) {
+ assertThat(e.getMessage(), equalTo("[destination_ip_field] required property is missing"));
+ }
+ }
+
+ public void testInvalidSeed() throws Exception {
+ Map config = new HashMap<>();
+ int seed;
+ if (randomBoolean()) {
+ seed = -1;
+ } else {
+ seed = 65536;
+ }
+ config.put("source_ip_field", "source_ip");
+ config.put("destination_ip_field", "destination_ip");
+ config.put("seed", seed);
+ try {
+ factory.create(null, null, null, config);
+ fail("factory create should have failed");
+ } catch (OpenSearchException e) {
+ assertThat(e.getMessage(), equalTo("[seed] seed must be between 0 and 65535"));
+ }
+ }
+
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorTests.java
new file mode 100644
index 0000000000000..2bda9db80dbcc
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CommunityIdProcessorTests.java
@@ -0,0 +1,910 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+import org.opensearch.ingest.RandomDocumentPicks;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class CommunityIdProcessorTests extends OpenSearchTestCase {
+
+ public void testResolveProtocol() throws Exception {
+ Map source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+ String targetFieldName = randomAlphaOfLength(100);
+ boolean ignore_missing = randomBoolean();
+ Processor processor = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ null,
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ ignore_missing
+ );
+ if (ignore_missing) {
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetFieldName), equalTo(false));
+ } else {
+ assertThrows(
+ "cannot resolve protocol by neither iana protocol number field [iana_protocol_number] nor protocol name field [protocol]",
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+ }
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ String protocol = randomAlphaOfLength(10);
+ source.put("protocol", protocol);
+ IngestDocument ingestDocumentWithProtocol = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithProtocol = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ randomBoolean()
+ );
+ assertThrows(
+ "unsupported protocol [" + protocol + "]",
+ IllegalArgumentException.class,
+ () -> processorWithProtocol.execute(ingestDocumentWithProtocol)
+ );
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ int ianaProtocolNumber = randomIntBetween(1000, 10000);
+ source.put("iana_protocol_number", ianaProtocolNumber);
+ IngestDocument ingestDocumentWithProtocolNumber = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+ Processor processorWithProtocolNumber = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ null,
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ randomBoolean()
+ );
+ assertThrows(
+ "unsupported iana protocol number [" + ianaProtocolNumber + "]",
+ IllegalArgumentException.class,
+ () -> processorWithProtocolNumber.execute(ingestDocumentWithProtocolNumber)
+ );
+ }
+
+ public void testResolveIPAndPort() throws Exception {
+ Map source = new HashMap<>();
+ source.put("source_ip", "");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ source.put("protocol", "tcp");
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+ String targetFieldName = randomAlphaOfLength(100);
+ boolean ignore_missing = randomBoolean();
+ Processor processor = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ null,
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ ignore_missing
+ );
+ if (ignore_missing) {
+ processor.execute(ingestDocument);
+ assertThat(ingestDocument.hasField(targetFieldName), equalTo(false));
+ } else {
+ assertThrows(
+ "ip address in the field [source_ip] is null or empty",
+ IllegalArgumentException.class,
+ () -> processor.execute(ingestDocument)
+ );
+ }
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ source.put("protocol", "tcp");
+ IngestDocument ingestDocumentWithInvalidSourceIP = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithInvalidSourceIP = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ randomBoolean()
+ );
+
+ assertThrows(
+ "ip address in the field [source_ip] is not a valid ipv4/ipv6 address",
+ IllegalArgumentException.class,
+ () -> processorWithInvalidSourceIP.execute(ingestDocumentWithInvalidSourceIP)
+ );
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ source.put("protocol", "tcp");
+ ignore_missing = randomBoolean();
+ IngestDocument ingestDocumentWithEmptyDestIP = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithEmptyDestIP = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ ignore_missing
+ );
+ if (ignore_missing) {
+ processorWithEmptyDestIP.execute(ingestDocumentWithEmptyDestIP);
+ assertThat(ingestDocumentWithEmptyDestIP.hasField(targetFieldName), equalTo(false));
+ } else {
+ assertThrows(
+ "ip address in the field [destination_ip] is null or empty",
+ IllegalArgumentException.class,
+ () -> processorWithEmptyDestIP.execute(ingestDocumentWithEmptyDestIP)
+ );
+ }
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ source.put("protocol", "tcp");
+ IngestDocument ingestDocumentWithInvalidDestIP = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithInvalidDestIP = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ randomBoolean()
+ );
+ assertThrows(
+ "ip address in the field [destination_ip] is not a valid ipv4/ipv6 address",
+ IllegalArgumentException.class,
+ () -> processorWithInvalidDestIP.execute(ingestDocumentWithInvalidDestIP)
+ );
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", 2000);
+ source.put("protocol", "tcp");
+ ignore_missing = randomBoolean();
+ IngestDocument normalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithEmptySourceIPFieldPath = createCommunityIdProcessor(
+ "",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ ignore_missing
+ );
+ if (ignore_missing) {
+ processorWithEmptySourceIPFieldPath.execute(normalIngestDocument);
+ assertThat(normalIngestDocument.hasField(targetFieldName), equalTo(false));
+ } else {
+ assertThrows(
+ "both source ip field path and destination ip field path cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> processorWithEmptySourceIPFieldPath.execute(normalIngestDocument)
+ );
+ }
+ ignore_missing = randomBoolean();
+ Processor processorWithEmptyDestIPFieldPath = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ ignore_missing
+ );
+ if (ignore_missing) {
+ processorWithEmptyDestIPFieldPath.execute(normalIngestDocument);
+ assertThat(normalIngestDocument.hasField(targetFieldName), equalTo(false));
+ } else {
+ assertThrows(
+ "both source ip field path and destination ip field path cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> processorWithEmptyDestIPFieldPath.execute(normalIngestDocument)
+ );
+ }
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", null);
+ source.put("destination_port", 2000);
+ source.put("protocol", "tcp");
+ ignore_missing = randomBoolean();
+ IngestDocument ingestDocumentWithEmptySourcePort = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithEmptySourcePort = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ ignore_missing
+ );
+ if (ignore_missing) {
+ processorWithEmptySourcePort.execute(ingestDocumentWithEmptySourcePort);
+ assertThat(ingestDocumentWithEmptySourcePort.hasField(targetFieldName), equalTo(false));
+ } else {
+ assertThrows(
+ "both source port and destination port field path cannot be null nor empty",
+ IllegalArgumentException.class,
+ () -> processorWithEmptySourcePort.execute(ingestDocumentWithEmptySourcePort)
+ );
+ }
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 65536);
+ source.put("destination_port", 2000);
+ source.put("protocol", "tcp");
+ IngestDocument ingestDocumentWithInvalidSourcePort = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithInvalidSourcePort = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ randomBoolean()
+ );
+ assertThrows(
+ "both source port and destination port must be between 0 and 65535, but port in the field path [source_port] is [65536]",
+ IllegalArgumentException.class,
+ () -> processorWithInvalidSourcePort.execute(ingestDocumentWithInvalidSourcePort)
+ );
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", null);
+ source.put("protocol", "tcp");
+ ignore_missing = randomBoolean();
+ IngestDocument ingestDocumentWithEmptyDestPort = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithEmptyDestPort = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ ignore_missing
+ );
+ if (ignore_missing) {
+ processorWithEmptyDestPort.execute(ingestDocumentWithEmptyDestPort);
+ assertThat(ingestDocumentWithEmptyDestPort.hasField(targetFieldName), equalTo(false));
+ } else {
+ assertThrows(
+ "both source port and destination port cannot be null, but port in the field path [destination_port] is null",
+ IllegalArgumentException.class,
+ () -> processorWithEmptyDestPort.execute(ingestDocumentWithEmptyDestPort)
+ );
+ }
+
+ source = new HashMap<>();
+ source.put("source_ip", "1.1.1.1");
+ source.put("destination_ip", "2.2.2.2");
+ source.put("source_port", 1000);
+ source.put("destination_port", -1);
+ source.put("protocol", "tcp");
+ IngestDocument ingestDocumentWithInvalidDestPort = RandomDocumentPicks.randomIngestDocument(random(), source);
+ Processor processorWithInvalidDestPort = createCommunityIdProcessor(
+ "source_ip",
+ "source_port",
+ "destination_ip",
+ "destination_port",
+ "iana_protocol_number",
+ "protocol",
+ null,
+ null,
+ randomIntBetween(0, 65535),
+ targetFieldName,
+ randomBoolean()
+ );
+ assertThrows(
+ "both source port and destination port cannot be null, but port in the field path [destination_port] is [-1]",
+ IllegalArgumentException.class,
+ () -> processorWithInvalidDestPort.execute(ingestDocumentWithInvalidDestPort)
+ );
+ }
+
+    // Exercises validation of the ICMP type/code configuration and values: a missing field
+    // path, a null value, and an out-of-range value, for both the icmp type and the icmp code.
+    public void testResolveICMPTypeAndCode() throws Exception {
+        // ICMP flow but no icmp type field path configured on the processor
+        Map<String, Object> source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        // 1 = ICMP, 58 = ICMPv6
+        int protocolNumber = randomFrom(1, 58);
+        source.put("iana_protocol_number", protocolNumber);
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+        String targetFieldName = randomAlphaOfLength(100);
+        boolean ignoreMissing = randomBoolean();
+        Processor processor = createCommunityIdProcessor(
+            "source_ip",
+            "source_port",
+            "destination_ip",
+            "destination_port",
+            "iana_protocol_number",
+            null,
+            null,
+            null,
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            ignoreMissing
+        );
+        if (ignoreMissing) {
+            processor.execute(ingestDocument);
+            assertThat(ingestDocument.hasField(targetFieldName), equalTo(false));
+        } else {
+            assertThrows(
+                "icmp message type field path cannot be null nor empty",
+                IllegalArgumentException.class,
+                () -> processor.execute(ingestDocument)
+            );
+        }
+
+        // icmp type field path configured, but the value in the document is null
+        source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        protocolNumber = randomFrom(1, 58);
+        source.put("iana_protocol_number", protocolNumber);
+        source.put("icmp_type", null);
+        IngestDocument ingestDocumentWithNullType = RandomDocumentPicks.randomIngestDocument(random(), source);
+        ignoreMissing = randomBoolean();
+        Processor processorWithNullType = createCommunityIdProcessor(
+            "source_ip",
+            "source_port",
+            "destination_ip",
+            "destination_port",
+            "iana_protocol_number",
+            "protocol",
+            "icmp_type",
+            null,
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            ignoreMissing
+        );
+        if (ignoreMissing) {
+            processorWithNullType.execute(ingestDocumentWithNullType);
+            assertThat(ingestDocumentWithNullType.hasField(targetFieldName), equalTo(false));
+        } else {
+            assertThrows(
+                "icmp message type cannot be null nor empty",
+                IllegalArgumentException.class,
+                () -> processorWithNullType.execute(ingestDocumentWithNullType)
+            );
+        }
+
+        // an icmp type that is negative or greater than 255 is rejected regardless of ignore_missing
+        source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        protocolNumber = randomFrom(1, 58);
+        source.put("iana_protocol_number", protocolNumber);
+        int icmpType;
+        if (randomBoolean()) {
+            icmpType = randomIntBetween(256, 1000);
+        } else {
+            icmpType = randomIntBetween(-100, -1);
+        }
+        source.put("icmp_type", icmpType);
+        IngestDocument ingestDocumentWithInvalidICMPType = RandomDocumentPicks.randomIngestDocument(random(), source);
+        Processor processorWithInvalidICMPType = createCommunityIdProcessor(
+            "source_ip",
+            "source_port",
+            "destination_ip",
+            "destination_port",
+            "iana_protocol_number",
+            "protocol",
+            "icmp_type",
+            null,
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            false
+        );
+        assertThrows(
+            "invalid icmp message type [" + icmpType + "]",
+            IllegalArgumentException.class,
+            () -> processorWithInvalidICMPType.execute(ingestDocumentWithInvalidICMPType)
+        );
+
+        // a type that requires a code, but no icmp code field path is configured
+        source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        protocolNumber = randomFrom(1, 58);
+        source.put("iana_protocol_number", protocolNumber);
+        if (protocolNumber == 1) {
+            icmpType = randomIntBetween(3, 6);
+        } else {
+            icmpType = randomIntBetween(146, 161);
+        }
+        source.put("icmp_type", icmpType);
+        IngestDocument ingestDocumentWithNoCode = RandomDocumentPicks.randomIngestDocument(random(), source);
+        ignoreMissing = randomBoolean();
+        Processor processorWithNoCode = createCommunityIdProcessor(
+            "source_ip",
+            null,
+            "destination_ip",
+            null,
+            "iana_protocol_number",
+            "protocol",
+            "icmp_type",
+            null,
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            ignoreMissing
+        );
+        if (ignoreMissing) {
+            processorWithNoCode.execute(ingestDocumentWithNoCode);
+            assertThat(ingestDocumentWithNoCode.hasField(targetFieldName), equalTo(false));
+        } else {
+            assertThrows(
+                "icmp message code field path cannot be null nor empty",
+                IllegalArgumentException.class,
+                () -> processorWithNoCode.execute(ingestDocumentWithNoCode)
+            );
+        }
+
+        // icmp code field path configured, but the value in the document is null
+        source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        protocolNumber = randomFrom(1, 58);
+        source.put("iana_protocol_number", protocolNumber);
+        if (protocolNumber == 1) {
+            icmpType = randomIntBetween(3, 6);
+        } else {
+            icmpType = randomIntBetween(146, 161);
+        }
+        source.put("icmp_type", icmpType);
+        source.put("icmp_code", null);
+        IngestDocument ingestDocumentWithNullCode = RandomDocumentPicks.randomIngestDocument(random(), source);
+        ignoreMissing = randomBoolean();
+        Processor processorWithNullCode = createCommunityIdProcessor(
+            "source_ip",
+            null,
+            "destination_ip",
+            null,
+            "iana_protocol_number",
+            "protocol",
+            "icmp_type",
+            "icmp_code",
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            ignoreMissing
+        );
+        if (ignoreMissing) {
+            processorWithNullCode.execute(ingestDocumentWithNullCode);
+            assertThat(ingestDocumentWithNullCode.hasField(targetFieldName), equalTo(false));
+        } else {
+            assertThrows(
+                "icmp message code cannot be null nor empty",
+                IllegalArgumentException.class,
+                () -> processorWithNullCode.execute(ingestDocumentWithNullCode)
+            );
+        }
+
+        // an icmp code that is negative or greater than 255 is rejected regardless of ignore_missing
+        source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        protocolNumber = randomFrom(1, 58);
+        source.put("iana_protocol_number", protocolNumber);
+        if (protocolNumber == 1) {
+            icmpType = randomIntBetween(3, 6);
+        } else {
+            icmpType = randomIntBetween(146, 161);
+        }
+        source.put("icmp_type", icmpType);
+        int icmpCode;
+        if (randomBoolean()) {
+            icmpCode = randomIntBetween(256, 1000);
+        } else {
+            icmpCode = randomIntBetween(-100, -1);
+        }
+        source.put("icmp_code", icmpCode);
+        IngestDocument ingestDocumentWithInvalidCode = RandomDocumentPicks.randomIngestDocument(random(), source);
+        Processor processorWithInvalidCode = createCommunityIdProcessor(
+            "source_ip",
+            null,
+            "destination_ip",
+            null,
+            "iana_protocol_number",
+            null,
+            "icmp_type",
+            "icmp_code",
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            randomBoolean()
+        );
+        assertThrows(
+            "invalid icmp message code [" + icmpCode + "]",
+            IllegalArgumentException.class,
+            () -> processorWithInvalidCode.execute(ingestDocumentWithInvalidCode)
+        );
+    }
+
+    // Happy path for transport protocols: a community id is produced for TCP/UDP/SCTP flows,
+    // whether the protocol is given by name ("protocol") or by IANA number ("iana_number").
+    public void testTransportProtocols() throws Exception {
+        Map<String, Object> source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        source.put("source_port", 1000);
+        source.put("destination_port", 2000);
+        boolean isProtocolNameSpecified = randomBoolean();
+        if (isProtocolNameSpecified) {
+            source.put("protocol", randomFrom("tcp", "udp", "sctp"));
+        } else {
+            // 6 = TCP, 17 = UDP, 132 = SCTP
+            source.put("iana_number", randomFrom(6, 17, 132));
+        }
+
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+        String targetFieldName = randomAlphaOfLength(100);
+        Processor processor;
+        if (isProtocolNameSpecified) {
+            processor = createCommunityIdProcessor(
+                "source_ip",
+                "source_port",
+                "destination_ip",
+                "destination_port",
+                null,
+                "protocol",
+                null,
+                null,
+                randomIntBetween(0, 65535),
+                targetFieldName,
+                randomBoolean()
+            );
+        } else {
+            processor = createCommunityIdProcessor(
+                "source_ip",
+                "source_port",
+                "destination_ip",
+                "destination_port",
+                "iana_number",
+                null,
+                null,
+                null,
+                randomIntBetween(0, 65535),
+                targetFieldName,
+                randomBoolean()
+            );
+        }
+
+        processor.execute(ingestDocument);
+        assertThat(ingestDocument.hasField(targetFieldName), equalTo(true));
+        // every hash emitted by the processor is a version 1 community id ("1:" prefix)
+        String communityIDHash = ingestDocument.getFieldValue(targetFieldName, String.class);
+        assertThat(communityIDHash.startsWith("1:"), equalTo(true));
+    }
+
+    // Happy path for ICMP/ICMPv6 flows: types that need no code, and one-way flows where an
+    // icmp code is supplied alongside the type; both produce a version 1 community id.
+    public void testICMP() throws Exception {
+        Map<String, Object> source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        boolean isICMP = randomBoolean();
+        if (isICMP) {
+            source.put("protocol", "icmp");
+            source.put("type", randomFrom(0, 8, 9, 10, 13, 15, 17, 18));
+        } else {
+            source.put("protocol", "ipv6-icmp");
+            source.put("type", randomFrom(128, 129, 130, 131, 133, 134, 135, 136, 139, 140, 144, 145));
+        }
+
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+        String targetFieldName = randomAlphaOfLength(100);
+        Processor processor = createCommunityIdProcessor(
+            "source_ip",
+            null,
+            "destination_ip",
+            null,
+            null,
+            "protocol",
+            "type",
+            null,
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            randomBoolean()
+        );
+
+        processor.execute(ingestDocument);
+        assertThat(ingestDocument.hasField(targetFieldName), equalTo(true));
+        assertThat(ingestDocument.getFieldValue(targetFieldName, String.class).startsWith("1:"), equalTo(true));
+
+        // one-way flow: an icmp code is configured and hashed together with the type
+        source = new HashMap<>();
+        source.put("source_ip", "1.1.1.1");
+        source.put("destination_ip", "2.2.2.2");
+        isICMP = randomBoolean();
+        if (isICMP) {
+            source.put("protocol", "icmp");
+            // see https://www.iana.org/assignments/icmp-parameters/icmp-parameters.xhtml#icmp-parameters-codes-5
+            source.put("type", randomIntBetween(3, 6));
+            source.put("code", 0);
+        } else {
+            source.put("protocol", "ipv6-icmp");
+            // see https://www.iana.org/assignments/icmpv6-parameters/icmpv6-parameters.xhtml#icmpv6-parameters-codes-23
+            source.put("type", randomIntBetween(146, 161));
+            source.put("code", 0);
+        }
+
+        IngestDocument ingestDocumentWithOnewayFlow = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+        targetFieldName = randomAlphaOfLength(100);
+        Processor processorWithOnewayFlow = createCommunityIdProcessor(
+            "source_ip",
+            null,
+            "destination_ip",
+            null,
+            null,
+            "protocol",
+            "type",
+            "code",
+            randomIntBetween(0, 65535),
+            targetFieldName,
+            randomBoolean()
+        );
+
+        processorWithOnewayFlow.execute(ingestDocumentWithOnewayFlow);
+        assertThat(ingestDocumentWithOnewayFlow.hasField(targetFieldName), equalTo(true));
+        assertThat(ingestDocumentWithOnewayFlow.getFieldValue(targetFieldName, String.class).startsWith("1:"), equalTo(true));
+    }
+
+    // test that the hash result is consistent with the known value, and that the hash is
+    // symmetric: reversing the flow direction must yield the same community id
+    public void testHashResult() throws Exception {
+        CommunityIdHashInstance instance = randomFrom(CommunityIdHashInstance.values());
+        final boolean isTransportProtocol = instance.name().equals("TCP")
+            || instance.name().equals("UDP")
+            || instance.name().equals("SCTP");
+        Map<String, Object> source = new HashMap<>();
+        source.put("source_ip", instance.getSourceIp());
+        source.put("destination_ip", instance.getDestIP());
+        if (isTransportProtocol) {
+            source.put("source_port", instance.getSourcePort());
+            source.put("destination_port", instance.getDestPort());
+            source.put("iana_number", instance.getProtocolNumber());
+            IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+            String targetFieldName = randomAlphaOfLength(100);
+            boolean ignoreMissing = randomBoolean();
+            // seed must be 0 so the result matches the precomputed vector
+            Processor processor = createCommunityIdProcessor(
+                "source_ip",
+                "source_port",
+                "destination_ip",
+                "destination_port",
+                "iana_number",
+                null,
+                null,
+                null,
+                0,
+                targetFieldName,
+                ignoreMissing
+            );
+
+            processor.execute(ingestDocument);
+            assertThat(ingestDocument.hasField(targetFieldName), equalTo(true));
+            assertThat(ingestDocument.getFieldValue(targetFieldName, String.class), equalTo(instance.getHash()));
+
+            // test the flow tuple in reversed direction, the hash result should be the same value
+            source = new HashMap<>();
+            source.put("source_ip", instance.getDestIP());
+            source.put("destination_ip", instance.getSourceIp());
+            source.put("source_port", instance.getDestPort());
+            source.put("destination_port", instance.getSourcePort());
+            source.put("iana_number", instance.getProtocolNumber());
+            IngestDocument ingestDocumentWithReversedDirection = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+            targetFieldName = randomAlphaOfLength(100);
+            Processor processorWithReversedDirection = createCommunityIdProcessor(
+                "source_ip",
+                "source_port",
+                "destination_ip",
+                "destination_port",
+                "iana_number",
+                null,
+                null,
+                null,
+                0,
+                targetFieldName,
+                randomBoolean()
+            );
+
+            processorWithReversedDirection.execute(ingestDocumentWithReversedDirection);
+            assertThat(ingestDocumentWithReversedDirection.hasField(targetFieldName), equalTo(true));
+            assertThat(ingestDocumentWithReversedDirection.getFieldValue(targetFieldName, String.class), equalTo(instance.getHash()));
+        } else {
+            // the ICMP vectors store the icmp type/code in the enum's port slots
+            source.put("type", instance.getSourcePort());
+            source.put("code", instance.getDestPort());
+            source.put("iana_number", instance.getProtocolNumber());
+            IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), source);
+
+            String targetFieldName = randomAlphaOfLength(100);
+            boolean ignoreMissing = randomBoolean();
+            Processor processor = createCommunityIdProcessor(
+                "source_ip",
+                null,
+                "destination_ip",
+                null,
+                "iana_number",
+                null,
+                "type",
+                "code",
+                0,
+                targetFieldName,
+                ignoreMissing
+            );
+
+            processor.execute(ingestDocument);
+            assertThat(ingestDocument.hasField(targetFieldName), equalTo(true));
+            assertThat(ingestDocument.getFieldValue(targetFieldName, String.class), equalTo(instance.getHash()));
+        }
+    }
+
+ // Known community id test vectors: a flow 5-tuple plus the expected version 1 hash
+ // (computed with seed 0, see testHashResult). For the ICMP entries the port slots
+ // carry the icmp type (sourcePort) and code (destPort) instead of transport ports.
+ private enum CommunityIdHashInstance {
+ TCP("66.35.250.204", "128.232.110.120", 6, 80, 34855, "1:LQU9qZlK+B5F3KDmev6m5PMibrg="),
+ UDP("8.8.8.8", "192.168.1.52", 17, 53, 54585, "1:d/FP5EW3wiY1vCndhwleRRKHowQ="),
+ SCTP("192.168.170.8", "192.168.170.56", 132, 7, 7, "1:MP2EtRCAUIZvTw6MxJHLV7N7JDs="),
+ ICMP("192.168.0.89", "192.168.0.1", 1, 8, 0, "1:X0snYXpgwiv9TZtqg64sgzUn6Dk="),
+ ICMP_V6("fe80::260:97ff:fe07:69ea", "ff02::1", 58, 134, 0, "1:pkvHqCL88/tg1k4cPigmZXUtL00=");
+
+ private final String sourceIp;
+ private final String destIP;
+ // IANA protocol number (6 TCP, 17 UDP, 132 SCTP, 1 ICMP, 58 ICMPv6)
+ private final int protocolNumber;
+ private final int sourcePort;
+ private final int destPort;
+ // expected community id for this tuple
+ private final String hash;
+
+ CommunityIdHashInstance(String sourceIp, String destIP, int protocolNumber, int sourcePort, int destPort, String hash) {
+ this.sourceIp = sourceIp;
+ this.destIP = destIP;
+ this.protocolNumber = protocolNumber;
+ this.sourcePort = sourcePort;
+ this.destPort = destPort;
+ this.hash = hash;
+ }
+
+ private String getSourceIp() {
+ return this.sourceIp;
+ }
+
+ private String getDestIP() {
+ return this.destIP;
+ }
+
+ private int getProtocolNumber() {
+ return this.protocolNumber;
+ }
+
+ private int getSourcePort() {
+ return this.sourcePort;
+ }
+
+ private int getDestPort() {
+ return this.destPort;
+ }
+
+ private String getHash() {
+ return this.hash;
+ }
+ }
+
+ // Builds a CommunityIdProcessor with a random tag and no description; the remaining
+ // parameters are passed straight through to the processor's constructor.
+ private static Processor createCommunityIdProcessor(
+ String sourceIPField,
+ String sourcePortField,
+ String destinationIPField,
+ String destinationPortField,
+ String ianaProtocolNumberField,
+ String protocolField,
+ String icmpTypeField,
+ String icmpCodeField,
+ int seed,
+ String targetField,
+ boolean ignoreMissing
+ ) {
+ return new CommunityIdProcessor(
+ randomAlphaOfLength(10), // processor tag
+ null, // description
+ sourceIPField,
+ sourcePortField,
+ destinationIPField,
+ destinationPortField,
+ ianaProtocolNumberField,
+ protocolField,
+ icmpTypeField,
+ icmpCodeField,
+ seed,
+ targetField,
+ ignoreMissing
+ );
+ }
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java
new file mode 100644
index 0000000000000..09ba97ebb4595
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java
@@ -0,0 +1,114 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.OpenSearchException;
+import org.opensearch.OpenSearchParseException;
+import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+
+// Unit tests for RemoveByPatternProcessor.Factory: pattern parsing (single string or list),
+// pattern character validation, and the rule that field_pattern and exclude_field_pattern
+// are mutually exclusive and at least one must be set.
+public class RemoveByPatternProcessorFactoryTests extends OpenSearchTestCase {
+
+    private RemoveByPatternProcessor.Factory factory;
+
+    @Before
+    public void init() {
+        factory = new RemoveByPatternProcessor.Factory();
+    }
+
+    // field_pattern accepts a single string or a list, and rejects invalid characters.
+    public void testCreateFieldPatterns() throws Exception {
+        Map<String, Object> config = new HashMap<>();
+        config.put("field_pattern", "field1*");
+        String processorTag = randomAlphaOfLength(10);
+        RemoveByPatternProcessor removeByPatternProcessor = factory.create(null, processorTag, null, config);
+        assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag));
+        assertThat(removeByPatternProcessor.getFieldPatterns().get(0), equalTo("field1*"));
+
+        Map<String, Object> config2 = new HashMap<>();
+        config2.put("field_pattern", List.of("field1*", "field2*"));
+        removeByPatternProcessor = factory.create(null, processorTag, null, config2);
+        assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag));
+        assertThat(removeByPatternProcessor.getFieldPatterns().get(0), equalTo("field1*"));
+        assertThat(removeByPatternProcessor.getFieldPatterns().get(1), equalTo("field2*"));
+
+        // invalid patterns are reported with one numbered validation error each
+        Map<String, Object> config3 = new HashMap<>();
+        List<String> patterns = Arrays.asList("foo*", "*", " ", ",", "#", ":", "_");
+        config3.put("field_pattern", patterns);
+        Exception exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config3));
+        assertThat(
+            exception.getMessage(),
+            equalTo(
+                "[field_pattern] Validation Failed: "
+                    + "1: field_pattern [ ] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];"
+                    + "2: field_pattern [,] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];"
+                    + "3: field_pattern [#] must not contain a '#';"
+                    + "4: field_pattern [:] must not contain a ':';"
+                    + "5: field_pattern [_] must not start with '_';"
+            )
+        );
+    }
+
+    // exclude_field_pattern behaves like field_pattern: string or list, same validation.
+    public void testCreateExcludeFieldPatterns() throws Exception {
+        Map<String, Object> config = new HashMap<>();
+        config.put("exclude_field_pattern", "field1*");
+        String processorTag = randomAlphaOfLength(10);
+        RemoveByPatternProcessor removeByPatternProcessor = factory.create(null, processorTag, null, config);
+        assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag));
+        assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(0), equalTo("field1*"));
+
+        Map<String, Object> config2 = new HashMap<>();
+        config2.put("exclude_field_pattern", List.of("field1*", "field2*"));
+        removeByPatternProcessor = factory.create(null, processorTag, null, config2);
+        assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag));
+        assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(0), equalTo("field1*"));
+        assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(1), equalTo("field2*"));
+
+        Map<String, Object> config3 = new HashMap<>();
+        List<String> patterns = Arrays.asList("foo*", "*", " ", ",", "#", ":", "_");
+        config3.put("exclude_field_pattern", patterns);
+        Exception exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config3));
+        assertThat(
+            exception.getMessage(),
+            equalTo(
+                "[exclude_field_pattern] Validation Failed: "
+                    + "1: exclude_field_pattern [ ] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];"
+                    + "2: exclude_field_pattern [,] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];"
+                    + "3: exclude_field_pattern [#] must not contain a '#';"
+                    + "4: exclude_field_pattern [:] must not contain a ':';"
+                    + "5: exclude_field_pattern [_] must not start with '_';"
+            )
+        );
+    }
+
+    // creation fails when both options are set, and when neither is set
+    public void testCreatePatternsFailed() throws Exception {
+        Map<String, Object> config = new HashMap<>();
+        config.put("field_pattern", List.of("foo*"));
+        config.put("exclude_field_pattern", List.of("bar*"));
+        String processorTag = randomAlphaOfLength(10);
+        OpenSearchException exception = expectThrows(
+            OpenSearchParseException.class,
+            () -> factory.create(null, processorTag, null, config)
+        );
+        assertThat(exception.getMessage(), equalTo("[field_pattern] either field_pattern or exclude_field_pattern must be set"));
+
+        Map<String, Object> config2 = new HashMap<>();
+        config2.put("field_pattern", null);
+        config2.put("exclude_field_pattern", null);
+
+        exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config2));
+        assertThat(exception.getMessage(), equalTo("[field_pattern] either field_pattern or exclude_field_pattern must be set"));
+    }
+}
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java
new file mode 100644
index 0000000000000..82ff93de1f44e
--- /dev/null
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java
@@ -0,0 +1,96 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+import org.opensearch.ingest.RandomDocumentPicks;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+
+// Unit tests for RemoveByPatternProcessor: removal by field patterns, retention via
+// exclude patterns (metadata fields are always kept), and constructor validation.
+public class RemoveByPatternProcessorTests extends OpenSearchTestCase {
+
+    // fields matching field patterns are removed; metadata fields survive even if matched
+    public void testRemoveWithFieldPatterns() throws Exception {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        ingestDocument.setFieldValue("foo_1", "value");
+        ingestDocument.setFieldValue("foo_2", "value");
+        ingestDocument.setFieldValue("bar_1", "value");
+        ingestDocument.setFieldValue("bar_2", "value");
+        List<String> fieldPatterns = new ArrayList<>();
+        fieldPatterns.add("foo*");
+        // metadata patterns are included to verify that metadata is never removed
+        fieldPatterns.add("_index*");
+        fieldPatterns.add("_id*");
+        fieldPatterns.add("_version*");
+        Processor processor = new RemoveByPatternProcessor(randomAlphaOfLength(10), null, fieldPatterns, null);
+        processor.execute(ingestDocument);
+        assertThat(ingestDocument.hasField("foo_1"), equalTo(false));
+        assertThat(ingestDocument.hasField("foo_2"), equalTo(false));
+        assertThat(ingestDocument.hasField("bar_1"), equalTo(true));
+        assertThat(ingestDocument.hasField("bar_2"), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.INDEX.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName()), equalTo(true));
+    }
+
+    // with exclude patterns, only matching fields are kept (plus metadata)
+    public void testRemoveWithExcludeFieldPatterns() throws Exception {
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
+        ingestDocument.setFieldValue("foo_1", "value");
+        ingestDocument.setFieldValue("foo_2", "value");
+        ingestDocument.setFieldValue("foo_3", "value");
+        List<String> excludeFieldPatterns = new ArrayList<>();
+        excludeFieldPatterns.add("foo_3*");
+        Processor processorWithExcludeFieldsAndPatterns = new RemoveByPatternProcessor(
+            randomAlphaOfLength(10),
+            null,
+            null,
+            excludeFieldPatterns
+        );
+        processorWithExcludeFieldsAndPatterns.execute(ingestDocument);
+        assertThat(ingestDocument.hasField("foo_1"), equalTo(false));
+        assertThat(ingestDocument.hasField("foo_2"), equalTo(false));
+        assertThat(ingestDocument.hasField("foo_3"), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.INDEX.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION.getFieldName()), equalTo(true));
+        assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName()), equalTo(true));
+    }
+
+    // constructing with neither option, or with both options, must fail
+    public void testCreateRemoveByPatternProcessorWithBothFieldsAndExcludeFields() throws Exception {
+        assertThrows(
+            "either fieldPatterns and excludeFieldPatterns must be set",
+            IllegalArgumentException.class,
+            () -> new RemoveByPatternProcessor(randomAlphaOfLength(10), null, null, null)
+        );
+
+        final List<String> fieldPatterns;
+        if (randomBoolean()) {
+            fieldPatterns = new ArrayList<>();
+        } else {
+            fieldPatterns = List.of("foo_1*");
+        }
+
+        final List<String> excludeFieldPatterns;
+        if (randomBoolean()) {
+            excludeFieldPatterns = new ArrayList<>();
+        } else {
+            excludeFieldPatterns = List.of("foo_2*");
+        }
+
+        assertThrows(
+            "either fieldPatterns and excludeFieldPatterns must be set",
+            IllegalArgumentException.class,
+            () -> new RemoveByPatternProcessor(randomAlphaOfLength(10), null, fieldPatterns, excludeFieldPatterns)
+        );
+    }
+}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
index 0719082c887f2..2a816f0386667 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml
@@ -53,3 +53,36 @@
nodes.info: {}
- contains: { nodes.$cluster_manager.ingest.processors: { type: copy } }
+
+---
+# Verify the remove_by_pattern processor is registered in every node's ingest info.
+"Remove_by_pattern processor exists":
+ - skip:
+ version: " - 2.11.99"
+ features: contains
+ reason: "remove_by_pattern processor was introduced in 2.12.0 and contains is a newly added assertion"
+ - do:
+ cluster.state: {}
+
+ # Get cluster-manager node id
+ - set: { cluster_manager_node: cluster_manager }
+
+ - do:
+ nodes.info: {}
+
+ - contains: { nodes.$cluster_manager.ingest.processors: { type: remove_by_pattern } }
+
+---
+# Verify the community_id processor is registered in every node's ingest info.
+"Community_id processor exists":
+ - skip:
+ version: " - 2.12.99"
+ features: contains
+ reason: "community_id processor was introduced in 2.13.0 and contains is a newly added assertion"
+ - do:
+ cluster.state: {}
+
+ # Get cluster-manager node id
+ - set: { cluster_manager_node: cluster_manager }
+
+ - do:
+ nodes.info: {}
+ - contains: { nodes.$cluster_manager.ingest.processors: { type: community_id } }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml
new file mode 100644
index 0000000000000..397eb8f7b6033
--- /dev/null
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml
@@ -0,0 +1,146 @@
+---
+teardown:
+ - do:
+ ingest.delete_pipeline:
+ id: "my_pipeline"
+ ignore: 404
+
+---
+"Test creating remove_by_pattern processor failed":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+ - do:
+ catch: /\[field\_pattern\] either field\_pattern or exclude\_field\_pattern must be set/
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove_by_pattern" : {
+ "field_pattern" : "foo*",
+ "exclude_field_pattern" : "bar*"
+ }
+ }
+ ]
+ }
+
+ - do:
+ catch: /\[field\_pattern\] either field\_pattern or exclude\_field\_pattern must be set/
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove_by_pattern" : {
+ }
+ }
+ ]
+ }
+
+---
+"Test remove_by_pattern processor with field_pattern":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove_by_pattern" : {
+ "field_pattern" : ["foo*", "*a*b"]
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {
+ foo1: "bar",
+ foo2: "bar",
+ zoo: "bar",
+ ab: "bar",
+ aabb: "bar"
+ }
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: {zoo: "bar" }}
+
+---
+"Test remove_by_pattern processor with exclude_field_pattern":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove_by_pattern" : {
+ "exclude_field_pattern": ["foo*", "a*b*"]
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "my_pipeline"
+ body: {
+ foo1: "bar",
+ foo2: "bar",
+ bar: "zoo",
+ zoo: "bar",
+ ab: "bar",
+ aabb: "bar"
+ }
+
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { foo1: "bar", foo2: "bar", ab: "bar", aabb: "bar"}}
+
+
+---
+"Test cannot remove metadata fields by remove_by_pattern processor":
+ - skip:
+ version: " - 2.11.99"
+ reason: "introduced in 2.12.0"
+ - do:
+ catch: /field\_pattern \[\_id\] must not start with \'\_\'\;/
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "remove_by_pattern" : {
+ "field_pattern": "_id"
+ }
+ }
+ ]
+ }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/320_community_id_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/320_community_id_processor.yml
new file mode 100644
index 0000000000000..6de5371bb49f7
--- /dev/null
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/320_community_id_processor.yml
@@ -0,0 +1,370 @@
+---
+teardown:
+ - do:
+ ingest.delete_pipeline:
+ id: "1"
+ ignore: 404
+
+---
+"Test create community_id processor":
+ - skip:
+ version: " - 2.12.99"
+ reason: "introduced in 2.13"
+ - do:
+ catch: /\[source\_ip\_field\] required property is missing/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "destination_ip_field" : "dest"
+ }
+ }
+ ]
+ }
+ - do:
+ catch: /\[destination\_ip\_field\] required property is missing/
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "src"
+ }
+ }
+ ]
+ }
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "source_port_field" : "srcPort",
+ "destination_port_field" : "destPort",
+ "iana_protocol_number_field" : "iana_number",
+ "protocol_field" : "protocol",
+ "icmp_type_field" : "icmp",
+ "icmp_code_field" : "code",
+ "seed" : 0,
+ "target_field" : "community_id",
+ "ignore_missing" : false
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+---
+"Test community_id processor with ignore_missing":
+ - skip:
+ version: " - 2.12.99"
+ reason: "introduced in 2.13"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "source_port_field" : "srcPort",
+ "destination_port_field" : "destPort",
+ "protocol_field" : "protocol"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ catch: /ip address in the field \[source\] is null or empty/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ dest: "1.1.1.1",
+ protocol: "tcp"
+ }
+
+ - do:
+ catch: /ip address in the field \[dest\] is null or empty/
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "1.1.1.1",
+ protocol: "tcp"
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "source_port_field" : "srcPort",
+ "destination_port_field" : "destPort",
+ "protocol_field" : "protocol",
+ "ignore_missing" : true
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "1.1.1.1",
+ protocol: "tcp"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { source: "1.1.1.1", protocol: "tcp" } }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ dest: "2.2.2.2",
+ protocol: "tcp"
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source: { dest: "2.2.2.2", protocol: "tcp" } }
+
+---
+"Test community_id processor for tcp":
+ - skip:
+ version: " - 2.12.99"
+ reason: "introduced in 2.13"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "source_port_field" : "srcPort",
+ "destination_port_field" : "destPort",
+ "protocol_field" : "protocol"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "66.35.250.204",
+ dest: "128.232.110.120",
+ protocol: "tcp",
+ srcPort: 80,
+ destPort: 34855
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.community_id: "1:LQU9qZlK+B5F3KDmev6m5PMibrg=" }
+
+---
+"Test community_id processor for udp":
+ - skip:
+ version: " - 2.12.99"
+ reason: "introduced in 2.13"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "source_port_field" : "srcPort",
+ "destination_port_field" : "destPort",
+ "protocol_field" : "protocol"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "8.8.8.8",
+ dest: "192.168.1.52",
+ protocol: "udp",
+ srcPort: 53,
+ destPort: 54585
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.community_id: "1:d/FP5EW3wiY1vCndhwleRRKHowQ=" }
+
+---
+"Test community_id processor for sctp":
+ - skip:
+ version: " - 2.12.99"
+ reason: "introduced in 2.13"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "source_port_field" : "srcPort",
+ "destination_port_field" : "destPort",
+ "protocol_field" : "protocol"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "192.168.170.8",
+ dest: "192.168.170.56",
+ protocol: "sctp",
+ srcPort: 7,
+ destPort: 7
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.community_id: "1:MP2EtRCAUIZvTw6MxJHLV7N7JDs=" }
+
+---
+"Test community_id processor for icmp":
+ - skip:
+ version: " - 2.12.99"
+ reason: "introduced in 2.13"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "icmp_type_field" : "type",
+ "icmp_code_field" : "code",
+ "protocol_field" : "protocol"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "192.168.0.89",
+ dest: "192.168.0.1",
+ protocol: "icmp",
+ type: 8,
+ code: 0
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.community_id: "1:X0snYXpgwiv9TZtqg64sgzUn6Dk=" }
+
+---
+"Test community_id processor for icmp-v6":
+ - skip:
+ version: " - 2.12.99"
+ reason: "introduced in 2.13"
+ - do:
+ ingest.put_pipeline:
+ id: "1"
+ body: >
+ {
+ "processors": [
+ {
+ "community_id" : {
+ "source_ip_field" : "source",
+ "destination_ip_field" : "dest",
+ "icmp_type_field" : "type",
+ "icmp_code_field" : "code",
+ "protocol_field" : "protocol"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ index:
+ index: test
+ id: 1
+ pipeline: "1"
+ body: {
+ source: "fe80::260:97ff:fe07:69ea",
+ dest: "ff02::1",
+ protocol: "ipv6-icmp",
+ type: 134,
+ code: 0
+ }
+ - do:
+ get:
+ index: test
+ id: 1
+ - match: { _source.community_id: "1:pkvHqCL88/tg1k4cPigmZXUtL00=" }
diff --git a/modules/lang-expression/licenses/antlr4-runtime-4.11.1.jar.sha1 b/modules/lang-expression/licenses/antlr4-runtime-4.11.1.jar.sha1
deleted file mode 100644
index f1b328a6de624..0000000000000
--- a/modules/lang-expression/licenses/antlr4-runtime-4.11.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-069214c1de1960040729702eb58deac8827135e7
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/antlr4-runtime-4.13.1.jar.sha1 b/modules/lang-expression/licenses/antlr4-runtime-4.13.1.jar.sha1
new file mode 100644
index 0000000000000..e50b9bb646727
--- /dev/null
+++ b/modules/lang-expression/licenses/antlr4-runtime-4.13.1.jar.sha1
@@ -0,0 +1 @@
+17125bae1d965624e265ef49552f6465a2bfa307
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-8a555eb.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..82a17e2b79290
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+00759eaff8f62b38ba66a05f26ab784c268908d3
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1
deleted file mode 100644
index e073455415e24..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-045df3828b6460c032a6551040e31ea432b0aad6
\ No newline at end of file
diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java
index 143ff4f5c51bd..d7be890014add 100644
--- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java
+++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionFieldScriptTests.java
@@ -77,7 +77,7 @@ public void setUp() throws Exception {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
- lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
+ lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, SearchLookup.UNKNOWN_SHARD_ID);
}
private FieldScript.LeafFactory compile(String expression) {
diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java
index 498c0542e9c3e..94a422503d6bd 100644
--- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java
+++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionNumberSortScriptTests.java
@@ -77,7 +77,7 @@ public void setUp() throws Exception {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
- lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
+ lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, SearchLookup.UNKNOWN_SHARD_ID);
}
private NumberSortScript.LeafFactory compile(String expression) {
diff --git a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java
index 499f94afcb6af..a1d6df80715be 100644
--- a/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java
+++ b/modules/lang-expression/src/test/java/org/opensearch/script/expression/ExpressionTermsSetQueryTests.java
@@ -77,7 +77,7 @@ public void setUp() throws Exception {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
- lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
+ lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, SearchLookup.UNKNOWN_SHARD_ID);
}
private TermsSetQueryScript.LeafFactory compile(String expression) {
diff --git a/modules/lang-painless/licenses/antlr4-runtime-4.11.1.jar.sha1 b/modules/lang-painless/licenses/antlr4-runtime-4.11.1.jar.sha1
deleted file mode 100644
index f1b328a6de624..0000000000000
--- a/modules/lang-painless/licenses/antlr4-runtime-4.11.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-069214c1de1960040729702eb58deac8827135e7
\ No newline at end of file
diff --git a/modules/lang-painless/licenses/antlr4-runtime-4.13.1.jar.sha1 b/modules/lang-painless/licenses/antlr4-runtime-4.13.1.jar.sha1
new file mode 100644
index 0000000000000..e50b9bb646727
--- /dev/null
+++ b/modules/lang-painless/licenses/antlr4-runtime-4.13.1.jar.sha1
@@ -0,0 +1 @@
+17125bae1d965624e265ef49552f6465a2bfa307
\ No newline at end of file
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java
index 260a2fc0c062c..6e3448e5eea77 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java
@@ -45,10 +45,10 @@
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.dfa.DFA;
-@SuppressWarnings({ "all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue" })
+@SuppressWarnings({ "all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue", "this-escape" })
abstract class PainlessLexer extends Lexer {
static {
- RuntimeMetaData.checkVersion("4.11.1", RuntimeMetaData.VERSION);
+ RuntimeMetaData.checkVersion("4.13.1", RuntimeMetaData.VERSION);
}
protected static final DFA[] _decisionToDFA;
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java
index 40e76194f50b2..7ad5d113637c8 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessParser.java
@@ -56,7 +56,7 @@
@SuppressWarnings({ "all", "warnings", "unchecked", "unused", "cast", "CheckReturnValue" })
class PainlessParser extends Parser {
static {
- RuntimeMetaData.checkVersion("4.11.1", RuntimeMetaData.VERSION);
+ RuntimeMetaData.checkVersion("4.13.1", RuntimeMetaData.VERSION);
}
protected static final DFA[] _decisionToDFA;
@@ -337,7 +337,7 @@ public Vocabulary getVocabulary() {
@Override
public String getGrammarFileName() {
- return "java-escape";
+ return "PainlessParser.g4";
}
@Override
@@ -425,8 +425,8 @@ public final SourceContext source() throws RecognitionException {
setState(87);
_errHandler.sync(this);
_la = _input.LA(1);
- while (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155080519840L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0) {
+ while ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155080519840L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0)) {
{
{
setState(84);
@@ -571,7 +571,7 @@ public final ParametersContext parameters() throws RecognitionException {
setState(109);
_errHandler.sync(this);
_la = _input.LA(1);
- if ((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & 7L) != 0) {
+ if (((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & 7L) != 0)) {
{
setState(98);
decltype();
@@ -1088,8 +1088,8 @@ public final RstatementContext rstatement() throws RecognitionException {
setState(140);
_errHandler.sync(this);
_la = _input.LA(1);
- if (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0)) {
{
setState(139);
initializer();
@@ -1101,8 +1101,8 @@ public final RstatementContext rstatement() throws RecognitionException {
setState(144);
_errHandler.sync(this);
_la = _input.LA(1);
- if (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0)) {
{
setState(143);
expression();
@@ -1114,8 +1114,8 @@ public final RstatementContext rstatement() throws RecognitionException {
setState(148);
_errHandler.sync(this);
_la = _input.LA(1);
- if (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0)) {
{
setState(147);
afterthought();
@@ -1470,8 +1470,8 @@ public final DstatementContext dstatement() throws RecognitionException {
setState(193);
_errHandler.sync(this);
_la = _input.LA(1);
- if (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0)) {
{
setState(192);
expression();
@@ -1661,8 +1661,8 @@ public final BlockContext block() throws RecognitionException {
setState(212);
_errHandler.sync(this);
_la = _input.LA(1);
- if (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155071795360L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155071795360L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0)) {
{
setState(211);
dstatement();
@@ -2491,7 +2491,7 @@ private NoncondexpressionContext noncondexpression(int _p) throws RecognitionExc
if (!(precpred(_ctx, 13))) throw new FailedPredicateException(this, "precpred(_ctx, 13)");
setState(269);
_la = _input.LA(1);
- if (!(((_la) & ~0x3f) == 0 && ((1L << _la) & 7516192768L) != 0)) {
+ if (!((((_la) & ~0x3f) == 0 && ((1L << _la) & 7516192768L) != 0))) {
_errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
@@ -2545,7 +2545,7 @@ private NoncondexpressionContext noncondexpression(int _p) throws RecognitionExc
if (!(precpred(_ctx, 10))) throw new FailedPredicateException(this, "precpred(_ctx, 10)");
setState(278);
_la = _input.LA(1);
- if (!(((_la) & ~0x3f) == 0 && ((1L << _la) & 240518168576L) != 0)) {
+ if (!((((_la) & ~0x3f) == 0 && ((1L << _la) & 240518168576L) != 0))) {
_errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
@@ -2563,7 +2563,7 @@ private NoncondexpressionContext noncondexpression(int _p) throws RecognitionExc
if (!(precpred(_ctx, 9))) throw new FailedPredicateException(this, "precpred(_ctx, 9)");
setState(281);
_la = _input.LA(1);
- if (!(((_la) & ~0x3f) == 0 && ((1L << _la) & 4123168604160L) != 0)) {
+ if (!((((_la) & ~0x3f) == 0 && ((1L << _la) & 4123168604160L) != 0))) {
_errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
@@ -2581,7 +2581,7 @@ private NoncondexpressionContext noncondexpression(int _p) throws RecognitionExc
if (!(precpred(_ctx, 7))) throw new FailedPredicateException(this, "precpred(_ctx, 7)");
setState(284);
_la = _input.LA(1);
- if (!(((_la) & ~0x3f) == 0 && ((1L << _la) & 65970697666560L) != 0)) {
+ if (!((((_la) & ~0x3f) == 0 && ((1L << _la) & 65970697666560L) != 0))) {
_errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
@@ -2861,7 +2861,7 @@ public final ExpressionContext expression() throws RecognitionException {
noncondexpression(0);
setState(320);
_la = _input.LA(1);
- if (!((((_la - 60)) & ~0x3f) == 0 && ((1L << (_la - 60)) & 4095L) != 0)) {
+ if (!(((((_la - 60)) & ~0x3f) == 0 && ((1L << (_la - 60)) & 4095L) != 0))) {
_errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
@@ -3938,7 +3938,7 @@ public final PrimaryContext primary() throws RecognitionException {
enterOuterAlt(_localctx, 2); {
setState(400);
_la = _input.LA(1);
- if (!((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 15L) != 0)) {
+ if (!(((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 15L) != 0))) {
_errHandler.recoverInline(this);
} else {
if (_input.LA(1) == Token.EOF) matchedEOF = true;
@@ -4564,8 +4564,8 @@ public final ArrayinitializerContext arrayinitializer() throws RecognitionExcept
setState(469);
_errHandler.sync(this);
_la = _input.LA(1);
- if (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155034439840L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 2559L) != 0)) {
{
setState(461);
expression();
@@ -4923,8 +4923,8 @@ public final ArgumentsContext arguments() throws RecognitionException {
setState(524);
_errHandler.sync(this);
_la = _input.LA(1);
- if (((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155101548704L) != 0
- || (((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 864691155101548704L) != 0)
+ || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & 4095L) != 0)) {
{
setState(516);
argument();
@@ -5104,7 +5104,7 @@ public final LambdaContext lambda() throws RecognitionException {
setState(543);
_errHandler.sync(this);
_la = _input.LA(1);
- if ((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & 7L) != 0) {
+ if (((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & 7L) != 0)) {
{
setState(535);
lamtype();
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1
deleted file mode 100644
index 1f170375e9347..0000000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c5495ba59a627641b3a7c23f6bcb801874c7f7b0
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..beb44fc0f4cf9
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+8509a72b8a5a2d33d611e99254aed39765c3ad82
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1
deleted file mode 100644
index a75ea81b7ee03..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-025171b63aa1e7a5fd8a7e4e660d6d3110241ea7
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..4c74bb06fd83b
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+0a1d32debf2ed07c5852ab5b2904c43adb76c39e
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1
deleted file mode 100644
index 7f5d1adbff740..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-21a07cdf0fc46b313fe2248f1275cdbdac0ba87b
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..38eb2e5bad80a
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+04d8e9e51b7254bd26a42fe17bdcae32e4c6ebb3
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1
deleted file mode 100644
index c4a0c1fae8e24..0000000000000
--- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..5b3d3311edc9f
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+3885ffe7dd05c9773df70c61009f34a5a8a383ec
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1
deleted file mode 100644
index bda8b9376e992..0000000000000
--- a/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-132defb4689f41b51b483b7202b22b6e89fe35fd
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-common-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..bbe91c6ccfb1d
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-common-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+4f17a547530d64becd7179507b25f4154bcfba57
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1
deleted file mode 100644
index 749cc807bcce2..0000000000000
--- a/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-874c970c4ff958b1140dde52bc17e6a9e7cde662
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..ba27b38632622
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-handler-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+d4c6b05f4d9aca117981297fb7f02953102ebb5e
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1
deleted file mode 100644
index bd160c07ad0ff..0000000000000
--- a/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e185ae573db04939215f94d6ba869758dcecbde9
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..3bc0f7b3fed09
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-resolver-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+dfee84308341a42131dd0f8ac0e1e02d627c19f3
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1
deleted file mode 100644
index 2dab4bff2cc0e..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8fcca44ae16b98e15965093e7696832019fd6f27
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..19419999300dd
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+d6a105c621b47d1410e0e09419d7209d2d46e914
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1
deleted file mode 100644
index b4b977fdad7de..0000000000000
--- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2da179bd95903f0fa73218b8f0d02690c0cfbc94
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.107.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.107.Final.jar.sha1
new file mode 100644
index 0000000000000..407ecaffdad30
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.107.Final.jar.sha1
@@ -0,0 +1 @@
+4d61d4959741109b3eccd7337f11fc89fa90a74a
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-8a555eb.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..0643f16dc1052
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+44a4e095d7e047a9452d81b224905b72c830f8ae
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1
deleted file mode 100644
index b318a2d89db7d..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f7e549fdac07140f4cd379a0f517c38434165e95
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-8a555eb.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..3f4d49a78791b
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+7f57fe7322e6d3a9b4edcc3da0b1ee0791a814ec
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1
deleted file mode 100644
index 87510efa881bc..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-eb19738fd3ca85071fef96978a056a7c94d8e793
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-8a555eb.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..1f110011ca9c6
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+9929da235100f8df323cfed165b8111fb2840093
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1
deleted file mode 100644
index fb3746eb27840..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7e83a83741155ececf9193a4f967e570e170236d
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-8a555eb.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..8d6bf9fa0fa1b
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+8564c86d880c6ce002250002e2fd0936cbfff61d
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1
deleted file mode 100644
index e88299f106bb2..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dabdea96c7a6d00363b0093a580e7d783efa69a4
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-8a555eb.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..cbe4aec98fae4
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+7e71777cfb5beb4ffd5b03030576d2f062eef13c
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1
deleted file mode 100644
index 05dd3328b032d..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fb551d9ae6a1bf12bc90a4d26dd8fa3eefac8cb9
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-8a555eb.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..e1c7aecc104d0
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+a597265bd6fb0a7e954e948a295d31507dd73cce
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1
deleted file mode 100644
index b47a1e4052407..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0d495b478861b2d29e0a58d273ca0e6e755292e9
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-8a555eb.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-8a555eb.jar.sha1
new file mode 100644
index 0000000000000..eefa2809f3540
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.11.0-snapshot-8a555eb.jar.sha1
@@ -0,0 +1 @@
+c9e534845bb08985d7fa21e2e71a14bc68c46089
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1
deleted file mode 100644
index b7d54cb230445..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9958d813d4cfdd890b4611c679ed36775480fa0d
\ No newline at end of file
diff --git a/plugins/cache-ehcache/build.gradle b/plugins/cache-ehcache/build.gradle
new file mode 100644
index 0000000000000..65e7daaaacf26
--- /dev/null
+++ b/plugins/cache-ehcache/build.gradle
@@ -0,0 +1,97 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+import org.apache.tools.ant.taskdefs.condition.Os
+import org.opensearch.gradle.Architecture
+import org.opensearch.gradle.OS
+import org.opensearch.gradle.info.BuildParams
+
+apply plugin: 'opensearch.internal-cluster-test'
+
+opensearchplugin {
+ description 'Ehcache based cache implementation.'
+ classname 'org.opensearch.cache.EhcacheCachePlugin'
+}
+
+versions << [
+ 'ehcache' : '3.10.8'
+]
+
+dependencies {
+ api "org.ehcache:ehcache:${versions.ehcache}"
+}
+
+thirdPartyAudit {
+ ignoreViolations(
+ 'org.ehcache.impl.internal.concurrent.ConcurrentHashMap',
+ 'org.ehcache.impl.internal.concurrent.ConcurrentHashMap$CounterCell',
+ 'org.ehcache.impl.internal.concurrent.ConcurrentHashMap$TreeBin',
+ 'org.ehcache.impl.internal.concurrent.ThreadLocalRandomUtil',
+ 'org.ehcache.sizeof.impl.UnsafeSizeOf'
+ )
+
+ ignoreMissingClasses(
+ 'javax.cache.Cache',
+ 'javax.cache.Cache$Entry',
+ 'javax.cache.CacheException',
+ 'javax.cache.CacheManager',
+ 'javax.cache.configuration.CacheEntryListenerConfiguration',
+ 'javax.cache.configuration.CompleteConfiguration',
+ 'javax.cache.configuration.Configuration',
+ 'javax.cache.configuration.Factory',
+ 'javax.cache.configuration.OptionalFeature',
+ 'javax.cache.event.CacheEntryCreatedListener',
+ 'javax.cache.event.CacheEntryEvent',
+ 'javax.cache.event.CacheEntryEventFilter',
+ 'javax.cache.event.CacheEntryExpiredListener',
+ 'javax.cache.event.CacheEntryListener',
+ 'javax.cache.event.CacheEntryRemovedListener',
+ 'javax.cache.event.CacheEntryUpdatedListener',
+ 'javax.cache.event.EventType',
+ 'javax.cache.expiry.Duration',
+ 'javax.cache.expiry.EternalExpiryPolicy',
+ 'javax.cache.expiry.ExpiryPolicy',
+ 'javax.cache.integration.CacheLoader',
+ 'javax.cache.integration.CacheLoaderException',
+ 'javax.cache.integration.CacheWriter',
+ 'javax.cache.integration.CacheWriterException',
+ 'javax.cache.integration.CompletionListener',
+ 'javax.cache.management.CacheMXBean',
+ 'javax.cache.management.CacheStatisticsMXBean',
+ 'javax.cache.processor.EntryProcessor',
+ 'javax.cache.processor.EntryProcessorResult',
+ 'javax.cache.processor.MutableEntry',
+ 'javax.cache.spi.CachingProvider',
+ 'javax.xml.bind.JAXBContext',
+ 'javax.xml.bind.JAXBElement',
+ 'javax.xml.bind.Marshaller',
+ 'javax.xml.bind.Unmarshaller',
+ 'javax.xml.bind.annotation.XmlElement',
+ 'javax.xml.bind.annotation.XmlRootElement',
+ 'javax.xml.bind.annotation.XmlSchema',
+ 'javax.xml.bind.annotation.adapters.XmlAdapter',
+ 'org.osgi.framework.BundleActivator',
+ 'org.osgi.framework.BundleContext',
+ 'org.osgi.framework.ServiceReference',
+ 'org.slf4j.Logger',
+ 'org.slf4j.LoggerFactory',
+ 'org.slf4j.Marker',
+ 'org.slf4j.event.Level'
+ )
+}
+
+tasks.named("bundlePlugin").configure {
+ from('config/cache-ehcache') {
+ into 'config'
+ }
+}
+
+test {
+ // TODO: Adding permission in plugin-security.policy doesn't seem to work.
+ systemProperty 'tests.security.manager', 'false'
+}
diff --git a/plugins/cache-ehcache/licenses/ehcache-3.10.8.jar.sha1 b/plugins/cache-ehcache/licenses/ehcache-3.10.8.jar.sha1
new file mode 100644
index 0000000000000..dee07e9238ebf
--- /dev/null
+++ b/plugins/cache-ehcache/licenses/ehcache-3.10.8.jar.sha1
@@ -0,0 +1 @@
+f0d50ede46609db78413ca7f4250d348a597b101
\ No newline at end of file
diff --git a/plugins/cache-ehcache/licenses/ehcache-LICENSE.txt b/plugins/cache-ehcache/licenses/ehcache-LICENSE.txt
new file mode 100644
index 0000000000000..8dada3edaf50d
--- /dev/null
+++ b/plugins/cache-ehcache/licenses/ehcache-LICENSE.txt
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/plugins/cache-ehcache/licenses/ehcache-NOTICE.txt b/plugins/cache-ehcache/licenses/ehcache-NOTICE.txt
new file mode 100644
index 0000000000000..1dbd38242cc98
--- /dev/null
+++ b/plugins/cache-ehcache/licenses/ehcache-NOTICE.txt
@@ -0,0 +1,5 @@
+Ehcache V3
+Copyright 2014-2023 Terracotta, Inc.
+
+The product includes software from the Apache Commons Lang project,
+under the Apache License 2.0 (see: org.ehcache.impl.internal.classes.commonslang)
diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java
new file mode 100644
index 0000000000000..ceda96e4a7d7d
--- /dev/null
+++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheCachePlugin.java
@@ -0,0 +1,56 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.cache;
+
+import org.opensearch.cache.store.disk.EhcacheDiskCache;
+import org.opensearch.common.cache.CacheType;
+import org.opensearch.common.cache.ICache;
+import org.opensearch.common.settings.Setting;
+import org.opensearch.plugins.CachePlugin;
+import org.opensearch.plugins.Plugin;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static org.opensearch.cache.EhcacheDiskCacheSettings.CACHE_TYPE_MAP;
+
+/**
+ * Ehcache based cache plugin.
+ */
+public class EhcacheCachePlugin extends Plugin implements CachePlugin {
+
+ private static final String EHCACHE_CACHE_PLUGIN = "EhcachePlugin";
+
+ /**
+ * Default constructor to avoid javadoc related failures.
+ */
+ public EhcacheCachePlugin() {}
+
+ @Override
+ public Map<String, ICache.Factory> getCacheFactoryMap() {
+ return Map.of(EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME, new EhcacheDiskCache.EhcacheDiskCacheFactory());
+ }
+
+ @Override
+ public List<Setting<?>> getSettings() {
+ List<Setting<?>> settingList = new ArrayList<>();
+ for (Map.Entry<CacheType, Map<String, Setting<?>>> entry : CACHE_TYPE_MAP.entrySet()) {
+ for (Map.Entry<String, Setting<?>> entry1 : entry.getValue().entrySet()) {
+ settingList.add(entry1.getValue());
+ }
+ }
+ return settingList;
+ }
+
+ @Override
+ public String getName() {
+ return EHCACHE_CACHE_PLUGIN;
+ }
+}
diff --git a/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java
new file mode 100644
index 0000000000000..837fd6b268ce6
--- /dev/null
+++ b/plugins/cache-ehcache/src/main/java/org/opensearch/cache/EhcacheDiskCacheSettings.java
@@ -0,0 +1,222 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.cache;
+
+import org.opensearch.cache.store.disk.EhcacheDiskCache;
+import org.opensearch.common.cache.CacheType;
+import org.opensearch.common.settings.Setting;
+import org.opensearch.common.unit.TimeValue;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.opensearch.common.settings.Setting.Property.NodeScope;
+
+/**
+ * Settings related to ehcache disk cache.
+ */
+public class EhcacheDiskCacheSettings {
+
+ /**
+ * Ehcache disk write minimum threads for its pool
+ *
+ * Setting pattern: {cache_type}.ehcache_disk.min_threads
+ */
+
+ public static final Setting.AffixSetting<Integer> DISK_WRITE_MINIMUM_THREADS_SETTING = Setting.suffixKeySetting(
+ EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".min_threads",
+ (key) -> Setting.intSetting(key, 2, 1, 5, NodeScope)
+ );
+
+ /**
+ * Ehcache disk write maximum threads for its pool
+ *
+ * Setting pattern: {cache_type}.ehcache_disk.max_threads
+ */
+ public static final Setting.AffixSetting<Integer> DISK_WRITE_MAXIMUM_THREADS_SETTING = Setting.suffixKeySetting(
+ EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".max_threads",
+ (key) -> Setting.intSetting(key, 2, 1, 20, NodeScope)
+ );
+
+ /**
+ * Not to be confused with number of disk segments, this is different. Defines
+ * distinct write queues created for disk store where a group of segments share a write queue. This is
+ * implemented with ehcache using a partitioned thread pool executor. By default all segments share a single write
+ * queue, i.e. write concurrency is 1. Check OffHeapDiskStoreConfiguration and DiskWriteThreadPool.
+ *
+ * Default is 1 within ehcache.
+ *
+ *
+ */
+ public static final Setting.AffixSetting<Integer> DISK_WRITE_CONCURRENCY_SETTING = Setting.suffixKeySetting(
+ EhcacheDiskCache.EhcacheDiskCacheFactory.EHCACHE_DISK_CACHE_NAME + ".concurrency",
+ (key) -> Setting.intSetting(key, 1, 1, 3, NodeScope)
+ );
+
+ /**
+ * Defines how many segments the disk cache is separated into. Higher number achieves greater concurrency but
+ * will hold that many file pointers. Default is 16.
+ *
+ * Default value is 16 within Ehcache.
+ */
+ public static final Setting.AffixSetting