Expose lucene's RemoveDuplicatesTokenFilter (#31275)
romseygeek committed Jun 18, 2018
1 parent 2d184e3 commit a26b9b7
Showing 5 changed files with 112 additions and 1 deletion.
docs/reference/analysis/tokenfilters.asciidoc (3 additions & 1 deletion)
@@ -95,4 +95,6 @@ include::tokenfilters/decimal-digit-tokenfilter.asciidoc[]

include::tokenfilters/fingerprint-tokenfilter.asciidoc[]

include::tokenfilters/minhash-tokenfilter.asciidoc[]

include::tokenfilters/remove-duplicates-tokenfilter.asciidoc[]
docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc
@@ -0,0 +1,5 @@
[[analysis-remove-duplicates-tokenfilter]]
=== Remove Duplicates Token Filter

A token filter of type `remove_duplicates` that drops identical tokens at the
same position.
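
For illustration, a minimal Java sketch of the behaviour (assuming Lucene's CannedTokenStream and Token test helpers, the same classes the test below uses; the example class and method names are made up):

import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter;

class RemoveDuplicatesSketch {
    static TokenStream deduped() {
        // Two "b" tokens share the same position (the second has a position increment of 0),
        // so the filter keeps only the first; the stream then emits "a", "b".
        TokenStream in = new CannedTokenStream(
            new Token("a", 1, 0, 1),  // Token(text, positionIncrement, startOffset, endOffset)
            new Token("b", 1, 2, 3),
            new Token("b", 0, 2, 3)   // identical token at the same position; removed
        );
        return new RemoveDuplicatesTokenFilter(in);
    }
}
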
CommonAnalysisPlugin.java
@@ -194,6 +194,7 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new));
filters.put("persian_normalization", PersianNormalizationFilterFactory::new);
filters.put("porter_stem", PorterStemTokenFilterFactory::new);
filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new);
filters.put("reverse", ReverseTokenFilterFactory::new);
filters.put("russian_stem", RussianStemTokenFilterFactory::new);
filters.put("scandinavian_folding", ScandinavianFoldingFilterFactory::new);
RemoveDuplicatesTokenFilterFactory.java
@@ -0,0 +1,42 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;

/**
* Filter factory for the lucene RemoveDuplicatesTokenFilter
*/
class RemoveDuplicatesTokenFilterFactory extends AbstractTokenFilterFactory {

    RemoveDuplicatesTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
    }

    @Override
    public TokenStream create(TokenStream tokenStream) {
        return new RemoveDuplicatesTokenFilter(tokenStream);
    }
}
RemoveDuplicatesFilterFactoryTests.java
@@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.analysis.AnalysisTestsHelper;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;

import java.io.IOException;

import static org.hamcrest.Matchers.instanceOf;

public class RemoveDuplicatesFilterFactoryTests extends ESTokenStreamTestCase {

    public void testRemoveDuplicatesFilter() throws IOException {
        Settings settings = Settings.builder()
                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                .put("index.analysis.filter.removedups.type", "remove_duplicates")
                .build();
        ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
        TokenFilterFactory tokenFilter = analysis.tokenFilter.get("removedups");
        assertThat(tokenFilter, instanceOf(RemoveDuplicatesTokenFilterFactory.class));

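        // Token(text, positionIncrement, startOffset, endOffset): "c" and the second "b" have a
        // position increment of 0, so "b", "c" and the second "b" all occupy the same position.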
        CannedTokenStream cts = new CannedTokenStream(
            new Token("a", 1, 0, 1),
            new Token("b", 1, 2, 3),
            new Token("c", 0, 2, 3),
            new Token("b", 0, 2, 3),
            new Token("d", 1, 4, 5)
        );
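        // Only the second "b" duplicates a token at its position, so it is dropped;
        // the surviving tokens keep their original position increments (1, 1, 0, 1).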

        assertTokenStreamContents(tokenFilter.create(cts), new String[]{
                "a", "b", "c", "d"
        }, new int[]{
                1, 1, 0, 1
        });
    }

}
