From c5b57ab47fe64f52e438d250f338e9ad4e082448 Mon Sep 17 00:00:00 2001
From: Andriy Redko
Date: Thu, 17 Oct 2024 12:21:39 -0400
Subject: [PATCH] Update to Apache Lucene 10 for 3.0.0

Signed-off-by: Andriy Redko
---
 .github/workflows/assemble.yml | 17 +-
 .github/workflows/precommit.yml | 15 +-
 buildSrc/build.gradle | 4 +-
 buildSrc/reaper/build.gradle | 4 +-
 .../src/testKit/thirdPartyAudit/build.gradle | 4 +-
 gradle/libs.versions.toml | 2 +-
 .../core/licenses/lucene-core-10.0.0.jar.sha1 | 1 +
 .../core/licenses/lucene-core-9.12.0.jar.sha1 | 1 -
 .../java/org/opensearch/LegacyESVersion.java | 2 +-
 .../src/main/java/org/opensearch/Version.java | 2 +-
 .../lucene-expressions-10.0.0.jar.sha1 | 1 +
 .../lucene-expressions-9.12.0.jar.sha1 | 1 -
 .../lucene-analysis-icu-10.0.0.jar.sha1 | 1 +
 .../lucene-analysis-icu-9.12.0.jar.sha1 | 1 -
 .../lucene-analysis-kuromoji-10.0.0.jar.sha1 | 1 +
 .../lucene-analysis-kuromoji-9.12.0.jar.sha1 | 1 -
 .../lucene-analysis-nori-10.0.0.jar.sha1 | 1 +
 .../lucene-analysis-nori-9.12.0.jar.sha1 | 1 -
 .../lucene-analysis-phonetic-10.0.0.jar.sha1 | 1 +
 .../lucene-analysis-phonetic-9.12.0.jar.sha1 | 1 -
 .../lucene-analysis-smartcn-10.0.0.jar.sha1 | 1 +
 .../lucene-analysis-smartcn-9.12.0.jar.sha1 | 1 -
 .../lucene-analysis-stempel-10.0.0.jar.sha1 | 1 +
 .../lucene-analysis-stempel-9.12.0.jar.sha1 | 1 -
 ...lucene-analysis-morfologik-10.0.0.jar.sha1 | 1 +
 ...lucene-analysis-morfologik-9.12.0.jar.sha1 | 1 -
 .../lucene-analysis-common-10.0.0.jar.sha1 | 1 +
 .../lucene-analysis-common-9.12.0.jar.sha1 | 1 -
 .../lucene-backward-codecs-10.0.0.jar.sha1 | 1 +
 .../lucene-backward-codecs-9.12.0.jar.sha1 | 1 -
 server/licenses/lucene-core-10.0.0.jar.sha1 | 1 +
 server/licenses/lucene-core-9.12.0.jar.sha1 | 1 -
 .../licenses/lucene-grouping-10.0.0.jar.sha1 | 1 +
 .../licenses/lucene-grouping-9.12.0.jar.sha1 | 1 -
 .../lucene-highlighter-10.0.0.jar.sha1 | 1 +
 .../lucene-highlighter-9.12.0.jar.sha1 | 1 -
 server/licenses/lucene-join-10.0.0.jar.sha1 | 1 +
 server/licenses/lucene-join-9.12.0.jar.sha1 | 1 -
 server/licenses/lucene-memory-10.0.0.jar.sha1 | 1 +
 server/licenses/lucene-memory-9.12.0.jar.sha1 | 1 -
 server/licenses/lucene-misc-10.0.0.jar.sha1 | 1 +
 server/licenses/lucene-misc-9.12.0.jar.sha1 | 1 -
 .../licenses/lucene-queries-10.0.0.jar.sha1 | 1 +
 .../licenses/lucene-queries-9.12.0.jar.sha1 | 1 -
 .../lucene-queryparser-10.0.0.jar.sha1 | 1 +
 .../lucene-queryparser-9.12.0.jar.sha1 | 1 -
 .../licenses/lucene-sandbox-10.0.0.jar.sha1 | 1 +
 .../licenses/lucene-sandbox-9.12.0.jar.sha1 | 1 -
 .../lucene-spatial-extras-10.0.0.jar.sha1 | 1 +
 .../lucene-spatial-extras-9.12.0.jar.sha1 | 1 -
 .../licenses/lucene-spatial3d-10.0.0.jar.sha1 | 1 +
 .../licenses/lucene-spatial3d-9.12.0.jar.sha1 | 1 -
 .../licenses/lucene-suggest-10.0.0.jar.sha1 | 1 +
 .../licenses/lucene-suggest-9.12.0.jar.sha1 | 1 -
 .../Lucene90DocValuesConsumerWrapper.java | 10 +-
 .../queries/BinaryDocValuesRangeQuery.java | 7 +-
 .../lucene/queries/BlendedTermQuery.java | 18 +-
 .../search/grouping/CollapseTopFieldDocs.java | 4 +-
 .../grouping/CollapsingDocValuesSource.java | 2 +-
 .../automaton/MinimizationOperations.java | 331 ++++++++++++++++++
 .../lucene/util/packed/XPackedInts.java | 10 +-
 .../search/BottomSortValuesCollector.java | 2 +-
 .../action/search/SearchPhaseController.java | 4 +-
 .../opensearch/bootstrap/BootstrapChecks.java | 2 +-
 .../org/opensearch/common/lucene/Lucene.java | 4 +-
 .../lucene/index/FilterableTermsEnum.java | 6 +
 .../lucene/search/AutomatonQueries.java | 2 +-
 .../common/lucene/search/Queries.java | 10 +-
.../SpanBooleanQueryRewriteWithMaxClause.java | 14 +- .../common/lucene/search/XMoreLikeThis.java | 5 +- .../search/function/FunctionScoreQuery.java | 6 +- .../search/function/MinScoreScorer.java | 2 +- .../opensearch/index/codec/CodecService.java | 12 +- .../PerFieldMappingPostingFormatCodec.java | 4 +- .../composite/CompositeCodecFactory.java | 11 +- .../LuceneDocValuesConsumerFactory.java | 2 + .../composite100/Composite100Codec.java | 58 +++ .../composite/composite100/package-info.java | 12 + .../composite912/Composite912Codec.java | 8 +- .../Composite912DocValuesReader.java | 7 +- .../Composite912DocValuesWriter.java | 1 + .../startree/utils/StarTreeUtils.java | 3 + ...SortedUnsignedLongDocValuesRangeQuery.java | 6 +- .../SortedUnsignedLongDocValuesSetQuery.java | 6 +- .../org/opensearch/index/engine/Engine.java | 2 +- .../index/engine/LuceneChangesSnapshot.java | 2 +- .../RecoverySourcePruneMergePolicy.java | 6 + .../index/engine/TranslogLeafReader.java | 21 +- .../opensearch/index/fielddata/FieldData.java | 2 +- .../ordinals/GlobalOrdinalMapping.java | 4 +- .../fielddata/ordinals/MultiOrdinals.java | 6 +- .../opensearch/index/get/ShardGetService.java | 2 + .../mapper/ConstantKeywordFieldMapper.java | 5 +- .../index/mapper/DateFieldMapper.java | 5 +- .../index/mapper/DocCountFieldMapper.java | 4 +- .../index/mapper/IdFieldMapper.java | 11 +- .../index/mapper/IpFieldMapper.java | 2 +- .../index/mapper/KeywordFieldMapper.java | 20 +- .../index/mapper/MappedFieldType.java | 7 +- .../index/mapper/TermBasedFieldType.java | 11 +- .../index/mapper/WildcardFieldMapper.java | 19 +- .../index/query/DerivedFieldQuery.java | 6 +- .../index/query/IntervalBuilder.java | 6 +- .../opensearch/index/query/RegexpFlag.java | 2 +- .../index/query/ScriptQueryBuilder.java | 6 +- .../index/query/SourceFieldMatchQuery.java | 5 +- .../index/query/TermsSetQueryBuilder.java | 5 +- .../reindex/ClientScrollableHitSource.java | 2 +- .../opensearch/index/search/MatchQuery.java | 13 +- .../index/search/MultiMatchQuery.java | 2 +- .../opensearch/index/search/NestedHelper.java | 12 +- .../comparators/HalfFloatComparator.java | 29 +- .../comparators/UnsignedLongComparator.java | 29 +- .../index/shard/ShardSplittingQuery.java | 9 +- .../org/opensearch/index/store/Store.java | 2 +- .../store/remote/utils/TransferManager.java | 2 +- .../opensearch/indices/IndicesQueryCache.java | 14 - .../SegmentFileTransferHandler.java | 2 +- .../lucene/queries/MinDocQuery.java | 6 +- .../queries/SearchAfterSortedDocQuery.java | 8 +- .../main/java/org/opensearch/node/Node.java | 4 +- .../blobstore/BlobStoreRepository.java | 2 +- .../rest/action/cat/RestCountAction.java | 4 +- .../rest/action/search/RestCountAction.java | 2 +- .../org/opensearch/search/MultiValueMode.java | 2 +- .../org/opensearch/search/SearchHits.java | 6 +- .../org/opensearch/search/SearchModule.java | 4 +- .../bucket/composite/CompositeAggregator.java | 2 +- .../composite/GlobalOrdinalValuesSource.java | 6 +- .../bucket/composite/LongValuesSource.java | 6 +- .../bucket/range/BinaryRangeAggregator.java | 2 +- .../DiversifiedOrdinalsSamplerAggregator.java | 2 +- .../GlobalOrdinalsStringTermsAggregator.java | 9 +- .../metrics/CardinalityAggregator.java | 2 +- .../aggregations/metrics/InternalTopHits.java | 10 +- .../metrics/MetricInspectionHelper.java | 2 +- .../aggregations/support/MissingValues.java | 4 +- .../ApproximatePointRangeQuery.java | 13 +- .../highlight/FragmentBuilderHelper.java | 6 +- .../search/profile/query/ProfileScorer.java | 11 +- 
.../search/profile/query/ProfileWeight.java | 25 +- .../search/query/BitmapDocValuesQuery.java | 6 +- .../opensearch/search/query/QueryPhase.java | 2 +- .../search/query/TopDocsCollectorContext.java | 8 +- .../search/slice/DocValuesSliceQuery.java | 6 +- .../search/slice/TermsSliceQuery.java | 6 +- .../sort/SortedWiderNumericSortField.java | 10 + .../phrase/DirectCandidateGenerator.java | 6 +- .../search/suggest/phrase/LaplaceScorer.java | 2 +- .../phrase/LinearInterpolatingScorer.java | 2 +- .../phrase/NoisyChannelSpellChecker.java | 2 +- .../suggest/phrase/StupidBackoffScorer.java | 4 +- ...ceableSearchRequestOperationsListener.java | 2 +- .../services/org.apache.lucene.codecs.Codec | 1 + .../action/search/DfsQueryPhaseTests.java | 6 +- .../action/search/FetchSearchPhaseTests.java | 10 +- .../search/SearchPhaseControllerTests.java | 24 +- .../SearchQueryThenFetchAsyncActionTests.java | 8 +- .../search/SearchResponseMergerTests.java | 20 +- .../action/search/SearchResponseTests.java | 12 +- .../indices/IndicesQueryCacheTests.java | 4 +- .../test/OpenSearchIntegTestCase.java | 2 +- 162 files changed, 812 insertions(+), 409 deletions(-) create mode 100644 libs/core/licenses/lucene-core-10.0.0.jar.sha1 delete mode 100644 libs/core/licenses/lucene-core-9.12.0.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-10.0.0.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-9.12.0.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-10.0.0.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.0.0.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-10.0.0.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.0.0.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.0.0.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.0.0.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.0.0.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-analysis-common-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-analysis-common-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-core-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-core-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-grouping-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-join-10.0.0.jar.sha1 delete mode 100644 
server/licenses/lucene-join-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-memory-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-memory-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-misc-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-misc-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-queries-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-queries-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-9.12.0.jar.sha1 create mode 100644 server/licenses/lucene-suggest-10.0.0.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-9.12.0.jar.sha1 create mode 100644 server/src/main/java/org/apache/lucene/util/automaton/MinimizationOperations.java create mode 100644 server/src/main/java/org/opensearch/index/codec/composite/composite100/Composite100Codec.java create mode 100644 server/src/main/java/org/opensearch/index/codec/composite/composite100/package-info.java diff --git a/.github/workflows/assemble.yml b/.github/workflows/assemble.yml index d90b05c323cf1..4146a74c47a1f 100644 --- a/.github/workflows/assemble.yml +++ b/.github/workflows/assemble.yml @@ -7,7 +7,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - java: [ 11, 17, 21 ] + java: [ 21, 23 ] os: [ubuntu-latest, windows-latest, macos-13] steps: - uses: actions/checkout@v4 @@ -16,17 +16,6 @@ jobs: with: java-version: ${{ matrix.java }} distribution: temurin - - name: Set up JDK 17 - # See please https://docs.gradle.org/8.10/userguide/upgrading_version_8.html#minimum_daemon_jvm_version - if: matrix.java == 11 - uses: actions/setup-java@v4 - with: - java-version: 17 - distribution: temurin - - name: Set JAVA${{ matrix.java }}_HOME - shell: bash - run: | - echo "JAVA${{ matrix.java }}_HOME=$JAVA_HOME_${{ matrix.java }}_${{ runner.arch }}" >> $GITHUB_ENV - name: Setup docker (missing on MacOS) id: setup_docker if: runner.os == 'macos' @@ -47,8 +36,8 @@ jobs: shell: bash if: runner.os != 'macos' run: | - ./gradlew assemble --parallel --no-build-cache -PDISABLE_BUILD_CACHE -Druntime.java=${{ matrix.java }} + ./gradlew assemble --parallel --no-build-cache -PDISABLE_BUILD_CACHE - name: Run Gradle (assemble) if: runner.os == 'macos' && steps.setup_docker.outcome == 'success' run: | - ./gradlew assemble --parallel --no-build-cache -PDISABLE_BUILD_CACHE -Druntime.java=${{ matrix.java }} + ./gradlew assemble --parallel --no-build-cache -PDISABLE_BUILD_CACHE diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index 572f6c981a052..917634c5e94e9 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -7,7 +7,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - java: [ 11, 17, 21, 23 ] + java: [ 21, 23 ] os: [ubuntu-latest, windows-latest, macos-latest, macos-13] steps: - uses: actions/checkout@v4 @@ -17,18 +17,7 @@ jobs: java-version: ${{ matrix.java }} distribution: temurin cache: gradle - - name: Set up JDK 17 - # See please https://docs.gradle.org/8.10/userguide/upgrading_version_8.html#minimum_daemon_jvm_version - if: 
matrix.java == 11 - uses: actions/setup-java@v4 - with: - java-version: 17 - distribution: temurin - - name: Set JAVA${{ matrix.java }}_HOME - shell: bash - run: | - echo "JAVA${{ matrix.java }}_HOME=$JAVA_HOME_${{ matrix.java }}_${{ runner.arch }}" >> $GITHUB_ENV - name: Run Gradle (precommit) shell: bash run: | - ./gradlew javadoc precommit --parallel -Druntime.java=${{ matrix.java }} + ./gradlew javadoc precommit --parallel diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index c62f20e106e8c..1b7fe7a73fae6 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -171,8 +171,8 @@ if (project != rootProject) { allprojects { java { - targetCompatibility = JavaVersion.VERSION_11 - sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_21 + sourceCompatibility = JavaVersion.VERSION_21 } } diff --git a/buildSrc/reaper/build.gradle b/buildSrc/reaper/build.gradle index 58d06b02e9f4b..55efcf5d5dfff 100644 --- a/buildSrc/reaper/build.gradle +++ b/buildSrc/reaper/build.gradle @@ -12,8 +12,8 @@ apply plugin: 'java' java { - targetCompatibility = JavaVersion.VERSION_11 - sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_21 + sourceCompatibility = JavaVersion.VERSION_21 } jar { diff --git a/buildSrc/src/testKit/thirdPartyAudit/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/build.gradle index 553ff5d8e6ed2..caaaa6bd1aceb 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/build.gradle @@ -46,11 +46,11 @@ dependencies { } tasks.register("empty", ThirdPartyAuditTask) { - targetCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_21 signatureFile = file('third-party-audit-empty.txt') } tasks.register("absurd", ThirdPartyAuditTask) { - targetCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_21 signatureFile = file('third-party-audit-absurd.txt') } diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 8ff3f6e45397d..19b0a2bc6b4ab 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -1,6 +1,6 @@ [versions] opensearch = "3.0.0" -lucene = "9.12.0" +lucene = "10.0.0" bundled_jdk_vendor = "adoptium" bundled_jdk = "23.0.1+11" diff --git a/libs/core/licenses/lucene-core-10.0.0.jar.sha1 b/libs/core/licenses/lucene-core-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..96cc85a5c56a2 --- /dev/null +++ b/libs/core/licenses/lucene-core-10.0.0.jar.sha1 @@ -0,0 +1 @@ +5a9b3f728041df5b054aaaed3c3fd7ff0fed8ee7 \ No newline at end of file diff --git a/libs/core/licenses/lucene-core-9.12.0.jar.sha1 b/libs/core/licenses/lucene-core-9.12.0.jar.sha1 deleted file mode 100644 index e55f896dedb63..0000000000000 --- a/libs/core/licenses/lucene-core-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fdb055d569bb20bfce9618fe2b01c29bab7f290c \ No newline at end of file diff --git a/libs/core/src/main/java/org/opensearch/LegacyESVersion.java b/libs/core/src/main/java/org/opensearch/LegacyESVersion.java index 5d8e067a8fd8b..6de19c89053ad 100644 --- a/libs/core/src/main/java/org/opensearch/LegacyESVersion.java +++ b/libs/core/src/main/java/org/opensearch/LegacyESVersion.java @@ -50,7 +50,7 @@ public class LegacyESVersion extends Version { public static final LegacyESVersion V_6_0_0 = new LegacyESVersion(6000099, org.apache.lucene.util.Version.fromBits(7, 0, 0)); public static final LegacyESVersion V_6_5_0 = new LegacyESVersion(6050099, org.apache.lucene.util.Version.fromBits(7, 0, 0)); 
-    public static final LegacyESVersion V_7_2_0 = new LegacyESVersion(7020099, org.apache.lucene.util.Version.LUCENE_8_0_0);
+    public static final LegacyESVersion V_7_2_0 = new LegacyESVersion(7020099, org.apache.lucene.util.Version.fromBits(8, 0, 0));
     // todo move back to Version.java if retiring legacy version support
     protected static final Map<Integer, Version> idToVersion;
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index 4d685e3bc654a..627d06e0fbd16 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -112,8 +112,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_2_17_1 = new Version(2170199, org.apache.lucene.util.Version.LUCENE_9_11_1);
     public static final Version V_2_17_2 = new Version(2170299, org.apache.lucene.util.Version.LUCENE_9_11_1);
     public static final Version V_2_18_0 = new Version(2180099, org.apache.lucene.util.Version.LUCENE_9_12_0);
-    public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_12_0);
     public static final Version V_2_19_0 = new Version(2190099, org.apache.lucene.util.Version.LUCENE_9_12_0);
+    public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_10_0_0);
     public static final Version CURRENT = V_3_0_0;
     public static Version fromId(int id) {
diff --git a/modules/lang-expression/licenses/lucene-expressions-10.0.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-10.0.0.jar.sha1
new file mode 100644
index 0000000000000..ee70628a05318
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-10.0.0.jar.sha1
@@ -0,0 +1 @@
+89b26348ec305598fc224cc9583939564b67b2cf
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.12.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.12.0.jar.sha1
deleted file mode 100644
index 476049a66cc08..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5ba843374a0aab3dfe0b11cb28b251844d85bf5b
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-10.0.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-10.0.0.jar.sha1
new file mode 100644
index 0000000000000..058ece7137757
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-10.0.0.jar.sha1
@@ -0,0 +1 @@
+082c38c1335c069a73622c37ca3d39e64c1b2d33
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0.jar.sha1
deleted file mode 100644
index 31398b27708a3..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.12.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a9232b6a4882979118d3281b98dfdb6e0e1cb5ca
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.0.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.0.0.jar.sha1
new file mode 100644
index 0000000000000..7948076b1413a
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-10.0.0.jar.sha1
@@ -0,0 +1 @@
+26071742008630779523d08c0b46b2f371ef23a0
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0.jar.sha1
deleted file mode
100644 index fa4c9d2d09d6e..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a3a6950ffc22e76a082e1b3cefb022b9f7870d29 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-10.0.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..ed21c354a6ea1 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-10.0.0.jar.sha1 @@ -0,0 +1 @@ +4a6ff02a1bd34a3c0165da05f714bb8188074bdc \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0.jar.sha1 deleted file mode 100644 index 576b924286d2d..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e71f85b72ed3939039ba8897b28b065dd11918b9 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.0.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..3c82cd3de2aca --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-10.0.0.jar.sha1 @@ -0,0 +1 @@ +4c488697df5038a78e5e65bb9b6da120af62d824 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0.jar.sha1 deleted file mode 100644 index c8c146bbd0d25..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6baa3ae7ab20d6e644cf0bedb271c50a44c0e259 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.0.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..a1682a17383d5 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-10.0.0.jar.sha1 @@ -0,0 +1 @@ +1137b9846ec000b49c70c3fe5f8cd79b7129be22 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0.jar.sha1 deleted file mode 100644 index 54ea0b19f2a7b..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f183e1e8b1eaaa4dec444774a285bb8b66518522 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.0.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..f9abb518f0000 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-10.0.0.jar.sha1 @@ -0,0 +1 @@ +3fd86db5e9748063369db4bed84f1bd2ca62d387 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0.jar.sha1 deleted file mode 100644 index 5442a40f5bba2..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b81a609934e65d12ab9d2d84bc2ea6f56a360e57 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.0.0.jar.sha1 
b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..f8a7e894dc05f --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-10.0.0.jar.sha1 @@ -0,0 +1 @@ +4e6b940b3b934d6de174fedaaeaefd647698648d \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0.jar.sha1 deleted file mode 100644 index 60fd4015cfde0..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bec069f286b45f20b743c81e84202369cd0467e7 \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-10.0.0.jar.sha1 b/server/licenses/lucene-analysis-common-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..fd1abb6a867bc --- /dev/null +++ b/server/licenses/lucene-analysis-common-10.0.0.jar.sha1 @@ -0,0 +1 @@ +13eb016bab14973158554a2e6cdf2abbc5c3eda1 \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-9.12.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.12.0.jar.sha1 deleted file mode 100644 index fd952034f3742..0000000000000 --- a/server/licenses/lucene-analysis-common-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4c2503cfaba37249e20ea877555cb52ee89d1ae1 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-10.0.0.jar.sha1 b/server/licenses/lucene-backward-codecs-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..640543c4c5ec0 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-10.0.0.jar.sha1 @@ -0,0 +1 @@ +8e21f708eb1bbb71ce79cbfea093b6ca913f4abf \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.12.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.12.0.jar.sha1 deleted file mode 100644 index 2993134edd610..0000000000000 --- a/server/licenses/lucene-backward-codecs-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -68fe98c94e9644a584ea1bf525e68d9406fc61ec \ No newline at end of file diff --git a/server/licenses/lucene-core-10.0.0.jar.sha1 b/server/licenses/lucene-core-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..96cc85a5c56a2 --- /dev/null +++ b/server/licenses/lucene-core-10.0.0.jar.sha1 @@ -0,0 +1 @@ +5a9b3f728041df5b054aaaed3c3fd7ff0fed8ee7 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.12.0.jar.sha1 b/server/licenses/lucene-core-9.12.0.jar.sha1 deleted file mode 100644 index e55f896dedb63..0000000000000 --- a/server/licenses/lucene-core-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fdb055d569bb20bfce9618fe2b01c29bab7f290c \ No newline at end of file diff --git a/server/licenses/lucene-grouping-10.0.0.jar.sha1 b/server/licenses/lucene-grouping-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..178d7c5896031 --- /dev/null +++ b/server/licenses/lucene-grouping-10.0.0.jar.sha1 @@ -0,0 +1 @@ +17145d786d31e7ecd68d149ccc3e7ab83270f282 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.12.0.jar.sha1 b/server/licenses/lucene-grouping-9.12.0.jar.sha1 deleted file mode 100644 index 48388974bb38f..0000000000000 --- a/server/licenses/lucene-grouping-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ccf99f8db57aa97b2c1f95c5cc2a11156a043921 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-10.0.0.jar.sha1 b/server/licenses/lucene-highlighter-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..808bd4177fa7c --- /dev/null +++ 
b/server/licenses/lucene-highlighter-10.0.0.jar.sha1 @@ -0,0 +1 @@ +b8324f1b859620912c186b27d9666215ce3d258b \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.12.0.jar.sha1 b/server/licenses/lucene-highlighter-9.12.0.jar.sha1 deleted file mode 100644 index 3d457579da892..0000000000000 --- a/server/licenses/lucene-highlighter-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e93429f66fbcd3b58d81f01223d6ce5688047296 \ No newline at end of file diff --git a/server/licenses/lucene-join-10.0.0.jar.sha1 b/server/licenses/lucene-join-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..cb3fcc7dacb41 --- /dev/null +++ b/server/licenses/lucene-join-10.0.0.jar.sha1 @@ -0,0 +1 @@ +3a4c5bf84c855b011e740f30cb8a23f2ee85e1c1 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.12.0.jar.sha1 b/server/licenses/lucene-join-9.12.0.jar.sha1 deleted file mode 100644 index c5f6d16598a60..0000000000000 --- a/server/licenses/lucene-join-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -14c802d6955eaf11107375a2ada8fe8ec53b3e01 \ No newline at end of file diff --git a/server/licenses/lucene-memory-10.0.0.jar.sha1 b/server/licenses/lucene-memory-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..0b2cdc53ccd97 --- /dev/null +++ b/server/licenses/lucene-memory-10.0.0.jar.sha1 @@ -0,0 +1 @@ +bc0f37a0a06b445555d07e5fe199d73436d51352 \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.12.0.jar.sha1 b/server/licenses/lucene-memory-9.12.0.jar.sha1 deleted file mode 100644 index e7ac44089c006..0000000000000 --- a/server/licenses/lucene-memory-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ffe090339540876b40df792aee51a42af6b3f37f \ No newline at end of file diff --git a/server/licenses/lucene-misc-10.0.0.jar.sha1 b/server/licenses/lucene-misc-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..10d6a7e729e99 --- /dev/null +++ b/server/licenses/lucene-misc-10.0.0.jar.sha1 @@ -0,0 +1 @@ +5f619b32c62bb9405e7af595cf7311113ed62e33 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.12.0.jar.sha1 b/server/licenses/lucene-misc-9.12.0.jar.sha1 deleted file mode 100644 index afb546be4e032..0000000000000 --- a/server/licenses/lucene-misc-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad17704ee90eb926b6d3105f7027485cdadbecd9 \ No newline at end of file diff --git a/server/licenses/lucene-queries-10.0.0.jar.sha1 b/server/licenses/lucene-queries-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..dbc47789c752b --- /dev/null +++ b/server/licenses/lucene-queries-10.0.0.jar.sha1 @@ -0,0 +1 @@ +3d2f98787e27e8e2a65d994c86563edf16dd92f1 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.12.0.jar.sha1 b/server/licenses/lucene-queries-9.12.0.jar.sha1 deleted file mode 100644 index e24756e38dad2..0000000000000 --- a/server/licenses/lucene-queries-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3ac2a62b0b55c5725bb65f0c5454f9f8a401cf43 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-10.0.0.jar.sha1 b/server/licenses/lucene-queryparser-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..3d91eddfd0cb7 --- /dev/null +++ b/server/licenses/lucene-queryparser-10.0.0.jar.sha1 @@ -0,0 +1 @@ +e11886c913058ef20378715dee715d942d04babc \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.12.0.jar.sha1 b/server/licenses/lucene-queryparser-9.12.0.jar.sha1 deleted file mode 100644 index e93e00a063dd0..0000000000000 --- a/server/licenses/lucene-queryparser-9.12.0.jar.sha1 +++ /dev/null @@ -1 
+0,0 @@ -55959399373876f4c184944315458dc6b88fbd81 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-10.0.0.jar.sha1 b/server/licenses/lucene-sandbox-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..017398715a38f --- /dev/null +++ b/server/licenses/lucene-sandbox-10.0.0.jar.sha1 @@ -0,0 +1 @@ +1795ce5b066bda61483c375f6b8e358aaa4f6348 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.12.0.jar.sha1 b/server/licenses/lucene-sandbox-9.12.0.jar.sha1 deleted file mode 100644 index a3fd8446e0dbc..0000000000000 --- a/server/licenses/lucene-sandbox-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f65882536d681c11a1cbc920e5679201101e3603 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-10.0.0.jar.sha1 b/server/licenses/lucene-spatial-extras-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..320c07f51fbaa --- /dev/null +++ b/server/licenses/lucene-spatial-extras-10.0.0.jar.sha1 @@ -0,0 +1 @@ +a701eb363cf0a75ebacd1844398314250abcf592 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.12.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.12.0.jar.sha1 deleted file mode 100644 index b0f11fb667faf..0000000000000 --- a/server/licenses/lucene-spatial-extras-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9d00cc7cc2279822ef6740f0677cafacfb439fa8 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-10.0.0.jar.sha1 b/server/licenses/lucene-spatial3d-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..059721a317028 --- /dev/null +++ b/server/licenses/lucene-spatial3d-10.0.0.jar.sha1 @@ -0,0 +1 @@ +d9e0a8a6084d7657a633c1aa94d750414f5288c4 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.12.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.12.0.jar.sha1 deleted file mode 100644 index 858eee25ac191..0000000000000 --- a/server/licenses/lucene-spatial3d-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e3092632ca1d4427d3ebb2c866ac89d90f5b61ec \ No newline at end of file diff --git a/server/licenses/lucene-suggest-10.0.0.jar.sha1 b/server/licenses/lucene-suggest-10.0.0.jar.sha1 new file mode 100644 index 0000000000000..72db888179d00 --- /dev/null +++ b/server/licenses/lucene-suggest-10.0.0.jar.sha1 @@ -0,0 +1 @@ +d79099abc148a1906e129abbabd5e1b18a20c117 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.12.0.jar.sha1 b/server/licenses/lucene-suggest-9.12.0.jar.sha1 deleted file mode 100644 index 973a7726d845d..0000000000000 --- a/server/licenses/lucene-suggest-9.12.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e1c6636499317ebe498f3490a1ec8b86b8a363dd \ No newline at end of file diff --git a/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java b/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java index 67ee45f4c9306..580f7a1cc576b 100644 --- a/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java +++ b/server/src/main/java/org/apache/lucene/codecs/lucene90/Lucene90DocValuesConsumerWrapper.java @@ -27,12 +27,20 @@ public class Lucene90DocValuesConsumerWrapper implements Closeable { public Lucene90DocValuesConsumerWrapper( SegmentWriteState state, + int skipIndexIntervalSize, String dataCodec, String dataExtension, String metaCodec, String metaExtension ) throws IOException { - lucene90DocValuesConsumer = new Lucene90DocValuesConsumer(state, dataCodec, dataExtension, metaCodec, metaExtension); + lucene90DocValuesConsumer = 
new Lucene90DocValuesConsumer(
+            state,
+            skipIndexIntervalSize,
+            dataCodec,
+            dataExtension,
+            metaCodec,
+            metaExtension
+        );
     }
     public Lucene90DocValuesConsumer getLucene90DocValuesConsumer() {
diff --git a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java
index 963044a3f58d4..ca4e375bd4a02 100644
--- a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java
+++ b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java
@@ -42,6 +42,7 @@
 import org.apache.lucene.search.QueryVisitor;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.ScorerSupplier;
 import org.apache.lucene.search.TwoPhaseIterator;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.BytesRef;
@@ -84,7 +85,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo
         return new ConstantScoreWeight(this, boost) {
             @Override
-            public Scorer scorer(LeafReaderContext context) throws IOException {
+            public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
                 final BinaryDocValues values = context.reader().getBinaryDocValues(fieldName);
                 if (values == null) {
                     return null;
                 }
@@ -129,7 +130,9 @@ public float matchCost() {
                         return 4; // at most 4 comparisons
                     }
                 };
-                return new ConstantScoreScorer(this, score(), scoreMode, iterator);
+
+                final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, iterator);
+                return new DefaultScorerSupplier(scorer);
             }
             @Override
diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
index 34e1e210d7137..2edc684677ea5 100644
--- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
+++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
@@ -47,6 +47,7 @@
 import org.apache.lucene.search.QueryVisitor;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.IOSupplier;
 import org.apache.lucene.util.InPlaceMergeSorter;
 import java.io.IOException;
@@ -208,7 +209,12 @@ private TermStates adjustTTF(IndexReaderContext readerContext, TermStates termCo
         int df = termContext.docFreq();
         long ttf = sumTTF;
         for (int i = 0; i < len; i++) {
-            TermState termState = termContext.get(leaves.get(i));
+            final IOSupplier<TermState> termStateSupplier = termContext.get(leaves.get(i));
+            if (termStateSupplier == null) {
+                continue;
+            }
+
+            final TermState termState = termStateSupplier.get();
             if (termState == null) {
                 continue;
             }
@@ -232,10 +238,16 @@ private static TermStates adjustDF(IndexReaderContext readerContext, TermStates
         }
         TermStates newCtx = new TermStates(readerContext);
         for (int i = 0; i < len; ++i) {
-            TermState termState = ctx.get(leaves.get(i));
+            final IOSupplier<TermState> termStateSupplier = ctx.get(leaves.get(i));
+            if (termStateSupplier == null) {
+                continue;
+            }
+
+            final TermState termState = termStateSupplier.get();
             if (termState == null) {
                 continue;
             }
+
             newCtx.register(termState, i, newDocFreq, newTTF);
             newDocFreq = 0;
             newTTF = 0;
@@ -385,7 +397,7 @@ protected Query topLevelQuery(Term[] terms, TermStates[] ctx, int[] docFreqs, in
         if (low.clauses().isEmpty()) {
             BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
             for (BooleanClause booleanClause : high) {
-                queryBuilder.add(booleanClause.getQuery(), Occur.MUST);
+
queryBuilder.add(booleanClause.query(), Occur.MUST); } return queryBuilder.build(); } else if (high.clauses().isEmpty()) { diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java index 961587113173d..4ab1eee4e089f 100644 --- a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java +++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java @@ -192,10 +192,10 @@ public static CollapseTopFieldDocs merge(Sort sort, int start, int size, Collaps final CollapseTopFieldDocs shard = shardHits[shardIDX]; // totalHits can be non-zero even if no hits were // collected, when searchAfter was used: - totalHitCount += shard.totalHits.value; + totalHitCount += shard.totalHits.value(); // If any hit count is a lower bound then the merged // total hit count is a lower bound as well - if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + if (shard.totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } if (CollectionUtils.isEmpty(shard.scoreDocs) == false) { diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java index d747bb47a30ad..661ab1374bd28 100644 --- a/server/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java +++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java @@ -229,7 +229,7 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { public boolean advanceExact(int target) throws IOException { if (sorted.advanceExact(target)) { ord = (int) sorted.nextOrd(); - if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_DOCS) { throw new IllegalStateException( "failed to collapse " + target + ", the collapse field must be single valued" ); diff --git a/server/src/main/java/org/apache/lucene/util/automaton/MinimizationOperations.java b/server/src/main/java/org/apache/lucene/util/automaton/MinimizationOperations.java new file mode 100644 index 0000000000000..d387b36481e4a --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/automaton/MinimizationOperations.java @@ -0,0 +1,331 @@ +/* + * dk.brics.automaton + * + * Copyright (c) 2001-2009 Anders Moeller + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.apache.lucene.util.automaton; + +import org.apache.lucene.internal.hppc.IntArrayList; +import org.apache.lucene.internal.hppc.IntCursor; +import org.apache.lucene.internal.hppc.IntHashSet; + +import java.util.BitSet; +import java.util.LinkedList; + +/** + * Operations for minimizing automata. + * Copied from https://github.com/apache/lucene/blob/main/lucene/core/src/test/org/apache/lucene/util/automaton/MinimizationOperations.java + * + * @lucene.experimental + */ +public final class MinimizationOperations { + + private MinimizationOperations() {} + + /** + * Minimizes (and determinizes if not already deterministic) the given automaton using Hopcroft's + * algorithm. + * + * @param determinizeWorkLimit maximum effort to spend determinizing the automaton. Set higher to + * allow more complex queries and lower to prevent memory exhaustion. Use {@link + * Operations#DEFAULT_DETERMINIZE_WORK_LIMIT} as a decent default if you don't otherwise know + * what to specify. + */ + public static Automaton minimize(Automaton a, int determinizeWorkLimit) { + + if (a.getNumStates() == 0 || (a.isAccept(0) == false && a.getNumTransitions(0) == 0)) { + // Fastmatch for common case + return new Automaton(); + } + a = Operations.determinize(a, determinizeWorkLimit); + // a.writeDot("adet"); + if (a.getNumTransitions(0) == 1) { + Transition t = new Transition(); + a.getTransition(0, 0, t); + if (t.dest == 0 && t.min == Character.MIN_CODE_POINT && t.max == Character.MAX_CODE_POINT) { + // Accepts all strings + return a; + } + } + a = Operations.totalize(a); + // a.writeDot("atot"); + + // initialize data structures + final int[] sigma = a.getStartPoints(); + final int sigmaLen = sigma.length, statesLen = a.getNumStates(); + + final IntArrayList[][] reverse = new IntArrayList[statesLen][sigmaLen]; + final IntHashSet[] partition = new IntHashSet[statesLen]; + final IntArrayList[] splitblock = new IntArrayList[statesLen]; + final int[] block = new int[statesLen]; + final StateList[][] active = new StateList[statesLen][sigmaLen]; + final StateListNode[][] active2 = new StateListNode[statesLen][sigmaLen]; + final LinkedList pending = new LinkedList<>(); + final BitSet pending2 = new BitSet(sigmaLen * statesLen); + final BitSet split = new BitSet(statesLen), refine = new BitSet(statesLen), refine2 = new BitSet(statesLen); + for (int q = 0; q < statesLen; q++) { + splitblock[q] = new IntArrayList(); + partition[q] = new IntHashSet(); + for (int x = 0; x < sigmaLen; x++) { + active[q][x] = StateList.EMPTY; + } + } + // find initial partition and reverse edges + Transition transition = new Transition(); + for (int q = 0; q < statesLen; q++) { + final int j = a.isAccept(q) ? 
0 : 1; + partition[j].add(q); + block[q] = j; + transition.source = q; + transition.transitionUpto = -1; + for (int x = 0; x < sigmaLen; x++) { + final IntArrayList[] r = reverse[a.next(transition, sigma[x])]; + if (r[x] == null) { + r[x] = new IntArrayList(); + } + r[x].add(q); + } + } + // initialize active sets + for (int j = 0; j <= 1; j++) { + for (int x = 0; x < sigmaLen; x++) { + for (IntCursor qCursor : partition[j]) { + int q = qCursor.value; + if (reverse[q][x] != null) { + StateList stateList = active[j][x]; + if (stateList == StateList.EMPTY) { + stateList = new StateList(); + active[j][x] = stateList; + } + active2[q][x] = stateList.add(q); + } + } + } + } + + // initialize pending + for (int x = 0; x < sigmaLen; x++) { + final int j = (active[0][x].size <= active[1][x].size) ? 0 : 1; + pending.add(new IntPair(j, x)); + pending2.set(x * statesLen + j); + } + + // process pending until fixed point + int k = 2; + // System.out.println("start min"); + while (!pending.isEmpty()) { + // System.out.println(" cycle pending"); + final IntPair ip = pending.removeFirst(); + final int p = ip.n1; + final int x = ip.n2; + // System.out.println(" pop n1=" + ip.n1 + " n2=" + ip.n2); + pending2.clear(x * statesLen + p); + // find states that need to be split off their blocks + for (StateListNode m = active[p][x].first; m != null; m = m.next) { + final IntArrayList r = reverse[m.q][x]; + if (r != null) { + for (IntCursor iCursor : r) { + final int i = iCursor.value; + if (!split.get(i)) { + split.set(i); + final int j = block[i]; + splitblock[j].add(i); + if (!refine2.get(j)) { + refine2.set(j); + refine.set(j); + } + } + } + } + } + + // refine blocks + for (int j = refine.nextSetBit(0); j >= 0; j = refine.nextSetBit(j + 1)) { + final IntArrayList sb = splitblock[j]; + if (sb.size() < partition[j].size()) { + final IntHashSet b1 = partition[j]; + final IntHashSet b2 = partition[k]; + for (IntCursor iCursor : sb) { + final int s = iCursor.value; + b1.remove(s); + b2.add(s); + block[s] = k; + for (int c = 0; c < sigmaLen; c++) { + final StateListNode sn = active2[s][c]; + if (sn != null && sn.sl == active[j][c]) { + sn.remove(); + StateList stateList = active[k][c]; + if (stateList == StateList.EMPTY) { + stateList = new StateList(); + active[k][c] = stateList; + } + active2[s][c] = stateList.add(s); + } + } + } + // update pending + for (int c = 0; c < sigmaLen; c++) { + final int aj = active[j][c].size, ak = active[k][c].size, ofs = c * statesLen; + if (!pending2.get(ofs + j) && 0 < aj && aj <= ak) { + pending2.set(ofs + j); + pending.add(new IntPair(j, c)); + } else { + pending2.set(ofs + k); + pending.add(new IntPair(k, c)); + } + } + k++; + } + refine2.clear(j); + for (IntCursor iCursor : sb) { + final int s = iCursor.value; + split.clear(s); + } + sb.clear(); + } + refine.clear(); + } + + Automaton result = new Automaton(); + + Transition t = new Transition(); + + // System.out.println(" k=" + k); + + // make a new state for each equivalence class, set initial state + int[] stateMap = new int[statesLen]; + int[] stateRep = new int[k]; + + result.createState(); + + // System.out.println("min: k=" + k); + for (int n = 0; n < k; n++) { + // System.out.println(" n=" + n); + + boolean isInitial = partition[n].contains(0); + + int newState; + if (isInitial) { + // System.out.println(" isInitial!"); + newState = 0; + } else { + newState = result.createState(); + } + + // System.out.println(" newState=" + newState); + + for (IntCursor qCursor : partition[n]) { + int q = qCursor.value; + 
stateMap[q] = newState; + // System.out.println(" q=" + q + " isAccept?=" + a.isAccept(q)); + result.setAccept(newState, a.isAccept(q)); + stateRep[newState] = q; // select representative + } + } + + // build transitions and set acceptance + for (int n = 0; n < k; n++) { + int numTransitions = a.initTransition(stateRep[n], t); + for (int i = 0; i < numTransitions; i++) { + a.getNextTransition(t); + // System.out.println(" add trans"); + result.addTransition(n, stateMap[t.dest], t.min, t.max); + } + } + result.finishState(); + // System.out.println(result.getNumStates() + " states"); + + return Operations.removeDeadStates(result); + } + + static final class IntPair { + + final int n1, n2; + + IntPair(int n1, int n2) { + this.n1 = n1; + this.n2 = n2; + } + } + + static final class StateList { + + // Empty list that should never be mutated, used as a memory saving optimization instead of null + // so we don't need to branch the read path in #minimize + static final StateList EMPTY = new StateList(); + + int size; + + StateListNode first, last; + + StateListNode add(int q) { + assert this != EMPTY; + return new StateListNode(q, this); + } + } + + static final class StateListNode { + + final int q; + + StateListNode next, prev; + + final StateList sl; + + StateListNode(int q, StateList sl) { + this.q = q; + this.sl = sl; + if (sl.size++ == 0) sl.first = sl.last = this; + else { + sl.last.next = this; + prev = sl.last; + sl.last = this; + } + } + + void remove() { + sl.size--; + if (sl.first == this) sl.first = next; + else prev.next = next; + if (sl.last == this) sl.last = prev; + else next.prev = prev; + } + } +} diff --git a/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java b/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java index 4260d34ead7c9..4d45485ce2f7c 100644 --- a/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java +++ b/server/src/main/java/org/apache/lucene/util/packed/XPackedInts.java @@ -534,7 +534,7 @@ public static Reader getDirectReader(IndexInput in) throws IOException { */ public static Mutable getMutable(int valueCount, int bitsPerValue, float acceptableOverheadRatio) { final FormatAndBits formatAndBits = fastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); - return getMutable(valueCount, formatAndBits.bitsPerValue, formatAndBits.format); + return getMutable(valueCount, formatAndBits.bitsPerValue(), formatAndBits.format()); } /** @@ -629,7 +629,13 @@ public static Writer getWriter(DataOutput out, int valueCount, int bitsPerValue, assert valueCount >= 0; final FormatAndBits formatAndBits = fastestFormatAndBits(valueCount, bitsPerValue, acceptableOverheadRatio); - final XWriter writer = getWriterNoHeader(out, formatAndBits.format, valueCount, formatAndBits.bitsPerValue, DEFAULT_BUFFER_SIZE); + final XWriter writer = getWriterNoHeader( + out, + formatAndBits.format(), + valueCount, + formatAndBits.bitsPerValue(), + DEFAULT_BUFFER_SIZE + ); writer.writeHeader(); return writer; } diff --git a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java index bce8d9fb2b1ca..73d821ff472a2 100644 --- a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java @@ -77,7 +77,7 @@ SearchSortValuesAndFormats getBottomSortValues() { } synchronized void consumeTopDocs(TopFieldDocs topDocs, DocValueFormat[] 
sortValuesFormat) {
-        totalHits += topDocs.totalHits.value;
+        totalHits += topDocs.totalHits.value();
         if (validateShardSortFields(topDocs.fields) == false) {
             return;
         }
     }
diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
index 161a103cdf36a..452bacfc6fd1f 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseController.java
@@ -802,8 +802,8 @@ TotalHits getTotalHits() {
 
         void add(TopDocsAndMaxScore topDocs, boolean timedOut, Boolean terminatedEarly) {
             if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) {
-                totalHits += topDocs.topDocs.totalHits.value;
-                if (topDocs.topDocs.totalHits.relation == Relation.GREATER_THAN_OR_EQUAL_TO) {
+                totalHits += topDocs.topDocs.totalHits.value();
+                if (topDocs.topDocs.totalHits.relation() == Relation.GREATER_THAN_OR_EQUAL_TO) {
                     totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
                 }
             }
diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
index 485dd43a5999c..0e0b4e9be261a 100644
--- a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
+++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
@@ -703,7 +703,7 @@ String jvmVendor() {
         }
 
         String javaVersion() {
-            return Constants.JAVA_VERSION;
+            return Runtime.version().toString();
         }
     }
diff --git a/server/src/main/java/org/opensearch/common/lucene/Lucene.java b/server/src/main/java/org/opensearch/common/lucene/Lucene.java
index 361ee859e3e6a..a999cf321eba6 100644
--- a/server/src/main/java/org/opensearch/common/lucene/Lucene.java
+++ b/server/src/main/java/org/opensearch/common/lucene/Lucene.java
@@ -423,8 +423,8 @@ public static ScoreDoc readScoreDoc(StreamInput in) throws IOException {
     private static final Class<?> GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass();
 
     public static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws IOException {
-        out.writeVLong(totalHits.value);
-        out.writeEnum(totalHits.relation);
+        out.writeVLong(totalHits.value());
+        out.writeEnum(totalHits.relation());
     }
 
     public static void writeTopDocs(StreamOutput out, TopDocsAndMaxScore topDocs) throws IOException {
diff --git a/server/src/main/java/org/opensearch/common/lucene/index/FilterableTermsEnum.java b/server/src/main/java/org/opensearch/common/lucene/index/FilterableTermsEnum.java
index 224c5b600c930..4e1ce331571b6 100644
--- a/server/src/main/java/org/opensearch/common/lucene/index/FilterableTermsEnum.java
+++ b/server/src/main/java/org/opensearch/common/lucene/index/FilterableTermsEnum.java
@@ -50,6 +50,7 @@
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOBooleanSupplier;
 import org.opensearch.common.Nullable;
 
 import java.io.IOException;
@@ -256,4 +257,9 @@ public ImpactsEnum impacts(int flags) throws IOException {
     public BytesRef next() throws IOException {
         throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
     }
+
+    @Override
+    public IOBooleanSupplier prepareSeekExact(BytesRef text) throws IOException {
+        throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
+    }
 }
diff --git a/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java
b/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java index ada5bc0598478..1c548e0c10bb6 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java @@ -102,7 +102,7 @@ public static AutomatonQuery createAutomatonQuery(Term term, Automaton automaton if (method == null) { method = MultiTermQuery.CONSTANT_SCORE_REWRITE; } - return new AutomatonQuery(term, automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return new AutomatonQuery(term, automaton, false, method); } /** diff --git a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java index 125eab9512be8..d042938280717 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/Queries.java @@ -37,15 +37,15 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.opensearch.OpenSearchException; import org.opensearch.common.Nullable; @@ -91,7 +91,7 @@ public static Query newLenientFieldQuery(String field, RuntimeException e) { * Creates a new non-nested docs query */ public static Query newNonNestedFilter() { - return new DocValuesFieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME); + return new FieldExistsQuery(SeqNoFieldMapper.PRIMARY_TERM_NAME); } public static BooleanQuery filtered(@Nullable Query query, @Nullable Query filter) { @@ -137,7 +137,7 @@ public static Query applyMinimumShouldMatch(BooleanQuery query, @Nullable String } int optionalClauses = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } @@ -232,7 +232,7 @@ public Explanation explain(LeafReaderContext context, int doc) { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return null; } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 65cffa208a47f..399f71de05371 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -43,7 +43,7 @@ import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import 
org.apache.lucene.search.Query; import org.apache.lucene.util.AttributeSource; @@ -67,7 +67,7 @@ public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap private final boolean hardLimit; public SpanBooleanQueryRewriteWithMaxClause() { - this(BooleanQuery.getMaxClauseCount(), true); + this(IndexSearcher.getMaxClauseCount(), true); } public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) { @@ -84,11 +84,11 @@ public boolean isHardLimit() { } @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public SpanQuery rewrite(IndexSearcher searcher, MultiTermQuery query) throws IOException { final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { @Override - public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { - Collection queries = collectTerms(reader, query); + public Query rewrite(IndexSearcher searcher, MultiTermQuery query) throws IOException { + Collection queries = collectTerms(searcher.getIndexReader(), query); if (queries.size() == 0) { return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); } else if (queries.size() == 1) { @@ -124,7 +124,7 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu + query.toString() + " ] " + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " - + BooleanQuery.getMaxClauseCount() + + IndexSearcher.getMaxClauseCount() + "]" ); } else { @@ -137,6 +137,6 @@ private Collection collectTerms(IndexReader reader, MultiTermQuery qu return queries; } }; - return (SpanQuery) delegate.rewrite(reader, query); + return (SpanQuery) delegate.rewrite(searcher, query); } } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java index 49148890abd55..539ebb58d8b5d 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/XMoreLikeThis.java @@ -65,6 +65,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.similarities.ClassicSimilarity; @@ -256,7 +257,7 @@ public final class XMoreLikeThis { /** * Return a Query with no more than this many terms. 
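Note: the SpanBooleanQueryRewriteWithMaxClause and XMoreLikeThis changes here, like several later hunks, replace the removed BooleanQuery.getMaxClauseCount() and BooleanQuery.TooManyClauses with their IndexSearcher equivalents. A minimal sketch of the new guard (the helper method below is hypothetical):

import org.apache.lucene.search.IndexSearcher;

public class MaxClauseGuardExample {
    // Hypothetical helper showing where the clause budget lives in Lucene 10.
    static void checkClauseBudget(int expansions) {
        if (expansions > IndexSearcher.getMaxClauseCount()) {
            throw new IndexSearcher.TooManyClauses();
        }
    }

    public static void main(String[] args) {
        checkClauseBudget(10);
        System.out.println("budget: " + IndexSearcher.getMaxClauseCount());
    }
}
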
* - * @see BooleanQuery#getMaxClauseCount + * @see IndexSearcher#getMaxClauseCount * @see #getMaxQueryTerms * @see #setMaxQueryTerms */ @@ -711,7 +712,7 @@ private void addToQuery(PriorityQueue q, BooleanQuery.Builder query) try { query.add(tq, BooleanClause.Occur.SHOULD); - } catch (BooleanQuery.TooManyClauses ignore) { + } catch (IndexSearcher.TooManyClauses ignore) { break; } } diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java index cb93e80288a98..da80946dfa567 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/FunctionScoreQuery.java @@ -412,12 +412,12 @@ private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IO } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { Scorer scorer = functionScorer(context); if (scorer != null && minScore != null) { scorer = new MinScoreScorer(this, scorer, minScore); } - return scorer; + return new DefaultScorerSupplier(scorer); } @Override @@ -518,7 +518,7 @@ private FunctionFactorScorer( CombineFunction scoreCombiner, boolean needsScores ) throws IOException { - super(scorer, w); + super(scorer); this.scoreMode = scoreMode; this.functions = functions; this.leafFunctions = leafFunctions; diff --git a/server/src/main/java/org/opensearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/opensearch/common/lucene/search/function/MinScoreScorer.java index 30be06489dea5..e2b6b9a8f4feb 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/function/MinScoreScorer.java @@ -53,7 +53,7 @@ final class MinScoreScorer extends Scorer { private float curScore; MinScoreScorer(Weight weight, Scorer scorer, float minScore) { - super(weight); + super(); this.in = scorer; this.minScore = minScore; } diff --git a/server/src/main/java/org/opensearch/index/codec/CodecService.java b/server/src/main/java/org/opensearch/index/codec/CodecService.java index 3a93795ef61ec..fe672444c46a9 100644 --- a/server/src/main/java/org/opensearch/index/codec/CodecService.java +++ b/server/src/main/java/org/opensearch/index/codec/CodecService.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec.Mode; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec.Mode; import org.opensearch.common.Nullable; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.IndexSettings; @@ -70,10 +70,10 @@ public CodecService(@Nullable MapperService mapperService, IndexSettings indexSe final MapBuilder codecs = MapBuilder.newMapBuilder(); assert null != indexSettings; if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene912Codec()); - codecs.put(LZ4, new Lucene912Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene912Codec(Mode.BEST_COMPRESSION)); - codecs.put(ZLIB, new Lucene912Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene100Codec()); + codecs.put(LZ4, new Lucene100Codec()); + 
codecs.put(BEST_COMPRESSION_CODEC, new Lucene100Codec(Mode.BEST_COMPRESSION)); + codecs.put(ZLIB, new Lucene100Codec(Mode.BEST_COMPRESSION)); } else { // CompositeCodec still delegates to PerFieldMappingPostingFormatCodec // We can still support all the compression codecs when composite index is present diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index 0ed8c3880f2ea..e795d80279d2c 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -36,8 +36,8 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.opensearch.common.lucene.Lucene; import org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat; import org.opensearch.index.codec.fuzzy.FuzzySetFactory; @@ -59,7 +59,7 @@ * * @opensearch.internal */ -public class PerFieldMappingPostingFormatCodec extends Lucene912Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene100Codec { private final Logger logger; private final MapperService mapperService; private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java index 674773bb86354..5c3a8aa23595f 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/CompositeCodecFactory.java @@ -10,8 +10,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.index.codec.composite.composite100.Composite100Codec; import org.opensearch.index.codec.composite.composite912.Composite912Codec; import org.opensearch.index.mapper.MapperService; @@ -38,10 +39,10 @@ public CompositeCodecFactory() {} public Map getCompositeIndexCodecs(MapperService mapperService, Logger logger) { Map codecs = new HashMap<>(); - codecs.put(DEFAULT_CODEC, new Composite912Codec(Lucene912Codec.Mode.BEST_SPEED, mapperService, logger)); - codecs.put(LZ4, new Composite912Codec(Lucene912Codec.Mode.BEST_SPEED, mapperService, logger)); - codecs.put(BEST_COMPRESSION_CODEC, new Composite912Codec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, logger)); - codecs.put(ZLIB, new Composite912Codec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, logger)); + codecs.put(DEFAULT_CODEC, new Composite100Codec(Lucene100Codec.Mode.BEST_SPEED, mapperService, logger)); + codecs.put(LZ4, new Composite100Codec(Lucene100Codec.Mode.BEST_SPEED, mapperService, logger)); + codecs.put(BEST_COMPRESSION_CODEC, new Composite100Codec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, logger)); + codecs.put(ZLIB, new Composite100Codec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, logger)); return codecs; } } diff --git 
a/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java index 4b3f62b6171da..1c300003dd9bf 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/LuceneDocValuesConsumerFactory.java @@ -29,6 +29,7 @@ public class LuceneDocValuesConsumerFactory { public static DocValuesConsumer getDocValuesConsumerForCompositeCodec( SegmentWriteState state, + int skipIndexIntervalSize, String dataCodec, String dataExtension, String metaCodec, @@ -36,6 +37,7 @@ public static DocValuesConsumer getDocValuesConsumerForCompositeCodec( ) throws IOException { Lucene90DocValuesConsumerWrapper lucene90DocValuesConsumerWrapper = new Lucene90DocValuesConsumerWrapper( state, + skipIndexIntervalSize, dataCodec, dataExtension, metaCodec, diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite100/Composite100Codec.java b/server/src/main/java/org/opensearch/index/codec/composite/composite100/Composite100Codec.java new file mode 100644 index 0000000000000..5e90a69ab3474 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite100/Composite100Codec.java @@ -0,0 +1,58 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.composite.composite100; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.index.codec.PerFieldMappingPostingFormatCodec; +import org.opensearch.index.codec.composite.composite912.Composite912DocValuesFormat; +import org.opensearch.index.mapper.MapperService; + +/** + * Extends the Codec to support new file formats for composite indices eg: star tree index + * based on the mappings. + * + * @opensearch.experimental + */ +@ExperimentalApi +public class Composite100Codec extends FilterCodec { + public static final String COMPOSITE_INDEX_CODEC_NAME = "Composite100Codec"; + private final MapperService mapperService; + + // needed for SPI - this is used in reader path + public Composite100Codec() { + this(COMPOSITE_INDEX_CODEC_NAME, new Lucene100Codec(), null); + } + + public Composite100Codec(Lucene100Codec.Mode compressionMode, MapperService mapperService, Logger logger) { + this(COMPOSITE_INDEX_CODEC_NAME, new PerFieldMappingPostingFormatCodec(compressionMode, mapperService, logger), mapperService); + } + + /** + * Sole constructor. When subclassing this codec, create a no-arg ctor and pass the delegate codec and a unique name to + * this ctor. 
+ * + * @param name name of the codec + * @param delegate codec delegate + * @param mapperService mapper service instance + */ + protected Composite100Codec(String name, Codec delegate, MapperService mapperService) { + super(name, delegate); + this.mapperService = mapperService; + } + + @Override + public DocValuesFormat docValuesFormat() { + return new Composite912DocValuesFormat(mapperService); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite100/package-info.java b/server/src/main/java/org/opensearch/index/codec/composite/composite100/package-info.java new file mode 100644 index 0000000000000..b767ddbe8901f --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite100/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Responsible for handling all composite index codecs and operations associated with Composite100 codec + */ +package org.opensearch.index.codec.composite.composite100; diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java index 6f7a3c6852cf6..35b28db54733a 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912Codec.java @@ -8,13 +8,11 @@ package org.opensearch.index.codec.composite.composite912; -import org.apache.logging.log4j.Logger; +import org.apache.lucene.backward_codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.opensearch.common.annotation.ExperimentalApi; -import org.opensearch.index.codec.PerFieldMappingPostingFormatCodec; import org.opensearch.index.mapper.MapperService; /** @@ -33,10 +31,6 @@ public Composite912Codec() { this(COMPOSITE_INDEX_CODEC_NAME, new Lucene912Codec(), null); } - public Composite912Codec(Lucene912Codec.Mode compressionMode, MapperService mapperService, Logger logger) { - this(COMPOSITE_INDEX_CODEC_NAME, new PerFieldMappingPostingFormatCodec(compressionMode, mapperService, logger), mapperService); - } - /** * Sole constructor. When subclassing this codec, create a no-arg ctor and pass the delegate codec and a unique name to * this ctor. 
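Note: with Lucene912Codec relocated to backward_codecs (read-only), new segments are written through Lucene100Codec, and the stored-fields compression Mode enum carries over unchanged. An illustrative sketch, not part of the patch itself:

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene100.Lucene100Codec;
import org.apache.lucene.index.IndexWriterConfig;

public class CodecSelectionExample {
    public static void main(String[] args) {
        // Same two compression trade-offs as before, now exposed on Lucene100Codec.
        Codec bestSpeed = new Lucene100Codec(Lucene100Codec.Mode.BEST_SPEED);
        Codec bestCompression = new Lucene100Codec(Lucene100Codec.Mode.BEST_COMPRESSION);
        IndexWriterConfig config = new IndexWriterConfig().setCodec(bestCompression);
        System.out.println(bestSpeed.getName() + " / " + config.getCodec().getName());
    }
}
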
diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java index 637d3250fda3f..c78bb8c17b5f4 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesReader.java @@ -15,6 +15,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; @@ -100,7 +101,7 @@ public Composite912DocValuesReader(DocValuesProducer producer, SegmentReadState ); // initialize data input - metaIn = readState.directory.openChecksumInput(metaFileName, readState.context); + metaIn = readState.directory.openChecksumInput(metaFileName); Throwable priorE = null; try { CodecUtil.checkIndexHeader( @@ -304,4 +305,8 @@ public static SortedNumericDocValues getSortedNumericDocValues(SortedNumericDocV return sortedNumeric == null ? DocValues.emptySortedNumeric() : sortedNumeric; } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return delegate.getSkipper(field); + } } diff --git a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java index dd35091dece2f..20fcee7392734 100644 --- a/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java +++ b/server/src/main/java/org/opensearch/index/codec/composite/composite912/Composite912DocValuesWriter.java @@ -103,6 +103,7 @@ public Composite912DocValuesWriter(DocValuesConsumer delegate, SegmentWriteState this.compositeDocValuesConsumer = LuceneDocValuesConsumerFactory.getDocValuesConsumerForCompositeCodec( consumerWriteState, + 4096, /* Lucene90DocValuesFormat#DEFAULT_SKIP_INDEX_INTERVAL_SIZE */ Composite912DocValuesFormat.DATA_DOC_VALUES_CODEC, Composite912DocValuesFormat.DATA_DOC_VALUES_EXTENSION, Composite912DocValuesFormat.META_DOC_VALUES_CODEC, diff --git a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java index 2aae0d4ca7e29..4f56c01d6b941 100644 --- a/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java +++ b/server/src/main/java/org/opensearch/index/compositeindex/datacube/startree/utils/StarTreeUtils.java @@ -7,6 +7,7 @@ */ package org.opensearch.index.compositeindex.datacube.startree.utils; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexOptions; @@ -96,6 +97,7 @@ public static FieldInfo getFieldInfo(String fieldName, DocValuesType docValuesTy true, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, docValuesType, + DocValuesSkipIndexType.RANGE, -1, Collections.emptyMap(), 0, @@ -129,6 +131,7 @@ public static FieldInfo getFieldInfo(String fieldName, DocValuesType docValuesTy true, 
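Note on the Composite912DocValuesReader hunk above: Lucene 10 drops the IOContext parameter from Directory.openChecksumInput, which now always opens the file for a single sequential read. A self-contained sketch under that assumption (the file name is made up):

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

public class ChecksumInputExample {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory()) {
            try (IndexOutput out = dir.createOutput("meta.bin", IOContext.DEFAULT)) {
                out.writeInt(42);
            }
            // No IOContext argument anymore; was: dir.openChecksumInput("meta.bin", context)
            try (ChecksumIndexInput in = dir.openChecksumInput("meta.bin")) {
                System.out.println(in.readInt());
            }
        }
    }
}
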
IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, docValuesType, + DocValuesSkipIndexType.RANGE, -1, Collections.emptyMap(), 0, diff --git a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java index 234c67cc637f1..ca6c1f74bb535 100644 --- a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java +++ b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesRangeQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.opensearch.common.Numbers; @@ -106,7 +107,7 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { SortedNumericDocValues values = getValues(context.reader(), field); if (values == null) { return null; @@ -148,7 +149,8 @@ public float matchCost() { } }; } - return new ConstantScoreScorer(this, score(), scoreMode, iterator); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } }; } diff --git a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java index 669dbb1e1bfc7..7f4f47054207e 100644 --- a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java +++ b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.LongHashSet; @@ -93,7 +94,7 @@ public boolean isCacheable(LeafReaderContext ctx) { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { SortedNumericDocValues values = getValues(context.reader(), field); if (values == null) { return null; @@ -139,7 +140,8 @@ public float matchCost() { } }; } - return new ConstantScoreScorer(this, score(), scoreMode, iterator); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } }; } diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index c945d082c9a35..4e3d04ae9e5f8 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -983,7 +983,7 @@ private Map getSegmentFileSizes(SegmentReader segmentReader) { try { directory = engineConfig.getCodec() .compoundFormat() - .getCompoundReader(segmentReader.directory(), segmentCommitInfo.info, IOContext.READ); + .getCompoundReader(segmentReader.directory(), segmentCommitInfo.info, IOContext.READONCE); } catch (IOException e) { logger.warn( () -> new 
ParameterizedMessage( diff --git a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java index 00d15478f9866..dd0874ea565ef 100644 --- a/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/opensearch/index/engine/LuceneChangesSnapshot.java @@ -122,7 +122,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.indexSearcher.setQueryCache(null); this.parallelArray = new ParallelArray(this.searchBatchSize); final TopDocs topDocs = searchOperations(null, accurateCount); - this.totalHits = Math.toIntExact(topDocs.totalHits.value); + this.totalHits = Math.toIntExact(topDocs.totalHits.value()); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); } diff --git a/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java index 493ccbb69a244..112346299a427 100644 --- a/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/opensearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -36,6 +36,7 @@ import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CodecReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterNumericDocValues; @@ -217,6 +218,11 @@ public void checkIntegrity() throws IOException { public void close() throws IOException { in.close(); } + + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return in.getSkipper(field); + } } /** diff --git a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java index 94b8c6181de4e..e459358c16934 100644 --- a/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java +++ b/server/src/main/java/org/opensearch/index/engine/TranslogLeafReader.java @@ -33,10 +33,11 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafMetaData; @@ -82,6 +83,7 @@ public final class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -101,6 +103,7 @@ public final class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -120,6 +123,7 @@ public final class TranslogLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -202,11 +206,6 @@ public LeafMetaData getMetaData() { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docID) { - throw new UnsupportedOperationException(); - } - @Override 
public TermVectors termVectors() throws IOException { throw new UnsupportedOperationException(); @@ -222,11 +221,6 @@ public int maxDoc() { return 1; } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { return new StoredFields() { @@ -282,4 +276,9 @@ public void searchNearestVectors(String field, byte[] target, KnnCollector k, Bi public void searchNearestVectors(String field, float[] target, KnnCollector k, Bits acceptDocs) throws IOException { throw new UnsupportedOperationException(); } + + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + throw new UnsupportedOperationException(); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/FieldData.java b/server/src/main/java/org/opensearch/index/fielddata/FieldData.java index 6db6bbccacae5..e91dc7a5045ab 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/FieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/FieldData.java @@ -435,7 +435,7 @@ public boolean advanceExact(int doc) throws IOException { return false; } for (int i = 0;; ++i) { - if (values.nextOrd() == SortedSetDocValues.NO_MORE_ORDS) { + if (values.nextOrd() == SortedSetDocValues.NO_MORE_DOCS) { count = i; break; } diff --git a/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalMapping.java b/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalMapping.java index cc5415aacd276..bd2ce5c100ae4 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalMapping.java +++ b/server/src/main/java/org/opensearch/index/fielddata/ordinals/GlobalOrdinalMapping.java @@ -77,8 +77,8 @@ public boolean advanceExact(int target) throws IOException { @Override public long nextOrd() throws IOException { long segmentOrd = values.nextOrd(); - if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) { - return SortedSetDocValues.NO_MORE_ORDS; + if (segmentOrd == SortedSetDocValues.NO_MORE_DOCS) { + return SortedSetDocValues.NO_MORE_DOCS; } else { return getGlobalOrd(segmentOrd); } diff --git a/server/src/main/java/org/opensearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/opensearch/index/fielddata/ordinals/MultiOrdinals.java index daccb5dfe9fca..ea91e6ddd1820 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/server/src/main/java/org/opensearch/index/fielddata/ordinals/MultiOrdinals.java @@ -68,13 +68,13 @@ public static boolean significantlySmallerThanSinglePackedOrdinals( float acceptableOverheadRatio ) { int bitsPerOrd = PackedInts.bitsRequired(numOrds); - bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue; + bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue(); // Compute the worst-case number of bits per value for offsets in the worst case, eg. 
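Note on the SortedSetDocValues changes above: Lucene 10 removes the NO_MORE_ORDS sentinel; this patch substitutes NO_MORE_DOCS, while the sentinel-free pattern is to bound the loop with docValueCount(). A sketch of that alternative (the helper method is made up):

import java.io.IOException;

import org.apache.lucene.index.SortedSetDocValues;

public class OrdinalIterationExample {
    // Hypothetical helper: collect a document's ordinals without relying on a sentinel value.
    static long[] ordsForDoc(SortedSetDocValues values, int docId) throws IOException {
        if (values.advanceExact(docId) == false) {
            return new long[0];
        }
        long[] ords = new long[values.docValueCount()];
        for (int i = 0; i < ords.length; i++) {
            ords[i] = values.nextOrd();
        }
        return ords;
    }
}
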
if no docs have a value at the // beginning of the block and all docs have one at the end of the block final float avgValuesPerDoc = (float) numDocsWithValue / maxDoc; final int maxDelta = (int) Math.ceil(OFFSETS_PAGE_SIZE * (1 - avgValuesPerDoc) * avgValuesPerDoc); int bitsPerOffset = PackedInts.bitsRequired(maxDelta) + 1; // +1 because of the sign - bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue; + bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue(); final long expectedMultiSizeInBytes = (long) numDocsWithValue * bitsPerOrd + (long) maxDoc * bitsPerOffset; final long expectedSingleSizeInBytes = (long) maxDoc * bitsPerOrd; @@ -219,7 +219,7 @@ public boolean advanceExact(int docId) throws IOException { @Override public long nextOrd() throws IOException { if (currentOffset == currentEndOffset) { - return SortedSetDocValues.NO_MORE_ORDS; + return SortedSetDocValues.NO_MORE_DOCS; } else { return ords.get(currentOffset++); } diff --git a/server/src/main/java/org/opensearch/index/get/ShardGetService.java b/server/src/main/java/org/opensearch/index/get/ShardGetService.java index d4eeb8aae8e24..082a06c8f7773 100644 --- a/server/src/main/java/org/opensearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/opensearch/index/get/ShardGetService.java @@ -32,6 +32,7 @@ package org.opensearch.index.get; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexOptions; @@ -323,6 +324,7 @@ private GetResult innerGetLoadFromStoredFields( false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, diff --git a/server/src/main/java/org/opensearch/index/mapper/ConstantKeywordFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/ConstantKeywordFieldMapper.java index 02c2214c18e72..9eaf68b1b72b8 100644 --- a/server/src/main/java/org/opensearch/index/mapper/ConstantKeywordFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/ConstantKeywordFieldMapper.java @@ -175,10 +175,7 @@ public Query regexpQuery( @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context ) { - Automaton automaton = new RegExp(value, syntaxFlags, matchFlags).toAutomaton( - RegexpQuery.DEFAULT_PROVIDER, - maxDeterminizedStates - ); + Automaton automaton = new RegExp(value, syntaxFlags, matchFlags).toAutomaton(RegexpQuery.DEFAULT_PROVIDER); ByteRunAutomaton byteRunAutomaton = new ByteRunAutomaton(automaton); BytesRef valueBytes = BytesRefs.toBytesRef(this.value); if (byteRunAutomaton.run(valueBytes.bytes, valueBytes.offset, valueBytes.length)) { diff --git a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java index 7fbb38c47572c..da95f6c61210e 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DateFieldMapper.java @@ -32,6 +32,7 @@ package org.opensearch.index.mapper; +import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; @@ -142,7 +143,7 @@ public long parsePointAsMillis(byte[] value) { @Override protected Query distanceFeatureQuery(String field, float boost, long origin, 
TimeValue pivot) { - return LongPoint.newDistanceFeatureQuery(field, boost, origin, pivot.getMillis()); + return LongField.newDistanceFeatureQuery(field, boost, origin, pivot.getMillis()); } }, NANOSECONDS(DATE_NANOS_CONTENT_TYPE, NumericType.DATE_NANOSECONDS) { @@ -168,7 +169,7 @@ public long parsePointAsMillis(byte[] value) { @Override protected Query distanceFeatureQuery(String field, float boost, long origin, TimeValue pivot) { - return LongPoint.newDistanceFeatureQuery(field, boost, origin, pivot.getNanos()); + return LongField.newDistanceFeatureQuery(field, boost, origin, pivot.getNanos()); } }; diff --git a/server/src/main/java/org/opensearch/index/mapper/DocCountFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocCountFieldMapper.java index db4770adf6666..240d7fed16b60 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocCountFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocCountFieldMapper.java @@ -33,7 +33,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; -import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.Query; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.core.xcontent.XContentParserUtils; @@ -109,7 +109,7 @@ public String familyTypeName() { @Override public Query existsQuery(QueryShardContext context) { - return new DocValuesFieldExistsQuery(NAME); + return new FieldExistsQuery(NAME); } @Override diff --git a/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java index 658f4228cb0c6..a386e8b11eb38 100644 --- a/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/IdFieldMapper.java @@ -37,6 +37,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermInSetQuery; @@ -65,7 +66,9 @@ import org.opensearch.search.sort.SortOrder; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.function.Supplier; @@ -163,15 +166,15 @@ public Query existsQuery(QueryShardContext context) { @Override public Query termsQuery(List values, QueryShardContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = new BytesRef[values.size()]; - for (int i = 0; i < bytesRefs.length; i++) { + Collection bytesRefs = new ArrayList<>(values.size()); + for (int i = 0; i < values.size(); i++) { Object idObject = values.get(i); if (idObject instanceof BytesRef) { idObject = ((BytesRef) idObject).utf8ToString(); } - bytesRefs[i] = Uid.encodeId(idObject.toString()); + bytesRefs.add(Uid.encodeId(idObject.toString())); } - return new TermInSetQuery(name(), bytesRefs); + return new TermInSetQuery(MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE, name(), bytesRefs); } @Override diff --git a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java index db8da8a949d6f..e51d62cfedd7e 100644 --- a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java +++ 
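Note on the DateFieldMapper hunks above: LongPoint.newDistanceFeatureQuery is gone in Lucene 10; the equivalent factory lives on LongField with the same argument shape. An illustrative sketch (field name and pivot are made up):

import org.apache.lucene.document.LongField;
import org.apache.lucene.search.Query;

public class DistanceFeatureExample {
    public static void main(String[] args) {
        long origin = System.currentTimeMillis();
        long pivotMillis = 24L * 60L * 60L * 1000L; // the boost halves at the pivot distance
        // was: LongPoint.newDistanceFeatureQuery("timestamp", 2.0f, origin, pivotMillis)
        Query recencyBoost = LongField.newDistanceFeatureQuery("timestamp", 2.0f, origin, pivotMillis);
        System.out.println(recencyBoost);
    }
}
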
b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java @@ -377,7 +377,7 @@ public IpScriptDocValues(SortedSetDocValues in) { public void setNextDocId(int docId) throws IOException { count = 0; if (in.advanceExact(docId)) { - for (long ord = in.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = in.nextOrd()) { + for (long ord = in.nextOrd(); ord != SortedSetDocValues.NO_MORE_DOCS; ord = in.nextOrd()) { ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java index 54a1aead5fcc7..8bab85467dc01 100644 --- a/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/KeywordFieldMapper.java @@ -68,7 +68,9 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -399,21 +401,21 @@ public Query termsQuery(List values, QueryShardContext context) { if (!context.keywordFieldIndexOrDocValuesEnabled()) { return super.termsQuery(values, context); } - BytesRef[] iBytesRefs = new BytesRef[values.size()]; - BytesRef[] dVByteRefs = new BytesRef[values.size()]; - for (int i = 0; i < iBytesRefs.length; i++) { - iBytesRefs[i] = indexedValueForSearch(values.get(i)); - dVByteRefs[i] = indexedValueForSearch(rewriteForDocValue(values.get(i))); + Collection iBytesRefs = new ArrayList<>(values.size()); + Collection dVByteRefs = new ArrayList<>(values.size()); + for (int i = 0; i < values.size(); i++) { + iBytesRefs.add(indexedValueForSearch(values.get(i))); + dVByteRefs.add(indexedValueForSearch(rewriteForDocValue(values.get(i)))); } - Query indexQuery = new TermInSetQuery(name(), iBytesRefs); + Query indexQuery = new TermInSetQuery(MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE, name(), iBytesRefs); Query dvQuery = new TermInSetQuery(MultiTermQuery.DOC_VALUES_REWRITE, name(), dVByteRefs); return new IndexOrDocValuesQuery(indexQuery, dvQuery); } // if we only have doc_values enabled, we construct a new query with doc_values re-written if (hasDocValues()) { - BytesRef[] bytesRefs = new BytesRef[values.size()]; - for (int i = 0; i < bytesRefs.length; i++) { - bytesRefs[i] = indexedValueForSearch(rewriteForDocValue(values.get(i))); + Collection bytesRefs = new ArrayList<>(values.size()); + for (int i = 0; i < values.size(); i++) { + bytesRefs.add(indexedValueForSearch(rewriteForDocValue(values.get(i)))); } return new TermInSetQuery(MultiTermQuery.DOC_VALUES_REWRITE, name(), bytesRefs); } diff --git a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java index 66d4654e543a2..1a1fccc1621ff 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java @@ -44,9 +44,8 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.NormsFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import 
org.apache.lucene.search.TermQuery; @@ -345,9 +344,9 @@ public Query regexpQuery( public Query existsQuery(QueryShardContext context) { if (hasDocValues()) { - return new DocValuesFieldExistsQuery(name()); + return new FieldExistsQuery(name()); } else if (getTextSearchInfo().hasNorms()) { - return new NormsFieldExistsQuery(name()); + return new FieldExistsQuery(name()); } else { return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); } diff --git a/server/src/main/java/org/opensearch/index/mapper/TermBasedFieldType.java b/server/src/main/java/org/opensearch/index/mapper/TermBasedFieldType.java index 78dae2d2c27fc..da01c6490ea37 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TermBasedFieldType.java +++ b/server/src/main/java/org/opensearch/index/mapper/TermBasedFieldType.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; @@ -42,6 +43,8 @@ import org.opensearch.common.lucene.search.AutomatonQueries; import org.opensearch.index.query.QueryShardContext; +import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Map; @@ -93,11 +96,11 @@ public Query termQuery(Object value, QueryShardContext context) { @Override public Query termsQuery(List values, QueryShardContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = new BytesRef[values.size()]; - for (int i = 0; i < bytesRefs.length; i++) { - bytesRefs[i] = indexedValueForSearch(values.get(i)); + Collection bytesRefs = new ArrayList<>(values.size()); + for (int i = 0; i < values.size(); i++) { + bytesRefs.add(indexedValueForSearch(values.get(i))); } - return new TermInSetQuery(name(), bytesRefs); + return new TermInSetQuery(MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE, name(), bytesRefs); } } diff --git a/server/src/main/java/org/opensearch/index/mapper/WildcardFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/WildcardFieldMapper.java index e43e3bda692e7..c10e96d5f5f07 100644 --- a/server/src/main/java/org/opensearch/index/mapper/WildcardFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/WildcardFieldMapper.java @@ -37,6 +37,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.opensearch.common.lucene.BytesRefs; import org.opensearch.common.lucene.Lucene; @@ -430,7 +431,7 @@ public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, bo finalValue = value; } Predicate matchPredicate; - Automaton automaton = WildcardQuery.toAutomaton(new Term(name(), finalValue)); + Automaton automaton = WildcardQuery.toAutomaton(new Term(name(), finalValue), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); CompiledAutomaton compiledAutomaton = new CompiledAutomaton(automaton); if (compiledAutomaton.type == CompiledAutomaton.AUTOMATON_TYPE.SINGLE) { // when type equals SINGLE, #compiledAutomaton.runAutomaton is null @@ -573,7 +574,7 @@ public Query regexpQuery( } RegExp regExp = new RegExp(value, syntaxFlags, matchFlags); - Automaton automaton = regExp.toAutomaton(maxDeterminizedStates); + Automaton automaton = regExp.toAutomaton(); CompiledAutomaton compiledAutomaton = new CompiledAutomaton(automaton); 
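Note on the termsQuery changes above (IdFieldMapper, KeywordFieldMapper, TermBasedFieldType): Lucene 10 drops the BytesRef[] constructor of TermInSetQuery, so terms are passed as a Collection and the rewrite method is chosen explicitly. A sketch with made-up field and values:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.util.BytesRef;

public class TermInSetQueryExample {
    public static void main(String[] args) {
        Collection<BytesRef> terms = new ArrayList<>();
        for (String v : List.of("alpha", "beta", "gamma")) {
            terms.add(new BytesRef(v));
        }
        // was: new TermInSetQuery("tags", bytesRefArray)
        Query query = new TermInSetQuery(MultiTermQuery.CONSTANT_SCORE_BLENDED_REWRITE, "tags", terms);
        System.out.println(query);
    }
}
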
Predicate regexpPredicate; @@ -650,7 +651,7 @@ private static Query regexpToQuery(String fieldName, RegExp regExp) { return new MatchAllDocsQuery(); } if (query.clauses().size() == 1) { - return query.iterator().next().getQuery(); + return query.iterator().next().query(); } else if (query.clauses().size() == 0) { return new MatchAllDocsQuery(); } @@ -804,18 +805,8 @@ public Query rewrite(IndexSearcher indexSearcher) throws IOException { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { Weight firstPhaseWeight = firstPhaseQuery.createWeight(searcher, scoreMode, boost); return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - ScorerSupplier supplier = scorerSupplier(leafReaderContext); - if (supplier == null) { - return null; - } - return supplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { - Weight weight = this; ScorerSupplier firstPhaseSupplier = firstPhaseWeight.scorerSupplier(context); if (firstPhaseSupplier == null) { return null; @@ -846,7 +837,7 @@ public float matchCost() { return MATCH_COST_ESTIMATE; } }; - return new ConstantScoreScorer(weight, score(), scoreMode, twoPhaseIterator); + return new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/DerivedFieldQuery.java b/server/src/main/java/org/opensearch/index/query/DerivedFieldQuery.java index dcc02726cb0ef..04469db9c46ec 100644 --- a/server/src/main/java/org/opensearch/index/query/DerivedFieldQuery.java +++ b/server/src/main/java/org/opensearch/index/query/DerivedFieldQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.opensearch.index.mapper.DerivedFieldValueFetcher; @@ -93,7 +94,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator approximation; approximation = DocIdSetIterator.all(context.reader().maxDoc()); @@ -130,7 +131,8 @@ public float matchCost() { return 1000f; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java b/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java index 0e42e79f67d0c..d37519120b325 100644 --- a/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/IntervalBuilder.java @@ -43,7 +43,7 @@ import org.apache.lucene.queries.intervals.IntervalMatchesIterator; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; import 
org.apache.lucene.util.graph.GraphTokenStreamFiniteStrings; @@ -229,7 +229,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -244,7 +244,7 @@ protected List analyzeGraph(TokenStream source) throws IOExcept TokenStream ts = it.next(); IntervalsSource phrase = combineSources(analyzeTerms(ts), 0, IntervalMode.ORDERED); if (paths.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } paths.add(phrase); } diff --git a/server/src/main/java/org/opensearch/index/query/RegexpFlag.java b/server/src/main/java/org/opensearch/index/query/RegexpFlag.java index b7e7569f67127..f5eb1ebc98d04 100644 --- a/server/src/main/java/org/opensearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/opensearch/index/query/RegexpFlag.java @@ -63,7 +63,7 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} */ - COMPLEMENT(RegExp.COMPLEMENT), + COMPLEMENT(RegExp.DEPRECATED_COMPLEMENT), /** * Enables empty language expression: {@code #} diff --git a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java index ded6fd0528c33..75b8e14a5ca1f 100644 --- a/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/ScriptQueryBuilder.java @@ -41,6 +41,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.opensearch.OpenSearchException; @@ -215,7 +216,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final FilterScript leafScript = filterScript.newInstance(context); TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -232,7 +233,8 @@ public float matchCost() { return 1000f; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java b/server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java index e9abcb698f68f..d189c90233da8 100644 --- a/server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java +++ b/server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import 
org.opensearch.index.mapper.MappedFieldType; @@ -88,7 +89,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { Scorer scorer = weight.scorer(context); if (scorer == null) { @@ -121,7 +122,7 @@ public float matchCost() { return 1000f; } }; - return new ConstantScoreScorer(this, score(), ScoreMode.TOP_DOCS, twoPhase); + return new DefaultScorerSupplier(new ConstantScoreScorer(score(), ScoreMode.TOP_DOCS, twoPhase)); } @Override diff --git a/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java index e2cf7384ecac7..5582feb86bf5e 100644 --- a/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/TermsSetQueryBuilder.java @@ -35,7 +35,6 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.sandbox.search.CoveringQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; @@ -251,8 +250,8 @@ protected Query doToQuery(QueryShardContext context) { return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); } // Fail before we attempt to create the term queries: - if (values.size() > BooleanQuery.getMaxClauseCount()) { - throw new BooleanQuery.TooManyClauses(); + if (values.size() > IndexSearcher.getMaxClauseCount()) { + throw new IndexSearcher.TooManyClauses(); } List queries = createTermQueries(context); diff --git a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java index 55d018af46970..8c76b0a62f6bb 100644 --- a/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/opensearch/index/reindex/ClientScrollableHitSource.java @@ -175,7 +175,7 @@ private Response wrapSearchResponse(SearchResponse response) { } hits = unmodifiableList(hits); } - long total = response.getHits().getTotalHits().value; + long total = response.getHits().getTotalHits().value(); return new Response(response.isTimedOut(), failures, total, hits, response.getScrollId()); } diff --git a/server/src/main/java/org/opensearch/index/search/MatchQuery.java b/server/src/main/java/org/opensearch/index/search/MatchQuery.java index 86ea799ab311d..dbf3e958d70e5 100644 --- a/server/src/main/java/org/opensearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/opensearch/index/search/MatchQuery.java @@ -52,6 +52,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostAttribute; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; @@ -359,10 +360,10 @@ private Query createCommonTermsQuery( private Query boolToExtendedCommonTermsQuery(BooleanQuery bq, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency) { ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency); for 
(BooleanClause clause : bq.clauses()) { - if ((clause.getQuery() instanceof TermQuery) == false) { + if ((clause.query() instanceof TermQuery) == false) { return bq; } - query.add(((TermQuery) clause.getQuery()).getTerm()); + query.add(((TermQuery) clause.query()).getTerm()); } return query; } @@ -829,7 +830,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -847,7 +848,7 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in SpanQuery q = createSpanQuery(ts, field, usePrefix); if (q != null) { if (queries.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queries.add(q); } @@ -861,14 +862,14 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in Term[] terms = graph.getTerms(field, start); assert terms.length > 0; if (terms.length >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queryPos = newSpanQuery(terms, usePrefix); } if (queryPos != null) { if (clauses.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } clauses.add(queryPos); } diff --git a/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java b/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java index 8c0c87e8c9d0c..a2a8dd9795117 100644 --- a/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java +++ b/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java @@ -221,7 +221,7 @@ private class BlendedQueryBuilder extends MatchQueryBuilder { protected Query newSynonymQuery(String field, TermAndBoost[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { - values[i] = terms[i].term; + values[i] = terms[i].term(); } return blendTerms(context, values, commonTermsCutoff, tieBreaker, lenient, blendedFields); } diff --git a/server/src/main/java/org/opensearch/index/search/NestedHelper.java b/server/src/main/java/org/opensearch/index/search/NestedHelper.java index 50e7e41b95be7..a046ec62ec14a 100644 --- a/server/src/main/java/org/opensearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/opensearch/index/search/NestedHelper.java @@ -95,13 +95,13 @@ public boolean mightMatchNestedDocs(Query query) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(this::mightMatchNestedDocs); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(this::mightMatchNestedDocs); } } else if (query instanceof OpenSearchToParentBlockJoinQuery) { @@ -167,13 +167,13 @@ public boolean mightMatchNonNestedDocs(Query query, String nestedPath) { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) 
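Note on the MatchQuery and NestedHelper hunks around this point: BooleanClause is a record in Lucene 10, so getOccur() and getQuery() become occur() and query(). A minimal sketch of the new accessors (field and term values are made up):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.TermQuery;

public class BooleanClauseAccessorExample {
    public static void main(String[] args) {
        BooleanQuery bq = new BooleanQuery.Builder()
            .add(new TermQuery(new Term("f", "a")), Occur.SHOULD)
            .add(new TermQuery(new Term("f", "b")), Occur.MUST)
            .build();
        long optionalClauses = bq.clauses()
            .stream()
            .filter(c -> c.occur() == Occur.SHOULD) // was: c.getOccur()
            .map(BooleanClause::query)              // was: BooleanClause::getQuery
            .count();
        System.out.println(optionalClauses);
    }
}
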
+ .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } } else { diff --git a/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java b/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java index b2e2ba8001b88..a74edbf5d1c07 100644 --- a/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java +++ b/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.LeafFieldComparator; import org.apache.lucene.search.Pruning; import org.apache.lucene.search.comparators.NumericComparator; +import org.apache.lucene.util.NumericUtils; import java.io.IOException; @@ -52,6 +53,16 @@ public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws I return new HalfFloatLeafComparator(context); } + @Override + protected long missingValueAsComparableLong() { + return NumericUtils.floatToSortableInt(missingValue); + } + + @Override + protected long sortableBytesToLong(byte[] bytes) { + return NumericUtils.sortableBytesToInt(bytes, 0); + } + /** Leaf comparator for {@link HalfFloatComparator} that provides skipping functionality */ public class HalfFloatLeafComparator extends NumericLeafComparator { @@ -90,23 +101,13 @@ public void copy(int slot, int doc) throws IOException { } @Override - protected int compareMissingValueWithBottomValue() { - return Float.compare(missingValue, bottom); - } - - @Override - protected int compareMissingValueWithTopValue() { - return Float.compare(missingValue, topValue); - } - - @Override - protected void encodeBottom(byte[] packedValue) { - HalfFloatPoint.encodeDimension(bottom, packedValue, 0); + protected long bottomAsComparableLong() { + return NumericUtils.floatToSortableInt(bottom); } @Override - protected void encodeTop(byte[] packedValue) { - HalfFloatPoint.encodeDimension(topValue, packedValue, 0); + protected long topAsComparableLong() { + return NumericUtils.floatToSortableInt(topValue); } } } diff --git a/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java b/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java index 2b6bd9933e553..1e97c010e517c 100644 --- a/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java +++ b/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.LeafFieldComparator; import org.apache.lucene.search.Pruning; import org.apache.lucene.search.comparators.NumericComparator; +import org.apache.lucene.util.NumericUtils; import org.opensearch.common.Numbers; import java.io.IOException; @@ -49,6 +50,16 @@ public BigInteger value(int slot) { public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new UnsignedLongLeafComparator(context); } + + @Override + protected long missingValueAsComparableLong() { + return missingValue.longValue(); + } + + @Override + protected long sortableBytesToLong(byte[] bytes) { + return NumericUtils.sortableBytesToBigInt(bytes, 0, 0).longValue(); + } /** Leaf comparator for {@link UnsignedLongComparator} that provides skipping functionality */ public class UnsignedLongLeafComparator extends NumericLeafComparator { @@ -88,23 +99,13 @@ public void copy(int slot, int doc) throws IOException { } @Override - protected void encodeBottom(byte[] 
packedValue) { - BigIntegerPoint.encodeDimension(bottom, packedValue, 0); - } - - @Override - protected void encodeTop(byte[] packedValue) { - BigIntegerPoint.encodeDimension(topValue, packedValue, 0); - } - - @Override - protected int compareMissingValueWithBottomValue() { - return missingValue.compareTo(bottom); + protected long bottomAsComparableLong() { + return bottom.longValue(); } @Override - protected int compareMissingValueWithTopValue() { - return missingValue.compareTo(topValue); + protected long topAsComparableLong() { + return topValue.longValue(); } } } diff --git a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java index 219ead931797a..4c1c4796ef6b2 100644 --- a/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/opensearch/index/shard/ShardSplittingQuery.java @@ -48,6 +48,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -95,7 +96,7 @@ public String toString() { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { LeafReader leafReader = context.reader(); FixedBitSet bitSet = new FixedBitSet(leafReader.maxDoc()); Terms terms = leafReader.terms(RoutingFieldMapper.NAME); @@ -130,7 +131,8 @@ public Scorer scorer(LeafReaderContext context) throws IOException { TwoPhaseIterator twoPhaseIterator = parentBitSet == null ? new RoutingPartitionedDocIdSetIterator(visitor) : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); - return new ConstantScoreScorer(this, score(), scoreMode, twoPhaseIterator); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator); + return new DefaultScorerSupplier(scorer); } else { // here we potentially guard the docID consumers with our parent bitset if we have one. 
// this ensures that we are only marking root documents in the nested case and if necessary @@ -171,7 +173,8 @@ public Scorer scorer(LeafReaderContext context) throws IOException { } } - return new ConstantScoreScorer(this, score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/index/store/Store.java b/server/src/main/java/org/opensearch/index/store/Store.java index e7de61ff764f6..48ed4d9e75dcb 100644 --- a/server/src/main/java/org/opensearch/index/store/Store.java +++ b/server/src/main/java/org/opensearch/index/store/Store.java @@ -737,7 +737,7 @@ private static void failIfCorrupted(Directory directory) throws IOException { List ex = new ArrayList<>(); for (String file : files) { if (file.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { - try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(file)) { CodecUtil.checkHeader(input, CODEC, CORRUPTED_MARKER_CODEC_VERSION, CORRUPTED_MARKER_CODEC_VERSION); final int size = input.readVInt(); final byte[] buffer = new byte[size]; diff --git a/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java b/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java index 94c25202ac90c..93afe429c3d04 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java +++ b/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java @@ -127,7 +127,7 @@ private static FileCachedIndexInput createIndexInput(FileCache fileCache, Stream } } } - final IndexInput luceneIndexInput = request.getDirectory().openInput(request.getFileName(), IOContext.READ); + final IndexInput luceneIndexInput = request.getDirectory().openInput(request.getFileName(), IOContext.DEFAULT); return new FileCachedIndexInput(fileCache, request.getFilePath(), luceneIndexInput); } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java index 52ed311a1eb92..f16b16ce395b7 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesQueryCache.java @@ -35,13 +35,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.opensearch.common.annotation.PublicApi; @@ -170,24 +168,12 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio return in.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); 
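
Note on the pattern applied in the hunks above and below: Lucene 10 removes Weight.scorer(LeafReaderContext) in favour of scorerSupplier(LeafReaderContext), and ConstantScoreScorer no longer takes the owning Weight as its first argument; the prepared Scorer is handed back through Weight.DefaultScorerSupplier, as ShardSplittingQuery does above. A minimal stand-alone sketch of that shape, assuming a hypothetical ExampleMatchAllQuery that matches every document (the class itself is illustrative and not part of this patch):

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.ConstantScoreScorer;
    import org.apache.lucene.search.ConstantScoreWeight;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.QueryVisitor;
    import org.apache.lucene.search.ScoreMode;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.ScorerSupplier;
    import org.apache.lucene.search.Weight;

    /** Hypothetical match-all query illustrating the Lucene 10 scorerSupplier contract. */
    public final class ExampleMatchAllQuery extends Query {

        @Override
        public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
            return new ConstantScoreWeight(this, boost) {
                @Override
                public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
                    // Lucene 10: Weight.scorer(LeafReaderContext) is gone; a supplier is returned instead,
                    // and ConstantScoreScorer no longer takes the Weight as its first argument.
                    final DocIdSetIterator disi = DocIdSetIterator.all(context.reader().maxDoc());
                    final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi);
                    return new DefaultScorerSupplier(scorer);
                }

                @Override
                public boolean isCacheable(LeafReaderContext ctx) {
                    return true;
                }
            };
        }

        @Override
        public void visit(QueryVisitor visitor) {
            visitor.visitLeaf(this);
        }

        @Override
        public String toString(String field) {
            return "ExampleMatchAllQuery";
        }

        @Override
        public boolean equals(Object other) {
            return other != null && getClass() == other.getClass();
        }

        @Override
        public int hashCode() {
            return getClass().hashCode();
        }
    }
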
return in.scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.bulkScorer(context); - } - @Override public int count(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentFileTransferHandler.java b/server/src/main/java/org/opensearch/indices/replication/SegmentFileTransferHandler.java index 80ba5146535aa..756751cf0b87d 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentFileTransferHandler.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentFileTransferHandler.java @@ -110,7 +110,7 @@ protected void onNewResource(StoreFileMetadata md) throws IOException { // Segments* files require IOContext.READONCE // https://github.com/apache/lucene/blob/b2d3a2b37e00f19a74949097736be8fd64745f61/lucene/test-framework/src/java/org/apache/lucene/tests/store/MockDirectoryWrapper.java#L817 if (md.name().startsWith(IndexFileNames.SEGMENTS) == false) { - final IndexInput indexInput = store.directory().openInput(md.name(), IOContext.READ); + final IndexInput indexInput = store.directory().openInput(md.name(), IOContext.DEFAULT); currentInput = new InputStreamIndexInput(indexInput, md.length()) { @Override public void close() throws IOException { diff --git a/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java index 5c904d8a7770d..197e20ef30799 100644 --- a/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/MinDocQuery.java @@ -42,6 +42,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -103,14 +104,15 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo } return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final int maxDoc = context.reader().maxDoc(); if (context.docBase + maxDoc <= minDoc) { return null; } final int segmentMinDoc = Math.max(0, minDoc - context.docBase); final DocIdSetIterator disi = new MinDocIterator(segmentMinDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java index 600ba5b5a92d8..81778c3387863 100644 --- a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -45,6 +45,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -91,8 +92,8 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc 
after) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Sort segmentSort = context.reader().getMetaData().getSort(); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Sort segmentSort = context.reader().getMetaData().sort(); if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) { throw new IOException("search sort :[" + sort.getSort() + "] does not match the index sort:[" + segmentSort + "]"); } @@ -104,7 +105,8 @@ public Scorer scorer(LeafReaderContext context) throws IOException { return null; } final DocIdSetIterator disi = new MinDocQuery.MinDocIterator(firstDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index e74fca60b0201..73ffb3946f3e4 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -487,8 +487,8 @@ protected Node( Constants.OS_ARCH, Constants.JVM_VENDOR, Constants.JVM_NAME, - Constants.JAVA_VERSION, - Constants.JVM_VERSION + System.getProperty("java.version"), + Runtime.version().toString() ); if (jvmInfo.getBundledJdk()) { logger.info("JVM home [{}], using bundled JDK/JRE [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); diff --git a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java index 243d0021fac2e..ace6a8231e780 100644 --- a/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/opensearch/repositories/blobstore/BlobStoreRepository.java @@ -4534,7 +4534,7 @@ private void snapshotFile( ) throws IOException { final BlobContainer shardContainer = shardContainer(indexId, shardId); final String file = fileInfo.physicalName(); - try (IndexInput indexInput = store.openVerifyingInput(file, IOContext.READ, fileInfo.metadata())) { + try (IndexInput indexInput = store.openVerifyingInput(file, IOContext.READONCE, fileInfo.metadata())) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestCountAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestCountAction.java index 9c054ffe1bcc7..cc85f4141023c 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestCountAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestCountAction.java @@ -99,7 +99,7 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli return channel -> client.search(countRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(SearchResponse countResponse) throws Exception { - assert countResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert countResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; return RestTable.buildResponse(buildTable(request, countResponse), channel); } }); @@ -117,7 +117,7 @@ protected Table getTableWithHeader(final RestRequest 
request) { private Table buildTable(RestRequest request, SearchResponse response) { Table table = getTableWithHeader(request); table.startRow(); - table.addCell(response.getHits().getTotalHits().value); + table.addCell(response.getHits().getTotalHits().value()); table.endRow(); return table; diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java index acedd5a884596..56c20ed956037 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestCountAction.java @@ -118,7 +118,7 @@ public RestResponse buildResponse(SearchResponse response, XContentBuilder build if (terminateAfter != DEFAULT_TERMINATE_AFTER) { builder.field("terminated_early", response.isTerminatedEarly()); } - builder.field("count", response.getHits().getTotalHits().value); + builder.field("count", response.getHits().getTotalHits().value()); buildBroadcastShardsHeader( builder, request, diff --git a/server/src/main/java/org/opensearch/search/MultiValueMode.java b/server/src/main/java/org/opensearch/search/MultiValueMode.java index a99da674836f2..9595dfab0e41f 100644 --- a/server/src/main/java/org/opensearch/search/MultiValueMode.java +++ b/server/src/main/java/org/opensearch/search/MultiValueMode.java @@ -505,7 +505,7 @@ protected BytesRef pick( @Override protected int pick(SortedSetDocValues values) throws IOException { long maxOrd = -1; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_DOCS; ord = values.nextOrd()) { maxOrd = ord; } return Math.toIntExact(maxOrd); diff --git a/server/src/main/java/org/opensearch/search/SearchHits.java b/server/src/main/java/org/opensearch/search/SearchHits.java index 8232643b353f5..963ce82e636cf 100644 --- a/server/src/main/java/org/opensearch/search/SearchHits.java +++ b/server/src/main/java/org/opensearch/search/SearchHits.java @@ -219,12 +219,12 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(Fields.HITS); boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value; + long total = totalHits == null ? -1 : totalHits.value(); builder.field(Fields.TOTAL, total); } else if (totalHits != null) { builder.startObject(Fields.TOTAL); - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == Relation.EQUAL_TO ? "eq" : "gte"); builder.endObject(); } if (Float.isNaN(maxScore)) { diff --git a/server/src/main/java/org/opensearch/search/SearchModule.java b/server/src/main/java/org/opensearch/search/SearchModule.java index 3a746259af3b5..40e0293f88f07 100644 --- a/server/src/main/java/org/opensearch/search/SearchModule.java +++ b/server/src/main/java/org/opensearch/search/SearchModule.java @@ -32,7 +32,7 @@ package org.opensearch.search; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.opensearch.common.NamedRegistry; import org.opensearch.common.Nullable; import org.opensearch.common.geo.GeoShapeType; @@ -324,7 +324,7 @@ public class SearchModule { * Constructs a new SearchModule object *

* NOTE: This constructor should not be called in production unless an accurate {@link Settings} object is provided. - * When constructed, a static flag is set in Lucene {@link BooleanQuery#setMaxClauseCount} according to the settings. + * When constructed, a static flag is set in Lucene {@link IndexSearcher#setMaxClauseCount} according to the settings. * @param settings Current settings * @param plugins List of included {@link SearchPlugin} objects. */ diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index cfe716eb57ca8..7a0bffa0cf74a 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -384,7 +384,7 @@ private boolean isMaybeMultivalued(LeafReaderContext context, SortField sortFiel * optimization and null if index sort is not applicable. */ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException { - Sort indexSort = context.reader().getMetaData().getSort(); + Sort indexSort = context.reader().getMetaData().sort(); if (indexSort == null) { return null; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index 3e5c53d470f79..e093d45476435 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -50,7 +50,7 @@ import java.io.IOException; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; +import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_DOCS; /** * A {@link SingleDimensionValuesSource} for global ordinals. 
@@ -176,7 +176,7 @@ LeafBucketCollector getLeafCollector(LeafReaderContext context, LeafBucketCollec public void collect(int doc, long bucket) throws IOException { if (dvs.advanceExact(doc)) { long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + while ((ord = dvs.nextOrd()) != NO_MORE_DOCS) { currentValue = ord; next.collect(doc, bucket); } @@ -206,7 +206,7 @@ public void collect(int doc, long bucket) throws IOException { if (currentValueIsSet == false) { if (dvs.advanceExact(doc)) { long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + while ((ord = dvs.nextOrd()) != NO_MORE_DOCS) { if (term.equals(lookup.lookupOrd(ord))) { currentValueIsSet = true; currentValue = ord; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/LongValuesSource.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/LongValuesSource.java index 48e080c1576dd..da855e3d14bce 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/LongValuesSource.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/LongValuesSource.java @@ -39,7 +39,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.DocValuesFieldExistsQuery; +import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.PointRangeQuery; @@ -242,8 +242,8 @@ static boolean checkMatchAllOrRangeQuery(Query query, String fieldName) { } else if (query instanceof PointRangeQuery) { PointRangeQuery pointQuery = (PointRangeQuery) query; return fieldName.equals(pointQuery.getField()); - } else if (query instanceof DocValuesFieldExistsQuery) { - DocValuesFieldExistsQuery existsQuery = (DocValuesFieldExistsQuery) query; + } else if (query instanceof FieldExistsQuery) { + FieldExistsQuery existsQuery = (FieldExistsQuery) query; return fieldName.equals(existsQuery.getField()); } else { return false; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index a48649af99be3..e46800a937800 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -202,7 +202,7 @@ abstract static class SortedSetRangeLeafCollector extends LeafBucketCollectorBas public void collect(int doc, long bucket) throws IOException { if (values.advanceExact(doc)) { int lo = 0; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_DOCS; ord = values.nextOrd()) { lo = collect(doc, ord, bucket, lo); } } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index 953a45971919c..8708615690621 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ 
b/server/src/main/java/org/opensearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -148,7 +148,7 @@ public boolean advanceExact(int target) throws IOException { value = globalOrds.nextOrd(); // Check there isn't a second value for this // document - if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_DOCS) { throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } return true; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 9e40f7b4c9b3e..2a739ffb3d74b 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -79,8 +79,7 @@ import java.util.function.LongUnaryOperator; import static org.opensearch.search.aggregations.InternalOrder.isKeyOrder; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; -import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_DOCS; /** * An aggregator of string values that relies on global ordinals in order to build buckets. @@ -289,7 +288,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_DOCS; globalOrd = globalOrds.nextOrd()) { collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } @@ -301,7 +300,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_DOCS; globalOrd = globalOrds.nextOrd()) { if (false == acceptedGlobalOrdinals.test(globalOrd)) { continue; } @@ -478,7 +477,7 @@ public void collect(int doc, long owningBucketOrd) throws IOException { if (false == segmentOrds.advanceExact(doc)) { return; } - for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_ORDS; segmentOrd = segmentOrds.nextOrd()) { + for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_DOCS; segmentOrd = segmentOrds.nextOrd()) { long docCount = docCountProvider.getDocCount(doc); segmentDocCounts.increment(segmentOrd + 1, docCount); } diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/CardinalityAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/CardinalityAggregator.java index 0f3d975960364..523eb69e0bc00 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/CardinalityAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/CardinalityAggregator.java @@ -554,7 +554,7 @@ public void collect(int doc, long bucketOrd) throws IOException { visitedOrds.set(bucketOrd, bits); } if (values.advanceExact(doc)) { - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (long ord = 
values.nextOrd(); ord != SortedSetDocValues.NO_MORE_DOCS; ord = values.nextOrd()) { bits.set((int) ord); } } diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java index e82f49aa13600..fab6113b397da 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/InternalTopHits.java @@ -183,7 +183,7 @@ public InternalAggregation reduce(List aggregations, Reduce } while (shardDocs[scoreDoc.shardIndex].scoreDocs[position] != scoreDoc); hits[i] = shardHits[scoreDoc.shardIndex].getAt(position); } - assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; + assert reducedTopDocs.totalHits.relation() == Relation.EQUAL_TO; return new InternalTopHits( name, this.from, @@ -224,8 +224,8 @@ public boolean equals(Object obj) { InternalTopHits other = (InternalTopHits) obj; if (from != other.from) return false; if (size != other.size) return false; - if (topDocs.topDocs.totalHits.value != other.topDocs.topDocs.totalHits.value) return false; - if (topDocs.topDocs.totalHits.relation != other.topDocs.topDocs.totalHits.relation) return false; + if (topDocs.topDocs.totalHits.value() != other.topDocs.topDocs.totalHits.value()) return false; + if (topDocs.topDocs.totalHits.relation() != other.topDocs.topDocs.totalHits.relation()) return false; if (topDocs.topDocs.scoreDocs.length != other.topDocs.topDocs.scoreDocs.length) return false; for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc thisDoc = topDocs.topDocs.scoreDocs[d]; @@ -251,8 +251,8 @@ public int hashCode() { int hashCode = super.hashCode(); hashCode = 31 * hashCode + Integer.hashCode(from); hashCode = 31 * hashCode + Integer.hashCode(size); - hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value); - hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation.hashCode(); + hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value()); + hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation().hashCode(); for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc doc = topDocs.topDocs.scoreDocs[d]; hashCode = 31 * hashCode + doc.doc; diff --git a/server/src/main/java/org/opensearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/opensearch/search/aggregations/metrics/MetricInspectionHelper.java index 47e27da5394b3..8408036772673 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/metrics/MetricInspectionHelper.java +++ b/server/src/main/java/org/opensearch/search/aggregations/metrics/MetricInspectionHelper.java @@ -78,7 +78,7 @@ public static boolean hasValue(InternalTDigestPercentiles agg) { } public static boolean hasValue(InternalTopHits agg) { - return (agg.getHits().getTotalHits().value == 0 + return (agg.getHits().getTotalHits().value() == 0 && Double.isNaN(agg.getHits().getMaxScore()) && Double.isNaN(agg.getTopDocs().maxScore)) == false; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java index d21737a8366b2..b9595941608d0 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java @@ -308,7 +308,7 @@ public long nextOrd() throws 
IOException { // NO_MORE_ORDS so on the next call we indicate there are no // more values long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; + nextMissingOrd = SortedSetDocValues.NO_MORE_DOCS; return ordToReturn; } } @@ -376,7 +376,7 @@ public long nextOrd() throws IOException { // NO_MORE_ORDS so on the next call we indicate there are no // more values long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; + nextMissingOrd = SortedSetDocValues.NO_MORE_DOCS; return ordToReturn; } } diff --git a/server/src/main/java/org/opensearch/search/approximate/ApproximatePointRangeQuery.java b/server/src/main/java/org/opensearch/search/approximate/ApproximatePointRangeQuery.java index 6ff01f5f39d36..fe3d66c674a82 100644 --- a/server/src/main/java/org/opensearch/search/approximate/ApproximatePointRangeQuery.java +++ b/server/src/main/java/org/opensearch/search/approximate/ApproximatePointRangeQuery.java @@ -359,7 +359,7 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti public Scorer get(long leadCost) throws IOException { intersectLeft(values.getPointTree(), visitor, docCount); DocIdSetIterator iterator = result.build().iterator(); - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override @@ -387,7 +387,7 @@ public long cost() { public Scorer get(long leadCost) throws IOException { intersectRight(values.getPointTree(), visitor, docCount); DocIdSetIterator iterator = result.build().iterator(); - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override @@ -404,15 +404,6 @@ public long cost() { } } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } - @Override public int count(LeafReaderContext context) throws IOException { return pointRangeQueryWeight.count(context); diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index e037ae5544a07..2e94a7ba0b3f6 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -75,13 +75,13 @@ public static WeightedFragInfo fixWeightedFragInfo(MappedFieldType fieldType, Fi CollectionUtil.introSort(subInfos, new Comparator() { @Override public int compare(SubInfo o1, SubInfo o2) { - int startOffset = o1.getTermsOffsets().get(0).getStartOffset(); - int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset(); + int startOffset = o1.termsOffsets().get(0).getStartOffset(); + int startOffset2 = o2.termsOffsets().get(0).getStartOffset(); return FragmentBuilderHelper.compare(startOffset, startOffset2); } }); return new WeightedFragInfo( - Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), + Math.min(fragInfo.getSubInfos().get(0).termsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), fragInfo.getEndOffset(), subInfos, fragInfo.getTotalBoost() diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileScorer.java 
b/server/src/main/java/org/opensearch/search/profile/query/ProfileScorer.java index 319281449195b..28b693ee03ad5 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileScorer.java @@ -35,7 +35,6 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.opensearch.search.profile.AbstractProfileBreakdown; import org.opensearch.search.profile.Timer; @@ -51,15 +50,12 @@ final class ProfileScorer extends Scorer { private final Scorer scorer; - private ProfileWeight profileWeight; private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer, setMinCompetitiveScoreTimer; - ProfileScorer(ProfileWeight w, Scorer scorer, AbstractProfileBreakdown profile) throws IOException { - super(w); + ProfileScorer(Scorer scorer, AbstractProfileBreakdown profile) throws IOException { this.scorer = scorer; - this.profileWeight = w; scoreTimer = profile.getTimer(QueryTimingType.SCORE); nextDocTimer = profile.getTimer(QueryTimingType.NEXT_DOC); advanceTimer = profile.getTimer(QueryTimingType.ADVANCE); @@ -84,11 +80,6 @@ public float score() throws IOException { } } - @Override - public Weight getWeight() { - return profileWeight; - } - @Override public Collection getChildren() throws IOException { return scorer.getChildren(); diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java index c7e70d8d88007..f190a9734c1a5 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java @@ -33,7 +33,6 @@ package org.opensearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Collector; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; @@ -63,15 +62,6 @@ public ProfileWeight(Query query, Weight subQueryWeight, ContextualProfileBreakd this.profile = profile; } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier supplier = scorerSupplier(context); - if (supplier == null) { - return null; - } - return supplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { Timer timer = profile.context(context).getTimer(QueryTimingType.BUILD_SCORER); @@ -86,14 +76,13 @@ public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOExcepti return null; } - final ProfileWeight weight = this; return new ScorerSupplier() { @Override public Scorer get(long loadCost) throws IOException { timer.start(); try { - return new ProfileScorer(weight, subQueryScorerSupplier.get(loadCost), profile.context(context)); + return new ProfileScorer(subQueryScorerSupplier.get(loadCost), profile.context(context)); } finally { timer.stop(); } @@ -111,18 +100,6 @@ public long cost() { }; } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - // We use the default bulk scorer instead of the specialized one. 
The reason - // is that Lucene's BulkScorers do everything at once: finding matches, - // scoring them and calling the collector, so they make it impossible to - // see where time is spent, which is the purpose of query profiling. - // The default bulk scorer will pull a scorer and iterate over matches, - // this might be a significantly different execution path for some queries - // like disjunctions, but in general this is what is done anyway - return super.bulkScorer(context); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return subQueryWeight.explain(context, doc); diff --git a/server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java b/server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java index dfa5fc4567f80..a2918202de37a 100644 --- a/server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java +++ b/server/src/main/java/org/opensearch/search/query/BitmapDocValuesQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Accountable; @@ -58,7 +59,7 @@ public BitmapDocValuesQuery(String field, RoaringBitmap bitmap) { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), field); final NumericDocValues singleton = DocValues.unwrapSingleton(values); final TwoPhaseIterator iterator; @@ -99,7 +100,8 @@ public float matchCost() { } }; } - return new ConstantScoreScorer(this, score(), scoreMode, iterator); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/search/query/QueryPhase.java b/server/src/main/java/org/opensearch/search/query/QueryPhase.java index 55b7c0bc5178d..58be02cc413dd 100644 --- a/server/src/main/java/org/opensearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/opensearch/search/query/QueryPhase.java @@ -388,7 +388,7 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort } final Sort sort = sortAndFormats.sort; for (LeafReaderContext ctx : reader.leaves()) { - Sort indexSort = ctx.reader().getMetaData().getSort(); + Sort indexSort = ctx.reader().getMetaData().sort(); if (indexSort == null || Lucene.canEarlyTerminate(sort, indexSort) == false) { return false; } diff --git a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java index f780f6fe32af2..30f2a550d6a20 100644 --- a/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java +++ b/server/src/main/java/org/opensearch/search/query/TopDocsCollectorContext.java @@ -46,8 +46,8 @@ import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.FieldDoc; 
+import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; @@ -581,7 +581,7 @@ TopDocsAndMaxScore newTopDocs(final TopDocs topDocs, final float maxScore, final // artificially reducing the number of total hits and doc scores. ScoreDoc[] scoreDocs = topDocs.scoreDocs; if (terminatedAfter != null) { - if (totalHits.value > terminatedAfter) { + if (totalHits.value() > terminatedAfter) { totalHits = new TotalHits(terminatedAfter, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } @@ -735,8 +735,8 @@ static int shortcutTotalHitCount(IndexReader reader, Query query) throws IOExcep count += context.reader().docFreq(term); } return count; - } else if (query.getClass() == DocValuesFieldExistsQuery.class && reader.hasDeletions() == false) { - final String field = ((DocValuesFieldExistsQuery) query).getField(); + } else if (query.getClass() == FieldExistsQuery.class && reader.hasDeletions() == false) { + final String field = ((FieldExistsQuery) query).getField(); int count = 0; for (LeafReaderContext context : reader.leaves()) { FieldInfos fieldInfos = context.reader().getFieldInfos(); diff --git a/server/src/main/java/org/opensearch/search/slice/DocValuesSliceQuery.java b/server/src/main/java/org/opensearch/search/slice/DocValuesSliceQuery.java index 856e103193463..33c300c69abaf 100644 --- a/server/src/main/java/org/opensearch/search/slice/DocValuesSliceQuery.java +++ b/server/src/main/java/org/opensearch/search/slice/DocValuesSliceQuery.java @@ -41,6 +41,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.opensearch.common.util.BitMixer; @@ -65,7 +66,7 @@ public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float bo return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), getField()); final DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -90,7 +91,8 @@ public float matchCost() { return 10; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/search/slice/TermsSliceQuery.java b/server/src/main/java/org/opensearch/search/slice/TermsSliceQuery.java index 05f36b0d6f3cf..4515b470895bc 100644 --- a/server/src/main/java/org/opensearch/search/slice/TermsSliceQuery.java +++ b/server/src/main/java/org/opensearch/search/slice/TermsSliceQuery.java @@ -44,6 +44,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -74,10 +75,11 @@ public TermsSliceQuery(String field, int id, int max) { public Weight createWeight(IndexSearcher searcher, 
ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final DocIdSet disi = build(context.reader()); final DocIdSetIterator leafIt = disi.iterator(); - return new ConstantScoreScorer(this, score(), scoreMode, leafIt); + final Scorer scorer = new ConstantScoreScorer(score(), scoreMode, leafIt); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java b/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java index 10cc832fdb684..65d8c60bc404e 100644 --- a/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java +++ b/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java @@ -81,6 +81,16 @@ public int compareValues(Number first, Number second) { return Double.compare(first.doubleValue(), second.doubleValue()); } } + + @Override + protected long missingValueAsComparableLong() { + throw new UnsupportedOperationException(); + } + + @Override + protected long sortableBytesToLong(byte[] bytes) { + throw new UnsupportedOperationException(); + } }; } diff --git a/server/src/main/java/org/opensearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/opensearch/search/suggest/phrase/DirectCandidateGenerator.java index 1a00cb9465771..4fa6e262e89ad 100644 --- a/server/src/main/java/org/opensearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/opensearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -131,7 +131,7 @@ public DirectCandidateGenerator( */ @Override public boolean isKnownWord(BytesRef term) throws IOException { - return termStats(term).docFreq > 0; + return termStats(term).docFreq() > 0; } /* (non-Javadoc) @@ -173,7 +173,7 @@ public CandidateSet drawCandidates(CandidateSet set) throws IOException { because that's what {@link DirectSpellChecker#suggestSimilar} expects when filtering terms. 
*/ - int threshold = thresholdTermFrequency(original.termStats.docFreq); + int threshold = thresholdTermFrequency(original.termStats.docFreq()); if (threshold == Integer.MAX_VALUE) { // the threshold is the max possible frequency so we can skip the search return set; @@ -267,7 +267,7 @@ public void nextToken() throws IOException { } private double score(TermStats termStats, double errorScore, long dictionarySize) { - return errorScore * (((double) termStats.totalTermFreq + 1) / ((double) dictionarySize + 1)); + return errorScore * (((double) termStats.totalTermFreq() + 1) / ((double) dictionarySize + 1)); } // package protected for test diff --git a/server/src/main/java/org/opensearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/opensearch/search/suggest/phrase/LaplaceScorer.java index 7ac1d14576ea1..2c307e9464705 100644 --- a/server/src/main/java/org/opensearch/search/suggest/phrase/LaplaceScorer.java +++ b/server/src/main/java/org/opensearch/search/suggest/phrase/LaplaceScorer.java @@ -64,7 +64,7 @@ protected double scoreUnigram(Candidate word) throws IOException { @Override protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { join(separator, spare, w_1.term, word.term); - return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq + alpha * numTerms); + return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq() + alpha * numTerms); } @Override diff --git a/server/src/main/java/org/opensearch/search/suggest/phrase/LinearInterpolatingScorer.java b/server/src/main/java/org/opensearch/search/suggest/phrase/LinearInterpolatingScorer.java index e012dde78c59e..6da733e97efdd 100644 --- a/server/src/main/java/org/opensearch/search/suggest/phrase/LinearInterpolatingScorer.java +++ b/server/src/main/java/org/opensearch/search/suggest/phrase/LinearInterpolatingScorer.java @@ -86,7 +86,7 @@ protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { if (count < 1) { return unigramLambda * scoreUnigram(word); } - return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq)) + unigramLambda * scoreUnigram(word); + return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq())) + unigramLambda * scoreUnigram(word); } @Override diff --git a/server/src/main/java/org/opensearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/opensearch/search/suggest/phrase/NoisyChannelSpellChecker.java index e8ba90f353f02..c4b531229ae7b 100644 --- a/server/src/main/java/org/opensearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/opensearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -98,7 +98,7 @@ public void nextToken() throws IOException { if (posIncAttr.getPositionIncrement() == 0 && typeAttribute.type() == SynonymFilter.TYPE_SYNONYM) { assert currentSet != null; TermStats termStats = generator.termStats(term); - if (termStats.docFreq > 0) { + if (termStats.docFreq() > 0) { currentSet.addOneCandidate(generator.createCandidate(BytesRef.deepCopyOf(term), termStats, realWordLikelihood)); } } else { diff --git a/server/src/main/java/org/opensearch/search/suggest/phrase/StupidBackoffScorer.java b/server/src/main/java/org/opensearch/search/suggest/phrase/StupidBackoffScorer.java index 35de07015b853..4b66eb0f10301 100644 --- a/server/src/main/java/org/opensearch/search/suggest/phrase/StupidBackoffScorer.java +++ b/server/src/main/java/org/opensearch/search/suggest/phrase/StupidBackoffScorer.java @@ -63,7 +63,7 @@ protected double 
scoreBigram(Candidate word, Candidate w_1) throws IOException { if (count < 1) { return discount * scoreUnigram(word); } - return count / (w_1.termStats.totalTermFreq + 0.00000000001d); + return count / (w_1.termStats.totalTermFreq() + 0.00000000001d); } @Override @@ -78,7 +78,7 @@ protected double scoreTrigram(Candidate w, Candidate w_1, Candidate w_2) throws join(separator, spare, w_2.term, w_1.term, w.term); long trigramCount = frequency(spare.get()); if (trigramCount < 1) { - return discount * (bigramCount / (w_1.termStats.totalTermFreq + 0.00000000001d)); + return discount * (bigramCount / (w_1.termStats.totalTermFreq() + 0.00000000001d)); } return trigramCount / (bigramCount + 0.00000000001d); } diff --git a/server/src/main/java/org/opensearch/telemetry/tracing/listener/TraceableSearchRequestOperationsListener.java b/server/src/main/java/org/opensearch/telemetry/tracing/listener/TraceableSearchRequestOperationsListener.java index 71fb59194c447..e74f172c85189 100644 --- a/server/src/main/java/org/opensearch/telemetry/tracing/listener/TraceableSearchRequestOperationsListener.java +++ b/server/src/main/java/org/opensearch/telemetry/tracing/listener/TraceableSearchRequestOperationsListener.java @@ -69,7 +69,7 @@ public void onRequestEnd(SearchPhaseContext context, SearchRequestContext search // add response-related attributes on request end requestSpan.addAttribute( AttributeNames.TOTAL_HITS, - searchRequestContext.totalHits() == null ? 0 : searchRequestContext.totalHits().value + searchRequestContext.totalHits() == null ? 0 : searchRequestContext.totalHits().value() ); } } diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 16742fd33ed43..dd744d91dc63a 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1 +1,2 @@ org.opensearch.index.codec.composite.composite912.Composite912Codec +org.opensearch.index.codec.composite.composite100.Composite100Codec diff --git a/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java index 6952841c295e2..2dd2d9f9576a9 100644 --- a/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/DfsQueryPhaseTests.java @@ -155,11 +155,11 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNotNull(responseRef.get().get(1)); assertNull(responseRef.get().get(1).fetchResult()); - assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); @@ -240,7 +240,7 @@ public void run() throws IOException { assertNotNull(responseRef.get()); 
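
The recurring totalHits changes above reflect that Lucene 10's TotalHits exposes its data through value() and relation() accessors instead of public fields (it is now a record). A minimal sketch of reading it under the new API, with a hypothetical helper class and method name:

    import org.apache.lucene.search.TotalHits;

    final class TotalHitsExample {
        // In Lucene 10 the former public fields are read through the value() and relation() accessors.
        static String describe(TotalHits totalHits) {
            String rel = totalHits.relation() == TotalHits.Relation.EQUAL_TO ? "eq" : "gte";
            return totalHits.value() + " (" + rel + ")";
        }
    }
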
assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNull(responseRef.get().get(1)); diff --git a/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java index 1eb3a44642806..c5db475e9db02 100644 --- a/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/FetchSearchPhaseTests.java @@ -114,7 +114,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); if (numHits != 0) { assertEquals(42, searchResponse.getHits().getAt(0).docId()); } @@ -209,7 +209,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(42, searchResponse.getHits().getAt(1).docId()); assertEquals(0, searchResponse.getFailedShards()); @@ -302,7 +302,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(1, searchResponse.getFailedShards()); assertEquals(1, searchResponse.getSuccessfulShards()); @@ -387,7 +387,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); assertEquals(Math.min(numHits, resultSetSize), searchResponse.getHits().getHits().length); SearchHit[] hits = searchResponse.getHits().getHits(); for (int i = 0; i < hits.length; i++) { @@ -582,7 +582,7 @@ public void run() { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(1, searchResponse.getHits().getHits().length); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(0, searchResponse.getFailedShards()); diff --git a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java index a927f733cc504..964f79d23447a 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java +++ 
diff --git a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java
index a927f733cc504..964f79d23447a 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchPhaseControllerTests.java
@@ -356,8 +356,8 @@ public void testMerge() {
                 if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
                     assertNull(mergedResponse.hits.getTotalHits());
                 } else {
-                    assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L));
-                    assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO);
+                    assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L));
+                    assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO);
                 }
                 for (SearchHit hit : mergedResponse.hits().getHits()) {
                     SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id());
@@ -583,8 +583,8 @@ private static int getTotalQueryHits(AtomicArray results) {
         int resultCount = 0;
         for (SearchPhaseResult shardResult : results.asList()) {
             TopDocs topDocs = shardResult.queryResult().topDocs().topDocs;
-            assert topDocs.totalHits.relation == Relation.EQUAL_TO;
-            resultCount += (int) topDocs.totalHits.value;
+            assert topDocs.totalHits.relation() == Relation.EQUAL_TO;
+            resultCount += (int) topDocs.totalHits.value();
         }
         return resultCount;
     }
@@ -820,7 +820,7 @@ public void testConsumerConcurrently() throws Exception {
         assertEquals(max.get(), internalMax.getValue(), 0.0D);
         assertEquals(1, reduce.sortedTopDocs.scoreDocs.length);
         assertEquals(max.get(), reduce.maxScore, 0.0f);
-        assertEquals(expectedNumResults, reduce.totalHits.value);
+        assertEquals(expectedNumResults, reduce.totalHits.value());
         assertEquals(max.get(), reduce.sortedTopDocs.scoreDocs[0].score, 0.0f);
         assertFalse(reduce.sortedTopDocs.isSortedByField);
         assertNull(reduce.sortedTopDocs.sortFields);
@@ -872,7 +872,7 @@ public void testConsumerOnlyAggs() throws Exception {
         assertEquals(max.get(), internalMax.getValue(), 0.0D);
         assertEquals(0, reduce.sortedTopDocs.scoreDocs.length);
         assertEquals(max.get(), reduce.maxScore, 0.0f);
-        assertEquals(expectedNumResults, reduce.totalHits.value);
+        assertEquals(expectedNumResults, reduce.totalHits.value());
         assertFalse(reduce.sortedTopDocs.isSortedByField);
         assertNull(reduce.sortedTopDocs.sortFields);
         assertNull(reduce.sortedTopDocs.collapseField);
@@ -921,7 +921,7 @@ public void testConsumerOnlyHits() throws Exception {
         assertAggReduction(request);
         assertEquals(1, reduce.sortedTopDocs.scoreDocs.length);
         assertEquals(max.get(), reduce.maxScore, 0.0f);
-        assertEquals(expectedNumResults, reduce.totalHits.value);
+        assertEquals(expectedNumResults, reduce.totalHits.value());
         assertEquals(max.get(), reduce.sortedTopDocs.scoreDocs[0].score, 0.0f);
         assertFalse(reduce.sortedTopDocs.isSortedByField);
         assertNull(reduce.sortedTopDocs.sortFields);
@@ -980,7 +980,7 @@ public void testReduceTopNWithFromOffset() throws Exception {
         ScoreDoc[] scoreDocs = reduce.sortedTopDocs.scoreDocs;
         assertEquals(5, scoreDocs.length);
         assertEquals(100.f, reduce.maxScore, 0.0f);
-        assertEquals(12, reduce.totalHits.value);
+        assertEquals(12, reduce.totalHits.value());
         assertEquals(95.0f, scoreDocs[0].score, 0.0f);
         assertEquals(94.0f, scoreDocs[1].score, 0.0f);
         assertEquals(93.0f, scoreDocs[2].score, 0.0f);
@@ -1025,7 +1025,7 @@ public void testConsumerSortByField() throws Exception {
         SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
         assertAggReduction(request);
         assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs.scoreDocs.length);
-        assertEquals(expectedNumResults, reduce.totalHits.value);
+        assertEquals(expectedNumResults, reduce.totalHits.value());
         assertEquals(max.get(), ((FieldDoc) reduce.sortedTopDocs.scoreDocs[0]).fields[0]);
         assertTrue(reduce.sortedTopDocs.isSortedByField);
         assertEquals(1, reduce.sortedTopDocs.sortFields.length);
@@ -1074,7 +1074,7 @@ public void testConsumerFieldCollapsing() throws Exception {
         SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
         assertAggReduction(request);
         assertEquals(3, reduce.sortedTopDocs.scoreDocs.length);
-        assertEquals(expectedNumResults, reduce.totalHits.value);
+        assertEquals(expectedNumResults, reduce.totalHits.value());
         assertEquals(a, ((FieldDoc) reduce.sortedTopDocs.scoreDocs[0]).fields[0]);
         assertEquals(b, ((FieldDoc) reduce.sortedTopDocs.scoreDocs[1]).fields[0]);
         assertEquals(c, ((FieldDoc) reduce.sortedTopDocs.scoreDocs[2]).fields[0]);
@@ -1186,7 +1186,7 @@ public void testConsumerSuggestions() throws Exception {
         assertEquals(maxScoreCompletion, reduce.sortedTopDocs.scoreDocs[0].score, 0f);
         assertEquals(0, reduce.sortedTopDocs.scoreDocs[0].doc);
         assertNotEquals(-1, reduce.sortedTopDocs.scoreDocs[0].shardIndex);
-        assertEquals(0, reduce.totalHits.value);
+        assertEquals(0, reduce.totalHits.value());
         assertFalse(reduce.sortedTopDocs.isSortedByField);
         assertNull(reduce.sortedTopDocs.sortFields);
         assertNull(reduce.sortedTopDocs.collapseField);
@@ -1277,7 +1277,7 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna
         assertEquals(max.get(), internalMax.getValue(), 0.0D);
         assertEquals(1, reduce.sortedTopDocs.scoreDocs.length);
         assertEquals(max.get(), reduce.maxScore, 0.0f);
-        assertEquals(expectedNumResults, reduce.totalHits.value);
+        assertEquals(expectedNumResults, reduce.totalHits.value());
         assertEquals(max.get(), reduce.sortedTopDocs.scoreDocs[0].score, 0.0f);
         assertFalse(reduce.sortedTopDocs.isSortedByField);
         assertNull(reduce.sortedTopDocs.sortFields);
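As testMerge above shows, SearchHits.getTotalHits() is null when total-hit tracking is disabled, so callers have to null-check before touching the value()/relation() accessors. A small defensive sketch, assuming only the SearchResponse API these tests already use; the class and method names are illustrative:

    import java.util.OptionalLong;

    import org.apache.lucene.search.TotalHits;
    import org.opensearch.action.search.SearchResponse;

    public final class TotalHitsGuard {
        /** Returns the hit count only when it was tracked and is exact; empty otherwise. */
        public static OptionalLong exactHitCount(SearchResponse response) {
            TotalHits totalHits = response.getHits().getTotalHits();
            if (totalHits == null || totalHits.relation() != TotalHits.Relation.EQUAL_TO) {
                return OptionalLong.empty();
            }
            return OptionalLong.of(totalHits.value());
        }
    }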
diff --git a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java
index f6a06a51c7b43..1ccbbf4196505 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java
@@ -274,11 +274,11 @@ public void run() {
         SearchPhaseController.ReducedQueryPhase phase = action.results.reduce();
         assertThat(phase.numReducePhases, greaterThanOrEqualTo(1));
         if (withScroll) {
-            assertThat(phase.totalHits.value, equalTo((long) numShards));
-            assertThat(phase.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO));
+            assertThat(phase.totalHits.value(), equalTo((long) numShards));
+            assertThat(phase.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO));
         } else {
-            assertThat(phase.totalHits.value, equalTo(2L));
-            assertThat(phase.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO));
+            assertThat(phase.totalHits.value(), equalTo(2L));
+            assertThat(phase.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO));
         }
         assertThat(phase.sortedTopDocs.scoreDocs.length, equalTo(1));
         assertThat(phase.sortedTopDocs.scoreDocs[0], instanceOf(FieldDoc.class));
diff --git a/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java
index 0eefa413c1864..e93f4553063ac 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java
@@ -665,11 +665,11 @@ public void testMergeSearchHits() throws InterruptedException {
             TotalHits totalHits = null;
             if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) {
                 totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation);
-                long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value;
-                expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation);
+                long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value();
+                expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation);
             }
-            final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value;
+            final int numDocs = totalHits == null || totalHits.value() >= requestedSize ? requestedSize : (int) totalHits.value();
             int scoreFactor = randomIntBetween(1, numResponses);
             float maxScore = scoreSort ? numDocs * scoreFactor : Float.NaN;
             SearchHit[] hits = randomSearchHitArray(
@@ -771,8 +771,8 @@ public void testMergeSearchHits() throws InterruptedException {
             assertNull(searchHits.getTotalHits());
         } else {
             assertNotNull(searchHits.getTotalHits());
-            assertEquals(expectedTotalHits.value, searchHits.getTotalHits().value);
-            assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation);
+            assertEquals(expectedTotalHits.value(), searchHits.getTotalHits().value());
+            assertSame(expectedTotalHits.relation(), searchHits.getTotalHits().relation());
         }
         if (expectedMaxScore == Float.NEGATIVE_INFINITY) {
             assertTrue(Float.isNaN(searchHits.getMaxScore()));
@@ -821,9 +821,9 @@ public void testMergeNoResponsesAdded() {
         assertEquals(0, response.getNumReducePhases());
         assertFalse(response.isTimedOut());
         assertNotNull(response.getHits().getTotalHits());
-        assertEquals(0, response.getHits().getTotalHits().value);
+        assertEquals(0, response.getHits().getTotalHits().value());
         assertEquals(0, response.getHits().getHits().length);
-        assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation);
+        assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation());
         assertNull(response.getScrollId());
         assertSame(InternalAggregations.EMPTY, response.getAggregations());
         assertNull(response.getSuggest());
@@ -892,7 +892,7 @@ public void testMergeEmptySearchHitsWithNonEmpty() {
                 () -> null
             )
         );
-        assertEquals(10, mergedResponse.getHits().getTotalHits().value);
+        assertEquals(10, mergedResponse.getHits().getTotalHits().value());
         assertEquals(10, mergedResponse.getHits().getHits().length);
         assertEquals(2, mergedResponse.getTotalShards());
         assertEquals(2, mergedResponse.getSuccessfulShards());
@@ -916,8 +916,8 @@ public void testMergeOnlyEmptyHits() {
             TotalHits totalHits = null;
             if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) {
                 totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation);
-                long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value;
-                expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation);
+                long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value();
+                expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation);
             }
             SearchHits empty = new SearchHits(new SearchHit[0], totalHits, Float.NaN, null, null, null);
             InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1);
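The expectation arithmetic in testMergeSearchHits above is the merge rule itself: per-response counts are summed and capped at track_total_hits_up_to. A minimal sketch of that accumulation, assuming only Lucene's TotalHits; the class and method names are hypothetical:

    import org.apache.lucene.search.TotalHits;

    public final class TotalHitsMergeSketch {
        /** Adds one response's total hits into a running total, never exceeding the track_total_hits_up_to cap. */
        public static TotalHits accumulate(TotalHits runningTotal, TotalHits shardTotal, int trackTotalHitsUpTo) {
            long previousValue = runningTotal == null ? 0 : runningTotal.value();
            long capped = Math.min(previousValue + shardTotal.value(), trackTotalHitsUpTo);
            // The relation is carried over unchanged, exactly as the test expectation does;
            // a real merger would switch to GREATER_THAN_OR_EQUAL_TO once the cap kicks in.
            return new TotalHits(capped, shardTotal.relation());
        }
    }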
diff --git a/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java b/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java
index c9e59ab4ea04d..f5a53be43f267 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchResponseTests.java
@@ -434,8 +434,8 @@ public void testSerialization() throws IOException {
         if (searchResponse.getHits().getTotalHits() == null) {
             assertNull(deserialized.getHits().getTotalHits());
         } else {
-            assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value);
-            assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation);
+            assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value());
+            assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation());
         }
         assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length);
         assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases());
@@ -452,8 +452,8 @@ public void testSerializationWithSearchExtBuilders() throws IOException {
         if (searchResponse.getHits().getTotalHits() == null) {
             assertNull(deserialized.getHits().getTotalHits());
         } else {
-            assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value);
-            assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation);
+            assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value());
+            assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation());
         }
         assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length);
         assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases());
@@ -474,8 +474,8 @@ public void testSerializationWithSearchExtBuildersOnUnsupportedWriterVersion() t
         if (searchResponse.getHits().getTotalHits() == null) {
             assertNull(deserialized.getHits().getTotalHits());
         } else {
-            assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value);
-            assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation);
+            assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value());
+            assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation());
         }
         assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length);
         assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases());
diff --git a/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java
index ba40343fb2130..7aa07a4bdb9c6 100644
--- a/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java
+++ b/server/src/test/java/org/opensearch/indices/IndicesQueryCacheTests.java
@@ -91,10 +91,10 @@ public String toString(String field) {
 
         @Override
         public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
-            return new ConstantScoreWeight(this, boost) {
+            return new ConstantScoreWeight(boost) {
                 @Override
                 public Scorer scorer(LeafReaderContext context) throws IOException {
-                    return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
+                    return new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
                 }
 
                 @Override
diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
index e27ff311c06f6..7a6050fe158ee 100644
--- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
@@ -844,7 +844,7 @@ public void waitNoPendingTasksOnAll() throws Exception {
     /** Ensures the result counts are as expected, and logs the results if different */
     public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) {
         final TotalHits totalHits = searchResponse.getHits().getTotalHits();
-        if (totalHits.value != expectedResults || totalHits.relation != TotalHits.Relation.EQUAL_TO) {
+        if (totalHits.value() != expectedResults || totalHits.relation() != TotalHits.Relation.EQUAL_TO) {
             StringBuilder sb = new StringBuilder("search result contains [");
-            String value = Long.toString(totalHits.value) + (totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : "");
+            String value = Long.toString(totalHits.value()) + (totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : "");
             sb.append(value).append("] results. expected [").append(expectedResults).append("]");
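The helper above renders a lower-bound count with a trailing "+", which is the usual way to surface TotalHits.Relation to humans. A standalone sketch of the same formatting rule, kept null-safe for the case where total-hit tracking is disabled; the class and method names are made up for illustration:

    import org.apache.lucene.search.TotalHits;

    public final class TotalHitsFormatter {
        /** Formats a hit count the way assertResultsAndLogOnFailure logs it: "42" if exact, "42+" if a lower bound. */
        public static String format(TotalHits totalHits) {
            if (totalHits == null) {
                return "unknown"; // total hit tracking was disabled
            }
            String suffix = totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : "";
            return Long.toString(totalHits.value()) + suffix;
        }
    }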