Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 0 additions & 17 deletions lucene/core/src/java/org/apache/lucene/util/BitDocIdSet.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
*/
package org.apache.lucene.util;

import java.io.IOException;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;

Expand Down Expand Up @@ -58,22 +57,6 @@ public DocIdSetIterator iterator() {
return new BitSetIterator(set, cost);
}

/**
 * Provides a {@link Bits} interface for random access to matching documents.
 *
 * <p>The general {@code DocIdSet} contract allows an implementation to return {@code null} here to
 * signal that random access is unsupported — and, in contrast to {@link #iterator()}, such a
 * {@code null} would <b>not</b> imply that no documents match. This implementation, however, always
 * exposes its backing bit set, which guarantees O(1) access to every docid without external disk
 * access (required because the {@link Bits} interface cannot throw {@link IOException}).
 *
 * @return the backing {@link BitSet}; never {@code null} for this class
 */
public BitSet bits() {
return set;
}

@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED + set.ramBytesUsed();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,7 @@
*/
package org.apache.lucene.util;

import java.io.IOException;
import java.util.BitSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.tests.util.BaseDocIdSetTestCase;

public class TestFixedBitDocIdSet extends BaseDocIdSetTestCase<BitDocIdSet> {
Expand All @@ -31,25 +29,4 @@ public BitDocIdSet copyOf(BitSet bs, int length) {
}
return new BitDocIdSet(set);
}

@Override
// Extends the base-class equality check with a consistency check between the random-access
// bits() view and the iterator() view of the same BitDocIdSet: every doc the iterator returns
// must be set in bits(), and every doc the iterator skips must be clear in bits().
public void assertEquals(int numBits, BitSet ds1, BitDocIdSet ds2) throws IOException {
super.assertEquals(numBits, ds1, ds2);
// bits() may legitimately be null per the DocIdSet contract (random access unsupported);
// only run the consistency check when a random-access view is available.
final Bits bits = ds2.bits();
if (bits != null) {
// test consistency between bits and iterator
DocIdSetIterator it2 = ds2.iterator();
// Walk the iterator; for each returned doc, every docid in the gap since the previous
// match (previousDoc+1 .. doc-1) must be clear, and doc itself must be set. On
// exhaustion (NO_MORE_DOCS) the tail gap runs to bits.length().
for (int previousDoc = -1, doc = it2.nextDoc(); ; previousDoc = doc, doc = it2.nextDoc()) {
final int max = doc == DocIdSetIterator.NO_MORE_DOCS ? bits.length() : doc;
for (int i = previousDoc + 1; i < max; ++i) {
assertFalse(bits.get(i));
}
if (doc == DocIdSetIterator.NO_MORE_DOCS) {
break;
}
assertTrue(bits.get(doc));
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
*/
package org.apache.lucene.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
Expand Down Expand Up @@ -47,13 +46,4 @@ public BitDocIdSet copyOf(BitSet bs, int length) {
}
return new BitDocIdSet(set, set.approximateCardinality());
}

@Override
// Verifies the random-access bits() view of the BitDocIdSet against the expected
// java.util.BitSet bit-by-bit, then defers to the base-class (iterator-based) checks.
public void assertEquals(int numBits, BitSet ds1, BitDocIdSet ds2) throws IOException {
  // bits() is a pure getter, so hoist it out of the loop instead of re-fetching per bit.
  final org.apache.lucene.util.BitSet actual = ds2.bits();
  for (int doc = 0; doc < numBits; ++doc) {
    assertEquals(ds1.get(doc), actual.get(doc));
  }
  // The two views must also agree on the total number of set bits.
  assertEquals(ds1.cardinality(), actual.cardinality());
  super.assertEquals(numBits, ds1, ds2);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,19 +25,17 @@
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.FixedBitSet;

/** A {@link BitSetProducer} that wraps a query and caches matching {@link BitSet}s per segment. */
public class QueryBitSetProducer implements BitSetProducer {
private final Query query;
final Map<IndexReader.CacheKey, DocIdSet> cache =
Collections.synchronizedMap(new WeakHashMap<>());
final Map<IndexReader.CacheKey, BitSet> cache = Collections.synchronizedMap(new WeakHashMap<>());

/**
* Wraps another query's result and caches it into bitsets.
Expand All @@ -57,16 +55,18 @@ public Query getQuery() {
return query;
}

private static final BitSet SENTINEL = new FixedBitSet(0);

@Override
public BitSet getBitSet(LeafReaderContext context) throws IOException {
final LeafReader reader = context.reader();
final IndexReader.CacheHelper cacheHelper = reader.getCoreCacheHelper();

DocIdSet docIdSet = null;
BitSet bitSet = null;
if (cacheHelper != null) {
docIdSet = cache.get(cacheHelper.getKey());
bitSet = cache.get(cacheHelper.getKey());
}
if (docIdSet == null) {
if (bitSet == null) {
final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
final IndexSearcher searcher = new IndexSearcher(topLevelContext);
searcher.setQueryCache(null);
Expand All @@ -77,15 +77,15 @@ public BitSet getBitSet(LeafReaderContext context) throws IOException {
final Scorer s = weight.scorer(context);

if (s == null) {
docIdSet = DocIdSet.EMPTY;
bitSet = SENTINEL;
} else {
docIdSet = new BitDocIdSet(BitSet.of(s.iterator(), context.reader().maxDoc()));
bitSet = BitSet.of(s.iterator(), context.reader().maxDoc());
}
if (cacheHelper != null) {
cache.put(cacheHelper.getKey(), docIdSet);
cache.put(cacheHelper.getKey(), bitSet);
}
}
return docIdSet == DocIdSet.EMPTY ? null : ((BitDocIdSet) docIdSet).bits();
return bitSet == SENTINEL ? null : bitSet;
}

@Override
Expand Down