diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java
new file mode 100644
index 0000000000000..6b47c3da2fb58
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCache.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.security.authz.accesscontrol;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.ReaderUtil;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.Accountable;
+import org.apache.lucene.util.BitSet;
+import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.cache.Cache;
+import org.elasticsearch.common.cache.CacheBuilder;
+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Setting.Property;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.set.Sets;
+
+import java.io.Closeable;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+
+/**
+ * This is a cache for {@link BitSet} instances that are used with the {@link DocumentSubsetReader}.
+ * It is bounded by memory size and access time.
+ *
+ * @see org.elasticsearch.index.cache.bitset.BitsetFilterCache
+ */
+public final class DocumentSubsetBitsetCache implements IndexReader.ClosedListener, Closeable, Accountable {
+
+    /**
+     * The TTL defaults to 1 week. We depend on the {@code cache.size} setting to keep the cache to a sensible size, by evicting LRU
+     * entries; however, there is benefit in reclaiming memory by expiring bitsets that have not been used for some period of time.
+     * Because {@link org.elasticsearch.xpack.core.security.authz.permission.IndicesPermission.Group#query} can be templated, it is
+     * not uncommon for a query to only be used for a relatively short period of time (e.g. because a user's metadata changed, or because
+     * that user is an infrequent user of Elasticsearch). This access time expiry helps free up memory in those circumstances even if the
+     * cache is never filled.
+     */
+    static final Setting<TimeValue> CACHE_TTL_SETTING =
+        Setting.timeSetting("xpack.security.dls.bitset.cache.ttl", TimeValue.timeValueHours(24 * 7), Property.NodeScope);
+
+    static final Setting<ByteSizeValue> CACHE_SIZE_SETTING = Setting.byteSizeSetting("xpack.security.dls.bitset.cache.size",
+        new ByteSizeValue(50, ByteSizeUnit.MB), Property.NodeScope);
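For illustration, a minimal sketch (not part of the patch) of how the two node-scope settings above drive the cache; the setting keys come from the declarations above, everything else is assumed:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache;

    public class BitsetCacheSettingsExample {
        public static void main(String[] args) {
            // On a real node these values would come from elasticsearch.yml.
            Settings settings = Settings.builder()
                .put("xpack.security.dls.bitset.cache.ttl", "2h")     // expire bitsets not accessed for 2 hours
                .put("xpack.security.dls.bitset.cache.size", "100mb") // bound total BitSet memory
                .build();
            DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings);
            System.out.println(cache.usageStats()); // count / memory / memory_in_bytes, all zero while empty
        }
    }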
+
+    private static final BitSet NULL_MARKER = new FixedBitSet(0);
+
+    private final Logger logger;
+    private final Cache<BitsetCacheKey, BitSet> bitsetCache;
+    private final Map<IndexReader.CacheKey, Set<BitsetCacheKey>> keysByIndex;
+
+    public DocumentSubsetBitsetCache(Settings settings) {
+        this.logger = LogManager.getLogger(getClass());
+        final TimeValue ttl = CACHE_TTL_SETTING.get(settings);
+        final ByteSizeValue size = CACHE_SIZE_SETTING.get(settings);
+        this.bitsetCache = CacheBuilder.<BitsetCacheKey, BitSet>builder()
+            .setExpireAfterAccess(ttl)
+            .setMaximumWeight(size.getBytes())
+            .weigher((key, bitSet) -> bitSet == NULL_MARKER ? 0 : bitSet.ramBytesUsed()).build();
+        this.keysByIndex = new ConcurrentHashMap<>();
+    }
+
+    @Override
+    public void onClose(IndexReader.CacheKey ownerCoreCacheKey) {
+        final Set<BitsetCacheKey> keys = keysByIndex.remove(ownerCoreCacheKey);
+        if (keys != null) {
+            // Because this Set has been removed from the map, and the only update to the set is performed in a
+            // Map#compute call, it should not be possible to get a concurrent modification here.
+            keys.forEach(bitsetCache::invalidate);
+        }
+    }
+
+    @Override
+    public void close() {
+        clear("close");
+    }
+
+    public void clear(String reason) {
+        logger.debug("clearing all DLS bitsets because [{}]", reason);
+        // Due to the order here, it is possible that a new entry could be added _after_ the keysByIndex map is cleared
+        // but _before_ the cache is cleared. This would mean it sits orphaned in keysByIndex, but this is not an issue.
+        // When the index is closed, the key will be removed from the map, and there will not be a corresponding item
+        // in the cache, which will make the cache-invalidate a no-op.
+        // Since the entry is not in the cache, if #getBitSet is called, it will be loaded, and the new key will be added
+        // to the index without issue.
+        keysByIndex.clear();
+        bitsetCache.invalidateAll();
+    }
+
+    int entryCount() {
+        return this.bitsetCache.count();
+    }
+
+    @Override
+    public long ramBytesUsed() {
+        return this.bitsetCache.weight();
+    }
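The NULL_MARKER field above exists because the cache loader used by getBitSet (below) is not allowed to return null, so a zero-length FixedBitSet with weight 0 stands in for "this query matches nothing" and is translated back to null on the way out. A stand-alone sketch of the same sentinel pattern, with hypothetical names:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    // Illustrative only: a cache that can remember "loaded, but the answer is null".
    final class NullMarkerCache<K, V> {
        private static final Object NULL_MARKER = new Object();
        private final Map<K, Object> cache = new ConcurrentHashMap<>();

        @SuppressWarnings("unchecked")
        V computeIfAbsent(K key, Function<K, V> loader) {
            // Store the sentinel instead of null, so "cached as empty" and "never loaded" differ.
            Object value = cache.computeIfAbsent(key, k -> {
                V loaded = loader.apply(k);
                return loaded == null ? NULL_MARKER : loaded;
            });
            return value == NULL_MARKER ? null : (V) value;
        }
    }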
+
+    /**
+     * Obtain the {@link BitSet} for the given {@code query} in the given {@code context}.
+     * If there is a cached entry for that query and context, it will be returned.
+     * Otherwise a new BitSet will be created and stored in the cache.
+     * The returned BitSet may be null (e.g. if the query has no results).
+     */
+    @Nullable
+    public BitSet getBitSet(final Query query, final LeafReaderContext context) throws ExecutionException {
+        final IndexReader.CacheHelper coreCacheHelper = context.reader().getCoreCacheHelper();
+        if (coreCacheHelper == null) {
+            throw new IllegalArgumentException("Reader " + context.reader() + " does not support caching");
+        }
+        coreCacheHelper.addClosedListener(this);
+        final IndexReader.CacheKey indexKey = coreCacheHelper.getKey();
+        final BitsetCacheKey cacheKey = new BitsetCacheKey(indexKey, query);
+
+        final BitSet bitSet = bitsetCache.computeIfAbsent(cacheKey, ignore1 -> {
+            // This ensures all insertions into the set are guarded by ConcurrentHashMap's atomicity guarantees.
+            keysByIndex.compute(indexKey, (ignore2, set) -> {
+                if (set == null) {
+                    set = Sets.newConcurrentHashSet();
+                }
+                set.add(cacheKey);
+                return set;
+            });
+            final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
+            final IndexSearcher searcher = new IndexSearcher(topLevelContext);
+            searcher.setQueryCache(null);
+            final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f);
+            Scorer s = weight.scorer(context);
+            if (s == null) {
+                // A cache loader is not allowed to return null, return a marker object instead.
+                return NULL_MARKER;
+            } else {
+                return BitSet.of(s.iterator(), context.reader().maxDoc());
+            }
+        });
+        if (bitSet == NULL_MARKER) {
+            return null;
+        } else {
+            return bitSet;
+        }
+    }
+
+    public static List<Setting<?>> getSettings() {
+        return List.of(CACHE_TTL_SETTING, CACHE_SIZE_SETTING);
+    }
+
+    public Map<String, Object> usageStats() {
+        final ByteSizeValue ram = new ByteSizeValue(ramBytesUsed(), ByteSizeUnit.BYTES);
+        return Map.of(
+            "count", entryCount(),
+            "memory", ram.toString(),
+            "memory_in_bytes", ram.getBytes()
+        );
+    }
+
+    private class BitsetCacheKey {
+        final IndexReader.CacheKey index;
+        final Query query;
+
+        private BitsetCacheKey(IndexReader.CacheKey index, Query query) {
+            this.index = index;
+            this.query = query;
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            if (this == other) {
+                return true;
+            }
+            if (other == null || getClass() != other.getClass()) {
+                return false;
+            }
+            final BitsetCacheKey that = (BitsetCacheKey) other;
+            return Objects.equals(this.index, that.index) &&
+                Objects.equals(this.query, that.query);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(index, query);
+        }
+
+        @Override
+        public String toString() {
+            return getClass().getSimpleName() + "(" + index + "," + query + ")";
+        }
+    }
+}
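A hedged usage sketch of the cache defined above; getBitSet and its null contract come from the file, the helper class and method names are invented:

    import java.util.concurrent.ExecutionException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.BitSet;

    final class RoleBitsetExample {
        // Count the documents in one leaf segment that the role query allows.
        // getBitSet returns null when the query matches nothing in this leaf.
        static int countVisibleDocs(DocumentSubsetBitsetCache cache, Query roleQuery,
                                    LeafReaderContext leaf) throws ExecutionException {
            BitSet bits = cache.getBitSet(roleQuery, leaf);
            return bits == null ? 0 : bits.cardinality();
        }
    }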
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java
index af84315abf4eb..1cda15c8e3c5e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReader.java
@@ -21,7 +21,6 @@
 import org.elasticsearch.common.cache.Cache;
 import org.elasticsearch.common.cache.CacheBuilder;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
-import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
@@ -34,9 +33,9 @@
  */
 public final class DocumentSubsetReader extends FilterLeafReader {
 
-    public static DocumentSubsetDirectoryReader wrap(DirectoryReader in, BitsetFilterCache bitsetFilterCache,
+    public static DocumentSubsetDirectoryReader wrap(DirectoryReader in, DocumentSubsetBitsetCache bitsetCache,
                                                      Query roleQuery) throws IOException {
-        return new DocumentSubsetDirectoryReader(in, bitsetFilterCache, roleQuery);
+        return new DocumentSubsetDirectoryReader(in, bitsetCache, roleQuery);
     }
 
     /**
@@ -110,21 +109,21 @@ private static int getNumDocs(LeafReader reader, Query roleQuery, BitSet roleQue
     public static final class DocumentSubsetDirectoryReader extends FilterDirectoryReader {
 
         private final Query roleQuery;
-        private final BitsetFilterCache bitsetFilterCache;
+        private final DocumentSubsetBitsetCache bitsetCache;
 
-        DocumentSubsetDirectoryReader(final DirectoryReader in, final BitsetFilterCache bitsetFilterCache, final Query roleQuery)
-                throws IOException {
+        DocumentSubsetDirectoryReader(final DirectoryReader in, final DocumentSubsetBitsetCache bitsetCache,
+                                      final Query roleQuery) throws IOException {
             super(in, new SubReaderWrapper() {
                 @Override
                 public LeafReader wrap(LeafReader reader) {
                     try {
-                        return new DocumentSubsetReader(reader, bitsetFilterCache, roleQuery);
+                        return new DocumentSubsetReader(reader, bitsetCache, roleQuery);
                     } catch (Exception e) {
                         throw ExceptionsHelper.convertToElastic(e);
                     }
                 }
             });
-            this.bitsetFilterCache = bitsetFilterCache;
+            this.bitsetCache = bitsetCache;
             this.roleQuery = roleQuery;
 
             verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(in);
@@ -132,7 +131,7 @@ public LeafReader wrap(LeafReader reader) {
 
         @Override
         protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-            return new DocumentSubsetDirectoryReader(in, bitsetFilterCache, roleQuery);
+            return new DocumentSubsetDirectoryReader(in, bitsetCache, roleQuery);
         }
 
         private static void verifyNoOtherDocumentSubsetDirectoryReaderIsWrapped(DirectoryReader reader) {
@@ -156,9 +155,9 @@ public CacheHelper getReaderCacheHelper() {
     private final BitSet roleQueryBits;
     private final int numDocs;
 
-    private DocumentSubsetReader(final LeafReader in, BitsetFilterCache bitsetFilterCache, final Query roleQuery) throws Exception {
+    private DocumentSubsetReader(final LeafReader in, DocumentSubsetBitsetCache bitsetCache, final Query roleQuery) throws Exception {
         super(in);
-        this.roleQueryBits = bitsetFilterCache.getBitSetProducer(roleQuery).getBitSet(in.getContext());
+        this.roleQueryBits = bitsetCache.getBitSet(roleQuery, in.getContext());
        this.numDocs = getNumDocs(in, roleQuery, roleQueryBits);
     }
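A minimal sketch, mirroring the test usage later in this patch, of how a reader wrapped this way behaves; the method and variable names are illustrative:

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Query;

    final class DocumentSubsetSearchExample {
        // Searches on the wrapped reader only see documents matched by roleQuery;
        // everything else behaves as if those documents were deleted.
        static long visibleHitCount(DirectoryReader reader, DocumentSubsetBitsetCache cache,
                                    Query roleQuery) throws Exception {
            DirectoryReader filtered = DocumentSubsetReader.wrap(reader, cache, roleQuery);
            IndexSearcher searcher = new IndexSearcher(filtered);
            return searcher.search(new MatchAllDocsQuery(), 1).totalHits.value;
        }
    }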
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java
index 6ea8ae84e118d..ea8f005be0397 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapper.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardUtils;
@@ -44,17 +43,17 @@ public class SecurityIndexReaderWrapper implements CheckedFunction<DirectoryRead
 
     private final Function<ShardId, QueryShardContext> queryShardContextProvider;
-    private final BitsetFilterCache bitsetFilterCache;
+    private final DocumentSubsetBitsetCache bitsetCache;
     private final XPackLicenseState licenseState;
     private final ThreadContext threadContext;
     private final ScriptService scriptService;
 
     public SecurityIndexReaderWrapper(Function<ShardId, QueryShardContext> queryShardContextProvider,
-                                      BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState,
+                                      DocumentSubsetBitsetCache bitsetCache, ThreadContext threadContext, XPackLicenseState licenseState,
                                       ScriptService scriptService) {
         this.scriptService = scriptService;
         this.queryShardContextProvider = queryShardContextProvider;
-        this.bitsetFilterCache = bitsetFilterCache;
+        this.bitsetCache = bitsetCache;
         this.threadContext = threadContext;
         this.licenseState = licenseState;
     }
@@ -84,7 +83,7 @@ public DirectoryReader apply(final DirectoryReader reader) {
             if (documentPermissions != null && documentPermissions.hasDocumentLevelPermissions()) {
                 BooleanQuery filterQuery = documentPermissions.filter(getUser(), scriptService, shardId, queryShardContextProvider);
                 if (filterQuery != null) {
-                    wrappedReader = DocumentSubsetReader.wrap(wrappedReader, bitsetFilterCache, new ConstantScoreQuery(filterQuery));
+                    wrappedReader = DocumentSubsetReader.wrap(wrappedReader, bitsetCache, new ConstantScoreQuery(filterQuery));
                 }
             }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/CacheIteratorHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/CacheIteratorHelper.java
new file mode 100644
index 0000000000000..0dfdab26815a5
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/CacheIteratorHelper.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.security.support;
+
+import org.elasticsearch.common.cache.Cache;
+import org.elasticsearch.common.util.concurrent.ReleasableLock;
+
+import java.util.Iterator;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Predicate;
+
+/**
+ * A utility class to facilitate iterating over (and modifying) a {@link org.elasticsearch.common.cache.Cache}.
+ * The semantics of the cache are such that when iterating (with the potential to call {@link Iterator#remove()}), we must prevent any
+ * other modifications.
+ * This class provides the necessary methods to support this constraint in a clear manner.
+ */
+public class CacheIteratorHelper<K, V> {
+    private final Cache<K, V> cache;
+    private final ReleasableLock updateLock;
+    private final ReleasableLock iteratorLock;
+
+    public CacheIteratorHelper(Cache<K, V> cache) {
+        this.cache = cache;
+        final ReadWriteLock lock = new ReentrantReadWriteLock();
+        // the lock is used in an odd manner; when iterating over the cache we cannot have modifiers other than deletes using the
+        // iterator, but when not iterating we can modify the cache without external locking. When making normal modifications to the
+        // cache the read lock is obtained so that we can allow concurrent modifications; however, when we need to iterate over the keys
+        // or values of the cache the write lock must be obtained to prevent any modifications.
+        updateLock = new ReleasableLock(lock.readLock());
+        iteratorLock = new ReleasableLock(lock.writeLock());
+    }
+
+    public ReleasableLock acquireUpdateLock() {
+        return updateLock.acquire();
+    }
+
+    private ReleasableLock acquireForIterator() {
+        return iteratorLock.acquire();
+    }
+
+    public void removeKeysIf(Predicate<K> removeIf) {
+        // the cache cannot be modified while doing this operation per the terms of the cache iterator
+        try (ReleasableLock ignored = this.acquireForIterator()) {
+            Iterator<K> iterator = cache.keys().iterator();
+            while (iterator.hasNext()) {
+                K key = iterator.next();
+                if (removeIf.test(key)) {
+                    iterator.remove();
+                }
+            }
+        }
+    }
+}
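The lock in CacheIteratorHelper is deliberately inverted: many concurrent "writers" (ordinary cache updates) share the read lock, while iteration takes the write lock so it excludes all mutation. A stand-alone sketch of the same pattern, using only JDK types:

    import java.util.concurrent.locks.Lock;
    import java.util.concurrent.locks.ReadWriteLock;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    // Illustration of the inverted read/write-lock usage described above:
    // routine mutations share the "read" lock, iteration takes the "write" lock.
    final class InvertedLockExample {
        private final ReadWriteLock lock = new ReentrantReadWriteLock();
        private final Lock updateLock = lock.readLock();    // many concurrent updates
        private final Lock iteratorLock = lock.writeLock(); // exclusive iteration

        void update(Runnable mutation) {
            updateLock.lock();
            try {
                mutation.run();
            } finally {
                updateLock.unlock();
            }
        }

        void iterate(Runnable iteration) {
            iteratorLock.lock();
            try {
                iteration.run();
            } finally {
                iteratorLock.unlock();
            }
        }
    }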
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java
new file mode 100644
index 0000000000000..df2c63f357a60
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java
@@ -0,0 +1,247 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.core.security.authz.accesscontrol;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BitSet;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.CheckedBiConsumer;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
+import org.hamcrest.Matchers;
+
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class DocumentSubsetBitsetCacheTests extends ESTestCase {
+
+    public void testSameBitSetIsReturnedForIdenticalQuery() throws Exception {
+        final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(Settings.EMPTY);
+        runTestOnIndex((shardContext, leafContext) -> {
+            final Query query1 = QueryBuilders.termQuery("field-1", "value-1").toQuery(shardContext);
+            final BitSet bitSet1 = cache.getBitSet(query1, leafContext);
+            assertThat(bitSet1, notNullValue());
+
QueryBuilders.termQuery("field-1", "value-1").toQuery(shardContext); + final BitSet bitSet2 = cache.getBitSet(query2, leafContext); + assertThat(bitSet2, notNullValue()); + + assertThat(bitSet2, Matchers.sameInstance(bitSet1)); + }); + } + + public void testNullBitSetIsReturnedForNonMatchingQuery() throws Exception { + final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(Settings.EMPTY); + runTestOnIndex((shardContext, leafContext) -> { + final Query query = QueryBuilders.termQuery("does-not-exist", "any-value").toQuery(shardContext); + final BitSet bitSet = cache.getBitSet(query, leafContext); + assertThat(bitSet, nullValue()); + }); + } + + public void testNullEntriesAreNotCountedInMemoryUsage() throws Exception { + final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(Settings.EMPTY); + assertThat(cache.ramBytesUsed(), equalTo(0L)); + + runTestOnIndex((shardContext, leafContext) -> { + for (int i = 1; i <= randomIntBetween(3, 6); i++) { + final Query query = QueryBuilders.termQuery("dne-" + i, "dne- " + i).toQuery(shardContext); + final BitSet bitSet = cache.getBitSet(query, leafContext); + assertThat(bitSet, nullValue()); + assertThat(cache.ramBytesUsed(), equalTo(0L)); + assertThat(cache.entryCount(), equalTo(i)); + } + }); + } + + public void testCacheRespectsMemoryLimit() throws Exception { + // This value is based on the internal implementation details of lucene's FixedBitSet + // If the implementation changes, this can be safely updated to match the new ram usage for a single bitset + final long expectedBytesPerBitSet = 56; + + // Enough to hold exactly 2 bit-sets in the cache + final long maxCacheBytes = expectedBytesPerBitSet * 2; + final Settings settings = Settings.builder() + .put(DocumentSubsetBitsetCache.CACHE_SIZE_SETTING.getKey(), maxCacheBytes + "b") + .build(); + final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings); + assertThat(cache.entryCount(), equalTo(0)); + assertThat(cache.ramBytesUsed(), equalTo(0L)); + + runTestOnIndex((shardContext, leafContext) -> { + Query previousQuery = null; + BitSet previousBitSet = null; + for (int i = 1; i <= 5; i++) { + final TermQueryBuilder queryBuilder = QueryBuilders.termQuery("field-" + i, "value-" + i); + final Query query = queryBuilder.toQuery(shardContext); + final BitSet bitSet = cache.getBitSet(query, leafContext); + assertThat(bitSet, notNullValue()); + assertThat(bitSet.ramBytesUsed(), equalTo(expectedBytesPerBitSet)); + + // The first time through we have 1 entry, after that we have 2 + final int expectedCount = i == 1 ? 
+    public void testCacheRespectsMemoryLimit() throws Exception {
+        // This value is based on the internal implementation details of lucene's FixedBitSet
+        // If the implementation changes, this can be safely updated to match the new ram usage for a single bitset
+        final long expectedBytesPerBitSet = 56;
+
+        // Enough to hold exactly 2 bit-sets in the cache
+        final long maxCacheBytes = expectedBytesPerBitSet * 2;
+        final Settings settings = Settings.builder()
+            .put(DocumentSubsetBitsetCache.CACHE_SIZE_SETTING.getKey(), maxCacheBytes + "b")
+            .build();
+        final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings);
+        assertThat(cache.entryCount(), equalTo(0));
+        assertThat(cache.ramBytesUsed(), equalTo(0L));
+
+        runTestOnIndex((shardContext, leafContext) -> {
+            Query previousQuery = null;
+            BitSet previousBitSet = null;
+            for (int i = 1; i <= 5; i++) {
+                final TermQueryBuilder queryBuilder = QueryBuilders.termQuery("field-" + i, "value-" + i);
+                final Query query = queryBuilder.toQuery(shardContext);
+                final BitSet bitSet = cache.getBitSet(query, leafContext);
+                assertThat(bitSet, notNullValue());
+                assertThat(bitSet.ramBytesUsed(), equalTo(expectedBytesPerBitSet));
+
+                // The first time through we have 1 entry, after that we have 2
+                final int expectedCount = i == 1 ? 1 : 2;
+                assertThat(cache.entryCount(), equalTo(expectedCount));
+                assertThat(cache.ramBytesUsed(), equalTo(expectedCount * expectedBytesPerBitSet));
+
+                // Older queries should get evicted, but the query from last iteration should still be cached
+                if (previousQuery != null) {
+                    assertThat(cache.getBitSet(previousQuery, leafContext), sameInstance(previousBitSet));
+                    assertThat(cache.entryCount(), equalTo(expectedCount));
+                    assertThat(cache.ramBytesUsed(), equalTo(expectedCount * expectedBytesPerBitSet));
+                }
+                previousQuery = query;
+                previousBitSet = bitSet;
+
+                assertThat(cache.getBitSet(queryBuilder.toQuery(shardContext), leafContext), sameInstance(bitSet));
+                assertThat(cache.entryCount(), equalTo(expectedCount));
+                assertThat(cache.ramBytesUsed(), equalTo(expectedCount * expectedBytesPerBitSet));
+            }
+
+            assertThat(cache.entryCount(), equalTo(2));
+            assertThat(cache.ramBytesUsed(), equalTo(2 * expectedBytesPerBitSet));
+
+            cache.clear("testing");
+
+            assertThat(cache.entryCount(), equalTo(0));
+            assertThat(cache.ramBytesUsed(), equalTo(0L));
+        });
+    }
+
+    public void testCacheRespectsAccessTimeExpiry() throws Exception {
+        final Settings settings = Settings.builder()
+            .put(DocumentSubsetBitsetCache.CACHE_TTL_SETTING.getKey(), "10ms")
+            .build();
+        final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(settings);
+        assertThat(cache.entryCount(), equalTo(0));
+        assertThat(cache.ramBytesUsed(), equalTo(0L));
+
+        runTestOnIndex((shardContext, leafContext) -> {
+            final Query query1 = QueryBuilders.termQuery("field-1", "value-1").toQuery(shardContext);
+            final BitSet bitSet1 = cache.getBitSet(query1, leafContext);
+            assertThat(bitSet1, notNullValue());
+
+            final Query query2 = QueryBuilders.termQuery("field-2", "value-2").toQuery(shardContext);
+            assertBusy(() -> {
+                // Force the cache to perform eviction
+                final BitSet bitSet2 = cache.getBitSet(query2, leafContext);
+                assertThat(bitSet2, notNullValue());
+
+                // Loop until the cache has less than 2 items, which means that something was evicted
+                assertThat(cache.entryCount(), Matchers.lessThan(2));
+
+            }, 100, TimeUnit.MILLISECONDS);
+
+            // Check that the original bitset is no longer in the cache (a new instance is returned)
+            assertThat(cache.getBitSet(query1, leafContext), not(sameInstance(bitSet1)));
+        });
+    }
+
+    public void testCacheIsPerIndex() throws Exception {
+        final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(Settings.EMPTY);
+        assertThat(cache.entryCount(), equalTo(0));
+        assertThat(cache.ramBytesUsed(), equalTo(0L));
+
+        final int iterations = randomIntBetween(3, 10);
+        AtomicInteger counter = new AtomicInteger(0);
+
+        final CheckedBiConsumer<QueryShardContext, LeafReaderContext, Exception> consumer = new CheckedBiConsumer<>() {
+            @Override
+            public void accept(QueryShardContext shardContext, LeafReaderContext leafContext) throws Exception {
+                final int count = counter.incrementAndGet();
+                final Query query = QueryBuilders.termQuery("field-1", "value-1").toQuery(shardContext);
+                final BitSet bitSet = cache.getBitSet(query, leafContext);
+
+                assertThat(bitSet, notNullValue());
+                assertThat(cache.entryCount(), equalTo(count));
+
+                if (count < iterations) {
+                    // Need to do this nested, or else the cache will be cleared when the index reader is closed
+                    runTestOnIndex(this);
+                }
+            }
+        };
+        runTestOnIndex(consumer);
+    }
+
+    public void testCacheClearEntriesWhenIndexIsClosed() throws Exception {
+        final DocumentSubsetBitsetCache cache = new DocumentSubsetBitsetCache(Settings.EMPTY);
+        assertThat(cache.entryCount(), equalTo(0));
+        assertThat(cache.ramBytesUsed(), equalTo(0L));
+
+        for (int i = 1; i <= randomIntBetween(2, 5); i++) {
+            runTestOnIndex((shardContext, leafContext) -> {
+                for (int j = 1; j <= randomIntBetween(2, 10); j++) {
+                    final Query query = QueryBuilders.termQuery("field-" + j, "value-1").toQuery(shardContext);
+                    final BitSet bitSet = cache.getBitSet(query, leafContext);
+                    assertThat(bitSet, notNullValue());
+                }
+                assertThat(cache.entryCount(), not(equalTo(0)));
+                assertThat(cache.ramBytesUsed(), not(equalTo(0L)));
+            });
+            assertThat(cache.entryCount(), equalTo(0));
+            assertThat(cache.ramBytesUsed(), equalTo(0L));
+        }
+    }
+
+    private void runTestOnIndex(CheckedBiConsumer<QueryShardContext, LeafReaderContext, Exception> body) throws Exception {
+        final ShardId shardId = new ShardId("idx_" + randomAlphaOfLengthBetween(2, 8), randomAlphaOfLength(12), 0);
+        final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY);
+        final MapperService mapperService = mock(MapperService.class);
+        final long nowInMillis = randomNonNegativeLong();
+
+        final Client client = mock(Client.class);
+        when(client.settings()).thenReturn(Settings.EMPTY);
+
+        final IndexWriterConfig writerConfig = new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE);
+        try (Directory directory = newDirectory();
+             IndexWriter iw = new IndexWriter(directory, writerConfig)) {
+            for (int i = 1; i <= 100; i++) {
+                Document document = new Document();
+                for (int j = 1; j <= 10; j++) {
+                    document.add(new StringField("field-" + j, "value-" + i, Field.Store.NO));
+                }
+                iw.addDocument(document);
+            }
+            iw.commit();
+
+            try (DirectoryReader directoryReader = DirectoryReader.open(directory)) {
+                final LeafReaderContext leaf = directoryReader.leaves().get(0);
+
+                final QueryShardContext context = new QueryShardContext(shardId.id(), indexSettings, null, null, null, mapperService,
+                    null, null, xContentRegistry(), writableRegistry(), client, leaf.reader(), () -> nowInMillis, null);
+
+                body.accept(context, leaf);
+            }
+        }
+    }
+
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java
index bd6ac12ee3c1b..c84c0027302e6 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java
@@ -21,18 +21,13 @@
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.apache.lucene.util.TestUtil;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.test.IndexSettingsModule;
-import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetReader;
 import org.junit.After;
 import org.junit.Before;
 
@@ -45,7 +40,7 @@ public class DocumentSubsetReaderTests extends ESTestCase {
 
     private Directory directory;
     private DirectoryReader directoryReader;
-    private BitsetFilterCache bitsetFilterCache;
+    private DocumentSubsetBitsetCache bitsetCache;
 
     @Before
     public void setUpDirectory() {
@@ -55,18 +50,7 @@ public void setUpDirectory() {
         assertTrue(DocumentSubsetReader.NUM_DOCS_CACHE.toString(), DocumentSubsetReader.NUM_DOCS_CACHE.isEmpty());
         directory = newDirectory();
-        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
-        bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() {
-            @Override
-            public void onCache(ShardId shardId, Accountable accountable) {
-
-            }
-
-            @Override
-            public void onRemoval(ShardId shardId, Accountable accountable) {
-
-            }
-        });
+        bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY);
     }
 
     @After
@@ -77,7 +61,7 @@ public void cleanDirectory() throws Exception {
         assertTrue(DocumentSubsetReader.NUM_DOCS_CACHE.toString(), DocumentSubsetReader.NUM_DOCS_CACHE.isEmpty());
         directory.close();
-        bitsetFilterCache.close();
+        bitsetCache.close();
     }
 
     public void testSearch() throws Exception {
@@ -104,14 +88,14 @@ public void testSearch() throws Exception {
         iw.close();
         openDirectoryReader();
 
-        IndexSearcher indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache,
+        IndexSearcher indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache,
                 new TermQuery(new Term("field", "value1"))));
         assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1));
         TopDocs result = indexSearcher.search(new MatchAllDocsQuery(), 1);
         assertThat(result.totalHits.value, equalTo(1L));
         assertThat(result.scoreDocs[0].doc, equalTo(0));
 
-        indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache,
+        indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache,
                 new TermQuery(new Term("field", "value2"))));
         assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1));
         result = indexSearcher.search(new MatchAllDocsQuery(), 1);
@@ -119,13 +103,13 @@ public void testSearch() throws Exception {
         assertThat(result.scoreDocs[0].doc, equalTo(1));
 
         // this doc has been marked as deleted:
-        indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache,
+        indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache,
                 new TermQuery(new Term("field", "value3"))));
         assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0));
         result = indexSearcher.search(new MatchAllDocsQuery(), 1);
         assertThat(result.totalHits.value, equalTo(0L));
 
-        indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache,
+        indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache,
                 new TermQuery(new Term("field", "value4"))));
         assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1));
         result = indexSearcher.search(new MatchAllDocsQuery(), 1);
@@ -154,7 +138,7 @@ public void testLiveDocs() throws Exception {
 
         for (int i = 0; i < numDocs; i++) {
             Query roleQuery = new TermQuery(new Term("field", "value" + i));
-            DirectoryReader wrappedReader = DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache, roleQuery);
+            DirectoryReader wrappedReader = DocumentSubsetReader.wrap(directoryReader, bitsetCache, roleQuery);
 
             LeafReader leafReader = wrappedReader.leaves().get(0).reader();
             assertThat(leafReader.hasDeletions(), is(true));
@@ -176,26 +160,16 @@ public void testWrapTwice() throws Exception {
         IndexWriterConfig iwc = new IndexWriterConfig(null);
         IndexWriter iw = new IndexWriter(dir, iwc);
         iw.close();
-        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
-        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() {
-            @Override
-            public void onCache(ShardId shardId, Accountable accountable) {
-            }
-
-            @Override
-            public void onRemoval(ShardId shardId, Accountable accountable) {
-            }
-        });
-        DirectoryReader directoryReader = DocumentSubsetReader.wrap(DirectoryReader.open(dir), bitsetFilterCache, new MatchAllDocsQuery());
+        DirectoryReader directoryReader = DocumentSubsetReader.wrap(DirectoryReader.open(dir), bitsetCache, new MatchAllDocsQuery());
         try {
-            DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache, new MatchAllDocsQuery());
+            DocumentSubsetReader.wrap(directoryReader, bitsetCache, new MatchAllDocsQuery());
             fail("shouldn't be able to wrap DocumentSubsetDirectoryReader twice");
         } catch (IllegalArgumentException e) {
             assertThat(e.getMessage(), equalTo("Can't wrap [class org.elasticsearch.xpack.core.security.authz.accesscontrol" +
                     ".DocumentSubsetReader$DocumentSubsetDirectoryReader] twice"));
         }
 
-        bitsetFilterCache.close();
+        bitsetCache.close();
         directoryReader.close();
         dir.close();
     }
@@ -219,7 +193,7 @@ public void testCoreCacheKey() throws Exception {
 
         // open reader
         DirectoryReader ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(iw), new ShardId("_index", "_na_", 0));
-        ir = DocumentSubsetReader.wrap(ir, bitsetFilterCache, new MatchAllDocsQuery());
+        ir = DocumentSubsetReader.wrap(ir, bitsetCache, new MatchAllDocsQuery());
         assertEquals(2, ir.numDocs());
         assertEquals(1, ir.leaves().size());
 
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java
index 0b188ff7075f9..3be46a031a0b2 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java
@@ -21,7 +21,6 @@
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TotalHitCountCollector;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Accountable;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -30,7 +29,6 @@
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryShardContext;
@@ -87,21 +85,11 @@ public void testDLS() throws Exception {
         QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, null, null, null, mapperService,
                 null, null, xContentRegistry(), writableRegistry(), client, null, () -> nowInMillis, null);
         QueryShardContext queryShardContext = spy(realQueryShardContext);
-        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
-        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() {
-            @Override
-            public void onCache(ShardId shardId, Accountable accountable) {
-            }
-
-            @Override
-            public void onRemoval(ShardId shardId, Accountable accountable) {
-
-            }
-        });
+        DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY);
         XPackLicenseState licenseState = mock(XPackLicenseState.class);
         when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
         SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
-                bitsetFilterCache, threadContext, licenseState, scriptService) {
+                bitsetCache, threadContext, licenseState, scriptService) {
 
             @Override
             protected IndicesAccessControl getIndicesAccessControl() {
@@ -169,7 +157,7 @@ protected IndicesAccessControl getIndicesAccessControl() {
             assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount));
         }
 
-        bitsetFilterCache.close();
+        bitsetCache.close();
         directoryReader.close();
         directory.close();
     }
@@ -211,21 +199,12 @@ public void testDLSWithLimitedPermissions() throws Exception {
         QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, null, null, null, mapperService,
                 null, null, xContentRegistry(), writableRegistry(), client, null, () -> nowInMillis, null);
         QueryShardContext queryShardContext = spy(realQueryShardContext);
-        IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
-        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() {
-            @Override
-            public void onCache(ShardId shardId, Accountable accountable) {
-            }
-
-            @Override
-            public void onRemoval(ShardId shardId, Accountable accountable) {
-            }
-        });
+        DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY);
         XPackLicenseState licenseState = mock(XPackLicenseState.class);
         when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
         SecurityIndexReaderWrapper wrapper = new SecurityIndexReaderWrapper(s -> queryShardContext,
-                bitsetFilterCache, threadContext, licenseState, scriptService) {
+                bitsetCache, threadContext, licenseState, scriptService) {
 
             @Override
             protected IndicesAccessControl getIndicesAccessControl() {
@@ -281,7 +260,7 @@ protected IndicesAccessControl getIndicesAccessControl() {
             }
         }
 
-        bitsetFilterCache.close();
+        bitsetCache.close();
         directoryReader.close();
         directory.close();
     }
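A hedged wiring sketch, mirroring the test setup above: the wrapper now takes the node-wide DocumentSubsetBitsetCache instead of the per-index BitsetFilterCache. The method name and parameters are illustrative only:

    import java.util.function.Function;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;
    import org.elasticsearch.index.query.QueryShardContext;
    import org.elasticsearch.index.shard.ShardId;
    import org.elasticsearch.license.XPackLicenseState;
    import org.elasticsearch.script.ScriptService;
    import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache;
    import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexReaderWrapper;

    final class WrapperWiringExample {
        static SecurityIndexReaderWrapper buildWrapper(Function<ShardId, QueryShardContext> contextProvider,
                                                       ThreadContext threadContext, XPackLicenseState licenseState,
                                                       ScriptService scriptService) {
            // One cache per node; in production Security.java owns this instance.
            return new SecurityIndexReaderWrapper(contextProvider, new DocumentSubsetBitsetCache(Settings.EMPTY),
                threadContext, licenseState, scriptService);
        }
    }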
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 1f4f87e858176..df265b58d1911 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -118,6 +118,7 @@
 import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
 import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine;
 import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField;
+import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache;
 import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl;
 import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexReaderWrapper;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions;
@@ -283,6 +284,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
     private final SetOnce<SecurityActionFilter> securityActionFilter = new SetOnce<>();
     private final SetOnce<SecurityIndexManager> securityIndex = new SetOnce<>();
     private final SetOnce<NioGroupFactory> groupFactory = new SetOnce<>();
+    private final SetOnce<DocumentSubsetBitsetCache> dlsBitsetCache = new SetOnce<>();
     private final List<BootstrapCheck> bootstrapChecks;
     private final List<SecurityExtension> securityExtensions = new ArrayList<>();
@@ -353,6 +355,10 @@ Collection<Object> createComponents(Client client, ThreadPool threadPool, Cluste
         securityContext.set(new SecurityContext(settings, threadPool.getThreadContext()));
         components.add(securityContext.get());
 
+        if (XPackSettings.DLS_FLS_ENABLED.get(settings)) {
+            dlsBitsetCache.set(new DocumentSubsetBitsetCache(settings));
+        }
+
         // audit trail service construction
         final List<AuditTrail> auditTrails = XPackSettings.AUDIT_ENABLED.get(settings)
             ? Collections.singletonList(new LoggingAuditTrail(settings, clusterService, threadPool))
@@ -410,7 +416,7 @@ Collection<Object> createComponents(Client client, ThreadPool threadPool, Cluste
         components.add(apiKeyService);
         final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore,
             privilegeStore, rolesProviders, threadPool.getThreadContext(), getLicenseState(), fieldPermissionsCache, apiKeyService,
-            new DeprecationRoleDescriptorConsumer(clusterService, threadPool));
+            dlsBitsetCache.get(), new DeprecationRoleDescriptorConsumer(clusterService, threadPool));
         securityIndex.get().addIndexStateListener(allRolesStore::onSecurityIndexStateChange);
 
         // to keep things simple, just invalidate all cached entries on license change. this happens so rarely that the impact should be
@@ -590,6 +596,7 @@ public static List<Setting<?>> getSettings(List<SecurityExtension> securityExten
         AuthorizationService.addSettings(settingsList);
         Automatons.addSettings(settingsList);
         settingsList.addAll(CompositeRolesStore.getSettings());
+        settingsList.addAll(DocumentSubsetBitsetCache.getSettings());
         settingsList.add(FieldPermissionsCache.CACHE_SIZE_SETTING);
         settingsList.add(TokenService.TOKEN_EXPIRATION);
         settingsList.add(TokenService.DELETE_INTERVAL);
@@ -641,6 +648,7 @@ public void onIndexModule(IndexModule module) {
         if (enabled) {
             assert getLicenseState() != null;
             if (XPackSettings.DLS_FLS_ENABLED.get(settings)) {
+                assert dlsBitsetCache.get() != null;
                 module.setReaderWrapper(indexService ->
                     new SecurityIndexReaderWrapper(
                         shardId -> indexService.newQueryShardContext(shardId.id(),
@@ -651,7 +659,7 @@ public void onIndexModule(IndexModule module) {
                             throw new IllegalArgumentException("permission filters are not allowed to use the current timestamp");
                         },
                         null),
-                        indexService.cache() != null ? indexService.cache().bitsetFilterCache() : null,
+                        dlsBitsetCache.get(),
                        indexService.getThreadPool().getThreadContext(), getLicenseState(),
                        indexService.getScriptService()));
                 /*
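The CompositeRolesStore changes below replace hand-rolled iterator loops with predicate-based invalidation through CacheIteratorHelper. A small sketch of that usage, with invented keys and values; the Cache and helper APIs are the ones introduced in this patch:

    import org.elasticsearch.common.cache.Cache;
    import org.elasticsearch.common.cache.CacheBuilder;
    import org.elasticsearch.xpack.core.security.support.CacheIteratorHelper;

    public class PredicateInvalidationExample {
        // Drop every cached entry whose key mentions the invalidated role,
        // mirroring CompositeRolesStore#invalidate(String) in the diff below.
        public static void main(String[] args) {
            Cache<String, String> cache = CacheBuilder.<String, String>builder().build();
            CacheIteratorHelper<String, String> helper = new CacheIteratorHelper<>(cache);
            cache.put("role-a,role-b", "compound role");
            cache.put("role-c", "other role");
            helper.removeKeysIf(key -> key.contains("role-a"));
            System.out.println(cache.count()); // 1
        }
    }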
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
index 7454ec59da55f..a33a1dc686b41 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java
@@ -28,6 +28,7 @@
 import org.elasticsearch.xpack.core.security.authc.Authentication;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
 import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges;
+import org.elasticsearch.xpack.core.security.authz.accesscontrol.DocumentSubsetBitsetCache;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition;
 import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition.FieldGrantExcludeGroup;
@@ -39,6 +40,7 @@
 import org.elasticsearch.xpack.core.security.authz.privilege.Privilege;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
 import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult;
+import org.elasticsearch.xpack.core.security.support.CacheIteratorHelper;
 import org.elasticsearch.xpack.core.security.user.AnonymousUser;
 import org.elasticsearch.xpack.core.security.user.SystemUser;
 import org.elasticsearch.xpack.core.security.user.User;
@@ -53,14 +55,11 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 import java.util.function.Function;
@@ -84,18 +83,6 @@ public class CompositeRolesStore {
         Setting.intSetting("xpack.security.authz.store.roles.negative_lookup_cache.max_size", 10000, Property.NodeScope);
     private static final Logger logger = LogManager.getLogger(CompositeRolesStore.class);
 
-    // the lock is used in an odd manner; when iterating over the cache we cannot have modifiers other than deletes using
-    // the iterator but when not iterating we can modify the cache without external locking. When making normal modifications to the cache
-    // the read lock is obtained so that we can allow concurrent modifications; however when we need to iterate over the keys or values of
-    // the cache the write lock must obtained to prevent any modifications
-    private final ReleasableLock readLock;
-    private final ReleasableLock writeLock;
-
-    {
-        final ReadWriteLock iterationLock = new ReentrantReadWriteLock();
-        readLock = new ReleasableLock(iterationLock.readLock());
-        writeLock = new ReleasableLock(iterationLock.writeLock());
-    }
 
     private final FileRolesStore fileRolesStore;
     private final NativeRolesStore nativeRolesStore;
@@ -104,7 +91,9 @@ public class CompositeRolesStore {
     private final Consumer<Collection<RoleDescriptor>> effectiveRoleDescriptorsConsumer;
     private final FieldPermissionsCache fieldPermissionsCache;
     private final Cache<RoleKey, Role> roleCache;
+    private final CacheIteratorHelper<RoleKey, Role> roleCacheHelper;
     private final Cache<String, Boolean> negativeLookupCache;
+    private final DocumentSubsetBitsetCache dlsBitsetCache;
     private final ThreadContext threadContext;
     private final AtomicLong numInvalidation = new AtomicLong();
     private final AnonymousUser anonymousUser;
@@ -117,8 +106,10 @@ public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, Nat
                                ReservedRolesStore reservedRolesStore, NativePrivilegeStore privilegeStore,
                                List<BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>>> rolesProviders,
                                ThreadContext threadContext, XPackLicenseState licenseState, FieldPermissionsCache fieldPermissionsCache,
-                               ApiKeyService apiKeyService, Consumer<Collection<RoleDescriptor>> effectiveRoleDescriptorsConsumer) {
+                               ApiKeyService apiKeyService, @Nullable DocumentSubsetBitsetCache dlsBitsetCache,
+                               Consumer<Collection<RoleDescriptor>> effectiveRoleDescriptorsConsumer) {
         this.fileRolesStore = fileRolesStore;
+        this.dlsBitsetCache = dlsBitsetCache;
         fileRolesStore.addListener(this::invalidate);
         this.nativeRolesStore = nativeRolesStore;
         this.privilegeStore = privilegeStore;
@@ -132,6 +123,7 @@ public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, Nat
             builder.setMaximumWeight(cacheSize);
         }
         this.roleCache = builder.build();
+        this.roleCacheHelper = new CacheIteratorHelper<>(roleCache);
         this.threadContext = threadContext;
         CacheBuilder<String, Boolean> nlcBuilder = CacheBuilder.builder();
         final int nlcCacheSize = NEGATIVE_LOOKUP_CACHE_SIZE_SETTING.get(settings);
@@ -261,7 +253,7 @@ private void buildThenMaybeCacheRole(RoleKey roleKey, Collection
         logger.trace("Building role from descriptors [{}] for names [{}] from source [{}]", roleDescriptors, roleKey.names, roleKey.source);
         buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache, privilegeStore, ActionListener.wrap(role -> {
             if (role != null && tryCache) {
-                try (ReleasableLock ignored = readLock.acquire()) {
+                try (ReleasableLock ignored = roleCacheHelper.acquireUpdateLock()) {
                     /* this is kinda spooky. We use a read/write lock to ensure we don't modify the cache if we hold
                      * the write lock (fetching stats for instance - which is kinda overkill?) but since we fetching
                      * stuff in an async fashion we need to make sure that if the cache got invalidated since we
@@ -420,47 +412,31 @@ public static void buildRoleFromDescriptors(Collection<RoleDescriptor> roleDescr
     public void invalidateAll() {
         numInvalidation.incrementAndGet();
         negativeLookupCache.invalidateAll();
-        try (ReleasableLock ignored = readLock.acquire()) {
+        try (ReleasableLock ignored = roleCacheHelper.acquireUpdateLock()) {
             roleCache.invalidateAll();
         }
+        if (dlsBitsetCache != null) {
+            dlsBitsetCache.clear("role store invalidation");
+        }
     }
 
     public void invalidate(String role) {
         numInvalidation.incrementAndGet();
-        // the cache cannot be modified while doing this operation per the terms of the cache iterator
-        try (ReleasableLock ignored = writeLock.acquire()) {
-            Iterator<RoleKey> keyIter = roleCache.keys().iterator();
-            while (keyIter.hasNext()) {
-                RoleKey key = keyIter.next();
-                if (key.names.contains(role)) {
-                    keyIter.remove();
-                }
-            }
-        }
+        roleCacheHelper.removeKeysIf(key -> key.names.contains(role));
         negativeLookupCache.invalidate(role);
     }
 
     public void invalidate(Set<String> roles) {
         numInvalidation.incrementAndGet();
-
-        // the cache cannot be modified while doing this operation per the terms of the cache iterator
-        try (ReleasableLock ignored = writeLock.acquire()) {
-            Iterator<RoleKey> keyIter = roleCache.keys().iterator();
-            while (keyIter.hasNext()) {
-                RoleKey key = keyIter.next();
-                if (Sets.haveEmptyIntersection(key.names, roles) == false) {
-                    keyIter.remove();
-                }
-            }
-        }
-
+        roleCacheHelper.removeKeysIf(key -> Sets.haveEmptyIntersection(key.names, roles) == false);
         roles.forEach(negativeLookupCache::invalidate);
     }
 
     public void usageStats(ActionListener<Map<String, Object>> listener) {
         final Map<String, Object> usage = new HashMap<>(2);
         usage.put("file", fileRolesStore.usageStats());
+        usage.put("dls", Map.of("bit_set_cache", dlsBitsetCache.usageStats()));
         nativeRolesStore.usageStats(ActionListener.wrap(map -> {
             usage.put("native", map);
             listener.onResponse(usage);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
index b4e0a6a22cf81..57b172f47f0cd 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java
@@ -149,7 +149,7 @@ public void testRolesWhenDlsFlsUnlicensed() throws IOException {
         final AtomicReference<Collection<RoleDescriptor>> effectiveRoleDescriptors = new AtomicReference<Collection<RoleDescriptor>>();
         CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, nativeRolesStore,
                 reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(),
-                new ThreadContext(Settings.EMPTY), licenseState, cache, mock(ApiKeyService.class),
+                new ThreadContext(Settings.EMPTY), licenseState, cache, mock(ApiKeyService.class), null,
                 rds -> effectiveRoleDescriptors.set(rds));
 
         PlainActionFuture<Role> roleFuture = new PlainActionFuture<>();
@@ -224,7 +224,7 @@ public void testRolesWhenDlsFlsLicensed() throws IOException {
         final AtomicReference<Collection<RoleDescriptor>> effectiveRoleDescriptors = new AtomicReference<Collection<RoleDescriptor>>();
         CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, nativeRolesStore,
                 reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(),
-                new ThreadContext(Settings.EMPTY), licenseState, cache, mock(ApiKeyService.class),
+                new ThreadContext(Settings.EMPTY), licenseState, cache, mock(ApiKeyService.class), null,
                 rds -> effectiveRoleDescriptors.set(rds));
 
         PlainActionFuture<Role> roleFuture = new PlainActionFuture<>();
@@ -276,7 +276,7 @@ public void testNegativeLookupsAreCached() {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, nativePrivilegeStore,
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class),
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), null,
                 rds -> effectiveRoleDescriptors.set(rds));
         verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor
 
@@ -337,7 +337,7 @@ public void testNegativeLookupsCacheDisabled() {
         final AtomicReference<Collection<RoleDescriptor>> effectiveRoleDescriptors = new AtomicReference<Collection<RoleDescriptor>>();
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore,
                 reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(settings),
-                new XPackLicenseState(settings), cache, mock(ApiKeyService.class), rds -> effectiveRoleDescriptors.set(rds));
+                new XPackLicenseState(settings), cache, mock(ApiKeyService.class), null, rds -> effectiveRoleDescriptors.set(rds));
         verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor
 
         final String roleName = randomAlphaOfLengthBetween(1, 10);
@@ -374,7 +374,7 @@ public void testNegativeLookupsAreNotCachedWithFailures() {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class),
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), null,
                 rds -> effectiveRoleDescriptors.set(rds));
         verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor
 
@@ -460,7 +460,7 @@ public void testCustomRolesProviders() {
             new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore,
                 mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider1, inMemoryProvider2),
                 new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS),
-                cache, mock(ApiKeyService.class), rds -> effectiveRoleDescriptors.set(rds));
+                cache, mock(ApiKeyService.class), null, rds -> effectiveRoleDescriptors.set(rds));
 
         final Set<String> roleNames = Sets.newHashSet("roleA", "roleB", "unknown");
         PlainActionFuture<Role> future = new PlainActionFuture<>();
@@ -674,7 +674,7 @@ public void testCustomRolesProviderFailures() throws Exception {
             new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore,
                 mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider1, failingProvider),
                 new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS),
-                cache, mock(ApiKeyService.class), rds -> effectiveRoleDescriptors.set(rds));
+                cache, mock(ApiKeyService.class), null, rds -> effectiveRoleDescriptors.set(rds));
 
         final Set<String> roleNames = Sets.newHashSet("roleA", "roleB", "unknown");
         PlainActionFuture<Role> future = new PlainActionFuture<>();
@@ -720,7 +720,7 @@ public void testCustomRolesProvidersLicensing() {
         CompositeRolesStore compositeRolesStore = new CompositeRolesStore(
             Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
             Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache,
-            mock(ApiKeyService.class), rds -> effectiveRoleDescriptors.set(rds));
+            mock(ApiKeyService.class), null, rds -> effectiveRoleDescriptors.set(rds));
 
         Set<String> roleNames = Sets.newHashSet("roleA");
         PlainActionFuture<Role> future = new PlainActionFuture<>();
@@ -735,7 +735,7 @@ Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(Nativ
         compositeRolesStore = new CompositeRolesStore(
             Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
             Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache,
-            mock(ApiKeyService.class), rds -> effectiveRoleDescriptors.set(rds));
+            mock(ApiKeyService.class), null, rds -> effectiveRoleDescriptors.set(rds));
 
         // these licenses allow custom role providers
         xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), true, null);
         roleNames = Sets.newHashSet("roleA");
@@ -752,7 +752,7 @@ Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(Nativ
         compositeRolesStore = new CompositeRolesStore(
             Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
             Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache,
-            mock(ApiKeyService.class), rds -> effectiveRoleDescriptors.set(rds));
+            mock(ApiKeyService.class), null, rds -> effectiveRoleDescriptors.set(rds));
 
         xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), false, null);
         roleNames = Sets.newHashSet("roleA");
         future = new PlainActionFuture<>();
@@ -783,7 +783,7 @@ public void testCacheClearOnIndexHealthChange() {
         CompositeRolesStore compositeRolesStore = new CompositeRolesStore(
                 Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore,
                 mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(Settings.EMPTY),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), rds -> {}) {
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), null, rds -> {}) {
             @Override
             public void invalidateAll() {
                 numInvalidation.incrementAndGet();
@@ -835,7 +835,7 @@ public void testCacheClearOnIndexOutOfDateChange() {
         CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), rds -> {}) {
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), null, rds -> {}) {
             @Override
             public void invalidateAll() {
                 numInvalidation.incrementAndGet();
@@ -865,7 +865,7 @@ public void testDefaultRoleUserWithoutRoles() {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), rds -> {});
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), null, rds -> {});
         verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor
 
         PlainActionFuture<Role> rolesFuture = new PlainActionFuture<>();
@@ -904,7 +904,7 @@ public void testAnonymousUserEnabledRoleAdded() {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore,
                 reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(settings),
-                new XPackLicenseState(settings), cache, mock(ApiKeyService.class), rds -> {});
+                new XPackLicenseState(settings), cache, mock(ApiKeyService.class), null, rds -> {});
         verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor
 
         PlainActionFuture<Role> rolesFuture = new PlainActionFuture<>();
@@ -932,7 +932,7 @@ public void testDoesNotUseRolesStoreForXPackUser() {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class),
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), null,
                 rds -> effectiveRoleDescriptors.set(rds));
         verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor
 
@@ -962,7 +962,7 @@ public void testGetRolesForSystemUserThrowsException() {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class),
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class),
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class), null,
                 rds -> effectiveRoleDescriptors.set(rds));
         verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor
         IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
@@ -997,7 +997,7 @@ public void testApiKeyAuthUsesApiKeyService() throws IOException {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, nativePrivStore,
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, apiKeyService,
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, apiKeyService, null,
                 rds -> effectiveRoleDescriptors.set(rds));
         AuditUtil.getOrGenerateRequestId(threadContext);
         final Authentication authentication = new Authentication(new User("test api key user", "superuser"),
@@ -1042,7 +1042,7 @@ public void testApiKeyAuthUsesApiKeyServiceWithScopedRole() throws IOException {
         final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS,
                 fileRolesStore, nativeRolesStore, reservedRolesStore, nativePrivStore,
                 Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
-                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, apiKeyService,
+                new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, apiKeyService, null,
                 rds -> effectiveRoleDescriptors.set(rds));
         AuditUtil.getOrGenerateRequestId(threadContext);
         final Authentication authentication = new Authentication(new User("test api key user", "api_key"),