From b2f624274e3b94d1af4d24e69286f998de171907 Mon Sep 17 00:00:00 2001 From: bbaker Date: Sat, 20 Sep 2025 09:17:30 +1000 Subject: [PATCH 1/8] Caching parse and validate by default --- build.gradle | 6 + src/main/java/graphql/GraphQL.java | 4 +- .../caching/CachingDocumentProvider.java | 56 +++++++ .../caching/CaffeineDocumentCache.java | 72 +++++++++ .../caching/CaffeineDocumentCacheOptions.java | 82 ++++++++++ .../preparsed/caching/DocumentCache.java | 66 ++++++++ src/main/java/graphql/util/ClassKit.java | 13 ++ src/test/groovy/graphql/GraphQLTest.groovy | 4 +- .../CachingDocumentProviderTest.groovy | 147 ++++++++++++++++++ 9 files changed, 446 insertions(+), 4 deletions(-) create mode 100644 src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java create mode 100644 src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java create mode 100644 src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java create mode 100644 src/main/java/graphql/execution/preparsed/caching/DocumentCache.java create mode 100644 src/main/java/graphql/util/ClassKit.java create mode 100644 src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy diff --git a/build.gradle b/build.gradle index 8cc48c3a7f..7221f154da 100644 --- a/build.gradle +++ b/build.gradle @@ -126,6 +126,12 @@ dependencies { implementation 'org.antlr:antlr4-runtime:' + antlrVersion implementation 'com.google.guava:guava:' + guavaVersion + // we can compile against caffeine but its not shipped as a runtime dependency + compileOnly 'com.github.ben-manes.caffeine:caffeine:3.1.8' + // we need caffeine to write tests however + testImplementation 'com.github.ben-manes.caffeine:caffeine:3.1.8' + + testImplementation group: 'junit', name: 'junit', version: '4.13.2' testImplementation 'org.spockframework:spock-core:2.3-groovy-4.0' testImplementation 'net.bytebuddy:byte-buddy:1.17.7' diff --git a/src/main/java/graphql/GraphQL.java b/src/main/java/graphql/GraphQL.java index 16d14ab4b9..1bbf6fdbbb 100644 --- a/src/main/java/graphql/GraphQL.java +++ b/src/main/java/graphql/GraphQL.java @@ -20,9 +20,9 @@ import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters; import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; import graphql.execution.instrumentation.parameters.InstrumentationValidationParameters; -import graphql.execution.preparsed.NoOpPreparsedDocumentProvider; import graphql.execution.preparsed.PreparsedDocumentEntry; import graphql.execution.preparsed.PreparsedDocumentProvider; +import graphql.execution.preparsed.caching.CachingDocumentProvider; import graphql.language.Document; import graphql.schema.GraphQLSchema; import graphql.validation.ValidationError; @@ -279,7 +279,7 @@ public static class Builder { private DataFetcherExceptionHandler defaultExceptionHandler = new SimpleDataFetcherExceptionHandler(); private ExecutionIdProvider idProvider = DEFAULT_EXECUTION_ID_PROVIDER; private Instrumentation instrumentation = null; // deliberate default here - private PreparsedDocumentProvider preparsedDocumentProvider = NoOpPreparsedDocumentProvider.INSTANCE; + private PreparsedDocumentProvider preparsedDocumentProvider = new CachingDocumentProvider(); private boolean doNotAutomaticallyDispatchDataLoader = false; private ValueUnboxer valueUnboxer = ValueUnboxer.DEFAULT; diff --git a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java 
b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java new file mode 100644 index 0000000000..bfe8162ad5 --- /dev/null +++ b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java @@ -0,0 +1,56 @@ +package graphql.execution.preparsed.caching; + +import com.github.benmanes.caffeine.cache.Caffeine; +import graphql.ExecutionInput; +import graphql.PublicApi; +import graphql.execution.preparsed.PreparsedDocumentEntry; +import graphql.execution.preparsed.PreparsedDocumentProvider; +import org.jspecify.annotations.NullMarked; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; + +/** + * By default, graphql-java will cache the parsed {@link PreparsedDocumentEntry} that represents + * a parsed and validated graphql query IF {@link Caffeine} is present on the class path + * at runtime. If it's not then no caching takes place. + */ +@PublicApi +@NullMarked +public class CachingDocumentProvider implements PreparsedDocumentProvider { + private final DocumentCache documentCache; + + /** + * By default, it will try to use a {@link Caffeine} backed implementation if it's on the class + * path otherwise it will become a non caching mechanism. + */ + public CachingDocumentProvider() { + this(new CaffeineDocumentCache()); + } + + /** + * You can use your own cache implementation and provide that to this class to use + * + * @param documentCache the cache to use + */ + public CachingDocumentProvider(DocumentCache documentCache) { + this.documentCache = documentCache; + } + + @Override + public CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { + if (documentCache.isNoop()) { + // saves creating keys and doing a lookup that will just call this function anyway + return CompletableFuture.completedFuture(parseAndValidateFunction.apply(executionInput)); + } + DocumentCache.DocumentCacheKey cacheKey = new DocumentCache.DocumentCacheKey(executionInput.getQuery(), executionInput.getOperationName()); + PreparsedDocumentEntry cacheEntry = documentCache.get(cacheKey, missFunction(executionInput, parseAndValidateFunction)); + return CompletableFuture.completedFuture(cacheEntry); + } + + private static Function missFunction(ExecutionInput executionInput, Function parseAndValidateFunction) { + return key -> { + return parseAndValidateFunction.apply(executionInput); + }; + } +} diff --git a/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java new file mode 100644 index 0000000000..3faf42bfbd --- /dev/null +++ b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java @@ -0,0 +1,72 @@ +package graphql.execution.preparsed.caching; + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; +import graphql.PublicApi; +import graphql.execution.preparsed.PreparsedDocumentEntry; +import graphql.util.ClassKit; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; + +import java.util.function.Function; + +import static java.util.Objects.requireNonNull; + +@PublicApi +@NullMarked +public class CaffeineDocumentCache implements DocumentCache { + + private final static boolean isCaffeineAvailable = ClassKit.isClassAvailable("com.github.benmanes.caffeine.cache.Caffeine"); + + @Nullable + private final Object caffeineCacheObj; + + CaffeineDocumentCache(boolean isCaffeineAvailable) { + if 
(isCaffeineAvailable) { + CaffeineDocumentCacheOptions options = CaffeineDocumentCacheOptions.getDefaultJvmOptions(); + caffeineCacheObj = Caffeine.newBuilder() + .expireAfterAccess(options.getExpireAfterAccess()) + .maximumSize(options.getMaxSize()) + .build(); + } else { + caffeineCacheObj = null; + } + } + + public CaffeineDocumentCache() { + this(isCaffeineAvailable); + } + + /** + * If you want to control the {@link Caffeine} configuration, using this constructor and pass in your own {@link Caffeine} cache + * + * @param caffeineCache the custom {@link Caffeine} cache to use + */ + public CaffeineDocumentCache(Cache caffeineCache) { + this.caffeineCacheObj = caffeineCache; + } + + @Override + public PreparsedDocumentEntry get(DocumentCacheKey key, Function mappingFunction) { + if (isNoop()) { + return mappingFunction.apply(key); + } + return cache().get(key, mappingFunction); + } + + private Cache cache() { + //noinspection unchecked + return (Cache) requireNonNull(caffeineCacheObj); + } + + @Override + public boolean isNoop() { + return caffeineCacheObj == null; + } + + public void clear() { + if (!isNoop()) { + cache().invalidateAll(); + } + } +} diff --git a/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java new file mode 100644 index 0000000000..d32aee2f02 --- /dev/null +++ b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java @@ -0,0 +1,82 @@ +package graphql.execution.preparsed.caching; + +import graphql.PublicApi; +import org.jspecify.annotations.NullMarked; + +import java.time.Duration; + +@PublicApi +@NullMarked +public class CaffeineDocumentCacheOptions { + + /** + * By default, we cache documents for 5 minutes + */ + public static final Duration EXPIRED_AFTER_ACCESS = Duration.ofMinutes(5); + /** + * By default, we hold 1000 entries + */ + public static final int MAX_SIZE = 1000; + + private static CaffeineDocumentCacheOptions defaultJvmOptions = newOptions() + .expireAfterAccess(EXPIRED_AFTER_ACCESS) + .maxSize(MAX_SIZE) + .build(); + + /** + * This returns the JVM wide default options for the {@link CaffeineDocumentCache} + * + * @return the JVM wide default options + */ + public static CaffeineDocumentCacheOptions getDefaultJvmOptions() { + return defaultJvmOptions; + } + + /** + * This sets new JVM wide default options for the {@link CaffeineDocumentCache} + * + * @param jvmOptions + */ + public static void setDefaultJvmOptions(CaffeineDocumentCacheOptions jvmOptions) { + defaultJvmOptions = jvmOptions; + } + + private final Duration expireAfterAccess; + private final int maxSize; + + private CaffeineDocumentCacheOptions(Builder builder) { + this.expireAfterAccess = builder.expireAfterAccess; + this.maxSize = builder.maxSize; + } + + public Duration getExpireAfterAccess() { + return expireAfterAccess; + } + + public int getMaxSize() { + return maxSize; + } + + public static Builder newOptions() { + return new Builder(); + } + + public static class Builder { + Duration expireAfterAccess = Duration.ofMinutes(5); + int maxSize = 1000; + + public Builder maxSize(int maxSize) { + this.maxSize = maxSize; + return this; + } + + public Builder expireAfterAccess(Duration expireAfterAccess) { + this.expireAfterAccess = expireAfterAccess; + return this; + } + + CaffeineDocumentCacheOptions build() { + return new CaffeineDocumentCacheOptions(this); + } + } +} diff --git 
a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java new file mode 100644 index 0000000000..d9f93155fd --- /dev/null +++ b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java @@ -0,0 +1,66 @@ +package graphql.execution.preparsed.caching; + +import graphql.execution.preparsed.PreparsedDocumentEntry; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; + +import java.util.Objects; +import java.util.function.Function; + +/** + * This represents a cache interface to get a document from a cache key. You can use your own cache implementation + * to back the caching of parsed graphql documents. + */ +@NullMarked +public interface DocumentCache { + /** + * Called to get a document that has previously been parsed and validated. + * + * @param key the cache key + * @param mappingFunction if the value is missing in cache this function can be called to create a value + * + * @return a non null document entry + */ + PreparsedDocumentEntry get(DocumentCacheKey key, Function mappingFunction); + + /** + * @return true if the cache in fact does no caching otherwise false. This helps the implementation optimise how the cache is used or not. + */ + boolean isNoop(); + + /** + * This represents the key to the document cache + */ + class DocumentCacheKey { + private final String query; + @Nullable + private final String operationName; + + DocumentCacheKey(String query, @Nullable String operationName) { + this.query = query; + this.operationName = operationName; + } + + public String getQuery() { + return query; + } + + public @Nullable String getOperationName() { + return operationName; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + DocumentCacheKey cacheKey = (DocumentCacheKey) o; + return Objects.equals(query, cacheKey.query) && Objects.equals(operationName, cacheKey.operationName); + } + + @Override + public int hashCode() { + return Objects.hash(query, operationName); + } + } +} diff --git a/src/main/java/graphql/util/ClassKit.java b/src/main/java/graphql/util/ClassKit.java new file mode 100644 index 0000000000..059622d61b --- /dev/null +++ b/src/main/java/graphql/util/ClassKit.java @@ -0,0 +1,13 @@ +package graphql.util; + +public class ClassKit { + public static boolean isClassAvailable(String className) { + ClassLoader classLoader = ClassKit.class.getClassLoader(); + Class caffeineClass = null; + try { + caffeineClass = classLoader.loadClass(className); + } catch (ClassNotFoundException ignored) { + } + return caffeineClass != null; + } +} diff --git a/src/test/groovy/graphql/GraphQLTest.groovy b/src/test/groovy/graphql/GraphQLTest.groovy index 7b9c48ad7d..83a2f43aa7 100644 --- a/src/test/groovy/graphql/GraphQLTest.groovy +++ b/src/test/groovy/graphql/GraphQLTest.groovy @@ -19,7 +19,7 @@ import graphql.execution.instrumentation.Instrumentation import graphql.execution.instrumentation.InstrumentationState import graphql.execution.instrumentation.SimplePerformantInstrumentation import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters -import graphql.execution.preparsed.NoOpPreparsedDocumentProvider +import graphql.execution.preparsed.caching.CachingDocumentProvider import graphql.language.SourceLocation import graphql.schema.DataFetcher import graphql.schema.DataFetchingEnvironment @@ -1414,7 +1414,7 @@ many lines'''] graphQL.getGraphQLSchema() ==
StarWarsSchema.starWarsSchema graphQL.getIdProvider() == ExecutionIdProvider.DEFAULT_EXECUTION_ID_PROVIDER graphQL.getValueUnboxer() == ValueUnboxer.DEFAULT - graphQL.getPreparsedDocumentProvider() == NoOpPreparsedDocumentProvider.INSTANCE + graphQL.getPreparsedDocumentProvider() instanceof CachingDocumentProvider graphQL.getInstrumentation() instanceof Instrumentation graphQL.getQueryStrategy() instanceof AsyncExecutionStrategy graphQL.getMutationStrategy() instanceof AsyncSerialExecutionStrategy diff --git a/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy new file mode 100644 index 0000000000..68324a0725 --- /dev/null +++ b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy @@ -0,0 +1,147 @@ +package graphql.execution.preparsed.caching + +import graphql.ExecutionInput +import graphql.GraphQL +import graphql.StarWarsSchema +import graphql.execution.preparsed.PreparsedDocumentEntry +import graphql.parser.Parser +import spock.lang.Specification + +import java.util.function.Function + +class CachingDocumentProviderTest extends Specification { + private String heroQuery1 + + void setup() { + heroQuery1 = """ + query HeroNameQuery { + hero { + name + } + } + """ + } + + def "basic integration test"() { + + def cachingDocumentProvider = new CachingDocumentProvider() + GraphQL graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) + .preparsedDocumentProvider(cachingDocumentProvider) + .build() + + when: + def executionInput = ExecutionInput.newExecutionInput(heroQuery1) + .operationName("HeroNameQuery").build() + + def er = graphQL.execute(executionInput) + + then: + er.errors.isEmpty() + er.data == [hero: [name: "R2-D2"]] + } + + def "integration still works when caffeine is not on the class path"() { + // we fake out the test here saying its NOT on the classpath + def cache = new CaffeineDocumentCache(false) + def cachingDocumentProvider = new CachingDocumentProvider(cache) + GraphQL graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) + .preparsedDocumentProvider(cachingDocumentProvider) + .build() + when: + def executionInput = ExecutionInput.newExecutionInput(heroQuery1) + .operationName("HeroNameQuery").build() + + def er = graphQL.execute(executionInput) + + then: + er.errors.isEmpty() + er.data == [hero: [name: "R2-D2"]] + } + + class CountingDocProvider implements Function { + int count = 0 + + @Override + PreparsedDocumentEntry apply(ExecutionInput executionInput) { + count++ + def document = Parser.parse(executionInput.query) + return new PreparsedDocumentEntry(document) + } + } + + def "caching happens and the parse and validated function is avoided"() { + def cache = new CaffeineDocumentCache(true) + def cachingDocumentProvider = new CachingDocumentProvider(cache) + + def ei = ExecutionInput.newExecutionInput("query q { f }").build() + def callback = new CountingDocProvider() + + when: + def cf = cachingDocumentProvider.getDocumentAsync(ei, callback) + def documentEntry = cf.join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + callback.count == 1 + + when: + cf = cachingDocumentProvider.getDocumentAsync(ei, callback) + documentEntry = cf.join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + // cached + callback.count == 1 + + when: + cache.clear() + cf = cachingDocumentProvider.getDocumentAsync(ei, callback) + documentEntry = cf.join() + + then: + 
!documentEntry.hasErrors() + documentEntry.document != null + // after cleared cached + callback.count == 2 + + } + + def "when caching is not present then parse and validated function is always called"() { + def cache = new CaffeineDocumentCache(false) + def cachingDocumentProvider = new CachingDocumentProvider(cache) + + def ei = ExecutionInput.newExecutionInput("query q { f }").build() + def callback = new CountingDocProvider() + + when: + def cf = cachingDocumentProvider.getDocumentAsync(ei, callback) + def documentEntry = cf.join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + callback.count == 1 + + when: + cf = cachingDocumentProvider.getDocumentAsync(ei, callback) + documentEntry = cf.join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + // not cached + callback.count == 2 + + when: + cache.clear() + cf = cachingDocumentProvider.getDocumentAsync(ei, callback) + documentEntry = cf.join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + callback.count == 3 + } +} From 4225518343ec8b62ea67aeae922856573fa7c61d Mon Sep 17 00:00:00 2001 From: bbaker Date: Mon, 22 Sep 2025 08:05:28 +1000 Subject: [PATCH 2/8] Added bench mark --- .../benchmark/CachingDocumentBenchmark.java | 83 ++++++++++ .../caching/CachingDocumentProvider.java | 7 +- .../preparsed/caching/DocumentCache.java | 2 + .../CachingDocumentProviderTest.groovy | 155 ++++++++++++++++-- 4 files changed, 225 insertions(+), 22 deletions(-) create mode 100644 src/jmh/java/benchmark/CachingDocumentBenchmark.java diff --git a/src/jmh/java/benchmark/CachingDocumentBenchmark.java b/src/jmh/java/benchmark/CachingDocumentBenchmark.java new file mode 100644 index 0000000000..6f339a488c --- /dev/null +++ b/src/jmh/java/benchmark/CachingDocumentBenchmark.java @@ -0,0 +1,83 @@ +package benchmark; + +import graphql.GraphQL; +import graphql.StarWarsSchema; +import graphql.execution.preparsed.NoOpPreparsedDocumentProvider; +import graphql.execution.preparsed.PreparsedDocumentProvider; +import graphql.execution.preparsed.caching.CachingDocumentProvider; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.util.concurrent.TimeUnit; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 3) +@Fork(2) +public class CachingDocumentBenchmark { + + @Param({"50", "500", "5000"}) + public int querySize; + + @Param({"10", "50", "500"}) + public int queryCount; + + @Setup(Level.Trial) + public void setUp() { + } + + private static final GraphQL GRAPHQL_CACHING_ON = buildGraphQL(true); + private static final GraphQL GRAPHQL_CACHING_OFF = buildGraphQL(true); + + @Benchmark + @BenchmarkMode(Mode.AverageTime) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public void benchMarkCachingOnAvgTime() { + executeQuery(true); + } + + @Benchmark + @BenchmarkMode(Mode.AverageTime) + @OutputTimeUnit(TimeUnit.MILLISECONDS) + public void benchMarkCachingOffAvgTime() { + executeQuery(false); + } + + public void executeQuery(boolean cachingOn) { + String query = 
buildQuery(querySize); + GraphQL graphQL = cachingOn ? GRAPHQL_CACHING_ON : GRAPHQL_CACHING_OFF; + + for (int i = 0; i < queryCount; i++) { + graphQL.execute(query); + } + } + + private static String buildQuery(int howManyAliases) { + StringBuilder query = new StringBuilder("query q { hero { \n"); + for (int i = 0; i < howManyAliases; i++) { + query.append("nameAlias").append(i).append(" : name\n"); + } + query.append("}}"); + return query.toString(); + } + + private static GraphQL buildGraphQL(boolean cachingOn) { + PreparsedDocumentProvider documentProvider = NoOpPreparsedDocumentProvider.INSTANCE; + if (cachingOn) { + documentProvider = new CachingDocumentProvider(); + } + return GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) + .preparsedDocumentProvider(documentProvider) + .build(); + } +} diff --git a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java index bfe8162ad5..dd4bd0014a 100644 --- a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java +++ b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java @@ -44,13 +44,8 @@ public CompletableFuture getDocumentAsync(ExecutionInput return CompletableFuture.completedFuture(parseAndValidateFunction.apply(executionInput)); } DocumentCache.DocumentCacheKey cacheKey = new DocumentCache.DocumentCacheKey(executionInput.getQuery(), executionInput.getOperationName()); - PreparsedDocumentEntry cacheEntry = documentCache.get(cacheKey, missFunction(executionInput, parseAndValidateFunction)); + PreparsedDocumentEntry cacheEntry = documentCache.get(cacheKey, key -> parseAndValidateFunction.apply(executionInput)); return CompletableFuture.completedFuture(cacheEntry); } - private static Function missFunction(ExecutionInput executionInput, Function parseAndValidateFunction) { - return key -> { - return parseAndValidateFunction.apply(executionInput); - }; - } } diff --git a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java index d9f93155fd..a3e7515034 100644 --- a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java +++ b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java @@ -1,5 +1,6 @@ package graphql.execution.preparsed.caching; +import graphql.PublicApi; import graphql.execution.preparsed.PreparsedDocumentEntry; import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.Nullable; @@ -11,6 +12,7 @@ * This represents a cache interface to get a document from a cache key. You can use your own cache implementation * to back the caching of parsed graphql documents. 
*/ +@PublicApi @NullMarked public interface DocumentCache { /** diff --git a/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy index 68324a0725..bb0f308352 100644 --- a/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy @@ -1,5 +1,8 @@ package graphql.execution.preparsed.caching + +import com.github.benmanes.caffeine.cache.Caffeine +import com.github.benmanes.caffeine.cache.Ticker import graphql.ExecutionInput import graphql.GraphQL import graphql.StarWarsSchema @@ -7,8 +10,11 @@ import graphql.execution.preparsed.PreparsedDocumentEntry import graphql.parser.Parser import spock.lang.Specification +import java.time.Duration import java.util.function.Function +import static graphql.ExecutionInput.newExecutionInput + class CachingDocumentProviderTest extends Specification { private String heroQuery1 @@ -30,7 +36,7 @@ class CachingDocumentProviderTest extends Specification { .build() when: - def executionInput = ExecutionInput.newExecutionInput(heroQuery1) + def executionInput = newExecutionInput(heroQuery1) .operationName("HeroNameQuery").build() def er = graphQL.execute(executionInput) @@ -40,6 +46,84 @@ class CachingDocumentProviderTest extends Specification { er.data == [hero: [name: "R2-D2"]] } + def "different outcomes are cached correctly integration test"() { + + def cachingDocumentProvider = new CachingDocumentProvider() + GraphQL graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) + .preparsedDocumentProvider(cachingDocumentProvider) + .build() + + + def query = """ + query HeroNameQuery { + hero { + name + } + } + query HeroNameQuery2 { + hero { + nameAlias : name + } + } + """ + def invalidQuery = """ + query InvalidQuery { + hero { + nameX + } + } + """ + when: + def ei = newExecutionInput(query).operationName("HeroNameQuery").build() + def er = graphQL.execute(ei) + + then: + er.errors.isEmpty() + er.data == [hero: [name: "R2-D2"]] + + when: + ei = newExecutionInput(query).operationName("HeroNameQuery2").build() + er = graphQL.execute(ei) + + then: + er.errors.isEmpty() + er.data == [hero: [nameAlias: "R2-D2"]] + + when: + ei = newExecutionInput(invalidQuery).operationName("InvalidQuery").build() + er = graphQL.execute(ei) + + then: + !er.errors.isEmpty() + er.errors[0].message == "Validation error (FieldUndefined@[hero/nameX]) : Field 'nameX' in type 'Character' is undefined" + + // now do them all again and they are cached but the outcome is the same + + when: + ei = newExecutionInput(query).operationName("HeroNameQuery").build() + er = graphQL.execute(ei) + + then: + er.errors.isEmpty() + er.data == [hero: [name: "R2-D2"]] + + when: + ei = newExecutionInput(query).operationName("HeroNameQuery2").build() + er = graphQL.execute(ei) + + then: + er.errors.isEmpty() + er.data == [hero: [nameAlias: "R2-D2"]] + + when: + ei = newExecutionInput(invalidQuery).operationName("InvalidQuery").build() + er = graphQL.execute(ei) + + then: + !er.errors.isEmpty() + er.errors[0].message == "Validation error (FieldUndefined@[hero/nameX]) : Field 'nameX' in type 'Character' is undefined" + } + def "integration still works when caffeine is not on the class path"() { // we fake out the test here saying its NOT on the classpath def cache = new CaffeineDocumentCache(false) @@ -48,7 +132,7 @@ class CachingDocumentProviderTest extends 
Specification { .preparsedDocumentProvider(cachingDocumentProvider) .build() when: - def executionInput = ExecutionInput.newExecutionInput(heroQuery1) + def executionInput = newExecutionInput(heroQuery1) .operationName("HeroNameQuery").build() def er = graphQL.execute(executionInput) @@ -73,12 +157,11 @@ class CachingDocumentProviderTest extends Specification { def cache = new CaffeineDocumentCache(true) def cachingDocumentProvider = new CachingDocumentProvider(cache) - def ei = ExecutionInput.newExecutionInput("query q { f }").build() + def ei = newExecutionInput("query q { f }").build() def callback = new CountingDocProvider() when: - def cf = cachingDocumentProvider.getDocumentAsync(ei, callback) - def documentEntry = cf.join() + def documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: !documentEntry.hasErrors() @@ -86,8 +169,7 @@ class CachingDocumentProviderTest extends Specification { callback.count == 1 when: - cf = cachingDocumentProvider.getDocumentAsync(ei, callback) - documentEntry = cf.join() + documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: !documentEntry.hasErrors() @@ -97,8 +179,7 @@ class CachingDocumentProviderTest extends Specification { when: cache.clear() - cf = cachingDocumentProvider.getDocumentAsync(ei, callback) - documentEntry = cf.join() + documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: !documentEntry.hasErrors() @@ -112,12 +193,11 @@ class CachingDocumentProviderTest extends Specification { def cache = new CaffeineDocumentCache(false) def cachingDocumentProvider = new CachingDocumentProvider(cache) - def ei = ExecutionInput.newExecutionInput("query q { f }").build() + def ei = newExecutionInput("query q { f }").build() def callback = new CountingDocProvider() when: - def cf = cachingDocumentProvider.getDocumentAsync(ei, callback) - def documentEntry = cf.join() + def documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: !documentEntry.hasErrors() @@ -125,8 +205,7 @@ class CachingDocumentProviderTest extends Specification { callback.count == 1 when: - cf = cachingDocumentProvider.getDocumentAsync(ei, callback) - documentEntry = cf.join() + documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: !documentEntry.hasErrors() @@ -136,12 +215,56 @@ class CachingDocumentProviderTest extends Specification { when: cache.clear() - cf = cachingDocumentProvider.getDocumentAsync(ei, callback) - documentEntry = cf.join() + documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: !documentEntry.hasErrors() documentEntry.document != null callback.count == 3 } + + def "time can pass and entries can expire and the code handles that"() { + long nanoTime = 0 + Ticker ticker = { return nanoTime } + def caffeineCache = Caffeine.newBuilder() + .ticker(ticker) + .expireAfterAccess(Duration.ofMinutes(2)) + . 
build() + def documentCache = new CaffeineDocumentCache(caffeineCache) + def cachingDocumentProvider = new CachingDocumentProvider(documentCache) + + def ei = newExecutionInput("query q { f }").build() + def callback = new CountingDocProvider() + + when: + def documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + callback.count == 1 + + when: + documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + callback.count == 1 + + when: + // + // this is kinda testing Caffeine but I am also trying to make sure that th wrapper + // code does the mappingFunction if its expired + // + // advance time + // + nanoTime += Duration.ofMinutes(5).toNanos() + documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() + + then: + !documentEntry.hasErrors() + documentEntry.document != null + callback.count == 2 + } } From daa4663c1a1294f61051d72b1a3ccfa61fbc62e1 Mon Sep 17 00:00:00 2001 From: bbaker Date: Mon, 22 Sep 2025 09:28:41 +1000 Subject: [PATCH 3/8] Added locale to cache key --- .../caching/CachingDocumentProvider.java | 2 +- .../preparsed/caching/DocumentCache.java | 15 ++++++++++++--- .../groovy/graphql/ParseAndValidateTest.groovy | 5 ++++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java index dd4bd0014a..0436d0e76f 100644 --- a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java +++ b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java @@ -43,7 +43,7 @@ public CompletableFuture getDocumentAsync(ExecutionInput // saves creating keys and doing a lookup that will just call this function anyway return CompletableFuture.completedFuture(parseAndValidateFunction.apply(executionInput)); } - DocumentCache.DocumentCacheKey cacheKey = new DocumentCache.DocumentCacheKey(executionInput.getQuery(), executionInput.getOperationName()); + DocumentCache.DocumentCacheKey cacheKey = new DocumentCache.DocumentCacheKey(executionInput.getQuery(), executionInput.getOperationName(), executionInput.getLocale()); PreparsedDocumentEntry cacheEntry = documentCache.get(cacheKey, key -> parseAndValidateFunction.apply(executionInput)); return CompletableFuture.completedFuture(cacheEntry); } diff --git a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java index a3e7515034..5fca34db76 100644 --- a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java +++ b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java @@ -5,6 +5,7 @@ import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.Nullable; +import java.util.Locale; import java.util.Objects; import java.util.function.Function; @@ -37,10 +38,12 @@ class DocumentCacheKey { private final String query; @Nullable private final String operationName; + private final Locale locale; - DocumentCacheKey(String query, @Nullable String operationName) { + DocumentCacheKey(String query, @Nullable String operationName, Locale locale) { this.query = query; this.operationName = operationName; + this.locale = locale; } public String getQuery() { @@ -51,18 +54,24 @@ public String getQuery() { return operationName; } + public Locale 
getLocale() { + return locale; + } + @Override public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) { return false; } DocumentCacheKey cacheKey = (DocumentCacheKey) o; - return Objects.equals(query, cacheKey.query) && Objects.equals(operationName, cacheKey.operationName); + return Objects.equals(query, cacheKey.query) && + Objects.equals(operationName, cacheKey.operationName) && + Objects.equals(locale, cacheKey.locale); } @Override public int hashCode() { - return Objects.hash(query, operationName); + return Objects.hash(query, operationName, locale); } } } diff --git a/src/test/groovy/graphql/ParseAndValidateTest.groovy b/src/test/groovy/graphql/ParseAndValidateTest.groovy index fa66c3cbed..ea2a3d392b 100644 --- a/src/test/groovy/graphql/ParseAndValidateTest.groovy +++ b/src/test/groovy/graphql/ParseAndValidateTest.groovy @@ -1,5 +1,6 @@ package graphql +import graphql.execution.preparsed.NoOpPreparsedDocumentProvider import graphql.language.Document import graphql.language.SourceLocation import graphql.parser.InvalidSyntaxException @@ -121,7 +122,9 @@ class ParseAndValidateTest extends Specification { def "can use the graphql context to stop certain validation rules"() { def sdl = '''type Query { foo : ID } ''' - def graphQL = TestUtil.graphQL(sdl).build() + // if you use validation rule predicates - then you cant use caching + def graphQL = TestUtil.graphQL(sdl) + .preparsedDocumentProvider(NoOpPreparsedDocumentProvider.INSTANCE).build() Predicate> predicate = new Predicate>() { @Override From fe77fd8d71a034ca2a9ae4d08413bea80b221bf4 Mon Sep 17 00:00:00 2001 From: bbaker Date: Mon, 22 Sep 2025 12:44:21 +1000 Subject: [PATCH 4/8] Tweaked benchmark --- src/jmh/java/benchmark/CachingDocumentBenchmark.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/jmh/java/benchmark/CachingDocumentBenchmark.java b/src/jmh/java/benchmark/CachingDocumentBenchmark.java index 6f339a488c..bf15bb4f7f 100644 --- a/src/jmh/java/benchmark/CachingDocumentBenchmark.java +++ b/src/jmh/java/benchmark/CachingDocumentBenchmark.java @@ -22,7 +22,7 @@ @State(Scope.Benchmark) @Warmup(iterations = 2, time = 5) -@Measurement(iterations = 3) +@Measurement(iterations = 3, time = 2) @Fork(2) public class CachingDocumentBenchmark { @@ -37,7 +37,7 @@ public void setUp() { } private static final GraphQL GRAPHQL_CACHING_ON = buildGraphQL(true); - private static final GraphQL GRAPHQL_CACHING_OFF = buildGraphQL(true); + private static final GraphQL GRAPHQL_CACHING_OFF = buildGraphQL(false); @Benchmark @BenchmarkMode(Mode.AverageTime) From 02299f389c596d8c4deb70dd3522e8c54cb16a2a Mon Sep 17 00:00:00 2001 From: bbaker Date: Mon, 22 Sep 2025 14:43:53 +1000 Subject: [PATCH 5/8] More code tweaks and javadoc --- .../caching/CachingDocumentProvider.java | 15 ++++ .../caching/CaffeineDocumentCache.java | 7 +- .../caching/CaffeineDocumentCacheOptions.java | 11 +-- .../preparsed/caching/DocumentCache.java | 4 ++ .../CachingDocumentProviderTest.groovy | 68 ++++++++++++++++++- 5 files changed, 97 insertions(+), 8 deletions(-) diff --git a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java index 0436d0e76f..f7e02cbbdc 100644 --- a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java +++ b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java @@ -11,9 +11,15 @@ import java.util.function.Function; /** + * The CachingDocumentProvider 
allows previously parsed and validated operations to be cached and + * hence re-used. This can lead to significant time savings, especially for large operations. + *

* By default, graphql-java will cache the parsed {@link PreparsedDocumentEntry} that represents * a parsed and validated graphql query IF {@link Caffeine} is present on the class path * at runtime. If it's not then no caching takes place. + *

+ * You can provide your own {@link DocumentCache} implementation and hence use any cache + * technology you like. */ @PublicApi @NullMarked @@ -23,6 +29,8 @@ public class CachingDocumentProvider implements PreparsedDocumentProvider { /** * By default, it will try to use a {@link Caffeine} backed implementation if it's on the class * path otherwise it will become a non caching mechanism. + * + * @see CaffeineDocumentCache */ public CachingDocumentProvider() { this(new CaffeineDocumentCache()); @@ -37,6 +45,13 @@ public CachingDocumentProvider(DocumentCache documentCache) { this.documentCache = documentCache; } + /** + * @return the {@link DocumentCache} being used + */ + public DocumentCache getDocumentCache() { + return documentCache; + } + @Override public CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { if (documentCache.isNoop()) { diff --git a/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java index 3faf42bfbd..2f918936f2 100644 --- a/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java +++ b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCache.java @@ -33,6 +33,10 @@ public class CaffeineDocumentCache implements DocumentCache { } } + /** + * Creates a cache that works if Caffeine is on the class path otherwise its + * a no op. + */ public CaffeineDocumentCache() { this(isCaffeineAvailable); } @@ -64,7 +68,8 @@ public boolean isNoop() { return caffeineCacheObj == null; } - public void clear() { + @Override + public void invalidateAll() { if (!isNoop()) { cache().invalidateAll(); } diff --git a/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java index d32aee2f02..ef5a5d6454 100644 --- a/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java +++ b/src/main/java/graphql/execution/preparsed/caching/CaffeineDocumentCacheOptions.java @@ -5,18 +5,21 @@ import java.time.Duration; +/** + * This controls the default options that get use in the {@link CaffeineDocumentCache} creation + */ @PublicApi @NullMarked public class CaffeineDocumentCacheOptions { /** - * By default, we cache documents for 5 minutes + * By default, we expire documents after 5 minutes if they are not accessed */ public static final Duration EXPIRED_AFTER_ACCESS = Duration.ofMinutes(5); /** - * By default, we hold 1000 entries + * By default, we hold 500 operations */ - public static final int MAX_SIZE = 1000; + public static final int MAX_SIZE = 500; private static CaffeineDocumentCacheOptions defaultJvmOptions = newOptions() .expireAfterAccess(EXPIRED_AFTER_ACCESS) @@ -35,7 +38,7 @@ public static CaffeineDocumentCacheOptions getDefaultJvmOptions() { /** * This sets new JVM wide default options for the {@link CaffeineDocumentCache} * - * @param jvmOptions + * @param jvmOptions the options to use */ public static void setDefaultJvmOptions(CaffeineDocumentCacheOptions jvmOptions) { defaultJvmOptions = jvmOptions; diff --git a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java index 5fca34db76..b52903b988 100644 --- a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java +++ b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java @@ -31,6 +31,10 @@ public interface 
DocumentCache { */ boolean isNoop(); + /** + * Called to clear the cache. If your implementation doesn't support this, then just no op the method + */ + void invalidateAll(); /** * This represents the key to the document cache */ diff --git a/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy index bb0f308352..174851d7f6 100644 --- a/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy @@ -4,6 +4,7 @@ package graphql.execution.preparsed.caching import com.github.benmanes.caffeine.cache.Caffeine import com.github.benmanes.caffeine.cache.Ticker import graphql.ExecutionInput +import graphql.ExecutionResult import graphql.GraphQL import graphql.StarWarsSchema import graphql.execution.preparsed.PreparsedDocumentEntry @@ -44,6 +45,8 @@ class CachingDocumentProviderTest extends Specification { then: er.errors.isEmpty() er.data == [hero: [name: "R2-D2"]] + + cachingDocumentProvider.getDocumentCache() instanceof CaffeineDocumentCache } def "different outcomes are cached correctly integration test"() { @@ -125,21 +128,78 @@ class CachingDocumentProviderTest extends Specification { } def "integration still works when caffeine is not on the class path"() { + when: // we fake out the test here saying its NOT on the classpath def cache = new CaffeineDocumentCache(false) def cachingDocumentProvider = new CachingDocumentProvider(cache) GraphQL graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) .preparsedDocumentProvider(cachingDocumentProvider) .build() + def executionInput = newExecutionInput(heroQuery1) + .operationName("HeroNameQuery").build() + + ExecutionResult er = null + for (int i = 0; i < count; i++) { + er = graphQL.execute(executionInput) + assert er.data == [hero: [name: "R2-D2"]] + } + + + then: + er.errors.isEmpty() + er.data == [hero: [name: "R2-D2"]] + + where: + count || _ + 1 || _ + 5 || _ + } + + def "integration of a custom cache"() { when: + + def cache = new DocumentCache() { + // not really useful in production since its unbounded + def map = new HashMap() + + @Override + PreparsedDocumentEntry get(DocumentCache.DocumentCacheKey key, Function mappingFunction) { + return map.computeIfAbsent(key,mappingFunction) + } + + @Override + boolean isNoop() { + return false + } + + @Override + void invalidateAll() { + map.clear() + } + } + // a custom cache in play + def cachingDocumentProvider = new CachingDocumentProvider(cache) + GraphQL graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) + .preparsedDocumentProvider(cachingDocumentProvider) + .build() def executionInput = newExecutionInput(heroQuery1) .operationName("HeroNameQuery").build() - def er = graphQL.execute(executionInput) + ExecutionResult er = null + for (int i = 0; i < count; i++) { + er = graphQL.execute(executionInput) + assert er.data == [hero: [name: "R2-D2"]] + } + then: er.errors.isEmpty() er.data == [hero: [name: "R2-D2"]] + + where: + count || _ + 1 || _ + 5 || _ } class CountingDocProvider implements Function { @@ -178,7 +238,7 @@ class CachingDocumentProviderTest extends Specification { callback.count == 1 when: - cache.clear() + cache.invalidateAll() documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: @@ -214,7 +274,7 @@ class CachingDocumentProviderTest extends Specification { callback.count == 2 when: - cache.clear() + 
cache.invalidateAll() documentEntry = cachingDocumentProvider.getDocumentAsync(ei, callback).join() then: @@ -231,6 +291,8 @@ class CachingDocumentProviderTest extends Specification { .expireAfterAccess(Duration.ofMinutes(2)) . build() def documentCache = new CaffeineDocumentCache(caffeineCache) + + // note this is a custom caffeine instance pass in def cachingDocumentProvider = new CachingDocumentProvider(documentCache) def ei = newExecutionInput("query q { f }").build() From adea9a607723c7a264bbe678d9877ca86ec3f062 Mon Sep 17 00:00:00 2001 From: bbaker Date: Tue, 23 Sep 2025 15:53:39 +1000 Subject: [PATCH 6/8] Added option in graphql to turn off document caching --- src/main/java/graphql/GraphQL.java | 28 +++++++++++++++++++++- src/test/groovy/graphql/GraphQLTest.groovy | 24 +++++++++++++++++++ 2 files changed, 51 insertions(+), 1 deletion(-) diff --git a/src/main/java/graphql/GraphQL.java b/src/main/java/graphql/GraphQL.java index 1bbf6fdbbb..ef9e66f8fb 100644 --- a/src/main/java/graphql/GraphQL.java +++ b/src/main/java/graphql/GraphQL.java @@ -20,6 +20,7 @@ import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters; import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters; import graphql.execution.instrumentation.parameters.InstrumentationValidationParameters; +import graphql.execution.preparsed.NoOpPreparsedDocumentProvider; import graphql.execution.preparsed.PreparsedDocumentEntry; import graphql.execution.preparsed.PreparsedDocumentProvider; import graphql.execution.preparsed.caching.CachingDocumentProvider; @@ -279,7 +280,8 @@ public static class Builder { private DataFetcherExceptionHandler defaultExceptionHandler = new SimpleDataFetcherExceptionHandler(); private ExecutionIdProvider idProvider = DEFAULT_EXECUTION_ID_PROVIDER; private Instrumentation instrumentation = null; // deliberate default here - private PreparsedDocumentProvider preparsedDocumentProvider = new CachingDocumentProvider(); + private PreparsedDocumentProvider preparsedDocumentProvider = null; + private boolean doNotCacheOperationDocuments = false; private boolean doNotAutomaticallyDispatchDataLoader = false; private ValueUnboxer valueUnboxer = ValueUnboxer.DEFAULT; @@ -325,11 +327,32 @@ public Builder instrumentation(Instrumentation instrumentation) { return this; } + /** + * A {@link PreparsedDocumentProvider} allows you to provide a custom implementation of how + * operation documents are retrieved and possibly cached. 
By default, the inbuilt {@link CachingDocumentProvider} + * will be used, but you can replace that with your own implementation + * + * @param preparsedDocumentProvider the provider to use + * + * @return this builder + */ public Builder preparsedDocumentProvider(PreparsedDocumentProvider preparsedDocumentProvider) { this.preparsedDocumentProvider = assertNotNull(preparsedDocumentProvider, () -> "PreparsedDocumentProvider must be non null"); return this; } + + /** + * Deactivates the caching of operation documents via the inbuilt {@link graphql.execution.preparsed.caching.CachingDocumentProvider} + * If deactivated, no caching will be performed + * + * @return this builder + */ + public Builder doNotCacheOperationDocuments() { + this.doNotCacheOperationDocuments = true; + return this; + } + public Builder executionIdProvider(ExecutionIdProvider executionIdProvider) { this.idProvider = assertNotNull(executionIdProvider, () -> "ExecutionIdProvider must be non null"); return this; @@ -367,6 +390,9 @@ public GraphQL build() { if (instrumentation == null) { this.instrumentation = SimplePerformantInstrumentation.INSTANCE; } + if (preparsedDocumentProvider == null) { + preparsedDocumentProvider = (doNotCacheOperationDocuments ? NoOpPreparsedDocumentProvider.INSTANCE : new CachingDocumentProvider()); + } return new GraphQL(this); } } diff --git a/src/test/groovy/graphql/GraphQLTest.groovy b/src/test/groovy/graphql/GraphQLTest.groovy index 83a2f43aa7..c806c9330e 100644 --- a/src/test/groovy/graphql/GraphQLTest.groovy +++ b/src/test/groovy/graphql/GraphQLTest.groovy @@ -19,6 +19,8 @@ import graphql.execution.instrumentation.Instrumentation import graphql.execution.instrumentation.InstrumentationState import graphql.execution.instrumentation.SimplePerformantInstrumentation import graphql.execution.instrumentation.parameters.InstrumentationCreateStateParameters +import graphql.execution.preparsed.NoOpPreparsedDocumentProvider +import graphql.execution.preparsed.PreparsedDocumentProvider import graphql.execution.preparsed.caching.CachingDocumentProvider import graphql.language.SourceLocation import graphql.schema.DataFetcher @@ -1605,4 +1607,26 @@ many lines'''] !er.errors.isEmpty() er.errors[0].message.contains("Unknown operation named 'X'") } + + def "caching document provider is default unless they say otherwise"() { + when: + def graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema).build() + then: + graphQL.getPreparsedDocumentProvider() instanceof CachingDocumentProvider + + when: + graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema).doNotCacheOperationDocuments().build() + then: + graphQL.getPreparsedDocumentProvider() == NoOpPreparsedDocumentProvider.INSTANCE + + + PreparsedDocumentProvider customProvider = { ei, func -> func.apply(ei) } + when: + graphQL = GraphQL.newGraphQL(StarWarsSchema.starWarsSchema) + .doNotCacheOperationDocuments() // doesnt matter if they provide an implementation + .preparsedDocumentProvider(customProvider) + .build() + then: + graphQL.getPreparsedDocumentProvider() == customProvider + } } From 1e3785a9ef3f1f531f1aea7c9c97a72f3ce281be Mon Sep 17 00:00:00 2001 From: bbaker Date: Fri, 26 Sep 2025 09:13:25 +1000 Subject: [PATCH 7/8] Made graphql.execution.preparsed.NoOpPreparsedDocumentProvider api --- .../preparsed/NoOpPreparsedDocumentProvider.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java 
b/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java index 03a96776b6..2736adcab8 100644 --- a/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java +++ b/src/main/java/graphql/execution/preparsed/NoOpPreparsedDocumentProvider.java @@ -2,12 +2,17 @@ import graphql.ExecutionInput; -import graphql.Internal; +import graphql.PublicApi; +import org.jspecify.annotations.NullMarked; import java.util.concurrent.CompletableFuture; import java.util.function.Function; -@Internal +/** + * A {@link PreparsedDocumentProvider} that does nothing + */ +@PublicApi +@NullMarked public class NoOpPreparsedDocumentProvider implements PreparsedDocumentProvider { public static final NoOpPreparsedDocumentProvider INSTANCE = new NoOpPreparsedDocumentProvider(); From c42baa71fec551439fd3523a8a8f84e6647f1cf5 Mon Sep 17 00:00:00 2001 From: bbaker Date: Sun, 28 Sep 2025 18:59:14 +1000 Subject: [PATCH 8/8] Made the cache lookup be either async or not --- .../preparsed/caching/CachingDocumentProvider.java | 8 +++++--- .../execution/preparsed/caching/DocumentCache.java | 12 +++++++++--- .../caching/CachingDocumentProviderTest.groovy | 14 +++++++++----- 3 files changed, 23 insertions(+), 11 deletions(-) diff --git a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java index f7e02cbbdc..5c4d1e5d92 100644 --- a/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java +++ b/src/main/java/graphql/execution/preparsed/caching/CachingDocumentProvider.java @@ -10,6 +10,8 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import static graphql.execution.Async.toCompletableFuture; + /** * The CachingDocumentProvider allows previously parsed and validated operations to be cached and * hence re-used. This can lead to significant time savings, especially for large operations.
@@ -56,11 +58,11 @@ public DocumentCache getDocumentCache() { public CompletableFuture getDocumentAsync(ExecutionInput executionInput, Function parseAndValidateFunction) { if (documentCache.isNoop()) { // saves creating keys and doing a lookup that will just call this function anyway - return CompletableFuture.completedFuture(parseAndValidateFunction.apply(executionInput)); + return toCompletableFuture(parseAndValidateFunction.apply(executionInput)); } DocumentCache.DocumentCacheKey cacheKey = new DocumentCache.DocumentCacheKey(executionInput.getQuery(), executionInput.getOperationName(), executionInput.getLocale()); - PreparsedDocumentEntry cacheEntry = documentCache.get(cacheKey, key -> parseAndValidateFunction.apply(executionInput)); - return CompletableFuture.completedFuture(cacheEntry); + Object cacheEntry = documentCache.get(cacheKey, key -> parseAndValidateFunction.apply(executionInput)); + return toCompletableFuture(cacheEntry); } } diff --git a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java index b52903b988..f4231804da 100644 --- a/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java +++ b/src/main/java/graphql/execution/preparsed/caching/DocumentCache.java @@ -1,5 +1,6 @@ package graphql.execution.preparsed.caching; +import graphql.DuckTyped; import graphql.PublicApi; import graphql.execution.preparsed.PreparsedDocumentEntry; import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.Nullable; @@ -17,14 +18,18 @@ @NullMarked public interface DocumentCache { /** - * Called to get a document that has previously been parsed and validated. + * Called to get a document that has previously been parsed and validated. The return value of this method + * can be either a {@link PreparsedDocumentEntry} or a {@link java.util.concurrent.CompletableFuture} promise + * to a {@link PreparsedDocumentEntry}. This allows caches that are in memory to return direct values OR + * if the cache is distributed, it can return a promise to a value. * * @param key the cache key * @param mappingFunction if the value is missing in cache this function can be called to create a value * - * @return a non null document entry + * @return a non-null {@link PreparsedDocumentEntry} or a promise to one via a {@link java.util.concurrent.CompletableFuture} */ - PreparsedDocumentEntry get(DocumentCacheKey key, Function mappingFunction); + @DuckTyped(shape = "PreparsedDocumentEntry | CompletableFuture") + Object get(DocumentCacheKey key, Function mappingFunction); /** * @return true if the cache in fact does no caching otherwise false. This helps the implementation optimise how the cache is used or not. @@ -35,6 +40,7 @@ public interface DocumentCache { * Called to clear the cache.
If your implementation doesn't support this, then just no op the method */ void invalidateAll(); + /** * This represents the key to the document cache */ diff --git a/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy index 174851d7f6..8fa7553640 100644 --- a/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy +++ b/src/test/groovy/graphql/execution/preparsed/caching/CachingDocumentProviderTest.groovy @@ -1,6 +1,5 @@ package graphql.execution.preparsed.caching - import com.github.benmanes.caffeine.cache.Caffeine import com.github.benmanes.caffeine.cache.Ticker import graphql.ExecutionInput @@ -12,9 +11,11 @@ import graphql.parser.Parser import spock.lang.Specification import java.time.Duration +import java.util.concurrent.CompletableFuture import java.util.function.Function import static graphql.ExecutionInput.newExecutionInput +import static graphql.execution.preparsed.caching.DocumentCache.DocumentCacheKey class CachingDocumentProviderTest extends Specification { private String heroQuery1 @@ -160,11 +161,14 @@ class CachingDocumentProviderTest extends Specification { def cache = new DocumentCache() { // not really useful in production since its unbounded - def map = new HashMap() + def map = new HashMap() @Override - PreparsedDocumentEntry get(DocumentCache.DocumentCacheKey key, Function mappingFunction) { - return map.computeIfAbsent(key,mappingFunction) + Object get(DocumentCacheKey key, Function mappingFunction) { + // we can have async values + return CompletableFuture.supplyAsync { + return map.computeIfAbsent(key, mappingFunction) + } } @Override @@ -289,7 +293,7 @@ class CachingDocumentProviderTest extends Specification { def caffeineCache = Caffeine.newBuilder() .ticker(ticker) .expireAfterAccess(Duration.ofMinutes(2)) - . build() + . build() def documentCache = new CaffeineDocumentCache(caffeineCache) // note this is a custom caffeine instance pass in